1 /* Lowering pass for OMP directives. Converts OMP directives into explicit
2 calls to the runtime library (libgomp), data marshalling to implement data
3 sharing and copying clauses, offloading to accelerators, and more.
5 Contributed by Diego Novillo <dnovillo@redhat.com>
7 Copyright (C) 2005-2020 Free Software Foundation, Inc.
9 This file is part of GCC.
11 GCC is free software; you can redistribute it and/or modify it under
12 the terms of the GNU General Public License as published by the Free
13 Software Foundation; either version 3, or (at your option) any later
16 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
17 WARRANTY; without even the implied warranty of MERCHANTABILITY or
18 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
21 You should have received a copy of the GNU General Public License
22 along with GCC; see the file COPYING3. If not see
23 <http://www.gnu.org/licenses/>. */
27 #include "coretypes.h"
32 #include "tree-pass.h"
35 #include "pretty-print.h"
36 #include "diagnostic-core.h"
37 #include "fold-const.h"
38 #include "stor-layout.h"
39 #include "internal-fn.h"
40 #include "gimple-fold.h"
42 #include "gimple-iterator.h"
43 #include "gimplify-me.h"
44 #include "gimple-walk.h"
45 #include "tree-iterator.h"
46 #include "tree-inline.h"
47 #include "langhooks.h"
50 #include "splay-tree.h"
51 #include "omp-general.h"
53 #include "gimple-low.h"
54 #include "alloc-pool.h"
55 #include "symbol-summary.h"
56 #include "tree-nested.h"
58 #include "gomp-constants.h"
59 #include "gimple-pretty-print.h"
60 #include "stringpool.h"
63 /* Lowering of OMP parallel and workshare constructs proceeds in two
64 phases. The first phase scans the function looking for OMP statements
65 and then for variables that must be replaced to satisfy data sharing
66 clauses. The second phase expands code for the constructs, as well as
67 re-gimplifying things when variables have been replaced with complex expressions.
70 Final code generation is done by pass_expand_omp. The flowgraph is
71 scanned for regions which are then moved to a new
72 function, to be invoked by the thread library, or offloaded. */
74 /* Context structure. Used to store information about each parallel
75 directive in the code. */
79 /* This field must be at the beginning, as we do "inheritance": Some
80 callback functions for tree-inline.c (e.g., omp_copy_decl)
81 receive a copy_body_data pointer that is up-casted to an
82 omp_context pointer. */
85 /* The tree of contexts corresponding to the encountered constructs. */
86 struct omp_context
*outer
;
89 /* Map variables to fields in a structure that allows communication
90 between sending and receiving threads. */
96 /* These are used just by task contexts, if task firstprivate fn is
97 needed. srecord_type is used to communicate from the thread
98 that encountered the task construct to task firstprivate fn,
99 record_type is allocated by GOMP_task, initialized by task firstprivate
100 fn and passed to the task body fn. */
101 splay_tree sfield_map
;
104 /* A chain of variables to add to the top-level block surrounding the
105 construct. In the case of a parallel, this is in the child function. */
108 /* Label to which GOMP_cancel{,llation_point} and explicit and implicit
109 barriers should jump to during omplower pass. */
112 /* The sibling GIMPLE_OMP_FOR simd with _simt_ clause or NULL
116 /* For task reductions registered in this context, a vector containing
117 the length of the private copies block (if constant, otherwise NULL)
118 and then offsets (if constant, otherwise NULL) for each entry. */
119 vec
<tree
> task_reductions
;
121 /* A hash map from the reduction clauses to the registered array
123 hash_map
<tree
, unsigned> *task_reduction_map
;
125 /* And a hash map from the lastprivate(conditional:) variables to their
126 corresponding tracking loop iteration variables. */
127 hash_map
<tree
, tree
> *lastprivate_conditional_map
;
129 /* And a hash map from the allocate variables to their corresponding
131 hash_map
<tree
, tree
> *allocate_map
;
133 /* A tree_list of the reduction clauses in this context. This is
134 only used for checking the consistency of OpenACC reduction
135 clauses in scan_omp_for and is not guaranteed to contain a valid
136 value outside of this function. */
137 tree local_reduction_clauses
;
139 /* A tree_list of the reduction clauses in outer contexts. This is
140 only used for checking the consistency of OpenACC reduction
141 clauses in scan_omp_for and is not guaranteed to contain a valid
142 value outside of this function. */
143 tree outer_reduction_clauses
;
145 /* Nesting depth of this context. Used to beautify error messages re
146 invalid gotos. The outermost ctx is depth 1, with depth 0 being
147 reserved for the main body of the function. */
150 /* True if this parallel directive is nested within another. */
153 /* True if this construct can be cancelled. */
156 /* True if lower_omp_1 should look up lastprivate conditional in parent
158 bool combined_into_simd_safelen1
;
160 /* True if there is nested scan context with inclusive clause. */
163 /* True if there is nested scan context with exclusive clause. */
166 /* True in the second simd loop of for simd with inscan reductions. */
167 bool for_simd_scan_phase
;
169 /* True if there is order(concurrent) clause on the construct. */
170 bool order_concurrent
;
172 /* True if there is bind clause on the construct (i.e. a loop construct). */
176 static splay_tree all_contexts
;
177 static int taskreg_nesting_level
;
178 static int target_nesting_level
;
179 static bitmap task_shared_vars
;
180 static bitmap global_nonaddressable_vars
;
181 static vec
<omp_context
*> taskreg_contexts
;
183 static void scan_omp (gimple_seq
*, omp_context
*);
184 static tree
scan_omp_1_op (tree
*, int *, void *);
/* Statement codes whose sub-statements should be walked by generic
   gimple walkers; used inside switch (gimple_code (...)) bodies.  */
#define WALK_SUBSTMTS  \
    case GIMPLE_BIND: \
    case GIMPLE_TRY: \
    case GIMPLE_CATCH: \
    case GIMPLE_EH_FILTER: \
    case GIMPLE_TRANSACTION: \
      /* The sub-statements for these should be walked.  */ \
      *handled_ops_p = false; \
      break;
196 /* Return true if CTX corresponds to an OpenACC 'parallel' or 'serial'
200 is_oacc_parallel_or_serial (omp_context
*ctx
)
202 enum gimple_code outer_type
= gimple_code (ctx
->stmt
);
203 return ((outer_type
== GIMPLE_OMP_TARGET
)
204 && ((gimple_omp_target_kind (ctx
->stmt
)
205 == GF_OMP_TARGET_KIND_OACC_PARALLEL
)
206 || (gimple_omp_target_kind (ctx
->stmt
)
207 == GF_OMP_TARGET_KIND_OACC_SERIAL
)));
210 /* Return true if CTX corresponds to an oacc kernels region. */
213 is_oacc_kernels (omp_context
*ctx
)
215 enum gimple_code outer_type
= gimple_code (ctx
->stmt
);
216 return ((outer_type
== GIMPLE_OMP_TARGET
)
217 && (gimple_omp_target_kind (ctx
->stmt
)
218 == GF_OMP_TARGET_KIND_OACC_KERNELS
));
221 /* Return true if STMT corresponds to an OpenMP target region. */
223 is_omp_target (gimple
*stmt
)
225 if (gimple_code (stmt
) == GIMPLE_OMP_TARGET
)
227 int kind
= gimple_omp_target_kind (stmt
);
228 return (kind
== GF_OMP_TARGET_KIND_REGION
229 || kind
== GF_OMP_TARGET_KIND_DATA
230 || kind
== GF_OMP_TARGET_KIND_ENTER_DATA
231 || kind
== GF_OMP_TARGET_KIND_EXIT_DATA
);
236 /* If DECL is the artificial dummy VAR_DECL created for non-static
237 data member privatization, return the underlying "this" parameter,
238 otherwise return NULL. */
241 omp_member_access_dummy_var (tree decl
)
244 || !DECL_ARTIFICIAL (decl
)
245 || !DECL_IGNORED_P (decl
)
246 || !DECL_HAS_VALUE_EXPR_P (decl
)
247 || !lang_hooks
.decls
.omp_disregard_value_expr (decl
, false))
250 tree v
= DECL_VALUE_EXPR (decl
);
251 if (TREE_CODE (v
) != COMPONENT_REF
)
255 switch (TREE_CODE (v
))
261 case POINTER_PLUS_EXPR
:
262 v
= TREE_OPERAND (v
, 0);
265 if (DECL_CONTEXT (v
) == current_function_decl
266 && DECL_ARTIFICIAL (v
)
267 && TREE_CODE (TREE_TYPE (v
)) == POINTER_TYPE
)
275 /* Helper for unshare_and_remap, called through walk_tree. */
278 unshare_and_remap_1 (tree
*tp
, int *walk_subtrees
, void *data
)
280 tree
*pair
= (tree
*) data
;
283 *tp
= unshare_expr (pair
[1]);
286 else if (IS_TYPE_OR_DECL_P (*tp
))
291 /* Return unshare_expr (X) with all occurrences of FROM
295 unshare_and_remap (tree x
, tree from
, tree to
)
297 tree pair
[2] = { from
, to
};
298 x
= unshare_expr (x
);
299 walk_tree (&x
, unshare_and_remap_1
, pair
, NULL
);
303 /* Convenience function for calling scan_omp_1_op on tree operands. */
306 scan_omp_op (tree
*tp
, omp_context
*ctx
)
308 struct walk_stmt_info wi
;
310 memset (&wi
, 0, sizeof (wi
));
312 wi
.want_locations
= true;
314 return walk_tree (tp
, scan_omp_1_op
, &wi
, NULL
);
317 static void lower_omp (gimple_seq
*, omp_context
*);
318 static tree
lookup_decl_in_outer_ctx (tree
, omp_context
*);
319 static tree
maybe_lookup_decl_in_outer_ctx (tree
, omp_context
*);
321 /* Return true if CTX is for an omp parallel. */
324 is_parallel_ctx (omp_context
*ctx
)
326 return gimple_code (ctx
->stmt
) == GIMPLE_OMP_PARALLEL
;
330 /* Return true if CTX is for an omp task. */
333 is_task_ctx (omp_context
*ctx
)
335 return gimple_code (ctx
->stmt
) == GIMPLE_OMP_TASK
;
339 /* Return true if CTX is for an omp taskloop. */
342 is_taskloop_ctx (omp_context
*ctx
)
344 return gimple_code (ctx
->stmt
) == GIMPLE_OMP_FOR
345 && gimple_omp_for_kind (ctx
->stmt
) == GF_OMP_FOR_KIND_TASKLOOP
;
349 /* Return true if CTX is for a host omp teams. */
352 is_host_teams_ctx (omp_context
*ctx
)
354 return gimple_code (ctx
->stmt
) == GIMPLE_OMP_TEAMS
355 && gimple_omp_teams_host (as_a
<gomp_teams
*> (ctx
->stmt
));
358 /* Return true if CTX is for an omp parallel or omp task or host omp teams
359 (the last one is strictly not a task region in OpenMP speak, but we
360 need to treat it similarly). */
363 is_taskreg_ctx (omp_context
*ctx
)
365 return is_parallel_ctx (ctx
) || is_task_ctx (ctx
) || is_host_teams_ctx (ctx
);
368 /* Return true if EXPR is variable sized. */
371 is_variable_sized (const_tree expr
)
373 return !TREE_CONSTANT (TYPE_SIZE_UNIT (TREE_TYPE (expr
)));
376 /* Lookup variables. The "maybe" form
377 allows for the variable form to not have been entered, otherwise we
378 assert that the variable must have been entered. */
381 lookup_decl (tree var
, omp_context
*ctx
)
383 tree
*n
= ctx
->cb
.decl_map
->get (var
);
388 maybe_lookup_decl (const_tree var
, omp_context
*ctx
)
390 tree
*n
= ctx
->cb
.decl_map
->get (const_cast<tree
> (var
));
391 return n
? *n
: NULL_TREE
;
395 lookup_field (tree var
, omp_context
*ctx
)
398 n
= splay_tree_lookup (ctx
->field_map
, (splay_tree_key
) var
);
399 return (tree
) n
->value
;
403 lookup_sfield (splay_tree_key key
, omp_context
*ctx
)
406 n
= splay_tree_lookup (ctx
->sfield_map
407 ? ctx
->sfield_map
: ctx
->field_map
, key
);
408 return (tree
) n
->value
;
412 lookup_sfield (tree var
, omp_context
*ctx
)
414 return lookup_sfield ((splay_tree_key
) var
, ctx
);
418 maybe_lookup_field (splay_tree_key key
, omp_context
*ctx
)
421 n
= splay_tree_lookup (ctx
->field_map
, key
);
422 return n
? (tree
) n
->value
: NULL_TREE
;
426 maybe_lookup_field (tree var
, omp_context
*ctx
)
428 return maybe_lookup_field ((splay_tree_key
) var
, ctx
);
431 /* Return true if DECL should be copied by pointer. SHARED_CTX is
432 the parallel context if DECL is to be shared. */
435 use_pointer_for_field (tree decl
, omp_context
*shared_ctx
)
437 if (AGGREGATE_TYPE_P (TREE_TYPE (decl
))
438 || TYPE_ATOMIC (TREE_TYPE (decl
)))
441 /* We can only use copy-in/copy-out semantics for shared variables
442 when we know the value is not accessible from an outer scope. */
445 gcc_assert (!is_gimple_omp_oacc (shared_ctx
->stmt
));
447 /* ??? Trivially accessible from anywhere. But why would we even
448 be passing an address in this case? Should we simply assert
449 this to be false, or should we have a cleanup pass that removes
450 these from the list of mappings? */
451 if (is_global_var (maybe_lookup_decl_in_outer_ctx (decl
, shared_ctx
)))
454 /* For variables with DECL_HAS_VALUE_EXPR_P set, we cannot tell
455 without analyzing the expression whether or not its location
456 is accessible to anyone else. In the case of nested parallel
457 regions it certainly may be. */
458 if (TREE_CODE (decl
) != RESULT_DECL
&& DECL_HAS_VALUE_EXPR_P (decl
))
461 /* Do not use copy-in/copy-out for variables that have their
463 if (is_global_var (decl
))
465 /* For file scope vars, track whether we've seen them as
466 non-addressable initially and in that case, keep the same
467 answer for the duration of the pass, even when they are made
468 addressable later on e.g. through reduction expansion. Global
469 variables which weren't addressable before the pass will not
470 have their privatized copies address taken. See PR91216. */
471 if (!TREE_ADDRESSABLE (decl
))
473 if (!global_nonaddressable_vars
)
474 global_nonaddressable_vars
= BITMAP_ALLOC (NULL
);
475 bitmap_set_bit (global_nonaddressable_vars
, DECL_UID (decl
));
477 else if (!global_nonaddressable_vars
478 || !bitmap_bit_p (global_nonaddressable_vars
,
482 else if (TREE_ADDRESSABLE (decl
))
485 /* lower_send_shared_vars only uses copy-in, but not copy-out
487 if (TREE_READONLY (decl
)
488 || ((TREE_CODE (decl
) == RESULT_DECL
489 || TREE_CODE (decl
) == PARM_DECL
)
490 && DECL_BY_REFERENCE (decl
)))
493 /* Disallow copy-in/out in nested parallel if
494 decl is shared in outer parallel, otherwise
495 each thread could store the shared variable
496 in its own copy-in location, making the
497 variable no longer really shared. */
498 if (shared_ctx
->is_nested
)
502 for (up
= shared_ctx
->outer
; up
; up
= up
->outer
)
503 if ((is_taskreg_ctx (up
)
504 || (gimple_code (up
->stmt
) == GIMPLE_OMP_TARGET
505 && is_gimple_omp_offloaded (up
->stmt
)))
506 && maybe_lookup_decl (decl
, up
))
513 if (gimple_code (up
->stmt
) == GIMPLE_OMP_TARGET
)
515 for (c
= gimple_omp_target_clauses (up
->stmt
);
516 c
; c
= OMP_CLAUSE_CHAIN (c
))
517 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_MAP
518 && OMP_CLAUSE_DECL (c
) == decl
)
522 for (c
= gimple_omp_taskreg_clauses (up
->stmt
);
523 c
; c
= OMP_CLAUSE_CHAIN (c
))
524 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_SHARED
525 && OMP_CLAUSE_DECL (c
) == decl
)
529 goto maybe_mark_addressable_and_ret
;
533 /* For tasks avoid using copy-in/out. As tasks can be
534 deferred or executed in different thread, when GOMP_task
535 returns, the task hasn't necessarily terminated. */
536 if (is_task_ctx (shared_ctx
))
539 maybe_mark_addressable_and_ret
:
540 outer
= maybe_lookup_decl_in_outer_ctx (decl
, shared_ctx
);
541 if (is_gimple_reg (outer
) && !omp_member_access_dummy_var (outer
))
543 /* Taking address of OUTER in lower_send_shared_vars
544 might need regimplification of everything that uses the
546 if (!task_shared_vars
)
547 task_shared_vars
= BITMAP_ALLOC (NULL
);
548 bitmap_set_bit (task_shared_vars
, DECL_UID (outer
));
549 TREE_ADDRESSABLE (outer
) = 1;
558 /* Construct a new automatic decl similar to VAR. */
561 omp_copy_decl_2 (tree var
, tree name
, tree type
, omp_context
*ctx
)
563 tree copy
= copy_var_decl (var
, name
, type
);
565 DECL_CONTEXT (copy
) = current_function_decl
;
566 DECL_CHAIN (copy
) = ctx
->block_vars
;
567 /* If VAR is listed in task_shared_vars, it means it wasn't
568 originally addressable and is just because task needs to take
569 it's address. But we don't need to take address of privatizations
571 if (TREE_ADDRESSABLE (var
)
572 && ((task_shared_vars
573 && bitmap_bit_p (task_shared_vars
, DECL_UID (var
)))
574 || (global_nonaddressable_vars
575 && bitmap_bit_p (global_nonaddressable_vars
, DECL_UID (var
)))))
576 TREE_ADDRESSABLE (copy
) = 0;
577 ctx
->block_vars
= copy
;
583 omp_copy_decl_1 (tree var
, omp_context
*ctx
)
585 return omp_copy_decl_2 (var
, DECL_NAME (var
), TREE_TYPE (var
), ctx
);
588 /* Build COMPONENT_REF and set TREE_THIS_VOLATILE and TREE_READONLY on it
591 omp_build_component_ref (tree obj
, tree field
)
593 tree ret
= build3 (COMPONENT_REF
, TREE_TYPE (field
), obj
, field
, NULL
);
594 if (TREE_THIS_VOLATILE (field
))
595 TREE_THIS_VOLATILE (ret
) |= 1;
596 if (TREE_READONLY (field
))
597 TREE_READONLY (ret
) |= 1;
601 /* Build tree nodes to access the field for VAR on the receiver side. */
604 build_receiver_ref (tree var
, bool by_ref
, omp_context
*ctx
)
606 tree x
, field
= lookup_field (var
, ctx
);
608 /* If the receiver record type was remapped in the child function,
609 remap the field into the new record type. */
610 x
= maybe_lookup_field (field
, ctx
);
614 x
= build_simple_mem_ref (ctx
->receiver_decl
);
615 TREE_THIS_NOTRAP (x
) = 1;
616 x
= omp_build_component_ref (x
, field
);
619 x
= build_simple_mem_ref (x
);
620 TREE_THIS_NOTRAP (x
) = 1;
626 /* Build tree nodes to access VAR in the scope outer to CTX. In the case
627 of a parallel, this is a component reference; for workshare constructs
628 this is some variable. */
631 build_outer_var_ref (tree var
, omp_context
*ctx
,
632 enum omp_clause_code code
= OMP_CLAUSE_ERROR
)
635 omp_context
*outer
= ctx
->outer
;
636 while (outer
&& gimple_code (outer
->stmt
) == GIMPLE_OMP_TASKGROUP
)
637 outer
= outer
->outer
;
639 if (is_global_var (maybe_lookup_decl_in_outer_ctx (var
, ctx
)))
641 else if (is_variable_sized (var
))
643 x
= TREE_OPERAND (DECL_VALUE_EXPR (var
), 0);
644 x
= build_outer_var_ref (x
, ctx
, code
);
645 x
= build_simple_mem_ref (x
);
647 else if (is_taskreg_ctx (ctx
))
649 bool by_ref
= use_pointer_for_field (var
, NULL
);
650 x
= build_receiver_ref (var
, by_ref
, ctx
);
652 else if ((gimple_code (ctx
->stmt
) == GIMPLE_OMP_FOR
653 && gimple_omp_for_kind (ctx
->stmt
) == GF_OMP_FOR_KIND_SIMD
)
655 || (code
== OMP_CLAUSE_PRIVATE
656 && (gimple_code (ctx
->stmt
) == GIMPLE_OMP_FOR
657 || gimple_code (ctx
->stmt
) == GIMPLE_OMP_SECTIONS
658 || gimple_code (ctx
->stmt
) == GIMPLE_OMP_SINGLE
)))
660 /* #pragma omp simd isn't a worksharing construct, and can reference
661 even private vars in its linear etc. clauses.
662 Similarly for OMP_CLAUSE_PRIVATE with outer ref, that can refer
663 to private vars in all worksharing constructs. */
665 if (outer
&& is_taskreg_ctx (outer
))
666 x
= lookup_decl (var
, outer
);
668 x
= maybe_lookup_decl_in_outer_ctx (var
, ctx
);
672 else if (code
== OMP_CLAUSE_LASTPRIVATE
&& is_taskloop_ctx (ctx
))
676 = splay_tree_lookup (outer
->field_map
,
677 (splay_tree_key
) &DECL_UID (var
));
680 if (is_global_var (maybe_lookup_decl_in_outer_ctx (var
, outer
)))
683 x
= lookup_decl (var
, outer
);
687 tree field
= (tree
) n
->value
;
688 /* If the receiver record type was remapped in the child function,
689 remap the field into the new record type. */
690 x
= maybe_lookup_field (field
, outer
);
694 x
= build_simple_mem_ref (outer
->receiver_decl
);
695 x
= omp_build_component_ref (x
, field
);
696 if (use_pointer_for_field (var
, outer
))
697 x
= build_simple_mem_ref (x
);
701 x
= lookup_decl (var
, outer
);
702 else if (omp_is_reference (var
))
703 /* This can happen with orphaned constructs. If var is reference, it is
704 possible it is shared and as such valid. */
706 else if (omp_member_access_dummy_var (var
))
713 tree t
= omp_member_access_dummy_var (var
);
716 x
= DECL_VALUE_EXPR (var
);
717 tree o
= maybe_lookup_decl_in_outer_ctx (t
, ctx
);
719 x
= unshare_and_remap (x
, t
, o
);
721 x
= unshare_expr (x
);
725 if (omp_is_reference (var
))
726 x
= build_simple_mem_ref (x
);
731 /* Build tree nodes to access the field for VAR on the sender side. */
734 build_sender_ref (splay_tree_key key
, omp_context
*ctx
)
736 tree field
= lookup_sfield (key
, ctx
);
737 return omp_build_component_ref (ctx
->sender_decl
, field
);
741 build_sender_ref (tree var
, omp_context
*ctx
)
743 return build_sender_ref ((splay_tree_key
) var
, ctx
);
746 /* Add a new field for VAR inside the structure CTX->SENDER_DECL. If
747 BASE_POINTERS_RESTRICT, declare the field with restrict. */
750 install_var_field (tree var
, bool by_ref
, int mask
, omp_context
*ctx
)
752 tree field
, type
, sfield
= NULL_TREE
;
753 splay_tree_key key
= (splay_tree_key
) var
;
755 if ((mask
& 16) != 0)
757 key
= (splay_tree_key
) &DECL_NAME (var
);
758 gcc_checking_assert (key
!= (splay_tree_key
) var
);
762 key
= (splay_tree_key
) &DECL_UID (var
);
763 gcc_checking_assert (key
!= (splay_tree_key
) var
);
765 gcc_assert ((mask
& 1) == 0
766 || !splay_tree_lookup (ctx
->field_map
, key
));
767 gcc_assert ((mask
& 2) == 0 || !ctx
->sfield_map
768 || !splay_tree_lookup (ctx
->sfield_map
, key
));
769 gcc_assert ((mask
& 3) == 3
770 || !is_gimple_omp_oacc (ctx
->stmt
));
772 type
= TREE_TYPE (var
);
773 if ((mask
& 16) != 0)
774 type
= lang_hooks
.decls
.omp_array_data (var
, true);
776 /* Prevent redeclaring the var in the split-off function with a restrict
777 pointer type. Note that we only clear type itself, restrict qualifiers in
778 the pointed-to type will be ignored by points-to analysis. */
779 if (POINTER_TYPE_P (type
)
780 && TYPE_RESTRICT (type
))
781 type
= build_qualified_type (type
, TYPE_QUALS (type
) & ~TYPE_QUAL_RESTRICT
);
785 gcc_assert (TREE_CODE (type
) == ARRAY_TYPE
);
786 type
= build_pointer_type (build_pointer_type (type
));
789 type
= build_pointer_type (type
);
790 else if ((mask
& 3) == 1 && omp_is_reference (var
))
791 type
= TREE_TYPE (type
);
793 field
= build_decl (DECL_SOURCE_LOCATION (var
),
794 FIELD_DECL
, DECL_NAME (var
), type
);
796 /* Remember what variable this field was created for. This does have a
797 side effect of making dwarf2out ignore this member, so for helpful
798 debugging we clear it later in delete_omp_context. */
799 DECL_ABSTRACT_ORIGIN (field
) = var
;
800 if ((mask
& 16) == 0 && type
== TREE_TYPE (var
))
802 SET_DECL_ALIGN (field
, DECL_ALIGN (var
));
803 DECL_USER_ALIGN (field
) = DECL_USER_ALIGN (var
);
804 TREE_THIS_VOLATILE (field
) = TREE_THIS_VOLATILE (var
);
807 SET_DECL_ALIGN (field
, TYPE_ALIGN (type
));
811 insert_field_into_struct (ctx
->record_type
, field
);
812 if (ctx
->srecord_type
)
814 sfield
= build_decl (DECL_SOURCE_LOCATION (var
),
815 FIELD_DECL
, DECL_NAME (var
), type
);
816 DECL_ABSTRACT_ORIGIN (sfield
) = var
;
817 SET_DECL_ALIGN (sfield
, DECL_ALIGN (field
));
818 DECL_USER_ALIGN (sfield
) = DECL_USER_ALIGN (field
);
819 TREE_THIS_VOLATILE (sfield
) = TREE_THIS_VOLATILE (field
);
820 insert_field_into_struct (ctx
->srecord_type
, sfield
);
825 if (ctx
->srecord_type
== NULL_TREE
)
829 ctx
->srecord_type
= lang_hooks
.types
.make_type (RECORD_TYPE
);
830 ctx
->sfield_map
= splay_tree_new (splay_tree_compare_pointers
, 0, 0);
831 for (t
= TYPE_FIELDS (ctx
->record_type
); t
; t
= TREE_CHAIN (t
))
833 sfield
= build_decl (DECL_SOURCE_LOCATION (t
),
834 FIELD_DECL
, DECL_NAME (t
), TREE_TYPE (t
));
835 DECL_ABSTRACT_ORIGIN (sfield
) = DECL_ABSTRACT_ORIGIN (t
);
836 insert_field_into_struct (ctx
->srecord_type
, sfield
);
837 splay_tree_insert (ctx
->sfield_map
,
838 (splay_tree_key
) DECL_ABSTRACT_ORIGIN (t
),
839 (splay_tree_value
) sfield
);
843 insert_field_into_struct ((mask
& 1) ? ctx
->record_type
844 : ctx
->srecord_type
, field
);
848 splay_tree_insert (ctx
->field_map
, key
, (splay_tree_value
) field
);
849 if ((mask
& 2) && ctx
->sfield_map
)
850 splay_tree_insert (ctx
->sfield_map
, key
, (splay_tree_value
) sfield
);
854 install_var_local (tree var
, omp_context
*ctx
)
856 tree new_var
= omp_copy_decl_1 (var
, ctx
);
857 insert_decl_map (&ctx
->cb
, var
, new_var
);
861 /* Adjust the replacement for DECL in CTX for the new context. This means
862 copying the DECL_VALUE_EXPR, and fixing up the type. */
865 fixup_remapped_decl (tree decl
, omp_context
*ctx
, bool private_debug
)
869 new_decl
= lookup_decl (decl
, ctx
);
871 TREE_TYPE (new_decl
) = remap_type (TREE_TYPE (decl
), &ctx
->cb
);
873 if ((!TREE_CONSTANT (DECL_SIZE (new_decl
)) || private_debug
)
874 && DECL_HAS_VALUE_EXPR_P (decl
))
876 tree ve
= DECL_VALUE_EXPR (decl
);
877 walk_tree (&ve
, copy_tree_body_r
, &ctx
->cb
, NULL
);
878 SET_DECL_VALUE_EXPR (new_decl
, ve
);
879 DECL_HAS_VALUE_EXPR_P (new_decl
) = 1;
882 if (!TREE_CONSTANT (DECL_SIZE (new_decl
)))
884 size
= remap_decl (DECL_SIZE (decl
), &ctx
->cb
);
885 if (size
== error_mark_node
)
886 size
= TYPE_SIZE (TREE_TYPE (new_decl
));
887 DECL_SIZE (new_decl
) = size
;
889 size
= remap_decl (DECL_SIZE_UNIT (decl
), &ctx
->cb
);
890 if (size
== error_mark_node
)
891 size
= TYPE_SIZE_UNIT (TREE_TYPE (new_decl
));
892 DECL_SIZE_UNIT (new_decl
) = size
;
896 /* The callback for remap_decl. Search all containing contexts for a
897 mapping of the variable; this avoids having to duplicate the splay
898 tree ahead of time. We know a mapping doesn't already exist in the
899 given context. Create new mappings to implement default semantics. */
902 omp_copy_decl (tree var
, copy_body_data
*cb
)
904 omp_context
*ctx
= (omp_context
*) cb
;
907 if (TREE_CODE (var
) == LABEL_DECL
)
909 if (FORCED_LABEL (var
) || DECL_NONLOCAL (var
))
911 new_var
= create_artificial_label (DECL_SOURCE_LOCATION (var
));
912 DECL_CONTEXT (new_var
) = current_function_decl
;
913 insert_decl_map (&ctx
->cb
, var
, new_var
);
917 while (!is_taskreg_ctx (ctx
))
922 new_var
= maybe_lookup_decl (var
, ctx
);
927 if (is_global_var (var
) || decl_function_context (var
) != ctx
->cb
.src_fn
)
930 return error_mark_node
;
933 /* Create a new context, with OUTER_CTX being the surrounding context. */
936 new_omp_context (gimple
*stmt
, omp_context
*outer_ctx
)
938 omp_context
*ctx
= XCNEW (omp_context
);
940 splay_tree_insert (all_contexts
, (splay_tree_key
) stmt
,
941 (splay_tree_value
) ctx
);
946 ctx
->outer
= outer_ctx
;
947 ctx
->cb
= outer_ctx
->cb
;
948 ctx
->cb
.block
= NULL
;
949 ctx
->depth
= outer_ctx
->depth
+ 1;
953 ctx
->cb
.src_fn
= current_function_decl
;
954 ctx
->cb
.dst_fn
= current_function_decl
;
955 ctx
->cb
.src_node
= cgraph_node::get (current_function_decl
);
956 gcc_checking_assert (ctx
->cb
.src_node
);
957 ctx
->cb
.dst_node
= ctx
->cb
.src_node
;
958 ctx
->cb
.src_cfun
= cfun
;
959 ctx
->cb
.copy_decl
= omp_copy_decl
;
960 ctx
->cb
.eh_lp_nr
= 0;
961 ctx
->cb
.transform_call_graph_edges
= CB_CGE_MOVE
;
962 ctx
->cb
.adjust_array_error_bounds
= true;
963 ctx
->cb
.dont_remap_vla_if_no_change
= true;
967 ctx
->cb
.decl_map
= new hash_map
<tree
, tree
>;
972 static gimple_seq
maybe_catch_exception (gimple_seq
);
974 /* Finalize task copyfn. */
977 finalize_task_copyfn (gomp_task
*task_stmt
)
979 struct function
*child_cfun
;
981 gimple_seq seq
= NULL
, new_seq
;
984 child_fn
= gimple_omp_task_copy_fn (task_stmt
);
985 if (child_fn
== NULL_TREE
)
988 child_cfun
= DECL_STRUCT_FUNCTION (child_fn
);
989 DECL_STRUCT_FUNCTION (child_fn
)->curr_properties
= cfun
->curr_properties
;
991 push_cfun (child_cfun
);
992 bind
= gimplify_body (child_fn
, false);
993 gimple_seq_add_stmt (&seq
, bind
);
994 new_seq
= maybe_catch_exception (seq
);
997 bind
= gimple_build_bind (NULL
, new_seq
, NULL
);
999 gimple_seq_add_stmt (&seq
, bind
);
1001 gimple_set_body (child_fn
, seq
);
1004 /* Inform the callgraph about the new function. */
1005 cgraph_node
*node
= cgraph_node::get_create (child_fn
);
1006 node
->parallelized_function
= 1;
1007 cgraph_node::add_new_function (child_fn
, false);
1010 /* Destroy a omp_context data structures. Called through the splay tree
1011 value delete callback. */
1014 delete_omp_context (splay_tree_value value
)
1016 omp_context
*ctx
= (omp_context
*) value
;
1018 delete ctx
->cb
.decl_map
;
1021 splay_tree_delete (ctx
->field_map
);
1022 if (ctx
->sfield_map
)
1023 splay_tree_delete (ctx
->sfield_map
);
1025 /* We hijacked DECL_ABSTRACT_ORIGIN earlier. We need to clear it before
1026 it produces corrupt debug information. */
1027 if (ctx
->record_type
)
1030 for (t
= TYPE_FIELDS (ctx
->record_type
); t
; t
= DECL_CHAIN (t
))
1031 DECL_ABSTRACT_ORIGIN (t
) = NULL
;
1033 if (ctx
->srecord_type
)
1036 for (t
= TYPE_FIELDS (ctx
->srecord_type
); t
; t
= DECL_CHAIN (t
))
1037 DECL_ABSTRACT_ORIGIN (t
) = NULL
;
1040 if (is_task_ctx (ctx
))
1041 finalize_task_copyfn (as_a
<gomp_task
*> (ctx
->stmt
));
1043 if (ctx
->task_reduction_map
)
1045 ctx
->task_reductions
.release ();
1046 delete ctx
->task_reduction_map
;
1049 delete ctx
->lastprivate_conditional_map
;
1050 delete ctx
->allocate_map
;
1055 /* Fix up RECEIVER_DECL with a type that has been remapped to the child
1059 fixup_child_record_type (omp_context
*ctx
)
1061 tree f
, type
= ctx
->record_type
;
1063 if (!ctx
->receiver_decl
)
1065 /* ??? It isn't sufficient to just call remap_type here, because
1066 variably_modified_type_p doesn't work the way we expect for
1067 record types. Testing each field for whether it needs remapping
1068 and creating a new record by hand works, however. */
1069 for (f
= TYPE_FIELDS (type
); f
; f
= DECL_CHAIN (f
))
1070 if (variably_modified_type_p (TREE_TYPE (f
), ctx
->cb
.src_fn
))
1074 tree name
, new_fields
= NULL
;
1076 type
= lang_hooks
.types
.make_type (RECORD_TYPE
);
1077 name
= DECL_NAME (TYPE_NAME (ctx
->record_type
));
1078 name
= build_decl (DECL_SOURCE_LOCATION (ctx
->receiver_decl
),
1079 TYPE_DECL
, name
, type
);
1080 TYPE_NAME (type
) = name
;
1082 for (f
= TYPE_FIELDS (ctx
->record_type
); f
; f
= DECL_CHAIN (f
))
1084 tree new_f
= copy_node (f
);
1085 DECL_CONTEXT (new_f
) = type
;
1086 TREE_TYPE (new_f
) = remap_type (TREE_TYPE (f
), &ctx
->cb
);
1087 DECL_CHAIN (new_f
) = new_fields
;
1088 walk_tree (&DECL_SIZE (new_f
), copy_tree_body_r
, &ctx
->cb
, NULL
);
1089 walk_tree (&DECL_SIZE_UNIT (new_f
), copy_tree_body_r
,
1091 walk_tree (&DECL_FIELD_OFFSET (new_f
), copy_tree_body_r
,
1095 /* Arrange to be able to look up the receiver field
1096 given the sender field. */
1097 splay_tree_insert (ctx
->field_map
, (splay_tree_key
) f
,
1098 (splay_tree_value
) new_f
);
1100 TYPE_FIELDS (type
) = nreverse (new_fields
);
1104 /* In a target region we never modify any of the pointers in *.omp_data_i,
1105 so attempt to help the optimizers. */
1106 if (is_gimple_omp_offloaded (ctx
->stmt
))
1107 type
= build_qualified_type (type
, TYPE_QUAL_CONST
);
1109 TREE_TYPE (ctx
->receiver_decl
)
1110 = build_qualified_type (build_reference_type (type
), TYPE_QUAL_RESTRICT
);
1113 /* Instantiate decls as necessary in CTX to satisfy the data sharing
1114 specified by CLAUSES. */
1117 scan_sharing_clauses (tree clauses
, omp_context
*ctx
)
1120 bool scan_array_reductions
= false;
/* Pass 0: record allocate clauses that use a non-default allocator in
   ctx->allocate_map, keyed by the allocated decl.  Task contexts are
   skipped for now (see the "For now" comment below).  */
1122 for (c
= clauses
; c
; c
= OMP_CLAUSE_CHAIN (c
))
1123 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_ALLOCATE
1124 && (OMP_CLAUSE_ALLOCATE_ALLOCATOR (c
) == NULL_TREE
1125 /* omp_default_mem_alloc is 1 */
1126 || !integer_onep (OMP_CLAUSE_ALLOCATE_ALLOCATOR (c
))))
1128 if (is_task_ctx (ctx
))
1129 continue; /* For now. */
1130 if (ctx
->allocate_map
== NULL
)
1131 ctx
->allocate_map
= new hash_map
<tree
, tree
>;
/* A NULL allocator is recorded as integer_zero_node so the map value is
   never NULL_TREE.  */
1132 ctx
->allocate_map
->put (OMP_CLAUSE_DECL (c
),
1133 OMP_CLAUSE_ALLOCATE_ALLOCATOR (c
)
1134 ? OMP_CLAUSE_ALLOCATE_ALLOCATOR (c
)
1135 : integer_zero_node
);
/* Pass 1: for each clause, install the fields in the sender/receiver
   record types and the local replacement variables in CTX.  */
1138 for (c
= clauses
; c
; c
= OMP_CLAUSE_CHAIN (c
))
1142 switch (OMP_CLAUSE_CODE (c
))
1144 case OMP_CLAUSE_PRIVATE
:
1145 decl
= OMP_CLAUSE_DECL (c
);
1146 if (OMP_CLAUSE_PRIVATE_OUTER_REF (c
))
1148 else if (!is_variable_sized (decl
))
1149 install_var_local (decl
, ctx
);
1152 case OMP_CLAUSE_SHARED
:
1153 decl
= OMP_CLAUSE_DECL (c
);
/* A decl that is shared is not privatized, so any allocate clause
   recorded for it in pass 0 does not apply.  */
1154 if (ctx
->allocate_map
&& ctx
->allocate_map
->get (decl
))
1155 ctx
->allocate_map
->remove (decl
);
1156 /* Ignore shared directives in teams construct inside of
1157 target construct. */
1158 if (gimple_code (ctx
->stmt
) == GIMPLE_OMP_TEAMS
1159 && !is_host_teams_ctx (ctx
))
1161 /* Global variables don't need to be copied,
1162 the receiver side will use them directly. */
1163 tree odecl
= maybe_lookup_decl_in_outer_ctx (decl
, ctx
);
1164 if (is_global_var (odecl
))
1166 insert_decl_map (&ctx
->cb
, decl
, odecl
);
1169 gcc_assert (is_taskreg_ctx (ctx
));
1170 gcc_assert (!COMPLETE_TYPE_P (TREE_TYPE (decl
))
1171 || !is_variable_sized (decl
));
1172 /* Global variables don't need to be copied,
1173 the receiver side will use them directly. */
1174 if (is_global_var (maybe_lookup_decl_in_outer_ctx (decl
, ctx
)))
1176 if (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c
))
1178 use_pointer_for_field (decl
, ctx
);
1181 by_ref
= use_pointer_for_field (decl
, NULL
);
1182 if ((! TREE_READONLY (decl
) && !OMP_CLAUSE_SHARED_READONLY (c
))
1183 || TREE_ADDRESSABLE (decl
)
1185 || omp_is_reference (decl
))
1187 by_ref
= use_pointer_for_field (decl
, ctx
);
1188 install_var_field (decl
, by_ref
, 3, ctx
);
1189 install_var_local (decl
, ctx
);
1192 /* We don't need to copy const scalar vars back. */
1193 OMP_CLAUSE_SET_CODE (c
, OMP_CLAUSE_FIRSTPRIVATE
);
1196 case OMP_CLAUSE_REDUCTION
:
/* OpenACC compute regions remember their reduction clauses locally.  */
1197 if (is_oacc_parallel_or_serial (ctx
) || is_oacc_kernels (ctx
))
1198 ctx
->local_reduction_clauses
1199 = tree_cons (NULL
, c
, ctx
->local_reduction_clauses
);
1202 case OMP_CLAUSE_IN_REDUCTION
:
1203 decl
= OMP_CLAUSE_DECL (c
);
/* Array-section reductions have a MEM_REF decl; peel it back to the
   underlying base variable T.  */
1204 if (TREE_CODE (decl
) == MEM_REF
)
1206 tree t
= TREE_OPERAND (decl
, 0);
1207 if (TREE_CODE (t
) == POINTER_PLUS_EXPR
)
1208 t
= TREE_OPERAND (t
, 0);
1209 if (TREE_CODE (t
) == INDIRECT_REF
1210 || TREE_CODE (t
) == ADDR_EXPR
)
1211 t
= TREE_OPERAND (t
, 0);
1212 install_var_local (t
, ctx
);
1213 if (is_taskreg_ctx (ctx
)
1214 && (!is_global_var (maybe_lookup_decl_in_outer_ctx (t
, ctx
))
1215 || (is_task_ctx (ctx
)
1216 && (TREE_CODE (TREE_TYPE (t
)) == POINTER_TYPE
1217 || (TREE_CODE (TREE_TYPE (t
)) == REFERENCE_TYPE
1218 && (TREE_CODE (TREE_TYPE (TREE_TYPE (t
)))
1219 == POINTER_TYPE
)))))
1220 && !is_variable_sized (t
)
1221 && (OMP_CLAUSE_CODE (c
) != OMP_CLAUSE_REDUCTION
1222 || (!OMP_CLAUSE_REDUCTION_TASK (c
)
1223 && !is_task_ctx (ctx
))))
1225 by_ref
= use_pointer_for_field (t
, NULL
);
1226 if (is_task_ctx (ctx
)
1227 && TREE_CODE (TREE_TYPE (t
)) == REFERENCE_TYPE
1228 && TREE_CODE (TREE_TYPE (TREE_TYPE (t
))) == POINTER_TYPE
)
1230 install_var_field (t
, false, 1, ctx
);
1231 install_var_field (t
, by_ref
, 2, ctx
);
1234 install_var_field (t
, by_ref
, 3, ctx
);
1238 if (is_task_ctx (ctx
)
1239 || (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_REDUCTION
1240 && OMP_CLAUSE_REDUCTION_TASK (c
)
1241 && is_parallel_ctx (ctx
)))
1243 /* Global variables don't need to be copied,
1244 the receiver side will use them directly. */
1245 if (!is_global_var (maybe_lookup_decl_in_outer_ctx (decl
, ctx
)))
1247 by_ref
= use_pointer_for_field (decl
, ctx
);
1248 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_IN_REDUCTION
)
1249 install_var_field (decl
, by_ref
, 3, ctx
);
1251 install_var_local (decl
, ctx
);
1254 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_REDUCTION
1255 && OMP_CLAUSE_REDUCTION_TASK (c
))
1257 install_var_local (decl
, ctx
);
1262 case OMP_CLAUSE_LASTPRIVATE
:
1263 /* Let the corresponding firstprivate clause create
1265 if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c
))
1269 case OMP_CLAUSE_FIRSTPRIVATE
:
1270 case OMP_CLAUSE_LINEAR
:
1271 decl
= OMP_CLAUSE_DECL (c
);
1273 if ((OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_FIRSTPRIVATE
1274 || OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_IS_DEVICE_PTR
)
1275 && is_gimple_omp_offloaded (ctx
->stmt
))
1277 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_FIRSTPRIVATE
)
1278 install_var_field (decl
, !omp_is_reference (decl
), 3, ctx
);
1279 else if (TREE_CODE (TREE_TYPE (decl
)) == ARRAY_TYPE
)
1280 install_var_field (decl
, true, 3, ctx
);
1282 install_var_field (decl
, false, 3, ctx
);
1284 if (is_variable_sized (decl
))
1286 if (is_task_ctx (ctx
))
1287 install_var_field (decl
, false, 1, ctx
);
1290 else if (is_taskreg_ctx (ctx
))
1293 = is_global_var (maybe_lookup_decl_in_outer_ctx (decl
, ctx
));
1294 by_ref
= use_pointer_for_field (decl
, NULL
);
1296 if (is_task_ctx (ctx
)
1297 && (global
|| by_ref
|| omp_is_reference (decl
)))
1299 install_var_field (decl
, false, 1, ctx
);
1301 install_var_field (decl
, by_ref
, 2, ctx
);
1304 install_var_field (decl
, by_ref
, 3, ctx
);
1306 install_var_local (decl
, ctx
);
1309 case OMP_CLAUSE_USE_DEVICE_PTR
:
1310 case OMP_CLAUSE_USE_DEVICE_ADDR
:
1311 decl
= OMP_CLAUSE_DECL (c
);
1313 /* Fortran array descriptors. */
1314 if (lang_hooks
.decls
.omp_array_data (decl
, true))
1315 install_var_field (decl
, false, 19, ctx
);
1316 else if ((OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_USE_DEVICE_ADDR
1317 && !omp_is_reference (decl
)
1318 && !omp_is_allocatable_or_ptr (decl
))
1319 || TREE_CODE (TREE_TYPE (decl
)) == ARRAY_TYPE
)
1320 install_var_field (decl
, true, 11, ctx
);
1322 install_var_field (decl
, false, 11, ctx
);
/* Variable-sized decls are rewritten via DECL_VALUE_EXPR; install the
   underlying pointer decl as well.  */
1323 if (DECL_SIZE (decl
)
1324 && TREE_CODE (DECL_SIZE (decl
)) != INTEGER_CST
)
1326 tree decl2
= DECL_VALUE_EXPR (decl
);
1327 gcc_assert (TREE_CODE (decl2
) == INDIRECT_REF
);
1328 decl2
= TREE_OPERAND (decl2
, 0);
1329 gcc_assert (DECL_P (decl2
));
1330 install_var_local (decl2
, ctx
);
1332 install_var_local (decl
, ctx
);
1335 case OMP_CLAUSE_IS_DEVICE_PTR
:
1336 decl
= OMP_CLAUSE_DECL (c
);
1339 case OMP_CLAUSE__LOOPTEMP_
:
1340 case OMP_CLAUSE__REDUCTEMP_
:
1341 gcc_assert (is_taskreg_ctx (ctx
));
1342 decl
= OMP_CLAUSE_DECL (c
);
1343 install_var_field (decl
, false, 3, ctx
);
1344 install_var_local (decl
, ctx
);
1347 case OMP_CLAUSE_COPYPRIVATE
:
1348 case OMP_CLAUSE_COPYIN
:
1349 decl
= OMP_CLAUSE_DECL (c
);
1350 by_ref
= use_pointer_for_field (decl
, NULL
);
1351 install_var_field (decl
, by_ref
, 3, ctx
);
1354 case OMP_CLAUSE_FINAL
:
1356 case OMP_CLAUSE_NUM_THREADS
:
1357 case OMP_CLAUSE_NUM_TEAMS
:
1358 case OMP_CLAUSE_THREAD_LIMIT
:
1359 case OMP_CLAUSE_DEVICE
:
1360 case OMP_CLAUSE_SCHEDULE
:
1361 case OMP_CLAUSE_DIST_SCHEDULE
:
1362 case OMP_CLAUSE_DEPEND
:
1363 case OMP_CLAUSE_PRIORITY
:
1364 case OMP_CLAUSE_GRAINSIZE
:
1365 case OMP_CLAUSE_NUM_TASKS
:
1366 case OMP_CLAUSE_NUM_GANGS
:
1367 case OMP_CLAUSE_NUM_WORKERS
:
1368 case OMP_CLAUSE_VECTOR_LENGTH
:
/* Clause operands are evaluated in the *enclosing* context.  */
1370 scan_omp_op (&OMP_CLAUSE_OPERAND (c
, 0), ctx
->outer
);
1374 case OMP_CLAUSE_FROM
:
1375 case OMP_CLAUSE_MAP
:
1377 scan_omp_op (&OMP_CLAUSE_SIZE (c
), ctx
->outer
);
1378 decl
= OMP_CLAUSE_DECL (c
);
1379 /* Global variables with "omp declare target" attribute
1380 don't need to be copied, the receiver side will use them
1381 directly. However, global variables with "omp declare target link"
1382 attribute need to be copied. Or when ALWAYS modifier is used. */
1383 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_MAP
1385 && ((OMP_CLAUSE_MAP_KIND (c
) != GOMP_MAP_FIRSTPRIVATE_POINTER
1386 && (OMP_CLAUSE_MAP_KIND (c
)
1387 != GOMP_MAP_FIRSTPRIVATE_REFERENCE
)
1388 && OMP_CLAUSE_MAP_KIND (c
) != GOMP_MAP_ATTACH
1389 && OMP_CLAUSE_MAP_KIND (c
) != GOMP_MAP_DETACH
)
1390 || TREE_CODE (TREE_TYPE (decl
)) == ARRAY_TYPE
)
1391 && OMP_CLAUSE_MAP_KIND (c
) != GOMP_MAP_ALWAYS_TO
1392 && OMP_CLAUSE_MAP_KIND (c
) != GOMP_MAP_ALWAYS_FROM
1393 && OMP_CLAUSE_MAP_KIND (c
) != GOMP_MAP_ALWAYS_TOFROM
1394 && OMP_CLAUSE_MAP_KIND (c
) != GOMP_MAP_TO_PSET
1395 && is_global_var (maybe_lookup_decl_in_outer_ctx (decl
, ctx
))
1396 && varpool_node::get_create (decl
)->offloadable
1397 && !lookup_attribute ("omp declare target link",
1398 DECL_ATTRIBUTES (decl
)))
1400 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_MAP
1401 && OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_POINTER
)
1403 /* Ignore GOMP_MAP_POINTER kind for arrays in regions that are
1404 not offloaded; there is nothing to map for those. */
1405 if (!is_gimple_omp_offloaded (ctx
->stmt
)
1406 && !POINTER_TYPE_P (TREE_TYPE (decl
))
1407 && !OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c
))
1410 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_MAP
1412 && (OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_ATTACH
1413 || OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_DETACH
)
1414 && is_omp_target (ctx
->stmt
))
1416 /* If this is an offloaded region, an attach operation should
1417 only exist when the pointer variable is mapped in a prior
1419 if (is_gimple_omp_offloaded (ctx
->stmt
))
1421 (maybe_lookup_decl (decl
, ctx
)
1422 || (is_global_var (maybe_lookup_decl_in_outer_ctx (decl
, ctx
))
1423 && lookup_attribute ("omp declare target",
1424 DECL_ATTRIBUTES (decl
))));
1426 /* By itself, attach/detach is generated as part of pointer
1427 variable mapping and should not create new variables in the
1428 offloaded region, however sender refs for it must be created
1429 for its address to be passed to the runtime. */
1431 = build_decl (OMP_CLAUSE_LOCATION (c
),
1432 FIELD_DECL
, NULL_TREE
, ptr_type_node
);
1433 SET_DECL_ALIGN (field
, TYPE_ALIGN (ptr_type_node
));
1434 insert_field_into_struct (ctx
->record_type
, field
);
1435 /* To not clash with a map of the pointer variable itself,
1436 attach/detach maps have their field looked up by the *clause*
1437 tree expression, not the decl. */
1438 gcc_assert (!splay_tree_lookup (ctx
->field_map
,
1439 (splay_tree_key
) c
));
1440 splay_tree_insert (ctx
->field_map
, (splay_tree_key
) c
,
1441 (splay_tree_value
) field
);
1444 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_MAP
1445 && (OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_FIRSTPRIVATE_POINTER
1446 || (OMP_CLAUSE_MAP_KIND (c
)
1447 == GOMP_MAP_FIRSTPRIVATE_REFERENCE
)))
1449 if (TREE_CODE (decl
) == COMPONENT_REF
1450 || (TREE_CODE (decl
) == INDIRECT_REF
1451 && TREE_CODE (TREE_OPERAND (decl
, 0)) == COMPONENT_REF
1452 && (TREE_CODE (TREE_TYPE (TREE_OPERAND (decl
, 0)))
1453 == REFERENCE_TYPE
)))
1455 if (DECL_SIZE (decl
)
1456 && TREE_CODE (DECL_SIZE (decl
)) != INTEGER_CST
)
1458 tree decl2
= DECL_VALUE_EXPR (decl
);
1459 gcc_assert (TREE_CODE (decl2
) == INDIRECT_REF
);
1460 decl2
= TREE_OPERAND (decl2
, 0);
1461 gcc_assert (DECL_P (decl2
));
1462 install_var_local (decl2
, ctx
);
1464 install_var_local (decl
, ctx
);
1469 if (DECL_SIZE (decl
)
1470 && TREE_CODE (DECL_SIZE (decl
)) != INTEGER_CST
)
1472 tree decl2
= DECL_VALUE_EXPR (decl
);
1473 gcc_assert (TREE_CODE (decl2
) == INDIRECT_REF
);
1474 decl2
= TREE_OPERAND (decl2
, 0);
1475 gcc_assert (DECL_P (decl2
));
1476 install_var_field (decl2
, true, 3, ctx
);
1477 install_var_local (decl2
, ctx
);
1478 install_var_local (decl
, ctx
);
1482 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_MAP
1483 && OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_POINTER
1484 && !OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c
)
1485 && TREE_CODE (TREE_TYPE (decl
)) == ARRAY_TYPE
)
1486 install_var_field (decl
, true, 7, ctx
);
1488 install_var_field (decl
, true, 3, ctx
);
1489 if (is_gimple_omp_offloaded (ctx
->stmt
)
1490 && !OMP_CLAUSE_MAP_IN_REDUCTION (c
))
1491 install_var_local (decl
, ctx
);
/* Non-decl map: look for a following GOMP_MAP_POINTER of the base
   address with zero size, marking a zero-bias array section.  */
1496 tree base
= get_base_address (decl
);
1497 tree nc
= OMP_CLAUSE_CHAIN (c
);
1500 && OMP_CLAUSE_CODE (nc
) == OMP_CLAUSE_MAP
1501 && OMP_CLAUSE_DECL (nc
) == base
1502 && OMP_CLAUSE_MAP_KIND (nc
) == GOMP_MAP_POINTER
1503 && integer_zerop (OMP_CLAUSE_SIZE (nc
)))
1505 OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c
) = 1;
1506 OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (nc
) = 1;
1512 scan_omp_op (&OMP_CLAUSE_DECL (c
), ctx
->outer
);
1513 decl
= OMP_CLAUSE_DECL (c
);
1515 gcc_assert (!splay_tree_lookup (ctx
->field_map
,
1516 (splay_tree_key
) decl
));
1518 = build_decl (OMP_CLAUSE_LOCATION (c
),
1519 FIELD_DECL
, NULL_TREE
, ptr_type_node
);
1520 SET_DECL_ALIGN (field
, TYPE_ALIGN (ptr_type_node
));
1521 insert_field_into_struct (ctx
->record_type
, field
);
1522 splay_tree_insert (ctx
->field_map
, (splay_tree_key
) decl
,
1523 (splay_tree_value
) field
);
1528 case OMP_CLAUSE_ORDER
:
1529 ctx
->order_concurrent
= true;
1532 case OMP_CLAUSE_BIND
:
1536 case OMP_CLAUSE_NOWAIT
:
1537 case OMP_CLAUSE_ORDERED
:
1538 case OMP_CLAUSE_COLLAPSE
:
1539 case OMP_CLAUSE_UNTIED
:
1540 case OMP_CLAUSE_MERGEABLE
:
1541 case OMP_CLAUSE_PROC_BIND
:
1542 case OMP_CLAUSE_SAFELEN
:
1543 case OMP_CLAUSE_SIMDLEN
:
1544 case OMP_CLAUSE_THREADS
:
1545 case OMP_CLAUSE_SIMD
:
1546 case OMP_CLAUSE_NOGROUP
:
1547 case OMP_CLAUSE_DEFAULTMAP
:
1548 case OMP_CLAUSE_ASYNC
:
1549 case OMP_CLAUSE_WAIT
:
1550 case OMP_CLAUSE_GANG
:
1551 case OMP_CLAUSE_WORKER
:
1552 case OMP_CLAUSE_VECTOR
:
1553 case OMP_CLAUSE_INDEPENDENT
:
1554 case OMP_CLAUSE_AUTO
:
1555 case OMP_CLAUSE_SEQ
:
1556 case OMP_CLAUSE_TILE
:
1557 case OMP_CLAUSE__SIMT_
:
1558 case OMP_CLAUSE_DEFAULT
:
1559 case OMP_CLAUSE_NONTEMPORAL
:
1560 case OMP_CLAUSE_IF_PRESENT
:
1561 case OMP_CLAUSE_FINALIZE
:
1562 case OMP_CLAUSE_TASK_REDUCTION
:
1563 case OMP_CLAUSE_ALLOCATE
:
1566 case OMP_CLAUSE_ALIGNED
:
1567 decl
= OMP_CLAUSE_DECL (c
);
1568 if (is_global_var (decl
)
1569 && TREE_CODE (TREE_TYPE (decl
)) == ARRAY_TYPE
)
1570 install_var_local (decl
, ctx
);
1573 case OMP_CLAUSE__CONDTEMP_
:
1574 decl
= OMP_CLAUSE_DECL (c
);
1575 if (is_parallel_ctx (ctx
))
1577 install_var_field (decl
, false, 3, ctx
);
1578 install_var_local (decl
, ctx
);
1580 else if (gimple_code (ctx
->stmt
) == GIMPLE_OMP_FOR
1581 && gimple_omp_for_kind (ctx
->stmt
) == GF_OMP_FOR_KIND_SIMD
1582 && !OMP_CLAUSE__CONDTEMP__ITER (c
))
1583 install_var_local (decl
, ctx
);
1586 case OMP_CLAUSE__CACHE_
:
/* Pass 2: now that all variables have been installed, fix up remapped
   decls (DECL_VALUE_EXPRs, types) and note which clause bodies need a
   recursive scan.  */
1592 for (c
= clauses
; c
; c
= OMP_CLAUSE_CHAIN (c
))
1594 switch (OMP_CLAUSE_CODE (c
))
1596 case OMP_CLAUSE_LASTPRIVATE
:
1597 /* Let the corresponding firstprivate clause create
1599 if (OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c
))
1600 scan_array_reductions
= true;
1601 if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c
))
1605 case OMP_CLAUSE_FIRSTPRIVATE
:
1606 case OMP_CLAUSE_PRIVATE
:
1607 case OMP_CLAUSE_LINEAR
:
1608 case OMP_CLAUSE_IS_DEVICE_PTR
:
1609 decl
= OMP_CLAUSE_DECL (c
);
1610 if (is_variable_sized (decl
))
1612 if ((OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_FIRSTPRIVATE
1613 || OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_IS_DEVICE_PTR
)
1614 && is_gimple_omp_offloaded (ctx
->stmt
))
1616 tree decl2
= DECL_VALUE_EXPR (decl
);
1617 gcc_assert (TREE_CODE (decl2
) == INDIRECT_REF
);
1618 decl2
= TREE_OPERAND (decl2
, 0);
1619 gcc_assert (DECL_P (decl2
));
1620 install_var_local (decl2
, ctx
);
1621 fixup_remapped_decl (decl2
, ctx
, false);
1623 install_var_local (decl
, ctx
);
1625 fixup_remapped_decl (decl
, ctx
,
1626 OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_PRIVATE
1627 && OMP_CLAUSE_PRIVATE_DEBUG (c
));
1628 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LINEAR
1629 && OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c
))
1630 scan_array_reductions
= true;
1633 case OMP_CLAUSE_REDUCTION
:
1634 case OMP_CLAUSE_IN_REDUCTION
:
1635 decl
= OMP_CLAUSE_DECL (c
);
1636 if (TREE_CODE (decl
) != MEM_REF
)
1638 if (is_variable_sized (decl
))
1639 install_var_local (decl
, ctx
);
1640 fixup_remapped_decl (decl
, ctx
, false);
1642 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
))
1643 scan_array_reductions
= true;
1646 case OMP_CLAUSE_TASK_REDUCTION
:
1647 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
))
1648 scan_array_reductions
= true;
1651 case OMP_CLAUSE_SHARED
:
1652 /* Ignore shared directives in teams construct inside of
1653 target construct. */
1654 if (gimple_code (ctx
->stmt
) == GIMPLE_OMP_TEAMS
1655 && !is_host_teams_ctx (ctx
))
1657 decl
= OMP_CLAUSE_DECL (c
);
1658 if (is_global_var (maybe_lookup_decl_in_outer_ctx (decl
, ctx
)))
1660 if (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c
))
1662 if (is_global_var (maybe_lookup_decl_in_outer_ctx (decl
,
1665 bool by_ref
= use_pointer_for_field (decl
, ctx
);
1666 install_var_field (decl
, by_ref
, 11, ctx
);
1669 fixup_remapped_decl (decl
, ctx
, false);
1672 case OMP_CLAUSE_MAP
:
1673 if (!is_gimple_omp_offloaded (ctx
->stmt
))
1675 decl
= OMP_CLAUSE_DECL (c
);
1677 && ((OMP_CLAUSE_MAP_KIND (c
) != GOMP_MAP_FIRSTPRIVATE_POINTER
1678 && (OMP_CLAUSE_MAP_KIND (c
)
1679 != GOMP_MAP_FIRSTPRIVATE_REFERENCE
))
1680 || TREE_CODE (TREE_TYPE (decl
)) == ARRAY_TYPE
)
1681 && is_global_var (maybe_lookup_decl_in_outer_ctx (decl
, ctx
))
1682 && varpool_node::get_create (decl
)->offloadable
)
1684 if ((OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_ATTACH
1685 || OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_DETACH
)
1686 && is_omp_target (ctx
->stmt
)
1687 && !is_gimple_omp_offloaded (ctx
->stmt
))
1691 if ((OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_POINTER
1692 || OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_FIRSTPRIVATE_POINTER
)
1693 && TREE_CODE (TREE_TYPE (decl
)) == ARRAY_TYPE
1694 && !COMPLETE_TYPE_P (TREE_TYPE (decl
)))
1696 tree new_decl
= lookup_decl (decl
, ctx
);
1697 TREE_TYPE (new_decl
)
1698 = remap_type (TREE_TYPE (decl
), &ctx
->cb
);
1700 else if (DECL_SIZE (decl
)
1701 && TREE_CODE (DECL_SIZE (decl
)) != INTEGER_CST
)
1703 tree decl2
= DECL_VALUE_EXPR (decl
);
1704 gcc_assert (TREE_CODE (decl2
) == INDIRECT_REF
);
1705 decl2
= TREE_OPERAND (decl2
, 0);
1706 gcc_assert (DECL_P (decl2
));
1707 fixup_remapped_decl (decl2
, ctx
, false);
1708 fixup_remapped_decl (decl
, ctx
, true);
1711 fixup_remapped_decl (decl
, ctx
, false);
1715 case OMP_CLAUSE_COPYPRIVATE
:
1716 case OMP_CLAUSE_COPYIN
:
1717 case OMP_CLAUSE_DEFAULT
:
1719 case OMP_CLAUSE_NUM_THREADS
:
1720 case OMP_CLAUSE_NUM_TEAMS
:
1721 case OMP_CLAUSE_THREAD_LIMIT
:
1722 case OMP_CLAUSE_DEVICE
:
1723 case OMP_CLAUSE_SCHEDULE
:
1724 case OMP_CLAUSE_DIST_SCHEDULE
:
1725 case OMP_CLAUSE_NOWAIT
:
1726 case OMP_CLAUSE_ORDERED
:
1727 case OMP_CLAUSE_COLLAPSE
:
1728 case OMP_CLAUSE_UNTIED
:
1729 case OMP_CLAUSE_FINAL
:
1730 case OMP_CLAUSE_MERGEABLE
:
1731 case OMP_CLAUSE_PROC_BIND
:
1732 case OMP_CLAUSE_SAFELEN
:
1733 case OMP_CLAUSE_SIMDLEN
:
1734 case OMP_CLAUSE_ALIGNED
:
1735 case OMP_CLAUSE_DEPEND
:
1736 case OMP_CLAUSE_ALLOCATE
:
1737 case OMP_CLAUSE__LOOPTEMP_
:
1738 case OMP_CLAUSE__REDUCTEMP_
:
1740 case OMP_CLAUSE_FROM
:
1741 case OMP_CLAUSE_PRIORITY
:
1742 case OMP_CLAUSE_GRAINSIZE
:
1743 case OMP_CLAUSE_NUM_TASKS
:
1744 case OMP_CLAUSE_THREADS
:
1745 case OMP_CLAUSE_SIMD
:
1746 case OMP_CLAUSE_NOGROUP
:
1747 case OMP_CLAUSE_DEFAULTMAP
:
1748 case OMP_CLAUSE_ORDER
:
1749 case OMP_CLAUSE_BIND
:
1750 case OMP_CLAUSE_USE_DEVICE_PTR
:
1751 case OMP_CLAUSE_USE_DEVICE_ADDR
:
1752 case OMP_CLAUSE_NONTEMPORAL
:
1753 case OMP_CLAUSE_ASYNC
:
1754 case OMP_CLAUSE_WAIT
:
1755 case OMP_CLAUSE_NUM_GANGS
:
1756 case OMP_CLAUSE_NUM_WORKERS
:
1757 case OMP_CLAUSE_VECTOR_LENGTH
:
1758 case OMP_CLAUSE_GANG
:
1759 case OMP_CLAUSE_WORKER
:
1760 case OMP_CLAUSE_VECTOR
:
1761 case OMP_CLAUSE_INDEPENDENT
:
1762 case OMP_CLAUSE_AUTO
:
1763 case OMP_CLAUSE_SEQ
:
1764 case OMP_CLAUSE_TILE
:
1765 case OMP_CLAUSE__SIMT_
:
1766 case OMP_CLAUSE_IF_PRESENT
:
1767 case OMP_CLAUSE_FINALIZE
:
1768 case OMP_CLAUSE__CONDTEMP_
:
1771 case OMP_CLAUSE__CACHE_
:
/* Pass 3: recursively scan the GIMPLE sequences attached to
   reduction/lastprivate/linear clauses, which may themselves contain
   OMP constructs.  Not expected for OpenACC (asserted below).  */
1777 gcc_checking_assert (!scan_array_reductions
1778 || !is_gimple_omp_oacc (ctx
->stmt
));
1779 if (scan_array_reductions
)
1781 for (c
= clauses
; c
; c
= OMP_CLAUSE_CHAIN (c
))
1782 if ((OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_REDUCTION
1783 || OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_IN_REDUCTION
1784 || OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_TASK_REDUCTION
)
1785 && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
))
1787 scan_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c
), ctx
);
1788 scan_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c
), ctx
);
1790 else if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LASTPRIVATE
1791 && OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c
))
1792 scan_omp (&OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c
), ctx
);
1793 else if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LINEAR
1794 && OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c
))
1795 scan_omp (&OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c
), ctx
);
1799 /* Create a new name for omp child function. Returns an identifier. */
1802 create_omp_child_function_name (bool task_copy
)
1804 return clone_function_name_numbered (current_function_decl
,
1805 task_copy
? "_omp_cpyfn" : "_omp_fn");
1808 /* Return true if CTX may belong to offloaded code: either if current function
1809 is offloaded, or any enclosing context corresponds to a target region. */
1812 omp_maybe_offloaded_ctx (omp_context
*ctx
)
1814 if (cgraph_node::get (current_function_decl
)->offloadable
)
1816 for (; ctx
; ctx
= ctx
->outer
)
1817 if (is_gimple_omp_offloaded (ctx
->stmt
))
1822 /* Build a decl for the omp child function. It'll not contain a body
1823 yet, just the bare decl. */
1826 create_omp_child_function (omp_context
*ctx
, bool task_copy
)
1828 tree decl
, type
, name
, t
;
1830 name
= create_omp_child_function_name (task_copy
);
/* Child functions take (.omp_data_i); task copy functions additionally
   take a second pointer (.omp_data_o).  Both return void.  */
1832 type
= build_function_type_list (void_type_node
, ptr_type_node
,
1833 ptr_type_node
, NULL_TREE
);
1835 type
= build_function_type_list (void_type_node
, ptr_type_node
, NULL_TREE
);
1837 decl
= build_decl (gimple_location (ctx
->stmt
), FUNCTION_DECL
, name
, type
);
1839 gcc_checking_assert (!is_gimple_omp_oacc (ctx
->stmt
)
1842 ctx
->cb
.dst_fn
= decl
;
1844 gimple_omp_task_set_copy_fn (ctx
->stmt
, decl
);
/* Mark the new function: static, used, compiler-generated, not inlinable
   into its callers, with a definition in this TU.  */
1846 TREE_STATIC (decl
) = 1;
1847 TREE_USED (decl
) = 1;
1848 DECL_ARTIFICIAL (decl
) = 1;
1849 DECL_IGNORED_P (decl
) = 0;
1850 TREE_PUBLIC (decl
) = 0;
1851 DECL_UNINLINABLE (decl
) = 1;
1852 DECL_EXTERNAL (decl
) = 0;
1853 DECL_CONTEXT (decl
) = NULL_TREE
;
1854 DECL_INITIAL (decl
) = make_node (BLOCK
);
1855 BLOCK_SUPERCONTEXT (DECL_INITIAL (decl
)) = decl
;
/* Inherit the parent's attributes, then strip "omp declare simd".  */
1856 DECL_ATTRIBUTES (decl
) = DECL_ATTRIBUTES (current_function_decl
);
1857 /* Remove omp declare simd attribute from the new attributes. */
1858 if (tree a
= lookup_attribute ("omp declare simd", DECL_ATTRIBUTES (decl
)))
1860 while (tree a2
= lookup_attribute ("omp declare simd", TREE_CHAIN (a
)))
1863 for (tree
*p
= &DECL_ATTRIBUTES (decl
); *p
!= a
;)
1864 if (is_attribute_p ("omp declare simd", get_attribute_name (*p
)))
1865 *p
= TREE_CHAIN (*p
);
1868 tree chain
= TREE_CHAIN (*p
);
1869 *p
= copy_node (*p
);
1870 p
= &TREE_CHAIN (*p
);
/* Copy optimization/target option nodes and versioning state from the
   parent function.  */
1874 DECL_FUNCTION_SPECIFIC_OPTIMIZATION (decl
)
1875 = DECL_FUNCTION_SPECIFIC_OPTIMIZATION (current_function_decl
);
1876 DECL_FUNCTION_SPECIFIC_TARGET (decl
)
1877 = DECL_FUNCTION_SPECIFIC_TARGET (current_function_decl
);
1878 DECL_FUNCTION_VERSIONED (decl
)
1879 = DECL_FUNCTION_VERSIONED (current_function_decl
);
/* Propagate offloadability and tag the entrypoint attribute when the
   context may run on an accelerator.  */
1881 if (omp_maybe_offloaded_ctx (ctx
))
1883 cgraph_node::get_create (decl
)->offloadable
= 1;
1884 if (ENABLE_OFFLOADING
)
1885 g
->have_offload
= true;
1888 if (cgraph_node::get_create (decl
)->offloadable
1889 && !lookup_attribute ("omp declare target",
1890 DECL_ATTRIBUTES (current_function_decl
)))
1892 const char *target_attr
= (is_gimple_omp_offloaded (ctx
->stmt
)
1893 ? "omp target entrypoint"
1894 : "omp declare target");
1895 DECL_ATTRIBUTES (decl
)
1896 = tree_cons (get_identifier (target_attr
),
1897 NULL_TREE
, DECL_ATTRIBUTES (decl
));
/* Build the void RESULT_DECL.  */
1900 t
= build_decl (DECL_SOURCE_LOCATION (decl
),
1901 RESULT_DECL
, NULL_TREE
, void_type_node
);
1902 DECL_ARTIFICIAL (t
) = 1;
1903 DECL_IGNORED_P (t
) = 1;
1904 DECL_CONTEXT (t
) = decl
;
1905 DECL_RESULT (decl
) = t
;
/* Build the incoming data parameter ".omp_data_i"; it becomes the
   receiver decl used to access shared data in the child.  */
1907 tree data_name
= get_identifier (".omp_data_i");
1908 t
= build_decl (DECL_SOURCE_LOCATION (decl
), PARM_DECL
, data_name
,
1910 DECL_ARTIFICIAL (t
) = 1;
1911 DECL_NAMELESS (t
) = 1;
1912 DECL_ARG_TYPE (t
) = ptr_type_node
;
1913 DECL_CONTEXT (t
) = current_function_decl
;
1915 TREE_READONLY (t
) = 1;
1916 DECL_ARGUMENTS (decl
) = t
;
1918 ctx
->receiver_decl
= t
;
/* For task copy functions prepend the outgoing ".omp_data_o" param.  */
1921 t
= build_decl (DECL_SOURCE_LOCATION (decl
),
1922 PARM_DECL
, get_identifier (".omp_data_o"),
1924 DECL_ARTIFICIAL (t
) = 1;
1925 DECL_NAMELESS (t
) = 1;
1926 DECL_ARG_TYPE (t
) = ptr_type_node
;
1927 DECL_CONTEXT (t
) = current_function_decl
;
1929 TREE_ADDRESSABLE (t
) = 1;
1930 DECL_CHAIN (t
) = DECL_ARGUMENTS (decl
);
1931 DECL_ARGUMENTS (decl
) = t
;
1934 /* Allocate memory for the function structure. The call to
1935 allocate_struct_function clobbers CFUN, so we need to restore
1937 push_struct_function (decl
);
1938 cfun
->function_end_locus
= gimple_location (ctx
->stmt
);
1939 init_tree_ssa (cfun
);
1943 /* Callback for walk_gimple_seq. Check if combined parallel
1944 contains gimple_omp_for_combined_into_p OMP_FOR. */
1947 omp_find_combined_for (gimple_stmt_iterator
*gsi_p
,
1948 bool *handled_ops_p
,
1949 struct walk_stmt_info
*wi
)
1951 gimple
*stmt
= gsi_stmt (*gsi_p
);
1953 *handled_ops_p
= true;
1954 switch (gimple_code (stmt
))
/* On a matching inner OMP_FOR (combined into the enclosing construct and
   of the loop kind pointed to by wi->info) the walk is terminated by the
   non-NULL return value below.  */
1958 case GIMPLE_OMP_FOR
:
1959 if (gimple_omp_for_combined_into_p (stmt
)
1960 && gimple_omp_for_kind (stmt
)
1961 == *(const enum gf_mask
*) (wi
->info
))
1964 return integer_zero_node
;
1973 /* Add _LOOPTEMP_/_REDUCTEMP_ clauses on OpenMP parallel or task. */
1976 add_taskreg_looptemp_clauses (enum gf_mask msk
, gimple
*stmt
,
1977 omp_context
*outer_ctx
)
1979 struct walk_stmt_info wi
;
1981 memset (&wi
, 0, sizeof (wi
));
/* Stash the requested loop kind in wi.info; omp_find_combined_for
   overwrites it with the inner OMP_FOR when one is found.  */
1983 wi
.info
= (void *) &msk
;
1984 walk_gimple_seq (gimple_omp_body (stmt
), omp_find_combined_for
, NULL
, &wi
);
1985 if (wi
.info
!= (void *) &msk
)
1987 gomp_for
*for_stmt
= as_a
<gomp_for
*> ((gimple
*) wi
.info
);
1988 struct omp_for_data fd
;
1989 omp_extract_for_data (for_stmt
, &fd
, NULL
);
1990 /* We need two temporaries with fd.loop.v type (istart/iend)
1991 and then (fd.collapse - 1) temporaries with the same
1992 type for count2 ... countN-1 vars if not constant. */
1993 size_t count
= 2, i
;
1994 tree type
= fd
.iter_type
;
1996 && TREE_CODE (fd
.loop
.n2
) != INTEGER_CST
)
1998 count
+= fd
.collapse
- 1;
1999 /* If there are lastprivate clauses on the inner
2000 GIMPLE_OMP_FOR, add one more temporaries for the total number
2001 of iterations (product of count1 ... countN-1). */
2002 if (omp_find_clause (gimple_omp_for_clauses (for_stmt
),
2003 OMP_CLAUSE_LASTPRIVATE
)
2004 || (msk
== GF_OMP_FOR_KIND_FOR
2005 && omp_find_clause (gimple_omp_parallel_clauses (stmt
),
2006 OMP_CLAUSE_LASTPRIVATE
)))
2008 tree temp
= create_tmp_var (type
);
2009 tree c
= build_omp_clause (UNKNOWN_LOCATION
,
2010 OMP_CLAUSE__LOOPTEMP_
);
2011 insert_decl_map (&outer_ctx
->cb
, temp
, temp
);
2012 OMP_CLAUSE_DECL (c
) = temp
;
2013 OMP_CLAUSE_CHAIN (c
) = gimple_omp_taskreg_clauses (stmt
);
2014 gimple_omp_taskreg_set_clauses (stmt
, c
);
/* For a single signed non-rectangular loop pair, three extra temps of
   the index type are added (loop below runs i = 0..2).  */
2017 && fd
.last_nonrect
== fd
.first_nonrect
+ 1)
2018 if (tree v
= gimple_omp_for_index (for_stmt
, fd
.last_nonrect
))
2019 if (!TYPE_UNSIGNED (TREE_TYPE (v
)))
2021 v
= gimple_omp_for_index (for_stmt
, fd
.first_nonrect
);
2022 tree type2
= TREE_TYPE (v
);
2024 for (i
= 0; i
< 3; i
++)
2026 tree temp
= create_tmp_var (type2
);
2027 tree c
= build_omp_clause (UNKNOWN_LOCATION
,
2028 OMP_CLAUSE__LOOPTEMP_
);
2029 insert_decl_map (&outer_ctx
->cb
, temp
, temp
);
2030 OMP_CLAUSE_DECL (c
) = temp
;
2031 OMP_CLAUSE_CHAIN (c
) = gimple_omp_taskreg_clauses (stmt
);
2032 gimple_omp_taskreg_set_clauses (stmt
, c
);
/* Add the COUNT _looptemp_ clauses computed above.  */
2036 for (i
= 0; i
< count
; i
++)
2038 tree temp
= create_tmp_var (type
);
2039 tree c
= build_omp_clause (UNKNOWN_LOCATION
, OMP_CLAUSE__LOOPTEMP_
);
2040 insert_decl_map (&outer_ctx
->cb
, temp
, temp
);
2041 OMP_CLAUSE_DECL (c
) = temp
;
2042 OMP_CLAUSE_CHAIN (c
) = gimple_omp_taskreg_clauses (stmt
);
2043 gimple_omp_taskreg_set_clauses (stmt
, c
);
/* Taskloops with reductions also get a _reductemp_ clause holding a
   pointer-sized-int pointer.  */
2046 if (msk
== GF_OMP_FOR_KIND_TASKLOOP
2047 && omp_find_clause (gimple_omp_task_clauses (stmt
),
2048 OMP_CLAUSE_REDUCTION
))
2050 tree type
= build_pointer_type (pointer_sized_int_node
);
2051 tree temp
= create_tmp_var (type
);
2052 tree c
= build_omp_clause (UNKNOWN_LOCATION
, OMP_CLAUSE__REDUCTEMP_
);
2053 insert_decl_map (&outer_ctx
->cb
, temp
, temp
);
2054 OMP_CLAUSE_DECL (c
) = temp
;
2055 OMP_CLAUSE_CHAIN (c
) = gimple_omp_task_clauses (stmt
);
2056 gimple_omp_task_set_clauses (stmt
, c
);
2060 /* Scan an OpenMP parallel directive. */
2063 scan_omp_parallel (gimple_stmt_iterator
*gsi
, omp_context
*outer_ctx
)
2067 gomp_parallel
*stmt
= as_a
<gomp_parallel
*> (gsi_stmt (*gsi
));
2069 /* Ignore parallel directives with empty bodies, unless there
2070 are copyin clauses. */
2072 && empty_body_p (gimple_omp_body (stmt
))
2073 && omp_find_clause (gimple_omp_parallel_clauses (stmt
),
2074 OMP_CLAUSE_COPYIN
) == NULL
)
2076 gsi_replace (gsi
, gimple_build_nop (), false);
/* Combined parallel-for needs _looptemp_ clauses for the outlined loop
   bounds.  */
2080 if (gimple_omp_parallel_combined_p (stmt
))
2081 add_taskreg_looptemp_clauses (GF_OMP_FOR_KIND_FOR
, stmt
, outer_ctx
);
/* A task-modifier reduction anywhere in the clause list requires a
   _reductemp_ clause prepended to the parallel's clauses.  */
2082 for (tree c
= omp_find_clause (gimple_omp_parallel_clauses (stmt
),
2083 OMP_CLAUSE_REDUCTION
);
2084 c
; c
= omp_find_clause (OMP_CLAUSE_CHAIN (c
), OMP_CLAUSE_REDUCTION
))
2085 if (OMP_CLAUSE_REDUCTION_TASK (c
))
2087 tree type
= build_pointer_type (pointer_sized_int_node
);
2088 tree temp
= create_tmp_var (type
);
2089 tree c
= build_omp_clause (UNKNOWN_LOCATION
, OMP_CLAUSE__REDUCTEMP_
);
2091 insert_decl_map (&outer_ctx
->cb
, temp
, temp
);
2092 OMP_CLAUSE_DECL (c
) = temp
;
2093 OMP_CLAUSE_CHAIN (c
) = gimple_omp_parallel_clauses (stmt
);
2094 gimple_omp_parallel_set_clauses (stmt
, c
);
2097 else if (OMP_CLAUSE_CHAIN (c
) == NULL_TREE
)
/* Create the new context, its sender record type (.omp_data_s) and the
   outlined child function, then scan clauses and body.  */
2100 ctx
= new_omp_context (stmt
, outer_ctx
);
2101 taskreg_contexts
.safe_push (ctx
);
2102 if (taskreg_nesting_level
> 1)
2103 ctx
->is_nested
= true;
2104 ctx
->field_map
= splay_tree_new (splay_tree_compare_pointers
, 0, 0);
2105 ctx
->record_type
= lang_hooks
.types
.make_type (RECORD_TYPE
);
2106 name
= create_tmp_var_name (".omp_data_s");
2107 name
= build_decl (gimple_location (stmt
),
2108 TYPE_DECL
, name
, ctx
->record_type
);
2109 DECL_ARTIFICIAL (name
) = 1;
2110 DECL_NAMELESS (name
) = 1;
2111 TYPE_NAME (ctx
->record_type
) = name
;
2112 TYPE_ARTIFICIAL (ctx
->record_type
) = 1;
2113 create_omp_child_function (ctx
, false);
2114 gimple_omp_parallel_set_child_fn (stmt
, ctx
->cb
.dst_fn
);
2116 scan_sharing_clauses (gimple_omp_parallel_clauses (stmt
), ctx
);
2117 scan_omp (gimple_omp_body_ptr (stmt
), ctx
);
/* If nothing ended up in the record type, no data needs passing.  */
2119 if (TYPE_FIELDS (ctx
->record_type
) == NULL
)
2120 ctx
->record_type
= ctx
->receiver_decl
= NULL
;
2123 /* Scan an OpenMP task directive. */
2126 scan_omp_task (gimple_stmt_iterator
*gsi
, omp_context
*outer_ctx
)
2130 gomp_task
*stmt
= as_a
<gomp_task
*> (gsi_stmt (*gsi
))
;
2132 /* Ignore task directives with empty bodies, unless they have depend
2135 && gimple_omp_body (stmt
)
2136 && empty_body_p (gimple_omp_body (stmt
))
2137 && !omp_find_clause (gimple_omp_task_clauses (stmt
), OMP_CLAUSE_DEPEND
))
2139 gsi_replace (gsi
, gimple_build_nop (), false);
/* Taskloops need _looptemp_ clauses, like combined parallel-for.  */
2143 if (gimple_omp_task_taskloop_p (stmt
))
2144 add_taskreg_looptemp_clauses (GF_OMP_FOR_KIND_TASKLOOP
, stmt
, outer_ctx
);
2146 ctx
= new_omp_context (stmt
, outer_ctx
);
/* Taskwait with depend clauses only scans its clauses; no child fn.  */
2148 if (gimple_omp_task_taskwait_p (stmt
))
2150 scan_sharing_clauses (gimple_omp_task_clauses (stmt
), ctx
);
/* Build the sender record type (.omp_data_s) and child function.  */
2154 taskreg_contexts
.safe_push (ctx
);
2155 if (taskreg_nesting_level
> 1)
2156 ctx
->is_nested
= true;
2157 ctx
->field_map
= splay_tree_new (splay_tree_compare_pointers
, 0, 0);
2158 ctx
->record_type
= lang_hooks
.types
.make_type (RECORD_TYPE
);
2159 name
= create_tmp_var_name (".omp_data_s");
2160 name
= build_decl (gimple_location (stmt
),
2161 TYPE_DECL
, name
, ctx
->record_type
);
2162 DECL_ARTIFICIAL (name
) = 1;
2163 DECL_NAMELESS (name
) = 1;
2164 TYPE_NAME (ctx
->record_type
) = name
;
2165 TYPE_ARTIFICIAL (ctx
->record_type
) = 1;
2166 create_omp_child_function (ctx
, false);
2167 gimple_omp_task_set_child_fn (stmt
, ctx
->cb
.dst_fn
);
2169 scan_sharing_clauses (gimple_omp_task_clauses (stmt
), ctx
);
/* Firstprivate data goes through a second record (.omp_data_a) and a
   task copy function when srecord_type was created.  */
2171 if (ctx
->srecord_type
)
2173 name
= create_tmp_var_name (".omp_data_a");
2174 name
= build_decl (gimple_location (stmt
),
2175 TYPE_DECL
, name
, ctx
->srecord_type
);
2176 DECL_ARTIFICIAL (name
) = 1;
2177 DECL_NAMELESS (name
) = 1;
2178 TYPE_NAME (ctx
->srecord_type
) = name
;
2179 TYPE_ARTIFICIAL (ctx
->srecord_type
) = 1;
2180 create_omp_child_function (ctx
, true);
2183 scan_omp (gimple_omp_body_ptr (stmt
), ctx
);
/* With no fields to pass, clear the record and use trivial size 0 /
   alignment 1 arguments for the runtime call.  */
2185 if (TYPE_FIELDS (ctx
->record_type
) == NULL
)
2187 ctx
->record_type
= ctx
->receiver_decl
= NULL
;
2188 t
= build_int_cst (long_integer_type_node
, 0);
2189 gimple_omp_task_set_arg_size (stmt
, t
);
2190 t
= build_int_cst (long_integer_type_node
, 1);
2191 gimple_omp_task_set_arg_align (stmt
, t
);
2195 /* Helper function for finish_taskreg_scan, called through walk_tree.
2196 If maybe_lookup_decl_in_outer_context returns non-NULL for some
2197 tree, replace it in the expression. */
2200 finish_taskreg_remap (tree
*tp
, int *walk_subtrees
, void *data
)
2204 omp_context
*ctx
= (omp_context
*) data
;
2205 tree t
= maybe_lookup_decl_in_outer_ctx (*tp
, ctx
);
/* Decls with a DECL_VALUE_EXPR are replaced by an unshared copy of that
   expression rather than the decl itself.  */
2208 if (DECL_HAS_VALUE_EXPR_P (t
))
2209 t
= unshare_expr (DECL_VALUE_EXPR (t
));
/* Types and decls that were not remapped need no subtree walk.  */
2214 else if (IS_TYPE_OR_DECL_P (*tp
))
2219 /* If any decls have been made addressable during scan_omp,
2220 adjust their fields if needed, and layout record types
2221 of parallel/task constructs. */
/* Post-scan fixup for parallel/task/teams contexts: after scan_omp may
   have made decls addressable, adjust the .omp_data_s record fields,
   reorder special fields (_reductemp_, _looptemp_, VLA fields), lay out
   the record types and recompute the task argument size/alignment.
   NOTE(review): garbled extraction — original omp-low.c line numbers
   are fused into the text and lines (braces, else arms, loop headers)
   are missing from this view.  Code text preserved byte-for-byte.  */
2224 finish_taskreg_scan (omp_context
*ctx
)
2226 if (ctx
->record_type
== NULL_TREE
)
2229 /* If any task_shared_vars were needed, verify all
2230 OMP_CLAUSE_SHARED clauses on GIMPLE_OMP_{PARALLEL,TASK,TEAMS}
2231 statements if use_pointer_for_field hasn't changed
2232 because of that. If it did, update field types now. */
2233 if (task_shared_vars
)
2237 for (c
= gimple_omp_taskreg_clauses (ctx
->stmt
);
2238 c
; c
= OMP_CLAUSE_CHAIN (c
))
2239 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_SHARED
2240 && !OMP_CLAUSE_SHARED_FIRSTPRIVATE (c
))
2242 tree decl
= OMP_CLAUSE_DECL (c
);
2244 /* Global variables don't need to be copied,
2245 the receiver side will use them directly. */
2246 if (is_global_var (maybe_lookup_decl_in_outer_ctx (decl
, ctx
)))
2248 if (!bitmap_bit_p (task_shared_vars
, DECL_UID (decl
))
2249 || !use_pointer_for_field (decl
, ctx
))
/* The field was created as a pointer but the decl is now passed by
   value (or vice versa) — retype the field and refresh its alignment,
   widening the record alignment if needed.  */
2251 tree field
= lookup_field (decl
, ctx
);
2252 if (TREE_CODE (TREE_TYPE (field
)) == POINTER_TYPE
2253 && TREE_TYPE (TREE_TYPE (field
)) == TREE_TYPE (decl
))
2255 TREE_TYPE (field
) = build_pointer_type (TREE_TYPE (decl
));
2256 TREE_THIS_VOLATILE (field
) = 0;
2257 DECL_USER_ALIGN (field
) = 0;
2258 SET_DECL_ALIGN (field
, TYPE_ALIGN (TREE_TYPE (field
)));
2259 if (TYPE_ALIGN (ctx
->record_type
) < DECL_ALIGN (field
))
2260 SET_TYPE_ALIGN (ctx
->record_type
, DECL_ALIGN (field
));
/* Mirror the same adjustment on the sender-side record, if any.  */
2261 if (ctx
->srecord_type
)
2263 tree sfield
= lookup_sfield (decl
, ctx
);
2264 TREE_TYPE (sfield
) = TREE_TYPE (field
);
2265 TREE_THIS_VOLATILE (sfield
) = 0;
2266 DECL_USER_ALIGN (sfield
) = 0;
2267 SET_DECL_ALIGN (sfield
, DECL_ALIGN (field
));
2268 if (TYPE_ALIGN (ctx
->srecord_type
) < DECL_ALIGN (sfield
))
2269 SET_TYPE_ALIGN (ctx
->srecord_type
, DECL_ALIGN (sfield
));
2274 if (gimple_code (ctx
->stmt
) == GIMPLE_OMP_PARALLEL
)
2276 tree clauses
= gimple_omp_parallel_clauses (ctx
->stmt
);
2277 tree c
= omp_find_clause (clauses
, OMP_CLAUSE__REDUCTEMP_
);
2280 /* Move the _reductemp_ clause first. GOMP_parallel_reductions
2281 expects to find it at the start of data. */
2282 tree f
= lookup_field (OMP_CLAUSE_DECL (c
), ctx
);
2283 tree
*p
= &TYPE_FIELDS (ctx
->record_type
);
2287 *p
= DECL_CHAIN (*p
);
2291 p
= &DECL_CHAIN (*p
);
2292 DECL_CHAIN (f
) = TYPE_FIELDS (ctx
->record_type
);
2293 TYPE_FIELDS (ctx
->record_type
) = f
;
2295 layout_type (ctx
->record_type
);
2296 fixup_child_record_type (ctx
);
2298 else if (gimple_code (ctx
->stmt
) == GIMPLE_OMP_TEAMS
)
2300 layout_type (ctx
->record_type
);
2301 fixup_child_record_type (ctx
);
/* Remaining case (presumably GIMPLE_OMP_TASK — TODO confirm against
   upstream omp-low.c; the branch header is missing in this view).  */
2305 location_t loc
= gimple_location (ctx
->stmt
);
2306 tree
*p
, vla_fields
= NULL_TREE
, *q
= &vla_fields
;
2307 /* Move VLA fields to the end. */
2308 p
= &TYPE_FIELDS (ctx
->record_type
);
2310 if (!TYPE_SIZE_UNIT (TREE_TYPE (*p
))
2311 || ! TREE_CONSTANT (TYPE_SIZE_UNIT (TREE_TYPE (*p
))))
2314 *p
= TREE_CHAIN (*p
);
2315 TREE_CHAIN (*q
) = NULL_TREE
;
2316 q
= &TREE_CHAIN (*q
);
2319 p
= &DECL_CHAIN (*p
);
2321 if (gimple_omp_task_taskloop_p (ctx
->stmt
))
2323 /* Move fields corresponding to first and second _looptemp_
2324 clause first. There are filled by GOMP_taskloop
2325 and thus need to be in specific positions. */
2326 tree clauses
= gimple_omp_task_clauses (ctx
->stmt
);
2327 tree c1
= omp_find_clause (clauses
, OMP_CLAUSE__LOOPTEMP_
);
2328 tree c2
= omp_find_clause (OMP_CLAUSE_CHAIN (c1
),
2329 OMP_CLAUSE__LOOPTEMP_
);
2330 tree c3
= omp_find_clause (clauses
, OMP_CLAUSE__REDUCTEMP_
);
2331 tree f1
= lookup_field (OMP_CLAUSE_DECL (c1
), ctx
);
2332 tree f2
= lookup_field (OMP_CLAUSE_DECL (c2
), ctx
);
2333 tree f3
= c3
? lookup_field (OMP_CLAUSE_DECL (c3
), ctx
) : NULL_TREE
;
/* Unlink f1/f2/f3 from the field chain, then re-chain them at the
   front of the record in f1, f2[, f3] order.  */
2334 p
= &TYPE_FIELDS (ctx
->record_type
);
2336 if (*p
== f1
|| *p
== f2
|| *p
== f3
)
2337 *p
= DECL_CHAIN (*p
);
2339 p
= &DECL_CHAIN (*p
);
2340 DECL_CHAIN (f1
) = f2
;
2343 DECL_CHAIN (f2
) = f3
;
2344 DECL_CHAIN (f3
) = TYPE_FIELDS (ctx
->record_type
);
2347 DECL_CHAIN (f2
) = TYPE_FIELDS (ctx
->record_type
);
2348 TYPE_FIELDS (ctx
->record_type
) = f1
;
/* Repeat the same reordering for the sender-side record.  */
2349 if (ctx
->srecord_type
)
2351 f1
= lookup_sfield (OMP_CLAUSE_DECL (c1
), ctx
);
2352 f2
= lookup_sfield (OMP_CLAUSE_DECL (c2
), ctx
);
2354 f3
= lookup_sfield (OMP_CLAUSE_DECL (c3
), ctx
);
2355 p
= &TYPE_FIELDS (ctx
->srecord_type
);
2357 if (*p
== f1
|| *p
== f2
|| *p
== f3
)
2358 *p
= DECL_CHAIN (*p
);
2360 p
= &DECL_CHAIN (*p
);
2361 DECL_CHAIN (f1
) = f2
;
2362 DECL_CHAIN (f2
) = TYPE_FIELDS (ctx
->srecord_type
);
2365 DECL_CHAIN (f2
) = f3
;
2366 DECL_CHAIN (f3
) = TYPE_FIELDS (ctx
->srecord_type
);
2369 DECL_CHAIN (f2
) = TYPE_FIELDS (ctx
->srecord_type
);
2370 TYPE_FIELDS (ctx
->srecord_type
) = f1
;
2373 layout_type (ctx
->record_type
);
2374 fixup_child_record_type (ctx
);
2375 if (ctx
->srecord_type
)
2376 layout_type (ctx
->srecord_type
);
/* Record the (possibly runtime-computed) size and alignment of the
   task data block on the task statement; non-constant sizes are
   remapped into the outer context via finish_taskreg_remap.  */
2377 tree t
= fold_convert_loc (loc
, long_integer_type_node
,
2378 TYPE_SIZE_UNIT (ctx
->record_type
));
2379 if (TREE_CODE (t
) != INTEGER_CST
)
2381 t
= unshare_expr (t
);
2382 walk_tree (&t
, finish_taskreg_remap
, ctx
, NULL
);
2384 gimple_omp_task_set_arg_size (ctx
->stmt
, t
);
2385 t
= build_int_cst (long_integer_type_node
,
2386 TYPE_ALIGN_UNIT (ctx
->record_type
));
2387 gimple_omp_task_set_arg_align (ctx
->stmt
, t
);
2391 /* Find the enclosing offload context. */
/* Walk outward along the context chain and return the innermost
   context whose statement is a GIMPLE_OMP_TARGET (the enclosing
   offload region), per the comment above.
   NOTE(review): garbled extraction — the return statements and braces
   are missing from this view; code preserved byte-for-byte.  */
2393 static omp_context
*
2394 enclosing_target_ctx (omp_context
*ctx
)
2396 for (; ctx
; ctx
= ctx
->outer
)
2397 if (gimple_code (ctx
->stmt
) == GIMPLE_OMP_TARGET
)
2403 /* Return true if ctx is part of an oacc kernels region. */
/* Return whether CTX lies (directly or transitively) inside an OpenACC
   kernels region: walk outward looking for a GIMPLE_OMP_TARGET with
   kind GF_OMP_TARGET_KIND_OACC_KERNELS.
   NOTE(review): garbled extraction — return statements and braces are
   missing from this view; code preserved byte-for-byte.  */
2406 ctx_in_oacc_kernels_region (omp_context
*ctx
)
2408 for (;ctx
!= NULL
; ctx
= ctx
->outer
)
2410 gimple
*stmt
= ctx
->stmt
;
2411 if (gimple_code (stmt
) == GIMPLE_OMP_TARGET
2412 && gimple_omp_target_kind (stmt
) == GF_OMP_TARGET_KIND_OACC_KERNELS
)
2419 /* Check the parallelism clauses inside a kernels regions.
2420 Until kernels handling moves to use the same loop indirection
2421 scheme as parallel, we need to do this checking early. */
/* Recursively check gang/worker/vector/seq/auto clauses on loops inside
   an OpenACC kernels region, diagnosing conflicting specifiers and
   reuse of the same parallelism level by nested loops.  Returns the
   union of the outer and this loop's dimension masks.
   NOTE(review): garbled extraction — original line numbers fused in,
   breaks/braces/returns missing from this view; code preserved
   byte-for-byte.  */
2424 check_oacc_kernel_gwv (gomp_for
*stmt
, omp_context
*ctx
)
2426 bool checking
= true;
2427 unsigned outer_mask
= 0;
2428 unsigned this_mask
= 0;
2429 bool has_seq
= false, has_auto
= false;
/* Recurse outward first to accumulate the parallelism already claimed
   by enclosing loops.  */
2432 outer_mask
= check_oacc_kernel_gwv (NULL
, ctx
->outer
);
2436 if (gimple_code (ctx
->stmt
) != GIMPLE_OMP_FOR
)
2438 stmt
= as_a
<gomp_for
*> (ctx
->stmt
);
/* Gather this loop's gang/worker/vector/seq/auto clauses.  */
2441 for (tree c
= gimple_omp_for_clauses (stmt
); c
; c
= OMP_CLAUSE_CHAIN (c
))
2443 switch (OMP_CLAUSE_CODE (c
))
2445 case OMP_CLAUSE_GANG
:
2446 this_mask
|= GOMP_DIM_MASK (GOMP_DIM_GANG
);
2448 case OMP_CLAUSE_WORKER
:
2449 this_mask
|= GOMP_DIM_MASK (GOMP_DIM_WORKER
);
2451 case OMP_CLAUSE_VECTOR
:
2452 this_mask
|= GOMP_DIM_MASK (GOMP_DIM_VECTOR
);
2454 case OMP_CLAUSE_SEQ
:
2457 case OMP_CLAUSE_AUTO
:
2467 if (has_seq
&& (this_mask
|| has_auto
))
2468 error_at (gimple_location (stmt
), "%<seq%> overrides other"
2469 " OpenACC loop specifiers");
2470 else if (has_auto
&& this_mask
)
2471 error_at (gimple_location (stmt
), "%<auto%> conflicts with other"
2472 " OpenACC loop specifiers");
2474 if (this_mask
& outer_mask
)
2475 error_at (gimple_location (stmt
), "inner loop uses same"
2476 " OpenACC parallelism as containing loop");
2479 return outer_mask
| this_mask
;
2482 /* Scan a GIMPLE_OMP_FOR. */
/* Scan a GIMPLE_OMP_FOR: create its omp_context, validate OpenACC
   clause arguments/parallelism, enforce that reductions on a variable
   appear on all intervening loops, strip reductions inside OpenACC
   kernels regions, then scan clauses, pre-body, loop bounds and body.
   Returns the new context.
   NOTE(review): garbled extraction — original omp-low.c line numbers
   are fused into the text and many lines (braces, breaks, variable
   declarations such as c_op0/i, format arguments) are missing from
   this view.  Code text preserved byte-for-byte.  */
2484 static omp_context
*
2485 scan_omp_for (gomp_for
*stmt
, omp_context
*outer_ctx
)
2489 tree clauses
= gimple_omp_for_clauses (stmt
);
2491 ctx
= new_omp_context (stmt
, outer_ctx
);
2493 if (is_gimple_omp_oacc (stmt
))
2495 omp_context
*tgt
= enclosing_target_ctx (outer_ctx
);
/* Inside parallel/serial compute constructs (or orphaned routines),
   gang/worker/vector clause arguments are not permitted.  */
2497 if (!tgt
|| is_oacc_parallel_or_serial (tgt
))
2498 for (tree c
= clauses
; c
; c
= OMP_CLAUSE_CHAIN (c
))
2501 switch (OMP_CLAUSE_CODE (c
))
2503 case OMP_CLAUSE_GANG
:
2504 c_op0
= OMP_CLAUSE_GANG_EXPR (c
);
2507 case OMP_CLAUSE_WORKER
:
2508 c_op0
= OMP_CLAUSE_WORKER_EXPR (c
);
2511 case OMP_CLAUSE_VECTOR
:
2512 c_op0
= OMP_CLAUSE_VECTOR_EXPR (c
);
2521 error_at (OMP_CLAUSE_LOCATION (c
),
2522 "argument not permitted on %qs clause",
2523 omp_clause_code_name
[OMP_CLAUSE_CODE (c
)]);
2525 inform (gimple_location (tgt
->stmt
),
2526 "enclosing parent compute construct");
2527 else if (oacc_get_fn_attrib (current_function_decl
))
2528 inform (DECL_SOURCE_LOCATION (current_function_decl
),
2529 "enclosing routine");
2535 if (tgt
&& is_oacc_kernels (tgt
))
2536 check_oacc_kernel_gwv (stmt
, ctx
);
2538 /* Collect all variables named in reductions on this loop. Ensure
2539 that, if this loop has a reduction on some variable v, and there is
2540 a reduction on v somewhere in an outer context, then there is a
2541 reduction on v on all intervening loops as well. */
2542 tree local_reduction_clauses
= NULL
;
2543 for (tree c
= gimple_omp_for_clauses (stmt
); c
; c
= OMP_CLAUSE_CHAIN (c
))
2545 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_REDUCTION
)
2546 local_reduction_clauses
2547 = tree_cons (NULL
, c
, local_reduction_clauses
);
/* Lazily compute the union of outer reduction clauses from the
   parent context.  */
2549 if (ctx
->outer_reduction_clauses
== NULL
&& ctx
->outer
!= NULL
)
2550 ctx
->outer_reduction_clauses
2551 = chainon (unshare_expr (ctx
->outer
->local_reduction_clauses
),
2552 ctx
->outer
->outer_reduction_clauses
);
2553 tree outer_reduction_clauses
= ctx
->outer_reduction_clauses
;
2554 tree local_iter
= local_reduction_clauses
;
2555 for (; local_iter
; local_iter
= TREE_CHAIN (local_iter
))
2557 tree local_clause
= TREE_VALUE (local_iter
);
2558 tree local_var
= OMP_CLAUSE_DECL (local_clause
);
2559 tree_code local_op
= OMP_CLAUSE_REDUCTION_CODE (local_clause
);
2560 bool have_outer_reduction
= false;
2561 tree ctx_iter
= outer_reduction_clauses
;
2562 for (; ctx_iter
; ctx_iter
= TREE_CHAIN (ctx_iter
))
2564 tree outer_clause
= TREE_VALUE (ctx_iter
);
2565 tree outer_var
= OMP_CLAUSE_DECL (outer_clause
);
2566 tree_code outer_op
= OMP_CLAUSE_REDUCTION_CODE (outer_clause
);
2567 if (outer_var
== local_var
&& outer_op
!= local_op
)
2569 warning_at (OMP_CLAUSE_LOCATION (local_clause
), 0,
2570 "conflicting reduction operations for %qE",
2572 inform (OMP_CLAUSE_LOCATION (outer_clause
),
2573 "location of the previous reduction for %qE",
2576 if (outer_var
== local_var
)
2578 have_outer_reduction
= true;
2582 if (have_outer_reduction
)
2584 /* There is a reduction on outer_var both on this loop and on
2585 some enclosing loop. Walk up the context tree until such a
2586 loop with a reduction on outer_var is found, and complain
2587 about all intervening loops that do not have such a
2589 struct omp_context
*curr_loop
= ctx
->outer
;
2591 while (curr_loop
!= NULL
)
2593 tree curr_iter
= curr_loop
->local_reduction_clauses
;
2594 for (; curr_iter
; curr_iter
= TREE_CHAIN (curr_iter
))
2596 tree curr_clause
= TREE_VALUE (curr_iter
);
2597 tree curr_var
= OMP_CLAUSE_DECL (curr_clause
);
2598 if (curr_var
== local_var
)
2605 warning_at (gimple_location (curr_loop
->stmt
), 0,
2606 "nested loop in reduction needs "
2607 "reduction clause for %qE",
2611 curr_loop
= curr_loop
->outer
;
2615 ctx
->local_reduction_clauses
= local_reduction_clauses
;
2616 ctx
->outer_reduction_clauses
2617 = chainon (unshare_expr (ctx
->local_reduction_clauses
),
2618 ctx
->outer_reduction_clauses
);
2620 if (tgt
&& is_oacc_kernels (tgt
))
2622 /* Strip out reductions, as they are not handled yet. */
2623 tree
*prev_ptr
= &clauses
;
2625 while (tree probe
= *prev_ptr
)
2627 tree
*next_ptr
= &OMP_CLAUSE_CHAIN (probe
);
2629 if (OMP_CLAUSE_CODE (probe
) == OMP_CLAUSE_REDUCTION
)
2630 *prev_ptr
= *next_ptr
;
2632 prev_ptr
= next_ptr
;
2635 gimple_omp_for_set_clauses (stmt
, clauses
);
/* Finally scan the clauses, the pre-body, each collapsed loop's
   index/init/final/increment operands, and the loop body.  */
2639 scan_sharing_clauses (clauses
, ctx
);
2641 scan_omp (gimple_omp_for_pre_body_ptr (stmt
), ctx
);
2642 for (i
= 0; i
< gimple_omp_for_collapse (stmt
); i
++)
2644 scan_omp_op (gimple_omp_for_index_ptr (stmt
, i
), ctx
);
2645 scan_omp_op (gimple_omp_for_initial_ptr (stmt
, i
), ctx
);
2646 scan_omp_op (gimple_omp_for_final_ptr (stmt
, i
), ctx
);
2647 scan_omp_op (gimple_omp_for_incr_ptr (stmt
, i
), ctx
);
2649 scan_omp (gimple_omp_body_ptr (stmt
), ctx
);
2653 /* Duplicate #pragma omp simd, one for SIMT, another one for SIMD. */
/* Duplicate an omp simd loop for SIMT vs SIMD lowering: build a bind
   holding "if (GOMP_USE_SIMT ()) <copy with _simt_ clause> else
   <original loop>", replace the original statement in GSI with that
   bind, and scan both variants (linking the SIMT copy via simt_stmt).
   NOTE(review): garbled extraction — original omp-low.c line numbers
   are fused into the text; braces are missing from this view.  Code
   text preserved byte-for-byte.  */
2656 scan_omp_simd (gimple_stmt_iterator
*gsi
, gomp_for
*stmt
,
2657 omp_context
*outer_ctx
)
2659 gbind
*bind
= gimple_build_bind (NULL
, NULL
, NULL
);
2660 gsi_replace (gsi
, bind
, false);
2661 gimple_seq seq
= NULL
;
/* cond = GOMP_USE_SIMT (); selects the SIMT path at run time.  */
2662 gimple
*g
= gimple_build_call_internal (IFN_GOMP_USE_SIMT
, 0);
2663 tree cond
= create_tmp_var_raw (integer_type_node
);
2664 DECL_CONTEXT (cond
) = current_function_decl
;
2665 DECL_SEEN_IN_BIND_EXPR_P (cond
) = 1;
2666 gimple_bind_set_vars (bind
, cond
);
2667 gimple_call_set_lhs (g
, cond
);
2668 gimple_seq_add_stmt (&seq
, g
);
2669 tree lab1
= create_artificial_label (UNKNOWN_LOCATION
);
2670 tree lab2
= create_artificial_label (UNKNOWN_LOCATION
);
2671 tree lab3
= create_artificial_label (UNKNOWN_LOCATION
);
2672 g
= gimple_build_cond (NE_EXPR
, cond
, integer_zero_node
, lab1
, lab2
);
2673 gimple_seq_add_stmt (&seq
, g
);
2674 g
= gimple_build_label (lab1
);
2675 gimple_seq_add_stmt (&seq
, g
);
/* SIMT branch: a deep copy of the loop tagged with _simt_.  */
2676 gimple_seq new_seq
= copy_gimple_seq_and_replace_locals (stmt
);
2677 gomp_for
*new_stmt
= as_a
<gomp_for
*> (new_seq
);
2678 tree clause
= build_omp_clause (gimple_location (stmt
), OMP_CLAUSE__SIMT_
);
2679 OMP_CLAUSE_CHAIN (clause
) = gimple_omp_for_clauses (new_stmt
);
2680 gimple_omp_for_set_clauses (new_stmt
, clause
);
2681 gimple_seq_add_stmt (&seq
, new_stmt
);
2682 g
= gimple_build_goto (lab3
);
2683 gimple_seq_add_stmt (&seq
, g
);
2684 g
= gimple_build_label (lab2
);
2685 gimple_seq_add_stmt (&seq
, g
);
2686 gimple_seq_add_stmt (&seq
, stmt
);
2687 g
= gimple_build_label (lab3
);
2688 gimple_seq_add_stmt (&seq
, g
);
2689 gimple_bind_set_body (bind
, seq
);
2691 scan_omp_for (new_stmt
, outer_ctx
);
2692 scan_omp_for (stmt
, outer_ctx
)->simt_stmt
= new_stmt
;
2695 static tree
omp_find_scan (gimple_stmt_iterator
*, bool *,
2696 struct walk_stmt_info
*);
2697 static omp_context
*maybe_lookup_ctx (gimple
*);
2699 /* Duplicate #pragma omp simd, one for the scan input phase loop and one
2700 for scan phase loop. */
/* Duplicate an omp simd loop containing an inclusive/exclusive scan:
   wrap the original loop in an input-phase GIMPLE_OMP_SCAN and a copy
   of it in a scan-phase GIMPLE_OMP_SCAN (inserted after it), locate
   the inner scan separator in each copy via omp_find_scan, and scan
   both new contexts; the copy is marked for_simd_scan_phase.
   NOTE(review): garbled extraction — original omp-low.c line numbers
   are fused into the text and some lines (braces, trailing call
   arguments, e.g. of the second walk_gimple_seq_mod) are missing from
   this view.  Code text preserved byte-for-byte.  */
2703 scan_omp_simd_scan (gimple_stmt_iterator
*gsi
, gomp_for
*stmt
,
2704 omp_context
*outer_ctx
)
2706 /* The only change between inclusive and exclusive scan will be
2707 within the first simd loop, so just use inclusive in the
2708 worksharing loop. */
2709 outer_ctx
->scan_inclusive
= true;
2710 tree c
= build_omp_clause (UNKNOWN_LOCATION
, OMP_CLAUSE_INCLUSIVE
);
2711 OMP_CLAUSE_DECL (c
) = integer_zero_node
;
2713 gomp_scan
*input_stmt
= gimple_build_omp_scan (NULL
, NULL_TREE
);
2714 gomp_scan
*scan_stmt
= gimple_build_omp_scan (NULL
, c
);
2715 gsi_replace (gsi
, input_stmt
, false);
2716 gimple_seq input_body
= NULL
;
2717 gimple_seq_add_stmt (&input_body
, stmt
);
2718 gsi_insert_after (gsi
, scan_stmt
, GSI_NEW_STMT
);
/* Find the inner GIMPLE_OMP_SCAN separator inside the original loop
   body.  */
2720 gimple_stmt_iterator input1_gsi
= gsi_none ();
2721 struct walk_stmt_info wi
;
2722 memset (&wi
, 0, sizeof (wi
));
2724 wi
.info
= (void *) &input1_gsi
;
2725 walk_gimple_seq_mod (gimple_omp_body_ptr (stmt
), omp_find_scan
, NULL
, &wi
);
2726 gcc_assert (!gsi_end_p (input1_gsi
));
2728 gimple
*input_stmt1
= gsi_stmt (input1_gsi
);
2729 gsi_next (&input1_gsi
);
2730 gimple
*scan_stmt1
= gsi_stmt (input1_gsi
);
2731 gcc_assert (scan_stmt1
&& gimple_code (scan_stmt1
) == GIMPLE_OMP_SCAN
);
2732 c
= gimple_omp_scan_clauses (as_a
<gomp_scan
*> (scan_stmt1
));
/* For exclusive scans the input/scan halves come in the opposite
   order.  */
2733 if (c
&& OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_EXCLUSIVE
)
2734 std::swap (input_stmt1
, scan_stmt1
);
/* Temporarily detach the input half so the deep copy below only
   clones what is needed, then restore it.  */
2736 gimple_seq input_body1
= gimple_omp_body (input_stmt1
);
2737 gimple_omp_set_body (input_stmt1
, NULL
);
2739 gimple_seq scan_body
= copy_gimple_seq_and_replace_locals (stmt
);
2740 gomp_for
*new_stmt
= as_a
<gomp_for
*> (scan_body
);
2742 gimple_omp_set_body (input_stmt1
, input_body1
);
2743 gimple_omp_set_body (scan_stmt1
, NULL
);
/* Locate the scan separator inside the copied loop as well.  */
2745 gimple_stmt_iterator input2_gsi
= gsi_none ();
2746 memset (&wi
, 0, sizeof (wi
));
2748 wi
.info
= (void *) &input2_gsi
;
2749 walk_gimple_seq_mod (gimple_omp_body_ptr (new_stmt
), omp_find_scan
,
2751 gcc_assert (!gsi_end_p (input2_gsi
));
2753 gimple
*input_stmt2
= gsi_stmt (input2_gsi
);
2754 gsi_next (&input2_gsi
);
2755 gimple
*scan_stmt2
= gsi_stmt (input2_gsi
);
2756 gcc_assert (scan_stmt2
&& gimple_code (scan_stmt2
) == GIMPLE_OMP_SCAN
);
2757 if (c
&& OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_EXCLUSIVE
)
2758 std::swap (input_stmt2
, scan_stmt2
);
2760 gimple_omp_set_body (input_stmt2
, NULL
);
2762 gimple_omp_set_body (input_stmt
, input_body
);
2763 gimple_omp_set_body (scan_stmt
, scan_body
);
2765 omp_context
*ctx
= new_omp_context (input_stmt
, outer_ctx
);
2766 scan_omp (gimple_omp_body_ptr (input_stmt
), ctx
);
2768 ctx
= new_omp_context (scan_stmt
, outer_ctx
);
2769 scan_omp (gimple_omp_body_ptr (scan_stmt
), ctx
);
2771 maybe_lookup_ctx (new_stmt
)->for_simd_scan_phase
= true;
2774 /* Scan an OpenMP sections directive. */
/* Scan an OpenMP sections directive: create a context, then scan its
   clauses and body.
   NOTE(review): garbled extraction — braces and the function's return
   type line are missing from this view; code preserved
   byte-for-byte.  */
2777 scan_omp_sections (gomp_sections
*stmt
, omp_context
*outer_ctx
)
2781 ctx
= new_omp_context (stmt
, outer_ctx
);
2782 scan_sharing_clauses (gimple_omp_sections_clauses (stmt
), ctx
);
2783 scan_omp (gimple_omp_body_ptr (stmt
), ctx
);
2786 /* Scan an OpenMP single directive. */
/* Scan an OpenMP single directive: create a context with a
   ".omp_copy_s" record type for copyprivate data, scan clauses and
   body, then drop the record if it ended up empty, otherwise lay it
   out.
   NOTE(review): garbled extraction — braces and some lines (e.g. the
   declaration of `name`/`ctx`) are missing from this view; code
   preserved byte-for-byte.  */
2789 scan_omp_single (gomp_single
*stmt
, omp_context
*outer_ctx
)
2794 ctx
= new_omp_context (stmt
, outer_ctx
);
2795 ctx
->field_map
= splay_tree_new (splay_tree_compare_pointers
, 0, 0);
2796 ctx
->record_type
= lang_hooks
.types
.make_type (RECORD_TYPE
);
2797 name
= create_tmp_var_name (".omp_copy_s");
2798 name
= build_decl (gimple_location (stmt
),
2799 TYPE_DECL
, name
, ctx
->record_type
);
2800 TYPE_NAME (ctx
->record_type
) = name
;
2802 scan_sharing_clauses (gimple_omp_single_clauses (stmt
), ctx
);
2803 scan_omp (gimple_omp_body_ptr (stmt
), ctx
);
2805 if (TYPE_FIELDS (ctx
->record_type
) == NULL
)
2806 ctx
->record_type
= NULL
;
2808 layout_type (ctx
->record_type
);
2811 /* Scan a GIMPLE_OMP_TARGET. */
/* Scan a GIMPLE_OMP_TARGET: create a context with a ".omp_data_t"
   record describing the data to be mapped, create the offload child
   function for offloaded regions, scan clauses and body, then finish
   the record (reverse fields, verify alignment, lay out, fix up the
   child's receiver record).
   NOTE(review): garbled extraction — braces, some declarations and
   conditions (e.g. the loop-termination test at original line 2850)
   are missing from this view; code preserved byte-for-byte.  */
2814 scan_omp_target (gomp_target
*stmt
, omp_context
*outer_ctx
)
2818 bool offloaded
= is_gimple_omp_offloaded (stmt
);
2819 tree clauses
= gimple_omp_target_clauses (stmt
);
2821 ctx
= new_omp_context (stmt
, outer_ctx
);
2822 ctx
->field_map
= splay_tree_new (splay_tree_compare_pointers
, 0, 0);
2823 ctx
->record_type
= lang_hooks
.types
.make_type (RECORD_TYPE
);
2824 name
= create_tmp_var_name (".omp_data_t");
2825 name
= build_decl (gimple_location (stmt
),
2826 TYPE_DECL
, name
, ctx
->record_type
);
2827 DECL_ARTIFICIAL (name
) = 1;
2828 DECL_NAMELESS (name
) = 1;
2829 TYPE_NAME (ctx
->record_type
) = name
;
2830 TYPE_ARTIFICIAL (ctx
->record_type
) = 1;
/* Offloaded regions get an outlined child function.  */
2834 create_omp_child_function (ctx
, false);
2835 gimple_omp_target_set_child_fn (stmt
, ctx
->cb
.dst_fn
);
2838 scan_sharing_clauses (clauses
, ctx
);
2839 scan_omp (gimple_omp_body_ptr (stmt
), ctx
);
2841 if (TYPE_FIELDS (ctx
->record_type
) == NULL
)
2842 ctx
->record_type
= ctx
->receiver_decl
= NULL
;
/* Fields were accumulated in reverse; restore declaration order and
   check they all share one alignment before layout.  */
2845 TYPE_FIELDS (ctx
->record_type
)
2846 = nreverse (TYPE_FIELDS (ctx
->record_type
));
2849 unsigned int align
= DECL_ALIGN (TYPE_FIELDS (ctx
->record_type
));
2850 for (tree field
= TYPE_FIELDS (ctx
->record_type
);
2852 field
= DECL_CHAIN (field
))
2853 gcc_assert (DECL_ALIGN (field
) == align
);
2855 layout_type (ctx
->record_type
);
2857 fixup_child_record_type (ctx
);
2861 /* Scan an OpenMP teams directive. */
/* Scan an OpenMP teams directive.  Non-host teams just get their
   clauses and body scanned; host teams additionally behave like a
   taskreg construct: they are pushed on taskreg_contexts, get a
   ".omp_data_s" record type and an outlined child function.
   NOTE(review): garbled extraction — braces and some lines (e.g. an
   early-return path after the non-host scan) are missing from this
   view; code preserved byte-for-byte.  */
2864 scan_omp_teams (gomp_teams
*stmt
, omp_context
*outer_ctx
)
2866 omp_context
*ctx
= new_omp_context (stmt
, outer_ctx
);
2868 if (!gimple_omp_teams_host (stmt
))
2870 scan_sharing_clauses (gimple_omp_teams_clauses (stmt
), ctx
);
2871 scan_omp (gimple_omp_body_ptr (stmt
), ctx
);
/* Host teams: treated like parallel/task — record context for
   finish_taskreg_scan and outline a child function.  */
2874 taskreg_contexts
.safe_push (ctx
);
2875 gcc_assert (taskreg_nesting_level
== 1);
2876 ctx
->field_map
= splay_tree_new (splay_tree_compare_pointers
, 0, 0);
2877 ctx
->record_type
= lang_hooks
.types
.make_type (RECORD_TYPE
);
2878 tree name
= create_tmp_var_name (".omp_data_s");
2879 name
= build_decl (gimple_location (stmt
),
2880 TYPE_DECL
, name
, ctx
->record_type
);
2881 DECL_ARTIFICIAL (name
) = 1;
2882 DECL_NAMELESS (name
) = 1;
2883 TYPE_NAME (ctx
->record_type
) = name
;
2884 TYPE_ARTIFICIAL (ctx
->record_type
) = 1;
2885 create_omp_child_function (ctx
, false);
2886 gimple_omp_teams_set_child_fn (stmt
, ctx
->cb
.dst_fn
);
2888 scan_sharing_clauses (gimple_omp_teams_clauses (stmt
), ctx
);
2889 scan_omp (gimple_omp_body_ptr (stmt
), ctx
);
2891 if (TYPE_FIELDS (ctx
->record_type
) == NULL
)
2892 ctx
->record_type
= ctx
->receiver_decl
= NULL
;
2895 /* Check nesting restrictions. */
2897 check_omp_nesting_restrictions (gimple
*stmt
, omp_context
*ctx
)
2901 /* No nesting of non-OpenACC STMT (that is, an OpenMP one, or a GOMP builtin)
2902 inside an OpenACC CTX. */
2903 if (!(is_gimple_omp (stmt
)
2904 && is_gimple_omp_oacc (stmt
))
2905 /* Except for atomic codes that we share with OpenMP. */
2906 && !(gimple_code (stmt
) == GIMPLE_OMP_ATOMIC_LOAD
2907 || gimple_code (stmt
) == GIMPLE_OMP_ATOMIC_STORE
))
2909 if (oacc_get_fn_attrib (cfun
->decl
) != NULL
)
2911 error_at (gimple_location (stmt
),
2912 "non-OpenACC construct inside of OpenACC routine");
2916 for (omp_context
*octx
= ctx
; octx
!= NULL
; octx
= octx
->outer
)
2917 if (is_gimple_omp (octx
->stmt
)
2918 && is_gimple_omp_oacc (octx
->stmt
))
2920 error_at (gimple_location (stmt
),
2921 "non-OpenACC construct inside of OpenACC region");
2928 if (gimple_code (ctx
->stmt
) == GIMPLE_OMP_SCAN
2930 && gimple_code (ctx
->outer
->stmt
) == GIMPLE_OMP_FOR
)
2932 if (gimple_code (ctx
->stmt
) == GIMPLE_OMP_FOR
2933 && gimple_omp_for_kind (ctx
->stmt
) == GF_OMP_FOR_KIND_SIMD
2937 if (ctx
->order_concurrent
2938 && (gimple_code (stmt
) == GIMPLE_OMP_ORDERED
2939 || gimple_code (stmt
) == GIMPLE_OMP_ATOMIC_LOAD
2940 || gimple_code (stmt
) == GIMPLE_OMP_ATOMIC_STORE
))
2942 error_at (gimple_location (stmt
),
2943 "OpenMP constructs other than %<parallel%>, %<loop%>"
2944 " or %<simd%> may not be nested inside a region with"
2945 " the %<order(concurrent)%> clause");
2948 if (gimple_code (stmt
) == GIMPLE_OMP_ORDERED
)
2950 c
= gimple_omp_ordered_clauses (as_a
<gomp_ordered
*> (stmt
));
2951 if (omp_find_clause (c
, OMP_CLAUSE_SIMD
))
2953 if (omp_find_clause (c
, OMP_CLAUSE_THREADS
)
2954 && (ctx
->outer
== NULL
2955 || !gimple_omp_for_combined_into_p (ctx
->stmt
)
2956 || gimple_code (ctx
->outer
->stmt
) != GIMPLE_OMP_FOR
2957 || (gimple_omp_for_kind (ctx
->outer
->stmt
)
2958 != GF_OMP_FOR_KIND_FOR
)
2959 || !gimple_omp_for_combined_p (ctx
->outer
->stmt
)))
2961 error_at (gimple_location (stmt
),
2962 "%<ordered simd threads%> must be closely "
2963 "nested inside of %<%s simd%> region",
2964 lang_GNU_Fortran () ? "do" : "for");
2970 else if (gimple_code (stmt
) == GIMPLE_OMP_ATOMIC_LOAD
2971 || gimple_code (stmt
) == GIMPLE_OMP_ATOMIC_STORE
2972 || gimple_code (stmt
) == GIMPLE_OMP_SCAN
)
2974 else if (gimple_code (stmt
) == GIMPLE_OMP_FOR
2975 && gimple_omp_for_kind (ctx
->stmt
) == GF_OMP_FOR_KIND_SIMD
)
2977 error_at (gimple_location (stmt
),
2978 "OpenMP constructs other than "
2979 "%<ordered simd%>, %<simd%>, %<loop%> or %<atomic%> may "
2980 "not be nested inside %<simd%> region");
2983 else if (gimple_code (ctx
->stmt
) == GIMPLE_OMP_TEAMS
)
2985 if ((gimple_code (stmt
) != GIMPLE_OMP_FOR
2986 || (gimple_omp_for_kind (stmt
) != GF_OMP_FOR_KIND_DISTRIBUTE
2987 && omp_find_clause (gimple_omp_for_clauses (stmt
),
2988 OMP_CLAUSE_BIND
) == NULL_TREE
))
2989 && gimple_code (stmt
) != GIMPLE_OMP_PARALLEL
)
2991 error_at (gimple_location (stmt
),
2992 "only %<distribute%>, %<parallel%> or %<loop%> "
2993 "regions are allowed to be strictly nested inside "
2994 "%<teams%> region");
2998 else if (ctx
->order_concurrent
2999 && gimple_code (stmt
) != GIMPLE_OMP_PARALLEL
3000 && (gimple_code (stmt
) != GIMPLE_OMP_FOR
3001 || gimple_omp_for_kind (stmt
) != GF_OMP_FOR_KIND_SIMD
)
3002 && gimple_code (stmt
) != GIMPLE_OMP_SCAN
)
3005 error_at (gimple_location (stmt
),
3006 "OpenMP constructs other than %<parallel%>, %<loop%> or "
3007 "%<simd%> may not be nested inside a %<loop%> region");
3009 error_at (gimple_location (stmt
),
3010 "OpenMP constructs other than %<parallel%>, %<loop%> or "
3011 "%<simd%> may not be nested inside a region with "
3012 "the %<order(concurrent)%> clause");
3016 switch (gimple_code (stmt
))
3018 case GIMPLE_OMP_FOR
:
3019 if (gimple_omp_for_kind (stmt
) == GF_OMP_FOR_KIND_SIMD
)
3021 if (gimple_omp_for_kind (stmt
) == GF_OMP_FOR_KIND_DISTRIBUTE
)
3023 if (ctx
!= NULL
&& gimple_code (ctx
->stmt
) != GIMPLE_OMP_TEAMS
)
3025 error_at (gimple_location (stmt
),
3026 "%<distribute%> region must be strictly nested "
3027 "inside %<teams%> construct");
3032 /* We split taskloop into task and nested taskloop in it. */
3033 if (gimple_omp_for_kind (stmt
) == GF_OMP_FOR_KIND_TASKLOOP
)
3035 /* For now, hope this will change and loop bind(parallel) will not
3036 be allowed in lots of contexts. */
3037 if (gimple_omp_for_kind (stmt
) == GF_OMP_FOR_KIND_FOR
3038 && omp_find_clause (gimple_omp_for_clauses (stmt
), OMP_CLAUSE_BIND
))
3040 if (gimple_omp_for_kind (stmt
) == GF_OMP_FOR_KIND_OACC_LOOP
)
3045 switch (gimple_code (ctx
->stmt
))
3047 case GIMPLE_OMP_FOR
:
3048 ok
= (gimple_omp_for_kind (ctx
->stmt
)
3049 == GF_OMP_FOR_KIND_OACC_LOOP
);
3052 case GIMPLE_OMP_TARGET
:
3053 switch (gimple_omp_target_kind (ctx
->stmt
))
3055 case GF_OMP_TARGET_KIND_OACC_PARALLEL
:
3056 case GF_OMP_TARGET_KIND_OACC_KERNELS
:
3057 case GF_OMP_TARGET_KIND_OACC_SERIAL
:
3068 else if (oacc_get_fn_attrib (current_function_decl
))
3072 error_at (gimple_location (stmt
),
3073 "OpenACC loop directive must be associated with"
3074 " an OpenACC compute region");
3080 if (is_gimple_call (stmt
)
3081 && (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt
))
3082 == BUILT_IN_GOMP_CANCEL
3083 || DECL_FUNCTION_CODE (gimple_call_fndecl (stmt
))
3084 == BUILT_IN_GOMP_CANCELLATION_POINT
))
3086 const char *bad
= NULL
;
3087 const char *kind
= NULL
;
3088 const char *construct
3089 = (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt
))
3090 == BUILT_IN_GOMP_CANCEL
)
3092 : "cancellation point";
3095 error_at (gimple_location (stmt
), "orphaned %qs construct",
3099 switch (tree_fits_shwi_p (gimple_call_arg (stmt
, 0))
3100 ? tree_to_shwi (gimple_call_arg (stmt
, 0))
3104 if (gimple_code (ctx
->stmt
) != GIMPLE_OMP_PARALLEL
)
3106 else if (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt
))
3107 == BUILT_IN_GOMP_CANCEL
3108 && !integer_zerop (gimple_call_arg (stmt
, 1)))
3109 ctx
->cancellable
= true;
3113 if (gimple_code (ctx
->stmt
) != GIMPLE_OMP_FOR
3114 || gimple_omp_for_kind (ctx
->stmt
) != GF_OMP_FOR_KIND_FOR
)
3116 else if (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt
))
3117 == BUILT_IN_GOMP_CANCEL
3118 && !integer_zerop (gimple_call_arg (stmt
, 1)))
3120 ctx
->cancellable
= true;
3121 if (omp_find_clause (gimple_omp_for_clauses (ctx
->stmt
),
3123 warning_at (gimple_location (stmt
), 0,
3124 "%<cancel for%> inside "
3125 "%<nowait%> for construct");
3126 if (omp_find_clause (gimple_omp_for_clauses (ctx
->stmt
),
3127 OMP_CLAUSE_ORDERED
))
3128 warning_at (gimple_location (stmt
), 0,
3129 "%<cancel for%> inside "
3130 "%<ordered%> for construct");
3135 if (gimple_code (ctx
->stmt
) != GIMPLE_OMP_SECTIONS
3136 && gimple_code (ctx
->stmt
) != GIMPLE_OMP_SECTION
)
3138 else if (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt
))
3139 == BUILT_IN_GOMP_CANCEL
3140 && !integer_zerop (gimple_call_arg (stmt
, 1)))
3142 if (gimple_code (ctx
->stmt
) == GIMPLE_OMP_SECTIONS
)
3144 ctx
->cancellable
= true;
3145 if (omp_find_clause (gimple_omp_sections_clauses
3148 warning_at (gimple_location (stmt
), 0,
3149 "%<cancel sections%> inside "
3150 "%<nowait%> sections construct");
3154 gcc_assert (ctx
->outer
3155 && gimple_code (ctx
->outer
->stmt
)
3156 == GIMPLE_OMP_SECTIONS
);
3157 ctx
->outer
->cancellable
= true;
3158 if (omp_find_clause (gimple_omp_sections_clauses
3161 warning_at (gimple_location (stmt
), 0,
3162 "%<cancel sections%> inside "
3163 "%<nowait%> sections construct");
3169 if (!is_task_ctx (ctx
)
3170 && (!is_taskloop_ctx (ctx
)
3171 || ctx
->outer
== NULL
3172 || !is_task_ctx (ctx
->outer
)))
3176 for (omp_context
*octx
= ctx
->outer
;
3177 octx
; octx
= octx
->outer
)
3179 switch (gimple_code (octx
->stmt
))
3181 case GIMPLE_OMP_TASKGROUP
:
3183 case GIMPLE_OMP_TARGET
:
3184 if (gimple_omp_target_kind (octx
->stmt
)
3185 != GF_OMP_TARGET_KIND_REGION
)
3188 case GIMPLE_OMP_PARALLEL
:
3189 case GIMPLE_OMP_TEAMS
:
3190 error_at (gimple_location (stmt
),
3191 "%<%s taskgroup%> construct not closely "
3192 "nested inside of %<taskgroup%> region",
3195 case GIMPLE_OMP_TASK
:
3196 if (gimple_omp_task_taskloop_p (octx
->stmt
)
3198 && is_taskloop_ctx (octx
->outer
))
3201 = gimple_omp_for_clauses (octx
->outer
->stmt
);
3202 if (!omp_find_clause (clauses
, OMP_CLAUSE_NOGROUP
))
3211 ctx
->cancellable
= true;
3216 error_at (gimple_location (stmt
), "invalid arguments");
3221 error_at (gimple_location (stmt
),
3222 "%<%s %s%> construct not closely nested inside of %qs",
3223 construct
, kind
, bad
);
3228 case GIMPLE_OMP_SECTIONS
:
3229 case GIMPLE_OMP_SINGLE
:
3230 for (; ctx
!= NULL
; ctx
= ctx
->outer
)
3231 switch (gimple_code (ctx
->stmt
))
3233 case GIMPLE_OMP_FOR
:
3234 if (gimple_omp_for_kind (ctx
->stmt
) != GF_OMP_FOR_KIND_FOR
3235 && gimple_omp_for_kind (ctx
->stmt
) != GF_OMP_FOR_KIND_TASKLOOP
)
3238 case GIMPLE_OMP_SECTIONS
:
3239 case GIMPLE_OMP_SINGLE
:
3240 case GIMPLE_OMP_ORDERED
:
3241 case GIMPLE_OMP_MASTER
:
3242 case GIMPLE_OMP_TASK
:
3243 case GIMPLE_OMP_CRITICAL
:
3244 if (is_gimple_call (stmt
))
3246 if (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt
))
3247 != BUILT_IN_GOMP_BARRIER
)
3249 error_at (gimple_location (stmt
),
3250 "barrier region may not be closely nested inside "
3251 "of work-sharing, %<loop%>, %<critical%>, "
3252 "%<ordered%>, %<master%>, explicit %<task%> or "
3253 "%<taskloop%> region");
3256 error_at (gimple_location (stmt
),
3257 "work-sharing region may not be closely nested inside "
3258 "of work-sharing, %<loop%>, %<critical%>, %<ordered%>, "
3259 "%<master%>, explicit %<task%> or %<taskloop%> region");
3261 case GIMPLE_OMP_PARALLEL
:
3262 case GIMPLE_OMP_TEAMS
:
3264 case GIMPLE_OMP_TARGET
:
3265 if (gimple_omp_target_kind (ctx
->stmt
)
3266 == GF_OMP_TARGET_KIND_REGION
)
3273 case GIMPLE_OMP_MASTER
:
3274 for (; ctx
!= NULL
; ctx
= ctx
->outer
)
3275 switch (gimple_code (ctx
->stmt
))
3277 case GIMPLE_OMP_FOR
:
3278 if (gimple_omp_for_kind (ctx
->stmt
) != GF_OMP_FOR_KIND_FOR
3279 && gimple_omp_for_kind (ctx
->stmt
) != GF_OMP_FOR_KIND_TASKLOOP
)
3282 case GIMPLE_OMP_SECTIONS
:
3283 case GIMPLE_OMP_SINGLE
:
3284 case GIMPLE_OMP_TASK
:
3285 error_at (gimple_location (stmt
),
3286 "%<master%> region may not be closely nested inside "
3287 "of work-sharing, %<loop%>, explicit %<task%> or "
3288 "%<taskloop%> region");
3290 case GIMPLE_OMP_PARALLEL
:
3291 case GIMPLE_OMP_TEAMS
:
3293 case GIMPLE_OMP_TARGET
:
3294 if (gimple_omp_target_kind (ctx
->stmt
)
3295 == GF_OMP_TARGET_KIND_REGION
)
3302 case GIMPLE_OMP_TASK
:
3303 for (c
= gimple_omp_task_clauses (stmt
); c
; c
= OMP_CLAUSE_CHAIN (c
))
3304 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_DEPEND
3305 && (OMP_CLAUSE_DEPEND_KIND (c
) == OMP_CLAUSE_DEPEND_SOURCE
3306 || OMP_CLAUSE_DEPEND_KIND (c
) == OMP_CLAUSE_DEPEND_SINK
))
3308 enum omp_clause_depend_kind kind
= OMP_CLAUSE_DEPEND_KIND (c
);
3309 error_at (OMP_CLAUSE_LOCATION (c
),
3310 "%<depend(%s)%> is only allowed in %<omp ordered%>",
3311 kind
== OMP_CLAUSE_DEPEND_SOURCE
? "source" : "sink");
3315 case GIMPLE_OMP_ORDERED
:
3316 for (c
= gimple_omp_ordered_clauses (as_a
<gomp_ordered
*> (stmt
));
3317 c
; c
= OMP_CLAUSE_CHAIN (c
))
3319 if (OMP_CLAUSE_CODE (c
) != OMP_CLAUSE_DEPEND
)
3321 gcc_assert (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_THREADS
3322 || OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_SIMD
);
3325 enum omp_clause_depend_kind kind
= OMP_CLAUSE_DEPEND_KIND (c
);
3326 if (kind
== OMP_CLAUSE_DEPEND_SOURCE
3327 || kind
== OMP_CLAUSE_DEPEND_SINK
)
3330 /* Look for containing ordered(N) loop. */
3332 || gimple_code (ctx
->stmt
) != GIMPLE_OMP_FOR
3334 = omp_find_clause (gimple_omp_for_clauses (ctx
->stmt
),
3335 OMP_CLAUSE_ORDERED
)) == NULL_TREE
)
3337 error_at (OMP_CLAUSE_LOCATION (c
),
3338 "%<ordered%> construct with %<depend%> clause "
3339 "must be closely nested inside an %<ordered%> "
3343 else if (OMP_CLAUSE_ORDERED_EXPR (oclause
) == NULL_TREE
)
3345 error_at (OMP_CLAUSE_LOCATION (c
),
3346 "%<ordered%> construct with %<depend%> clause "
3347 "must be closely nested inside a loop with "
3348 "%<ordered%> clause with a parameter");
3354 error_at (OMP_CLAUSE_LOCATION (c
),
3355 "invalid depend kind in omp %<ordered%> %<depend%>");
3359 c
= gimple_omp_ordered_clauses (as_a
<gomp_ordered
*> (stmt
));
3360 if (omp_find_clause (c
, OMP_CLAUSE_SIMD
))
3362 /* ordered simd must be closely nested inside of simd region,
3363 and simd region must not encounter constructs other than
3364 ordered simd, therefore ordered simd may be either orphaned,
3365 or ctx->stmt must be simd. The latter case is handled already
3369 error_at (gimple_location (stmt
),
3370 "%<ordered%> %<simd%> must be closely nested inside "
3375 for (; ctx
!= NULL
; ctx
= ctx
->outer
)
3376 switch (gimple_code (ctx
->stmt
))
3378 case GIMPLE_OMP_CRITICAL
:
3379 case GIMPLE_OMP_TASK
:
3380 case GIMPLE_OMP_ORDERED
:
3381 ordered_in_taskloop
:
3382 error_at (gimple_location (stmt
),
3383 "%<ordered%> region may not be closely nested inside "
3384 "of %<critical%>, %<ordered%>, explicit %<task%> or "
3385 "%<taskloop%> region");
3387 case GIMPLE_OMP_FOR
:
3388 if (gimple_omp_for_kind (ctx
->stmt
) == GF_OMP_FOR_KIND_TASKLOOP
)
3389 goto ordered_in_taskloop
;
3391 o
= omp_find_clause (gimple_omp_for_clauses (ctx
->stmt
),
3392 OMP_CLAUSE_ORDERED
);
3395 error_at (gimple_location (stmt
),
3396 "%<ordered%> region must be closely nested inside "
3397 "a loop region with an %<ordered%> clause");
3400 if (OMP_CLAUSE_ORDERED_EXPR (o
) != NULL_TREE
3401 && omp_find_clause (c
, OMP_CLAUSE_DEPEND
) == NULL_TREE
)
3403 error_at (gimple_location (stmt
),
3404 "%<ordered%> region without %<depend%> clause may "
3405 "not be closely nested inside a loop region with "
3406 "an %<ordered%> clause with a parameter");
3410 case GIMPLE_OMP_TARGET
:
3411 if (gimple_omp_target_kind (ctx
->stmt
)
3412 != GF_OMP_TARGET_KIND_REGION
)
3415 case GIMPLE_OMP_PARALLEL
:
3416 case GIMPLE_OMP_TEAMS
:
3417 error_at (gimple_location (stmt
),
3418 "%<ordered%> region must be closely nested inside "
3419 "a loop region with an %<ordered%> clause");
3425 case GIMPLE_OMP_CRITICAL
:
3428 = gimple_omp_critical_name (as_a
<gomp_critical
*> (stmt
));
3429 for (; ctx
!= NULL
; ctx
= ctx
->outer
)
3430 if (gomp_critical
*other_crit
3431 = dyn_cast
<gomp_critical
*> (ctx
->stmt
))
3432 if (this_stmt_name
== gimple_omp_critical_name (other_crit
))
3434 error_at (gimple_location (stmt
),
3435 "%<critical%> region may not be nested inside "
3436 "a %<critical%> region with the same name");
3441 case GIMPLE_OMP_TEAMS
:
3444 else if (gimple_code (ctx
->stmt
) != GIMPLE_OMP_TARGET
3445 || (gimple_omp_target_kind (ctx
->stmt
)
3446 != GF_OMP_TARGET_KIND_REGION
))
3448 /* Teams construct can appear either strictly nested inside of
3449 target construct with no intervening stmts, or can be encountered
3450 only by initial task (so must not appear inside any OpenMP
3452 error_at (gimple_location (stmt
),
3453 "%<teams%> construct must be closely nested inside of "
3454 "%<target%> construct or not nested in any OpenMP "
3459 case GIMPLE_OMP_TARGET
:
3460 for (c
= gimple_omp_target_clauses (stmt
); c
; c
= OMP_CLAUSE_CHAIN (c
))
3461 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_DEPEND
3462 && (OMP_CLAUSE_DEPEND_KIND (c
) == OMP_CLAUSE_DEPEND_SOURCE
3463 || OMP_CLAUSE_DEPEND_KIND (c
) == OMP_CLAUSE_DEPEND_SINK
))
3465 enum omp_clause_depend_kind kind
= OMP_CLAUSE_DEPEND_KIND (c
);
3466 error_at (OMP_CLAUSE_LOCATION (c
),
3467 "%<depend(%s)%> is only allowed in %<omp ordered%>",
3468 kind
== OMP_CLAUSE_DEPEND_SOURCE
? "source" : "sink");
3471 if (is_gimple_omp_offloaded (stmt
)
3472 && oacc_get_fn_attrib (cfun
->decl
) != NULL
)
3474 error_at (gimple_location (stmt
),
3475 "OpenACC region inside of OpenACC routine, nested "
3476 "parallelism not supported yet");
3479 for (; ctx
!= NULL
; ctx
= ctx
->outer
)
3481 if (gimple_code (ctx
->stmt
) != GIMPLE_OMP_TARGET
)
3483 if (is_gimple_omp (stmt
)
3484 && is_gimple_omp_oacc (stmt
)
3485 && is_gimple_omp (ctx
->stmt
))
3487 error_at (gimple_location (stmt
),
3488 "OpenACC construct inside of non-OpenACC region");
3494 const char *stmt_name
, *ctx_stmt_name
;
3495 switch (gimple_omp_target_kind (stmt
))
3497 case GF_OMP_TARGET_KIND_REGION
: stmt_name
= "target"; break;
3498 case GF_OMP_TARGET_KIND_DATA
: stmt_name
= "target data"; break;
3499 case GF_OMP_TARGET_KIND_UPDATE
: stmt_name
= "target update"; break;
3500 case GF_OMP_TARGET_KIND_ENTER_DATA
:
3501 stmt_name
= "target enter data"; break;
3502 case GF_OMP_TARGET_KIND_EXIT_DATA
:
3503 stmt_name
= "target exit data"; break;
3504 case GF_OMP_TARGET_KIND_OACC_PARALLEL
: stmt_name
= "parallel"; break;
3505 case GF_OMP_TARGET_KIND_OACC_KERNELS
: stmt_name
= "kernels"; break;
3506 case GF_OMP_TARGET_KIND_OACC_SERIAL
: stmt_name
= "serial"; break;
3507 case GF_OMP_TARGET_KIND_OACC_DATA
: stmt_name
= "data"; break;
3508 case GF_OMP_TARGET_KIND_OACC_UPDATE
: stmt_name
= "update"; break;
3509 case GF_OMP_TARGET_KIND_OACC_ENTER_EXIT_DATA
:
3510 stmt_name
= "enter/exit data"; break;
3511 case GF_OMP_TARGET_KIND_OACC_DECLARE
: stmt_name
= "declare"; break;
3512 case GF_OMP_TARGET_KIND_OACC_HOST_DATA
: stmt_name
= "host_data";
3514 default: gcc_unreachable ();
3516 switch (gimple_omp_target_kind (ctx
->stmt
))
3518 case GF_OMP_TARGET_KIND_REGION
: ctx_stmt_name
= "target"; break;
3519 case GF_OMP_TARGET_KIND_DATA
: ctx_stmt_name
= "target data"; break;
3520 case GF_OMP_TARGET_KIND_OACC_PARALLEL
:
3521 ctx_stmt_name
= "parallel"; break;
3522 case GF_OMP_TARGET_KIND_OACC_KERNELS
:
3523 ctx_stmt_name
= "kernels"; break;
3524 case GF_OMP_TARGET_KIND_OACC_SERIAL
:
3525 ctx_stmt_name
= "serial"; break;
3526 case GF_OMP_TARGET_KIND_OACC_DATA
: ctx_stmt_name
= "data"; break;
3527 case GF_OMP_TARGET_KIND_OACC_HOST_DATA
:
3528 ctx_stmt_name
= "host_data"; break;
3529 default: gcc_unreachable ();
3532 /* OpenACC/OpenMP mismatch? */
3533 if (is_gimple_omp_oacc (stmt
)
3534 != is_gimple_omp_oacc (ctx
->stmt
))
3536 error_at (gimple_location (stmt
),
3537 "%s %qs construct inside of %s %qs region",
3538 (is_gimple_omp_oacc (stmt
)
3539 ? "OpenACC" : "OpenMP"), stmt_name
,
3540 (is_gimple_omp_oacc (ctx
->stmt
)
3541 ? "OpenACC" : "OpenMP"), ctx_stmt_name
);
3544 if (is_gimple_omp_offloaded (ctx
->stmt
))
3546 /* No GIMPLE_OMP_TARGET inside offloaded OpenACC CTX. */
3547 if (is_gimple_omp_oacc (ctx
->stmt
))
3549 error_at (gimple_location (stmt
),
3550 "%qs construct inside of %qs region",
3551 stmt_name
, ctx_stmt_name
);
3556 warning_at (gimple_location (stmt
), 0,
3557 "%qs construct inside of %qs region",
3558 stmt_name
, ctx_stmt_name
);
3570 /* Helper function scan_omp.
3572 Callback for walk_tree or operators in walk_gimple_stmt used to
3573 scan for OMP directives in TP. */
3576 scan_omp_1_op (tree
*tp
, int *walk_subtrees
, void *data
)
3578 struct walk_stmt_info
*wi
= (struct walk_stmt_info
*) data
;
3579 omp_context
*ctx
= (omp_context
*) wi
->info
;
3582 switch (TREE_CODE (t
))
3590 tree repl
= remap_decl (t
, &ctx
->cb
);
3591 gcc_checking_assert (TREE_CODE (repl
) != ERROR_MARK
);
3597 if (ctx
&& TYPE_P (t
))
3598 *tp
= remap_type (t
, &ctx
->cb
);
3599 else if (!DECL_P (t
))
3604 tree tem
= remap_type (TREE_TYPE (t
), &ctx
->cb
);
3605 if (tem
!= TREE_TYPE (t
))
3607 if (TREE_CODE (t
) == INTEGER_CST
)
3608 *tp
= wide_int_to_tree (tem
, wi::to_wide (t
));
3610 TREE_TYPE (t
) = tem
;
3620 /* Return true if FNDECL is a setjmp or a longjmp. */
3623 setjmp_or_longjmp_p (const_tree fndecl
)
3625 if (fndecl_built_in_p (fndecl
, BUILT_IN_SETJMP
)
3626 || fndecl_built_in_p (fndecl
, BUILT_IN_LONGJMP
))
3629 tree declname
= DECL_NAME (fndecl
);
3631 || (DECL_CONTEXT (fndecl
) != NULL_TREE
3632 && TREE_CODE (DECL_CONTEXT (fndecl
)) != TRANSLATION_UNIT_DECL
)
3633 || !TREE_PUBLIC (fndecl
))
3636 const char *name
= IDENTIFIER_POINTER (declname
);
3637 return !strcmp (name
, "setjmp") || !strcmp (name
, "longjmp");
3640 /* Return true if FNDECL is an omp_* runtime API call. */
3643 omp_runtime_api_call (const_tree fndecl
)
3645 tree declname
= DECL_NAME (fndecl
);
3647 || (DECL_CONTEXT (fndecl
) != NULL_TREE
3648 && TREE_CODE (DECL_CONTEXT (fndecl
)) != TRANSLATION_UNIT_DECL
)
3649 || !TREE_PUBLIC (fndecl
))
3652 const char *name
= IDENTIFIER_POINTER (declname
);
3653 if (strncmp (name
, "omp_", 4) != 0)
3656 static const char *omp_runtime_apis
[] =
3658 /* This array has 3 sections. First omp_* calls that don't
3659 have any suffixes. */
3661 "target_associate_ptr",
3662 "target_disassociate_ptr",
3664 "target_is_present",
3666 "target_memcpy_rect",
3668 /* Now omp_* calls that are available as omp_* and omp_*_. */
3671 "destroy_nest_lock",
3674 "get_affinity_format",
3676 "get_default_device",
3678 "get_initial_device",
3680 "get_max_active_levels",
3681 "get_max_task_priority",
3689 "get_partition_num_places",
3701 "is_initial_device",
3703 "pause_resource_all",
3704 "set_affinity_format",
3712 /* And finally calls available as omp_*, omp_*_ and omp_*_8_. */
3713 "get_ancestor_thread_num",
3714 "get_partition_place_nums",
3715 "get_place_num_procs",
3716 "get_place_proc_ids",
3719 "set_default_device",
3721 "set_max_active_levels",
3728 for (unsigned i
= 0; i
< ARRAY_SIZE (omp_runtime_apis
); i
++)
3730 if (omp_runtime_apis
[i
] == NULL
)
3735 size_t len
= strlen (omp_runtime_apis
[i
]);
3736 if (strncmp (name
+ 4, omp_runtime_apis
[i
], len
) == 0
3737 && (name
[4 + len
] == '\0'
3739 && name
[4 + len
] == '_'
3740 && (name
[4 + len
+ 1] == '\0'
3742 && strcmp (name
+ 4 + len
+ 1, "8_") == 0)))))
3748 /* Helper function for scan_omp.
3750 Callback for walk_gimple_stmt used to scan for OMP directives in
3751 the current statement in GSI. */
3754 scan_omp_1_stmt (gimple_stmt_iterator
*gsi
, bool *handled_ops_p
,
3755 struct walk_stmt_info
*wi
)
3757 gimple
*stmt
= gsi_stmt (*gsi
);
3758 omp_context
*ctx
= (omp_context
*) wi
->info
;
3760 if (gimple_has_location (stmt
))
3761 input_location
= gimple_location (stmt
);
3763 /* Check the nesting restrictions. */
3764 bool remove
= false;
3765 if (is_gimple_omp (stmt
))
3766 remove
= !check_omp_nesting_restrictions (stmt
, ctx
);
3767 else if (is_gimple_call (stmt
))
3769 tree fndecl
= gimple_call_fndecl (stmt
);
3773 && gimple_code (ctx
->stmt
) == GIMPLE_OMP_FOR
3774 && gimple_omp_for_kind (ctx
->stmt
) == GF_OMP_FOR_KIND_SIMD
3775 && setjmp_or_longjmp_p (fndecl
)
3779 error_at (gimple_location (stmt
),
3780 "setjmp/longjmp inside %<simd%> construct");
3782 else if (DECL_BUILT_IN_CLASS (fndecl
) == BUILT_IN_NORMAL
)
3783 switch (DECL_FUNCTION_CODE (fndecl
))
3785 case BUILT_IN_GOMP_BARRIER
:
3786 case BUILT_IN_GOMP_CANCEL
:
3787 case BUILT_IN_GOMP_CANCELLATION_POINT
:
3788 case BUILT_IN_GOMP_TASKYIELD
:
3789 case BUILT_IN_GOMP_TASKWAIT
:
3790 case BUILT_IN_GOMP_TASKGROUP_START
:
3791 case BUILT_IN_GOMP_TASKGROUP_END
:
3792 remove
= !check_omp_nesting_restrictions (stmt
, ctx
);
3799 omp_context
*octx
= ctx
;
3800 if (gimple_code (ctx
->stmt
) == GIMPLE_OMP_SCAN
&& ctx
->outer
)
3802 if (octx
->order_concurrent
&& omp_runtime_api_call (fndecl
))
3805 error_at (gimple_location (stmt
),
3806 "OpenMP runtime API call %qD in a region with "
3807 "%<order(concurrent)%> clause", fndecl
);
3814 stmt
= gimple_build_nop ();
3815 gsi_replace (gsi
, stmt
, false);
3818 *handled_ops_p
= true;
3820 switch (gimple_code (stmt
))
3822 case GIMPLE_OMP_PARALLEL
:
3823 taskreg_nesting_level
++;
3824 scan_omp_parallel (gsi
, ctx
);
3825 taskreg_nesting_level
--;
3828 case GIMPLE_OMP_TASK
:
3829 taskreg_nesting_level
++;
3830 scan_omp_task (gsi
, ctx
);
3831 taskreg_nesting_level
--;
3834 case GIMPLE_OMP_FOR
:
3835 if ((gimple_omp_for_kind (as_a
<gomp_for
*> (stmt
))
3836 == GF_OMP_FOR_KIND_SIMD
)
3837 && gimple_omp_for_combined_into_p (stmt
)
3838 && gimple_code (ctx
->stmt
) != GIMPLE_OMP_SCAN
)
3840 tree clauses
= gimple_omp_for_clauses (as_a
<gomp_for
*> (stmt
));
3841 tree c
= omp_find_clause (clauses
, OMP_CLAUSE_REDUCTION
);
3842 if (c
&& OMP_CLAUSE_REDUCTION_INSCAN (c
) && !seen_error ())
3844 scan_omp_simd_scan (gsi
, as_a
<gomp_for
*> (stmt
), ctx
);
3848 if ((gimple_omp_for_kind (as_a
<gomp_for
*> (stmt
))
3849 == GF_OMP_FOR_KIND_SIMD
)
3850 && omp_maybe_offloaded_ctx (ctx
)
3851 && omp_max_simt_vf ()
3852 && gimple_omp_for_collapse (stmt
) == 1)
3853 scan_omp_simd (gsi
, as_a
<gomp_for
*> (stmt
), ctx
);
3855 scan_omp_for (as_a
<gomp_for
*> (stmt
), ctx
);
3858 case GIMPLE_OMP_SECTIONS
:
3859 scan_omp_sections (as_a
<gomp_sections
*> (stmt
), ctx
);
3862 case GIMPLE_OMP_SINGLE
:
3863 scan_omp_single (as_a
<gomp_single
*> (stmt
), ctx
);
3866 case GIMPLE_OMP_SCAN
:
3867 if (tree clauses
= gimple_omp_scan_clauses (as_a
<gomp_scan
*> (stmt
)))
3869 if (OMP_CLAUSE_CODE (clauses
) == OMP_CLAUSE_INCLUSIVE
)
3870 ctx
->scan_inclusive
= true;
3871 else if (OMP_CLAUSE_CODE (clauses
) == OMP_CLAUSE_EXCLUSIVE
)
3872 ctx
->scan_exclusive
= true;
3875 case GIMPLE_OMP_SECTION
:
3876 case GIMPLE_OMP_MASTER
:
3877 case GIMPLE_OMP_ORDERED
:
3878 case GIMPLE_OMP_CRITICAL
:
3879 ctx
= new_omp_context (stmt
, ctx
);
3880 scan_omp (gimple_omp_body_ptr (stmt
), ctx
);
3883 case GIMPLE_OMP_TASKGROUP
:
3884 ctx
= new_omp_context (stmt
, ctx
);
3885 scan_sharing_clauses (gimple_omp_taskgroup_clauses (stmt
), ctx
);
3886 scan_omp (gimple_omp_body_ptr (stmt
), ctx
);
3889 case GIMPLE_OMP_TARGET
:
3890 if (is_gimple_omp_offloaded (stmt
))
3892 taskreg_nesting_level
++;
3893 scan_omp_target (as_a
<gomp_target
*> (stmt
), ctx
);
3894 taskreg_nesting_level
--;
3897 scan_omp_target (as_a
<gomp_target
*> (stmt
), ctx
);
3900 case GIMPLE_OMP_TEAMS
:
3901 if (gimple_omp_teams_host (as_a
<gomp_teams
*> (stmt
)))
3903 taskreg_nesting_level
++;
3904 scan_omp_teams (as_a
<gomp_teams
*> (stmt
), ctx
);
3905 taskreg_nesting_level
--;
3908 scan_omp_teams (as_a
<gomp_teams
*> (stmt
), ctx
);
3915 *handled_ops_p
= false;
3917 for (var
= gimple_bind_vars (as_a
<gbind
*> (stmt
));
3919 var
= DECL_CHAIN (var
))
3920 insert_decl_map (&ctx
->cb
, var
, var
);
3924 *handled_ops_p
= false;
3932 /* Scan all the statements starting at the current statement. CTX
3933 contains context information about the OMP directives and
3934 clauses found during the scan. */
3937 scan_omp (gimple_seq
*body_p
, omp_context
*ctx
)
3939 location_t saved_location
;
3940 struct walk_stmt_info wi
;
3942 memset (&wi
, 0, sizeof (wi
));
3944 wi
.want_locations
= true;
3946 saved_location
= input_location
;
3947 walk_gimple_seq_mod (body_p
, scan_omp_1_stmt
, scan_omp_1_op
, &wi
);
3948 input_location
= saved_location
;
3951 /* Re-gimplification and code generation routines. */
3953 /* Remove omp_member_access_dummy_var variables from gimple_bind_vars
3954 of BIND if in a method. */
3957 maybe_remove_omp_member_access_dummy_vars (gbind
*bind
)
3959 if (DECL_ARGUMENTS (current_function_decl
)
3960 && DECL_ARTIFICIAL (DECL_ARGUMENTS (current_function_decl
))
3961 && (TREE_CODE (TREE_TYPE (DECL_ARGUMENTS (current_function_decl
)))
3964 tree vars
= gimple_bind_vars (bind
);
3965 for (tree
*pvar
= &vars
; *pvar
; )
3966 if (omp_member_access_dummy_var (*pvar
))
3967 *pvar
= DECL_CHAIN (*pvar
);
3969 pvar
= &DECL_CHAIN (*pvar
);
3970 gimple_bind_set_vars (bind
, vars
);
3974 /* Remove omp_member_access_dummy_var variables from BLOCK_VARS of
3975 block and its subblocks. */
3978 remove_member_access_dummy_vars (tree block
)
3980 for (tree
*pvar
= &BLOCK_VARS (block
); *pvar
; )
3981 if (omp_member_access_dummy_var (*pvar
))
3982 *pvar
= DECL_CHAIN (*pvar
);
3984 pvar
= &DECL_CHAIN (*pvar
);
3986 for (block
= BLOCK_SUBBLOCKS (block
); block
; block
= BLOCK_CHAIN (block
))
3987 remove_member_access_dummy_vars (block
);
3990 /* If a context was created for STMT when it was scanned, return it. */
3992 static omp_context
*
3993 maybe_lookup_ctx (gimple
*stmt
)
3996 n
= splay_tree_lookup (all_contexts
, (splay_tree_key
) stmt
);
3997 return n
? (omp_context
*) n
->value
: NULL
;
4001 /* Find the mapping for DECL in CTX or the immediately enclosing
4002 context that has a mapping for DECL.
4004 If CTX is a nested parallel directive, we may have to use the decl
4005 mappings created in CTX's parent context. Suppose that we have the
4006 following parallel nesting (variable UIDs showed for clarity):
4009 #omp parallel shared(iD.1562) -> outer parallel
4010 iD.1562 = iD.1562 + 1;
4012 #omp parallel shared (iD.1562) -> inner parallel
4013 iD.1562 = iD.1562 - 1;
4015 Each parallel structure will create a distinct .omp_data_s structure
4016 for copying iD.1562 in/out of the directive:
4018 outer parallel .omp_data_s.1.i -> iD.1562
4019 inner parallel .omp_data_s.2.i -> iD.1562
4021 A shared variable mapping will produce a copy-out operation before
4022 the parallel directive and a copy-in operation after it. So, in
4023 this case we would have:
4026 .omp_data_o.1.i = iD.1562;
4027 #omp parallel shared(iD.1562) -> outer parallel
4028 .omp_data_i.1 = &.omp_data_o.1
4029 .omp_data_i.1->i = .omp_data_i.1->i + 1;
4031 .omp_data_o.2.i = iD.1562; -> **
4032 #omp parallel shared(iD.1562) -> inner parallel
4033 .omp_data_i.2 = &.omp_data_o.2
4034 .omp_data_i.2->i = .omp_data_i.2->i - 1;
4037 ** This is a problem. The symbol iD.1562 cannot be referenced
4038 inside the body of the outer parallel region. But since we are
4039 emitting this copy operation while expanding the inner parallel
4040 directive, we need to access the CTX structure of the outer
4041 parallel directive to get the correct mapping:
4043 .omp_data_o.2.i = .omp_data_i.1->i
4045 Since there may be other workshare or parallel directives enclosing
4046 the parallel directive, it may be necessary to walk up the context
4047 parent chain. This is not a problem in general because nested
4048 parallelism happens only rarely. */
4051 lookup_decl_in_outer_ctx (tree decl
, omp_context
*ctx
)
4056 for (up
= ctx
->outer
, t
= NULL
; up
&& t
== NULL
; up
= up
->outer
)
4057 t
= maybe_lookup_decl (decl
, up
);
4059 gcc_assert (!ctx
->is_nested
|| t
|| is_global_var (decl
));
4061 return t
? t
: decl
;
4065 /* Similar to lookup_decl_in_outer_ctx, but return DECL if not found
4066 in outer contexts. */
4069 maybe_lookup_decl_in_outer_ctx (tree decl
, omp_context
*ctx
)
4074 for (up
= ctx
->outer
, t
= NULL
; up
&& t
== NULL
; up
= up
->outer
)
4075 t
= maybe_lookup_decl (decl
, up
);
4077 return t
? t
: decl
;
4081 /* Construct the initialization value for reduction operation OP. */
4084 omp_reduction_init_op (location_t loc
, enum tree_code op
, tree type
)
4093 case TRUTH_ORIF_EXPR
:
4094 case TRUTH_XOR_EXPR
:
4096 return build_zero_cst (type
);
4099 case TRUTH_AND_EXPR
:
4100 case TRUTH_ANDIF_EXPR
:
4102 return fold_convert_loc (loc
, type
, integer_one_node
);
4105 return fold_convert_loc (loc
, type
, integer_minus_one_node
);
4108 if (SCALAR_FLOAT_TYPE_P (type
))
4110 REAL_VALUE_TYPE max
, min
;
4111 if (HONOR_INFINITIES (type
))
4114 real_arithmetic (&min
, NEGATE_EXPR
, &max
, NULL
);
4117 real_maxval (&min
, 1, TYPE_MODE (type
));
4118 return build_real (type
, min
);
4120 else if (POINTER_TYPE_P (type
))
4123 = wi::min_value (TYPE_PRECISION (type
), TYPE_SIGN (type
));
4124 return wide_int_to_tree (type
, min
);
4128 gcc_assert (INTEGRAL_TYPE_P (type
));
4129 return TYPE_MIN_VALUE (type
);
4133 if (SCALAR_FLOAT_TYPE_P (type
))
4135 REAL_VALUE_TYPE max
;
4136 if (HONOR_INFINITIES (type
))
4139 real_maxval (&max
, 0, TYPE_MODE (type
));
4140 return build_real (type
, max
);
4142 else if (POINTER_TYPE_P (type
))
4145 = wi::max_value (TYPE_PRECISION (type
), TYPE_SIGN (type
));
4146 return wide_int_to_tree (type
, max
);
4150 gcc_assert (INTEGRAL_TYPE_P (type
));
4151 return TYPE_MAX_VALUE (type
);
4159 /* Construct the initialization value for reduction CLAUSE. */
4162 omp_reduction_init (tree clause
, tree type
)
4164 return omp_reduction_init_op (OMP_CLAUSE_LOCATION (clause
),
4165 OMP_CLAUSE_REDUCTION_CODE (clause
), type
);
4168 /* Return alignment to be assumed for var in CLAUSE, which should be
4169 OMP_CLAUSE_ALIGNED. */
4172 omp_clause_aligned_alignment (tree clause
)
4174 if (OMP_CLAUSE_ALIGNED_ALIGNMENT (clause
))
4175 return OMP_CLAUSE_ALIGNED_ALIGNMENT (clause
);
4177 /* Otherwise return implementation defined alignment. */
4178 unsigned int al
= 1;
4179 opt_scalar_mode mode_iter
;
4180 auto_vector_modes modes
;
4181 targetm
.vectorize
.autovectorize_vector_modes (&modes
, true);
4182 static enum mode_class classes
[]
4183 = { MODE_INT
, MODE_VECTOR_INT
, MODE_FLOAT
, MODE_VECTOR_FLOAT
};
4184 for (int i
= 0; i
< 4; i
+= 2)
4185 /* The for loop above dictates that we only walk through scalar classes. */
4186 FOR_EACH_MODE_IN_CLASS (mode_iter
, classes
[i
])
4188 scalar_mode mode
= mode_iter
.require ();
4189 machine_mode vmode
= targetm
.vectorize
.preferred_simd_mode (mode
);
4190 if (GET_MODE_CLASS (vmode
) != classes
[i
+ 1])
4192 machine_mode alt_vmode
;
4193 for (unsigned int j
= 0; j
< modes
.length (); ++j
)
4194 if (related_vector_mode (modes
[j
], mode
).exists (&alt_vmode
)
4195 && known_ge (GET_MODE_SIZE (alt_vmode
), GET_MODE_SIZE (vmode
)))
4198 tree type
= lang_hooks
.types
.type_for_mode (mode
, 1);
4199 if (type
== NULL_TREE
|| TYPE_MODE (type
) != mode
)
4201 type
= build_vector_type_for_mode (type
, vmode
);
4202 if (TYPE_MODE (type
) != vmode
)
4204 if (TYPE_ALIGN_UNIT (type
) > al
)
4205 al
= TYPE_ALIGN_UNIT (type
);
4207 return build_int_cst (integer_type_node
, al
);
4211 /* This structure is part of the interface between lower_rec_simd_input_clauses
4212 and lower_rec_input_clauses. */
4214 class omplow_simd_context
{
4216 omplow_simd_context () { memset (this, 0, sizeof (*this)); }
4220 vec
<tree
, va_heap
> simt_eargs
;
4221 gimple_seq simt_dlist
;
4222 poly_uint64_pod max_vf
;
4226 /* Helper function of lower_rec_input_clauses, used for #pragma omp simd
4230 lower_rec_simd_input_clauses (tree new_var
, omp_context
*ctx
,
4231 omplow_simd_context
*sctx
, tree
&ivar
,
4232 tree
&lvar
, tree
*rvar
= NULL
,
4235 if (known_eq (sctx
->max_vf
, 0U))
4237 sctx
->max_vf
= sctx
->is_simt
? omp_max_simt_vf () : omp_max_vf ();
4238 if (maybe_gt (sctx
->max_vf
, 1U))
4240 tree c
= omp_find_clause (gimple_omp_for_clauses (ctx
->stmt
),
4241 OMP_CLAUSE_SAFELEN
);
4244 poly_uint64 safe_len
;
4245 if (!poly_int_tree_p (OMP_CLAUSE_SAFELEN_EXPR (c
), &safe_len
)
4246 || maybe_lt (safe_len
, 1U))
4249 sctx
->max_vf
= lower_bound (sctx
->max_vf
, safe_len
);
4252 if (maybe_gt (sctx
->max_vf
, 1U))
4254 sctx
->idx
= create_tmp_var (unsigned_type_node
);
4255 sctx
->lane
= create_tmp_var (unsigned_type_node
);
4258 if (known_eq (sctx
->max_vf
, 1U))
4263 if (is_gimple_reg (new_var
))
4265 ivar
= lvar
= new_var
;
4268 tree type
= TREE_TYPE (new_var
), ptype
= build_pointer_type (type
);
4269 ivar
= lvar
= create_tmp_var (type
);
4270 TREE_ADDRESSABLE (ivar
) = 1;
4271 DECL_ATTRIBUTES (ivar
) = tree_cons (get_identifier ("omp simt private"),
4272 NULL
, DECL_ATTRIBUTES (ivar
));
4273 sctx
->simt_eargs
.safe_push (build1 (ADDR_EXPR
, ptype
, ivar
));
4274 tree clobber
= build_clobber (type
);
4275 gimple
*g
= gimple_build_assign (ivar
, clobber
);
4276 gimple_seq_add_stmt (&sctx
->simt_dlist
, g
);
4280 tree atype
= build_array_type_nelts (TREE_TYPE (new_var
), sctx
->max_vf
);
4281 tree avar
= create_tmp_var_raw (atype
);
4282 if (TREE_ADDRESSABLE (new_var
))
4283 TREE_ADDRESSABLE (avar
) = 1;
4284 DECL_ATTRIBUTES (avar
)
4285 = tree_cons (get_identifier ("omp simd array"), NULL
,
4286 DECL_ATTRIBUTES (avar
));
4287 gimple_add_tmp_var (avar
);
4289 if (rvar
&& !ctx
->for_simd_scan_phase
)
4291 /* For inscan reductions, create another array temporary,
4292 which will hold the reduced value. */
4293 iavar
= create_tmp_var_raw (atype
);
4294 if (TREE_ADDRESSABLE (new_var
))
4295 TREE_ADDRESSABLE (iavar
) = 1;
4296 DECL_ATTRIBUTES (iavar
)
4297 = tree_cons (get_identifier ("omp simd array"), NULL
,
4298 tree_cons (get_identifier ("omp simd inscan"), NULL
,
4299 DECL_ATTRIBUTES (iavar
)));
4300 gimple_add_tmp_var (iavar
);
4301 ctx
->cb
.decl_map
->put (avar
, iavar
);
4302 if (sctx
->lastlane
== NULL_TREE
)
4303 sctx
->lastlane
= create_tmp_var (unsigned_type_node
);
4304 *rvar
= build4 (ARRAY_REF
, TREE_TYPE (new_var
), iavar
,
4305 sctx
->lastlane
, NULL_TREE
, NULL_TREE
);
4306 TREE_THIS_NOTRAP (*rvar
) = 1;
4308 if (ctx
->scan_exclusive
)
4310 /* And for exclusive scan yet another one, which will
4311 hold the value during the scan phase. */
4312 tree savar
= create_tmp_var_raw (atype
);
4313 if (TREE_ADDRESSABLE (new_var
))
4314 TREE_ADDRESSABLE (savar
) = 1;
4315 DECL_ATTRIBUTES (savar
)
4316 = tree_cons (get_identifier ("omp simd array"), NULL
,
4317 tree_cons (get_identifier ("omp simd inscan "
4319 DECL_ATTRIBUTES (savar
)));
4320 gimple_add_tmp_var (savar
);
4321 ctx
->cb
.decl_map
->put (iavar
, savar
);
4322 *rvar2
= build4 (ARRAY_REF
, TREE_TYPE (new_var
), savar
,
4323 sctx
->idx
, NULL_TREE
, NULL_TREE
);
4324 TREE_THIS_NOTRAP (*rvar2
) = 1;
4327 ivar
= build4 (ARRAY_REF
, TREE_TYPE (new_var
), iavar
, sctx
->idx
,
4328 NULL_TREE
, NULL_TREE
);
4329 lvar
= build4 (ARRAY_REF
, TREE_TYPE (new_var
), avar
, sctx
->lane
,
4330 NULL_TREE
, NULL_TREE
);
4331 TREE_THIS_NOTRAP (ivar
) = 1;
4332 TREE_THIS_NOTRAP (lvar
) = 1;
4334 if (DECL_P (new_var
))
4336 SET_DECL_VALUE_EXPR (new_var
, lvar
);
4337 DECL_HAS_VALUE_EXPR_P (new_var
) = 1;
4342 /* Helper function of lower_rec_input_clauses. For a reference
4343 in simd reduction, add an underlying variable it will reference. */
4346 handle_simd_reference (location_t loc
, tree new_vard
, gimple_seq
*ilist
)
4348 tree z
= TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (new_vard
)));
4349 if (TREE_CONSTANT (z
))
4351 z
= create_tmp_var_raw (TREE_TYPE (TREE_TYPE (new_vard
)),
4352 get_name (new_vard
));
4353 gimple_add_tmp_var (z
);
4354 TREE_ADDRESSABLE (z
) = 1;
4355 z
= build_fold_addr_expr_loc (loc
, z
);
4356 gimplify_assign (new_vard
, z
, ilist
);
4360 /* Helper function for lower_rec_input_clauses. Emit into ilist sequence
4361 code to emit (type) (tskred_temp[idx]). */
4364 task_reduction_read (gimple_seq
*ilist
, tree tskred_temp
, tree type
,
4367 unsigned HOST_WIDE_INT sz
4368 = tree_to_uhwi (TYPE_SIZE_UNIT (pointer_sized_int_node
));
4369 tree r
= build2 (MEM_REF
, pointer_sized_int_node
,
4370 tskred_temp
, build_int_cst (TREE_TYPE (tskred_temp
),
4372 tree v
= create_tmp_var (pointer_sized_int_node
);
4373 gimple
*g
= gimple_build_assign (v
, r
);
4374 gimple_seq_add_stmt (ilist
, g
);
4375 if (!useless_type_conversion_p (type
, pointer_sized_int_node
))
4377 v
= create_tmp_var (type
);
4378 g
= gimple_build_assign (v
, NOP_EXPR
, gimple_assign_lhs (g
));
4379 gimple_seq_add_stmt (ilist
, g
);
4384 /* Lower early initialization of privatized variable NEW_VAR
4385 if it needs an allocator (has allocate clause). */
4388 lower_private_allocate (tree var
, tree new_var
, tree
&allocator
,
4389 tree
&allocate_ptr
, gimple_seq
*ilist
,
4390 omp_context
*ctx
, bool is_ref
, tree size
)
4394 gcc_assert (allocate_ptr
== NULL_TREE
);
4395 if (ctx
->allocate_map
&& DECL_P (new_var
))
4396 if (tree
*allocatorp
= ctx
->allocate_map
->get (var
))
4397 allocator
= *allocatorp
;
4398 if (allocator
== NULL_TREE
)
4400 if (!is_ref
&& omp_is_reference (var
))
4403 if (TREE_CODE (allocator
) != INTEGER_CST
)
4404 allocator
= build_outer_var_ref (allocator
, ctx
);
4405 allocator
= fold_convert (pointer_sized_int_node
, allocator
);
4406 if (TREE_CODE (allocator
) != INTEGER_CST
)
4408 tree var
= create_tmp_var (TREE_TYPE (allocator
));
4409 gimplify_assign (var
, allocator
, ilist
);
4413 tree ptr_type
, align
, sz
;
4416 ptr_type
= build_pointer_type (TREE_TYPE (TREE_TYPE (new_var
)));
4417 align
= build_int_cst (size_type_node
,
4418 TYPE_ALIGN_UNIT (TREE_TYPE (ptr_type
)));
4423 ptr_type
= build_pointer_type (TREE_TYPE (new_var
));
4424 align
= build_int_cst (size_type_node
, DECL_ALIGN_UNIT (new_var
));
4425 sz
= fold_convert (size_type_node
, DECL_SIZE_UNIT (new_var
));
4427 if (TREE_CODE (sz
) != INTEGER_CST
)
4429 tree szvar
= create_tmp_var (size_type_node
);
4430 gimplify_assign (szvar
, sz
, ilist
);
4433 allocate_ptr
= create_tmp_var (ptr_type
);
4434 tree a
= builtin_decl_explicit (BUILT_IN_GOMP_ALLOC
);
4435 gimple
*g
= gimple_build_call (a
, 3, align
, sz
, allocator
);
4436 gimple_call_set_lhs (g
, allocate_ptr
);
4437 gimple_seq_add_stmt (ilist
, g
);
4440 SET_DECL_VALUE_EXPR (new_var
, build_simple_mem_ref (allocate_ptr
));
4441 DECL_HAS_VALUE_EXPR_P (new_var
) = 1;
4446 /* Generate code to implement the input clauses, FIRSTPRIVATE and COPYIN,
4447 from the receiver (aka child) side and initializers for REFERENCE_TYPE
4448 private variables. Initialization statements go in ILIST, while calls
4449 to destructors go in DLIST. */
4452 lower_rec_input_clauses (tree clauses
, gimple_seq
*ilist
, gimple_seq
*dlist
,
4453 omp_context
*ctx
, struct omp_for_data
*fd
)
4455 tree c
, copyin_seq
, x
, ptr
;
4456 bool copyin_by_ref
= false;
4457 bool lastprivate_firstprivate
= false;
4458 bool reduction_omp_orig_ref
= false;
4460 bool is_simd
= (gimple_code (ctx
->stmt
) == GIMPLE_OMP_FOR
4461 && gimple_omp_for_kind (ctx
->stmt
) == GF_OMP_FOR_KIND_SIMD
);
4462 omplow_simd_context sctx
= omplow_simd_context ();
4463 tree simt_lane
= NULL_TREE
, simtrec
= NULL_TREE
;
4464 tree ivar
= NULL_TREE
, lvar
= NULL_TREE
, uid
= NULL_TREE
;
4465 gimple_seq llist
[4] = { };
4466 tree nonconst_simd_if
= NULL_TREE
;
4469 sctx
.is_simt
= is_simd
&& omp_find_clause (clauses
, OMP_CLAUSE__SIMT_
);
4471 /* Set max_vf=1 (which will later enforce safelen=1) in simd loops
4472 with data sharing clauses referencing variable sized vars. That
4473 is unnecessarily hard to support and very unlikely to result in
4474 vectorized code anyway. */
4476 for (c
= clauses
; c
; c
= OMP_CLAUSE_CHAIN (c
))
4477 switch (OMP_CLAUSE_CODE (c
))
4479 case OMP_CLAUSE_LINEAR
:
4480 if (OMP_CLAUSE_LINEAR_ARRAY (c
))
4483 case OMP_CLAUSE_PRIVATE
:
4484 case OMP_CLAUSE_FIRSTPRIVATE
:
4485 case OMP_CLAUSE_LASTPRIVATE
:
4486 if (is_variable_sized (OMP_CLAUSE_DECL (c
)))
4488 else if (omp_is_reference (OMP_CLAUSE_DECL (c
)))
4490 tree rtype
= TREE_TYPE (TREE_TYPE (OMP_CLAUSE_DECL (c
)));
4491 if (!TREE_CONSTANT (TYPE_SIZE_UNIT (rtype
)))
4495 case OMP_CLAUSE_REDUCTION
:
4496 case OMP_CLAUSE_IN_REDUCTION
:
4497 if (TREE_CODE (OMP_CLAUSE_DECL (c
)) == MEM_REF
4498 || is_variable_sized (OMP_CLAUSE_DECL (c
)))
4500 else if (omp_is_reference (OMP_CLAUSE_DECL (c
)))
4502 tree rtype
= TREE_TYPE (TREE_TYPE (OMP_CLAUSE_DECL (c
)));
4503 if (!TREE_CONSTANT (TYPE_SIZE_UNIT (rtype
)))
4508 if (integer_zerop (OMP_CLAUSE_IF_EXPR (c
)))
4510 else if (TREE_CODE (OMP_CLAUSE_IF_EXPR (c
)) != INTEGER_CST
)
4511 nonconst_simd_if
= OMP_CLAUSE_IF_EXPR (c
);
4513 case OMP_CLAUSE_SIMDLEN
:
4514 if (integer_onep (OMP_CLAUSE_SIMDLEN_EXPR (c
)))
4517 case OMP_CLAUSE__CONDTEMP_
:
4518 /* FIXME: lastprivate(conditional:) not handled for SIMT yet. */
4526 /* Add a placeholder for simduid. */
4527 if (sctx
.is_simt
&& maybe_ne (sctx
.max_vf
, 1U))
4528 sctx
.simt_eargs
.safe_push (NULL_TREE
);
4530 unsigned task_reduction_cnt
= 0;
4531 unsigned task_reduction_cntorig
= 0;
4532 unsigned task_reduction_cnt_full
= 0;
4533 unsigned task_reduction_cntorig_full
= 0;
4534 unsigned task_reduction_other_cnt
= 0;
4535 tree tskred_atype
= NULL_TREE
, tskred_avar
= NULL_TREE
;
4536 tree tskred_base
= NULL_TREE
, tskred_temp
= NULL_TREE
;
4537 /* Do all the fixed sized types in the first pass, and the variable sized
4538 types in the second pass. This makes sure that the scalar arguments to
4539 the variable sized types are processed before we use them in the
4540 variable sized operations. For task reductions we use 4 passes, in the
4541 first two we ignore them, in the third one gather arguments for
4542 GOMP_task_reduction_remap call and in the last pass actually handle
4543 the task reductions. */
4544 for (pass
= 0; pass
< ((task_reduction_cnt
|| task_reduction_other_cnt
)
4547 if (pass
== 2 && task_reduction_cnt
)
4550 = build_array_type_nelts (ptr_type_node
, task_reduction_cnt
4551 + task_reduction_cntorig
);
4552 tskred_avar
= create_tmp_var_raw (tskred_atype
);
4553 gimple_add_tmp_var (tskred_avar
);
4554 TREE_ADDRESSABLE (tskred_avar
) = 1;
4555 task_reduction_cnt_full
= task_reduction_cnt
;
4556 task_reduction_cntorig_full
= task_reduction_cntorig
;
4558 else if (pass
== 3 && task_reduction_cnt
)
4560 x
= builtin_decl_explicit (BUILT_IN_GOMP_TASK_REDUCTION_REMAP
);
4562 = gimple_build_call (x
, 3, size_int (task_reduction_cnt
),
4563 size_int (task_reduction_cntorig
),
4564 build_fold_addr_expr (tskred_avar
));
4565 gimple_seq_add_stmt (ilist
, g
);
4567 if (pass
== 3 && task_reduction_other_cnt
)
4569 /* For reduction clauses, build
4570 tskred_base = (void *) tskred_temp[2]
4571 + omp_get_thread_num () * tskred_temp[1]
4572 or if tskred_temp[1] is known to be constant, that constant
4573 directly. This is the start of the private reduction copy block
4574 for the current thread. */
4575 tree v
= create_tmp_var (integer_type_node
);
4576 x
= builtin_decl_explicit (BUILT_IN_OMP_GET_THREAD_NUM
);
4577 gimple
*g
= gimple_build_call (x
, 0);
4578 gimple_call_set_lhs (g
, v
);
4579 gimple_seq_add_stmt (ilist
, g
);
4580 c
= omp_find_clause (clauses
, OMP_CLAUSE__REDUCTEMP_
);
4581 tskred_temp
= OMP_CLAUSE_DECL (c
);
4582 if (is_taskreg_ctx (ctx
))
4583 tskred_temp
= lookup_decl (tskred_temp
, ctx
);
4584 tree v2
= create_tmp_var (sizetype
);
4585 g
= gimple_build_assign (v2
, NOP_EXPR
, v
);
4586 gimple_seq_add_stmt (ilist
, g
);
4587 if (ctx
->task_reductions
[0])
4588 v
= fold_convert (sizetype
, ctx
->task_reductions
[0]);
4590 v
= task_reduction_read (ilist
, tskred_temp
, sizetype
, 1);
4591 tree v3
= create_tmp_var (sizetype
);
4592 g
= gimple_build_assign (v3
, MULT_EXPR
, v2
, v
);
4593 gimple_seq_add_stmt (ilist
, g
);
4594 v
= task_reduction_read (ilist
, tskred_temp
, ptr_type_node
, 2);
4595 tskred_base
= create_tmp_var (ptr_type_node
);
4596 g
= gimple_build_assign (tskred_base
, POINTER_PLUS_EXPR
, v
, v3
);
4597 gimple_seq_add_stmt (ilist
, g
);
4599 task_reduction_cnt
= 0;
4600 task_reduction_cntorig
= 0;
4601 task_reduction_other_cnt
= 0;
4602 for (c
= clauses
; c
; c
= OMP_CLAUSE_CHAIN (c
))
4604 enum omp_clause_code c_kind
= OMP_CLAUSE_CODE (c
);
4607 location_t clause_loc
= OMP_CLAUSE_LOCATION (c
);
4608 bool task_reduction_p
= false;
4609 bool task_reduction_needs_orig_p
= false;
4610 tree cond
= NULL_TREE
;
4611 tree allocator
, allocate_ptr
;
4615 case OMP_CLAUSE_PRIVATE
:
4616 if (OMP_CLAUSE_PRIVATE_DEBUG (c
))
4619 case OMP_CLAUSE_SHARED
:
4620 /* Ignore shared directives in teams construct inside
4621 of target construct. */
4622 if (gimple_code (ctx
->stmt
) == GIMPLE_OMP_TEAMS
4623 && !is_host_teams_ctx (ctx
))
4625 if (maybe_lookup_decl (OMP_CLAUSE_DECL (c
), ctx
) == NULL
)
4627 gcc_assert (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c
)
4628 || is_global_var (OMP_CLAUSE_DECL (c
)));
4631 case OMP_CLAUSE_FIRSTPRIVATE
:
4632 case OMP_CLAUSE_COPYIN
:
4634 case OMP_CLAUSE_LINEAR
:
4635 if (!OMP_CLAUSE_LINEAR_NO_COPYIN (c
)
4636 && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c
))
4637 lastprivate_firstprivate
= true;
4639 case OMP_CLAUSE_REDUCTION
:
4640 case OMP_CLAUSE_IN_REDUCTION
:
4641 if (is_task_ctx (ctx
) || OMP_CLAUSE_REDUCTION_TASK (c
))
4643 task_reduction_p
= true;
4644 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_REDUCTION
)
4646 task_reduction_other_cnt
++;
4651 task_reduction_cnt
++;
4652 if (OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c
))
4654 var
= OMP_CLAUSE_DECL (c
);
4655 /* If var is a global variable that isn't privatized
4656 in outer contexts, we don't need to look up the
4657 original address, it is always the address of the
4658 global variable itself. */
4660 || omp_is_reference (var
)
4662 (maybe_lookup_decl_in_outer_ctx (var
, ctx
)))
4664 task_reduction_needs_orig_p
= true;
4665 if (OMP_CLAUSE_CODE (c
) != OMP_CLAUSE_REDUCTION
)
4666 task_reduction_cntorig
++;
4670 else if (OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c
))
4671 reduction_omp_orig_ref
= true;
4673 case OMP_CLAUSE__REDUCTEMP_
:
4674 if (!is_taskreg_ctx (ctx
))
4677 case OMP_CLAUSE__LOOPTEMP_
:
4678 /* Handle _looptemp_/_reductemp_ clauses only on
4683 case OMP_CLAUSE_LASTPRIVATE
:
4684 if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c
))
4686 lastprivate_firstprivate
= true;
4687 if (pass
!= 0 || is_taskloop_ctx (ctx
))
4690 /* Even without corresponding firstprivate, if
4691 decl is Fortran allocatable, it needs outer var
4694 && lang_hooks
.decls
.omp_private_outer_ref
4695 (OMP_CLAUSE_DECL (c
)))
4696 lastprivate_firstprivate
= true;
4698 case OMP_CLAUSE_ALIGNED
:
4701 var
= OMP_CLAUSE_DECL (c
);
4702 if (TREE_CODE (TREE_TYPE (var
)) == POINTER_TYPE
4703 && !is_global_var (var
))
4705 new_var
= maybe_lookup_decl (var
, ctx
);
4706 if (new_var
== NULL_TREE
)
4707 new_var
= maybe_lookup_decl_in_outer_ctx (var
, ctx
);
4708 x
= builtin_decl_explicit (BUILT_IN_ASSUME_ALIGNED
);
4709 tree alarg
= omp_clause_aligned_alignment (c
);
4710 alarg
= fold_convert_loc (clause_loc
, size_type_node
, alarg
);
4711 x
= build_call_expr_loc (clause_loc
, x
, 2, new_var
, alarg
);
4712 x
= fold_convert_loc (clause_loc
, TREE_TYPE (new_var
), x
);
4713 x
= build2 (MODIFY_EXPR
, TREE_TYPE (new_var
), new_var
, x
);
4714 gimplify_and_add (x
, ilist
);
4716 else if (TREE_CODE (TREE_TYPE (var
)) == ARRAY_TYPE
4717 && is_global_var (var
))
4719 tree ptype
= build_pointer_type (TREE_TYPE (var
)), t
, t2
;
4720 new_var
= lookup_decl (var
, ctx
);
4721 t
= maybe_lookup_decl_in_outer_ctx (var
, ctx
);
4722 t
= build_fold_addr_expr_loc (clause_loc
, t
);
4723 t2
= builtin_decl_explicit (BUILT_IN_ASSUME_ALIGNED
);
4724 tree alarg
= omp_clause_aligned_alignment (c
);
4725 alarg
= fold_convert_loc (clause_loc
, size_type_node
, alarg
);
4726 t
= build_call_expr_loc (clause_loc
, t2
, 2, t
, alarg
);
4727 t
= fold_convert_loc (clause_loc
, ptype
, t
);
4728 x
= create_tmp_var (ptype
);
4729 t
= build2 (MODIFY_EXPR
, ptype
, x
, t
);
4730 gimplify_and_add (t
, ilist
);
4731 t
= build_simple_mem_ref_loc (clause_loc
, x
);
4732 SET_DECL_VALUE_EXPR (new_var
, t
);
4733 DECL_HAS_VALUE_EXPR_P (new_var
) = 1;
4736 case OMP_CLAUSE__CONDTEMP_
:
4737 if (is_parallel_ctx (ctx
)
4738 || (is_simd
&& !OMP_CLAUSE__CONDTEMP__ITER (c
)))
4745 if (task_reduction_p
!= (pass
>= 2))
4748 allocator
= NULL_TREE
;
4749 allocate_ptr
= NULL_TREE
;
4750 new_var
= var
= OMP_CLAUSE_DECL (c
);
4751 if ((c_kind
== OMP_CLAUSE_REDUCTION
4752 || c_kind
== OMP_CLAUSE_IN_REDUCTION
)
4753 && TREE_CODE (var
) == MEM_REF
)
4755 var
= TREE_OPERAND (var
, 0);
4756 if (TREE_CODE (var
) == POINTER_PLUS_EXPR
)
4757 var
= TREE_OPERAND (var
, 0);
4758 if (TREE_CODE (var
) == INDIRECT_REF
4759 || TREE_CODE (var
) == ADDR_EXPR
)
4760 var
= TREE_OPERAND (var
, 0);
4761 if (is_variable_sized (var
))
4763 gcc_assert (DECL_HAS_VALUE_EXPR_P (var
));
4764 var
= DECL_VALUE_EXPR (var
);
4765 gcc_assert (TREE_CODE (var
) == INDIRECT_REF
);
4766 var
= TREE_OPERAND (var
, 0);
4767 gcc_assert (DECL_P (var
));
4771 if (c_kind
!= OMP_CLAUSE_COPYIN
)
4772 new_var
= lookup_decl (var
, ctx
);
4774 if (c_kind
== OMP_CLAUSE_SHARED
|| c_kind
== OMP_CLAUSE_COPYIN
)
4779 /* C/C++ array section reductions. */
4780 else if ((c_kind
== OMP_CLAUSE_REDUCTION
4781 || c_kind
== OMP_CLAUSE_IN_REDUCTION
)
4782 && var
!= OMP_CLAUSE_DECL (c
))
4787 tree bias
= TREE_OPERAND (OMP_CLAUSE_DECL (c
), 1);
4788 tree orig_var
= TREE_OPERAND (OMP_CLAUSE_DECL (c
), 0);
4790 if (TREE_CODE (orig_var
) == POINTER_PLUS_EXPR
)
4792 tree b
= TREE_OPERAND (orig_var
, 1);
4793 b
= maybe_lookup_decl (b
, ctx
);
4796 b
= TREE_OPERAND (orig_var
, 1);
4797 b
= maybe_lookup_decl_in_outer_ctx (b
, ctx
);
4799 if (integer_zerop (bias
))
4803 bias
= fold_convert_loc (clause_loc
,
4804 TREE_TYPE (b
), bias
);
4805 bias
= fold_build2_loc (clause_loc
, PLUS_EXPR
,
4806 TREE_TYPE (b
), b
, bias
);
4808 orig_var
= TREE_OPERAND (orig_var
, 0);
4812 tree out
= maybe_lookup_decl_in_outer_ctx (var
, ctx
);
4813 if (is_global_var (out
)
4814 && TREE_CODE (TREE_TYPE (out
)) != POINTER_TYPE
4815 && (TREE_CODE (TREE_TYPE (out
)) != REFERENCE_TYPE
4816 || (TREE_CODE (TREE_TYPE (TREE_TYPE (out
)))
4821 bool by_ref
= use_pointer_for_field (var
, NULL
);
4822 x
= build_receiver_ref (var
, by_ref
, ctx
);
4823 if (TREE_CODE (TREE_TYPE (var
)) == REFERENCE_TYPE
4824 && (TREE_CODE (TREE_TYPE (TREE_TYPE (var
)))
4826 x
= build_fold_addr_expr (x
);
4828 if (TREE_CODE (orig_var
) == INDIRECT_REF
)
4829 x
= build_simple_mem_ref (x
);
4830 else if (TREE_CODE (orig_var
) == ADDR_EXPR
)
4832 if (var
== TREE_OPERAND (orig_var
, 0))
4833 x
= build_fold_addr_expr (x
);
4835 bias
= fold_convert (sizetype
, bias
);
4836 x
= fold_convert (ptr_type_node
, x
);
4837 x
= fold_build2_loc (clause_loc
, POINTER_PLUS_EXPR
,
4838 TREE_TYPE (x
), x
, bias
);
4839 unsigned cnt
= task_reduction_cnt
- 1;
4840 if (!task_reduction_needs_orig_p
)
4841 cnt
+= (task_reduction_cntorig_full
4842 - task_reduction_cntorig
);
4844 cnt
= task_reduction_cntorig
- 1;
4845 tree r
= build4 (ARRAY_REF
, ptr_type_node
, tskred_avar
,
4846 size_int (cnt
), NULL_TREE
, NULL_TREE
);
4847 gimplify_assign (r
, x
, ilist
);
4851 if (TREE_CODE (orig_var
) == INDIRECT_REF
4852 || TREE_CODE (orig_var
) == ADDR_EXPR
)
4853 orig_var
= TREE_OPERAND (orig_var
, 0);
4854 tree d
= OMP_CLAUSE_DECL (c
);
4855 tree type
= TREE_TYPE (d
);
4856 gcc_assert (TREE_CODE (type
) == ARRAY_TYPE
);
4857 tree v
= TYPE_MAX_VALUE (TYPE_DOMAIN (type
));
4858 const char *name
= get_name (orig_var
);
4861 tree xv
= create_tmp_var (ptr_type_node
);
4862 if (OMP_CLAUSE_CODE (c
) != OMP_CLAUSE_REDUCTION
)
4864 unsigned cnt
= task_reduction_cnt
- 1;
4865 if (!task_reduction_needs_orig_p
)
4866 cnt
+= (task_reduction_cntorig_full
4867 - task_reduction_cntorig
);
4869 cnt
= task_reduction_cntorig
- 1;
4870 x
= build4 (ARRAY_REF
, ptr_type_node
, tskred_avar
,
4871 size_int (cnt
), NULL_TREE
, NULL_TREE
);
4873 gimple
*g
= gimple_build_assign (xv
, x
);
4874 gimple_seq_add_stmt (ilist
, g
);
4878 unsigned int idx
= *ctx
->task_reduction_map
->get (c
);
4880 if (ctx
->task_reductions
[1 + idx
])
4881 off
= fold_convert (sizetype
,
4882 ctx
->task_reductions
[1 + idx
]);
4884 off
= task_reduction_read (ilist
, tskred_temp
, sizetype
,
4886 gimple
*g
= gimple_build_assign (xv
, POINTER_PLUS_EXPR
,
4888 gimple_seq_add_stmt (ilist
, g
);
4890 x
= fold_convert (build_pointer_type (boolean_type_node
),
4892 if (TREE_CONSTANT (v
))
4893 x
= fold_build2 (POINTER_PLUS_EXPR
, TREE_TYPE (x
), x
,
4894 TYPE_SIZE_UNIT (type
));
4897 tree t
= maybe_lookup_decl (v
, ctx
);
4901 v
= maybe_lookup_decl_in_outer_ctx (v
, ctx
);
4902 gimplify_expr (&v
, ilist
, NULL
, is_gimple_val
,
4904 t
= fold_build2_loc (clause_loc
, PLUS_EXPR
,
4906 build_int_cst (TREE_TYPE (v
), 1));
4907 t
= fold_build2_loc (clause_loc
, MULT_EXPR
,
4909 TYPE_SIZE_UNIT (TREE_TYPE (type
)));
4910 x
= fold_build2 (POINTER_PLUS_EXPR
, TREE_TYPE (x
), x
, t
);
4912 cond
= create_tmp_var (TREE_TYPE (x
));
4913 gimplify_assign (cond
, x
, ilist
);
4916 else if (TREE_CONSTANT (v
))
4918 x
= create_tmp_var_raw (type
, name
);
4919 gimple_add_tmp_var (x
);
4920 TREE_ADDRESSABLE (x
) = 1;
4921 x
= build_fold_addr_expr_loc (clause_loc
, x
);
4926 = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN
);
4927 tree t
= maybe_lookup_decl (v
, ctx
);
4931 v
= maybe_lookup_decl_in_outer_ctx (v
, ctx
);
4932 gimplify_expr (&v
, ilist
, NULL
, is_gimple_val
, fb_rvalue
);
4933 t
= fold_build2_loc (clause_loc
, PLUS_EXPR
,
4935 build_int_cst (TREE_TYPE (v
), 1));
4936 t
= fold_build2_loc (clause_loc
, MULT_EXPR
,
4938 TYPE_SIZE_UNIT (TREE_TYPE (type
)));
4939 tree al
= size_int (TYPE_ALIGN (TREE_TYPE (type
)));
4940 x
= build_call_expr_loc (clause_loc
, atmp
, 2, t
, al
);
4943 tree ptype
= build_pointer_type (TREE_TYPE (type
));
4944 x
= fold_convert_loc (clause_loc
, ptype
, x
);
4945 tree y
= create_tmp_var (ptype
, name
);
4946 gimplify_assign (y
, x
, ilist
);
4950 if (!integer_zerop (bias
))
4952 bias
= fold_convert_loc (clause_loc
, pointer_sized_int_node
,
4954 yb
= fold_convert_loc (clause_loc
, pointer_sized_int_node
,
4956 yb
= fold_build2_loc (clause_loc
, MINUS_EXPR
,
4957 pointer_sized_int_node
, yb
, bias
);
4958 x
= fold_convert_loc (clause_loc
, TREE_TYPE (x
), yb
);
4959 yb
= create_tmp_var (ptype
, name
);
4960 gimplify_assign (yb
, x
, ilist
);
4964 d
= TREE_OPERAND (d
, 0);
4965 if (TREE_CODE (d
) == POINTER_PLUS_EXPR
)
4966 d
= TREE_OPERAND (d
, 0);
4967 if (TREE_CODE (d
) == ADDR_EXPR
)
4969 if (orig_var
!= var
)
4971 gcc_assert (is_variable_sized (orig_var
));
4972 x
= fold_convert_loc (clause_loc
, TREE_TYPE (new_var
),
4974 gimplify_assign (new_var
, x
, ilist
);
4975 tree new_orig_var
= lookup_decl (orig_var
, ctx
);
4976 tree t
= build_fold_indirect_ref (new_var
);
4977 DECL_IGNORED_P (new_var
) = 0;
4978 TREE_THIS_NOTRAP (t
) = 1;
4979 SET_DECL_VALUE_EXPR (new_orig_var
, t
);
4980 DECL_HAS_VALUE_EXPR_P (new_orig_var
) = 1;
4984 x
= build2 (MEM_REF
, TREE_TYPE (new_var
), x
,
4985 build_int_cst (ptype
, 0));
4986 SET_DECL_VALUE_EXPR (new_var
, x
);
4987 DECL_HAS_VALUE_EXPR_P (new_var
) = 1;
4992 gcc_assert (orig_var
== var
);
4993 if (TREE_CODE (d
) == INDIRECT_REF
)
4995 x
= create_tmp_var (ptype
, name
);
4996 TREE_ADDRESSABLE (x
) = 1;
4997 gimplify_assign (x
, yb
, ilist
);
4998 x
= build_fold_addr_expr_loc (clause_loc
, x
);
5000 x
= fold_convert_loc (clause_loc
, TREE_TYPE (new_var
), x
);
5001 gimplify_assign (new_var
, x
, ilist
);
5003 /* GOMP_taskgroup_reduction_register memsets the whole
5004 array to zero. If the initializer is zero, we don't
5005 need to initialize it again, just mark it as ever
5006 used unconditionally, i.e. cond = true. */
5008 && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
) == NULL_TREE
5009 && initializer_zerop (omp_reduction_init (c
,
5012 gimple
*g
= gimple_build_assign (build_simple_mem_ref (cond
),
5014 gimple_seq_add_stmt (ilist
, g
);
5017 tree end
= create_artificial_label (UNKNOWN_LOCATION
);
5021 if (!is_parallel_ctx (ctx
))
5023 tree condv
= create_tmp_var (boolean_type_node
);
5024 g
= gimple_build_assign (condv
,
5025 build_simple_mem_ref (cond
));
5026 gimple_seq_add_stmt (ilist
, g
);
5027 tree lab1
= create_artificial_label (UNKNOWN_LOCATION
);
5028 g
= gimple_build_cond (NE_EXPR
, condv
,
5029 boolean_false_node
, end
, lab1
);
5030 gimple_seq_add_stmt (ilist
, g
);
5031 gimple_seq_add_stmt (ilist
, gimple_build_label (lab1
));
5033 g
= gimple_build_assign (build_simple_mem_ref (cond
),
5035 gimple_seq_add_stmt (ilist
, g
);
5038 tree y1
= create_tmp_var (ptype
);
5039 gimplify_assign (y1
, y
, ilist
);
5040 tree i2
= NULL_TREE
, y2
= NULL_TREE
;
5041 tree body2
= NULL_TREE
, end2
= NULL_TREE
;
5042 tree y3
= NULL_TREE
, y4
= NULL_TREE
;
5043 if (task_reduction_needs_orig_p
)
5045 y3
= create_tmp_var (ptype
);
5047 if (OMP_CLAUSE_CODE (c
) != OMP_CLAUSE_REDUCTION
)
5048 ref
= build4 (ARRAY_REF
, ptr_type_node
, tskred_avar
,
5049 size_int (task_reduction_cnt_full
5050 + task_reduction_cntorig
- 1),
5051 NULL_TREE
, NULL_TREE
);
5054 unsigned int idx
= *ctx
->task_reduction_map
->get (c
);
5055 ref
= task_reduction_read (ilist
, tskred_temp
, ptype
,
5058 gimplify_assign (y3
, ref
, ilist
);
5060 else if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
) || is_simd
)
5064 y2
= create_tmp_var (ptype
);
5065 gimplify_assign (y2
, y
, ilist
);
5067 if (is_simd
|| OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c
))
5069 tree ref
= build_outer_var_ref (var
, ctx
);
5070 /* For ref build_outer_var_ref already performs this. */
5071 if (TREE_CODE (d
) == INDIRECT_REF
)
5072 gcc_assert (omp_is_reference (var
));
5073 else if (TREE_CODE (d
) == ADDR_EXPR
)
5074 ref
= build_fold_addr_expr (ref
);
5075 else if (omp_is_reference (var
))
5076 ref
= build_fold_addr_expr (ref
);
5077 ref
= fold_convert_loc (clause_loc
, ptype
, ref
);
5078 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
)
5079 && OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c
))
5081 y3
= create_tmp_var (ptype
);
5082 gimplify_assign (y3
, unshare_expr (ref
), ilist
);
5086 y4
= create_tmp_var (ptype
);
5087 gimplify_assign (y4
, ref
, dlist
);
5091 tree i
= create_tmp_var (TREE_TYPE (v
));
5092 gimplify_assign (i
, build_int_cst (TREE_TYPE (v
), 0), ilist
);
5093 tree body
= create_artificial_label (UNKNOWN_LOCATION
);
5094 gimple_seq_add_stmt (ilist
, gimple_build_label (body
));
5097 i2
= create_tmp_var (TREE_TYPE (v
));
5098 gimplify_assign (i2
, build_int_cst (TREE_TYPE (v
), 0), dlist
);
5099 body2
= create_artificial_label (UNKNOWN_LOCATION
);
5100 end2
= create_artificial_label (UNKNOWN_LOCATION
);
5101 gimple_seq_add_stmt (dlist
, gimple_build_label (body2
));
5103 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
))
5105 tree placeholder
= OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
);
5106 tree decl_placeholder
5107 = OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c
);
5108 SET_DECL_VALUE_EXPR (decl_placeholder
,
5109 build_simple_mem_ref (y1
));
5110 DECL_HAS_VALUE_EXPR_P (decl_placeholder
) = 1;
5111 SET_DECL_VALUE_EXPR (placeholder
,
5112 y3
? build_simple_mem_ref (y3
)
5114 DECL_HAS_VALUE_EXPR_P (placeholder
) = 1;
5115 x
= lang_hooks
.decls
.omp_clause_default_ctor
5116 (c
, build_simple_mem_ref (y1
),
5117 y3
? build_simple_mem_ref (y3
) : NULL_TREE
);
5119 gimplify_and_add (x
, ilist
);
5120 if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c
))
5122 gimple_seq tseq
= OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c
);
5123 lower_omp (&tseq
, ctx
);
5124 gimple_seq_add_seq (ilist
, tseq
);
5126 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c
) = NULL
;
5129 SET_DECL_VALUE_EXPR (decl_placeholder
,
5130 build_simple_mem_ref (y2
));
5131 SET_DECL_VALUE_EXPR (placeholder
,
5132 build_simple_mem_ref (y4
));
5133 gimple_seq tseq
= OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c
);
5134 lower_omp (&tseq
, ctx
);
5135 gimple_seq_add_seq (dlist
, tseq
);
5136 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c
) = NULL
;
5138 DECL_HAS_VALUE_EXPR_P (placeholder
) = 0;
5139 DECL_HAS_VALUE_EXPR_P (decl_placeholder
) = 0;
5142 x
= lang_hooks
.decls
.omp_clause_dtor
5143 (c
, build_simple_mem_ref (y2
));
5145 gimplify_and_add (x
, dlist
);
5150 x
= omp_reduction_init (c
, TREE_TYPE (type
));
5151 enum tree_code code
= OMP_CLAUSE_REDUCTION_CODE (c
);
5153 /* reduction(-:var) sums up the partial results, so it
5154 acts identically to reduction(+:var). */
5155 if (code
== MINUS_EXPR
)
5158 gimplify_assign (build_simple_mem_ref (y1
), x
, ilist
);
5161 x
= build2 (code
, TREE_TYPE (type
),
5162 build_simple_mem_ref (y4
),
5163 build_simple_mem_ref (y2
));
5164 gimplify_assign (build_simple_mem_ref (y4
), x
, dlist
);
5168 = gimple_build_assign (y1
, POINTER_PLUS_EXPR
, y1
,
5169 TYPE_SIZE_UNIT (TREE_TYPE (type
)));
5170 gimple_seq_add_stmt (ilist
, g
);
5173 g
= gimple_build_assign (y3
, POINTER_PLUS_EXPR
, y3
,
5174 TYPE_SIZE_UNIT (TREE_TYPE (type
)));
5175 gimple_seq_add_stmt (ilist
, g
);
5177 g
= gimple_build_assign (i
, PLUS_EXPR
, i
,
5178 build_int_cst (TREE_TYPE (i
), 1));
5179 gimple_seq_add_stmt (ilist
, g
);
5180 g
= gimple_build_cond (LE_EXPR
, i
, v
, body
, end
);
5181 gimple_seq_add_stmt (ilist
, g
);
5182 gimple_seq_add_stmt (ilist
, gimple_build_label (end
));
5185 g
= gimple_build_assign (y2
, POINTER_PLUS_EXPR
, y2
,
5186 TYPE_SIZE_UNIT (TREE_TYPE (type
)));
5187 gimple_seq_add_stmt (dlist
, g
);
5190 g
= gimple_build_assign
5191 (y4
, POINTER_PLUS_EXPR
, y4
,
5192 TYPE_SIZE_UNIT (TREE_TYPE (type
)));
5193 gimple_seq_add_stmt (dlist
, g
);
5195 g
= gimple_build_assign (i2
, PLUS_EXPR
, i2
,
5196 build_int_cst (TREE_TYPE (i2
), 1));
5197 gimple_seq_add_stmt (dlist
, g
);
5198 g
= gimple_build_cond (LE_EXPR
, i2
, v
, body2
, end2
);
5199 gimple_seq_add_stmt (dlist
, g
);
5200 gimple_seq_add_stmt (dlist
, gimple_build_label (end2
));
5206 if (is_global_var (maybe_lookup_decl_in_outer_ctx (var
, ctx
)))
5210 bool by_ref
= use_pointer_for_field (var
, ctx
);
5211 x
= build_receiver_ref (var
, by_ref
, ctx
);
5213 if (!omp_is_reference (var
))
5214 x
= build_fold_addr_expr (x
);
5215 x
= fold_convert (ptr_type_node
, x
);
5216 unsigned cnt
= task_reduction_cnt
- 1;
5217 if (!task_reduction_needs_orig_p
)
5218 cnt
+= task_reduction_cntorig_full
- task_reduction_cntorig
;
5220 cnt
= task_reduction_cntorig
- 1;
5221 tree r
= build4 (ARRAY_REF
, ptr_type_node
, tskred_avar
,
5222 size_int (cnt
), NULL_TREE
, NULL_TREE
);
5223 gimplify_assign (r
, x
, ilist
);
5228 tree type
= TREE_TYPE (new_var
);
5229 if (!omp_is_reference (var
))
5230 type
= build_pointer_type (type
);
5231 if (OMP_CLAUSE_CODE (c
) != OMP_CLAUSE_REDUCTION
)
5233 unsigned cnt
= task_reduction_cnt
- 1;
5234 if (!task_reduction_needs_orig_p
)
5235 cnt
+= (task_reduction_cntorig_full
5236 - task_reduction_cntorig
);
5238 cnt
= task_reduction_cntorig
- 1;
5239 x
= build4 (ARRAY_REF
, ptr_type_node
, tskred_avar
,
5240 size_int (cnt
), NULL_TREE
, NULL_TREE
);
5244 unsigned int idx
= *ctx
->task_reduction_map
->get (c
);
5246 if (ctx
->task_reductions
[1 + idx
])
5247 off
= fold_convert (sizetype
,
5248 ctx
->task_reductions
[1 + idx
]);
5250 off
= task_reduction_read (ilist
, tskred_temp
, sizetype
,
5252 x
= fold_build2 (POINTER_PLUS_EXPR
, ptr_type_node
,
5255 x
= fold_convert (type
, x
);
5257 if (omp_is_reference (var
))
5259 gimplify_assign (new_var
, x
, ilist
);
5261 new_var
= build_simple_mem_ref (new_var
);
5265 t
= create_tmp_var (type
);
5266 gimplify_assign (t
, x
, ilist
);
5267 SET_DECL_VALUE_EXPR (new_var
, build_simple_mem_ref (t
));
5268 DECL_HAS_VALUE_EXPR_P (new_var
) = 1;
5270 t
= fold_convert (build_pointer_type (boolean_type_node
), t
);
5271 t
= fold_build2 (POINTER_PLUS_EXPR
, TREE_TYPE (t
), t
,
5272 TYPE_SIZE_UNIT (TREE_TYPE (type
)));
5273 cond
= create_tmp_var (TREE_TYPE (t
));
5274 gimplify_assign (cond
, t
, ilist
);
5276 else if (is_variable_sized (var
))
5278 /* For variable sized types, we need to allocate the
5279 actual storage here. Call alloca and store the
5280 result in the pointer decl that we created elsewhere. */
5284 if (c_kind
!= OMP_CLAUSE_FIRSTPRIVATE
|| !is_task_ctx (ctx
))
5288 ptr
= DECL_VALUE_EXPR (new_var
);
5289 gcc_assert (TREE_CODE (ptr
) == INDIRECT_REF
);
5290 ptr
= TREE_OPERAND (ptr
, 0);
5291 gcc_assert (DECL_P (ptr
));
5292 x
= TYPE_SIZE_UNIT (TREE_TYPE (new_var
));
5294 if (lower_private_allocate (var
, new_var
, allocator
,
5295 allocate_ptr
, ilist
, ctx
,
5300 /* void *tmp = __builtin_alloca */
5302 = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN
);
5304 = gimple_build_call (atmp
, 2, x
,
5305 size_int (DECL_ALIGN (var
)));
5306 cfun
->calls_alloca
= 1;
5307 tmp
= create_tmp_var_raw (ptr_type_node
);
5308 gimple_add_tmp_var (tmp
);
5309 gimple_call_set_lhs (stmt
, tmp
);
5311 gimple_seq_add_stmt (ilist
, stmt
);
5314 x
= fold_convert_loc (clause_loc
, TREE_TYPE (ptr
), tmp
);
5315 gimplify_assign (ptr
, x
, ilist
);
5318 else if (omp_is_reference (var
)
5319 && (c_kind
!= OMP_CLAUSE_FIRSTPRIVATE
5320 || !OMP_CLAUSE_FIRSTPRIVATE_NO_REFERENCE (c
)))
5322 /* For references that are being privatized for Fortran,
5323 allocate new backing storage for the new pointer
5324 variable. This allows us to avoid changing all the
5325 code that expects a pointer to something that expects
5326 a direct variable. */
5330 x
= TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (new_var
)));
5331 if (c_kind
== OMP_CLAUSE_FIRSTPRIVATE
&& is_task_ctx (ctx
))
5333 x
= build_receiver_ref (var
, false, ctx
);
5334 x
= build_fold_addr_expr_loc (clause_loc
, x
);
5336 else if (lower_private_allocate (var
, new_var
, allocator
,
5338 ilist
, ctx
, true, x
))
5340 else if (TREE_CONSTANT (x
))
5342 /* For reduction in SIMD loop, defer adding the
5343 initialization of the reference, because if we decide
5344 to use SIMD array for it, the initilization could cause
5345 expansion ICE. Ditto for other privatization clauses. */
5350 x
= create_tmp_var_raw (TREE_TYPE (TREE_TYPE (new_var
)),
5352 gimple_add_tmp_var (x
);
5353 TREE_ADDRESSABLE (x
) = 1;
5354 x
= build_fold_addr_expr_loc (clause_loc
, x
);
5360 = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN
);
5361 tree rtype
= TREE_TYPE (TREE_TYPE (new_var
));
5362 tree al
= size_int (TYPE_ALIGN (rtype
));
5363 x
= build_call_expr_loc (clause_loc
, atmp
, 2, x
, al
);
5368 x
= fold_convert_loc (clause_loc
, TREE_TYPE (new_var
), x
);
5369 gimplify_assign (new_var
, x
, ilist
);
5372 new_var
= build_simple_mem_ref_loc (clause_loc
, new_var
);
5374 else if ((c_kind
== OMP_CLAUSE_REDUCTION
5375 || c_kind
== OMP_CLAUSE_IN_REDUCTION
)
5376 && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
))
5384 switch (OMP_CLAUSE_CODE (c
))
5386 case OMP_CLAUSE_SHARED
:
5387 /* Ignore shared directives in teams construct inside
5388 target construct. */
5389 if (gimple_code (ctx
->stmt
) == GIMPLE_OMP_TEAMS
5390 && !is_host_teams_ctx (ctx
))
5392 /* Shared global vars are just accessed directly. */
5393 if (is_global_var (new_var
))
5395 /* For taskloop firstprivate/lastprivate, represented
5396 as firstprivate and shared clause on the task, new_var
5397 is the firstprivate var. */
5398 if (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c
))
5400 /* Set up the DECL_VALUE_EXPR for shared variables now. This
5401 needs to be delayed until after fixup_child_record_type so
5402 that we get the correct type during the dereference. */
5403 by_ref
= use_pointer_for_field (var
, ctx
);
5404 x
= build_receiver_ref (var
, by_ref
, ctx
);
5405 SET_DECL_VALUE_EXPR (new_var
, x
);
5406 DECL_HAS_VALUE_EXPR_P (new_var
) = 1;
5408 /* ??? If VAR is not passed by reference, and the variable
5409 hasn't been initialized yet, then we'll get a warning for
5410 the store into the omp_data_s structure. Ideally, we'd be
5411 able to notice this and not store anything at all, but
5412 we're generating code too early. Suppress the warning. */
5414 TREE_NO_WARNING (var
) = 1;
5417 case OMP_CLAUSE__CONDTEMP_
:
5418 if (is_parallel_ctx (ctx
))
5420 x
= build_receiver_ref (var
, false, ctx
);
5421 SET_DECL_VALUE_EXPR (new_var
, x
);
5422 DECL_HAS_VALUE_EXPR_P (new_var
) = 1;
5424 else if (is_simd
&& !OMP_CLAUSE__CONDTEMP__ITER (c
))
5426 x
= build_zero_cst (TREE_TYPE (var
));
5431 case OMP_CLAUSE_LASTPRIVATE
:
5432 if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c
))
5436 case OMP_CLAUSE_PRIVATE
:
5437 if (OMP_CLAUSE_CODE (c
) != OMP_CLAUSE_PRIVATE
)
5438 x
= build_outer_var_ref (var
, ctx
);
5439 else if (OMP_CLAUSE_PRIVATE_OUTER_REF (c
))
5441 if (is_task_ctx (ctx
))
5442 x
= build_receiver_ref (var
, false, ctx
);
5444 x
= build_outer_var_ref (var
, ctx
, OMP_CLAUSE_PRIVATE
);
5452 lower_private_allocate (var
, new_var
, allocator
, allocate_ptr
,
5453 ilist
, ctx
, false, NULL_TREE
);
5454 nx
= unshare_expr (new_var
);
5456 && OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LASTPRIVATE
5457 && OMP_CLAUSE_LASTPRIVATE_LOOP_IV (c
))
5460 nx
= lang_hooks
.decls
.omp_clause_copy_ctor (c
, nx
, x
);
5462 nx
= lang_hooks
.decls
.omp_clause_default_ctor (c
, nx
, x
);
5465 tree y
= lang_hooks
.decls
.omp_clause_dtor (c
, new_var
);
5466 if ((TREE_ADDRESSABLE (new_var
) || nx
|| y
5467 || (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LASTPRIVATE
5468 && (gimple_omp_for_collapse (ctx
->stmt
) != 1
5469 || (gimple_omp_for_index (ctx
->stmt
, 0)
5471 || OMP_CLAUSE_CODE (c
) == OMP_CLAUSE__CONDTEMP_
5472 || omp_is_reference (var
))
5473 && lower_rec_simd_input_clauses (new_var
, ctx
, &sctx
,
5476 if (omp_is_reference (var
))
5478 gcc_assert (TREE_CODE (new_var
) == MEM_REF
);
5479 tree new_vard
= TREE_OPERAND (new_var
, 0);
5480 gcc_assert (DECL_P (new_vard
));
5481 SET_DECL_VALUE_EXPR (new_vard
,
5482 build_fold_addr_expr (lvar
));
5483 DECL_HAS_VALUE_EXPR_P (new_vard
) = 1;
5488 tree iv
= unshare_expr (ivar
);
5490 x
= lang_hooks
.decls
.omp_clause_copy_ctor (c
, iv
,
5493 x
= lang_hooks
.decls
.omp_clause_default_ctor (c
,
5497 else if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE__CONDTEMP_
)
5499 x
= build2 (MODIFY_EXPR
, TREE_TYPE (ivar
),
5500 unshare_expr (ivar
), x
);
5504 gimplify_and_add (x
, &llist
[0]);
5505 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LASTPRIVATE
5506 && OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c
))
5511 gcc_assert (TREE_CODE (v
) == MEM_REF
);
5512 v
= TREE_OPERAND (v
, 0);
5513 gcc_assert (DECL_P (v
));
5515 v
= *ctx
->lastprivate_conditional_map
->get (v
);
5516 tree t
= create_tmp_var (TREE_TYPE (v
));
5517 tree z
= build_zero_cst (TREE_TYPE (v
));
5519 = build_outer_var_ref (var
, ctx
,
5520 OMP_CLAUSE_LASTPRIVATE
);
5521 gimple_seq_add_stmt (dlist
,
5522 gimple_build_assign (t
, z
));
5523 gcc_assert (DECL_HAS_VALUE_EXPR_P (v
));
5524 tree civar
= DECL_VALUE_EXPR (v
);
5525 gcc_assert (TREE_CODE (civar
) == ARRAY_REF
);
5526 civar
= unshare_expr (civar
);
5527 TREE_OPERAND (civar
, 1) = sctx
.idx
;
5528 x
= build2 (MODIFY_EXPR
, TREE_TYPE (t
), t
,
5529 unshare_expr (civar
));
5530 x
= build2 (COMPOUND_EXPR
, TREE_TYPE (orig_v
), x
,
5531 build2 (MODIFY_EXPR
, TREE_TYPE (orig_v
),
5532 orig_v
, unshare_expr (ivar
)));
5533 tree cond
= build2 (LT_EXPR
, boolean_type_node
, t
,
5535 x
= build3 (COND_EXPR
, void_type_node
, cond
, x
,
5537 gimple_seq tseq
= NULL
;
5538 gimplify_and_add (x
, &tseq
);
5540 lower_omp (&tseq
, ctx
->outer
);
5541 gimple_seq_add_seq (&llist
[1], tseq
);
5543 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LASTPRIVATE
5544 && ctx
->for_simd_scan_phase
)
5546 x
= unshare_expr (ivar
);
5548 = build_outer_var_ref (var
, ctx
,
5549 OMP_CLAUSE_LASTPRIVATE
);
5550 x
= lang_hooks
.decls
.omp_clause_assign_op (c
, x
,
5552 gimplify_and_add (x
, &llist
[0]);
5556 y
= lang_hooks
.decls
.omp_clause_dtor (c
, ivar
);
5558 gimplify_and_add (y
, &llist
[1]);
5562 if (omp_is_reference (var
))
5564 gcc_assert (TREE_CODE (new_var
) == MEM_REF
);
5565 tree new_vard
= TREE_OPERAND (new_var
, 0);
5566 gcc_assert (DECL_P (new_vard
));
5567 tree type
= TREE_TYPE (TREE_TYPE (new_vard
));
5568 x
= TYPE_SIZE_UNIT (type
);
5569 if (TREE_CONSTANT (x
))
5571 x
= create_tmp_var_raw (type
, get_name (var
));
5572 gimple_add_tmp_var (x
);
5573 TREE_ADDRESSABLE (x
) = 1;
5574 x
= build_fold_addr_expr_loc (clause_loc
, x
);
5575 x
= fold_convert_loc (clause_loc
,
5576 TREE_TYPE (new_vard
), x
);
5577 gimplify_assign (new_vard
, x
, ilist
);
5582 gimplify_and_add (nx
, ilist
);
5583 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LASTPRIVATE
5585 && ctx
->for_simd_scan_phase
)
5587 tree orig_v
= build_outer_var_ref (var
, ctx
,
5588 OMP_CLAUSE_LASTPRIVATE
);
5589 x
= lang_hooks
.decls
.omp_clause_assign_op (c
, new_var
,
5591 gimplify_and_add (x
, ilist
);
5596 x
= lang_hooks
.decls
.omp_clause_dtor (c
, new_var
);
5598 gimplify_and_add (x
, dlist
);
5601 tree f
= builtin_decl_explicit (BUILT_IN_GOMP_FREE
);
5603 = gimple_build_call (f
, 2, allocate_ptr
, allocator
);
5604 gimple_seq_add_stmt (dlist
, g
);
5608 case OMP_CLAUSE_LINEAR
:
5609 if (!OMP_CLAUSE_LINEAR_NO_COPYIN (c
))
5610 goto do_firstprivate
;
5611 if (OMP_CLAUSE_LINEAR_NO_COPYOUT (c
))
5614 x
= build_outer_var_ref (var
, ctx
);
5617 case OMP_CLAUSE_FIRSTPRIVATE
:
5618 if (is_task_ctx (ctx
))
5620 if ((omp_is_reference (var
)
5621 && !OMP_CLAUSE_FIRSTPRIVATE_NO_REFERENCE (c
))
5622 || is_variable_sized (var
))
5624 else if (is_global_var (maybe_lookup_decl_in_outer_ctx (var
,
5626 || use_pointer_for_field (var
, NULL
))
5628 x
= build_receiver_ref (var
, false, ctx
);
5629 SET_DECL_VALUE_EXPR (new_var
, x
);
5630 DECL_HAS_VALUE_EXPR_P (new_var
) = 1;
5634 if (OMP_CLAUSE_FIRSTPRIVATE_NO_REFERENCE (c
)
5635 && omp_is_reference (var
))
5637 x
= build_outer_var_ref (var
, ctx
);
5638 gcc_assert (TREE_CODE (x
) == MEM_REF
5639 && integer_zerop (TREE_OPERAND (x
, 1)));
5640 x
= TREE_OPERAND (x
, 0);
5641 x
= lang_hooks
.decls
.omp_clause_copy_ctor
5642 (c
, unshare_expr (new_var
), x
);
5643 gimplify_and_add (x
, ilist
);
5647 lower_private_allocate (var
, new_var
, allocator
, allocate_ptr
,
5648 ilist
, ctx
, false, NULL_TREE
);
5649 x
= build_outer_var_ref (var
, ctx
);
5652 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LINEAR
5653 && gimple_omp_for_combined_into_p (ctx
->stmt
))
5655 tree t
= OMP_CLAUSE_LINEAR_STEP (c
);
5656 tree stept
= TREE_TYPE (t
);
5657 tree ct
= omp_find_clause (clauses
,
5658 OMP_CLAUSE__LOOPTEMP_
);
5660 tree l
= OMP_CLAUSE_DECL (ct
);
5661 tree n1
= fd
->loop
.n1
;
5662 tree step
= fd
->loop
.step
;
5663 tree itype
= TREE_TYPE (l
);
5664 if (POINTER_TYPE_P (itype
))
5665 itype
= signed_type_for (itype
);
5666 l
= fold_build2 (MINUS_EXPR
, itype
, l
, n1
);
5667 if (TYPE_UNSIGNED (itype
)
5668 && fd
->loop
.cond_code
== GT_EXPR
)
5669 l
= fold_build2 (TRUNC_DIV_EXPR
, itype
,
5670 fold_build1 (NEGATE_EXPR
, itype
, l
),
5671 fold_build1 (NEGATE_EXPR
,
5674 l
= fold_build2 (TRUNC_DIV_EXPR
, itype
, l
, step
);
5675 t
= fold_build2 (MULT_EXPR
, stept
,
5676 fold_convert (stept
, l
), t
);
5678 if (OMP_CLAUSE_LINEAR_ARRAY (c
))
5680 if (omp_is_reference (var
))
5682 gcc_assert (TREE_CODE (new_var
) == MEM_REF
);
5683 tree new_vard
= TREE_OPERAND (new_var
, 0);
5684 gcc_assert (DECL_P (new_vard
));
5685 tree type
= TREE_TYPE (TREE_TYPE (new_vard
));
5686 nx
= TYPE_SIZE_UNIT (type
);
5687 if (TREE_CONSTANT (nx
))
5689 nx
= create_tmp_var_raw (type
,
5691 gimple_add_tmp_var (nx
);
5692 TREE_ADDRESSABLE (nx
) = 1;
5693 nx
= build_fold_addr_expr_loc (clause_loc
,
5695 nx
= fold_convert_loc (clause_loc
,
5696 TREE_TYPE (new_vard
),
5698 gimplify_assign (new_vard
, nx
, ilist
);
5702 x
= lang_hooks
.decls
.omp_clause_linear_ctor
5704 gimplify_and_add (x
, ilist
);
5708 if (POINTER_TYPE_P (TREE_TYPE (x
)))
5709 x
= fold_build2 (POINTER_PLUS_EXPR
,
5710 TREE_TYPE (x
), x
, t
);
5712 x
= fold_build2 (PLUS_EXPR
, TREE_TYPE (x
), x
, t
);
5715 if ((OMP_CLAUSE_CODE (c
) != OMP_CLAUSE_LINEAR
5716 || TREE_ADDRESSABLE (new_var
)
5717 || omp_is_reference (var
))
5718 && lower_rec_simd_input_clauses (new_var
, ctx
, &sctx
,
5721 if (omp_is_reference (var
))
5723 gcc_assert (TREE_CODE (new_var
) == MEM_REF
);
5724 tree new_vard
= TREE_OPERAND (new_var
, 0);
5725 gcc_assert (DECL_P (new_vard
));
5726 SET_DECL_VALUE_EXPR (new_vard
,
5727 build_fold_addr_expr (lvar
));
5728 DECL_HAS_VALUE_EXPR_P (new_vard
) = 1;
5730 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LINEAR
)
5732 tree iv
= create_tmp_var (TREE_TYPE (new_var
));
5733 x
= lang_hooks
.decls
.omp_clause_copy_ctor (c
, iv
, x
);
5734 gimplify_and_add (x
, ilist
);
5735 gimple_stmt_iterator gsi
5736 = gsi_start_1 (gimple_omp_body_ptr (ctx
->stmt
));
5738 = gimple_build_assign (unshare_expr (lvar
), iv
);
5739 gsi_insert_before_without_update (&gsi
, g
,
5741 tree t
= OMP_CLAUSE_LINEAR_STEP (c
);
5742 enum tree_code code
= PLUS_EXPR
;
5743 if (POINTER_TYPE_P (TREE_TYPE (new_var
)))
5744 code
= POINTER_PLUS_EXPR
;
5745 g
= gimple_build_assign (iv
, code
, iv
, t
);
5746 gsi_insert_before_without_update (&gsi
, g
,
5750 x
= lang_hooks
.decls
.omp_clause_copy_ctor
5751 (c
, unshare_expr (ivar
), x
);
5752 gimplify_and_add (x
, &llist
[0]);
5753 x
= lang_hooks
.decls
.omp_clause_dtor (c
, ivar
);
5755 gimplify_and_add (x
, &llist
[1]);
5758 if (omp_is_reference (var
))
5760 gcc_assert (TREE_CODE (new_var
) == MEM_REF
);
5761 tree new_vard
= TREE_OPERAND (new_var
, 0);
5762 gcc_assert (DECL_P (new_vard
));
5763 tree type
= TREE_TYPE (TREE_TYPE (new_vard
));
5764 nx
= TYPE_SIZE_UNIT (type
);
5765 if (TREE_CONSTANT (nx
))
5767 nx
= create_tmp_var_raw (type
, get_name (var
));
5768 gimple_add_tmp_var (nx
);
5769 TREE_ADDRESSABLE (nx
) = 1;
5770 nx
= build_fold_addr_expr_loc (clause_loc
, nx
);
5771 nx
= fold_convert_loc (clause_loc
,
5772 TREE_TYPE (new_vard
), nx
);
5773 gimplify_assign (new_vard
, nx
, ilist
);
5777 x
= lang_hooks
.decls
.omp_clause_copy_ctor
5778 (c
, unshare_expr (new_var
), x
);
5779 gimplify_and_add (x
, ilist
);
5782 case OMP_CLAUSE__LOOPTEMP_
:
5783 case OMP_CLAUSE__REDUCTEMP_
:
5784 gcc_assert (is_taskreg_ctx (ctx
));
5785 x
= build_outer_var_ref (var
, ctx
);
5786 x
= build2 (MODIFY_EXPR
, TREE_TYPE (new_var
), new_var
, x
);
5787 gimplify_and_add (x
, ilist
);
5790 case OMP_CLAUSE_COPYIN
:
5791 by_ref
= use_pointer_for_field (var
, NULL
);
5792 x
= build_receiver_ref (var
, by_ref
, ctx
);
5793 x
= lang_hooks
.decls
.omp_clause_assign_op (c
, new_var
, x
);
5794 append_to_statement_list (x
, ©in_seq
);
5795 copyin_by_ref
|= by_ref
;
5798 case OMP_CLAUSE_REDUCTION
:
5799 case OMP_CLAUSE_IN_REDUCTION
:
5800 /* OpenACC reductions are initialized using the
5801 GOACC_REDUCTION internal function. */
5802 if (is_gimple_omp_oacc (ctx
->stmt
))
5804 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
))
5806 tree placeholder
= OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
);
5808 tree ptype
= TREE_TYPE (placeholder
);
5811 x
= error_mark_node
;
5812 if (OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c
)
5813 && !task_reduction_needs_orig_p
)
5815 else if (OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c
))
5817 tree pptype
= build_pointer_type (ptype
);
5818 if (OMP_CLAUSE_CODE (c
) != OMP_CLAUSE_REDUCTION
)
5819 x
= build4 (ARRAY_REF
, ptr_type_node
, tskred_avar
,
5820 size_int (task_reduction_cnt_full
5821 + task_reduction_cntorig
- 1),
5822 NULL_TREE
, NULL_TREE
);
5826 = *ctx
->task_reduction_map
->get (c
);
5827 x
= task_reduction_read (ilist
, tskred_temp
,
5828 pptype
, 7 + 3 * idx
);
5830 x
= fold_convert (pptype
, x
);
5831 x
= build_simple_mem_ref (x
);
5836 lower_private_allocate (var
, new_var
, allocator
,
5837 allocate_ptr
, ilist
, ctx
, false,
5839 x
= build_outer_var_ref (var
, ctx
);
5841 if (omp_is_reference (var
)
5842 && !useless_type_conversion_p (ptype
, TREE_TYPE (x
)))
5843 x
= build_fold_addr_expr_loc (clause_loc
, x
);
5845 SET_DECL_VALUE_EXPR (placeholder
, x
);
5846 DECL_HAS_VALUE_EXPR_P (placeholder
) = 1;
5847 tree new_vard
= new_var
;
5848 if (omp_is_reference (var
))
5850 gcc_assert (TREE_CODE (new_var
) == MEM_REF
);
5851 new_vard
= TREE_OPERAND (new_var
, 0);
5852 gcc_assert (DECL_P (new_vard
));
5854 tree rvar
= NULL_TREE
, *rvarp
= NULL
, rvar2
= NULL_TREE
;
5856 && OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_REDUCTION
5857 && OMP_CLAUSE_REDUCTION_INSCAN (c
))
5860 && lower_rec_simd_input_clauses (new_var
, ctx
, &sctx
,
5864 if (new_vard
== new_var
)
5866 gcc_assert (DECL_VALUE_EXPR (new_var
) == lvar
);
5867 SET_DECL_VALUE_EXPR (new_var
, ivar
);
5871 SET_DECL_VALUE_EXPR (new_vard
,
5872 build_fold_addr_expr (ivar
));
5873 DECL_HAS_VALUE_EXPR_P (new_vard
) = 1;
5875 x
= lang_hooks
.decls
.omp_clause_default_ctor
5876 (c
, unshare_expr (ivar
),
5877 build_outer_var_ref (var
, ctx
));
5878 if (rvarp
&& ctx
->for_simd_scan_phase
)
5881 gimplify_and_add (x
, &llist
[0]);
5882 x
= lang_hooks
.decls
.omp_clause_dtor (c
, ivar
);
5884 gimplify_and_add (x
, &llist
[1]);
5891 gimplify_and_add (x
, &llist
[0]);
5893 tree ivar2
= unshare_expr (lvar
);
5894 TREE_OPERAND (ivar2
, 1) = sctx
.idx
;
5895 x
= lang_hooks
.decls
.omp_clause_default_ctor
5896 (c
, ivar2
, build_outer_var_ref (var
, ctx
));
5897 gimplify_and_add (x
, &llist
[0]);
5901 x
= lang_hooks
.decls
.omp_clause_default_ctor
5902 (c
, unshare_expr (rvar2
),
5903 build_outer_var_ref (var
, ctx
));
5904 gimplify_and_add (x
, &llist
[0]);
5907 /* For types that need construction, add another
5908 private var which will be default constructed
5909 and optionally initialized with
5910 OMP_CLAUSE_REDUCTION_GIMPLE_INIT, as in the
5911 loop we want to assign this value instead of
5912 constructing and destructing it in each
5914 tree nv
= create_tmp_var_raw (TREE_TYPE (ivar
));
5915 gimple_add_tmp_var (nv
);
5916 ctx
->cb
.decl_map
->put (TREE_OPERAND (rvar2
5920 x
= lang_hooks
.decls
.omp_clause_default_ctor
5921 (c
, nv
, build_outer_var_ref (var
, ctx
));
5922 gimplify_and_add (x
, ilist
);
5924 if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c
))
5926 tseq
= OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c
);
5927 x
= DECL_VALUE_EXPR (new_vard
);
5929 if (new_vard
!= new_var
)
5930 vexpr
= build_fold_addr_expr (nv
);
5931 SET_DECL_VALUE_EXPR (new_vard
, vexpr
);
5932 lower_omp (&tseq
, ctx
);
5933 SET_DECL_VALUE_EXPR (new_vard
, x
);
5934 gimple_seq_add_seq (ilist
, tseq
);
5935 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c
) = NULL
;
5938 x
= lang_hooks
.decls
.omp_clause_dtor (c
, nv
);
5940 gimplify_and_add (x
, dlist
);
5943 tree ref
= build_outer_var_ref (var
, ctx
);
5944 x
= unshare_expr (ivar
);
5945 x
= lang_hooks
.decls
.omp_clause_assign_op (c
, x
,
5947 gimplify_and_add (x
, &llist
[0]);
5949 ref
= build_outer_var_ref (var
, ctx
);
5950 x
= lang_hooks
.decls
.omp_clause_assign_op (c
, ref
,
5952 gimplify_and_add (x
, &llist
[3]);
5954 DECL_HAS_VALUE_EXPR_P (placeholder
) = 0;
5955 if (new_vard
== new_var
)
5956 SET_DECL_VALUE_EXPR (new_var
, lvar
);
5958 SET_DECL_VALUE_EXPR (new_vard
,
5959 build_fold_addr_expr (lvar
));
5961 x
= lang_hooks
.decls
.omp_clause_dtor (c
, ivar
);
5963 gimplify_and_add (x
, &llist
[1]);
5965 tree ivar2
= unshare_expr (lvar
);
5966 TREE_OPERAND (ivar2
, 1) = sctx
.idx
;
5967 x
= lang_hooks
.decls
.omp_clause_dtor (c
, ivar2
);
5969 gimplify_and_add (x
, &llist
[1]);
5973 x
= lang_hooks
.decls
.omp_clause_dtor (c
, rvar2
);
5975 gimplify_and_add (x
, &llist
[1]);
5980 gimplify_and_add (x
, &llist
[0]);
5981 if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c
))
5983 tseq
= OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c
);
5984 lower_omp (&tseq
, ctx
);
5985 gimple_seq_add_seq (&llist
[0], tseq
);
5987 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c
) = NULL
;
5988 tseq
= OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c
);
5989 lower_omp (&tseq
, ctx
);
5990 gimple_seq_add_seq (&llist
[1], tseq
);
5991 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c
) = NULL
;
5992 DECL_HAS_VALUE_EXPR_P (placeholder
) = 0;
5993 if (new_vard
== new_var
)
5994 SET_DECL_VALUE_EXPR (new_var
, lvar
);
5996 SET_DECL_VALUE_EXPR (new_vard
,
5997 build_fold_addr_expr (lvar
));
5998 x
= lang_hooks
.decls
.omp_clause_dtor (c
, ivar
);
6000 gimplify_and_add (x
, &llist
[1]);
6003 /* If this is a reference to constant size reduction var
6004 with placeholder, we haven't emitted the initializer
6005 for it because it is undesirable if SIMD arrays are used.
6006 But if they aren't used, we need to emit the deferred
6007 initialization now. */
6008 else if (omp_is_reference (var
) && is_simd
)
6009 handle_simd_reference (clause_loc
, new_vard
, ilist
);
6011 tree lab2
= NULL_TREE
;
6015 if (!is_parallel_ctx (ctx
))
6017 tree condv
= create_tmp_var (boolean_type_node
);
6018 tree m
= build_simple_mem_ref (cond
);
6019 g
= gimple_build_assign (condv
, m
);
6020 gimple_seq_add_stmt (ilist
, g
);
6022 = create_artificial_label (UNKNOWN_LOCATION
);
6023 lab2
= create_artificial_label (UNKNOWN_LOCATION
);
6024 g
= gimple_build_cond (NE_EXPR
, condv
,
6027 gimple_seq_add_stmt (ilist
, g
);
6028 gimple_seq_add_stmt (ilist
,
6029 gimple_build_label (lab1
));
6031 g
= gimple_build_assign (build_simple_mem_ref (cond
),
6033 gimple_seq_add_stmt (ilist
, g
);
6035 x
= lang_hooks
.decls
.omp_clause_default_ctor
6036 (c
, unshare_expr (new_var
),
6038 : build_outer_var_ref (var
, ctx
));
6040 gimplify_and_add (x
, ilist
);
6042 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_REDUCTION
6043 && OMP_CLAUSE_REDUCTION_INSCAN (c
))
6045 if (ctx
->for_simd_scan_phase
)
6048 && OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c
)))
6050 tree nv
= create_tmp_var_raw (TREE_TYPE (new_var
));
6051 gimple_add_tmp_var (nv
);
6052 ctx
->cb
.decl_map
->put (new_vard
, nv
);
6053 x
= lang_hooks
.decls
.omp_clause_default_ctor
6054 (c
, nv
, build_outer_var_ref (var
, ctx
));
6056 gimplify_and_add (x
, ilist
);
6057 if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c
))
6059 tseq
= OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c
);
6061 if (new_vard
!= new_var
)
6062 vexpr
= build_fold_addr_expr (nv
);
6063 SET_DECL_VALUE_EXPR (new_vard
, vexpr
);
6064 DECL_HAS_VALUE_EXPR_P (new_vard
) = 1;
6065 lower_omp (&tseq
, ctx
);
6066 SET_DECL_VALUE_EXPR (new_vard
, NULL_TREE
);
6067 DECL_HAS_VALUE_EXPR_P (new_vard
) = 0;
6068 gimple_seq_add_seq (ilist
, tseq
);
6070 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c
) = NULL
;
6071 if (is_simd
&& ctx
->scan_exclusive
)
6074 = create_tmp_var_raw (TREE_TYPE (new_var
));
6075 gimple_add_tmp_var (nv2
);
6076 ctx
->cb
.decl_map
->put (nv
, nv2
);
6077 x
= lang_hooks
.decls
.omp_clause_default_ctor
6078 (c
, nv2
, build_outer_var_ref (var
, ctx
));
6079 gimplify_and_add (x
, ilist
);
6080 x
= lang_hooks
.decls
.omp_clause_dtor (c
, nv2
);
6082 gimplify_and_add (x
, dlist
);
6084 x
= lang_hooks
.decls
.omp_clause_dtor (c
, nv
);
6086 gimplify_and_add (x
, dlist
);
6089 && ctx
->scan_exclusive
6090 && TREE_ADDRESSABLE (TREE_TYPE (new_var
)))
6092 tree nv2
= create_tmp_var_raw (TREE_TYPE (new_var
));
6093 gimple_add_tmp_var (nv2
);
6094 ctx
->cb
.decl_map
->put (new_vard
, nv2
);
6095 x
= lang_hooks
.decls
.omp_clause_dtor (c
, nv2
);
6097 gimplify_and_add (x
, dlist
);
6099 DECL_HAS_VALUE_EXPR_P (placeholder
) = 0;
6103 if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c
))
6105 tseq
= OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c
);
6106 lower_omp (&tseq
, ctx
);
6107 gimple_seq_add_seq (ilist
, tseq
);
6109 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c
) = NULL
;
6112 tseq
= OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c
);
6113 lower_omp (&tseq
, ctx
);
6114 gimple_seq_add_seq (dlist
, tseq
);
6115 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c
) = NULL
;
6117 DECL_HAS_VALUE_EXPR_P (placeholder
) = 0;
6121 gimple_seq_add_stmt (ilist
, gimple_build_label (lab2
));
6128 x
= omp_reduction_init (c
, TREE_TYPE (new_var
));
6129 gcc_assert (TREE_CODE (TREE_TYPE (new_var
)) != ARRAY_TYPE
);
6130 enum tree_code code
= OMP_CLAUSE_REDUCTION_CODE (c
);
6135 tree lab2
= NULL_TREE
;
6136 /* GOMP_taskgroup_reduction_register memsets the whole
6137 array to zero. If the initializer is zero, we don't
6138 need to initialize it again, just mark it as ever
6139 used unconditionally, i.e. cond = true. */
6140 if (initializer_zerop (x
))
6142 g
= gimple_build_assign (build_simple_mem_ref (cond
),
6144 gimple_seq_add_stmt (ilist
, g
);
6149 if (!cond) { cond = true; new_var = x; } */
6150 if (!is_parallel_ctx (ctx
))
6152 tree condv
= create_tmp_var (boolean_type_node
);
6153 tree m
= build_simple_mem_ref (cond
);
6154 g
= gimple_build_assign (condv
, m
);
6155 gimple_seq_add_stmt (ilist
, g
);
6157 = create_artificial_label (UNKNOWN_LOCATION
);
6158 lab2
= create_artificial_label (UNKNOWN_LOCATION
);
6159 g
= gimple_build_cond (NE_EXPR
, condv
,
6162 gimple_seq_add_stmt (ilist
, g
);
6163 gimple_seq_add_stmt (ilist
,
6164 gimple_build_label (lab1
));
6166 g
= gimple_build_assign (build_simple_mem_ref (cond
),
6168 gimple_seq_add_stmt (ilist
, g
);
6169 gimplify_assign (new_var
, x
, ilist
);
6171 gimple_seq_add_stmt (ilist
, gimple_build_label (lab2
));
6175 /* reduction(-:var) sums up the partial results, so it
6176 acts identically to reduction(+:var). */
6177 if (code
== MINUS_EXPR
)
6180 tree new_vard
= new_var
;
6181 if (is_simd
&& omp_is_reference (var
))
6183 gcc_assert (TREE_CODE (new_var
) == MEM_REF
);
6184 new_vard
= TREE_OPERAND (new_var
, 0);
6185 gcc_assert (DECL_P (new_vard
));
6187 tree rvar
= NULL_TREE
, *rvarp
= NULL
, rvar2
= NULL_TREE
;
6189 && OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_REDUCTION
6190 && OMP_CLAUSE_REDUCTION_INSCAN (c
))
6193 && lower_rec_simd_input_clauses (new_var
, ctx
, &sctx
,
6197 if (new_vard
!= new_var
)
6199 SET_DECL_VALUE_EXPR (new_vard
,
6200 build_fold_addr_expr (lvar
));
6201 DECL_HAS_VALUE_EXPR_P (new_vard
) = 1;
6204 tree ref
= build_outer_var_ref (var
, ctx
);
6208 if (ctx
->for_simd_scan_phase
)
6210 gimplify_assign (ivar
, ref
, &llist
[0]);
6211 ref
= build_outer_var_ref (var
, ctx
);
6212 gimplify_assign (ref
, rvar
, &llist
[3]);
6216 gimplify_assign (unshare_expr (ivar
), x
, &llist
[0]);
6221 simt_lane
= create_tmp_var (unsigned_type_node
);
6222 x
= build_call_expr_internal_loc
6223 (UNKNOWN_LOCATION
, IFN_GOMP_SIMT_XCHG_BFLY
,
6224 TREE_TYPE (ivar
), 2, ivar
, simt_lane
);
6225 x
= build2 (code
, TREE_TYPE (ivar
), ivar
, x
);
6226 gimplify_assign (ivar
, x
, &llist
[2]);
6228 x
= build2 (code
, TREE_TYPE (ref
), ref
, ivar
);
6229 ref
= build_outer_var_ref (var
, ctx
);
6230 gimplify_assign (ref
, x
, &llist
[1]);
6235 lower_private_allocate (var
, new_var
, allocator
,
6236 allocate_ptr
, ilist
, ctx
,
6238 if (omp_is_reference (var
) && is_simd
)
6239 handle_simd_reference (clause_loc
, new_vard
, ilist
);
6240 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_REDUCTION
6241 && OMP_CLAUSE_REDUCTION_INSCAN (c
))
6243 gimplify_assign (new_var
, x
, ilist
);
6246 tree ref
= build_outer_var_ref (var
, ctx
);
6248 x
= build2 (code
, TREE_TYPE (ref
), ref
, new_var
);
6249 ref
= build_outer_var_ref (var
, ctx
);
6250 gimplify_assign (ref
, x
, dlist
);
6265 tree clobber
= build_clobber (TREE_TYPE (tskred_avar
));
6266 gimple_seq_add_stmt (ilist
, gimple_build_assign (tskred_avar
, clobber
));
6269 if (known_eq (sctx
.max_vf
, 1U))
6271 sctx
.is_simt
= false;
6272 if (ctx
->lastprivate_conditional_map
)
6274 if (gimple_omp_for_combined_into_p (ctx
->stmt
))
6276 /* Signal to lower_omp_1 that it should use parent context. */
6277 ctx
->combined_into_simd_safelen1
= true;
6278 for (c
= clauses
; c
; c
= OMP_CLAUSE_CHAIN (c
))
6279 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LASTPRIVATE
6280 && OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c
))
6282 tree o
= lookup_decl (OMP_CLAUSE_DECL (c
), ctx
);
6283 omp_context
*outer
= ctx
->outer
;
6284 if (gimple_code (outer
->stmt
) == GIMPLE_OMP_SCAN
)
6285 outer
= outer
->outer
;
6286 tree
*v
= ctx
->lastprivate_conditional_map
->get (o
);
6287 tree po
= lookup_decl (OMP_CLAUSE_DECL (c
), outer
);
6288 tree
*pv
= outer
->lastprivate_conditional_map
->get (po
);
6294 /* When not vectorized, treat lastprivate(conditional:) like
6295 normal lastprivate, as there will be just one simd lane
6296 writing the privatized variable. */
6297 delete ctx
->lastprivate_conditional_map
;
6298 ctx
->lastprivate_conditional_map
= NULL
;
6303 if (nonconst_simd_if
)
6305 if (sctx
.lane
== NULL_TREE
)
6307 sctx
.idx
= create_tmp_var (unsigned_type_node
);
6308 sctx
.lane
= create_tmp_var (unsigned_type_node
);
6310 /* FIXME: For now. */
6311 sctx
.is_simt
= false;
6314 if (sctx
.lane
|| sctx
.is_simt
)
6316 uid
= create_tmp_var (ptr_type_node
, "simduid");
6317 /* Don't want uninit warnings on simduid, it is always uninitialized,
6318 but we use it not for the value, but for the DECL_UID only. */
6319 TREE_NO_WARNING (uid
) = 1;
6320 c
= build_omp_clause (UNKNOWN_LOCATION
, OMP_CLAUSE__SIMDUID_
);
6321 OMP_CLAUSE__SIMDUID__DECL (c
) = uid
;
6322 OMP_CLAUSE_CHAIN (c
) = gimple_omp_for_clauses (ctx
->stmt
);
6323 gimple_omp_for_set_clauses (ctx
->stmt
, c
);
6325 /* Emit calls denoting privatized variables and initializing a pointer to
6326 structure that holds private variables as fields after ompdevlow pass. */
6329 sctx
.simt_eargs
[0] = uid
;
6331 = gimple_build_call_internal_vec (IFN_GOMP_SIMT_ENTER
, sctx
.simt_eargs
);
6332 gimple_call_set_lhs (g
, uid
);
6333 gimple_seq_add_stmt (ilist
, g
);
6334 sctx
.simt_eargs
.release ();
6336 simtrec
= create_tmp_var (ptr_type_node
, ".omp_simt");
6337 g
= gimple_build_call_internal (IFN_GOMP_SIMT_ENTER_ALLOC
, 1, uid
);
6338 gimple_call_set_lhs (g
, simtrec
);
6339 gimple_seq_add_stmt (ilist
, g
);
6343 gimple
*g
= gimple_build_call_internal (IFN_GOMP_SIMD_LANE
,
6344 2 + (nonconst_simd_if
!= NULL
),
6345 uid
, integer_zero_node
,
6347 gimple_call_set_lhs (g
, sctx
.lane
);
6348 gimple_stmt_iterator gsi
= gsi_start_1 (gimple_omp_body_ptr (ctx
->stmt
));
6349 gsi_insert_before_without_update (&gsi
, g
, GSI_SAME_STMT
);
6350 g
= gimple_build_assign (sctx
.lane
, INTEGER_CST
,
6351 build_int_cst (unsigned_type_node
, 0));
6352 gimple_seq_add_stmt (ilist
, g
);
6355 g
= gimple_build_call_internal (IFN_GOMP_SIMD_LAST_LANE
,
6357 gimple_call_set_lhs (g
, sctx
.lastlane
);
6358 gimple_seq_add_stmt (dlist
, g
);
6359 gimple_seq_add_seq (dlist
, llist
[3]);
6361 /* Emit reductions across SIMT lanes in log_2(simt_vf) steps. */
6364 tree simt_vf
= create_tmp_var (unsigned_type_node
);
6365 g
= gimple_build_call_internal (IFN_GOMP_SIMT_VF
, 0);
6366 gimple_call_set_lhs (g
, simt_vf
);
6367 gimple_seq_add_stmt (dlist
, g
);
6369 tree t
= build_int_cst (unsigned_type_node
, 1);
6370 g
= gimple_build_assign (simt_lane
, INTEGER_CST
, t
);
6371 gimple_seq_add_stmt (dlist
, g
);
6373 t
= build_int_cst (unsigned_type_node
, 0);
6374 g
= gimple_build_assign (sctx
.idx
, INTEGER_CST
, t
);
6375 gimple_seq_add_stmt (dlist
, g
);
6377 tree body
= create_artificial_label (UNKNOWN_LOCATION
);
6378 tree header
= create_artificial_label (UNKNOWN_LOCATION
);
6379 tree end
= create_artificial_label (UNKNOWN_LOCATION
);
6380 gimple_seq_add_stmt (dlist
, gimple_build_goto (header
));
6381 gimple_seq_add_stmt (dlist
, gimple_build_label (body
));
6383 gimple_seq_add_seq (dlist
, llist
[2]);
6385 g
= gimple_build_assign (simt_lane
, LSHIFT_EXPR
, simt_lane
, integer_one_node
);
6386 gimple_seq_add_stmt (dlist
, g
);
6388 gimple_seq_add_stmt (dlist
, gimple_build_label (header
));
6389 g
= gimple_build_cond (LT_EXPR
, simt_lane
, simt_vf
, body
, end
);
6390 gimple_seq_add_stmt (dlist
, g
);
6392 gimple_seq_add_stmt (dlist
, gimple_build_label (end
));
6394 for (int i
= 0; i
< 2; i
++)
6397 tree vf
= create_tmp_var (unsigned_type_node
);
6398 g
= gimple_build_call_internal (IFN_GOMP_SIMD_VF
, 1, uid
);
6399 gimple_call_set_lhs (g
, vf
);
6400 gimple_seq
*seq
= i
== 0 ? ilist
: dlist
;
6401 gimple_seq_add_stmt (seq
, g
);
6402 tree t
= build_int_cst (unsigned_type_node
, 0);
6403 g
= gimple_build_assign (sctx
.idx
, INTEGER_CST
, t
);
6404 gimple_seq_add_stmt (seq
, g
);
6405 tree body
= create_artificial_label (UNKNOWN_LOCATION
);
6406 tree header
= create_artificial_label (UNKNOWN_LOCATION
);
6407 tree end
= create_artificial_label (UNKNOWN_LOCATION
);
6408 gimple_seq_add_stmt (seq
, gimple_build_goto (header
));
6409 gimple_seq_add_stmt (seq
, gimple_build_label (body
));
6410 gimple_seq_add_seq (seq
, llist
[i
]);
6411 t
= build_int_cst (unsigned_type_node
, 1);
6412 g
= gimple_build_assign (sctx
.idx
, PLUS_EXPR
, sctx
.idx
, t
);
6413 gimple_seq_add_stmt (seq
, g
);
6414 gimple_seq_add_stmt (seq
, gimple_build_label (header
));
6415 g
= gimple_build_cond (LT_EXPR
, sctx
.idx
, vf
, body
, end
);
6416 gimple_seq_add_stmt (seq
, g
);
6417 gimple_seq_add_stmt (seq
, gimple_build_label (end
));
6422 gimple_seq_add_seq (dlist
, sctx
.simt_dlist
);
6424 = gimple_build_call_internal (IFN_GOMP_SIMT_EXIT
, 1, simtrec
);
6425 gimple_seq_add_stmt (dlist
, g
);
6428 /* The copyin sequence is not to be executed by the main thread, since
6429 that would result in self-copies. Perhaps not visible to scalars,
6430 but it certainly is to C++ operator=. */
6433 x
= build_call_expr (builtin_decl_explicit (BUILT_IN_OMP_GET_THREAD_NUM
),
6435 x
= build2 (NE_EXPR
, boolean_type_node
, x
,
6436 build_int_cst (TREE_TYPE (x
), 0));
6437 x
= build3 (COND_EXPR
, void_type_node
, x
, copyin_seq
, NULL
);
6438 gimplify_and_add (x
, ilist
);
6441 /* If any copyin variable is passed by reference, we must ensure the
6442 master thread doesn't modify it before it is copied over in all
6443 threads. Similarly for variables in both firstprivate and
6444 lastprivate clauses we need to ensure the lastprivate copying
6445 happens after firstprivate copying in all threads. And similarly
6446 for UDRs if initializer expression refers to omp_orig. */
6447 if (copyin_by_ref
|| lastprivate_firstprivate
6448 || (reduction_omp_orig_ref
6449 && !ctx
->scan_inclusive
6450 && !ctx
->scan_exclusive
))
6452 /* Don't add any barrier for #pragma omp simd or
6453 #pragma omp distribute. */
6454 if (!is_task_ctx (ctx
)
6455 && (gimple_code (ctx
->stmt
) != GIMPLE_OMP_FOR
6456 || gimple_omp_for_kind (ctx
->stmt
) == GF_OMP_FOR_KIND_FOR
))
6457 gimple_seq_add_stmt (ilist
, omp_build_barrier (NULL_TREE
));
6460 /* If max_vf is non-zero, then we can use only a vectorization factor
6461 up to the max_vf we chose. So stick it into the safelen clause. */
6462 if (maybe_ne (sctx
.max_vf
, 0U))
6464 tree c
= omp_find_clause (gimple_omp_for_clauses (ctx
->stmt
),
6465 OMP_CLAUSE_SAFELEN
);
6466 poly_uint64 safe_len
;
6468 || (poly_int_tree_p (OMP_CLAUSE_SAFELEN_EXPR (c
), &safe_len
)
6469 && maybe_gt (safe_len
, sctx
.max_vf
)))
6471 c
= build_omp_clause (UNKNOWN_LOCATION
, OMP_CLAUSE_SAFELEN
);
6472 OMP_CLAUSE_SAFELEN_EXPR (c
) = build_int_cst (integer_type_node
,
6474 OMP_CLAUSE_CHAIN (c
) = gimple_omp_for_clauses (ctx
->stmt
);
6475 gimple_omp_for_set_clauses (ctx
->stmt
, c
);
6480 /* Create temporary variables for lastprivate(conditional:) implementation
6481 in context CTX with CLAUSES. */
6484 lower_lastprivate_conditional_clauses (tree
*clauses
, omp_context
*ctx
)
6486 tree iter_type
= NULL_TREE
;
6487 tree cond_ptr
= NULL_TREE
;
6488 tree iter_var
= NULL_TREE
;
6489 bool is_simd
= (gimple_code (ctx
->stmt
) == GIMPLE_OMP_FOR
6490 && gimple_omp_for_kind (ctx
->stmt
) == GF_OMP_FOR_KIND_SIMD
);
6491 tree next
= *clauses
;
6492 for (tree c
= *clauses
; c
; c
= OMP_CLAUSE_CHAIN (c
))
6493 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LASTPRIVATE
6494 && OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c
))
6498 tree cc
= omp_find_clause (next
, OMP_CLAUSE__CONDTEMP_
);
6500 if (iter_type
== NULL_TREE
)
6502 iter_type
= TREE_TYPE (OMP_CLAUSE_DECL (cc
));
6503 iter_var
= create_tmp_var_raw (iter_type
);
6504 DECL_CONTEXT (iter_var
) = current_function_decl
;
6505 DECL_SEEN_IN_BIND_EXPR_P (iter_var
) = 1;
6506 DECL_CHAIN (iter_var
) = ctx
->block_vars
;
6507 ctx
->block_vars
= iter_var
;
6509 = build_omp_clause (UNKNOWN_LOCATION
, OMP_CLAUSE__CONDTEMP_
);
6510 OMP_CLAUSE__CONDTEMP__ITER (c3
) = 1;
6511 OMP_CLAUSE_DECL (c3
) = iter_var
;
6512 OMP_CLAUSE_CHAIN (c3
) = *clauses
;
6514 ctx
->lastprivate_conditional_map
= new hash_map
<tree
, tree
>;
6516 next
= OMP_CLAUSE_CHAIN (cc
);
6517 tree o
= lookup_decl (OMP_CLAUSE_DECL (c
), ctx
);
6518 tree v
= lookup_decl (OMP_CLAUSE_DECL (cc
), ctx
);
6519 ctx
->lastprivate_conditional_map
->put (o
, v
);
6522 if (iter_type
== NULL
)
6524 if (gimple_code (ctx
->stmt
) == GIMPLE_OMP_FOR
)
6526 struct omp_for_data fd
;
6527 omp_extract_for_data (as_a
<gomp_for
*> (ctx
->stmt
), &fd
,
6529 iter_type
= unsigned_type_for (fd
.iter_type
);
6531 else if (gimple_code (ctx
->stmt
) == GIMPLE_OMP_SECTIONS
)
6532 iter_type
= unsigned_type_node
;
6533 tree c2
= omp_find_clause (*clauses
, OMP_CLAUSE__CONDTEMP_
);
6537 = lookup_decl_in_outer_ctx (OMP_CLAUSE_DECL (c2
), ctx
);
6538 OMP_CLAUSE_DECL (c2
) = cond_ptr
;
6542 cond_ptr
= create_tmp_var_raw (build_pointer_type (iter_type
));
6543 DECL_CONTEXT (cond_ptr
) = current_function_decl
;
6544 DECL_SEEN_IN_BIND_EXPR_P (cond_ptr
) = 1;
6545 DECL_CHAIN (cond_ptr
) = ctx
->block_vars
;
6546 ctx
->block_vars
= cond_ptr
;
6547 c2
= build_omp_clause (UNKNOWN_LOCATION
,
6548 OMP_CLAUSE__CONDTEMP_
);
6549 OMP_CLAUSE_DECL (c2
) = cond_ptr
;
6550 OMP_CLAUSE_CHAIN (c2
) = *clauses
;
6553 iter_var
= create_tmp_var_raw (iter_type
);
6554 DECL_CONTEXT (iter_var
) = current_function_decl
;
6555 DECL_SEEN_IN_BIND_EXPR_P (iter_var
) = 1;
6556 DECL_CHAIN (iter_var
) = ctx
->block_vars
;
6557 ctx
->block_vars
= iter_var
;
6559 = build_omp_clause (UNKNOWN_LOCATION
, OMP_CLAUSE__CONDTEMP_
);
6560 OMP_CLAUSE__CONDTEMP__ITER (c3
) = 1;
6561 OMP_CLAUSE_DECL (c3
) = iter_var
;
6562 OMP_CLAUSE_CHAIN (c3
) = OMP_CLAUSE_CHAIN (c2
);
6563 OMP_CLAUSE_CHAIN (c2
) = c3
;
6564 ctx
->lastprivate_conditional_map
= new hash_map
<tree
, tree
>;
6566 tree v
= create_tmp_var_raw (iter_type
);
6567 DECL_CONTEXT (v
) = current_function_decl
;
6568 DECL_SEEN_IN_BIND_EXPR_P (v
) = 1;
6569 DECL_CHAIN (v
) = ctx
->block_vars
;
6570 ctx
->block_vars
= v
;
6571 tree o
= lookup_decl (OMP_CLAUSE_DECL (c
), ctx
);
6572 ctx
->lastprivate_conditional_map
->put (o
, v
);
6577 /* Generate code to implement the LASTPRIVATE clauses. This is used for
6578 both parallel and workshare constructs. PREDICATE may be NULL if it's
6579 always true. BODY_P is the sequence to insert early initialization
6580 if needed, STMT_LIST is where the non-conditional lastprivate handling
6581 goes into and CSTMT_LIST is a sequence that needs to be run in a critical
6585 lower_lastprivate_clauses (tree clauses
, tree predicate
, gimple_seq
*body_p
,
6586 gimple_seq
*stmt_list
, gimple_seq
*cstmt_list
,
6589 tree x
, c
, label
= NULL
, orig_clauses
= clauses
;
6590 bool par_clauses
= false;
6591 tree simduid
= NULL
, lastlane
= NULL
, simtcond
= NULL
, simtlast
= NULL
;
6592 unsigned HOST_WIDE_INT conditional_off
= 0;
6593 gimple_seq post_stmt_list
= NULL
;
6595 /* Early exit if there are no lastprivate or linear clauses. */
6596 for (; clauses
; clauses
= OMP_CLAUSE_CHAIN (clauses
))
6597 if (OMP_CLAUSE_CODE (clauses
) == OMP_CLAUSE_LASTPRIVATE
6598 || (OMP_CLAUSE_CODE (clauses
) == OMP_CLAUSE_LINEAR
6599 && !OMP_CLAUSE_LINEAR_NO_COPYOUT (clauses
)))
6601 if (clauses
== NULL
)
6603 /* If this was a workshare clause, see if it had been combined
6604 with its parallel. In that case, look for the clauses on the
6605 parallel statement itself. */
6606 if (is_parallel_ctx (ctx
))
6610 if (ctx
== NULL
|| !is_parallel_ctx (ctx
))
6613 clauses
= omp_find_clause (gimple_omp_parallel_clauses (ctx
->stmt
),
6614 OMP_CLAUSE_LASTPRIVATE
);
6615 if (clauses
== NULL
)
6620 bool maybe_simt
= false;
6621 if (gimple_code (ctx
->stmt
) == GIMPLE_OMP_FOR
6622 && gimple_omp_for_kind (ctx
->stmt
) == GF_OMP_FOR_KIND_SIMD
)
6624 maybe_simt
= omp_find_clause (orig_clauses
, OMP_CLAUSE__SIMT_
);
6625 simduid
= omp_find_clause (orig_clauses
, OMP_CLAUSE__SIMDUID_
);
6627 simduid
= OMP_CLAUSE__SIMDUID__DECL (simduid
);
6633 tree label_true
, arm1
, arm2
;
6634 enum tree_code pred_code
= TREE_CODE (predicate
);
6636 label
= create_artificial_label (UNKNOWN_LOCATION
);
6637 label_true
= create_artificial_label (UNKNOWN_LOCATION
);
6638 if (TREE_CODE_CLASS (pred_code
) == tcc_comparison
)
6640 arm1
= TREE_OPERAND (predicate
, 0);
6641 arm2
= TREE_OPERAND (predicate
, 1);
6642 gimplify_expr (&arm1
, stmt_list
, NULL
, is_gimple_val
, fb_rvalue
);
6643 gimplify_expr (&arm2
, stmt_list
, NULL
, is_gimple_val
, fb_rvalue
);
6648 gimplify_expr (&arm1
, stmt_list
, NULL
, is_gimple_val
, fb_rvalue
);
6649 arm2
= boolean_false_node
;
6650 pred_code
= NE_EXPR
;
6654 c
= build2 (pred_code
, boolean_type_node
, arm1
, arm2
);
6655 c
= fold_convert (integer_type_node
, c
);
6656 simtcond
= create_tmp_var (integer_type_node
);
6657 gimplify_assign (simtcond
, c
, stmt_list
);
6658 gcall
*g
= gimple_build_call_internal (IFN_GOMP_SIMT_VOTE_ANY
,
6660 c
= create_tmp_var (integer_type_node
);
6661 gimple_call_set_lhs (g
, c
);
6662 gimple_seq_add_stmt (stmt_list
, g
);
6663 stmt
= gimple_build_cond (NE_EXPR
, c
, integer_zero_node
,
6667 stmt
= gimple_build_cond (pred_code
, arm1
, arm2
, label_true
, label
);
6668 gimple_seq_add_stmt (stmt_list
, stmt
);
6669 gimple_seq_add_stmt (stmt_list
, gimple_build_label (label_true
));
6672 tree cond_ptr
= NULL_TREE
;
6673 for (c
= clauses
; c
;)
6676 location_t clause_loc
= OMP_CLAUSE_LOCATION (c
);
6677 gimple_seq
*this_stmt_list
= stmt_list
;
6678 tree lab2
= NULL_TREE
;
6680 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LASTPRIVATE
6681 && OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c
)
6682 && ctx
->lastprivate_conditional_map
6683 && !ctx
->combined_into_simd_safelen1
)
6685 gcc_assert (body_p
);
6688 if (cond_ptr
== NULL_TREE
)
6690 cond_ptr
= omp_find_clause (orig_clauses
, OMP_CLAUSE__CONDTEMP_
);
6691 cond_ptr
= OMP_CLAUSE_DECL (cond_ptr
);
6693 tree type
= TREE_TYPE (TREE_TYPE (cond_ptr
));
6694 tree o
= lookup_decl (OMP_CLAUSE_DECL (c
), ctx
);
6695 tree v
= *ctx
->lastprivate_conditional_map
->get (o
);
6696 gimplify_assign (v
, build_zero_cst (type
), body_p
);
6697 this_stmt_list
= cstmt_list
;
6699 if (POINTER_TYPE_P (TREE_TYPE (cond_ptr
)))
6701 mem
= build2 (MEM_REF
, type
, cond_ptr
,
6702 build_int_cst (TREE_TYPE (cond_ptr
),
6704 conditional_off
+= tree_to_uhwi (TYPE_SIZE_UNIT (type
));
6707 mem
= build4 (ARRAY_REF
, type
, cond_ptr
,
6708 size_int (conditional_off
++), NULL_TREE
, NULL_TREE
);
6709 tree mem2
= copy_node (mem
);
6710 gimple_seq seq
= NULL
;
6711 mem
= force_gimple_operand (mem
, &seq
, true, NULL_TREE
);
6712 gimple_seq_add_seq (this_stmt_list
, seq
);
6713 tree lab1
= create_artificial_label (UNKNOWN_LOCATION
);
6714 lab2
= create_artificial_label (UNKNOWN_LOCATION
);
6715 gimple
*g
= gimple_build_cond (GT_EXPR
, v
, mem
, lab1
, lab2
);
6716 gimple_seq_add_stmt (this_stmt_list
, g
);
6717 gimple_seq_add_stmt (this_stmt_list
, gimple_build_label (lab1
));
6718 gimplify_assign (mem2
, v
, this_stmt_list
);
6721 && ctx
->combined_into_simd_safelen1
6722 && OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LASTPRIVATE
6723 && OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c
)
6724 && ctx
->lastprivate_conditional_map
)
6725 this_stmt_list
= &post_stmt_list
;
6727 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LASTPRIVATE
6728 || (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LINEAR
6729 && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c
)))
6731 var
= OMP_CLAUSE_DECL (c
);
6732 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LASTPRIVATE
6733 && OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c
)
6734 && is_taskloop_ctx (ctx
))
6736 gcc_checking_assert (ctx
->outer
&& is_task_ctx (ctx
->outer
));
6737 new_var
= lookup_decl (var
, ctx
->outer
);
6741 new_var
= lookup_decl (var
, ctx
);
6742 /* Avoid uninitialized warnings for lastprivate and
6743 for linear iterators. */
6745 && (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LASTPRIVATE
6746 || OMP_CLAUSE_LINEAR_NO_COPYIN (c
)))
6747 TREE_NO_WARNING (new_var
) = 1;
6750 if (!maybe_simt
&& simduid
&& DECL_HAS_VALUE_EXPR_P (new_var
))
6752 tree val
= DECL_VALUE_EXPR (new_var
);
6753 if (TREE_CODE (val
) == ARRAY_REF
6754 && VAR_P (TREE_OPERAND (val
, 0))
6755 && lookup_attribute ("omp simd array",
6756 DECL_ATTRIBUTES (TREE_OPERAND (val
,
6759 if (lastlane
== NULL
)
6761 lastlane
= create_tmp_var (unsigned_type_node
);
6763 = gimple_build_call_internal (IFN_GOMP_SIMD_LAST_LANE
,
6765 TREE_OPERAND (val
, 1));
6766 gimple_call_set_lhs (g
, lastlane
);
6767 gimple_seq_add_stmt (this_stmt_list
, g
);
6769 new_var
= build4 (ARRAY_REF
, TREE_TYPE (val
),
6770 TREE_OPERAND (val
, 0), lastlane
,
6771 NULL_TREE
, NULL_TREE
);
6772 TREE_THIS_NOTRAP (new_var
) = 1;
6775 else if (maybe_simt
)
6777 tree val
= (DECL_HAS_VALUE_EXPR_P (new_var
)
6778 ? DECL_VALUE_EXPR (new_var
)
6780 if (simtlast
== NULL
)
6782 simtlast
= create_tmp_var (unsigned_type_node
);
6783 gcall
*g
= gimple_build_call_internal
6784 (IFN_GOMP_SIMT_LAST_LANE
, 1, simtcond
);
6785 gimple_call_set_lhs (g
, simtlast
);
6786 gimple_seq_add_stmt (this_stmt_list
, g
);
6788 x
= build_call_expr_internal_loc
6789 (UNKNOWN_LOCATION
, IFN_GOMP_SIMT_XCHG_IDX
,
6790 TREE_TYPE (val
), 2, val
, simtlast
);
6791 new_var
= unshare_expr (new_var
);
6792 gimplify_assign (new_var
, x
, this_stmt_list
);
6793 new_var
= unshare_expr (new_var
);
6796 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LASTPRIVATE
6797 && OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c
))
6799 lower_omp (&OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c
), ctx
);
6800 gimple_seq_add_seq (this_stmt_list
,
6801 OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c
));
6802 OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c
) = NULL
;
6804 else if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LINEAR
6805 && OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c
))
6807 lower_omp (&OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c
), ctx
);
6808 gimple_seq_add_seq (this_stmt_list
,
6809 OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c
));
6810 OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c
) = NULL
;
6814 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LASTPRIVATE
6815 && OMP_CLAUSE_LASTPRIVATE_LOOP_IV (c
)
6816 && is_taskloop_ctx (ctx
))
6818 tree ovar
= maybe_lookup_decl_in_outer_ctx (var
,
6820 if (is_global_var (ovar
))
6824 x
= build_outer_var_ref (var
, ctx
, OMP_CLAUSE_LASTPRIVATE
);
6825 if (omp_is_reference (var
))
6826 new_var
= build_simple_mem_ref_loc (clause_loc
, new_var
);
6827 x
= lang_hooks
.decls
.omp_clause_assign_op (c
, x
, new_var
);
6828 gimplify_and_add (x
, this_stmt_list
);
6831 gimple_seq_add_stmt (this_stmt_list
, gimple_build_label (lab2
));
6835 c
= OMP_CLAUSE_CHAIN (c
);
6836 if (c
== NULL
&& !par_clauses
)
6838 /* If this was a workshare clause, see if it had been combined
6839 with its parallel. In that case, continue looking for the
6840 clauses also on the parallel statement itself. */
6841 if (is_parallel_ctx (ctx
))
6845 if (ctx
== NULL
|| !is_parallel_ctx (ctx
))
6848 c
= omp_find_clause (gimple_omp_parallel_clauses (ctx
->stmt
),
6849 OMP_CLAUSE_LASTPRIVATE
);
6855 gimple_seq_add_stmt (stmt_list
, gimple_build_label (label
));
6856 gimple_seq_add_seq (stmt_list
, post_stmt_list
);
6859 /* Lower the OpenACC reductions of CLAUSES for compute axis LEVEL
6860 (which might be a placeholder). INNER is true if this is an inner
6861 axis of a multi-axis loop. FORK and JOIN are (optional) fork and
6862 join markers. Generate the before-loop forking sequence in
6863 FORK_SEQ and the after-loop joining sequence to JOIN_SEQ. The
6864 general form of these sequences is
6866 GOACC_REDUCTION_SETUP
6868 GOACC_REDUCTION_INIT
6870 GOACC_REDUCTION_FINI
6872 GOACC_REDUCTION_TEARDOWN. */
/* Lower the OpenACC REDUCTION clauses of CLAUSES for compute axis LEVEL
   (see the file comment above): for each reduction variable, emit
   IFN_GOACC_REDUCTION calls for the SETUP/INIT/FINI/TEARDOWN phases into
   the before/after-fork and before/after-join sub-sequences, then stitch
   those around the FORK and JOIN markers into *FORK_SEQ and *JOIN_SEQ.
   NOTE(review): this text is a lossy extraction -- interior source lines
   are missing and statements are fragmented; code left byte-identical,
   comments only added.  */
6875 lower_oacc_reductions (location_t loc
, tree clauses
, tree level
, bool inner
,
6876 gcall
*fork
, gcall
*join
, gimple_seq
*fork_seq
,
6877 gimple_seq
*join_seq
, omp_context
*ctx
)
/* Four partial sequences, merged around FORK/JOIN at the end.  */
6879 gimple_seq before_fork
= NULL
;
6880 gimple_seq after_fork
= NULL
;
6881 gimple_seq before_join
= NULL
;
6882 gimple_seq after_join
= NULL
;
6883 tree init_code
= NULL_TREE
, fini_code
= NULL_TREE
,
6884 setup_code
= NULL_TREE
, teardown_code
= NULL_TREE
;
/* Byte offset into the reduction buffer, advanced per variable below.  */
6885 unsigned offset
= 0;
/* Process each REDUCTION clause in turn.  */
6887 for (tree c
= clauses
; c
; c
= OMP_CLAUSE_CHAIN (c
))
6888 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_REDUCTION
)
6890 tree orig
= OMP_CLAUSE_DECL (c
);
6891 tree var
= maybe_lookup_decl (orig
, ctx
);
6892 tree ref_to_res
= NULL_TREE
;
6893 tree incoming
, outgoing
, v1
, v2
, v3
;
6894 bool is_private
= false;
/* Canonicalize the reduction operator (e.g. '-' reduces like '+',
   logical and/or reduce as their bitwise counterparts).  */
6896 enum tree_code rcode
= OMP_CLAUSE_REDUCTION_CODE (c
);
6897 if (rcode
== MINUS_EXPR
)
6899 else if (rcode
== TRUTH_ANDIF_EXPR
)
6900 rcode
= BIT_AND_EXPR
;
6901 else if (rcode
== TRUTH_ORIF_EXPR
)
6902 rcode
= BIT_IOR_EXPR
;
6903 tree op
= build_int_cst (unsigned_type_node
, rcode
);
6908 incoming
= outgoing
= var
;
6912 /* See if an outer construct also reduces this variable. */
6913 omp_context
*outer
= ctx
;
6915 while (omp_context
*probe
= outer
->outer
)
6917 enum gimple_code type
= gimple_code (probe
->stmt
);
6922 case GIMPLE_OMP_FOR
:
6923 cls
= gimple_omp_for_clauses (probe
->stmt
);
6926 case GIMPLE_OMP_TARGET
:
6927 if ((gimple_omp_target_kind (probe
->stmt
)
6928 != GF_OMP_TARGET_KIND_OACC_PARALLEL
)
6929 && (gimple_omp_target_kind (probe
->stmt
)
6930 != GF_OMP_TARGET_KIND_OACC_SERIAL
))
6933 cls
= gimple_omp_target_clauses (probe
->stmt
);
6941 for (; cls
; cls
= OMP_CLAUSE_CHAIN (cls
))
6942 if (OMP_CLAUSE_CODE (cls
) == OMP_CLAUSE_REDUCTION
6943 && orig
== OMP_CLAUSE_DECL (cls
))
6945 incoming
= outgoing
= lookup_decl (orig
, probe
);
6946 goto has_outer_reduction
;
6948 else if ((OMP_CLAUSE_CODE (cls
) == OMP_CLAUSE_FIRSTPRIVATE
6949 || OMP_CLAUSE_CODE (cls
) == OMP_CLAUSE_PRIVATE
)
6950 && orig
== OMP_CLAUSE_DECL (cls
))
6958 /* This is the outermost construct with this reduction,
6959 see if there's a mapping for it. */
6960 if (gimple_code (outer
->stmt
) == GIMPLE_OMP_TARGET
6961 && maybe_lookup_field (orig
, outer
) && !is_private
)
6963 ref_to_res
= build_receiver_ref (orig
, false, outer
);
6964 if (omp_is_reference (orig
))
6965 ref_to_res
= build_simple_mem_ref (ref_to_res
);
6967 tree type
= TREE_TYPE (var
);
6968 if (POINTER_TYPE_P (type
))
6969 type
= TREE_TYPE (type
);
/* Seed the incoming value from the operator's identity element.  */
6972 incoming
= omp_reduction_init_op (loc
, rcode
, type
);
6976 /* Try to look at enclosing contexts for reduction var,
6977 use original if no mapping found. */
6979 omp_context
*c
= ctx
->outer
;
6982 t
= maybe_lookup_decl (orig
, c
);
6985 incoming
= outgoing
= (t
? t
: orig
);
6988 has_outer_reduction
:;
6992 ref_to_res
= integer_zero_node
;
/* By-reference reductions: materialize a local object and three
   temporaries (v1/v2/v3) for the setup/init/fini phases.  */
6994 if (omp_is_reference (orig
))
6996 tree type
= TREE_TYPE (var
);
6997 const char *id
= IDENTIFIER_POINTER (DECL_NAME (var
));
7001 tree x
= create_tmp_var (TREE_TYPE (type
), id
);
7002 gimplify_assign (var
, build_fold_addr_expr (x
), fork_seq
);
7005 v1
= create_tmp_var (type
, id
);
7006 v2
= create_tmp_var (type
, id
);
7007 v3
= create_tmp_var (type
, id
);
7009 gimplify_assign (v1
, var
, fork_seq
);
7010 gimplify_assign (v2
, var
, fork_seq
);
7011 gimplify_assign (v3
, var
, fork_seq
);
7013 var
= build_simple_mem_ref (var
);
7014 v1
= build_simple_mem_ref (v1
);
7015 v2
= build_simple_mem_ref (v2
);
7016 v3
= build_simple_mem_ref (v3
);
7017 outgoing
= build_simple_mem_ref (outgoing
);
7019 if (!TREE_CONSTANT (incoming
))
7020 incoming
= build_simple_mem_ref (incoming
);
7025 /* Determine position in reduction buffer, which may be used
7026 by target. The parser has ensured that this is not a
7027 variable-sized type. */
7028 fixed_size_mode mode
7029 = as_a
<fixed_size_mode
> (TYPE_MODE (TREE_TYPE (var
)));
7030 unsigned align
= GET_MODE_ALIGNMENT (mode
) / BITS_PER_UNIT
;
/* Round OFFSET up to the variable's natural alignment.  */
7031 offset
= (offset
+ align
- 1) & ~(align
- 1);
7032 tree off
= build_int_cst (sizetype
, offset
);
7033 offset
+= GET_MODE_SIZE (mode
);
/* Phase selectors passed as the first scalar argument of each
   IFN_GOACC_REDUCTION call below.  */
7037 init_code
= build_int_cst (integer_type_node
,
7038 IFN_GOACC_REDUCTION_INIT
);
7039 fini_code
= build_int_cst (integer_type_node
,
7040 IFN_GOACC_REDUCTION_FINI
);
7041 setup_code
= build_int_cst (integer_type_node
,
7042 IFN_GOACC_REDUCTION_SETUP
);
7043 teardown_code
= build_int_cst (integer_type_node
,
7044 IFN_GOACC_REDUCTION_TEARDOWN
);
7048 = build_call_expr_internal_loc (loc
, IFN_GOACC_REDUCTION
,
7049 TREE_TYPE (var
), 6, setup_code
,
7050 unshare_expr (ref_to_res
),
7051 incoming
, level
, op
, off
);
7053 = build_call_expr_internal_loc (loc
, IFN_GOACC_REDUCTION
,
7054 TREE_TYPE (var
), 6, init_code
,
7055 unshare_expr (ref_to_res
),
7056 v1
, level
, op
, off
);
7058 = build_call_expr_internal_loc (loc
, IFN_GOACC_REDUCTION
,
7059 TREE_TYPE (var
), 6, fini_code
,
7060 unshare_expr (ref_to_res
),
7061 v2
, level
, op
, off
);
7063 = build_call_expr_internal_loc (loc
, IFN_GOACC_REDUCTION
,
7064 TREE_TYPE (var
), 6, teardown_code
,
7065 ref_to_res
, v3
, level
, op
, off
);
/* Distribute the four phase calls over the four sub-sequences.  */
7067 gimplify_assign (v1
, setup_call
, &before_fork
);
7068 gimplify_assign (v2
, init_call
, &after_fork
);
7069 gimplify_assign (v3
, fini_call
, &before_join
);
7070 gimplify_assign (outgoing
, teardown_call
, &after_join
);
7073 /* Now stitch things together. */
7074 gimple_seq_add_seq (fork_seq
, before_fork
);
7076 gimple_seq_add_stmt (fork_seq
, fork
);
7077 gimple_seq_add_seq (fork_seq
, after_fork
);
7079 gimple_seq_add_seq (join_seq
, before_join
);
7081 gimple_seq_add_stmt (join_seq
, join
);
7082 gimple_seq_add_seq (join_seq
, after_join
);
7085 /* Generate code to implement the REDUCTION clauses, append it
7086 to STMT_SEQP. CLIST if non-NULL is a pointer to a sequence
7087 that should be emitted also inside of the critical section,
7088 in that case clear *CLIST afterwards, otherwise leave it as is
7089 and let the caller emit it itself. */
/* Lower OpenMP REDUCTION clauses CLAUSES (see the file comment above):
   append the generated merge code to *STMT_SEQP, guarded by an atomic
   update when there is exactly one scalar reduction, otherwise by a
   GOMP_atomic_start/GOMP_atomic_end lock pair.  *CLIST, if non-NULL, is
   emitted inside the critical section as well.
   NOTE(review): this text is a lossy extraction -- interior source lines
   are missing and statements are fragmented; code left byte-identical,
   comments only added.  */
7092 lower_reduction_clauses (tree clauses
, gimple_seq
*stmt_seqp
,
7093 gimple_seq
*clist
, omp_context
*ctx
)
7095 gimple_seq sub_seq
= NULL
;
/* Early exits: constructs whose reductions are lowered elsewhere.  */
7100 /* OpenACC loop reductions are handled elsewhere. */
7101 if (is_gimple_omp_oacc (ctx
->stmt
))
7104 /* SIMD reductions are handled in lower_rec_input_clauses. */
7105 if (gimple_code (ctx
->stmt
) == GIMPLE_OMP_FOR
7106 && gimple_omp_for_kind (ctx
->stmt
) == GF_OMP_FOR_KIND_SIMD
)
7109 /* inscan reductions are handled elsewhere. */
7110 if (ctx
->scan_inclusive
|| ctx
->scan_exclusive
)
7113 /* First see if there is exactly one reduction clause. Use OMP_ATOMIC
7114 update in that case, otherwise use a lock. */
7115 for (c
= clauses
; c
&& count
< 2; c
= OMP_CLAUSE_CHAIN (c
))
7116 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_REDUCTION
7117 && !OMP_CLAUSE_REDUCTION_TASK (c
))
7119 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
)
7120 || TREE_CODE (OMP_CLAUSE_DECL (c
)) == MEM_REF
)
7122 /* Never use OMP_ATOMIC for array reductions or UDRs. */
/* Main loop: emit merge code for each non-task REDUCTION clause.  */
7132 for (c
= clauses
; c
; c
= OMP_CLAUSE_CHAIN (c
))
7134 tree var
, ref
, new_var
, orig_var
;
7135 enum tree_code code
;
7136 location_t clause_loc
= OMP_CLAUSE_LOCATION (c
);
7138 if (OMP_CLAUSE_CODE (c
) != OMP_CLAUSE_REDUCTION
7139 || OMP_CLAUSE_REDUCTION_TASK (c
))
7142 enum omp_clause_code ccode
= OMP_CLAUSE_REDUCTION
;
/* Strip MEM_REF/POINTER_PLUS/ADDR wrappers from array-section decls
   to reach the underlying variable.  */
7143 orig_var
= var
= OMP_CLAUSE_DECL (c
);
7144 if (TREE_CODE (var
) == MEM_REF
)
7146 var
= TREE_OPERAND (var
, 0);
7147 if (TREE_CODE (var
) == POINTER_PLUS_EXPR
)
7148 var
= TREE_OPERAND (var
, 0);
7149 if (TREE_CODE (var
) == ADDR_EXPR
)
7150 var
= TREE_OPERAND (var
, 0);
7153 /* If this is a pointer or referenced based array
7154 section, the var could be private in the outer
7155 context e.g. on orphaned loop construct. Pretend this
7156 is private variable's outer reference. */
7157 ccode
= OMP_CLAUSE_PRIVATE
;
7158 if (TREE_CODE (var
) == INDIRECT_REF
)
7159 var
= TREE_OPERAND (var
, 0);
7162 if (is_variable_sized (var
))
7164 gcc_assert (DECL_HAS_VALUE_EXPR_P (var
));
7165 var
= DECL_VALUE_EXPR (var
);
7166 gcc_assert (TREE_CODE (var
) == INDIRECT_REF
);
7167 var
= TREE_OPERAND (var
, 0);
7168 gcc_assert (DECL_P (var
));
7171 new_var
= lookup_decl (var
, ctx
);
7172 if (var
== OMP_CLAUSE_DECL (c
) && omp_is_reference (var
))
7173 new_var
= build_simple_mem_ref_loc (clause_loc
, new_var
);
7174 ref
= build_outer_var_ref (var
, ctx
, ccode
);
7175 code
= OMP_CLAUSE_REDUCTION_CODE (c
);
7177 /* reduction(-:var) sums up the partial results, so it acts
7178 identically to reduction(+:var). */
7179 if (code
== MINUS_EXPR
)
/* Single-clause case: emit a relaxed OMP_ATOMIC update.  */
7184 tree addr
= build_fold_addr_expr_loc (clause_loc
, ref
);
7186 addr
= save_expr (addr
);
7187 ref
= build1 (INDIRECT_REF
, TREE_TYPE (TREE_TYPE (addr
)), addr
);
7188 x
= fold_build2_loc (clause_loc
, code
, TREE_TYPE (ref
), ref
, new_var
);
7189 x
= build2 (OMP_ATOMIC
, void_type_node
, addr
, x
);
7190 OMP_ATOMIC_MEMORY_ORDER (x
) = OMP_MEMORY_ORDER_RELAXED
;
7191 gimplify_and_add (x
, stmt_seqp
);
/* Array-section reductions (MEM_REF decl): emit an element-wise
   merge loop over the section.  */
7194 else if (TREE_CODE (OMP_CLAUSE_DECL (c
)) == MEM_REF
)
7196 tree d
= OMP_CLAUSE_DECL (c
);
7197 tree type
= TREE_TYPE (d
);
7198 tree v
= TYPE_MAX_VALUE (TYPE_DOMAIN (type
));
7199 tree i
= create_tmp_var (TREE_TYPE (v
));
7200 tree ptype
= build_pointer_type (TREE_TYPE (type
));
7201 tree bias
= TREE_OPERAND (d
, 1);
7202 d
= TREE_OPERAND (d
, 0);
7203 if (TREE_CODE (d
) == POINTER_PLUS_EXPR
)
7205 tree b
= TREE_OPERAND (d
, 1);
7206 b
= maybe_lookup_decl (b
, ctx
);
7209 b
= TREE_OPERAND (d
, 1);
7210 b
= maybe_lookup_decl_in_outer_ctx (b
, ctx
);
7212 if (integer_zerop (bias
))
7216 bias
= fold_convert_loc (clause_loc
, TREE_TYPE (b
), bias
);
7217 bias
= fold_build2_loc (clause_loc
, PLUS_EXPR
,
7218 TREE_TYPE (b
), b
, bias
);
7220 d
= TREE_OPERAND (d
, 0);
7222 /* For ref build_outer_var_ref already performs this, so
7223 only new_var needs a dereference. */
7224 if (TREE_CODE (d
) == INDIRECT_REF
)
7226 new_var
= build_simple_mem_ref_loc (clause_loc
, new_var
);
7227 gcc_assert (omp_is_reference (var
) && var
== orig_var
);
7229 else if (TREE_CODE (d
) == ADDR_EXPR
)
7231 if (orig_var
== var
)
7233 new_var
= build_fold_addr_expr (new_var
);
7234 ref
= build_fold_addr_expr (ref
);
7239 gcc_assert (orig_var
== var
);
7240 if (omp_is_reference (var
))
7241 ref
= build_fold_addr_expr (ref
);
7245 tree t
= maybe_lookup_decl (v
, ctx
);
7249 v
= maybe_lookup_decl_in_outer_ctx (v
, ctx
);
7250 gimplify_expr (&v
, stmt_seqp
, NULL
, is_gimple_val
, fb_rvalue
);
7252 if (!integer_zerop (bias
))
7254 bias
= fold_convert_loc (clause_loc
, sizetype
, bias
);
7255 new_var
= fold_build2_loc (clause_loc
, POINTER_PLUS_EXPR
,
7256 TREE_TYPE (new_var
), new_var
,
7257 unshare_expr (bias
));
7258 ref
= fold_build2_loc (clause_loc
, POINTER_PLUS_EXPR
,
7259 TREE_TYPE (ref
), ref
, bias
);
7261 new_var
= fold_convert_loc (clause_loc
, ptype
, new_var
);
7262 ref
= fold_convert_loc (clause_loc
, ptype
, ref
);
7263 tree m
= create_tmp_var (ptype
);
7264 gimplify_assign (m
, new_var
, stmt_seqp
);
7266 m
= create_tmp_var (ptype
);
7267 gimplify_assign (m
, ref
, stmt_seqp
);
/* Build the merge loop: label BODY, per-element merge, pointer and
   index increments, conditional branch back, label END.  */
7269 gimplify_assign (i
, build_int_cst (TREE_TYPE (v
), 0), stmt_seqp
);
7270 tree body
= create_artificial_label (UNKNOWN_LOCATION
);
7271 tree end
= create_artificial_label (UNKNOWN_LOCATION
);
7272 gimple_seq_add_stmt (&sub_seq
, gimple_build_label (body
));
7273 tree priv
= build_simple_mem_ref_loc (clause_loc
, new_var
);
7274 tree out
= build_simple_mem_ref_loc (clause_loc
, ref
);
7275 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
))
7277 tree placeholder
= OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
);
7278 tree decl_placeholder
7279 = OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c
);
7280 SET_DECL_VALUE_EXPR (placeholder
, out
);
7281 DECL_HAS_VALUE_EXPR_P (placeholder
) = 1;
7282 SET_DECL_VALUE_EXPR (decl_placeholder
, priv
);
7283 DECL_HAS_VALUE_EXPR_P (decl_placeholder
) = 1;
7284 lower_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c
), ctx
);
7285 gimple_seq_add_seq (&sub_seq
,
7286 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c
));
7287 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c
) = NULL
;
7288 OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
) = NULL
;
7289 OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c
) = NULL
;
7293 x
= build2 (code
, TREE_TYPE (out
), out
, priv
);
7294 out
= unshare_expr (out
);
7295 gimplify_assign (out
, x
, &sub_seq
);
7297 gimple
*g
= gimple_build_assign (new_var
, POINTER_PLUS_EXPR
, new_var
,
7298 TYPE_SIZE_UNIT (TREE_TYPE (type
)));
7299 gimple_seq_add_stmt (&sub_seq
, g
);
7300 g
= gimple_build_assign (ref
, POINTER_PLUS_EXPR
, ref
,
7301 TYPE_SIZE_UNIT (TREE_TYPE (type
)));
7302 gimple_seq_add_stmt (&sub_seq
, g
);
7303 g
= gimple_build_assign (i
, PLUS_EXPR
, i
,
7304 build_int_cst (TREE_TYPE (i
), 1));
7305 gimple_seq_add_stmt (&sub_seq
, g
);
7306 g
= gimple_build_cond (LE_EXPR
, i
, v
, body
, end
);
7307 gimple_seq_add_stmt (&sub_seq
, g
);
7308 gimple_seq_add_stmt (&sub_seq
, gimple_build_label (end
));
/* User-defined reduction (UDR): splice in the lowered merge seq.  */
7310 else if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
))
7312 tree placeholder
= OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
);
7314 if (omp_is_reference (var
)
7315 && !useless_type_conversion_p (TREE_TYPE (placeholder
),
7317 ref
= build_fold_addr_expr_loc (clause_loc
, ref
);
7318 SET_DECL_VALUE_EXPR (placeholder
, ref
);
7319 DECL_HAS_VALUE_EXPR_P (placeholder
) = 1;
7320 lower_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c
), ctx
);
7321 gimple_seq_add_seq (&sub_seq
, OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c
));
7322 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c
) = NULL
;
7323 OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
) = NULL
;
/* Plain scalar reduction: outer = outer OP private.  */
7327 x
= build2 (code
, TREE_TYPE (ref
), ref
, new_var
);
7328 ref
= build_outer_var_ref (var
, ctx
);
7329 gimplify_assign (ref
, x
, &sub_seq
);
/* Multi-clause case: wrap SUB_SEQ (and *CLIST) in the libgomp
   atomic start/end critical section.  */
7333 stmt
= gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_ATOMIC_START
),
7335 gimple_seq_add_stmt (stmt_seqp
, stmt
);
7337 gimple_seq_add_seq (stmt_seqp
, sub_seq
);
7341 gimple_seq_add_seq (stmt_seqp
, *clist
);
7345 stmt
= gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_ATOMIC_END
),
7347 gimple_seq_add_stmt (stmt_seqp
, stmt
);
7351 /* Generate code to implement the COPYPRIVATE clauses. */
/* Lower COPYPRIVATE clauses (see the file comment above): for each such
   clause, store the variable (or its address, when passed by reference)
   into the sender record in *SLIST, and read it back through the
   receiver record into the local copy in *RLIST.
   NOTE(review): lossy extraction -- interior source lines missing;
   code left byte-identical, comments only added.  */
7354 lower_copyprivate_clauses (tree clauses
, gimple_seq
*slist
, gimple_seq
*rlist
,
7359 for (c
= clauses
; c
; c
= OMP_CLAUSE_CHAIN (c
))
7361 tree var
, new_var
, ref
, x
;
7363 location_t clause_loc
= OMP_CLAUSE_LOCATION (c
);
7365 if (OMP_CLAUSE_CODE (c
) != OMP_CLAUSE_COPYPRIVATE
)
7368 var
= OMP_CLAUSE_DECL (c
);
7369 by_ref
= use_pointer_for_field (var
, NULL
);
/* Sender side: fill the field in the marshalling record.  */
7371 ref
= build_sender_ref (var
, ctx
);
7372 x
= new_var
= lookup_decl_in_outer_ctx (var
, ctx
);
7375 x
= build_fold_addr_expr_loc (clause_loc
, new_var
);
7376 x
= fold_convert_loc (clause_loc
, TREE_TYPE (ref
), x
);
7378 gimplify_assign (ref
, x
, slist
);
/* Receiver side: copy the broadcast value back into the local var.  */
7380 ref
= build_receiver_ref (var
, false, ctx
);
7383 ref
= fold_convert_loc (clause_loc
,
7384 build_pointer_type (TREE_TYPE (new_var
)),
7386 ref
= build_fold_indirect_ref_loc (clause_loc
, ref
);
7388 if (omp_is_reference (var
))
7390 ref
= fold_convert_loc (clause_loc
, TREE_TYPE (new_var
), ref
);
7391 ref
= build_simple_mem_ref_loc (clause_loc
, ref
);
7392 new_var
= build_simple_mem_ref_loc (clause_loc
, new_var
);
/* Use the language hook so C++ copy semantics are honored.  */
7394 x
= lang_hooks
.decls
.omp_clause_assign_op (c
, new_var
, ref
);
7395 gimplify_and_add (x
, rlist
);
7400 /* Generate code to implement the clauses, FIRSTPRIVATE, COPYIN, LASTPRIVATE,
7401 and REDUCTION from the sender (aka parent) side. */
/* Lower FIRSTPRIVATE, COPYIN, LASTPRIVATE and REDUCTION clauses on the
   sender (parent) side (see the file comment above): fill the sender
   record fields in *ILIST before the region and copy values back out of
   the record in *OLIST after it.
   NOTE(review): lossy extraction -- interior source lines missing;
   code left byte-identical, comments only added.  */
7404 lower_send_clauses (tree clauses
, gimple_seq
*ilist
, gimple_seq
*olist
,
7408 int ignored_looptemp
= 0;
7409 bool is_taskloop
= false;
7411 /* For taskloop, ignore first two _looptemp_ clauses, those are initialized
7412 by GOMP_taskloop. */
7413 if (is_task_ctx (ctx
) && gimple_omp_task_taskloop_p (ctx
->stmt
))
7415 ignored_looptemp
= 2;
7419 for (c
= clauses
; c
; c
= OMP_CLAUSE_CHAIN (c
))
7421 tree val
, ref
, x
, var
;
7422 bool by_ref
, do_in
= false, do_out
= false;
7423 location_t clause_loc
= OMP_CLAUSE_LOCATION (c
);
/* First switch: filter which clause kinds need sender-side work.  */
7425 switch (OMP_CLAUSE_CODE (c
))
7427 case OMP_CLAUSE_PRIVATE
:
7428 if (OMP_CLAUSE_PRIVATE_OUTER_REF (c
))
7431 case OMP_CLAUSE_FIRSTPRIVATE
:
7432 case OMP_CLAUSE_COPYIN
:
7433 case OMP_CLAUSE_LASTPRIVATE
:
7434 case OMP_CLAUSE_IN_REDUCTION
:
7435 case OMP_CLAUSE__REDUCTEMP_
:
7437 case OMP_CLAUSE_REDUCTION
:
7438 if (is_task_ctx (ctx
) || OMP_CLAUSE_REDUCTION_TASK (c
))
7441 case OMP_CLAUSE_SHARED
:
7442 if (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c
))
7445 case OMP_CLAUSE__LOOPTEMP_
:
7446 if (ignored_looptemp
)
/* Strip array-section wrappers to reach the base decl.  */
7456 val
= OMP_CLAUSE_DECL (c
);
7457 if ((OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_REDUCTION
7458 || OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_IN_REDUCTION
)
7459 && TREE_CODE (val
) == MEM_REF
)
7461 val
= TREE_OPERAND (val
, 0);
7462 if (TREE_CODE (val
) == POINTER_PLUS_EXPR
)
7463 val
= TREE_OPERAND (val
, 0);
7464 if (TREE_CODE (val
) == INDIRECT_REF
7465 || TREE_CODE (val
) == ADDR_EXPR
)
7466 val
= TREE_OPERAND (val
, 0);
7467 if (is_variable_sized (val
))
7471 /* For OMP_CLAUSE_SHARED_FIRSTPRIVATE, look beyond the
7472 outer taskloop region. */
7473 omp_context
*ctx_for_o
= ctx
;
7475 && OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_SHARED
7476 && OMP_CLAUSE_SHARED_FIRSTPRIVATE (c
))
7477 ctx_for_o
= ctx
->outer
;
7479 var
= lookup_decl_in_outer_ctx (val
, ctx_for_o
);
7481 if (OMP_CLAUSE_CODE (c
) != OMP_CLAUSE_COPYIN
7482 && is_global_var (var
)
7483 && (val
== OMP_CLAUSE_DECL (c
)
7484 || !is_task_ctx (ctx
)
7485 || (TREE_CODE (TREE_TYPE (val
)) != POINTER_TYPE
7486 && (TREE_CODE (TREE_TYPE (val
)) != REFERENCE_TYPE
7487 || (TREE_CODE (TREE_TYPE (TREE_TYPE (val
)))
7488 != POINTER_TYPE
)))))
7491 t
= omp_member_access_dummy_var (var
);
7494 var
= DECL_VALUE_EXPR (var
);
7495 tree o
= maybe_lookup_decl_in_outer_ctx (t
, ctx_for_o
);
7497 var
= unshare_and_remap (var
, t
, o
);
7499 var
= unshare_expr (var
);
7502 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_SHARED
)
7504 /* Handle taskloop firstprivate/lastprivate, where the
7505 lastprivate on GIMPLE_OMP_TASK is represented as
7506 OMP_CLAUSE_SHARED_FIRSTPRIVATE. */
7507 tree f
= lookup_sfield ((splay_tree_key
) &DECL_UID (val
), ctx
);
7508 x
= omp_build_component_ref (ctx
->sender_decl
, f
);
7509 if (use_pointer_for_field (val
, ctx
))
7510 var
= build_fold_addr_expr (var
);
7511 gimplify_assign (x
, var
, ilist
);
7512 DECL_ABSTRACT_ORIGIN (f
) = NULL
;
7516 if (((OMP_CLAUSE_CODE (c
) != OMP_CLAUSE_REDUCTION
7517 && OMP_CLAUSE_CODE (c
) != OMP_CLAUSE_IN_REDUCTION
)
7518 || val
== OMP_CLAUSE_DECL (c
))
7519 && is_variable_sized (val
))
7521 by_ref
= use_pointer_for_field (val
, NULL
);
/* Second switch: decide direction (do_in/do_out) per clause kind.  */
7523 switch (OMP_CLAUSE_CODE (c
))
7525 case OMP_CLAUSE_FIRSTPRIVATE
:
7526 if (OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (c
)
7528 && is_task_ctx (ctx
))
7529 TREE_NO_WARNING (var
) = 1;
7533 case OMP_CLAUSE_PRIVATE
:
7534 case OMP_CLAUSE_COPYIN
:
7535 case OMP_CLAUSE__LOOPTEMP_
:
7536 case OMP_CLAUSE__REDUCTEMP_
:
7540 case OMP_CLAUSE_LASTPRIVATE
:
7541 if (by_ref
|| omp_is_reference (val
))
7543 if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c
))
7550 if (lang_hooks
.decls
.omp_private_outer_ref (val
))
7555 case OMP_CLAUSE_REDUCTION
:
7556 case OMP_CLAUSE_IN_REDUCTION
:
7558 if (val
== OMP_CLAUSE_DECL (c
))
7560 if (is_task_ctx (ctx
))
7561 by_ref
= use_pointer_for_field (val
, ctx
);
7563 do_out
= !(by_ref
|| omp_is_reference (val
));
7566 by_ref
= TREE_CODE (TREE_TYPE (val
)) == ARRAY_TYPE
;
/* Emit the actual in/out record copies.  */
7575 ref
= build_sender_ref (val
, ctx
);
7576 x
= by_ref
? build_fold_addr_expr_loc (clause_loc
, var
) : var
;
7577 gimplify_assign (ref
, x
, ilist
);
7578 if (is_task_ctx (ctx
))
7579 DECL_ABSTRACT_ORIGIN (TREE_OPERAND (ref
, 1)) = NULL
;
7584 ref
= build_sender_ref (val
, ctx
);
7585 gimplify_assign (var
, ref
, olist
);
7590 /* Generate code to implement SHARED from the sender (aka parent)
7591 side. This is trickier, since GIMPLE_OMP_PARALLEL_CLAUSES doesn't
7592 list things that got automatically shared. */
/* Lower implicitly-shared variables on the sender (parent) side (see the
   file comment above): walk the fields of the marshalling record type
   and emit the record stores into *ILIST and copy-outs into *OLIST.
   NOTE(review): lossy extraction -- interior source lines missing;
   code left byte-identical, comments only added.  */
7595 lower_send_shared_vars (gimple_seq
*ilist
, gimple_seq
*olist
, omp_context
*ctx
)
7597 tree var
, ovar
, nvar
, t
, f
, x
, record_type
;
7599 if (ctx
->record_type
== NULL
)
7602 record_type
= ctx
->srecord_type
? ctx
->srecord_type
: ctx
->record_type
;
/* Each field's DECL_ABSTRACT_ORIGIN points back at the shared var.  */
7603 for (f
= TYPE_FIELDS (record_type
); f
; f
= DECL_CHAIN (f
))
7605 ovar
= DECL_ABSTRACT_ORIGIN (f
);
7606 if (!ovar
|| TREE_CODE (ovar
) == FIELD_DECL
)
7609 nvar
= maybe_lookup_decl (ovar
, ctx
);
7611 || !DECL_HAS_VALUE_EXPR_P (nvar
)
7612 || (ctx
->allocate_map
7613 && ctx
->allocate_map
->get (ovar
)))
7616 /* If CTX is a nested parallel directive. Find the immediately
7617 enclosing parallel or workshare construct that contains a
7618 mapping for OVAR. */
7619 var
= lookup_decl_in_outer_ctx (ovar
, ctx
);
7621 t
= omp_member_access_dummy_var (var
);
7624 var
= DECL_VALUE_EXPR (var
);
7625 tree o
= maybe_lookup_decl_in_outer_ctx (t
, ctx
);
7627 var
= unshare_and_remap (var
, t
, o
);
7629 var
= unshare_expr (var
);
7632 if (use_pointer_for_field (ovar
, ctx
))
/* By-reference field: store the variable's address.  */
7634 x
= build_sender_ref (ovar
, ctx
);
7635 if (TREE_CODE (TREE_TYPE (f
)) == ARRAY_TYPE
7636 && TREE_TYPE (f
) == TREE_TYPE (ovar
))
7638 gcc_assert (is_parallel_ctx (ctx
)
7639 && DECL_ARTIFICIAL (ovar
));
7640 /* _condtemp_ clause. */
7641 var
= build_constructor (TREE_TYPE (x
), NULL
);
7644 var
= build_fold_addr_expr (var
);
7645 gimplify_assign (x
, var
, ilist
);
/* By-value field: store the value in, copy it back out after.  */
7649 x
= build_sender_ref (ovar
, ctx
);
7650 gimplify_assign (x
, var
, ilist
);
7652 if (!TREE_READONLY (var
)
7653 /* We don't need to receive a new reference to a result
7654 or parm decl. In fact we may not store to it as we will
7655 invalidate any pending RSO and generate wrong gimple
7657 && !((TREE_CODE (var
) == RESULT_DECL
7658 || TREE_CODE (var
) == PARM_DECL
)
7659 && DECL_BY_REFERENCE (var
)))
7661 x
= build_sender_ref (ovar
, ctx
);
7662 gimplify_assign (var
, x
, olist
);
7668 /* Emit an OpenACC head marker call, encapulating the partitioning and
7669 other information that must be processed by the target compiler.
7670 Return the maximum number of dimensions the associated loop might
7671 be partitioned over. */
/* Emit the OpenACC head marker call into *SEQ (see the file comment
   above): encode the loop's partitioning clauses (gang/worker/vector/
   seq/auto/independent/tile, plus an optional gang static argument) into
   an IFN_UNIQUE call and return the number of partitioning levels.
   NOTE(review): lossy extraction -- interior source lines missing;
   code left byte-identical, comments only added.  */
7674 lower_oacc_head_mark (location_t loc
, tree ddvar
, tree clauses
,
7675 gimple_seq
*seq
, omp_context
*ctx
)
7677 unsigned levels
= 0;
7679 tree gang_static
= NULL_TREE
;
7680 auto_vec
<tree
, 5> args
;
7682 args
.quick_push (build_int_cst
7683 (integer_type_node
, IFN_UNIQUE_OACC_HEAD_MARK
));
7684 args
.quick_push (ddvar
);
/* Fold each partitioning clause into the TAG bitmask.  */
7685 for (tree c
= clauses
; c
; c
= OMP_CLAUSE_CHAIN (c
))
7687 switch (OMP_CLAUSE_CODE (c
))
7689 case OMP_CLAUSE_GANG
:
7690 tag
|= OLF_DIM_GANG
;
7691 gang_static
= OMP_CLAUSE_GANG_STATIC_EXPR (c
);
7692 /* static:* is represented by -1, and we can ignore it, as
7693 scheduling is always static. */
7694 if (gang_static
&& integer_minus_onep (gang_static
))
7695 gang_static
= NULL_TREE
;
7699 case OMP_CLAUSE_WORKER
:
7700 tag
|= OLF_DIM_WORKER
;
7704 case OMP_CLAUSE_VECTOR
:
7705 tag
|= OLF_DIM_VECTOR
;
7709 case OMP_CLAUSE_SEQ
:
7713 case OMP_CLAUSE_AUTO
:
7717 case OMP_CLAUSE_INDEPENDENT
:
7718 tag
|= OLF_INDEPENDENT
;
7721 case OMP_CLAUSE_TILE
:
7732 if (DECL_P (gang_static
))
7733 gang_static
= build_outer_var_ref (gang_static
, ctx
);
7734 tag
|= OLF_GANG_STATIC
;
7737 /* In a parallel region, loops are implicitly INDEPENDENT. */
7738 omp_context
*tgt
= enclosing_target_ctx (ctx
);
7739 if (!tgt
|| is_oacc_parallel_or_serial (tgt
))
7740 tag
|= OLF_INDEPENDENT
;
7743 /* Tiling could use all 3 levels. */
7747 /* A loop lacking SEQ, GANG, WORKER and/or VECTOR could be AUTO.
7748 Ensure at least one level, or 2 for possible auto
7750 bool maybe_auto
= !(tag
& (((GOMP_DIM_MASK (GOMP_DIM_MAX
) - 1)
7751 << OLF_DIM_BASE
) | OLF_SEQ
));
7753 if (levels
< 1u + maybe_auto
)
7754 levels
= 1u + maybe_auto
;
/* Finish the argument vector and emit the IFN_UNIQUE marker call.  */
7757 args
.quick_push (build_int_cst (integer_type_node
, levels
));
7758 args
.quick_push (build_int_cst (integer_type_node
, tag
));
7760 args
.quick_push (gang_static
);
7762 gcall
*call
= gimple_build_call_internal_vec (IFN_UNIQUE
, args
);
7763 gimple_set_location (call
, loc
);
7764 gimple_set_lhs (call
, ddvar
);
7765 gimple_seq_add_stmt (seq
, call
);
7770 /* Emit an OpenACC lopp head or tail marker to SEQ. LEVEL is the
7771 partitioning level of the enclosed region. */
/* Emit an OpenACC loop head (HEAD == true) or tail marker into *SEQ as
   an IFN_UNIQUE call; TOFOLLOW, when non-NULL, is passed as an extra
   argument.  DDVAR threads the data dependence through the markers.
   NOTE(review): lossy extraction; code left byte-identical.  */
7774 lower_oacc_loop_marker (location_t loc
, tree ddvar
, bool head
,
7775 tree tofollow
, gimple_seq
*seq
)
7777 int marker_kind
= (head
? IFN_UNIQUE_OACC_HEAD_MARK
7778 : IFN_UNIQUE_OACC_TAIL_MARK
);
7779 tree marker
= build_int_cst (integer_type_node
, marker_kind
);
/* Third argument is only present when TOFOLLOW is supplied.  */
7780 int nargs
= 2 + (tofollow
!= NULL_TREE
);
7781 gcall
*call
= gimple_build_call_internal (IFN_UNIQUE
, nargs
,
7782 marker
, ddvar
, tofollow
);
7783 gimple_set_location (call
, loc
);
7784 gimple_set_lhs (call
, ddvar
);
7785 gimple_seq_add_stmt (seq
, call
);
7788 /* Generate the before and after OpenACC loop sequences. CLAUSES are
7789 the loop clauses, from which we extract reductions. Initialize
/* Generate the before (*HEAD) and after (*TAIL) OpenACC loop sequences
   for CLAUSES (see the file comment above): emit the head-mark call,
   then one fork/join marker pair per partitioning level, lowering the
   loop's reductions around each pair via lower_oacc_reductions.
   NOTE(review): lossy extraction -- interior source lines missing;
   code left byte-identical, comments only added.  */
7793 lower_oacc_head_tail (location_t loc
, tree clauses
,
7794 gimple_seq
*head
, gimple_seq
*tail
, omp_context
*ctx
)
/* DDVAR carries the data dependence between all marker calls.  */
7797 tree ddvar
= create_tmp_var (integer_type_node
, ".data_dep");
7798 gimple_seq_add_stmt (head
, gimple_build_assign (ddvar
, integer_zero_node
));
7800 unsigned count
= lower_oacc_head_mark (loc
, ddvar
, clauses
, head
, ctx
);
7801 tree fork_kind
= build_int_cst (unsigned_type_node
, IFN_UNIQUE_OACC_FORK
);
7802 tree join_kind
= build_int_cst (unsigned_type_node
, IFN_UNIQUE_OACC_JOIN
);
/* One fork/join level per partitioning level reported above.  */
7805 for (unsigned done
= 1; count
; count
--, done
++)
7807 gimple_seq fork_seq
= NULL
;
7808 gimple_seq join_seq
= NULL
;
7810 tree place
= build_int_cst (integer_type_node
, -1);
7811 gcall
*fork
= gimple_build_call_internal (IFN_UNIQUE
, 3,
7812 fork_kind
, ddvar
, place
);
7813 gimple_set_location (fork
, loc
);
7814 gimple_set_lhs (fork
, ddvar
);
7816 gcall
*join
= gimple_build_call_internal (IFN_UNIQUE
, 3,
7817 join_kind
, ddvar
, place
);
7818 gimple_set_location (join
, loc
);
7819 gimple_set_lhs (join
, ddvar
);
7821 /* Mark the beginning of this level sequence. */
7823 lower_oacc_loop_marker (loc
, ddvar
, true,
7824 build_int_cst (integer_type_node
, count
),
7826 lower_oacc_loop_marker (loc
, ddvar
, false,
7827 build_int_cst (integer_type_node
, done
),
7830 lower_oacc_reductions (loc
, clauses
, place
, inner
,
7831 fork
, join
, &fork_seq
, &join_seq
, ctx
);
7833 /* Append this level to head. */
7834 gimple_seq_add_seq (head
, fork_seq
);
7835 /* Prepend it to tail. */
7836 gimple_seq_add_seq (&join_seq
, *tail
);
7842 /* Mark the end of the sequence. */
7843 lower_oacc_loop_marker (loc
, ddvar
, true, NULL_TREE
, head
);
7844 lower_oacc_loop_marker (loc
, ddvar
, false, NULL_TREE
, tail
);
7847 /* If exceptions are enabled, wrap the statements in BODY in a MUST_NOT_THROW
7848 catch handler and return it. This prevents programs from violating the
7849 structured block semantics with throws. */
/* If exceptions are enabled, wrap BODY in a GIMPLE_TRY with a
   MUST_NOT_THROW handler (see the file comment above) so throws cannot
   escape the structured block; otherwise return BODY unchanged.
   NOTE(review): lossy extraction; code left byte-identical.  */
7852 maybe_catch_exception (gimple_seq body
)
7857 if (!flag_exceptions
)
/* Prefer the frontend's cleanup action; fall back to trapping.  */
7860 if (lang_hooks
.eh_protect_cleanup_actions
!= NULL
)
7861 decl
= lang_hooks
.eh_protect_cleanup_actions ();
7863 decl
= builtin_decl_explicit (BUILT_IN_TRAP
);
7865 g
= gimple_build_eh_must_not_throw (decl
);
7866 g
= gimple_build_try (body
, gimple_seq_alloc_with_stmt (g
),
7869 return gimple_seq_alloc_with_stmt (g
);
7873 /* Routines to lower OMP directives into OMP-GIMPLE. */
7875 /* If ctx is a worksharing context inside of a cancellable parallel
7876 region and it isn't nowait, add lhs to its GIMPLE_OMP_RETURN
7877 and conditional branch to parallel's cancel_label to handle
7878 cancellation in the implicit barrier. */
7881 maybe_add_implicit_barrier_cancel (omp_context
*ctx
, gimple
*omp_return
,
7884 gcc_assert (gimple_code (omp_return
) == GIMPLE_OMP_RETURN
);
7885 if (gimple_omp_return_nowait_p (omp_return
))
7887 for (omp_context
*outer
= ctx
->outer
; outer
; outer
= outer
->outer
)
7888 if (gimple_code (outer
->stmt
) == GIMPLE_OMP_PARALLEL
7889 && outer
->cancellable
)
7891 tree fndecl
= builtin_decl_explicit (BUILT_IN_GOMP_CANCEL
);
7892 tree c_bool_type
= TREE_TYPE (TREE_TYPE (fndecl
));
7893 tree lhs
= create_tmp_var (c_bool_type
);
7894 gimple_omp_return_set_lhs (omp_return
, lhs
);
7895 tree fallthru_label
= create_artificial_label (UNKNOWN_LOCATION
);
7896 gimple
*g
= gimple_build_cond (NE_EXPR
, lhs
,
7897 fold_convert (c_bool_type
,
7898 boolean_false_node
),
7899 outer
->cancel_label
, fallthru_label
);
7900 gimple_seq_add_stmt (body
, g
);
7901 gimple_seq_add_stmt (body
, gimple_build_label (fallthru_label
));
7903 else if (gimple_code (outer
->stmt
) != GIMPLE_OMP_TASKGROUP
)
7907 /* Find the first task_reduction or reduction clause or return NULL
7908 if there are none. */
7911 omp_task_reductions_find_first (tree clauses
, enum tree_code code
,
7912 enum omp_clause_code ccode
)
7916 clauses
= omp_find_clause (clauses
, ccode
);
7917 if (clauses
== NULL_TREE
)
7919 if (ccode
!= OMP_CLAUSE_REDUCTION
7920 || code
== OMP_TASKLOOP
7921 || OMP_CLAUSE_REDUCTION_TASK (clauses
))
7923 clauses
= OMP_CLAUSE_CHAIN (clauses
);
7927 static void lower_omp_task_reductions (omp_context
*, enum tree_code
, tree
,
7928 gimple_seq
*, gimple_seq
*);
7930 /* Lower the OpenMP sections directive in the current statement in GSI_P.
7931 CTX is the enclosing OMP context for the current statement. */
7934 lower_omp_sections (gimple_stmt_iterator
*gsi_p
, omp_context
*ctx
)
7936 tree block
, control
;
7937 gimple_stmt_iterator tgsi
;
7938 gomp_sections
*stmt
;
7940 gbind
*new_stmt
, *bind
;
7941 gimple_seq ilist
, dlist
, olist
, tred_dlist
= NULL
, clist
= NULL
, new_body
;
7943 stmt
= as_a
<gomp_sections
*> (gsi_stmt (*gsi_p
));
7945 push_gimplify_context ();
7951 = omp_task_reductions_find_first (gimple_omp_sections_clauses (stmt
),
7952 OMP_SECTIONS
, OMP_CLAUSE_REDUCTION
);
7953 tree rtmp
= NULL_TREE
;
7956 tree type
= build_pointer_type (pointer_sized_int_node
);
7957 tree temp
= create_tmp_var (type
);
7958 tree c
= build_omp_clause (UNKNOWN_LOCATION
, OMP_CLAUSE__REDUCTEMP_
);
7959 OMP_CLAUSE_DECL (c
) = temp
;
7960 OMP_CLAUSE_CHAIN (c
) = gimple_omp_sections_clauses (stmt
);
7961 gimple_omp_sections_set_clauses (stmt
, c
);
7962 lower_omp_task_reductions (ctx
, OMP_SECTIONS
,
7963 gimple_omp_sections_clauses (stmt
),
7964 &ilist
, &tred_dlist
);
7966 rtmp
= make_ssa_name (type
);
7967 gimple_seq_add_stmt (&ilist
, gimple_build_assign (rtmp
, temp
));
7970 tree
*clauses_ptr
= gimple_omp_sections_clauses_ptr (stmt
);
7971 lower_lastprivate_conditional_clauses (clauses_ptr
, ctx
);
7973 lower_rec_input_clauses (gimple_omp_sections_clauses (stmt
),
7974 &ilist
, &dlist
, ctx
, NULL
);
7976 control
= create_tmp_var (unsigned_type_node
, ".section");
7977 gimple_omp_sections_set_control (stmt
, control
);
7979 new_body
= gimple_omp_body (stmt
);
7980 gimple_omp_set_body (stmt
, NULL
);
7981 tgsi
= gsi_start (new_body
);
7982 for (; !gsi_end_p (tgsi
); gsi_next (&tgsi
))
7987 sec_start
= gsi_stmt (tgsi
);
7988 sctx
= maybe_lookup_ctx (sec_start
);
7991 lower_omp (gimple_omp_body_ptr (sec_start
), sctx
);
7992 gsi_insert_seq_after (&tgsi
, gimple_omp_body (sec_start
),
7993 GSI_CONTINUE_LINKING
);
7994 gimple_omp_set_body (sec_start
, NULL
);
7996 if (gsi_one_before_end_p (tgsi
))
7998 gimple_seq l
= NULL
;
7999 lower_lastprivate_clauses (gimple_omp_sections_clauses (stmt
), NULL
,
8000 &ilist
, &l
, &clist
, ctx
);
8001 gsi_insert_seq_after (&tgsi
, l
, GSI_CONTINUE_LINKING
);
8002 gimple_omp_section_set_last (sec_start
);
8005 gsi_insert_after (&tgsi
, gimple_build_omp_return (false),
8006 GSI_CONTINUE_LINKING
);
8009 block
= make_node (BLOCK
);
8010 bind
= gimple_build_bind (NULL
, new_body
, block
);
8013 lower_reduction_clauses (gimple_omp_sections_clauses (stmt
), &olist
,
8017 tree fndecl
= builtin_decl_explicit (BUILT_IN_GOMP_ATOMIC_START
);
8018 gcall
*g
= gimple_build_call (fndecl
, 0);
8019 gimple_seq_add_stmt (&olist
, g
);
8020 gimple_seq_add_seq (&olist
, clist
);
8021 fndecl
= builtin_decl_explicit (BUILT_IN_GOMP_ATOMIC_END
);
8022 g
= gimple_build_call (fndecl
, 0);
8023 gimple_seq_add_stmt (&olist
, g
);
8026 block
= make_node (BLOCK
);
8027 new_stmt
= gimple_build_bind (NULL
, NULL
, block
);
8028 gsi_replace (gsi_p
, new_stmt
, true);
8030 pop_gimplify_context (new_stmt
);
8031 gimple_bind_append_vars (new_stmt
, ctx
->block_vars
);
8032 BLOCK_VARS (block
) = gimple_bind_vars (bind
);
8033 if (BLOCK_VARS (block
))
8034 TREE_USED (block
) = 1;
8037 gimple_seq_add_seq (&new_body
, ilist
);
8038 gimple_seq_add_stmt (&new_body
, stmt
);
8039 gimple_seq_add_stmt (&new_body
, gimple_build_omp_sections_switch ());
8040 gimple_seq_add_stmt (&new_body
, bind
);
8042 t
= gimple_build_omp_continue (control
, control
);
8043 gimple_seq_add_stmt (&new_body
, t
);
8045 gimple_seq_add_seq (&new_body
, olist
);
8046 if (ctx
->cancellable
)
8047 gimple_seq_add_stmt (&new_body
, gimple_build_label (ctx
->cancel_label
));
8048 gimple_seq_add_seq (&new_body
, dlist
);
8050 new_body
= maybe_catch_exception (new_body
);
8052 bool nowait
= omp_find_clause (gimple_omp_sections_clauses (stmt
),
8053 OMP_CLAUSE_NOWAIT
) != NULL_TREE
;
8054 t
= gimple_build_omp_return (nowait
);
8055 gimple_seq_add_stmt (&new_body
, t
);
8056 gimple_seq_add_seq (&new_body
, tred_dlist
);
8057 maybe_add_implicit_barrier_cancel (ctx
, t
, &new_body
);
8060 OMP_CLAUSE_DECL (rclauses
) = rtmp
;
8062 gimple_bind_set_body (new_stmt
, new_body
);
8066 /* A subroutine of lower_omp_single. Expand the simple form of
8067 a GIMPLE_OMP_SINGLE, without a copyprivate clause:
8069 if (GOMP_single_start ())
8071 [ GOMP_barrier (); ] -> unless 'nowait' is present.
8073 FIXME. It may be better to delay expanding the logic of this until
8074 pass_expand_omp. The expanded logic may make the job more difficult
8075 to a synchronization analysis pass. */
8078 lower_omp_single_simple (gomp_single
*single_stmt
, gimple_seq
*pre_p
)
8080 location_t loc
= gimple_location (single_stmt
);
8081 tree tlabel
= create_artificial_label (loc
);
8082 tree flabel
= create_artificial_label (loc
);
8083 gimple
*call
, *cond
;
8086 decl
= builtin_decl_explicit (BUILT_IN_GOMP_SINGLE_START
);
8087 lhs
= create_tmp_var (TREE_TYPE (TREE_TYPE (decl
)));
8088 call
= gimple_build_call (decl
, 0);
8089 gimple_call_set_lhs (call
, lhs
);
8090 gimple_seq_add_stmt (pre_p
, call
);
8092 cond
= gimple_build_cond (EQ_EXPR
, lhs
,
8093 fold_convert_loc (loc
, TREE_TYPE (lhs
),
8096 gimple_seq_add_stmt (pre_p
, cond
);
8097 gimple_seq_add_stmt (pre_p
, gimple_build_label (tlabel
));
8098 gimple_seq_add_seq (pre_p
, gimple_omp_body (single_stmt
));
8099 gimple_seq_add_stmt (pre_p
, gimple_build_label (flabel
));
8103 /* A subroutine of lower_omp_single. Expand the simple form of
8104 a GIMPLE_OMP_SINGLE, with a copyprivate clause:
8106 #pragma omp single copyprivate (a, b, c)
8108 Create a new structure to hold copies of 'a', 'b' and 'c' and emit:
8111 if ((copyout_p = GOMP_single_copy_start ()) == NULL)
8117 GOMP_single_copy_end (©out);
8128 FIXME. It may be better to delay expanding the logic of this until
8129 pass_expand_omp. The expanded logic may make the job more difficult
8130 to a synchronization analysis pass. */
8133 lower_omp_single_copy (gomp_single
*single_stmt
, gimple_seq
*pre_p
,
8136 tree ptr_type
, t
, l0
, l1
, l2
, bfn_decl
;
8137 gimple_seq copyin_seq
;
8138 location_t loc
= gimple_location (single_stmt
);
8140 ctx
->sender_decl
= create_tmp_var (ctx
->record_type
, ".omp_copy_o");
8142 ptr_type
= build_pointer_type (ctx
->record_type
);
8143 ctx
->receiver_decl
= create_tmp_var (ptr_type
, ".omp_copy_i");
8145 l0
= create_artificial_label (loc
);
8146 l1
= create_artificial_label (loc
);
8147 l2
= create_artificial_label (loc
);
8149 bfn_decl
= builtin_decl_explicit (BUILT_IN_GOMP_SINGLE_COPY_START
);
8150 t
= build_call_expr_loc (loc
, bfn_decl
, 0);
8151 t
= fold_convert_loc (loc
, ptr_type
, t
);
8152 gimplify_assign (ctx
->receiver_decl
, t
, pre_p
);
8154 t
= build2 (EQ_EXPR
, boolean_type_node
, ctx
->receiver_decl
,
8155 build_int_cst (ptr_type
, 0));
8156 t
= build3 (COND_EXPR
, void_type_node
, t
,
8157 build_and_jump (&l0
), build_and_jump (&l1
));
8158 gimplify_and_add (t
, pre_p
);
8160 gimple_seq_add_stmt (pre_p
, gimple_build_label (l0
));
8162 gimple_seq_add_seq (pre_p
, gimple_omp_body (single_stmt
));
8165 lower_copyprivate_clauses (gimple_omp_single_clauses (single_stmt
), pre_p
,
8168 t
= build_fold_addr_expr_loc (loc
, ctx
->sender_decl
);
8169 bfn_decl
= builtin_decl_explicit (BUILT_IN_GOMP_SINGLE_COPY_END
);
8170 t
= build_call_expr_loc (loc
, bfn_decl
, 1, t
);
8171 gimplify_and_add (t
, pre_p
);
8173 t
= build_and_jump (&l2
);
8174 gimplify_and_add (t
, pre_p
);
8176 gimple_seq_add_stmt (pre_p
, gimple_build_label (l1
));
8178 gimple_seq_add_seq (pre_p
, copyin_seq
);
8180 gimple_seq_add_stmt (pre_p
, gimple_build_label (l2
));
8184 /* Expand code for an OpenMP single directive. */
8187 lower_omp_single (gimple_stmt_iterator
*gsi_p
, omp_context
*ctx
)
8190 gomp_single
*single_stmt
= as_a
<gomp_single
*> (gsi_stmt (*gsi_p
));
8192 gimple_seq bind_body
, bind_body_tail
= NULL
, dlist
;
8194 push_gimplify_context ();
8196 block
= make_node (BLOCK
);
8197 bind
= gimple_build_bind (NULL
, NULL
, block
);
8198 gsi_replace (gsi_p
, bind
, true);
8201 lower_rec_input_clauses (gimple_omp_single_clauses (single_stmt
),
8202 &bind_body
, &dlist
, ctx
, NULL
);
8203 lower_omp (gimple_omp_body_ptr (single_stmt
), ctx
);
8205 gimple_seq_add_stmt (&bind_body
, single_stmt
);
8207 if (ctx
->record_type
)
8208 lower_omp_single_copy (single_stmt
, &bind_body
, ctx
);
8210 lower_omp_single_simple (single_stmt
, &bind_body
);
8212 gimple_omp_set_body (single_stmt
, NULL
);
8214 gimple_seq_add_seq (&bind_body
, dlist
);
8216 bind_body
= maybe_catch_exception (bind_body
);
8218 bool nowait
= omp_find_clause (gimple_omp_single_clauses (single_stmt
),
8219 OMP_CLAUSE_NOWAIT
) != NULL_TREE
;
8220 gimple
*g
= gimple_build_omp_return (nowait
);
8221 gimple_seq_add_stmt (&bind_body_tail
, g
);
8222 maybe_add_implicit_barrier_cancel (ctx
, g
, &bind_body_tail
);
8223 if (ctx
->record_type
)
8225 gimple_stmt_iterator gsi
= gsi_start (bind_body_tail
);
8226 tree clobber
= build_clobber (ctx
->record_type
);
8227 gsi_insert_after (&gsi
, gimple_build_assign (ctx
->sender_decl
,
8228 clobber
), GSI_SAME_STMT
);
8230 gimple_seq_add_seq (&bind_body
, bind_body_tail
);
8231 gimple_bind_set_body (bind
, bind_body
);
8233 pop_gimplify_context (bind
);
8235 gimple_bind_append_vars (bind
, ctx
->block_vars
);
8236 BLOCK_VARS (block
) = ctx
->block_vars
;
8237 if (BLOCK_VARS (block
))
8238 TREE_USED (block
) = 1;
8242 /* Expand code for an OpenMP master directive. */
8245 lower_omp_master (gimple_stmt_iterator
*gsi_p
, omp_context
*ctx
)
8247 tree block
, lab
= NULL
, x
, bfn_decl
;
8248 gimple
*stmt
= gsi_stmt (*gsi_p
);
8250 location_t loc
= gimple_location (stmt
);
8253 push_gimplify_context ();
8255 block
= make_node (BLOCK
);
8256 bind
= gimple_build_bind (NULL
, NULL
, block
);
8257 gsi_replace (gsi_p
, bind
, true);
8258 gimple_bind_add_stmt (bind
, stmt
);
8260 bfn_decl
= builtin_decl_explicit (BUILT_IN_OMP_GET_THREAD_NUM
);
8261 x
= build_call_expr_loc (loc
, bfn_decl
, 0);
8262 x
= build2 (EQ_EXPR
, boolean_type_node
, x
, integer_zero_node
);
8263 x
= build3 (COND_EXPR
, void_type_node
, x
, NULL
, build_and_jump (&lab
));
8265 gimplify_and_add (x
, &tseq
);
8266 gimple_bind_add_seq (bind
, tseq
);
8268 lower_omp (gimple_omp_body_ptr (stmt
), ctx
);
8269 gimple_omp_set_body (stmt
, maybe_catch_exception (gimple_omp_body (stmt
)));
8270 gimple_bind_add_seq (bind
, gimple_omp_body (stmt
));
8271 gimple_omp_set_body (stmt
, NULL
);
8273 gimple_bind_add_stmt (bind
, gimple_build_label (lab
));
8275 gimple_bind_add_stmt (bind
, gimple_build_omp_return (true));
8277 pop_gimplify_context (bind
);
8279 gimple_bind_append_vars (bind
, ctx
->block_vars
);
8280 BLOCK_VARS (block
) = ctx
->block_vars
;
8283 /* Helper function for lower_omp_task_reductions. For a specific PASS
8284 find out the current clause it should be processed, or return false
8285 if all have been processed already. */
8288 omp_task_reduction_iterate (int pass
, enum tree_code code
,
8289 enum omp_clause_code ccode
, tree
*c
, tree
*decl
,
8290 tree
*type
, tree
*next
)
8292 for (; *c
; *c
= omp_find_clause (OMP_CLAUSE_CHAIN (*c
), ccode
))
8294 if (ccode
== OMP_CLAUSE_REDUCTION
8295 && code
!= OMP_TASKLOOP
8296 && !OMP_CLAUSE_REDUCTION_TASK (*c
))
8298 *decl
= OMP_CLAUSE_DECL (*c
);
8299 *type
= TREE_TYPE (*decl
);
8300 if (TREE_CODE (*decl
) == MEM_REF
)
8307 if (omp_is_reference (*decl
))
8308 *type
= TREE_TYPE (*type
);
8309 if (pass
!= (!TREE_CONSTANT (TYPE_SIZE_UNIT (*type
))))
8312 *next
= omp_find_clause (OMP_CLAUSE_CHAIN (*c
), ccode
);
8321 /* Lower task_reduction and reduction clauses (the latter unless CODE is
8322 OMP_TASKGROUP only with task modifier). Register mapping of those in
8323 START sequence and reducing them and unregister them in the END sequence. */
8326 lower_omp_task_reductions (omp_context
*ctx
, enum tree_code code
, tree clauses
,
8327 gimple_seq
*start
, gimple_seq
*end
)
8329 enum omp_clause_code ccode
8330 = (code
== OMP_TASKGROUP
8331 ? OMP_CLAUSE_TASK_REDUCTION
: OMP_CLAUSE_REDUCTION
);
8332 tree cancellable
= NULL_TREE
;
8333 clauses
= omp_task_reductions_find_first (clauses
, code
, ccode
);
8334 if (clauses
== NULL_TREE
)
8336 if (code
== OMP_FOR
|| code
== OMP_SECTIONS
)
8338 for (omp_context
*outer
= ctx
->outer
; outer
; outer
= outer
->outer
)
8339 if (gimple_code (outer
->stmt
) == GIMPLE_OMP_PARALLEL
8340 && outer
->cancellable
)
8342 cancellable
= error_mark_node
;
8345 else if (gimple_code (outer
->stmt
) != GIMPLE_OMP_TASKGROUP
)
8348 tree record_type
= lang_hooks
.types
.make_type (RECORD_TYPE
);
8349 tree
*last
= &TYPE_FIELDS (record_type
);
8353 tree field
= build_decl (UNKNOWN_LOCATION
, FIELD_DECL
, NULL_TREE
,
8355 tree ifield
= build_decl (UNKNOWN_LOCATION
, FIELD_DECL
, NULL_TREE
,
8358 DECL_CHAIN (field
) = ifield
;
8359 last
= &DECL_CHAIN (ifield
);
8360 DECL_CONTEXT (field
) = record_type
;
8361 if (TYPE_ALIGN (record_type
) < DECL_ALIGN (field
))
8362 SET_TYPE_ALIGN (record_type
, DECL_ALIGN (field
));
8363 DECL_CONTEXT (ifield
) = record_type
;
8364 if (TYPE_ALIGN (record_type
) < DECL_ALIGN (ifield
))
8365 SET_TYPE_ALIGN (record_type
, DECL_ALIGN (ifield
));
8367 for (int pass
= 0; pass
< 2; pass
++)
8369 tree decl
, type
, next
;
8370 for (tree c
= clauses
;
8371 omp_task_reduction_iterate (pass
, code
, ccode
,
8372 &c
, &decl
, &type
, &next
); c
= next
)
8375 tree new_type
= type
;
8377 new_type
= remap_type (type
, &ctx
->outer
->cb
);
8379 = build_decl (OMP_CLAUSE_LOCATION (c
), FIELD_DECL
,
8380 DECL_P (decl
) ? DECL_NAME (decl
) : NULL_TREE
,
8382 if (DECL_P (decl
) && type
== TREE_TYPE (decl
))
8384 SET_DECL_ALIGN (field
, DECL_ALIGN (decl
));
8385 DECL_USER_ALIGN (field
) = DECL_USER_ALIGN (decl
);
8386 TREE_THIS_VOLATILE (field
) = TREE_THIS_VOLATILE (decl
);
8389 SET_DECL_ALIGN (field
, TYPE_ALIGN (type
));
8390 DECL_CONTEXT (field
) = record_type
;
8391 if (TYPE_ALIGN (record_type
) < DECL_ALIGN (field
))
8392 SET_TYPE_ALIGN (record_type
, DECL_ALIGN (field
));
8394 last
= &DECL_CHAIN (field
);
8396 = build_decl (OMP_CLAUSE_LOCATION (c
), FIELD_DECL
, NULL_TREE
,
8398 DECL_CONTEXT (bfield
) = record_type
;
8399 if (TYPE_ALIGN (record_type
) < DECL_ALIGN (bfield
))
8400 SET_TYPE_ALIGN (record_type
, DECL_ALIGN (bfield
));
8402 last
= &DECL_CHAIN (bfield
);
8406 layout_type (record_type
);
8408 /* Build up an array which registers with the runtime all the reductions
8409 and deregisters them at the end. Format documented in libgomp/task.c. */
8410 tree atype
= build_array_type_nelts (pointer_sized_int_node
, 7 + cnt
* 3);
8411 tree avar
= create_tmp_var_raw (atype
);
8412 gimple_add_tmp_var (avar
);
8413 TREE_ADDRESSABLE (avar
) = 1;
8414 tree r
= build4 (ARRAY_REF
, pointer_sized_int_node
, avar
, size_zero_node
,
8415 NULL_TREE
, NULL_TREE
);
8416 tree t
= build_int_cst (pointer_sized_int_node
, cnt
);
8417 gimple_seq_add_stmt (start
, gimple_build_assign (r
, t
));
8418 gimple_seq seq
= NULL
;
8419 tree sz
= fold_convert (pointer_sized_int_node
,
8420 TYPE_SIZE_UNIT (record_type
));
8422 sz
= fold_build2 (PLUS_EXPR
, pointer_sized_int_node
, sz
,
8423 build_int_cst (pointer_sized_int_node
, cachesz
- 1));
8424 sz
= fold_build2 (BIT_AND_EXPR
, pointer_sized_int_node
, sz
,
8425 build_int_cst (pointer_sized_int_node
, ~(cachesz
- 1)));
8426 ctx
->task_reductions
.create (1 + cnt
);
8427 ctx
->task_reduction_map
= new hash_map
<tree
, unsigned>;
8428 ctx
->task_reductions
.quick_push (TREE_CODE (sz
) == INTEGER_CST
8430 sz
= force_gimple_operand (sz
, &seq
, true, NULL_TREE
);
8431 gimple_seq_add_seq (start
, seq
);
8432 r
= build4 (ARRAY_REF
, pointer_sized_int_node
, avar
, size_one_node
,
8433 NULL_TREE
, NULL_TREE
);
8434 gimple_seq_add_stmt (start
, gimple_build_assign (r
, sz
));
8435 r
= build4 (ARRAY_REF
, pointer_sized_int_node
, avar
, size_int (2),
8436 NULL_TREE
, NULL_TREE
);
8437 t
= build_int_cst (pointer_sized_int_node
,
8438 MAX (TYPE_ALIGN_UNIT (record_type
), (unsigned) cachesz
));
8439 gimple_seq_add_stmt (start
, gimple_build_assign (r
, t
));
8440 r
= build4 (ARRAY_REF
, pointer_sized_int_node
, avar
, size_int (3),
8441 NULL_TREE
, NULL_TREE
);
8442 t
= build_int_cst (pointer_sized_int_node
, -1);
8443 gimple_seq_add_stmt (start
, gimple_build_assign (r
, t
));
8444 r
= build4 (ARRAY_REF
, pointer_sized_int_node
, avar
, size_int (4),
8445 NULL_TREE
, NULL_TREE
);
8446 t
= build_int_cst (pointer_sized_int_node
, 0);
8447 gimple_seq_add_stmt (start
, gimple_build_assign (r
, t
));
8449 /* In end, build a loop that iterates from 0 to < omp_get_num_threads ()
8450 and for each task reduction checks a bool right after the private variable
8451 within that thread's chunk; if the bool is clear, it hasn't been
8452 initialized and thus isn't going to be reduced nor destructed, otherwise
8453 reduce and destruct it. */
8454 tree idx
= create_tmp_var (size_type_node
);
8455 gimple_seq_add_stmt (end
, gimple_build_assign (idx
, size_zero_node
));
8456 tree num_thr_sz
= create_tmp_var (size_type_node
);
8457 tree lab1
= create_artificial_label (UNKNOWN_LOCATION
);
8458 tree lab2
= create_artificial_label (UNKNOWN_LOCATION
);
8459 tree lab3
= NULL_TREE
;
8461 if (code
== OMP_FOR
|| code
== OMP_SECTIONS
)
8463 /* For worksharing constructs, only perform it in the master thread,
8464 with the exception of cancelled implicit barriers - then only handle
8465 the current thread. */
8466 tree lab4
= create_artificial_label (UNKNOWN_LOCATION
);
8467 t
= builtin_decl_explicit (BUILT_IN_OMP_GET_THREAD_NUM
);
8468 tree thr_num
= create_tmp_var (integer_type_node
);
8469 g
= gimple_build_call (t
, 0);
8470 gimple_call_set_lhs (g
, thr_num
);
8471 gimple_seq_add_stmt (end
, g
);
8475 tree lab5
= create_artificial_label (UNKNOWN_LOCATION
);
8476 tree lab6
= create_artificial_label (UNKNOWN_LOCATION
);
8477 lab3
= create_artificial_label (UNKNOWN_LOCATION
);
8478 if (code
== OMP_FOR
)
8479 c
= gimple_omp_for_clauses (ctx
->stmt
);
8480 else /* if (code == OMP_SECTIONS) */
8481 c
= gimple_omp_sections_clauses (ctx
->stmt
);
8482 c
= OMP_CLAUSE_DECL (omp_find_clause (c
, OMP_CLAUSE__REDUCTEMP_
));
8484 g
= gimple_build_cond (NE_EXPR
, c
, build_zero_cst (TREE_TYPE (c
)),
8486 gimple_seq_add_stmt (end
, g
);
8487 gimple_seq_add_stmt (end
, gimple_build_label (lab5
));
8488 g
= gimple_build_assign (idx
, NOP_EXPR
, thr_num
);
8489 gimple_seq_add_stmt (end
, g
);
8490 g
= gimple_build_assign (num_thr_sz
, PLUS_EXPR
, idx
,
8491 build_one_cst (TREE_TYPE (idx
)));
8492 gimple_seq_add_stmt (end
, g
);
8493 gimple_seq_add_stmt (end
, gimple_build_goto (lab3
));
8494 gimple_seq_add_stmt (end
, gimple_build_label (lab6
));
8496 g
= gimple_build_cond (NE_EXPR
, thr_num
, integer_zero_node
, lab2
, lab4
);
8497 gimple_seq_add_stmt (end
, g
);
8498 gimple_seq_add_stmt (end
, gimple_build_label (lab4
));
8500 if (code
!= OMP_PARALLEL
)
8502 t
= builtin_decl_explicit (BUILT_IN_OMP_GET_NUM_THREADS
);
8503 tree num_thr
= create_tmp_var (integer_type_node
);
8504 g
= gimple_build_call (t
, 0);
8505 gimple_call_set_lhs (g
, num_thr
);
8506 gimple_seq_add_stmt (end
, g
);
8507 g
= gimple_build_assign (num_thr_sz
, NOP_EXPR
, num_thr
);
8508 gimple_seq_add_stmt (end
, g
);
8510 gimple_seq_add_stmt (end
, gimple_build_label (lab3
));
8514 tree c
= omp_find_clause (gimple_omp_parallel_clauses (ctx
->stmt
),
8515 OMP_CLAUSE__REDUCTEMP_
);
8516 t
= fold_convert (pointer_sized_int_node
, OMP_CLAUSE_DECL (c
));
8517 t
= fold_convert (size_type_node
, t
);
8518 gimplify_assign (num_thr_sz
, t
, end
);
8520 t
= build4 (ARRAY_REF
, pointer_sized_int_node
, avar
, size_int (2),
8521 NULL_TREE
, NULL_TREE
);
8522 tree data
= create_tmp_var (pointer_sized_int_node
);
8523 gimple_seq_add_stmt (end
, gimple_build_assign (data
, t
));
8524 gimple_seq_add_stmt (end
, gimple_build_label (lab1
));
8526 if (TREE_CODE (TYPE_SIZE_UNIT (record_type
)) == INTEGER_CST
)
8527 ptr
= create_tmp_var (build_pointer_type (record_type
));
8529 ptr
= create_tmp_var (ptr_type_node
);
8530 gimple_seq_add_stmt (end
, gimple_build_assign (ptr
, NOP_EXPR
, data
));
8532 tree field
= TYPE_FIELDS (record_type
);
8535 field
= DECL_CHAIN (DECL_CHAIN (field
));
8536 for (int pass
= 0; pass
< 2; pass
++)
8538 tree decl
, type
, next
;
8539 for (tree c
= clauses
;
8540 omp_task_reduction_iterate (pass
, code
, ccode
,
8541 &c
, &decl
, &type
, &next
); c
= next
)
8543 tree var
= decl
, ref
;
8544 if (TREE_CODE (decl
) == MEM_REF
)
8546 var
= TREE_OPERAND (var
, 0);
8547 if (TREE_CODE (var
) == POINTER_PLUS_EXPR
)
8548 var
= TREE_OPERAND (var
, 0);
8550 if (TREE_CODE (var
) == ADDR_EXPR
)
8551 var
= TREE_OPERAND (var
, 0);
8552 else if (TREE_CODE (var
) == INDIRECT_REF
)
8553 var
= TREE_OPERAND (var
, 0);
8554 tree orig_var
= var
;
8555 if (is_variable_sized (var
))
8557 gcc_assert (DECL_HAS_VALUE_EXPR_P (var
));
8558 var
= DECL_VALUE_EXPR (var
);
8559 gcc_assert (TREE_CODE (var
) == INDIRECT_REF
);
8560 var
= TREE_OPERAND (var
, 0);
8561 gcc_assert (DECL_P (var
));
8563 t
= ref
= maybe_lookup_decl_in_outer_ctx (var
, ctx
);
8564 if (orig_var
!= var
)
8565 gcc_assert (TREE_CODE (v
) == ADDR_EXPR
);
8566 else if (TREE_CODE (v
) == ADDR_EXPR
)
8567 t
= build_fold_addr_expr (t
);
8568 else if (TREE_CODE (v
) == INDIRECT_REF
)
8569 t
= build_fold_indirect_ref (t
);
8570 if (TREE_CODE (TREE_OPERAND (decl
, 0)) == POINTER_PLUS_EXPR
)
8572 tree b
= TREE_OPERAND (TREE_OPERAND (decl
, 0), 1);
8573 b
= maybe_lookup_decl_in_outer_ctx (b
, ctx
);
8574 t
= fold_build2 (POINTER_PLUS_EXPR
, TREE_TYPE (t
), t
, b
);
8576 if (!integer_zerop (TREE_OPERAND (decl
, 1)))
8577 t
= fold_build2 (POINTER_PLUS_EXPR
, TREE_TYPE (t
), t
,
8578 fold_convert (size_type_node
,
8579 TREE_OPERAND (decl
, 1)));
8583 t
= ref
= maybe_lookup_decl_in_outer_ctx (var
, ctx
);
8584 if (!omp_is_reference (decl
))
8585 t
= build_fold_addr_expr (t
);
8587 t
= fold_convert (pointer_sized_int_node
, t
);
8589 t
= force_gimple_operand (t
, &seq
, true, NULL_TREE
);
8590 gimple_seq_add_seq (start
, seq
);
8591 r
= build4 (ARRAY_REF
, pointer_sized_int_node
, avar
,
8592 size_int (7 + cnt
* 3), NULL_TREE
, NULL_TREE
);
8593 gimple_seq_add_stmt (start
, gimple_build_assign (r
, t
));
8594 t
= unshare_expr (byte_position (field
));
8595 t
= fold_convert (pointer_sized_int_node
, t
);
8596 ctx
->task_reduction_map
->put (c
, cnt
);
8597 ctx
->task_reductions
.quick_push (TREE_CODE (t
) == INTEGER_CST
8600 t
= force_gimple_operand (t
, &seq
, true, NULL_TREE
);
8601 gimple_seq_add_seq (start
, seq
);
8602 r
= build4 (ARRAY_REF
, pointer_sized_int_node
, avar
,
8603 size_int (7 + cnt
* 3 + 1), NULL_TREE
, NULL_TREE
);
8604 gimple_seq_add_stmt (start
, gimple_build_assign (r
, t
));
8606 tree bfield
= DECL_CHAIN (field
);
8608 if (code
== OMP_PARALLEL
|| code
== OMP_FOR
|| code
== OMP_SECTIONS
)
8609 /* In parallel or worksharing all threads unconditionally
8610 initialize all their task reduction private variables. */
8611 cond
= boolean_true_node
;
8612 else if (TREE_TYPE (ptr
) == ptr_type_node
)
8614 cond
= build2 (POINTER_PLUS_EXPR
, ptr_type_node
, ptr
,
8615 unshare_expr (byte_position (bfield
)));
8617 cond
= force_gimple_operand (cond
, &seq
, true, NULL_TREE
);
8618 gimple_seq_add_seq (end
, seq
);
8619 tree pbool
= build_pointer_type (TREE_TYPE (bfield
));
8620 cond
= build2 (MEM_REF
, TREE_TYPE (bfield
), cond
,
8621 build_int_cst (pbool
, 0));
8624 cond
= build3 (COMPONENT_REF
, TREE_TYPE (bfield
),
8625 build_simple_mem_ref (ptr
), bfield
, NULL_TREE
);
8626 tree lab3
= create_artificial_label (UNKNOWN_LOCATION
);
8627 tree lab4
= create_artificial_label (UNKNOWN_LOCATION
);
8628 tree condv
= create_tmp_var (boolean_type_node
);
8629 gimple_seq_add_stmt (end
, gimple_build_assign (condv
, cond
));
8630 g
= gimple_build_cond (NE_EXPR
, condv
, boolean_false_node
,
8632 gimple_seq_add_stmt (end
, g
);
8633 gimple_seq_add_stmt (end
, gimple_build_label (lab3
));
8634 if (cancellable
&& OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
) == NULL_TREE
)
8636 /* If this reduction doesn't need destruction and parallel
8637 has been cancelled, there is nothing to do for this
8638 reduction, so jump around the merge operation. */
8639 tree lab5
= create_artificial_label (UNKNOWN_LOCATION
);
8640 g
= gimple_build_cond (NE_EXPR
, cancellable
,
8641 build_zero_cst (TREE_TYPE (cancellable
)),
8643 gimple_seq_add_stmt (end
, g
);
8644 gimple_seq_add_stmt (end
, gimple_build_label (lab5
));
8648 if (TREE_TYPE (ptr
) == ptr_type_node
)
8650 new_var
= build2 (POINTER_PLUS_EXPR
, ptr_type_node
, ptr
,
8651 unshare_expr (byte_position (field
)));
8653 new_var
= force_gimple_operand (new_var
, &seq
, true, NULL_TREE
);
8654 gimple_seq_add_seq (end
, seq
);
8655 tree pbool
= build_pointer_type (TREE_TYPE (field
));
8656 new_var
= build2 (MEM_REF
, TREE_TYPE (field
), new_var
,
8657 build_int_cst (pbool
, 0));
8660 new_var
= build3 (COMPONENT_REF
, TREE_TYPE (field
),
8661 build_simple_mem_ref (ptr
), field
, NULL_TREE
);
8663 enum tree_code rcode
= OMP_CLAUSE_REDUCTION_CODE (c
);
8664 if (TREE_CODE (decl
) != MEM_REF
&& omp_is_reference (decl
))
8665 ref
= build_simple_mem_ref (ref
);
8666 /* reduction(-:var) sums up the partial results, so it acts
8667 identically to reduction(+:var). */
8668 if (rcode
== MINUS_EXPR
)
8670 if (TREE_CODE (decl
) == MEM_REF
)
8672 tree type
= TREE_TYPE (new_var
);
8673 tree v
= TYPE_MAX_VALUE (TYPE_DOMAIN (type
));
8674 tree i
= create_tmp_var (TREE_TYPE (v
));
8675 tree ptype
= build_pointer_type (TREE_TYPE (type
));
8678 v
= maybe_lookup_decl_in_outer_ctx (v
, ctx
);
8679 tree vv
= create_tmp_var (TREE_TYPE (v
));
8680 gimplify_assign (vv
, v
, start
);
8683 ref
= build4 (ARRAY_REF
, pointer_sized_int_node
, avar
,
8684 size_int (7 + cnt
* 3), NULL_TREE
, NULL_TREE
);
8685 new_var
= build_fold_addr_expr (new_var
);
8686 new_var
= fold_convert (ptype
, new_var
);
8687 ref
= fold_convert (ptype
, ref
);
8688 tree m
= create_tmp_var (ptype
);
8689 gimplify_assign (m
, new_var
, end
);
8691 m
= create_tmp_var (ptype
);
8692 gimplify_assign (m
, ref
, end
);
8694 gimplify_assign (i
, build_int_cst (TREE_TYPE (v
), 0), end
);
8695 tree body
= create_artificial_label (UNKNOWN_LOCATION
);
8696 tree endl
= create_artificial_label (UNKNOWN_LOCATION
);
8697 gimple_seq_add_stmt (end
, gimple_build_label (body
));
8698 tree priv
= build_simple_mem_ref (new_var
);
8699 tree out
= build_simple_mem_ref (ref
);
8700 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
))
8702 tree placeholder
= OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
);
8703 tree decl_placeholder
8704 = OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c
);
8705 tree lab6
= NULL_TREE
;
8708 /* If this reduction needs destruction and parallel
8709 has been cancelled, jump around the merge operation
8710 to the destruction. */
8711 tree lab5
= create_artificial_label (UNKNOWN_LOCATION
);
8712 lab6
= create_artificial_label (UNKNOWN_LOCATION
);
8713 tree zero
= build_zero_cst (TREE_TYPE (cancellable
));
8714 g
= gimple_build_cond (NE_EXPR
, cancellable
, zero
,
8716 gimple_seq_add_stmt (end
, g
);
8717 gimple_seq_add_stmt (end
, gimple_build_label (lab5
));
8719 SET_DECL_VALUE_EXPR (placeholder
, out
);
8720 DECL_HAS_VALUE_EXPR_P (placeholder
) = 1;
8721 SET_DECL_VALUE_EXPR (decl_placeholder
, priv
);
8722 DECL_HAS_VALUE_EXPR_P (decl_placeholder
) = 1;
8723 lower_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c
), ctx
);
8724 gimple_seq_add_seq (end
,
8725 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c
));
8726 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c
) = NULL
;
8727 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_TASK_REDUCTION
)
8729 OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
) = NULL
;
8730 OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c
) = NULL
;
8733 gimple_seq_add_stmt (end
, gimple_build_label (lab6
));
8734 tree x
= lang_hooks
.decls
.omp_clause_dtor (c
, priv
);
8737 gimple_seq tseq
= NULL
;
8738 gimplify_stmt (&x
, &tseq
);
8739 gimple_seq_add_seq (end
, tseq
);
8744 tree x
= build2 (rcode
, TREE_TYPE (out
), out
, priv
);
8745 out
= unshare_expr (out
);
8746 gimplify_assign (out
, x
, end
);
8749 = gimple_build_assign (new_var
, POINTER_PLUS_EXPR
, new_var
,
8750 TYPE_SIZE_UNIT (TREE_TYPE (type
)));
8751 gimple_seq_add_stmt (end
, g
);
8752 g
= gimple_build_assign (ref
, POINTER_PLUS_EXPR
, ref
,
8753 TYPE_SIZE_UNIT (TREE_TYPE (type
)));
8754 gimple_seq_add_stmt (end
, g
);
8755 g
= gimple_build_assign (i
, PLUS_EXPR
, i
,
8756 build_int_cst (TREE_TYPE (i
), 1));
8757 gimple_seq_add_stmt (end
, g
);
8758 g
= gimple_build_cond (LE_EXPR
, i
, v
, body
, endl
);
8759 gimple_seq_add_stmt (end
, g
);
8760 gimple_seq_add_stmt (end
, gimple_build_label (endl
));
8762 else if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
))
8764 tree placeholder
= OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
);
8765 tree oldv
= NULL_TREE
;
8766 tree lab6
= NULL_TREE
;
8769 /* If this reduction needs destruction and parallel
8770 has been cancelled, jump around the merge operation
8771 to the destruction. */
8772 tree lab5
= create_artificial_label (UNKNOWN_LOCATION
);
8773 lab6
= create_artificial_label (UNKNOWN_LOCATION
);
8774 tree zero
= build_zero_cst (TREE_TYPE (cancellable
));
8775 g
= gimple_build_cond (NE_EXPR
, cancellable
, zero
,
8777 gimple_seq_add_stmt (end
, g
);
8778 gimple_seq_add_stmt (end
, gimple_build_label (lab5
));
8780 if (omp_is_reference (decl
)
8781 && !useless_type_conversion_p (TREE_TYPE (placeholder
),
8783 ref
= build_fold_addr_expr_loc (OMP_CLAUSE_LOCATION (c
), ref
);
8784 ref
= build_fold_addr_expr_loc (OMP_CLAUSE_LOCATION (c
), ref
);
8785 tree refv
= create_tmp_var (TREE_TYPE (ref
));
8786 gimplify_assign (refv
, ref
, end
);
8787 ref
= build_simple_mem_ref_loc (OMP_CLAUSE_LOCATION (c
), refv
);
8788 SET_DECL_VALUE_EXPR (placeholder
, ref
);
8789 DECL_HAS_VALUE_EXPR_P (placeholder
) = 1;
8790 tree d
= maybe_lookup_decl (decl
, ctx
);
8792 if (DECL_HAS_VALUE_EXPR_P (d
))
8793 oldv
= DECL_VALUE_EXPR (d
);
8794 if (omp_is_reference (var
))
8796 tree v
= fold_convert (TREE_TYPE (d
),
8797 build_fold_addr_expr (new_var
));
8798 SET_DECL_VALUE_EXPR (d
, v
);
8801 SET_DECL_VALUE_EXPR (d
, new_var
);
8802 DECL_HAS_VALUE_EXPR_P (d
) = 1;
8803 lower_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c
), ctx
);
8805 SET_DECL_VALUE_EXPR (d
, oldv
);
8808 SET_DECL_VALUE_EXPR (d
, NULL_TREE
);
8809 DECL_HAS_VALUE_EXPR_P (d
) = 0;
8811 gimple_seq_add_seq (end
, OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c
));
8812 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c
) = NULL
;
8813 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_TASK_REDUCTION
)
8814 OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
) = NULL
;
8816 gimple_seq_add_stmt (end
, gimple_build_label (lab6
));
8817 tree x
= lang_hooks
.decls
.omp_clause_dtor (c
, new_var
);
8820 gimple_seq tseq
= NULL
;
8821 gimplify_stmt (&x
, &tseq
);
8822 gimple_seq_add_seq (end
, tseq
);
8827 tree x
= build2 (rcode
, TREE_TYPE (ref
), ref
, new_var
);
8828 ref
= unshare_expr (ref
);
8829 gimplify_assign (ref
, x
, end
);
8831 gimple_seq_add_stmt (end
, gimple_build_label (lab4
));
8833 field
= DECL_CHAIN (bfield
);
8837 if (code
== OMP_TASKGROUP
)
8839 t
= builtin_decl_explicit (BUILT_IN_GOMP_TASKGROUP_REDUCTION_REGISTER
);
8840 g
= gimple_build_call (t
, 1, build_fold_addr_expr (avar
));
8841 gimple_seq_add_stmt (start
, g
);
8846 if (code
== OMP_FOR
)
8847 c
= gimple_omp_for_clauses (ctx
->stmt
);
8848 else if (code
== OMP_SECTIONS
)
8849 c
= gimple_omp_sections_clauses (ctx
->stmt
);
8851 c
= gimple_omp_taskreg_clauses (ctx
->stmt
);
8852 c
= omp_find_clause (c
, OMP_CLAUSE__REDUCTEMP_
);
8853 t
= fold_convert (TREE_TYPE (OMP_CLAUSE_DECL (c
)),
8854 build_fold_addr_expr (avar
));
8855 gimplify_assign (OMP_CLAUSE_DECL (c
), t
, start
);
8858 gimple_seq_add_stmt (end
, gimple_build_assign (data
, PLUS_EXPR
, data
, sz
));
8859 gimple_seq_add_stmt (end
, gimple_build_assign (idx
, PLUS_EXPR
, idx
,
8861 g
= gimple_build_cond (NE_EXPR
, idx
, num_thr_sz
, lab1
, lab2
);
8862 gimple_seq_add_stmt (end
, g
);
8863 gimple_seq_add_stmt (end
, gimple_build_label (lab2
));
8864 if (code
== OMP_FOR
|| code
== OMP_SECTIONS
)
8866 enum built_in_function bfn
8867 = BUILT_IN_GOMP_WORKSHARE_TASK_REDUCTION_UNREGISTER
;
8868 t
= builtin_decl_explicit (bfn
);
8869 tree c_bool_type
= TREE_VALUE (TYPE_ARG_TYPES (TREE_TYPE (t
)));
8873 arg
= create_tmp_var (c_bool_type
);
8874 gimple_seq_add_stmt (end
, gimple_build_assign (arg
, NOP_EXPR
,
8878 arg
= build_int_cst (c_bool_type
, 0);
8879 g
= gimple_build_call (t
, 1, arg
);
8883 t
= builtin_decl_explicit (BUILT_IN_GOMP_TASKGROUP_REDUCTION_UNREGISTER
);
8884 g
= gimple_build_call (t
, 1, build_fold_addr_expr (avar
));
8886 gimple_seq_add_stmt (end
, g
);
8887 t
= build_constructor (atype
, NULL
);
8888 TREE_THIS_VOLATILE (t
) = 1;
8889 gimple_seq_add_stmt (end
, gimple_build_assign (avar
, t
));
8892 /* Expand code for an OpenMP taskgroup directive. */
8895 lower_omp_taskgroup (gimple_stmt_iterator
*gsi_p
, omp_context
*ctx
)
8897 gimple
*stmt
= gsi_stmt (*gsi_p
);
8900 gimple_seq dseq
= NULL
;
8901 tree block
= make_node (BLOCK
);
8903 bind
= gimple_build_bind (NULL
, NULL
, block
);
8904 gsi_replace (gsi_p
, bind
, true);
8905 gimple_bind_add_stmt (bind
, stmt
);
8907 push_gimplify_context ();
8909 x
= gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_TASKGROUP_START
),
8911 gimple_bind_add_stmt (bind
, x
);
8913 lower_omp_task_reductions (ctx
, OMP_TASKGROUP
,
8914 gimple_omp_taskgroup_clauses (stmt
),
8915 gimple_bind_body_ptr (bind
), &dseq
);
8917 lower_omp (gimple_omp_body_ptr (stmt
), ctx
);
8918 gimple_bind_add_seq (bind
, gimple_omp_body (stmt
));
8919 gimple_omp_set_body (stmt
, NULL
);
8921 gimple_bind_add_stmt (bind
, gimple_build_omp_return (true));
8922 gimple_bind_add_seq (bind
, dseq
);
8924 pop_gimplify_context (bind
);
8926 gimple_bind_append_vars (bind
, ctx
->block_vars
);
8927 BLOCK_VARS (block
) = ctx
->block_vars
;
8931 /* Fold the OMP_ORDERED_CLAUSES for the OMP_ORDERED in STMT if possible. */
8934 lower_omp_ordered_clauses (gimple_stmt_iterator
*gsi_p
, gomp_ordered
*ord_stmt
,
8937 struct omp_for_data fd
;
8938 if (!ctx
->outer
|| gimple_code (ctx
->outer
->stmt
) != GIMPLE_OMP_FOR
)
8941 unsigned int len
= gimple_omp_for_collapse (ctx
->outer
->stmt
);
8942 struct omp_for_data_loop
*loops
= XALLOCAVEC (struct omp_for_data_loop
, len
);
8943 omp_extract_for_data (as_a
<gomp_for
*> (ctx
->outer
->stmt
), &fd
, loops
);
8947 tree
*list_p
= gimple_omp_ordered_clauses_ptr (ord_stmt
);
8948 tree c
= gimple_omp_ordered_clauses (ord_stmt
);
8949 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_DEPEND
8950 && OMP_CLAUSE_DEPEND_KIND (c
) == OMP_CLAUSE_DEPEND_SINK
)
8952 /* Merge depend clauses from multiple adjacent
8953 #pragma omp ordered depend(sink:...) constructs
8954 into one #pragma omp ordered depend(sink:...), so that
8955 we can optimize them together. */
8956 gimple_stmt_iterator gsi
= *gsi_p
;
8958 while (!gsi_end_p (gsi
))
8960 gimple
*stmt
= gsi_stmt (gsi
);
8961 if (is_gimple_debug (stmt
)
8962 || gimple_code (stmt
) == GIMPLE_NOP
)
8967 if (gimple_code (stmt
) != GIMPLE_OMP_ORDERED
)
8969 gomp_ordered
*ord_stmt2
= as_a
<gomp_ordered
*> (stmt
);
8970 c
= gimple_omp_ordered_clauses (ord_stmt2
);
8972 || OMP_CLAUSE_CODE (c
) != OMP_CLAUSE_DEPEND
8973 || OMP_CLAUSE_DEPEND_KIND (c
) != OMP_CLAUSE_DEPEND_SINK
)
8976 list_p
= &OMP_CLAUSE_CHAIN (*list_p
);
8978 gsi_remove (&gsi
, true);
8982 /* Canonicalize sink dependence clauses into one folded clause if
8985 The basic algorithm is to create a sink vector whose first
8986 element is the GCD of all the first elements, and whose remaining
8987 elements are the minimum of the subsequent columns.
8989 We ignore dependence vectors whose first element is zero because
8990 such dependencies are known to be executed by the same thread.
8992 We take into account the direction of the loop, so a minimum
8993 becomes a maximum if the loop is iterating forwards. We also
8994 ignore sink clauses where the loop direction is unknown, or where
8995 the offsets are clearly invalid because they are not a multiple
8996 of the loop increment.
9000 #pragma omp for ordered(2)
9001 for (i=0; i < N; ++i)
9002 for (j=0; j < M; ++j)
9004 #pragma omp ordered \
9005 depend(sink:i-8,j-2) \
9006 depend(sink:i,j-1) \ // Completely ignored because i+0.
9007 depend(sink:i-4,j-3) \
9008 depend(sink:i-6,j-4)
9009 #pragma omp ordered depend(source)
9014 depend(sink:-gcd(8,4,6),-min(2,3,4))
9019 /* FIXME: Computing GCD's where the first element is zero is
9020 non-trivial in the presence of collapsed loops. Do this later. */
9021 if (fd
.collapse
> 1)
9024 wide_int
*folded_deps
= XALLOCAVEC (wide_int
, 2 * len
- 1);
9026 /* wide_int is not a POD so it must be default-constructed. */
9027 for (unsigned i
= 0; i
!= 2 * len
- 1; ++i
)
9028 new (static_cast<void*>(folded_deps
+ i
)) wide_int ();
9030 tree folded_dep
= NULL_TREE
;
9031 /* TRUE if the first dimension's offset is negative. */
9032 bool neg_offset_p
= false;
9034 list_p
= gimple_omp_ordered_clauses_ptr (ord_stmt
);
9036 while ((c
= *list_p
) != NULL
)
9038 bool remove
= false;
9040 gcc_assert (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_DEPEND
);
9041 if (OMP_CLAUSE_DEPEND_KIND (c
) != OMP_CLAUSE_DEPEND_SINK
)
9042 goto next_ordered_clause
;
9045 for (vec
= OMP_CLAUSE_DECL (c
), i
= 0;
9046 vec
&& TREE_CODE (vec
) == TREE_LIST
;
9047 vec
= TREE_CHAIN (vec
), ++i
)
9049 gcc_assert (i
< len
);
9051 /* omp_extract_for_data has canonicalized the condition. */
9052 gcc_assert (fd
.loops
[i
].cond_code
== LT_EXPR
9053 || fd
.loops
[i
].cond_code
== GT_EXPR
);
9054 bool forward
= fd
.loops
[i
].cond_code
== LT_EXPR
;
9055 bool maybe_lexically_later
= true;
9057 /* While the committee makes up its mind, bail if we have any
9058 non-constant steps. */
9059 if (TREE_CODE (fd
.loops
[i
].step
) != INTEGER_CST
)
9060 goto lower_omp_ordered_ret
;
9062 tree itype
= TREE_TYPE (TREE_VALUE (vec
));
9063 if (POINTER_TYPE_P (itype
))
9065 wide_int offset
= wide_int::from (wi::to_wide (TREE_PURPOSE (vec
)),
9066 TYPE_PRECISION (itype
),
9069 /* Ignore invalid offsets that are not multiples of the step. */
9070 if (!wi::multiple_of_p (wi::abs (offset
),
9071 wi::abs (wi::to_wide (fd
.loops
[i
].step
)),
9074 warning_at (OMP_CLAUSE_LOCATION (c
), 0,
9075 "ignoring sink clause with offset that is not "
9076 "a multiple of the loop step");
9078 goto next_ordered_clause
;
9081 /* Calculate the first dimension. The first dimension of
9082 the folded dependency vector is the GCD of the first
9083 elements, while ignoring any first elements whose offset
9087 /* Ignore dependence vectors whose first dimension is 0. */
9091 goto next_ordered_clause
;
9095 if (!TYPE_UNSIGNED (itype
) && (forward
^ wi::neg_p (offset
)))
9097 error_at (OMP_CLAUSE_LOCATION (c
),
9098 "first offset must be in opposite direction "
9099 "of loop iterations");
9100 goto lower_omp_ordered_ret
;
9104 neg_offset_p
= forward
;
9105 /* Initialize the first time around. */
9106 if (folded_dep
== NULL_TREE
)
9109 folded_deps
[0] = offset
;
9112 folded_deps
[0] = wi::gcd (folded_deps
[0],
9116 /* Calculate minimum for the remaining dimensions. */
9119 folded_deps
[len
+ i
- 1] = offset
;
9120 if (folded_dep
== c
)
9121 folded_deps
[i
] = offset
;
9122 else if (maybe_lexically_later
9123 && !wi::eq_p (folded_deps
[i
], offset
))
9125 if (forward
^ wi::gts_p (folded_deps
[i
], offset
))
9129 for (j
= 1; j
<= i
; j
++)
9130 folded_deps
[j
] = folded_deps
[len
+ j
- 1];
9133 maybe_lexically_later
= false;
9137 gcc_assert (i
== len
);
9141 next_ordered_clause
:
9143 *list_p
= OMP_CLAUSE_CHAIN (c
);
9145 list_p
= &OMP_CLAUSE_CHAIN (c
);
9151 folded_deps
[0] = -folded_deps
[0];
9153 tree itype
= TREE_TYPE (TREE_VALUE (OMP_CLAUSE_DECL (folded_dep
)));
9154 if (POINTER_TYPE_P (itype
))
9157 TREE_PURPOSE (OMP_CLAUSE_DECL (folded_dep
))
9158 = wide_int_to_tree (itype
, folded_deps
[0]);
9159 OMP_CLAUSE_CHAIN (folded_dep
) = gimple_omp_ordered_clauses (ord_stmt
);
9160 *gimple_omp_ordered_clauses_ptr (ord_stmt
) = folded_dep
;
9163 lower_omp_ordered_ret
:
9165 /* Ordered without clauses is #pragma omp threads, while we want
9166 a nop instead if we remove all clauses. */
9167 if (gimple_omp_ordered_clauses (ord_stmt
) == NULL_TREE
)
9168 gsi_replace (gsi_p
, gimple_build_nop (), true);
9172 /* Expand code for an OpenMP ordered directive. */
9175 lower_omp_ordered (gimple_stmt_iterator
*gsi_p
, omp_context
*ctx
)
9178 gimple
*stmt
= gsi_stmt (*gsi_p
), *g
;
9179 gomp_ordered
*ord_stmt
= as_a
<gomp_ordered
*> (stmt
);
9182 bool simd
= omp_find_clause (gimple_omp_ordered_clauses (ord_stmt
),
9184 /* FIXME: this should check presence of OMP_CLAUSE__SIMT_ on the enclosing
9187 = simd
&& omp_maybe_offloaded_ctx (ctx
) && omp_max_simt_vf () > 1;
9188 bool threads
= omp_find_clause (gimple_omp_ordered_clauses (ord_stmt
),
9189 OMP_CLAUSE_THREADS
);
9191 if (omp_find_clause (gimple_omp_ordered_clauses (ord_stmt
),
9194 /* FIXME: This is needs to be moved to the expansion to verify various
9195 conditions only testable on cfg with dominators computed, and also
9196 all the depend clauses to be merged still might need to be available
9197 for the runtime checks. */
9199 lower_omp_ordered_clauses (gsi_p
, ord_stmt
, ctx
);
9203 push_gimplify_context ();
9205 block
= make_node (BLOCK
);
9206 bind
= gimple_build_bind (NULL
, NULL
, block
);
9207 gsi_replace (gsi_p
, bind
, true);
9208 gimple_bind_add_stmt (bind
, stmt
);
9212 x
= gimple_build_call_internal (IFN_GOMP_SIMD_ORDERED_START
, 1,
9213 build_int_cst (NULL_TREE
, threads
));
9214 cfun
->has_simduid_loops
= true;
9217 x
= gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_ORDERED_START
),
9219 gimple_bind_add_stmt (bind
, x
);
9221 tree counter
= NULL_TREE
, test
= NULL_TREE
, body
= NULL_TREE
;
9224 counter
= create_tmp_var (integer_type_node
);
9225 g
= gimple_build_call_internal (IFN_GOMP_SIMT_LANE
, 0);
9226 gimple_call_set_lhs (g
, counter
);
9227 gimple_bind_add_stmt (bind
, g
);
9229 body
= create_artificial_label (UNKNOWN_LOCATION
);
9230 test
= create_artificial_label (UNKNOWN_LOCATION
);
9231 gimple_bind_add_stmt (bind
, gimple_build_label (body
));
9233 tree simt_pred
= create_tmp_var (integer_type_node
);
9234 g
= gimple_build_call_internal (IFN_GOMP_SIMT_ORDERED_PRED
, 1, counter
);
9235 gimple_call_set_lhs (g
, simt_pred
);
9236 gimple_bind_add_stmt (bind
, g
);
9238 tree t
= create_artificial_label (UNKNOWN_LOCATION
);
9239 g
= gimple_build_cond (EQ_EXPR
, simt_pred
, integer_zero_node
, t
, test
);
9240 gimple_bind_add_stmt (bind
, g
);
9242 gimple_bind_add_stmt (bind
, gimple_build_label (t
));
9244 lower_omp (gimple_omp_body_ptr (stmt
), ctx
);
9245 gimple_omp_set_body (stmt
, maybe_catch_exception (gimple_omp_body (stmt
)));
9246 gimple_bind_add_seq (bind
, gimple_omp_body (stmt
));
9247 gimple_omp_set_body (stmt
, NULL
);
9251 gimple_bind_add_stmt (bind
, gimple_build_label (test
));
9252 g
= gimple_build_assign (counter
, MINUS_EXPR
, counter
, integer_one_node
);
9253 gimple_bind_add_stmt (bind
, g
);
9255 tree c
= build2 (GE_EXPR
, boolean_type_node
, counter
, integer_zero_node
);
9256 tree nonneg
= create_tmp_var (integer_type_node
);
9257 gimple_seq tseq
= NULL
;
9258 gimplify_assign (nonneg
, fold_convert (integer_type_node
, c
), &tseq
);
9259 gimple_bind_add_seq (bind
, tseq
);
9261 g
= gimple_build_call_internal (IFN_GOMP_SIMT_VOTE_ANY
, 1, nonneg
);
9262 gimple_call_set_lhs (g
, nonneg
);
9263 gimple_bind_add_stmt (bind
, g
);
9265 tree end
= create_artificial_label (UNKNOWN_LOCATION
);
9266 g
= gimple_build_cond (NE_EXPR
, nonneg
, integer_zero_node
, body
, end
);
9267 gimple_bind_add_stmt (bind
, g
);
9269 gimple_bind_add_stmt (bind
, gimple_build_label (end
));
9272 x
= gimple_build_call_internal (IFN_GOMP_SIMD_ORDERED_END
, 1,
9273 build_int_cst (NULL_TREE
, threads
));
9275 x
= gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_ORDERED_END
),
9277 gimple_bind_add_stmt (bind
, x
);
9279 gimple_bind_add_stmt (bind
, gimple_build_omp_return (true));
9281 pop_gimplify_context (bind
);
9283 gimple_bind_append_vars (bind
, ctx
->block_vars
);
9284 BLOCK_VARS (block
) = gimple_bind_vars (bind
);
9288 /* Expand code for an OpenMP scan directive and the structured block
9289 before the scan directive. */
9292 lower_omp_scan (gimple_stmt_iterator
*gsi_p
, omp_context
*ctx
)
9294 gimple
*stmt
= gsi_stmt (*gsi_p
);
9296 = gimple_omp_scan_clauses (as_a
<gomp_scan
*> (stmt
)) != NULL
;
9297 tree lane
= NULL_TREE
;
9298 gimple_seq before
= NULL
;
9299 omp_context
*octx
= ctx
->outer
;
9301 if (octx
->scan_exclusive
&& !has_clauses
)
9303 gimple_stmt_iterator gsi2
= *gsi_p
;
9305 gimple
*stmt2
= gsi_stmt (gsi2
);
9306 /* For exclusive scan, swap GIMPLE_OMP_SCAN without clauses
9307 with following GIMPLE_OMP_SCAN with clauses, so that input_phase,
9308 the one with exclusive clause(s), comes first. */
9310 && gimple_code (stmt2
) == GIMPLE_OMP_SCAN
9311 && gimple_omp_scan_clauses (as_a
<gomp_scan
*> (stmt2
)) != NULL
)
9313 gsi_remove (gsi_p
, false);
9314 gsi_insert_after (gsi_p
, stmt
, GSI_SAME_STMT
);
9315 ctx
= maybe_lookup_ctx (stmt2
);
9317 lower_omp_scan (gsi_p
, ctx
);
9322 bool input_phase
= has_clauses
^ octx
->scan_inclusive
;
9323 bool is_simd
= (gimple_code (octx
->stmt
) == GIMPLE_OMP_FOR
9324 && gimple_omp_for_kind (octx
->stmt
) == GF_OMP_FOR_KIND_SIMD
);
9325 bool is_for
= (gimple_code (octx
->stmt
) == GIMPLE_OMP_FOR
9326 && gimple_omp_for_kind (octx
->stmt
) == GF_OMP_FOR_KIND_FOR
9327 && !gimple_omp_for_combined_p (octx
->stmt
));
9328 bool is_for_simd
= is_simd
&& gimple_omp_for_combined_into_p (octx
->stmt
);
9329 if (is_for_simd
&& octx
->for_simd_scan_phase
)
9332 if (tree c
= omp_find_clause (gimple_omp_for_clauses (octx
->stmt
),
9333 OMP_CLAUSE__SIMDUID_
))
9335 tree uid
= OMP_CLAUSE__SIMDUID__DECL (c
);
9336 lane
= create_tmp_var (unsigned_type_node
);
9337 tree t
= build_int_cst (integer_type_node
,
9339 : octx
->scan_inclusive
? 2 : 3);
9341 = gimple_build_call_internal (IFN_GOMP_SIMD_LANE
, 2, uid
, t
);
9342 gimple_call_set_lhs (g
, lane
);
9343 gimple_seq_add_stmt (&before
, g
);
9346 if (is_simd
|| is_for
)
9348 for (tree c
= gimple_omp_for_clauses (octx
->stmt
);
9349 c
; c
= OMP_CLAUSE_CHAIN (c
))
9350 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_REDUCTION
9351 && OMP_CLAUSE_REDUCTION_INSCAN (c
))
9353 location_t clause_loc
= OMP_CLAUSE_LOCATION (c
);
9354 tree var
= OMP_CLAUSE_DECL (c
);
9355 tree new_var
= lookup_decl (var
, octx
);
9357 tree var2
= NULL_TREE
;
9358 tree var3
= NULL_TREE
;
9359 tree var4
= NULL_TREE
;
9360 tree lane0
= NULL_TREE
;
9361 tree new_vard
= new_var
;
9362 if (omp_is_reference (var
))
9364 new_var
= build_simple_mem_ref_loc (clause_loc
, new_var
);
9367 if (DECL_HAS_VALUE_EXPR_P (new_vard
))
9369 val
= DECL_VALUE_EXPR (new_vard
);
9370 if (new_vard
!= new_var
)
9372 gcc_assert (TREE_CODE (val
) == ADDR_EXPR
);
9373 val
= TREE_OPERAND (val
, 0);
9375 if (TREE_CODE (val
) == ARRAY_REF
9376 && VAR_P (TREE_OPERAND (val
, 0)))
9378 tree v
= TREE_OPERAND (val
, 0);
9379 if (lookup_attribute ("omp simd array",
9380 DECL_ATTRIBUTES (v
)))
9382 val
= unshare_expr (val
);
9383 lane0
= TREE_OPERAND (val
, 1);
9384 TREE_OPERAND (val
, 1) = lane
;
9385 var2
= lookup_decl (v
, octx
);
9386 if (octx
->scan_exclusive
)
9387 var4
= lookup_decl (var2
, octx
);
9389 && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
))
9390 var3
= maybe_lookup_decl (var4
? var4
: var2
, octx
);
9393 var2
= build4 (ARRAY_REF
, TREE_TYPE (val
),
9394 var2
, lane
, NULL_TREE
, NULL_TREE
);
9395 TREE_THIS_NOTRAP (var2
) = 1;
9396 if (octx
->scan_exclusive
)
9398 var4
= build4 (ARRAY_REF
, TREE_TYPE (val
),
9399 var4
, lane
, NULL_TREE
,
9401 TREE_THIS_NOTRAP (var4
) = 1;
9412 var2
= build_outer_var_ref (var
, octx
);
9413 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
))
9415 var3
= maybe_lookup_decl (new_vard
, octx
);
9416 if (var3
== new_vard
|| var3
== NULL_TREE
)
9418 else if (is_simd
&& octx
->scan_exclusive
&& !input_phase
)
9420 var4
= maybe_lookup_decl (var3
, octx
);
9421 if (var4
== var3
|| var4
== NULL_TREE
)
9423 if (TREE_ADDRESSABLE (TREE_TYPE (new_var
)))
9434 && octx
->scan_exclusive
9436 && var4
== NULL_TREE
)
9437 var4
= create_tmp_var (TREE_TYPE (val
));
9439 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
))
9441 tree placeholder
= OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
);
9446 /* If we've added a separate identity element
9447 variable, copy it over into val. */
9448 tree x
= lang_hooks
.decls
.omp_clause_assign_op (c
, val
,
9450 gimplify_and_add (x
, &before
);
9452 else if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c
))
9454 /* Otherwise, assign to it the identity element. */
9455 gimple_seq tseq
= OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c
);
9457 tseq
= copy_gimple_seq_and_replace_locals (tseq
);
9458 tree ref
= build_outer_var_ref (var
, octx
);
9459 tree x
= (DECL_HAS_VALUE_EXPR_P (new_vard
)
9460 ? DECL_VALUE_EXPR (new_vard
) : NULL_TREE
);
9463 if (new_vard
!= new_var
)
9464 val
= build_fold_addr_expr_loc (clause_loc
, val
);
9465 SET_DECL_VALUE_EXPR (new_vard
, val
);
9467 SET_DECL_VALUE_EXPR (placeholder
, ref
);
9468 DECL_HAS_VALUE_EXPR_P (placeholder
) = 1;
9469 lower_omp (&tseq
, octx
);
9471 SET_DECL_VALUE_EXPR (new_vard
, x
);
9472 SET_DECL_VALUE_EXPR (placeholder
, NULL_TREE
);
9473 DECL_HAS_VALUE_EXPR_P (placeholder
) = 0;
9474 gimple_seq_add_seq (&before
, tseq
);
9476 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c
) = NULL
;
9482 if (octx
->scan_exclusive
)
9484 tree v4
= unshare_expr (var4
);
9485 tree v2
= unshare_expr (var2
);
9486 x
= lang_hooks
.decls
.omp_clause_assign_op (c
, v4
, v2
);
9487 gimplify_and_add (x
, &before
);
9489 gimple_seq tseq
= OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c
);
9490 x
= (DECL_HAS_VALUE_EXPR_P (new_vard
)
9491 ? DECL_VALUE_EXPR (new_vard
) : NULL_TREE
);
9493 if (x
&& new_vard
!= new_var
)
9494 vexpr
= build_fold_addr_expr_loc (clause_loc
, val
);
9496 SET_DECL_VALUE_EXPR (new_vard
, vexpr
);
9497 SET_DECL_VALUE_EXPR (placeholder
, var2
);
9498 DECL_HAS_VALUE_EXPR_P (placeholder
) = 1;
9499 lower_omp (&tseq
, octx
);
9500 gimple_seq_add_seq (&before
, tseq
);
9501 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c
) = NULL
;
9503 SET_DECL_VALUE_EXPR (new_vard
, x
);
9504 SET_DECL_VALUE_EXPR (placeholder
, NULL_TREE
);
9505 DECL_HAS_VALUE_EXPR_P (placeholder
) = 0;
9506 if (octx
->scan_inclusive
)
9508 x
= lang_hooks
.decls
.omp_clause_assign_op (c
, val
,
9510 gimplify_and_add (x
, &before
);
9512 else if (lane0
== NULL_TREE
)
9514 x
= lang_hooks
.decls
.omp_clause_assign_op (c
, val
,
9516 gimplify_and_add (x
, &before
);
9524 /* input phase. Set val to initializer before
9526 tree x
= omp_reduction_init (c
, TREE_TYPE (new_var
));
9527 gimplify_assign (val
, x
, &before
);
9532 enum tree_code code
= OMP_CLAUSE_REDUCTION_CODE (c
);
9533 if (code
== MINUS_EXPR
)
9536 tree x
= build2 (code
, TREE_TYPE (var2
),
9537 unshare_expr (var2
), unshare_expr (val
));
9538 if (octx
->scan_inclusive
)
9540 gimplify_assign (unshare_expr (var2
), x
, &before
);
9541 gimplify_assign (val
, var2
, &before
);
9545 gimplify_assign (unshare_expr (var4
),
9546 unshare_expr (var2
), &before
);
9547 gimplify_assign (var2
, x
, &before
);
9548 if (lane0
== NULL_TREE
)
9549 gimplify_assign (val
, var4
, &before
);
9553 if (octx
->scan_exclusive
&& !input_phase
&& lane0
)
9555 tree vexpr
= unshare_expr (var4
);
9556 TREE_OPERAND (vexpr
, 1) = lane0
;
9557 if (new_vard
!= new_var
)
9558 vexpr
= build_fold_addr_expr_loc (clause_loc
, vexpr
);
9559 SET_DECL_VALUE_EXPR (new_vard
, vexpr
);
9563 if (is_simd
&& !is_for_simd
)
9565 gsi_insert_seq_after (gsi_p
, gimple_omp_body (stmt
), GSI_SAME_STMT
);
9566 gsi_insert_seq_after (gsi_p
, before
, GSI_SAME_STMT
);
9567 gsi_replace (gsi_p
, gimple_build_nop (), true);
9570 lower_omp (gimple_omp_body_ptr (stmt
), octx
);
9573 gimple_stmt_iterator gsi
= gsi_start_1 (gimple_omp_body_ptr (stmt
));
9574 gsi_insert_seq_before (&gsi
, before
, GSI_SAME_STMT
);
9579 /* Gimplify a GIMPLE_OMP_CRITICAL statement. This is a relatively simple
9580 substitution of a couple of function calls. But in the NAMED case,
9581 requires that languages coordinate a symbol name. It is therefore
9582 best put here in common code. */
9584 static GTY(()) hash_map
<tree
, tree
> *critical_name_mutexes
;
9587 lower_omp_critical (gimple_stmt_iterator
*gsi_p
, omp_context
*ctx
)
9590 tree name
, lock
, unlock
;
9591 gomp_critical
*stmt
= as_a
<gomp_critical
*> (gsi_stmt (*gsi_p
));
9593 location_t loc
= gimple_location (stmt
);
9596 name
= gimple_omp_critical_name (stmt
);
9601 if (!critical_name_mutexes
)
9602 critical_name_mutexes
= hash_map
<tree
, tree
>::create_ggc (10);
9604 tree
*n
= critical_name_mutexes
->get (name
);
9609 decl
= create_tmp_var_raw (ptr_type_node
);
9611 new_str
= ACONCAT ((".gomp_critical_user_",
9612 IDENTIFIER_POINTER (name
), NULL
));
9613 DECL_NAME (decl
) = get_identifier (new_str
);
9614 TREE_PUBLIC (decl
) = 1;
9615 TREE_STATIC (decl
) = 1;
9616 DECL_COMMON (decl
) = 1;
9617 DECL_ARTIFICIAL (decl
) = 1;
9618 DECL_IGNORED_P (decl
) = 1;
9620 varpool_node::finalize_decl (decl
);
9622 critical_name_mutexes
->put (name
, decl
);
9627 /* If '#pragma omp critical' is inside offloaded region or
9628 inside function marked as offloadable, the symbol must be
9629 marked as offloadable too. */
9631 if (cgraph_node::get (current_function_decl
)->offloadable
)
9632 varpool_node::get_create (decl
)->offloadable
= 1;
9634 for (octx
= ctx
->outer
; octx
; octx
= octx
->outer
)
9635 if (is_gimple_omp_offloaded (octx
->stmt
))
9637 varpool_node::get_create (decl
)->offloadable
= 1;
9641 lock
= builtin_decl_explicit (BUILT_IN_GOMP_CRITICAL_NAME_START
);
9642 lock
= build_call_expr_loc (loc
, lock
, 1,
9643 build_fold_addr_expr_loc (loc
, decl
));
9645 unlock
= builtin_decl_explicit (BUILT_IN_GOMP_CRITICAL_NAME_END
);
9646 unlock
= build_call_expr_loc (loc
, unlock
, 1,
9647 build_fold_addr_expr_loc (loc
, decl
));
9651 lock
= builtin_decl_explicit (BUILT_IN_GOMP_CRITICAL_START
);
9652 lock
= build_call_expr_loc (loc
, lock
, 0);
9654 unlock
= builtin_decl_explicit (BUILT_IN_GOMP_CRITICAL_END
);
9655 unlock
= build_call_expr_loc (loc
, unlock
, 0);
9658 push_gimplify_context ();
9660 block
= make_node (BLOCK
);
9661 bind
= gimple_build_bind (NULL
, NULL
, block
);
9662 gsi_replace (gsi_p
, bind
, true);
9663 gimple_bind_add_stmt (bind
, stmt
);
9665 tbody
= gimple_bind_body (bind
);
9666 gimplify_and_add (lock
, &tbody
);
9667 gimple_bind_set_body (bind
, tbody
);
9669 lower_omp (gimple_omp_body_ptr (stmt
), ctx
);
9670 gimple_omp_set_body (stmt
, maybe_catch_exception (gimple_omp_body (stmt
)));
9671 gimple_bind_add_seq (bind
, gimple_omp_body (stmt
));
9672 gimple_omp_set_body (stmt
, NULL
);
9674 tbody
= gimple_bind_body (bind
);
9675 gimplify_and_add (unlock
, &tbody
);
9676 gimple_bind_set_body (bind
, tbody
);
9678 gimple_bind_add_stmt (bind
, gimple_build_omp_return (true));
9680 pop_gimplify_context (bind
);
9681 gimple_bind_append_vars (bind
, ctx
->block_vars
);
9682 BLOCK_VARS (block
) = gimple_bind_vars (bind
);
9685 /* A subroutine of lower_omp_for. Generate code to emit the predicate
9686 for a lastprivate clause. Given a loop control predicate of (V
9687 cond N2), we gate the clause on (!(V cond N2)). The lowered form
9688 is appended to *DLIST, iterator initialization is appended to
9689 *BODY_P. *CLIST is for lastprivate(conditional:) code that needs
9690 to be emitted in a critical section. */
9693 lower_omp_for_lastprivate (struct omp_for_data
*fd
, gimple_seq
*body_p
,
9694 gimple_seq
*dlist
, gimple_seq
*clist
,
9695 struct omp_context
*ctx
)
9697 tree clauses
, cond
, vinit
;
9698 enum tree_code cond_code
;
9701 cond_code
= fd
->loop
.cond_code
;
9702 cond_code
= cond_code
== LT_EXPR
? GE_EXPR
: LE_EXPR
;
9704 /* When possible, use a strict equality expression. This can let VRP
9705 type optimizations deduce the value and remove a copy. */
9706 if (tree_fits_shwi_p (fd
->loop
.step
))
9708 HOST_WIDE_INT step
= tree_to_shwi (fd
->loop
.step
);
9709 if (step
== 1 || step
== -1)
9710 cond_code
= EQ_EXPR
;
9713 tree n2
= fd
->loop
.n2
;
9714 if (fd
->collapse
> 1
9715 && TREE_CODE (n2
) != INTEGER_CST
9716 && gimple_omp_for_combined_into_p (fd
->for_stmt
))
9718 struct omp_context
*taskreg_ctx
= NULL
;
9719 if (gimple_code (ctx
->outer
->stmt
) == GIMPLE_OMP_FOR
)
9721 gomp_for
*gfor
= as_a
<gomp_for
*> (ctx
->outer
->stmt
);
9722 if (gimple_omp_for_kind (gfor
) == GF_OMP_FOR_KIND_FOR
9723 || gimple_omp_for_kind (gfor
) == GF_OMP_FOR_KIND_DISTRIBUTE
)
9725 if (gimple_omp_for_combined_into_p (gfor
))
9727 gcc_assert (ctx
->outer
->outer
9728 && is_parallel_ctx (ctx
->outer
->outer
));
9729 taskreg_ctx
= ctx
->outer
->outer
;
9733 struct omp_for_data outer_fd
;
9734 omp_extract_for_data (gfor
, &outer_fd
, NULL
);
9735 n2
= fold_convert (TREE_TYPE (n2
), outer_fd
.loop
.n2
);
9738 else if (gimple_omp_for_kind (gfor
) == GF_OMP_FOR_KIND_TASKLOOP
)
9739 taskreg_ctx
= ctx
->outer
->outer
;
9741 else if (is_taskreg_ctx (ctx
->outer
))
9742 taskreg_ctx
= ctx
->outer
;
9746 tree taskreg_clauses
9747 = gimple_omp_taskreg_clauses (taskreg_ctx
->stmt
);
9748 tree innerc
= omp_find_clause (taskreg_clauses
,
9749 OMP_CLAUSE__LOOPTEMP_
);
9750 gcc_assert (innerc
);
9751 int count
= fd
->collapse
;
9753 && fd
->last_nonrect
== fd
->first_nonrect
+ 1)
9754 if (tree v
= gimple_omp_for_index (fd
->for_stmt
, fd
->last_nonrect
))
9755 if (!TYPE_UNSIGNED (TREE_TYPE (v
)))
9757 for (i
= 0; i
< count
; i
++)
9759 innerc
= omp_find_clause (OMP_CLAUSE_CHAIN (innerc
),
9760 OMP_CLAUSE__LOOPTEMP_
);
9761 gcc_assert (innerc
);
9763 innerc
= omp_find_clause (OMP_CLAUSE_CHAIN (innerc
),
9764 OMP_CLAUSE__LOOPTEMP_
);
9766 n2
= fold_convert (TREE_TYPE (n2
),
9767 lookup_decl (OMP_CLAUSE_DECL (innerc
),
9771 cond
= build2 (cond_code
, boolean_type_node
, fd
->loop
.v
, n2
);
9773 clauses
= gimple_omp_for_clauses (fd
->for_stmt
);
9775 lower_lastprivate_clauses (clauses
, cond
, body_p
, &stmts
, clist
, ctx
);
9776 if (!gimple_seq_empty_p (stmts
))
9778 gimple_seq_add_seq (&stmts
, *dlist
);
9781 /* Optimize: v = 0; is usually cheaper than v = some_other_constant. */
9782 vinit
= fd
->loop
.n1
;
9783 if (cond_code
== EQ_EXPR
9784 && tree_fits_shwi_p (fd
->loop
.n2
)
9785 && ! integer_zerop (fd
->loop
.n2
))
9786 vinit
= build_int_cst (TREE_TYPE (fd
->loop
.v
), 0);
9788 vinit
= unshare_expr (vinit
);
9790 /* Initialize the iterator variable, so that threads that don't execute
9791 any iterations don't execute the lastprivate clauses by accident. */
9792 gimplify_assign (fd
->loop
.v
, vinit
, body_p
);
9796 /* Callback for walk_gimple_seq. Find #pragma omp scan statement. */
9799 omp_find_scan (gimple_stmt_iterator
*gsi_p
, bool *handled_ops_p
,
9800 struct walk_stmt_info
*wi
)
9802 gimple
*stmt
= gsi_stmt (*gsi_p
);
9804 *handled_ops_p
= true;
9805 switch (gimple_code (stmt
))
9809 case GIMPLE_OMP_FOR
:
9810 if (gimple_omp_for_kind (stmt
) == GF_OMP_FOR_KIND_SIMD
9811 && gimple_omp_for_combined_into_p (stmt
))
9812 *handled_ops_p
= false;
9815 case GIMPLE_OMP_SCAN
:
9816 *(gimple_stmt_iterator
*) (wi
->info
) = *gsi_p
;
9817 return integer_zero_node
;
9824 /* Helper function for lower_omp_for, add transformations for a worksharing
9825 loop with scan directives inside of it.
9826 For worksharing loop not combined with simd, transform:
9827 #pragma omp for reduction(inscan,+:r) private(i)
9828 for (i = 0; i < n; i = i + 1)
9833 #pragma omp scan inclusive(r)
9839 into two worksharing loops + code to merge results:
9841 num_threads = omp_get_num_threads ();
9842 thread_num = omp_get_thread_num ();
9843 if (thread_num == 0) goto <D.2099>; else goto <D.2100>;
9848 // For UDRs this is UDR init, or if ctors are needed, copy from
9849 // var3 that has been constructed to contain the neutral element.
9853 // The _scantemp_ clauses will arrange for rpriva to be initialized to
9854 // a shared array with num_threads elements and rprivb to a local array
9855 // number of elements equal to the number of (contiguous) iterations the
9856 // current thread will perform. controlb and controlp variables are
9857 // temporaries to handle deallocation of rprivb at the end of second
9859 #pragma omp for _scantemp_(rpriva) _scantemp_(rprivb) _scantemp_(controlb) \
9860 _scantemp_(controlp) reduction(inscan,+:r) private(i) nowait
9861 for (i = 0; i < n; i = i + 1)
9864 // For UDRs this is UDR init or copy from var3.
9866 // This is the input phase from user code.
9870 // For UDRs this is UDR merge.
9872 // Rather than handing it over to the user, save to local thread's
9874 rprivb[ivar] = var2;
9875 // For exclusive scan, the above two statements are swapped.
9879 // And remember the final value from this thread's into the shared
9881 rpriva[(sizetype) thread_num] = var2;
9882 // If more than one thread, compute using Work-Efficient prefix sum
9883 // the inclusive parallel scan of the rpriva array.
9884 if (num_threads > 1) goto <D.2102>; else goto <D.2103>;
9889 num_threadsu = (unsigned int) num_threads;
9890 thread_numup1 = (unsigned int) thread_num + 1;
9893 if (twok > num_threadsu) goto <D.2110>; else goto <D.2111>;
9897 if (k == num_threadsu) goto <D.2112>; else goto <D.2111>;
9902 cplx = .MUL_OVERFLOW (thread_nump1, twok);
9903 mul = REALPART_EXPR <cplx>;
9904 ovf = IMAGPART_EXPR <cplx>;
9905 if (ovf == 0) goto <D.2116>; else goto <D.2117>;
9908 andvm1 = andv + 4294967295;
9910 if (l < num_threadsu) goto <D.2120>; else goto <D.2117>;
9912 // For UDRs this is UDR merge, performed using var2 variable as temporary,
9913 // i.e. var2 = rpriva[l - k]; UDR merge (var2, rpriva[l]); rpriva[l] = var2;
9914 rpriva[l] = rpriva[l - k] + rpriva[l];
9916 if (down == 0) goto <D.2121>; else goto <D.2122>;
9924 if (k != 0) goto <D.2108>; else goto <D.2103>;
9926 if (thread_num == 0) goto <D.2124>; else goto <D.2125>;
9928 // For UDRs this is UDR init or copy from var3.
9932 var2 = rpriva[thread_num - 1];
9935 #pragma omp for _scantemp_(controlb) _scantemp_(controlp) \
9936 reduction(inscan,+:r) private(i)
9937 for (i = 0; i < n; i = i + 1)
9940 // For UDRs, this is r = var2; UDR merge (r, rprivb[ivar]);
9941 r = var2 + rprivb[ivar];
9944 // This is the scan phase from user code.
9946 // Plus a bump of the iterator.
9952 lower_omp_for_scan (gimple_seq
*body_p
, gimple_seq
*dlist
, gomp_for
*stmt
,
9953 struct omp_for_data
*fd
, omp_context
*ctx
)
9955 bool is_for_simd
= gimple_omp_for_combined_p (stmt
);
9956 gcc_assert (ctx
->scan_inclusive
|| ctx
->scan_exclusive
);
9958 gimple_seq body
= gimple_omp_body (stmt
);
9959 gimple_stmt_iterator input1_gsi
= gsi_none ();
9960 struct walk_stmt_info wi
;
9961 memset (&wi
, 0, sizeof (wi
));
9963 wi
.info
= (void *) &input1_gsi
;
9964 walk_gimple_seq_mod (&body
, omp_find_scan
, NULL
, &wi
);
9965 gcc_assert (!gsi_end_p (input1_gsi
));
9967 gimple
*input_stmt1
= gsi_stmt (input1_gsi
);
9968 gimple_stmt_iterator gsi
= input1_gsi
;
9970 gimple_stmt_iterator scan1_gsi
= gsi
;
9971 gimple
*scan_stmt1
= gsi_stmt (gsi
);
9972 gcc_assert (scan_stmt1
&& gimple_code (scan_stmt1
) == GIMPLE_OMP_SCAN
);
9974 gimple_seq input_body
= gimple_omp_body (input_stmt1
);
9975 gimple_seq scan_body
= gimple_omp_body (scan_stmt1
);
9976 gimple_omp_set_body (input_stmt1
, NULL
);
9977 gimple_omp_set_body (scan_stmt1
, NULL
);
9978 gimple_omp_set_body (stmt
, NULL
);
9980 gomp_for
*new_stmt
= as_a
<gomp_for
*> (gimple_copy (stmt
));
9981 gimple_seq new_body
= copy_gimple_seq_and_replace_locals (body
);
9982 gimple_omp_set_body (stmt
, body
);
9983 gimple_omp_set_body (input_stmt1
, input_body
);
9985 gimple_stmt_iterator input2_gsi
= gsi_none ();
9986 memset (&wi
, 0, sizeof (wi
));
9988 wi
.info
= (void *) &input2_gsi
;
9989 walk_gimple_seq_mod (&new_body
, omp_find_scan
, NULL
, &wi
);
9990 gcc_assert (!gsi_end_p (input2_gsi
));
9992 gimple
*input_stmt2
= gsi_stmt (input2_gsi
);
9995 gimple_stmt_iterator scan2_gsi
= gsi
;
9996 gimple
*scan_stmt2
= gsi_stmt (gsi
);
9997 gcc_assert (scan_stmt2
&& gimple_code (scan_stmt2
) == GIMPLE_OMP_SCAN
);
9998 gimple_omp_set_body (scan_stmt2
, scan_body
);
10000 gimple_stmt_iterator input3_gsi
= gsi_none ();
10001 gimple_stmt_iterator scan3_gsi
= gsi_none ();
10002 gimple_stmt_iterator input4_gsi
= gsi_none ();
10003 gimple_stmt_iterator scan4_gsi
= gsi_none ();
10004 gimple
*input_stmt3
= NULL
, *scan_stmt3
= NULL
;
10005 gimple
*input_stmt4
= NULL
, *scan_stmt4
= NULL
;
10006 omp_context
*input_simd_ctx
= NULL
, *scan_simd_ctx
= NULL
;
10009 memset (&wi
, 0, sizeof (wi
));
10010 wi
.val_only
= true;
10011 wi
.info
= (void *) &input3_gsi
;
10012 walk_gimple_seq_mod (&input_body
, omp_find_scan
, NULL
, &wi
);
10013 gcc_assert (!gsi_end_p (input3_gsi
));
10015 input_stmt3
= gsi_stmt (input3_gsi
);
10019 scan_stmt3
= gsi_stmt (gsi
);
10020 gcc_assert (scan_stmt3
&& gimple_code (scan_stmt3
) == GIMPLE_OMP_SCAN
);
10022 memset (&wi
, 0, sizeof (wi
));
10023 wi
.val_only
= true;
10024 wi
.info
= (void *) &input4_gsi
;
10025 walk_gimple_seq_mod (&scan_body
, omp_find_scan
, NULL
, &wi
);
10026 gcc_assert (!gsi_end_p (input4_gsi
));
10028 input_stmt4
= gsi_stmt (input4_gsi
);
10032 scan_stmt4
= gsi_stmt (gsi
);
10033 gcc_assert (scan_stmt4
&& gimple_code (scan_stmt4
) == GIMPLE_OMP_SCAN
);
10035 input_simd_ctx
= maybe_lookup_ctx (input_stmt3
)->outer
;
10036 scan_simd_ctx
= maybe_lookup_ctx (input_stmt4
)->outer
;
10039 tree num_threads
= create_tmp_var (integer_type_node
);
10040 tree thread_num
= create_tmp_var (integer_type_node
);
10041 tree nthreads_decl
= builtin_decl_explicit (BUILT_IN_OMP_GET_NUM_THREADS
);
10042 tree threadnum_decl
= builtin_decl_explicit (BUILT_IN_OMP_GET_THREAD_NUM
);
10043 gimple
*g
= gimple_build_call (nthreads_decl
, 0);
10044 gimple_call_set_lhs (g
, num_threads
);
10045 gimple_seq_add_stmt (body_p
, g
);
10046 g
= gimple_build_call (threadnum_decl
, 0);
10047 gimple_call_set_lhs (g
, thread_num
);
10048 gimple_seq_add_stmt (body_p
, g
);
10050 tree ivar
= create_tmp_var (sizetype
);
10051 tree new_clauses1
= NULL_TREE
, new_clauses2
= NULL_TREE
;
10052 tree
*cp1
= &new_clauses1
, *cp2
= &new_clauses2
;
10053 tree k
= create_tmp_var (unsigned_type_node
);
10054 tree l
= create_tmp_var (unsigned_type_node
);
10056 gimple_seq clist
= NULL
, mdlist
= NULL
;
10057 gimple_seq thr01_list
= NULL
, thrn1_list
= NULL
;
10058 gimple_seq thr02_list
= NULL
, thrn2_list
= NULL
;
10059 gimple_seq scan1_list
= NULL
, input2_list
= NULL
;
10060 gimple_seq last_list
= NULL
, reduc_list
= NULL
;
10061 for (tree c
= gimple_omp_for_clauses (stmt
); c
; c
= OMP_CLAUSE_CHAIN (c
))
10062 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_REDUCTION
10063 && OMP_CLAUSE_REDUCTION_INSCAN (c
))
10065 location_t clause_loc
= OMP_CLAUSE_LOCATION (c
);
10066 tree var
= OMP_CLAUSE_DECL (c
);
10067 tree new_var
= lookup_decl (var
, ctx
);
10068 tree var3
= NULL_TREE
;
10069 tree new_vard
= new_var
;
10070 if (omp_is_reference (var
))
10071 new_var
= build_simple_mem_ref_loc (clause_loc
, new_var
);
10072 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
))
10074 var3
= maybe_lookup_decl (new_vard
, ctx
);
10075 if (var3
== new_vard
)
10079 tree ptype
= build_pointer_type (TREE_TYPE (new_var
));
10080 tree rpriva
= create_tmp_var (ptype
);
10081 tree nc
= build_omp_clause (clause_loc
, OMP_CLAUSE__SCANTEMP_
);
10082 OMP_CLAUSE_DECL (nc
) = rpriva
;
10084 cp1
= &OMP_CLAUSE_CHAIN (nc
);
10086 tree rprivb
= create_tmp_var (ptype
);
10087 nc
= build_omp_clause (clause_loc
, OMP_CLAUSE__SCANTEMP_
);
10088 OMP_CLAUSE_DECL (nc
) = rprivb
;
10089 OMP_CLAUSE__SCANTEMP__ALLOC (nc
) = 1;
10091 cp1
= &OMP_CLAUSE_CHAIN (nc
);
10093 tree var2
= create_tmp_var_raw (TREE_TYPE (new_var
));
10094 if (new_vard
!= new_var
)
10095 TREE_ADDRESSABLE (var2
) = 1;
10096 gimple_add_tmp_var (var2
);
10098 tree x
= fold_convert_loc (clause_loc
, sizetype
, thread_num
);
10099 x
= fold_build2_loc (clause_loc
, MULT_EXPR
, sizetype
, x
,
10100 TYPE_SIZE_UNIT (TREE_TYPE (ptype
)));
10101 x
= fold_build2 (POINTER_PLUS_EXPR
, TREE_TYPE (rpriva
), rpriva
, x
);
10102 tree rpriva_ref
= build_simple_mem_ref_loc (clause_loc
, x
);
10104 x
= fold_build2_loc (clause_loc
, PLUS_EXPR
, integer_type_node
,
10105 thread_num
, integer_minus_one_node
);
10106 x
= fold_convert_loc (clause_loc
, sizetype
, x
);
10107 x
= fold_build2_loc (clause_loc
, MULT_EXPR
, sizetype
, x
,
10108 TYPE_SIZE_UNIT (TREE_TYPE (ptype
)));
10109 x
= fold_build2 (POINTER_PLUS_EXPR
, TREE_TYPE (rpriva
), rpriva
, x
);
10110 tree rprivam1_ref
= build_simple_mem_ref_loc (clause_loc
, x
);
10112 x
= fold_convert_loc (clause_loc
, sizetype
, l
);
10113 x
= fold_build2_loc (clause_loc
, MULT_EXPR
, sizetype
, x
,
10114 TYPE_SIZE_UNIT (TREE_TYPE (ptype
)));
10115 x
= fold_build2 (POINTER_PLUS_EXPR
, TREE_TYPE (rpriva
), rpriva
, x
);
10116 tree rprival_ref
= build_simple_mem_ref_loc (clause_loc
, x
);
10118 x
= fold_build2_loc (clause_loc
, MINUS_EXPR
, unsigned_type_node
, l
, k
);
10119 x
= fold_convert_loc (clause_loc
, sizetype
, x
);
10120 x
= fold_build2_loc (clause_loc
, MULT_EXPR
, sizetype
, x
,
10121 TYPE_SIZE_UNIT (TREE_TYPE (ptype
)));
10122 x
= fold_build2 (POINTER_PLUS_EXPR
, TREE_TYPE (rpriva
), rpriva
, x
);
10123 tree rprivalmk_ref
= build_simple_mem_ref_loc (clause_loc
, x
);
10125 x
= fold_build2_loc (clause_loc
, MULT_EXPR
, sizetype
, ivar
,
10126 TYPE_SIZE_UNIT (TREE_TYPE (ptype
)));
10127 x
= fold_build2 (POINTER_PLUS_EXPR
, TREE_TYPE (rprivb
), rprivb
, x
);
10128 tree rprivb_ref
= build_simple_mem_ref_loc (clause_loc
, x
);
10130 tree var4
= is_for_simd
? new_var
: var2
;
10131 tree var5
= NULL_TREE
, var6
= NULL_TREE
;
10134 var5
= lookup_decl (var
, input_simd_ctx
);
10135 var6
= lookup_decl (var
, scan_simd_ctx
);
10136 if (new_vard
!= new_var
)
10138 var5
= build_simple_mem_ref_loc (clause_loc
, var5
);
10139 var6
= build_simple_mem_ref_loc (clause_loc
, var6
);
10142 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
))
10144 tree placeholder
= OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
);
10147 x
= lang_hooks
.decls
.omp_clause_default_ctor
10148 (c
, var2
, build_outer_var_ref (var
, ctx
));
10150 gimplify_and_add (x
, &clist
);
10152 x
= build_outer_var_ref (var
, ctx
);
10153 x
= lang_hooks
.decls
.omp_clause_assign_op (c
, unshare_expr (var4
),
10155 gimplify_and_add (x
, &thr01_list
);
10157 tree y
= (DECL_HAS_VALUE_EXPR_P (new_vard
)
10158 ? DECL_VALUE_EXPR (new_vard
) : NULL_TREE
);
10161 x
= unshare_expr (var4
);
10162 x
= lang_hooks
.decls
.omp_clause_assign_op (c
, x
, var3
);
10163 gimplify_and_add (x
, &thrn1_list
);
10164 x
= unshare_expr (var4
);
10165 x
= lang_hooks
.decls
.omp_clause_assign_op (c
, x
, var3
);
10166 gimplify_and_add (x
, &thr02_list
);
10168 else if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c
))
10170 /* Otherwise, assign to it the identity element. */
10171 gimple_seq tseq
= OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c
);
10172 tseq
= copy_gimple_seq_and_replace_locals (tseq
);
10175 if (new_vard
!= new_var
)
10176 val
= build_fold_addr_expr_loc (clause_loc
, val
);
10177 SET_DECL_VALUE_EXPR (new_vard
, val
);
10178 DECL_HAS_VALUE_EXPR_P (new_vard
) = 1;
10180 SET_DECL_VALUE_EXPR (placeholder
, error_mark_node
);
10181 DECL_HAS_VALUE_EXPR_P (placeholder
) = 1;
10182 lower_omp (&tseq
, ctx
);
10183 gimple_seq_add_seq (&thrn1_list
, tseq
);
10184 tseq
= OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c
);
10185 lower_omp (&tseq
, ctx
);
10186 gimple_seq_add_seq (&thr02_list
, tseq
);
10187 SET_DECL_VALUE_EXPR (placeholder
, NULL_TREE
);
10188 DECL_HAS_VALUE_EXPR_P (placeholder
) = 0;
10189 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c
) = NULL
;
10191 SET_DECL_VALUE_EXPR (new_vard
, y
);
10194 DECL_HAS_VALUE_EXPR_P (new_vard
) = 0;
10195 SET_DECL_VALUE_EXPR (new_vard
, NULL_TREE
);
10199 x
= unshare_expr (var4
);
10200 x
= lang_hooks
.decls
.omp_clause_assign_op (c
, x
, rprivam1_ref
);
10201 gimplify_and_add (x
, &thrn2_list
);
10205 x
= unshare_expr (rprivb_ref
);
10206 x
= lang_hooks
.decls
.omp_clause_assign_op (c
, x
, var5
);
10207 gimplify_and_add (x
, &scan1_list
);
10211 if (ctx
->scan_exclusive
)
10213 x
= unshare_expr (rprivb_ref
);
10214 x
= lang_hooks
.decls
.omp_clause_assign_op (c
, x
, var2
);
10215 gimplify_and_add (x
, &scan1_list
);
10218 gimple_seq tseq
= OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c
);
10219 tseq
= copy_gimple_seq_and_replace_locals (tseq
);
10220 SET_DECL_VALUE_EXPR (placeholder
, var2
);
10221 DECL_HAS_VALUE_EXPR_P (placeholder
) = 1;
10222 lower_omp (&tseq
, ctx
);
10223 gimple_seq_add_seq (&scan1_list
, tseq
);
10225 if (ctx
->scan_inclusive
)
10227 x
= unshare_expr (rprivb_ref
);
10228 x
= lang_hooks
.decls
.omp_clause_assign_op (c
, x
, var2
);
10229 gimplify_and_add (x
, &scan1_list
);
10233 x
= unshare_expr (rpriva_ref
);
10234 x
= lang_hooks
.decls
.omp_clause_assign_op (c
, x
,
10235 unshare_expr (var4
));
10236 gimplify_and_add (x
, &mdlist
);
10238 x
= unshare_expr (is_for_simd
? var6
: new_var
);
10239 x
= lang_hooks
.decls
.omp_clause_assign_op (c
, x
, var4
);
10240 gimplify_and_add (x
, &input2_list
);
10243 if (new_vard
!= new_var
)
10244 val
= build_fold_addr_expr_loc (clause_loc
, val
);
10246 gimple_seq tseq
= OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c
);
10247 tseq
= copy_gimple_seq_and_replace_locals (tseq
);
10248 SET_DECL_VALUE_EXPR (new_vard
, val
);
10249 DECL_HAS_VALUE_EXPR_P (new_vard
) = 1;
10252 SET_DECL_VALUE_EXPR (placeholder
, var6
);
10253 DECL_HAS_VALUE_EXPR_P (placeholder
) = 1;
10256 DECL_HAS_VALUE_EXPR_P (placeholder
) = 0;
10257 lower_omp (&tseq
, ctx
);
10259 SET_DECL_VALUE_EXPR (new_vard
, y
);
10262 DECL_HAS_VALUE_EXPR_P (new_vard
) = 0;
10263 SET_DECL_VALUE_EXPR (new_vard
, NULL_TREE
);
10267 SET_DECL_VALUE_EXPR (placeholder
, new_var
);
10268 DECL_HAS_VALUE_EXPR_P (placeholder
) = 1;
10269 lower_omp (&tseq
, ctx
);
10271 gimple_seq_add_seq (&input2_list
, tseq
);
10273 x
= build_outer_var_ref (var
, ctx
);
10274 x
= lang_hooks
.decls
.omp_clause_assign_op (c
, x
, rpriva_ref
);
10275 gimplify_and_add (x
, &last_list
);
10277 x
= lang_hooks
.decls
.omp_clause_assign_op (c
, var2
, rprivalmk_ref
);
10278 gimplify_and_add (x
, &reduc_list
);
10279 tseq
= OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c
);
10280 tseq
= copy_gimple_seq_and_replace_locals (tseq
);
10282 if (new_vard
!= new_var
)
10283 val
= build_fold_addr_expr_loc (clause_loc
, val
);
10284 SET_DECL_VALUE_EXPR (new_vard
, val
);
10285 DECL_HAS_VALUE_EXPR_P (new_vard
) = 1;
10286 SET_DECL_VALUE_EXPR (placeholder
, var2
);
10287 lower_omp (&tseq
, ctx
);
10288 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c
) = NULL
;
10289 SET_DECL_VALUE_EXPR (placeholder
, NULL_TREE
);
10290 DECL_HAS_VALUE_EXPR_P (placeholder
) = 0;
10292 SET_DECL_VALUE_EXPR (new_vard
, y
);
10295 DECL_HAS_VALUE_EXPR_P (new_vard
) = 0;
10296 SET_DECL_VALUE_EXPR (new_vard
, NULL_TREE
);
10298 gimple_seq_add_seq (&reduc_list
, tseq
);
10299 x
= lang_hooks
.decls
.omp_clause_assign_op (c
, rprival_ref
, var2
);
10300 gimplify_and_add (x
, &reduc_list
);
10302 x
= lang_hooks
.decls
.omp_clause_dtor (c
, var2
);
10304 gimplify_and_add (x
, dlist
);
10308 x
= build_outer_var_ref (var
, ctx
);
10309 gimplify_assign (unshare_expr (var4
), x
, &thr01_list
);
10311 x
= omp_reduction_init (c
, TREE_TYPE (new_var
));
10312 gimplify_assign (unshare_expr (var4
), unshare_expr (x
),
10314 gimplify_assign (unshare_expr (var4
), x
, &thr02_list
);
10316 gimplify_assign (unshare_expr (var4
), rprivam1_ref
, &thrn2_list
);
10318 enum tree_code code
= OMP_CLAUSE_REDUCTION_CODE (c
);
10319 if (code
== MINUS_EXPR
)
10323 gimplify_assign (unshare_expr (rprivb_ref
), var5
, &scan1_list
);
10326 if (ctx
->scan_exclusive
)
10327 gimplify_assign (unshare_expr (rprivb_ref
), var2
,
10329 x
= build2 (code
, TREE_TYPE (new_var
), var2
, new_var
);
10330 gimplify_assign (var2
, x
, &scan1_list
);
10331 if (ctx
->scan_inclusive
)
10332 gimplify_assign (unshare_expr (rprivb_ref
), var2
,
10336 gimplify_assign (unshare_expr (rpriva_ref
), unshare_expr (var4
),
10339 x
= build2 (code
, TREE_TYPE (new_var
), var4
, rprivb_ref
);
10340 gimplify_assign (is_for_simd
? var6
: new_var
, x
, &input2_list
);
10342 gimplify_assign (build_outer_var_ref (var
, ctx
), rpriva_ref
,
10345 x
= build2 (code
, TREE_TYPE (new_var
), rprivalmk_ref
,
10346 unshare_expr (rprival_ref
));
10347 gimplify_assign (rprival_ref
, x
, &reduc_list
);
10351 g
= gimple_build_assign (ivar
, PLUS_EXPR
, ivar
, size_one_node
);
10352 gimple_seq_add_stmt (&scan1_list
, g
);
10353 g
= gimple_build_assign (ivar
, PLUS_EXPR
, ivar
, size_one_node
);
10354 gimple_seq_add_stmt (gimple_omp_body_ptr (is_for_simd
10355 ? scan_stmt4
: scan_stmt2
), g
);
10357 tree controlb
= create_tmp_var (boolean_type_node
);
10358 tree controlp
= create_tmp_var (ptr_type_node
);
10359 tree nc
= build_omp_clause (UNKNOWN_LOCATION
, OMP_CLAUSE__SCANTEMP_
);
10360 OMP_CLAUSE_DECL (nc
) = controlb
;
10361 OMP_CLAUSE__SCANTEMP__CONTROL (nc
) = 1;
10363 cp1
= &OMP_CLAUSE_CHAIN (nc
);
10364 nc
= build_omp_clause (UNKNOWN_LOCATION
, OMP_CLAUSE__SCANTEMP_
);
10365 OMP_CLAUSE_DECL (nc
) = controlp
;
10366 OMP_CLAUSE__SCANTEMP__CONTROL (nc
) = 1;
10368 cp1
= &OMP_CLAUSE_CHAIN (nc
);
10369 nc
= build_omp_clause (UNKNOWN_LOCATION
, OMP_CLAUSE__SCANTEMP_
);
10370 OMP_CLAUSE_DECL (nc
) = controlb
;
10371 OMP_CLAUSE__SCANTEMP__CONTROL (nc
) = 1;
10373 cp2
= &OMP_CLAUSE_CHAIN (nc
);
10374 nc
= build_omp_clause (UNKNOWN_LOCATION
, OMP_CLAUSE__SCANTEMP_
);
10375 OMP_CLAUSE_DECL (nc
) = controlp
;
10376 OMP_CLAUSE__SCANTEMP__CONTROL (nc
) = 1;
10378 cp2
= &OMP_CLAUSE_CHAIN (nc
);
10380 *cp1
= gimple_omp_for_clauses (stmt
);
10381 gimple_omp_for_set_clauses (stmt
, new_clauses1
);
10382 *cp2
= gimple_omp_for_clauses (new_stmt
);
10383 gimple_omp_for_set_clauses (new_stmt
, new_clauses2
);
10387 gimple_seq_add_seq (gimple_omp_body_ptr (scan_stmt3
), scan1_list
);
10388 gimple_seq_add_seq (gimple_omp_body_ptr (input_stmt4
), input2_list
);
10390 gsi_insert_seq_after (&input3_gsi
, gimple_omp_body (input_stmt3
),
10392 gsi_remove (&input3_gsi
, true);
10393 gsi_insert_seq_after (&scan3_gsi
, gimple_omp_body (scan_stmt3
),
10395 gsi_remove (&scan3_gsi
, true);
10396 gsi_insert_seq_after (&input4_gsi
, gimple_omp_body (input_stmt4
),
10398 gsi_remove (&input4_gsi
, true);
10399 gsi_insert_seq_after (&scan4_gsi
, gimple_omp_body (scan_stmt4
),
10401 gsi_remove (&scan4_gsi
, true);
10405 gimple_omp_set_body (scan_stmt1
, scan1_list
);
10406 gimple_omp_set_body (input_stmt2
, input2_list
);
10409 gsi_insert_seq_after (&input1_gsi
, gimple_omp_body (input_stmt1
),
10411 gsi_remove (&input1_gsi
, true);
10412 gsi_insert_seq_after (&scan1_gsi
, gimple_omp_body (scan_stmt1
),
10414 gsi_remove (&scan1_gsi
, true);
10415 gsi_insert_seq_after (&input2_gsi
, gimple_omp_body (input_stmt2
),
10417 gsi_remove (&input2_gsi
, true);
10418 gsi_insert_seq_after (&scan2_gsi
, gimple_omp_body (scan_stmt2
),
10420 gsi_remove (&scan2_gsi
, true);
10422 gimple_seq_add_seq (body_p
, clist
);
10424 tree lab1
= create_artificial_label (UNKNOWN_LOCATION
);
10425 tree lab2
= create_artificial_label (UNKNOWN_LOCATION
);
10426 tree lab3
= create_artificial_label (UNKNOWN_LOCATION
);
10427 g
= gimple_build_cond (EQ_EXPR
, thread_num
, integer_zero_node
, lab1
, lab2
);
10428 gimple_seq_add_stmt (body_p
, g
);
10429 g
= gimple_build_label (lab1
);
10430 gimple_seq_add_stmt (body_p
, g
);
10431 gimple_seq_add_seq (body_p
, thr01_list
);
10432 g
= gimple_build_goto (lab3
);
10433 gimple_seq_add_stmt (body_p
, g
);
10434 g
= gimple_build_label (lab2
);
10435 gimple_seq_add_stmt (body_p
, g
);
10436 gimple_seq_add_seq (body_p
, thrn1_list
);
10437 g
= gimple_build_label (lab3
);
10438 gimple_seq_add_stmt (body_p
, g
);
10440 g
= gimple_build_assign (ivar
, size_zero_node
);
10441 gimple_seq_add_stmt (body_p
, g
);
10443 gimple_seq_add_stmt (body_p
, stmt
);
10444 gimple_seq_add_seq (body_p
, body
);
10445 gimple_seq_add_stmt (body_p
, gimple_build_omp_continue (fd
->loop
.v
,
10448 g
= gimple_build_omp_return (true);
10449 gimple_seq_add_stmt (body_p
, g
);
10450 gimple_seq_add_seq (body_p
, mdlist
);
10452 lab1
= create_artificial_label (UNKNOWN_LOCATION
);
10453 lab2
= create_artificial_label (UNKNOWN_LOCATION
);
10454 g
= gimple_build_cond (GT_EXPR
, num_threads
, integer_one_node
, lab1
, lab2
);
10455 gimple_seq_add_stmt (body_p
, g
);
10456 g
= gimple_build_label (lab1
);
10457 gimple_seq_add_stmt (body_p
, g
);
10459 g
= omp_build_barrier (NULL
);
10460 gimple_seq_add_stmt (body_p
, g
);
10462 tree down
= create_tmp_var (unsigned_type_node
);
10463 g
= gimple_build_assign (down
, build_zero_cst (unsigned_type_node
));
10464 gimple_seq_add_stmt (body_p
, g
);
10466 g
= gimple_build_assign (k
, build_one_cst (unsigned_type_node
));
10467 gimple_seq_add_stmt (body_p
, g
);
10469 tree num_threadsu
= create_tmp_var (unsigned_type_node
);
10470 g
= gimple_build_assign (num_threadsu
, NOP_EXPR
, num_threads
);
10471 gimple_seq_add_stmt (body_p
, g
);
10473 tree thread_numu
= create_tmp_var (unsigned_type_node
);
10474 g
= gimple_build_assign (thread_numu
, NOP_EXPR
, thread_num
);
10475 gimple_seq_add_stmt (body_p
, g
);
10477 tree thread_nump1
= create_tmp_var (unsigned_type_node
);
10478 g
= gimple_build_assign (thread_nump1
, PLUS_EXPR
, thread_numu
,
10479 build_int_cst (unsigned_type_node
, 1));
10480 gimple_seq_add_stmt (body_p
, g
);
10482 lab3
= create_artificial_label (UNKNOWN_LOCATION
);
10483 g
= gimple_build_label (lab3
);
10484 gimple_seq_add_stmt (body_p
, g
);
10486 tree twok
= create_tmp_var (unsigned_type_node
);
10487 g
= gimple_build_assign (twok
, LSHIFT_EXPR
, k
, integer_one_node
);
10488 gimple_seq_add_stmt (body_p
, g
);
10490 tree lab4
= create_artificial_label (UNKNOWN_LOCATION
);
10491 tree lab5
= create_artificial_label (UNKNOWN_LOCATION
);
10492 tree lab6
= create_artificial_label (UNKNOWN_LOCATION
);
10493 g
= gimple_build_cond (GT_EXPR
, twok
, num_threadsu
, lab4
, lab5
);
10494 gimple_seq_add_stmt (body_p
, g
);
10495 g
= gimple_build_label (lab4
);
10496 gimple_seq_add_stmt (body_p
, g
);
10497 g
= gimple_build_assign (down
, build_all_ones_cst (unsigned_type_node
));
10498 gimple_seq_add_stmt (body_p
, g
);
10499 g
= gimple_build_assign (k
, RSHIFT_EXPR
, k
, integer_one_node
);
10500 gimple_seq_add_stmt (body_p
, g
);
10502 g
= gimple_build_cond (EQ_EXPR
, k
, num_threadsu
, lab6
, lab5
);
10503 gimple_seq_add_stmt (body_p
, g
);
10504 g
= gimple_build_label (lab6
);
10505 gimple_seq_add_stmt (body_p
, g
);
10507 g
= gimple_build_assign (k
, RSHIFT_EXPR
, k
, integer_one_node
);
10508 gimple_seq_add_stmt (body_p
, g
);
10510 g
= gimple_build_label (lab5
);
10511 gimple_seq_add_stmt (body_p
, g
);
10513 g
= gimple_build_assign (twok
, LSHIFT_EXPR
, k
, integer_one_node
);
10514 gimple_seq_add_stmt (body_p
, g
);
10516 tree cplx
= create_tmp_var (build_complex_type (unsigned_type_node
, false));
10517 g
= gimple_build_call_internal (IFN_MUL_OVERFLOW
, 2, thread_nump1
, twok
);
10518 gimple_call_set_lhs (g
, cplx
);
10519 gimple_seq_add_stmt (body_p
, g
);
10520 tree mul
= create_tmp_var (unsigned_type_node
);
10521 g
= gimple_build_assign (mul
, REALPART_EXPR
,
10522 build1 (REALPART_EXPR
, unsigned_type_node
, cplx
));
10523 gimple_seq_add_stmt (body_p
, g
);
10524 tree ovf
= create_tmp_var (unsigned_type_node
);
10525 g
= gimple_build_assign (ovf
, IMAGPART_EXPR
,
10526 build1 (IMAGPART_EXPR
, unsigned_type_node
, cplx
));
10527 gimple_seq_add_stmt (body_p
, g
);
10529 tree lab7
= create_artificial_label (UNKNOWN_LOCATION
);
10530 tree lab8
= create_artificial_label (UNKNOWN_LOCATION
);
10531 g
= gimple_build_cond (EQ_EXPR
, ovf
, build_zero_cst (unsigned_type_node
),
10533 gimple_seq_add_stmt (body_p
, g
);
10534 g
= gimple_build_label (lab7
);
10535 gimple_seq_add_stmt (body_p
, g
);
10537 tree andv
= create_tmp_var (unsigned_type_node
);
10538 g
= gimple_build_assign (andv
, BIT_AND_EXPR
, k
, down
);
10539 gimple_seq_add_stmt (body_p
, g
);
10540 tree andvm1
= create_tmp_var (unsigned_type_node
);
10541 g
= gimple_build_assign (andvm1
, PLUS_EXPR
, andv
,
10542 build_minus_one_cst (unsigned_type_node
));
10543 gimple_seq_add_stmt (body_p
, g
);
10545 g
= gimple_build_assign (l
, PLUS_EXPR
, mul
, andvm1
);
10546 gimple_seq_add_stmt (body_p
, g
);
10548 tree lab9
= create_artificial_label (UNKNOWN_LOCATION
);
10549 g
= gimple_build_cond (LT_EXPR
, l
, num_threadsu
, lab9
, lab8
);
10550 gimple_seq_add_stmt (body_p
, g
);
10551 g
= gimple_build_label (lab9
);
10552 gimple_seq_add_stmt (body_p
, g
);
10553 gimple_seq_add_seq (body_p
, reduc_list
);
10554 g
= gimple_build_label (lab8
);
10555 gimple_seq_add_stmt (body_p
, g
);
10557 tree lab10
= create_artificial_label (UNKNOWN_LOCATION
);
10558 tree lab11
= create_artificial_label (UNKNOWN_LOCATION
);
10559 tree lab12
= create_artificial_label (UNKNOWN_LOCATION
);
10560 g
= gimple_build_cond (EQ_EXPR
, down
, build_zero_cst (unsigned_type_node
),
10562 gimple_seq_add_stmt (body_p
, g
);
10563 g
= gimple_build_label (lab10
);
10564 gimple_seq_add_stmt (body_p
, g
);
10565 g
= gimple_build_assign (k
, LSHIFT_EXPR
, k
, integer_one_node
);
10566 gimple_seq_add_stmt (body_p
, g
);
10567 g
= gimple_build_goto (lab12
);
10568 gimple_seq_add_stmt (body_p
, g
);
10569 g
= gimple_build_label (lab11
);
10570 gimple_seq_add_stmt (body_p
, g
);
10571 g
= gimple_build_assign (k
, RSHIFT_EXPR
, k
, integer_one_node
);
10572 gimple_seq_add_stmt (body_p
, g
);
10573 g
= gimple_build_label (lab12
);
10574 gimple_seq_add_stmt (body_p
, g
);
10576 g
= omp_build_barrier (NULL
);
10577 gimple_seq_add_stmt (body_p
, g
);
10579 g
= gimple_build_cond (NE_EXPR
, k
, build_zero_cst (unsigned_type_node
),
10581 gimple_seq_add_stmt (body_p
, g
);
10583 g
= gimple_build_label (lab2
);
10584 gimple_seq_add_stmt (body_p
, g
);
10586 lab1
= create_artificial_label (UNKNOWN_LOCATION
);
10587 lab2
= create_artificial_label (UNKNOWN_LOCATION
);
10588 lab3
= create_artificial_label (UNKNOWN_LOCATION
);
10589 g
= gimple_build_cond (EQ_EXPR
, thread_num
, integer_zero_node
, lab1
, lab2
);
10590 gimple_seq_add_stmt (body_p
, g
);
10591 g
= gimple_build_label (lab1
);
10592 gimple_seq_add_stmt (body_p
, g
);
10593 gimple_seq_add_seq (body_p
, thr02_list
);
10594 g
= gimple_build_goto (lab3
);
10595 gimple_seq_add_stmt (body_p
, g
);
10596 g
= gimple_build_label (lab2
);
10597 gimple_seq_add_stmt (body_p
, g
);
10598 gimple_seq_add_seq (body_p
, thrn2_list
);
10599 g
= gimple_build_label (lab3
);
10600 gimple_seq_add_stmt (body_p
, g
);
10602 g
= gimple_build_assign (ivar
, size_zero_node
);
10603 gimple_seq_add_stmt (body_p
, g
);
10604 gimple_seq_add_stmt (body_p
, new_stmt
);
10605 gimple_seq_add_seq (body_p
, new_body
);
10607 gimple_seq new_dlist
= NULL
;
10608 lab1
= create_artificial_label (UNKNOWN_LOCATION
);
10609 lab2
= create_artificial_label (UNKNOWN_LOCATION
);
10610 tree num_threadsm1
= create_tmp_var (integer_type_node
);
10611 g
= gimple_build_assign (num_threadsm1
, PLUS_EXPR
, num_threads
,
10612 integer_minus_one_node
);
10613 gimple_seq_add_stmt (&new_dlist
, g
);
10614 g
= gimple_build_cond (EQ_EXPR
, thread_num
, num_threadsm1
, lab1
, lab2
);
10615 gimple_seq_add_stmt (&new_dlist
, g
);
10616 g
= gimple_build_label (lab1
);
10617 gimple_seq_add_stmt (&new_dlist
, g
);
10618 gimple_seq_add_seq (&new_dlist
, last_list
);
10619 g
= gimple_build_label (lab2
);
10620 gimple_seq_add_stmt (&new_dlist
, g
);
10621 gimple_seq_add_seq (&new_dlist
, *dlist
);
10622 *dlist
= new_dlist
;
10625 /* Lower code for an OMP loop directive. */
10628 lower_omp_for (gimple_stmt_iterator
*gsi_p
, omp_context
*ctx
)
10630 tree
*rhs_p
, block
;
10631 struct omp_for_data fd
, *fdp
= NULL
;
10632 gomp_for
*stmt
= as_a
<gomp_for
*> (gsi_stmt (*gsi_p
));
10634 gimple_seq omp_for_body
, body
, dlist
, tred_ilist
= NULL
, tred_dlist
= NULL
;
10635 gimple_seq cnt_list
= NULL
, clist
= NULL
;
10636 gimple_seq oacc_head
= NULL
, oacc_tail
= NULL
;
10639 push_gimplify_context ();
10641 lower_omp (gimple_omp_for_pre_body_ptr (stmt
), ctx
);
10643 block
= make_node (BLOCK
);
10644 new_stmt
= gimple_build_bind (NULL
, NULL
, block
);
10645 /* Replace at gsi right away, so that 'stmt' is no member
10646 of a sequence anymore as we're going to add to a different
10648 gsi_replace (gsi_p
, new_stmt
, true);
10650 /* Move declaration of temporaries in the loop body before we make
10652 omp_for_body
= gimple_omp_body (stmt
);
10653 if (!gimple_seq_empty_p (omp_for_body
)
10654 && gimple_code (gimple_seq_first_stmt (omp_for_body
)) == GIMPLE_BIND
)
10657 = as_a
<gbind
*> (gimple_seq_first_stmt (omp_for_body
));
10658 tree vars
= gimple_bind_vars (inner_bind
);
10659 gimple_bind_append_vars (new_stmt
, vars
);
10660 /* bind_vars/BLOCK_VARS are being moved to new_stmt/block, don't
10661 keep them on the inner_bind and it's block. */
10662 gimple_bind_set_vars (inner_bind
, NULL_TREE
);
10663 if (gimple_bind_block (inner_bind
))
10664 BLOCK_VARS (gimple_bind_block (inner_bind
)) = NULL_TREE
;
10667 if (gimple_omp_for_combined_into_p (stmt
))
10669 omp_extract_for_data (stmt
, &fd
, NULL
);
10672 /* We need two temporaries with fd.loop.v type (istart/iend)
10673 and then (fd.collapse - 1) temporaries with the same
10674 type for count2 ... countN-1 vars if not constant. */
10676 tree type
= fd
.iter_type
;
10677 if (fd
.collapse
> 1
10678 && TREE_CODE (fd
.loop
.n2
) != INTEGER_CST
)
10679 count
+= fd
.collapse
- 1;
10681 tree type2
= NULL_TREE
;
10683 = (gimple_omp_for_kind (stmt
) == GF_OMP_FOR_KIND_FOR
10684 || gimple_omp_for_kind (stmt
) == GF_OMP_FOR_KIND_TASKLOOP
);
10685 tree outerc
= NULL
, *pc
= gimple_omp_for_clauses_ptr (stmt
);
10687 tree clauses
= *pc
;
10688 if (fd
.collapse
> 1
10690 && fd
.last_nonrect
== fd
.first_nonrect
+ 1
10691 && TREE_CODE (fd
.loop
.n2
) != INTEGER_CST
)
10692 if (tree v
= gimple_omp_for_index (stmt
, fd
.last_nonrect
))
10693 if (!TYPE_UNSIGNED (TREE_TYPE (v
)))
10695 v
= gimple_omp_for_index (stmt
, fd
.first_nonrect
);
10696 type2
= TREE_TYPE (v
);
10702 = omp_find_clause (gimple_omp_taskreg_clauses (ctx
->outer
->stmt
),
10703 OMP_CLAUSE__LOOPTEMP_
);
10704 if (ctx
->simt_stmt
)
10705 simtc
= omp_find_clause (gimple_omp_for_clauses (ctx
->simt_stmt
),
10706 OMP_CLAUSE__LOOPTEMP_
);
10707 for (i
= 0; i
< count
+ count2
; i
++)
10712 gcc_assert (outerc
);
10713 temp
= lookup_decl (OMP_CLAUSE_DECL (outerc
), ctx
->outer
);
10714 outerc
= omp_find_clause (OMP_CLAUSE_CHAIN (outerc
),
10715 OMP_CLAUSE__LOOPTEMP_
);
10719 /* If there are 2 adjacent SIMD stmts, one with _simt_
10720 clause, another without, make sure they have the same
10721 decls in _looptemp_ clauses, because the outer stmt
10722 they are combined into will look up just one inner_stmt. */
10723 if (ctx
->simt_stmt
)
10724 temp
= OMP_CLAUSE_DECL (simtc
);
10726 temp
= create_tmp_var (i
>= count
? type2
: type
);
10727 insert_decl_map (&ctx
->outer
->cb
, temp
, temp
);
10729 *pc
= build_omp_clause (UNKNOWN_LOCATION
, OMP_CLAUSE__LOOPTEMP_
);
10730 OMP_CLAUSE_DECL (*pc
) = temp
;
10731 pc
= &OMP_CLAUSE_CHAIN (*pc
);
10732 if (ctx
->simt_stmt
)
10733 simtc
= omp_find_clause (OMP_CLAUSE_CHAIN (simtc
),
10734 OMP_CLAUSE__LOOPTEMP_
);
10739 /* The pre-body and input clauses go before the lowered GIMPLE_OMP_FOR. */
10743 = omp_task_reductions_find_first (gimple_omp_for_clauses (stmt
), OMP_FOR
,
10744 OMP_CLAUSE_REDUCTION
);
10745 tree rtmp
= NULL_TREE
;
10748 tree type
= build_pointer_type (pointer_sized_int_node
);
10749 tree temp
= create_tmp_var (type
);
10750 tree c
= build_omp_clause (UNKNOWN_LOCATION
, OMP_CLAUSE__REDUCTEMP_
);
10751 OMP_CLAUSE_DECL (c
) = temp
;
10752 OMP_CLAUSE_CHAIN (c
) = gimple_omp_for_clauses (stmt
);
10753 gimple_omp_for_set_clauses (stmt
, c
);
10754 lower_omp_task_reductions (ctx
, OMP_FOR
,
10755 gimple_omp_for_clauses (stmt
),
10756 &tred_ilist
, &tred_dlist
);
10758 rtmp
= make_ssa_name (type
);
10759 gimple_seq_add_stmt (&body
, gimple_build_assign (rtmp
, temp
));
10762 lower_lastprivate_conditional_clauses (gimple_omp_for_clauses_ptr (stmt
),
10765 lower_rec_input_clauses (gimple_omp_for_clauses (stmt
), &body
, &dlist
, ctx
,
10767 gimple_seq_add_seq (rclauses
? &tred_ilist
: &body
,
10768 gimple_omp_for_pre_body (stmt
));
10770 lower_omp (gimple_omp_body_ptr (stmt
), ctx
);
10772 /* Lower the header expressions. At this point, we can assume that
10773 the header is of the form:
10775 #pragma omp for (V = VAL1; V {<|>|<=|>=} VAL2; V = V [+-] VAL3)
10777 We just need to make sure that VAL1, VAL2 and VAL3 are lowered
10778 using the .omp_data_s mapping, if needed. */
10779 for (i
= 0; i
< gimple_omp_for_collapse (stmt
); i
++)
10781 rhs_p
= gimple_omp_for_initial_ptr (stmt
, i
);
10782 if (TREE_CODE (*rhs_p
) == TREE_VEC
)
10784 if (!is_gimple_min_invariant (TREE_VEC_ELT (*rhs_p
, 1)))
10785 TREE_VEC_ELT (*rhs_p
, 1)
10786 = get_formal_tmp_var (TREE_VEC_ELT (*rhs_p
, 1), &cnt_list
);
10787 if (!is_gimple_min_invariant (TREE_VEC_ELT (*rhs_p
, 2)))
10788 TREE_VEC_ELT (*rhs_p
, 2)
10789 = get_formal_tmp_var (TREE_VEC_ELT (*rhs_p
, 2), &cnt_list
);
10791 else if (!is_gimple_min_invariant (*rhs_p
))
10792 *rhs_p
= get_formal_tmp_var (*rhs_p
, &cnt_list
);
10793 else if (TREE_CODE (*rhs_p
) == ADDR_EXPR
)
10794 recompute_tree_invariant_for_addr_expr (*rhs_p
);
10796 rhs_p
= gimple_omp_for_final_ptr (stmt
, i
);
10797 if (TREE_CODE (*rhs_p
) == TREE_VEC
)
10799 if (!is_gimple_min_invariant (TREE_VEC_ELT (*rhs_p
, 1)))
10800 TREE_VEC_ELT (*rhs_p
, 1)
10801 = get_formal_tmp_var (TREE_VEC_ELT (*rhs_p
, 1), &cnt_list
);
10802 if (!is_gimple_min_invariant (TREE_VEC_ELT (*rhs_p
, 2)))
10803 TREE_VEC_ELT (*rhs_p
, 2)
10804 = get_formal_tmp_var (TREE_VEC_ELT (*rhs_p
, 2), &cnt_list
);
10806 else if (!is_gimple_min_invariant (*rhs_p
))
10807 *rhs_p
= get_formal_tmp_var (*rhs_p
, &cnt_list
);
10808 else if (TREE_CODE (*rhs_p
) == ADDR_EXPR
)
10809 recompute_tree_invariant_for_addr_expr (*rhs_p
);
10811 rhs_p
= &TREE_OPERAND (gimple_omp_for_incr (stmt
, i
), 1);
10812 if (!is_gimple_min_invariant (*rhs_p
))
10813 *rhs_p
= get_formal_tmp_var (*rhs_p
, &cnt_list
);
10816 gimple_seq_add_seq (&tred_ilist
, cnt_list
);
10818 gimple_seq_add_seq (&body
, cnt_list
);
10820 /* Once lowered, extract the bounds and clauses. */
10821 omp_extract_for_data (stmt
, &fd
, NULL
);
10823 if (is_gimple_omp_oacc (ctx
->stmt
)
10824 && !ctx_in_oacc_kernels_region (ctx
))
10825 lower_oacc_head_tail (gimple_location (stmt
),
10826 gimple_omp_for_clauses (stmt
),
10827 &oacc_head
, &oacc_tail
, ctx
);
10829 /* Add OpenACC partitioning and reduction markers just before the loop. */
10831 gimple_seq_add_seq (&body
, oacc_head
);
10833 lower_omp_for_lastprivate (&fd
, &body
, &dlist
, &clist
, ctx
);
10835 if (gimple_omp_for_kind (stmt
) == GF_OMP_FOR_KIND_FOR
)
10836 for (tree c
= gimple_omp_for_clauses (stmt
); c
; c
= OMP_CLAUSE_CHAIN (c
))
10837 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LINEAR
10838 && !OMP_CLAUSE_LINEAR_NO_COPYIN (c
))
10840 OMP_CLAUSE_DECL (c
) = lookup_decl (OMP_CLAUSE_DECL (c
), ctx
);
10841 if (DECL_P (OMP_CLAUSE_LINEAR_STEP (c
)))
10842 OMP_CLAUSE_LINEAR_STEP (c
)
10843 = maybe_lookup_decl_in_outer_ctx (OMP_CLAUSE_LINEAR_STEP (c
),
10847 if ((ctx
->scan_inclusive
|| ctx
->scan_exclusive
)
10848 && gimple_omp_for_kind (stmt
) == GF_OMP_FOR_KIND_FOR
)
10849 lower_omp_for_scan (&body
, &dlist
, stmt
, &fd
, ctx
);
10852 gimple_seq_add_stmt (&body
, stmt
);
10853 gimple_seq_add_seq (&body
, gimple_omp_body (stmt
));
10856 gimple_seq_add_stmt (&body
, gimple_build_omp_continue (fd
.loop
.v
,
10859 /* After the loop, add exit clauses. */
10860 lower_reduction_clauses (gimple_omp_for_clauses (stmt
), &body
, &clist
, ctx
);
10864 tree fndecl
= builtin_decl_explicit (BUILT_IN_GOMP_ATOMIC_START
);
10865 gcall
*g
= gimple_build_call (fndecl
, 0);
10866 gimple_seq_add_stmt (&body
, g
);
10867 gimple_seq_add_seq (&body
, clist
);
10868 fndecl
= builtin_decl_explicit (BUILT_IN_GOMP_ATOMIC_END
);
10869 g
= gimple_build_call (fndecl
, 0);
10870 gimple_seq_add_stmt (&body
, g
);
10873 if (ctx
->cancellable
)
10874 gimple_seq_add_stmt (&body
, gimple_build_label (ctx
->cancel_label
));
10876 gimple_seq_add_seq (&body
, dlist
);
10880 gimple_seq_add_seq (&tred_ilist
, body
);
10884 body
= maybe_catch_exception (body
);
10886 /* Region exit marker goes at the end of the loop body. */
10887 gimple
*g
= gimple_build_omp_return (fd
.have_nowait
);
10888 gimple_seq_add_stmt (&body
, g
);
10890 gimple_seq_add_seq (&body
, tred_dlist
);
10892 maybe_add_implicit_barrier_cancel (ctx
, g
, &body
);
10895 OMP_CLAUSE_DECL (rclauses
) = rtmp
;
10897 /* Add OpenACC joining and reduction markers just after the loop. */
10899 gimple_seq_add_seq (&body
, oacc_tail
);
10901 pop_gimplify_context (new_stmt
);
10903 gimple_bind_append_vars (new_stmt
, ctx
->block_vars
);
10904 maybe_remove_omp_member_access_dummy_vars (new_stmt
);
10905 BLOCK_VARS (block
) = gimple_bind_vars (new_stmt
);
10906 if (BLOCK_VARS (block
))
10907 TREE_USED (block
) = 1;
10909 gimple_bind_set_body (new_stmt
, body
);
10910 gimple_omp_set_body (stmt
, NULL
);
10911 gimple_omp_for_set_pre_body (stmt
, NULL
);
10914 /* Callback for walk_stmts.  Check if the current statement only contains
10915    GIMPLE_OMP_FOR or GIMPLE_OMP_SECTIONS.  */
/* NOTE(review): the verdict is accumulated in *WI->INFO: 1 after the first
   workshare construct seen, -1 once anything else is encountered (the
   default case and the final return of this callback are not visible in
   this chunk -- confirm against the full file).  */
10918 check_combined_parallel (gimple_stmt_iterator
*gsi_p
,
10919 bool *handled_ops_p
,
10920 struct walk_stmt_info
*wi
)
/* WI->INFO carries the caller's int accumulator through the walk.  */
10922 int *info
= (int *) wi
->info
;
10923 gimple
*stmt
= gsi_stmt (*gsi_p
);
/* Each statement is fully classified here; no operand walk needed.  */
10925 *handled_ops_p
= true;
10926 switch (gimple_code (stmt
))
10932 case GIMPLE_OMP_FOR
:
10933 case GIMPLE_OMP_SECTIONS
:
/* First workshare construct -> 1; any subsequent one -> -1.  */
10934 *info
= *info
== 0 ? 1 : -1;
/* Context passed to the tree-inline machinery while building a task
   copy function.  NOTE(review): the member declarations of this struct
   are not visible in this chunk.  */
10943 struct omp_taskcopy_context
10945 /* This field must be at the beginning, as we do "inheritance": Some
10946    callback functions for tree-inline.c (e.g., omp_copy_decl)
10947    receive a copy_body_data pointer that is up-casted to an
10948    omp_context pointer.  */
/* copy_body_data::copy_decl hook used while building a task copyfn.
   VAR is a decl from the source function; CB is really the embedded
   copy_body_data of an omp_taskcopy_context (see the "inheritance" note
   on that struct).  Decls recorded in the context's sender-field map get
   a fresh temporary; the behavior for other decls is outside this chunk
   -- TODO confirm against the full file.  */
10954 task_copyfn_copy_decl (tree var
, copy_body_data
*cb
)
/* Recover the enclosing context from its first member.  */
10956 struct omp_taskcopy_context
*tcctx
= (struct omp_taskcopy_context
*) cb
;
/* Variables that live in the sender record are given private temps.  */
10958 if (splay_tree_lookup (tcctx
->ctx
->sfield_map
, (splay_tree_key
) var
))
10959 return create_tmp_var (TREE_TYPE (var
));
/* Build a variant of ORIG_TYPE (a RECORD_TYPE) for the task copy
   function, remapping each field's type and any variable sizes/offsets
   through TCCTX->cb so they refer to the copyfn's own decls.
   NOTE(review): the final "return type;" of this function is not visible
   in this chunk -- confirm against the full file.  */
10965 task_copyfn_remap_type (struct omp_taskcopy_context
*tcctx
, tree orig_type
)
10967 tree name
, new_fields
= NULL
, type
, f
;
/* Fresh record mirroring ORIG_TYPE, carrying the same type name.  */
10969 type
= lang_hooks
.types
.make_type (RECORD_TYPE
);
10970 name
= DECL_NAME (TYPE_NAME (orig_type
));
10971 name
= build_decl (gimple_location (tcctx
->ctx
->stmt
),
10972 TYPE_DECL
, name
, type
);
10973 TYPE_NAME (type
) = name
;
/* Copy every field, remapping its type and walking its size/offset
   trees so embedded SAVE_EXPRs/decls are rewritten for the copyfn.  */
10975 for (f
= TYPE_FIELDS (orig_type
); f
; f
= TREE_CHAIN (f
))
10977 tree new_f
= copy_node (f
);
10978 DECL_CONTEXT (new_f
) = type
;
10979 TREE_TYPE (new_f
) = remap_type (TREE_TYPE (f
), &tcctx
->cb
);
10980 TREE_CHAIN (new_f
) = new_fields
;
10981 walk_tree (&DECL_SIZE (new_f
), copy_tree_body_r
, &tcctx
->cb
, NULL
);
10982 walk_tree (&DECL_SIZE_UNIT (new_f
), copy_tree_body_r
, &tcctx
->cb
, NULL
);
10983 walk_tree (&DECL_FIELD_OFFSET (new_f
), copy_tree_body_r
,
/* Fields were chained in reverse above; also record old -> new so
   later passes can look the replacement up in the decl map.  */
10985 new_fields
= new_f
;
10986 tcctx
->cb
.decl_map
->put (f
, new_f
);
10988 TYPE_FIELDS (type
) = nreverse (new_fields
);
10989 layout_type (type
);
10993 /* Create task copyfn.  */
/* Populate the GOMP task copy function for TASK_STMT: it receives a
   destination record pointer (ARG) and a source record pointer (SARG)
   and copies/constructs the data-sharing fields between them, in three
   passes (VLA size temporaries, non-VLA copies, VLA firstprivates).
   NOTE(review): several interior lines (braces, a few statements) are
   not visible in this chunk.  */
10996 create_task_copyfn (gomp_task
*task_stmt
, omp_context
*ctx
)
10998 struct function
*child_cfun
;
10999 tree child_fn
, t
, c
, src
, dst
, f
, sf
, arg
, sarg
, decl
;
11000 tree record_type
, srecord_type
, bind
, list
;
11001 bool record_needs_remap
= false, srecord_needs_remap
= false;
11003 struct omp_taskcopy_context tcctx
;
11004 location_t loc
= gimple_location (task_stmt
);
11005 size_t looptempno
= 0;
11007 child_fn
= gimple_omp_task_copy_fn (task_stmt
);
11008 child_cfun
= DECL_STRUCT_FUNCTION (child_fn
);
/* The copyfn must not have been expanded into a CFG yet.  */
11009 gcc_assert (child_cfun
->cfg
== NULL
);
11010 DECL_SAVED_TREE (child_fn
) = alloc_stmt_list ();
11012 /* Reset DECL_CONTEXT on function arguments.  */
11013 for (t
= DECL_ARGUMENTS (child_fn
); t
; t
= DECL_CHAIN (t
))
11014 DECL_CONTEXT (t
) = child_fn
;
11016 /* Populate the function.  */
11017 push_gimplify_context ();
11018 push_cfun (child_cfun
);
11020 bind
= build3 (BIND_EXPR
, void_type_node
, NULL
, NULL
, NULL
);
11021 TREE_SIDE_EFFECTS (bind
) = 1;
11023 DECL_SAVED_TREE (child_fn
) = bind
;
11024 DECL_SOURCE_LOCATION (child_fn
) = gimple_location (task_stmt
);
11026 /* Remap src and dst argument types if needed.  */
11027 record_type
= ctx
->record_type
;
11028 srecord_type
= ctx
->srecord_type
;
11029 for (f
= TYPE_FIELDS (record_type
); f
; f
= DECL_CHAIN (f
))
11030 if (variably_modified_type_p (TREE_TYPE (f
), ctx
->cb
.src_fn
))
11032 record_needs_remap
= true;
11035 for (f
= TYPE_FIELDS (srecord_type
); f
; f
= DECL_CHAIN (f
))
11036 if (variably_modified_type_p (TREE_TYPE (f
), ctx
->cb
.src_fn
))
11038 srecord_needs_remap
= true;
/* Any variably modified field type forces a copy_body_data setup so
   sizes/offsets can be remapped into the copyfn via
   task_copyfn_remap_type.  */
11042 if (record_needs_remap
|| srecord_needs_remap
)
11044 memset (&tcctx
, '\0', sizeof (tcctx
));
11045 tcctx
.cb
.src_fn
= ctx
->cb
.src_fn
;
11046 tcctx
.cb
.dst_fn
= child_fn
;
11047 tcctx
.cb
.src_node
= cgraph_node::get (tcctx
.cb
.src_fn
);
11048 gcc_checking_assert (tcctx
.cb
.src_node
);
11049 tcctx
.cb
.dst_node
= tcctx
.cb
.src_node
;
11050 tcctx
.cb
.src_cfun
= ctx
->cb
.src_cfun
;
11051 tcctx
.cb
.copy_decl
= task_copyfn_copy_decl
;
11052 tcctx
.cb
.eh_lp_nr
= 0;
11053 tcctx
.cb
.transform_call_graph_edges
= CB_CGE_MOVE
;
11054 tcctx
.cb
.decl_map
= new hash_map
<tree
, tree
>;
11057 if (record_needs_remap
)
11058 record_type
= task_copyfn_remap_type (&tcctx
, record_type
);
11059 if (srecord_needs_remap
)
11060 srecord_type
= task_copyfn_remap_type (&tcctx
, srecord_type
);
/* No remapping needed; a NULL decl_map is the "no remap" flag tested
   throughout the passes below.  */
11063 tcctx
.cb
.decl_map
= NULL
;
/* ARG = destination record pointer, SARG = source record pointer.  */
11065 arg
= DECL_ARGUMENTS (child_fn
);
11066 TREE_TYPE (arg
) = build_pointer_type (record_type
);
11067 sarg
= DECL_CHAIN (arg
);
11068 TREE_TYPE (sarg
) = build_pointer_type (srecord_type
);
11070 /* First pass: initialize temporaries used in record_type and srecord_type
11071    sizes and field offsets.  */
11072 if (tcctx
.cb
.decl_map
)
11073 for (c
= gimple_omp_task_clauses (task_stmt
); c
; c
= OMP_CLAUSE_CHAIN (c
))
11074 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_FIRSTPRIVATE
)
11078 decl
= OMP_CLAUSE_DECL (c
);
11079 p
= tcctx
.cb
.decl_map
->get (decl
);
11082 n
= splay_tree_lookup (ctx
->sfield_map
, (splay_tree_key
) decl
);
11083 sf
= (tree
) n
->value
;
11084 sf
= *tcctx
.cb
.decl_map
->get (sf
);
11085 src
= build_simple_mem_ref_loc (loc
, sarg
);
11086 src
= omp_build_component_ref (src
, sf
);
11087 t
= build2 (MODIFY_EXPR
, TREE_TYPE (*p
), *p
, src
);
11088 append_to_statement_list (t
, &list
);
11091 /* Second pass: copy shared var pointers and copy construct non-VLA
11092    firstprivate vars.  */
11093 for (c
= gimple_omp_task_clauses (task_stmt
); c
; c
= OMP_CLAUSE_CHAIN (c
))
11094 switch (OMP_CLAUSE_CODE (c
))
11096 splay_tree_key key
;
11097 case OMP_CLAUSE_SHARED
:
11098 decl
= OMP_CLAUSE_DECL (c
);
11099 key
= (splay_tree_key
) decl
;
/* shared-firstprivate vars are keyed by DECL_UID address to avoid
   clashing with the plain shared entry for the same decl.  */
11100 if (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c
))
11101 key
= (splay_tree_key
) &DECL_UID (decl
);
11102 n
= splay_tree_lookup (ctx
->field_map
, key
);
11105 f
= (tree
) n
->value
;
11106 if (tcctx
.cb
.decl_map
)
11107 f
= *tcctx
.cb
.decl_map
->get (f
);
11108 n
= splay_tree_lookup (ctx
->sfield_map
, key
);
11109 sf
= (tree
) n
->value
;
11110 if (tcctx
.cb
.decl_map
)
11111 sf
= *tcctx
.cb
.decl_map
->get (sf
);
11112 src
= build_simple_mem_ref_loc (loc
, sarg
);
11113 src
= omp_build_component_ref (src
, sf
);
11114 dst
= build_simple_mem_ref_loc (loc
, arg
);
11115 dst
= omp_build_component_ref (dst
, f
);
11116 t
= build2 (MODIFY_EXPR
, TREE_TYPE (dst
), dst
, src
);
11117 append_to_statement_list (t
, &list
);
11119 case OMP_CLAUSE_REDUCTION
:
11120 case OMP_CLAUSE_IN_REDUCTION
:
11121 decl
= OMP_CLAUSE_DECL (c
);
/* Strip the MEM_REF/offset wrapping to reach the base decl the
   field maps are keyed on.  */
11122 if (TREE_CODE (decl
) == MEM_REF
)
11124 decl
= TREE_OPERAND (decl
, 0);
11125 if (TREE_CODE (decl
) == POINTER_PLUS_EXPR
)
11126 decl
= TREE_OPERAND (decl
, 0);
11127 if (TREE_CODE (decl
) == INDIRECT_REF
11128 || TREE_CODE (decl
) == ADDR_EXPR
)
11129 decl
= TREE_OPERAND (decl
, 0);
11131 key
= (splay_tree_key
) decl
;
11132 n
= splay_tree_lookup (ctx
->field_map
, key
);
11135 f
= (tree
) n
->value
;
11136 if (tcctx
.cb
.decl_map
)
11137 f
= *tcctx
.cb
.decl_map
->get (f
);
11138 n
= splay_tree_lookup (ctx
->sfield_map
, key
);
11139 sf
= (tree
) n
->value
;
11140 if (tcctx
.cb
.decl_map
)
11141 sf
= *tcctx
.cb
.decl_map
->get (sf
);
11142 src
= build_simple_mem_ref_loc (loc
, sarg
);
11143 src
= omp_build_component_ref (src
, sf
);
/* For a reference-to-pointer base an extra dereference is needed.  */
11144 if (decl
!= OMP_CLAUSE_DECL (c
)
11145 && TREE_CODE (TREE_TYPE (decl
)) == REFERENCE_TYPE
11146 && TREE_CODE (TREE_TYPE (TREE_TYPE (decl
))) == POINTER_TYPE
)
11147 src
= build_simple_mem_ref_loc (loc
, src
);
11148 dst
= build_simple_mem_ref_loc (loc
, arg
);
11149 dst
= omp_build_component_ref (dst
, f
);
11150 t
= build2 (MODIFY_EXPR
, TREE_TYPE (dst
), dst
, src
);
11151 append_to_statement_list (t
, &list
);
11153 case OMP_CLAUSE__LOOPTEMP_
:
11154 /* Fields for first two _looptemp_ clauses are initialized by
11155    GOMP_taskloop*, the rest are handled like firstprivate.  */
11156 if (looptempno
< 2)
11162 case OMP_CLAUSE__REDUCTEMP_
:
11163 case OMP_CLAUSE_FIRSTPRIVATE
:
11164 decl
= OMP_CLAUSE_DECL (c
);
/* VLAs are deferred to the last pass below.  */
11165 if (is_variable_sized (decl
))
11167 n
= splay_tree_lookup (ctx
->field_map
, (splay_tree_key
) decl
);
11170 f
= (tree
) n
->value
;
11171 if (tcctx
.cb
.decl_map
)
11172 f
= *tcctx
.cb
.decl_map
->get (f
);
11173 n
= splay_tree_lookup (ctx
->sfield_map
, (splay_tree_key
) decl
);
11176 sf
= (tree
) n
->value
;
11177 if (tcctx
.cb
.decl_map
)
11178 sf
= *tcctx
.cb
.decl_map
->get (sf
);
11179 src
= build_simple_mem_ref_loc (loc
, sarg
);
11180 src
= omp_build_component_ref (src
, sf
);
11181 if (use_pointer_for_field (decl
, NULL
) || omp_is_reference (decl
))
11182 src
= build_simple_mem_ref_loc (loc
, src
);
11186 dst
= build_simple_mem_ref_loc (loc
, arg
);
11187 dst
= omp_build_component_ref (dst
, f
);
/* Plain assignment for the internal temps; the language hook builds
   a proper copy constructor for real firstprivate vars.  */
11188 if (OMP_CLAUSE_CODE (c
) != OMP_CLAUSE_FIRSTPRIVATE
)
11189 t
= build2 (MODIFY_EXPR
, TREE_TYPE (dst
), dst
, src
);
11191 t
= lang_hooks
.decls
.omp_clause_copy_ctor (c
, dst
, src
);
11192 append_to_statement_list (t
, &list
);
11194 case OMP_CLAUSE_PRIVATE
:
11195 if (! OMP_CLAUSE_PRIVATE_OUTER_REF (c
))
11197 decl
= OMP_CLAUSE_DECL (c
);
11198 n
= splay_tree_lookup (ctx
->field_map
, (splay_tree_key
) decl
);
11199 f
= (tree
) n
->value
;
11200 if (tcctx
.cb
.decl_map
)
11201 f
= *tcctx
.cb
.decl_map
->get (f
);
11202 n
= splay_tree_lookup (ctx
->sfield_map
, (splay_tree_key
) decl
);
11205 sf
= (tree
) n
->value
;
11206 if (tcctx
.cb
.decl_map
)
11207 sf
= *tcctx
.cb
.decl_map
->get (sf
);
11208 src
= build_simple_mem_ref_loc (loc
, sarg
);
11209 src
= omp_build_component_ref (src
, sf
);
11210 if (use_pointer_for_field (decl
, NULL
))
11211 src
= build_simple_mem_ref_loc (loc
, src
);
11215 dst
= build_simple_mem_ref_loc (loc
, arg
);
11216 dst
= omp_build_component_ref (dst
, f
);
11217 t
= build2 (MODIFY_EXPR
, TREE_TYPE (dst
), dst
, src
);
11218 append_to_statement_list (t
, &list
);
11224 /* Last pass: handle VLA firstprivates.  */
11225 if (tcctx
.cb
.decl_map
)
11226 for (c
= gimple_omp_task_clauses (task_stmt
); c
; c
= OMP_CLAUSE_CHAIN (c
))
11227 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_FIRSTPRIVATE
)
11231 decl
= OMP_CLAUSE_DECL (c
);
11232 if (!is_variable_sized (decl
))
11234 n
= splay_tree_lookup (ctx
->field_map
, (splay_tree_key
) decl
);
11237 f
= (tree
) n
->value
;
11238 f
= *tcctx
.cb
.decl_map
->get (f
);
/* A VLA firstprivate is represented by a pointer decl whose value
   expr is *ptr; copy the data, then store &copy in the pointer
   field.  */
11239 gcc_assert (DECL_HAS_VALUE_EXPR_P (decl
));
11240 ind
= DECL_VALUE_EXPR (decl
);
11241 gcc_assert (TREE_CODE (ind
) == INDIRECT_REF
);
11242 gcc_assert (DECL_P (TREE_OPERAND (ind
, 0)));
11243 n
= splay_tree_lookup (ctx
->sfield_map
,
11244 (splay_tree_key
) TREE_OPERAND (ind
, 0));
11245 sf
= (tree
) n
->value
;
11246 sf
= *tcctx
.cb
.decl_map
->get (sf
);
11247 src
= build_simple_mem_ref_loc (loc
, sarg
);
11248 src
= omp_build_component_ref (src
, sf
);
11249 src
= build_simple_mem_ref_loc (loc
, src
);
11250 dst
= build_simple_mem_ref_loc (loc
, arg
);
11251 dst
= omp_build_component_ref (dst
, f
);
11252 t
= lang_hooks
.decls
.omp_clause_copy_ctor (c
, dst
, src
);
11253 append_to_statement_list (t
, &list
);
11254 n
= splay_tree_lookup (ctx
->field_map
,
11255 (splay_tree_key
) TREE_OPERAND (ind
, 0));
11256 df
= (tree
) n
->value
;
11257 df
= *tcctx
.cb
.decl_map
->get (df
);
11258 ptr
= build_simple_mem_ref_loc (loc
, arg
);
11259 ptr
= omp_build_component_ref (ptr
, df
);
11260 t
= build2 (MODIFY_EXPR
, TREE_TYPE (ptr
), ptr
,
11261 build_fold_addr_expr_loc (loc
, dst
));
11262 append_to_statement_list (t
, &list
);
/* Finish the copyfn body with a return and install it in the bind.  */
11265 t
= build1 (RETURN_EXPR
, void_type_node
, NULL
);
11266 append_to_statement_list (t
, &list
);
11268 if (tcctx
.cb
.decl_map
)
11269 delete tcctx
.cb
.decl_map
;
11270 pop_gimplify_context (NULL
);
11271 BIND_EXPR_BODY (bind
) = list
;
/* Lower OMP_CLAUSE_DEPEND clauses found in *PCLAUSES into a runtime
   depend array: initialization stores go into *ISEQ, a clobber of the
   array goes into *OSEQ, and a single DEPEND_LAST clause pointing at the
   array is prepended to *PCLAUSES.  NOTE(review): the per-kind count
   increments inside the first switch, and parts of the emission loop,
   are not visible in this chunk -- confirm against the full file.  */
11276 lower_depend_clauses (tree
*pclauses
, gimple_seq
*iseq
, gimple_seq
*oseq
)
/* cnt[] counts clauses per depend kind; idx 2 reserves the two header
   slots of the classic depend array layout.  */
11280 size_t cnt
[4] = { 0, 0, 0, 0 }, idx
= 2, i
;
11282 clauses
= omp_find_clause (*pclauses
, OMP_CLAUSE_DEPEND
);
11283 gcc_assert (clauses
);
/* First walk: classify and count the depend clauses by kind.  */
11284 for (c
= clauses
; c
; c
= OMP_CLAUSE_CHAIN (c
))
11285 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_DEPEND
)
11286 switch (OMP_CLAUSE_DEPEND_KIND (c
))
11288 case OMP_CLAUSE_DEPEND_LAST
:
11289 /* Lowering already done at gimplification.  */
11291 case OMP_CLAUSE_DEPEND_IN
:
11294 case OMP_CLAUSE_DEPEND_OUT
:
11295 case OMP_CLAUSE_DEPEND_INOUT
:
11298 case OMP_CLAUSE_DEPEND_MUTEXINOUTSET
:
11301 case OMP_CLAUSE_DEPEND_DEPOBJ
:
11304 case OMP_CLAUSE_DEPEND_SOURCE
:
11305 case OMP_CLAUSE_DEPEND_SINK
:
11308 gcc_unreachable ();
/* Mutexinoutset/depobj entries force the extended array layout.  */
11310 if (cnt
[1] || cnt
[3])
11312 size_t total
= cnt
[0] + cnt
[1] + cnt
[2] + cnt
[3];
/* The array holds IDX header/count slots followed by one address
   slot per depend clause.  */
11313 tree type
= build_array_type_nelts (ptr_type_node
, total
+ idx
);
11314 tree array
= create_tmp_var (type
);
11315 TREE_ADDRESSABLE (array
) = 1;
11316 tree r
= build4 (ARRAY_REF
, ptr_type_node
, array
, size_int (0), NULL_TREE
,
11320 g
= gimple_build_assign (r
, build_int_cst (ptr_type_node
, 0));
11321 gimple_seq_add_stmt (iseq
, g
);
11322 r
= build4 (ARRAY_REF
, ptr_type_node
, array
, size_int (1), NULL_TREE
,
11325 g
= gimple_build_assign (r
, build_int_cst (ptr_type_node
, total
));
11326 gimple_seq_add_stmt (iseq
, g
);
/* Store the per-kind counts into the remaining header slots.  */
11327 for (i
= 0; i
< (idx
== 5 ? 3 : 1); i
++)
11329 r
= build4 (ARRAY_REF
, ptr_type_node
, array
,
11330 size_int (i
+ 1 + (idx
== 5)), NULL_TREE
, NULL_TREE
);
11331 g
= gimple_build_assign (r
, build_int_cst (ptr_type_node
, cnt
[i
]));
11332 gimple_seq_add_stmt (iseq
, g
);
/* Second walk, per kind group: emit the dependence addresses in the
   order the runtime expects.  */
11334 for (i
= 0; i
< 4; i
++)
11338 for (c
= clauses
; c
; c
= OMP_CLAUSE_CHAIN (c
))
11339 if (OMP_CLAUSE_CODE (c
) != OMP_CLAUSE_DEPEND
)
11343 switch (OMP_CLAUSE_DEPEND_KIND (c
))
11345 case OMP_CLAUSE_DEPEND_IN
:
11349 case OMP_CLAUSE_DEPEND_OUT
:
11350 case OMP_CLAUSE_DEPEND_INOUT
:
11354 case OMP_CLAUSE_DEPEND_MUTEXINOUTSET
:
11358 case OMP_CLAUSE_DEPEND_DEPOBJ
:
11363 gcc_unreachable ();
11365 tree t
= OMP_CLAUSE_DECL (c
);
11366 t
= fold_convert (ptr_type_node
, t
);
11367 gimplify_expr (&t
, iseq
, NULL
, is_gimple_val
, fb_rvalue
);
11368 r
= build4 (ARRAY_REF
, ptr_type_node
, array
, size_int (idx
++),
11369 NULL_TREE
, NULL_TREE
);
11370 g
= gimple_build_assign (r
, t
);
11371 gimple_seq_add_stmt (iseq
, g
);
/* Replace the depend clauses with a single DEPEND_LAST clause that
   hands the runtime the address of the array.  */
11374 c
= build_omp_clause (UNKNOWN_LOCATION
, OMP_CLAUSE_DEPEND
);
11375 OMP_CLAUSE_DEPEND_KIND (c
) = OMP_CLAUSE_DEPEND_LAST
;
11376 OMP_CLAUSE_DECL (c
) = build_fold_addr_expr (array
);
11377 OMP_CLAUSE_CHAIN (c
) = *pclauses
;
/* Kill the array's lifetime after the construct.  */
11379 tree clobber
= build_clobber (type
);
11380 g
= gimple_build_assign (array
, clobber
);
11381 gimple_seq_add_stmt (oseq
, g
);
11384 /* Lower the OpenMP parallel or task directive in the current statement
11385    in GSI_P.  CTX holds context information for the directive.  */
/* NOTE(review): several interior lines (braces, a few declarations such
   as those of CLAUSES and CHILD_FN, some early-return paths) are not
   visible in this chunk.  */
11388 lower_omp_taskreg (gimple_stmt_iterator
*gsi_p
, omp_context
*ctx
)
11392 gimple
*stmt
= gsi_stmt (*gsi_p
);
11393 gbind
*par_bind
, *bind
, *dep_bind
= NULL
;
11394 gimple_seq par_body
;
11395 location_t loc
= gimple_location (stmt
);
11397 clauses
= gimple_omp_taskreg_clauses (stmt
);
11398 if (gimple_code (stmt
) == GIMPLE_OMP_TASK
11399 && gimple_omp_task_taskwait_p (stmt
))
11407 = as_a
<gbind
*> (gimple_seq_first_stmt (gimple_omp_body (stmt
)));
11408 par_body
= gimple_bind_body (par_bind
);
11410 child_fn
= ctx
->cb
.dst_fn
;
/* Detect a parallel whose body is a single workshare so the expander
   can use the combined GOMP_parallel_* entry points.  */
11411 if (gimple_code (stmt
) == GIMPLE_OMP_PARALLEL
11412 && !gimple_omp_parallel_combined_p (stmt
))
11414 struct walk_stmt_info wi
;
11417 memset (&wi
, 0, sizeof (wi
));
11419 wi
.val_only
= true;
11420 walk_gimple_seq (par_body
, check_combined_parallel
, NULL
, &wi
);
11422 gimple_omp_parallel_set_combined_p (stmt
, true);
/* Tasks with depend clauses get their dependences lowered into
   sequences emitted around the construct in a wrapping bind.  */
11424 gimple_seq dep_ilist
= NULL
;
11425 gimple_seq dep_olist
= NULL
;
11426 if (gimple_code (stmt
) == GIMPLE_OMP_TASK
11427 && omp_find_clause (clauses
, OMP_CLAUSE_DEPEND
))
11429 push_gimplify_context ();
11430 dep_bind
= gimple_build_bind (NULL
, NULL
, make_node (BLOCK
));
11431 lower_depend_clauses (gimple_omp_task_clauses_ptr (stmt
),
11432 &dep_ilist
, &dep_olist
);
11435 if (gimple_code (stmt
) == GIMPLE_OMP_TASK
11436 && gimple_omp_task_taskwait_p (stmt
))
11440 gsi_replace (gsi_p
, dep_bind
, true);
11441 gimple_bind_add_seq (dep_bind
, dep_ilist
);
11442 gimple_bind_add_stmt (dep_bind
, stmt
);
11443 gimple_bind_add_seq (dep_bind
, dep_olist
);
11444 pop_gimplify_context (dep_bind
);
/* A task with a sender record needs a copy function for firstprivate
   and shared data marshalling.  */
11449 if (ctx
->srecord_type
)
11450 create_task_copyfn (as_a
<gomp_task
*> (stmt
), ctx
);
11452 gimple_seq tskred_ilist
= NULL
;
11453 gimple_seq tskred_olist
= NULL
;
/* Task reductions on taskloop, and _reductemp_ on parallel, are
   lowered into sequences placed around the construct.  */
11454 if ((is_task_ctx (ctx
)
11455 && gimple_omp_task_taskloop_p (ctx
->stmt
)
11456 && omp_find_clause (gimple_omp_task_clauses (ctx
->stmt
),
11457 OMP_CLAUSE_REDUCTION
))
11458 || (is_parallel_ctx (ctx
)
11459 && omp_find_clause (gimple_omp_parallel_clauses (stmt
),
11460 OMP_CLAUSE__REDUCTEMP_
)))
11462 if (dep_bind
== NULL
)
11464 push_gimplify_context ();
11465 dep_bind
= gimple_build_bind (NULL
, NULL
, make_node (BLOCK
));
11467 lower_omp_task_reductions (ctx
, is_task_ctx (ctx
) ? OMP_TASKLOOP
11469 gimple_omp_taskreg_clauses (ctx
->stmt
),
11470 &tskred_ilist
, &tskred_olist
);
11473 push_gimplify_context ();
11475 gimple_seq par_olist
= NULL
;
11476 gimple_seq par_ilist
= NULL
;
11477 gimple_seq par_rlist
= NULL
;
11478 lower_rec_input_clauses (clauses
, &par_ilist
, &par_olist
, ctx
, NULL
);
11479 lower_omp (&par_body
, ctx
);
/* Task reductions are handled separately above; plain reduction
   epilogues are only emitted for parallel.  */
11480 if (gimple_code (stmt
) != GIMPLE_OMP_TASK
)
11481 lower_reduction_clauses (clauses
, &par_rlist
, NULL
, ctx
);
11483 /* Declare all the variables created by mapping and the variables
11484    declared in the scope of the parallel body.  */
11485 record_vars_into (ctx
->block_vars
, child_fn
)
;
11486 maybe_remove_omp_member_access_dummy_vars (par_bind
);
11487 record_vars_into (gimple_bind_vars (par_bind
), child_fn
);
/* The sender record (.omp_data_o) carries the shared/firstprivate data
   from the spawning thread into the outlined child function.  */
11489 if (ctx
->record_type
)
11492 = create_tmp_var (ctx
->srecord_type
? ctx
->srecord_type
11493 : ctx
->record_type
, ".omp_data_o");
11494 DECL_NAMELESS (ctx
->sender_decl
) = 1;
11495 TREE_ADDRESSABLE (ctx
->sender_decl
) = 1;
11496 gimple_omp_taskreg_set_data_arg (stmt
, ctx
->sender_decl
);
11499 gimple_seq olist
= NULL
;
11500 gimple_seq ilist
= NULL
;
11501 lower_send_clauses (clauses
, &ilist
, &olist
, ctx
);
11502 lower_send_shared_vars (&ilist
, &olist
, ctx
);
/* End the sender record's lifetime after the construct.  */
11504 if (ctx
->record_type
)
11506 tree clobber
= build_clobber (TREE_TYPE (ctx
->sender_decl
));
11507 gimple_seq_add_stmt (&olist
, gimple_build_assign (ctx
->sender_decl
,
11511 /* Once all the expansions are done, sequence all the different
11512    fragments inside gimple_omp_body.  */
11514 gimple_seq new_body
= NULL
;
11516 if (ctx
->record_type
)
11518 t
= build_fold_addr_expr_loc (loc
, ctx
->sender_decl
);
11519 /* fixup_child_record_type might have changed receiver_decl's type.  */
11520 t
= fold_convert_loc (loc
, TREE_TYPE (ctx
->receiver_decl
), t
);
11521 gimple_seq_add_stmt (&new_body
,
11522 gimple_build_assign (ctx
->receiver_decl
, t
));
11525 gimple_seq_add_seq (&new_body
, par_ilist
);
11526 gimple_seq_add_seq (&new_body
, par_body
);
11527 gimple_seq_add_seq (&new_body
, par_rlist
);
11528 if (ctx
->cancellable
)
11529 gimple_seq_add_stmt (&new_body
, gimple_build_label (ctx
->cancel_label
));
11530 gimple_seq_add_seq (&new_body
, par_olist
);
11531 new_body
= maybe_catch_exception (new_body
);
11532 if (gimple_code (stmt
) == GIMPLE_OMP_TASK
)
11533 gimple_seq_add_stmt (&new_body
,
11534 gimple_build_omp_continue (integer_zero_node
,
11535 integer_zero_node
));
11536 gimple_seq_add_stmt (&new_body
, gimple_build_omp_return (false));
11537 gimple_omp_set_body (stmt
, new_body
);
/* Wrap the construct (plus its send sequences) in a bind; if depend or
   task-reduction sequences exist, nest that bind inside DEP_BIND.  */
11539 if (dep_bind
&& gimple_bind_block (par_bind
) == NULL_TREE
)
11540 bind
= gimple_build_bind (NULL
, NULL
, make_node (BLOCK
));
11542 bind
= gimple_build_bind (NULL
, NULL
, gimple_bind_block (par_bind
));
11543 gsi_replace (gsi_p
, dep_bind
? dep_bind
: bind
, true);
11544 gimple_bind_add_seq (bind
, ilist
);
11545 gimple_bind_add_stmt (bind
, stmt
);
11546 gimple_bind_add_seq (bind
, olist
);
11548 pop_gimplify_context (NULL
);
11552 gimple_bind_add_seq (dep_bind
, dep_ilist
);
11553 gimple_bind_add_seq (dep_bind
, tskred_ilist
);
11554 gimple_bind_add_stmt (dep_bind
, bind
);
11555 gimple_bind_add_seq (dep_bind
, tskred_olist
);
11556 gimple_bind_add_seq (dep_bind
, dep_olist
);
11557 pop_gimplify_context (dep_bind
);
11561 /* Lower the GIMPLE_OMP_TARGET in the current statement
11562 in GSI_P. CTX holds context information for the directive. */
11565 lower_omp_target (gimple_stmt_iterator
*gsi_p
, omp_context
*ctx
)
11568 tree child_fn
, t
, c
;
11569 gomp_target
*stmt
= as_a
<gomp_target
*> (gsi_stmt (*gsi_p
));
11570 gbind
*tgt_bind
, *bind
, *dep_bind
= NULL
;
11571 gimple_seq tgt_body
, olist
, ilist
, fplist
, new_body
;
11572 location_t loc
= gimple_location (stmt
);
11573 bool offloaded
, data_region
;
11574 unsigned int map_cnt
= 0;
11576 offloaded
= is_gimple_omp_offloaded (stmt
);
11577 switch (gimple_omp_target_kind (stmt
))
11579 case GF_OMP_TARGET_KIND_REGION
:
11580 case GF_OMP_TARGET_KIND_UPDATE
:
11581 case GF_OMP_TARGET_KIND_ENTER_DATA
:
11582 case GF_OMP_TARGET_KIND_EXIT_DATA
:
11583 case GF_OMP_TARGET_KIND_OACC_PARALLEL
:
11584 case GF_OMP_TARGET_KIND_OACC_KERNELS
:
11585 case GF_OMP_TARGET_KIND_OACC_SERIAL
:
11586 case GF_OMP_TARGET_KIND_OACC_UPDATE
:
11587 case GF_OMP_TARGET_KIND_OACC_ENTER_EXIT_DATA
:
11588 case GF_OMP_TARGET_KIND_OACC_DECLARE
:
11589 data_region
= false;
11591 case GF_OMP_TARGET_KIND_DATA
:
11592 case GF_OMP_TARGET_KIND_OACC_DATA
:
11593 case GF_OMP_TARGET_KIND_OACC_HOST_DATA
:
11594 data_region
= true;
11597 gcc_unreachable ();
11600 clauses
= gimple_omp_target_clauses (stmt
);
11602 gimple_seq dep_ilist
= NULL
;
11603 gimple_seq dep_olist
= NULL
;
11604 if (omp_find_clause (clauses
, OMP_CLAUSE_DEPEND
))
11606 push_gimplify_context ();
11607 dep_bind
= gimple_build_bind (NULL
, NULL
, make_node (BLOCK
));
11608 lower_depend_clauses (gimple_omp_target_clauses_ptr (stmt
),
11609 &dep_ilist
, &dep_olist
);
11616 tgt_bind
= gimple_seq_first_stmt_as_a_bind (gimple_omp_body (stmt
));
11617 tgt_body
= gimple_bind_body (tgt_bind
);
11619 else if (data_region
)
11620 tgt_body
= gimple_omp_body (stmt
);
11621 child_fn
= ctx
->cb
.dst_fn
;
11623 push_gimplify_context ();
11626 for (c
= clauses
; c
; c
= OMP_CLAUSE_CHAIN (c
))
11627 switch (OMP_CLAUSE_CODE (c
))
11633 case OMP_CLAUSE_MAP
:
11635 /* First check what we're prepared to handle in the following. */
11636 switch (OMP_CLAUSE_MAP_KIND (c
))
11638 case GOMP_MAP_ALLOC
:
11640 case GOMP_MAP_FROM
:
11641 case GOMP_MAP_TOFROM
:
11642 case GOMP_MAP_POINTER
:
11643 case GOMP_MAP_TO_PSET
:
11644 case GOMP_MAP_DELETE
:
11645 case GOMP_MAP_RELEASE
:
11646 case GOMP_MAP_ALWAYS_TO
:
11647 case GOMP_MAP_ALWAYS_FROM
:
11648 case GOMP_MAP_ALWAYS_TOFROM
:
11649 case GOMP_MAP_FIRSTPRIVATE_POINTER
:
11650 case GOMP_MAP_FIRSTPRIVATE_REFERENCE
:
11651 case GOMP_MAP_STRUCT
:
11652 case GOMP_MAP_ALWAYS_POINTER
:
11653 case GOMP_MAP_ATTACH
:
11654 case GOMP_MAP_DETACH
:
11656 case GOMP_MAP_IF_PRESENT
:
11657 case GOMP_MAP_FORCE_ALLOC
:
11658 case GOMP_MAP_FORCE_TO
:
11659 case GOMP_MAP_FORCE_FROM
:
11660 case GOMP_MAP_FORCE_TOFROM
:
11661 case GOMP_MAP_FORCE_PRESENT
:
11662 case GOMP_MAP_FORCE_DEVICEPTR
:
11663 case GOMP_MAP_DEVICE_RESIDENT
:
11664 case GOMP_MAP_LINK
:
11665 case GOMP_MAP_FORCE_DETACH
:
11666 gcc_assert (is_gimple_omp_oacc (stmt
));
11669 gcc_unreachable ();
11673 case OMP_CLAUSE_TO
:
11674 case OMP_CLAUSE_FROM
:
11676 var
= OMP_CLAUSE_DECL (c
);
11679 if (OMP_CLAUSE_CODE (c
) != OMP_CLAUSE_MAP
11680 || (!OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c
)
11681 && (OMP_CLAUSE_MAP_KIND (c
)
11682 != GOMP_MAP_FIRSTPRIVATE_POINTER
)))
11687 if (DECL_SIZE (var
)
11688 && TREE_CODE (DECL_SIZE (var
)) != INTEGER_CST
)
11690 tree var2
= DECL_VALUE_EXPR (var
);
11691 gcc_assert (TREE_CODE (var2
) == INDIRECT_REF
);
11692 var2
= TREE_OPERAND (var2
, 0);
11693 gcc_assert (DECL_P (var2
));
11698 && OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_MAP
11699 && (OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_FIRSTPRIVATE_POINTER
11700 || OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_FIRSTPRIVATE_REFERENCE
))
11702 if (TREE_CODE (TREE_TYPE (var
)) == ARRAY_TYPE
)
11704 if (is_global_var (maybe_lookup_decl_in_outer_ctx (var
, ctx
))
11705 && varpool_node::get_create (var
)->offloadable
)
11708 tree type
= build_pointer_type (TREE_TYPE (var
));
11709 tree new_var
= lookup_decl (var
, ctx
);
11710 x
= create_tmp_var_raw (type
, get_name (new_var
));
11711 gimple_add_tmp_var (x
);
11712 x
= build_simple_mem_ref (x
);
11713 SET_DECL_VALUE_EXPR (new_var
, x
);
11714 DECL_HAS_VALUE_EXPR_P (new_var
) = 1;
11719 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_MAP
11720 && (OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_ATTACH
11721 || OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_DETACH
)
11722 && is_omp_target (stmt
))
11724 gcc_assert (maybe_lookup_field (c
, ctx
));
11729 if (!maybe_lookup_field (var
, ctx
))
11732 /* Don't remap compute constructs' reduction variables, because the
11733 intermediate result must be local to each gang. */
11734 if (offloaded
&& !(OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_MAP
11735 && OMP_CLAUSE_MAP_IN_REDUCTION (c
)))
11737 x
= build_receiver_ref (var
, true, ctx
);
11738 tree new_var
= lookup_decl (var
, ctx
);
11740 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_MAP
11741 && OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_POINTER
11742 && !OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c
)
11743 && TREE_CODE (TREE_TYPE (var
)) == ARRAY_TYPE
)
11744 x
= build_simple_mem_ref (x
);
11745 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_FIRSTPRIVATE
)
11747 gcc_assert (is_gimple_omp_oacc (ctx
->stmt
));
11748 if (omp_is_reference (new_var
)
11749 && (TREE_CODE (TREE_TYPE (new_var
)) != POINTER_TYPE
11750 || DECL_BY_REFERENCE (var
)))
11752 /* Create a local object to hold the instance
11754 tree type
= TREE_TYPE (TREE_TYPE (new_var
));
11755 const char *id
= IDENTIFIER_POINTER (DECL_NAME (new_var
));
11756 tree inst
= create_tmp_var (type
, id
);
11757 gimplify_assign (inst
, fold_indirect_ref (x
), &fplist
);
11758 x
= build_fold_addr_expr (inst
);
11760 gimplify_assign (new_var
, x
, &fplist
);
11762 else if (DECL_P (new_var
))
11764 SET_DECL_VALUE_EXPR (new_var
, x
);
11765 DECL_HAS_VALUE_EXPR_P (new_var
) = 1;
11768 gcc_unreachable ();
11773 case OMP_CLAUSE_FIRSTPRIVATE
:
11774 if (is_oacc_parallel_or_serial (ctx
))
11775 goto oacc_firstprivate
;
11777 var
= OMP_CLAUSE_DECL (c
);
11778 if (!omp_is_reference (var
)
11779 && !is_gimple_reg_type (TREE_TYPE (var
)))
11781 tree new_var
= lookup_decl (var
, ctx
);
11782 if (is_variable_sized (var
))
11784 tree pvar
= DECL_VALUE_EXPR (var
);
11785 gcc_assert (TREE_CODE (pvar
) == INDIRECT_REF
);
11786 pvar
= TREE_OPERAND (pvar
, 0);
11787 gcc_assert (DECL_P (pvar
));
11788 tree new_pvar
= lookup_decl (pvar
, ctx
);
11789 x
= build_fold_indirect_ref (new_pvar
);
11790 TREE_THIS_NOTRAP (x
) = 1;
11793 x
= build_receiver_ref (var
, true, ctx
);
11794 SET_DECL_VALUE_EXPR (new_var
, x
);
11795 DECL_HAS_VALUE_EXPR_P (new_var
) = 1;
11799 case OMP_CLAUSE_PRIVATE
:
11800 if (is_gimple_omp_oacc (ctx
->stmt
))
11802 var
= OMP_CLAUSE_DECL (c
);
11803 if (is_variable_sized (var
))
11805 tree new_var
= lookup_decl (var
, ctx
);
11806 tree pvar
= DECL_VALUE_EXPR (var
);
11807 gcc_assert (TREE_CODE (pvar
) == INDIRECT_REF
);
11808 pvar
= TREE_OPERAND (pvar
, 0);
11809 gcc_assert (DECL_P (pvar
));
11810 tree new_pvar
= lookup_decl (pvar
, ctx
);
11811 x
= build_fold_indirect_ref (new_pvar
);
11812 TREE_THIS_NOTRAP (x
) = 1;
11813 SET_DECL_VALUE_EXPR (new_var
, x
);
11814 DECL_HAS_VALUE_EXPR_P (new_var
) = 1;
11818 case OMP_CLAUSE_USE_DEVICE_PTR
:
11819 case OMP_CLAUSE_USE_DEVICE_ADDR
:
11820 case OMP_CLAUSE_IS_DEVICE_PTR
:
11821 var
= OMP_CLAUSE_DECL (c
);
11823 if (is_variable_sized (var
))
11825 tree new_var
= lookup_decl (var
, ctx
);
11826 tree pvar
= DECL_VALUE_EXPR (var
);
11827 gcc_assert (TREE_CODE (pvar
) == INDIRECT_REF
);
11828 pvar
= TREE_OPERAND (pvar
, 0);
11829 gcc_assert (DECL_P (pvar
));
11830 tree new_pvar
= lookup_decl (pvar
, ctx
);
11831 x
= build_fold_indirect_ref (new_pvar
);
11832 TREE_THIS_NOTRAP (x
) = 1;
11833 SET_DECL_VALUE_EXPR (new_var
, x
);
11834 DECL_HAS_VALUE_EXPR_P (new_var
) = 1;
11836 else if ((OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_USE_DEVICE_ADDR
11837 && !omp_is_reference (var
)
11838 && !omp_is_allocatable_or_ptr (var
)
11839 && !lang_hooks
.decls
.omp_array_data (var
, true))
11840 || TREE_CODE (TREE_TYPE (var
)) == ARRAY_TYPE
)
11842 tree new_var
= lookup_decl (var
, ctx
);
11843 tree type
= build_pointer_type (TREE_TYPE (var
));
11844 x
= create_tmp_var_raw (type
, get_name (new_var
));
11845 gimple_add_tmp_var (x
);
11846 x
= build_simple_mem_ref (x
);
11847 SET_DECL_VALUE_EXPR (new_var
, x
);
11848 DECL_HAS_VALUE_EXPR_P (new_var
) = 1;
11852 tree new_var
= lookup_decl (var
, ctx
);
11853 x
= create_tmp_var_raw (TREE_TYPE (new_var
), get_name (new_var
));
11854 gimple_add_tmp_var (x
);
11855 SET_DECL_VALUE_EXPR (new_var
, x
);
11856 DECL_HAS_VALUE_EXPR_P (new_var
) = 1;
11863 target_nesting_level
++;
11864 lower_omp (&tgt_body
, ctx
);
11865 target_nesting_level
--;
11867 else if (data_region
)
11868 lower_omp (&tgt_body
, ctx
);
11872 /* Declare all the variables created by mapping and the variables
11873 declared in the scope of the target body. */
11874 record_vars_into (ctx
->block_vars
, child_fn
);
11875 maybe_remove_omp_member_access_dummy_vars (tgt_bind
);
11876 record_vars_into (gimple_bind_vars (tgt_bind
), child_fn
);
11881 if (ctx
->record_type
)
11884 = create_tmp_var (ctx
->record_type
, ".omp_data_arr");
11885 DECL_NAMELESS (ctx
->sender_decl
) = 1;
11886 TREE_ADDRESSABLE (ctx
->sender_decl
) = 1;
11887 t
= make_tree_vec (3);
11888 TREE_VEC_ELT (t
, 0) = ctx
->sender_decl
;
11889 TREE_VEC_ELT (t
, 1)
11890 = create_tmp_var (build_array_type_nelts (size_type_node
, map_cnt
),
11891 ".omp_data_sizes");
11892 DECL_NAMELESS (TREE_VEC_ELT (t
, 1)) = 1;
11893 TREE_ADDRESSABLE (TREE_VEC_ELT (t
, 1)) = 1;
11894 TREE_STATIC (TREE_VEC_ELT (t
, 1)) = 1;
11895 tree tkind_type
= short_unsigned_type_node
;
11896 int talign_shift
= 8;
11897 TREE_VEC_ELT (t
, 2)
11898 = create_tmp_var (build_array_type_nelts (tkind_type
, map_cnt
),
11899 ".omp_data_kinds");
11900 DECL_NAMELESS (TREE_VEC_ELT (t
, 2)) = 1;
11901 TREE_ADDRESSABLE (TREE_VEC_ELT (t
, 2)) = 1;
11902 TREE_STATIC (TREE_VEC_ELT (t
, 2)) = 1;
11903 gimple_omp_target_set_data_arg (stmt
, t
);
11905 vec
<constructor_elt
, va_gc
> *vsize
;
11906 vec
<constructor_elt
, va_gc
> *vkind
;
11907 vec_alloc (vsize
, map_cnt
);
11908 vec_alloc (vkind
, map_cnt
);
11909 unsigned int map_idx
= 0;
11911 for (c
= clauses
; c
; c
= OMP_CLAUSE_CHAIN (c
))
11912 switch (OMP_CLAUSE_CODE (c
))
11914 tree ovar
, nc
, s
, purpose
, var
, x
, type
;
11915 unsigned int talign
;
11920 case OMP_CLAUSE_MAP
:
11921 case OMP_CLAUSE_TO
:
11922 case OMP_CLAUSE_FROM
:
11923 oacc_firstprivate_map
:
11925 ovar
= OMP_CLAUSE_DECL (c
);
11926 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_MAP
11927 && (OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_FIRSTPRIVATE_POINTER
11928 || (OMP_CLAUSE_MAP_KIND (c
)
11929 == GOMP_MAP_FIRSTPRIVATE_REFERENCE
)))
11931 if (!DECL_P (ovar
))
11933 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_MAP
11934 && OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c
))
11936 gcc_checking_assert (OMP_CLAUSE_DECL (OMP_CLAUSE_CHAIN (c
))
11937 == get_base_address (ovar
));
11938 nc
= OMP_CLAUSE_CHAIN (c
);
11939 ovar
= OMP_CLAUSE_DECL (nc
);
11943 tree x
= build_sender_ref (ovar
, ctx
);
11945 = build_fold_addr_expr_with_type (ovar
, ptr_type_node
);
11946 gimplify_assign (x
, v
, &ilist
);
11952 if (DECL_SIZE (ovar
)
11953 && TREE_CODE (DECL_SIZE (ovar
)) != INTEGER_CST
)
11955 tree ovar2
= DECL_VALUE_EXPR (ovar
);
11956 gcc_assert (TREE_CODE (ovar2
) == INDIRECT_REF
);
11957 ovar2
= TREE_OPERAND (ovar2
, 0);
11958 gcc_assert (DECL_P (ovar2
));
11961 if (!maybe_lookup_field (ovar
, ctx
)
11962 && !(OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_MAP
11963 && (OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_ATTACH
11964 || OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_DETACH
)))
11968 talign
= TYPE_ALIGN_UNIT (TREE_TYPE (ovar
));
11969 if (DECL_P (ovar
) && DECL_ALIGN_UNIT (ovar
) > talign
)
11970 talign
= DECL_ALIGN_UNIT (ovar
);
11973 && OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_MAP
11974 && (OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_ATTACH
11975 || OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_DETACH
)
11976 && is_omp_target (stmt
))
11978 var
= lookup_decl_in_outer_ctx (ovar
, ctx
);
11979 x
= build_sender_ref (c
, ctx
);
11980 gimplify_assign (x
, build_fold_addr_expr (var
), &ilist
);
11984 var
= lookup_decl_in_outer_ctx (ovar
, ctx
);
11985 x
= build_sender_ref (ovar
, ctx
);
11987 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_MAP
11988 && OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_POINTER
11989 && !OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c
)
11990 && TREE_CODE (TREE_TYPE (ovar
)) == ARRAY_TYPE
)
11992 gcc_assert (offloaded
);
11994 = create_tmp_var (TREE_TYPE (TREE_TYPE (x
)));
11995 mark_addressable (avar
);
11996 gimplify_assign (avar
, build_fold_addr_expr (var
), &ilist
);
11997 talign
= DECL_ALIGN_UNIT (avar
);
11998 avar
= build_fold_addr_expr (avar
);
11999 gimplify_assign (x
, avar
, &ilist
);
12001 else if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_FIRSTPRIVATE
)
12003 gcc_assert (is_gimple_omp_oacc (ctx
->stmt
));
12004 if (!omp_is_reference (var
))
12006 if (is_gimple_reg (var
)
12007 && OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (c
))
12008 TREE_NO_WARNING (var
) = 1;
12009 var
= build_fold_addr_expr (var
);
12012 talign
= TYPE_ALIGN_UNIT (TREE_TYPE (TREE_TYPE (ovar
)));
12013 gimplify_assign (x
, var
, &ilist
);
12015 else if (is_gimple_reg (var
))
12017 gcc_assert (offloaded
);
12018 tree avar
= create_tmp_var (TREE_TYPE (var
));
12019 mark_addressable (avar
);
12020 enum gomp_map_kind map_kind
= OMP_CLAUSE_MAP_KIND (c
);
12021 if (GOMP_MAP_COPY_TO_P (map_kind
)
12022 || map_kind
== GOMP_MAP_POINTER
12023 || map_kind
== GOMP_MAP_TO_PSET
12024 || map_kind
== GOMP_MAP_FORCE_DEVICEPTR
)
12026 /* If we need to initialize a temporary
12027 with VAR because it is not addressable, and
12028 the variable hasn't been initialized yet, then
12029 we'll get a warning for the store to avar.
12030 Don't warn in that case, the mapping might
12032 TREE_NO_WARNING (var
) = 1;
12033 gimplify_assign (avar
, var
, &ilist
);
12035 avar
= build_fold_addr_expr (avar
);
12036 gimplify_assign (x
, avar
, &ilist
);
12037 if ((GOMP_MAP_COPY_FROM_P (map_kind
)
12038 || map_kind
== GOMP_MAP_FORCE_DEVICEPTR
)
12039 && !TYPE_READONLY (TREE_TYPE (var
)))
12041 x
= unshare_expr (x
);
12042 x
= build_simple_mem_ref (x
);
12043 gimplify_assign (var
, x
, &olist
);
12048 /* While MAP is handled explicitly by the FE,
12049 for 'target update', only the identified is passed. */
12050 if ((OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_FROM
12051 || OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_TO
)
12052 && (omp_is_allocatable_or_ptr (var
)
12053 && omp_check_optional_argument (var
, false)))
12054 var
= build_fold_indirect_ref (var
);
12055 else if ((OMP_CLAUSE_CODE (c
) != OMP_CLAUSE_FROM
12056 && OMP_CLAUSE_CODE (c
) != OMP_CLAUSE_TO
)
12057 || (!omp_is_allocatable_or_ptr (var
)
12058 && !omp_check_optional_argument (var
, false)))
12059 var
= build_fold_addr_expr (var
);
12060 gimplify_assign (x
, var
, &ilist
);
12064 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_FIRSTPRIVATE
)
12066 gcc_checking_assert (is_gimple_omp_oacc (ctx
->stmt
));
12067 s
= TREE_TYPE (ovar
);
12068 if (TREE_CODE (s
) == REFERENCE_TYPE
12069 || omp_check_optional_argument (ovar
, false))
12071 s
= TYPE_SIZE_UNIT (s
);
12074 s
= OMP_CLAUSE_SIZE (c
);
12075 if (s
== NULL_TREE
)
12076 s
= TYPE_SIZE_UNIT (TREE_TYPE (ovar
));
12077 s
= fold_convert (size_type_node
, s
);
12078 purpose
= size_int (map_idx
++);
12079 CONSTRUCTOR_APPEND_ELT (vsize
, purpose
, s
);
12080 if (TREE_CODE (s
) != INTEGER_CST
)
12081 TREE_STATIC (TREE_VEC_ELT (t
, 1)) = 0;
12083 unsigned HOST_WIDE_INT tkind
, tkind_zero
;
12084 switch (OMP_CLAUSE_CODE (c
))
12086 case OMP_CLAUSE_MAP
:
12087 tkind
= OMP_CLAUSE_MAP_KIND (c
);
12088 tkind_zero
= tkind
;
12089 if (OMP_CLAUSE_MAP_MAYBE_ZERO_LENGTH_ARRAY_SECTION (c
))
12092 case GOMP_MAP_ALLOC
:
12093 case GOMP_MAP_IF_PRESENT
:
12095 case GOMP_MAP_FROM
:
12096 case GOMP_MAP_TOFROM
:
12097 case GOMP_MAP_ALWAYS_TO
:
12098 case GOMP_MAP_ALWAYS_FROM
:
12099 case GOMP_MAP_ALWAYS_TOFROM
:
12100 case GOMP_MAP_RELEASE
:
12101 case GOMP_MAP_FORCE_TO
:
12102 case GOMP_MAP_FORCE_FROM
:
12103 case GOMP_MAP_FORCE_TOFROM
:
12104 case GOMP_MAP_FORCE_PRESENT
:
12105 tkind_zero
= GOMP_MAP_ZERO_LEN_ARRAY_SECTION
;
12107 case GOMP_MAP_DELETE
:
12108 tkind_zero
= GOMP_MAP_DELETE_ZERO_LEN_ARRAY_SECTION
;
12112 if (tkind_zero
!= tkind
)
12114 if (integer_zerop (s
))
12115 tkind
= tkind_zero
;
12116 else if (integer_nonzerop (s
))
12117 tkind_zero
= tkind
;
12120 case OMP_CLAUSE_FIRSTPRIVATE
:
12121 gcc_checking_assert (is_gimple_omp_oacc (ctx
->stmt
));
12122 tkind
= GOMP_MAP_TO
;
12123 tkind_zero
= tkind
;
12125 case OMP_CLAUSE_TO
:
12126 tkind
= GOMP_MAP_TO
;
12127 tkind_zero
= tkind
;
12129 case OMP_CLAUSE_FROM
:
12130 tkind
= GOMP_MAP_FROM
;
12131 tkind_zero
= tkind
;
12134 gcc_unreachable ();
12136 gcc_checking_assert (tkind
12137 < (HOST_WIDE_INT_C (1U) << talign_shift
));
12138 gcc_checking_assert (tkind_zero
12139 < (HOST_WIDE_INT_C (1U) << talign_shift
));
12140 talign
= ceil_log2 (talign
);
12141 tkind
|= talign
<< talign_shift
;
12142 tkind_zero
|= talign
<< talign_shift
;
12143 gcc_checking_assert (tkind
12144 <= tree_to_uhwi (TYPE_MAX_VALUE (tkind_type
)));
12145 gcc_checking_assert (tkind_zero
12146 <= tree_to_uhwi (TYPE_MAX_VALUE (tkind_type
)));
12147 if (tkind
== tkind_zero
)
12148 x
= build_int_cstu (tkind_type
, tkind
);
12151 TREE_STATIC (TREE_VEC_ELT (t
, 2)) = 0;
12152 x
= build3 (COND_EXPR
, tkind_type
,
12153 fold_build2 (EQ_EXPR
, boolean_type_node
,
12154 unshare_expr (s
), size_zero_node
),
12155 build_int_cstu (tkind_type
, tkind_zero
),
12156 build_int_cstu (tkind_type
, tkind
));
12158 CONSTRUCTOR_APPEND_ELT (vkind
, purpose
, x
);
12163 case OMP_CLAUSE_FIRSTPRIVATE
:
12164 if (is_oacc_parallel_or_serial (ctx
))
12165 goto oacc_firstprivate_map
;
12166 ovar
= OMP_CLAUSE_DECL (c
);
12167 if (omp_is_reference (ovar
))
12168 talign
= TYPE_ALIGN_UNIT (TREE_TYPE (TREE_TYPE (ovar
)));
12170 talign
= DECL_ALIGN_UNIT (ovar
);
12171 var
= lookup_decl_in_outer_ctx (ovar
, ctx
);
12172 x
= build_sender_ref (ovar
, ctx
);
12173 tkind
= GOMP_MAP_FIRSTPRIVATE
;
12174 type
= TREE_TYPE (ovar
);
12175 if (omp_is_reference (ovar
))
12176 type
= TREE_TYPE (type
);
12177 if ((INTEGRAL_TYPE_P (type
)
12178 && TYPE_PRECISION (type
) <= POINTER_SIZE
)
12179 || TREE_CODE (type
) == POINTER_TYPE
)
12181 tkind
= GOMP_MAP_FIRSTPRIVATE_INT
;
12183 if (omp_is_reference (var
))
12184 t
= build_simple_mem_ref (var
);
12185 else if (OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (c
))
12186 TREE_NO_WARNING (var
) = 1;
12187 if (TREE_CODE (type
) != POINTER_TYPE
)
12188 t
= fold_convert (pointer_sized_int_node
, t
);
12189 t
= fold_convert (TREE_TYPE (x
), t
);
12190 gimplify_assign (x
, t
, &ilist
);
12192 else if (omp_is_reference (var
))
12193 gimplify_assign (x
, var
, &ilist
);
12194 else if (is_gimple_reg (var
))
12196 tree avar
= create_tmp_var (TREE_TYPE (var
));
12197 mark_addressable (avar
);
12198 if (OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (c
))
12199 TREE_NO_WARNING (var
) = 1;
12200 gimplify_assign (avar
, var
, &ilist
);
12201 avar
= build_fold_addr_expr (avar
);
12202 gimplify_assign (x
, avar
, &ilist
);
12206 var
= build_fold_addr_expr (var
);
12207 gimplify_assign (x
, var
, &ilist
);
12209 if (tkind
== GOMP_MAP_FIRSTPRIVATE_INT
)
12211 else if (omp_is_reference (ovar
))
12212 s
= TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ovar
)));
12214 s
= TYPE_SIZE_UNIT (TREE_TYPE (ovar
));
12215 s
= fold_convert (size_type_node
, s
);
12216 purpose
= size_int (map_idx
++);
12217 CONSTRUCTOR_APPEND_ELT (vsize
, purpose
, s
);
12218 if (TREE_CODE (s
) != INTEGER_CST
)
12219 TREE_STATIC (TREE_VEC_ELT (t
, 1)) = 0;
12221 gcc_checking_assert (tkind
12222 < (HOST_WIDE_INT_C (1U) << talign_shift
));
12223 talign
= ceil_log2 (talign
);
12224 tkind
|= talign
<< talign_shift
;
12225 gcc_checking_assert (tkind
12226 <= tree_to_uhwi (TYPE_MAX_VALUE (tkind_type
)));
12227 CONSTRUCTOR_APPEND_ELT (vkind
, purpose
,
12228 build_int_cstu (tkind_type
, tkind
));
12231 case OMP_CLAUSE_USE_DEVICE_PTR
:
12232 case OMP_CLAUSE_USE_DEVICE_ADDR
:
12233 case OMP_CLAUSE_IS_DEVICE_PTR
:
12234 ovar
= OMP_CLAUSE_DECL (c
);
12235 var
= lookup_decl_in_outer_ctx (ovar
, ctx
);
12237 if (lang_hooks
.decls
.omp_array_data (ovar
, true))
12239 tkind
= (OMP_CLAUSE_CODE (c
) != OMP_CLAUSE_IS_DEVICE_PTR
12240 ? GOMP_MAP_USE_DEVICE_PTR
: GOMP_MAP_FIRSTPRIVATE_INT
);
12241 x
= build_sender_ref ((splay_tree_key
) &DECL_NAME (ovar
), ctx
);
12243 else if (OMP_CLAUSE_CODE (c
) != OMP_CLAUSE_IS_DEVICE_PTR
)
12245 tkind
= GOMP_MAP_USE_DEVICE_PTR
;
12246 x
= build_sender_ref ((splay_tree_key
) &DECL_UID (ovar
), ctx
);
12250 tkind
= GOMP_MAP_FIRSTPRIVATE_INT
;
12251 x
= build_sender_ref (ovar
, ctx
);
12254 if (is_gimple_omp_oacc (ctx
->stmt
))
12256 gcc_assert (tkind
== GOMP_MAP_USE_DEVICE_PTR
);
12258 if (OMP_CLAUSE_USE_DEVICE_PTR_IF_PRESENT (c
))
12259 tkind
= GOMP_MAP_USE_DEVICE_PTR_IF_PRESENT
;
12262 type
= TREE_TYPE (ovar
);
12263 if (lang_hooks
.decls
.omp_array_data (ovar
, true))
12264 var
= lang_hooks
.decls
.omp_array_data (ovar
, false);
12265 else if ((OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_USE_DEVICE_ADDR
12266 && !omp_is_reference (ovar
)
12267 && !omp_is_allocatable_or_ptr (ovar
))
12268 || TREE_CODE (type
) == ARRAY_TYPE
)
12269 var
= build_fold_addr_expr (var
);
12272 if (omp_is_reference (ovar
)
12273 || omp_check_optional_argument (ovar
, false)
12274 || omp_is_allocatable_or_ptr (ovar
))
12276 type
= TREE_TYPE (type
);
12277 if (TREE_CODE (type
) != ARRAY_TYPE
12278 && ((OMP_CLAUSE_CODE (c
) != OMP_CLAUSE_USE_DEVICE_ADDR
12279 && !omp_is_allocatable_or_ptr (ovar
))
12280 || (omp_is_reference (ovar
)
12281 && omp_is_allocatable_or_ptr (ovar
))))
12282 var
= build_simple_mem_ref (var
);
12283 var
= fold_convert (TREE_TYPE (x
), var
);
12287 present
= omp_check_optional_argument (ovar
, true);
12290 tree null_label
= create_artificial_label (UNKNOWN_LOCATION
);
12291 tree notnull_label
= create_artificial_label (UNKNOWN_LOCATION
);
12292 tree opt_arg_label
= create_artificial_label (UNKNOWN_LOCATION
);
12293 tree new_x
= unshare_expr (x
);
12294 gimplify_expr (&present
, &ilist
, NULL
, is_gimple_val
,
12296 gcond
*cond
= gimple_build_cond_from_tree (present
,
12299 gimple_seq_add_stmt (&ilist
, cond
);
12300 gimple_seq_add_stmt (&ilist
, gimple_build_label (null_label
));
12301 gimplify_assign (new_x
, null_pointer_node
, &ilist
);
12302 gimple_seq_add_stmt (&ilist
, gimple_build_goto (opt_arg_label
));
12303 gimple_seq_add_stmt (&ilist
,
12304 gimple_build_label (notnull_label
));
12305 gimplify_assign (x
, var
, &ilist
);
12306 gimple_seq_add_stmt (&ilist
,
12307 gimple_build_label (opt_arg_label
));
12310 gimplify_assign (x
, var
, &ilist
);
12312 purpose
= size_int (map_idx
++);
12313 CONSTRUCTOR_APPEND_ELT (vsize
, purpose
, s
);
12314 gcc_checking_assert (tkind
12315 < (HOST_WIDE_INT_C (1U) << talign_shift
));
12316 gcc_checking_assert (tkind
12317 <= tree_to_uhwi (TYPE_MAX_VALUE (tkind_type
)));
12318 CONSTRUCTOR_APPEND_ELT (vkind
, purpose
,
12319 build_int_cstu (tkind_type
, tkind
));
12323 gcc_assert (map_idx
== map_cnt
);
12325 DECL_INITIAL (TREE_VEC_ELT (t
, 1))
12326 = build_constructor (TREE_TYPE (TREE_VEC_ELT (t
, 1)), vsize
);
12327 DECL_INITIAL (TREE_VEC_ELT (t
, 2))
12328 = build_constructor (TREE_TYPE (TREE_VEC_ELT (t
, 2)), vkind
);
12329 for (int i
= 1; i
<= 2; i
++)
12330 if (!TREE_STATIC (TREE_VEC_ELT (t
, i
)))
12332 gimple_seq initlist
= NULL
;
12333 force_gimple_operand (build1 (DECL_EXPR
, void_type_node
,
12334 TREE_VEC_ELT (t
, i
)),
12335 &initlist
, true, NULL_TREE
);
12336 gimple_seq_add_seq (&ilist
, initlist
);
12338 tree clobber
= build_clobber (TREE_TYPE (TREE_VEC_ELT (t
, i
)));
12339 gimple_seq_add_stmt (&olist
,
12340 gimple_build_assign (TREE_VEC_ELT (t
, i
),
12344 tree clobber
= build_clobber (ctx
->record_type
);
12345 gimple_seq_add_stmt (&olist
, gimple_build_assign (ctx
->sender_decl
,
12349 /* Once all the expansions are done, sequence all the different
12350 fragments inside gimple_omp_body. */
12355 && ctx
->record_type
)
12357 t
= build_fold_addr_expr_loc (loc
, ctx
->sender_decl
);
12358 /* fixup_child_record_type might have changed receiver_decl's type. */
12359 t
= fold_convert_loc (loc
, TREE_TYPE (ctx
->receiver_decl
), t
);
12360 gimple_seq_add_stmt (&new_body
,
12361 gimple_build_assign (ctx
->receiver_decl
, t
));
12363 gimple_seq_add_seq (&new_body
, fplist
);
12365 if (offloaded
|| data_region
)
12367 tree prev
= NULL_TREE
;
12368 for (c
= clauses
; c
; c
= OMP_CLAUSE_CHAIN (c
))
12369 switch (OMP_CLAUSE_CODE (c
))
12374 case OMP_CLAUSE_FIRSTPRIVATE
:
12375 if (is_gimple_omp_oacc (ctx
->stmt
))
12377 var
= OMP_CLAUSE_DECL (c
);
12378 if (omp_is_reference (var
)
12379 || is_gimple_reg_type (TREE_TYPE (var
)))
12381 tree new_var
= lookup_decl (var
, ctx
);
12383 type
= TREE_TYPE (var
);
12384 if (omp_is_reference (var
))
12385 type
= TREE_TYPE (type
);
12386 if ((INTEGRAL_TYPE_P (type
)
12387 && TYPE_PRECISION (type
) <= POINTER_SIZE
)
12388 || TREE_CODE (type
) == POINTER_TYPE
)
12390 x
= build_receiver_ref (var
, false, ctx
);
12391 if (TREE_CODE (type
) != POINTER_TYPE
)
12392 x
= fold_convert (pointer_sized_int_node
, x
);
12393 x
= fold_convert (type
, x
);
12394 gimplify_expr (&x
, &new_body
, NULL
, is_gimple_val
,
12396 if (omp_is_reference (var
))
12398 tree v
= create_tmp_var_raw (type
, get_name (var
));
12399 gimple_add_tmp_var (v
);
12400 TREE_ADDRESSABLE (v
) = 1;
12401 gimple_seq_add_stmt (&new_body
,
12402 gimple_build_assign (v
, x
));
12403 x
= build_fold_addr_expr (v
);
12405 gimple_seq_add_stmt (&new_body
,
12406 gimple_build_assign (new_var
, x
));
12410 x
= build_receiver_ref (var
, !omp_is_reference (var
), ctx
);
12411 gimplify_expr (&x
, &new_body
, NULL
, is_gimple_val
,
12413 gimple_seq_add_stmt (&new_body
,
12414 gimple_build_assign (new_var
, x
));
12417 else if (is_variable_sized (var
))
12419 tree pvar
= DECL_VALUE_EXPR (var
);
12420 gcc_assert (TREE_CODE (pvar
) == INDIRECT_REF
);
12421 pvar
= TREE_OPERAND (pvar
, 0);
12422 gcc_assert (DECL_P (pvar
));
12423 tree new_var
= lookup_decl (pvar
, ctx
);
12424 x
= build_receiver_ref (var
, false, ctx
);
12425 gimplify_expr (&x
, &new_body
, NULL
, is_gimple_val
, fb_rvalue
);
12426 gimple_seq_add_stmt (&new_body
,
12427 gimple_build_assign (new_var
, x
));
12430 case OMP_CLAUSE_PRIVATE
:
12431 if (is_gimple_omp_oacc (ctx
->stmt
))
12433 var
= OMP_CLAUSE_DECL (c
);
12434 if (omp_is_reference (var
))
12436 location_t clause_loc
= OMP_CLAUSE_LOCATION (c
);
12437 tree new_var
= lookup_decl (var
, ctx
);
12438 x
= TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (new_var
)));
12439 if (TREE_CONSTANT (x
))
12441 x
= create_tmp_var_raw (TREE_TYPE (TREE_TYPE (new_var
)),
12443 gimple_add_tmp_var (x
);
12444 TREE_ADDRESSABLE (x
) = 1;
12445 x
= build_fold_addr_expr_loc (clause_loc
, x
);
12450 x
= fold_convert_loc (clause_loc
, TREE_TYPE (new_var
), x
);
12451 gimplify_expr (&x
, &new_body
, NULL
, is_gimple_val
, fb_rvalue
);
12452 gimple_seq_add_stmt (&new_body
,
12453 gimple_build_assign (new_var
, x
));
12456 case OMP_CLAUSE_USE_DEVICE_PTR
:
12457 case OMP_CLAUSE_USE_DEVICE_ADDR
:
12458 case OMP_CLAUSE_IS_DEVICE_PTR
:
12460 gimple_seq assign_body
;
12461 bool is_array_data
;
12462 bool do_optional_check
;
12463 assign_body
= NULL
;
12464 do_optional_check
= false;
12465 var
= OMP_CLAUSE_DECL (c
);
12466 is_array_data
= lang_hooks
.decls
.omp_array_data (var
, true) != NULL
;
12468 if (OMP_CLAUSE_CODE (c
) != OMP_CLAUSE_IS_DEVICE_PTR
)
12469 x
= build_sender_ref (is_array_data
12470 ? (splay_tree_key
) &DECL_NAME (var
)
12471 : (splay_tree_key
) &DECL_UID (var
), ctx
);
12473 x
= build_receiver_ref (var
, false, ctx
);
12477 bool is_ref
= omp_is_reference (var
);
12478 do_optional_check
= true;
12479 /* First, we copy the descriptor data from the host; then
12480 we update its data to point to the target address. */
12481 new_var
= lookup_decl (var
, ctx
);
12482 new_var
= DECL_VALUE_EXPR (new_var
);
12487 var
= build_fold_indirect_ref (var
);
12488 gimplify_expr (&var
, &assign_body
, NULL
, is_gimple_val
,
12490 v
= create_tmp_var_raw (TREE_TYPE (var
), get_name (var
));
12491 gimple_add_tmp_var (v
);
12492 TREE_ADDRESSABLE (v
) = 1;
12493 gimple_seq_add_stmt (&assign_body
,
12494 gimple_build_assign (v
, var
));
12495 tree rhs
= build_fold_addr_expr (v
);
12496 gimple_seq_add_stmt (&assign_body
,
12497 gimple_build_assign (new_var
, rhs
));
12500 gimple_seq_add_stmt (&assign_body
,
12501 gimple_build_assign (new_var
, var
));
12503 tree v2
= lang_hooks
.decls
.omp_array_data (unshare_expr (v
), false);
12505 gimplify_expr (&x
, &assign_body
, NULL
, is_gimple_val
, fb_rvalue
);
12506 gimple_seq_add_stmt (&assign_body
,
12507 gimple_build_assign (v2
, x
));
12509 else if (is_variable_sized (var
))
12511 tree pvar
= DECL_VALUE_EXPR (var
);
12512 gcc_assert (TREE_CODE (pvar
) == INDIRECT_REF
);
12513 pvar
= TREE_OPERAND (pvar
, 0);
12514 gcc_assert (DECL_P (pvar
));
12515 new_var
= lookup_decl (pvar
, ctx
);
12516 gimplify_expr (&x
, &assign_body
, NULL
, is_gimple_val
, fb_rvalue
);
12517 gimple_seq_add_stmt (&assign_body
,
12518 gimple_build_assign (new_var
, x
));
12520 else if ((OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_USE_DEVICE_ADDR
12521 && !omp_is_reference (var
)
12522 && !omp_is_allocatable_or_ptr (var
))
12523 || TREE_CODE (TREE_TYPE (var
)) == ARRAY_TYPE
)
12525 new_var
= lookup_decl (var
, ctx
);
12526 new_var
= DECL_VALUE_EXPR (new_var
);
12527 gcc_assert (TREE_CODE (new_var
) == MEM_REF
);
12528 new_var
= TREE_OPERAND (new_var
, 0);
12529 gcc_assert (DECL_P (new_var
));
12530 gimplify_expr (&x
, &assign_body
, NULL
, is_gimple_val
, fb_rvalue
);
12531 gimple_seq_add_stmt (&assign_body
,
12532 gimple_build_assign (new_var
, x
));
12536 tree type
= TREE_TYPE (var
);
12537 new_var
= lookup_decl (var
, ctx
);
12538 if (omp_is_reference (var
))
12540 type
= TREE_TYPE (type
);
12541 if (TREE_CODE (type
) != ARRAY_TYPE
12542 && (OMP_CLAUSE_CODE (c
) != OMP_CLAUSE_USE_DEVICE_ADDR
12543 || (omp_is_reference (var
)
12544 && omp_is_allocatable_or_ptr (var
))))
12546 tree v
= create_tmp_var_raw (type
, get_name (var
));
12547 gimple_add_tmp_var (v
);
12548 TREE_ADDRESSABLE (v
) = 1;
12549 x
= fold_convert (type
, x
);
12550 gimplify_expr (&x
, &assign_body
, NULL
, is_gimple_val
,
12552 gimple_seq_add_stmt (&assign_body
,
12553 gimple_build_assign (v
, x
));
12554 x
= build_fold_addr_expr (v
);
12555 do_optional_check
= true;
12558 new_var
= DECL_VALUE_EXPR (new_var
);
12559 x
= fold_convert (TREE_TYPE (new_var
), x
);
12560 gimplify_expr (&x
, &assign_body
, NULL
, is_gimple_val
, fb_rvalue
);
12561 gimple_seq_add_stmt (&assign_body
,
12562 gimple_build_assign (new_var
, x
));
12565 present
= (do_optional_check
12566 ? omp_check_optional_argument (OMP_CLAUSE_DECL (c
), true)
12570 tree null_label
= create_artificial_label (UNKNOWN_LOCATION
);
12571 tree notnull_label
= create_artificial_label (UNKNOWN_LOCATION
);
12572 tree opt_arg_label
= create_artificial_label (UNKNOWN_LOCATION
);
12573 glabel
*null_glabel
= gimple_build_label (null_label
);
12574 glabel
*notnull_glabel
= gimple_build_label (notnull_label
);
12575 ggoto
*opt_arg_ggoto
= gimple_build_goto (opt_arg_label
);
12576 gimplify_expr (&x
, &new_body
, NULL
, is_gimple_val
,
12578 gimplify_expr (&present
, &new_body
, NULL
, is_gimple_val
,
12580 gcond
*cond
= gimple_build_cond_from_tree (present
,
12583 gimple_seq_add_stmt (&new_body
, cond
);
12584 gimple_seq_add_stmt (&new_body
, null_glabel
);
12585 gimplify_assign (new_var
, null_pointer_node
, &new_body
);
12586 gimple_seq_add_stmt (&new_body
, opt_arg_ggoto
);
12587 gimple_seq_add_stmt (&new_body
, notnull_glabel
);
12588 gimple_seq_add_seq (&new_body
, assign_body
);
12589 gimple_seq_add_stmt (&new_body
,
12590 gimple_build_label (opt_arg_label
));
12593 gimple_seq_add_seq (&new_body
, assign_body
);
12596 /* Handle GOMP_MAP_FIRSTPRIVATE_{POINTER,REFERENCE} in second pass,
12597 so that firstprivate vars holding OMP_CLAUSE_SIZE if needed
12598 are already handled. Similarly OMP_CLAUSE_PRIVATE for VLAs
12599 or references to VLAs. */
12600 for (c
= clauses
; c
; c
= OMP_CLAUSE_CHAIN (c
))
12601 switch (OMP_CLAUSE_CODE (c
))
12606 case OMP_CLAUSE_MAP
:
12607 if (OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_FIRSTPRIVATE_POINTER
12608 || OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_FIRSTPRIVATE_REFERENCE
)
12610 location_t clause_loc
= OMP_CLAUSE_LOCATION (c
);
12611 poly_int64 offset
= 0;
12613 var
= OMP_CLAUSE_DECL (c
);
12615 && TREE_CODE (TREE_TYPE (var
)) == ARRAY_TYPE
12616 && is_global_var (maybe_lookup_decl_in_outer_ctx (var
,
12618 && varpool_node::get_create (var
)->offloadable
)
12620 if (TREE_CODE (var
) == INDIRECT_REF
12621 && TREE_CODE (TREE_OPERAND (var
, 0)) == COMPONENT_REF
)
12622 var
= TREE_OPERAND (var
, 0);
12623 if (TREE_CODE (var
) == COMPONENT_REF
)
12625 var
= get_addr_base_and_unit_offset (var
, &offset
);
12626 gcc_assert (var
!= NULL_TREE
&& DECL_P (var
));
12628 else if (DECL_SIZE (var
)
12629 && TREE_CODE (DECL_SIZE (var
)) != INTEGER_CST
)
12631 tree var2
= DECL_VALUE_EXPR (var
);
12632 gcc_assert (TREE_CODE (var2
) == INDIRECT_REF
);
12633 var2
= TREE_OPERAND (var2
, 0);
12634 gcc_assert (DECL_P (var2
));
12637 tree new_var
= lookup_decl (var
, ctx
), x
;
12638 tree type
= TREE_TYPE (new_var
);
12640 if (TREE_CODE (OMP_CLAUSE_DECL (c
)) == INDIRECT_REF
12641 && (TREE_CODE (TREE_OPERAND (OMP_CLAUSE_DECL (c
), 0))
12644 type
= TREE_TYPE (TREE_OPERAND (OMP_CLAUSE_DECL (c
), 0));
12646 new_var
= build2 (MEM_REF
, type
,
12647 build_fold_addr_expr (new_var
),
12648 build_int_cst (build_pointer_type (type
),
12651 else if (TREE_CODE (OMP_CLAUSE_DECL (c
)) == COMPONENT_REF
)
12653 type
= TREE_TYPE (OMP_CLAUSE_DECL (c
));
12654 is_ref
= TREE_CODE (type
) == REFERENCE_TYPE
;
12655 new_var
= build2 (MEM_REF
, type
,
12656 build_fold_addr_expr (new_var
),
12657 build_int_cst (build_pointer_type (type
),
12661 is_ref
= omp_is_reference (var
);
12662 if (OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_FIRSTPRIVATE_REFERENCE
)
12664 bool ref_to_array
= false;
12667 type
= TREE_TYPE (type
);
12668 if (TREE_CODE (type
) == ARRAY_TYPE
)
12670 type
= build_pointer_type (type
);
12671 ref_to_array
= true;
12674 else if (TREE_CODE (type
) == ARRAY_TYPE
)
12676 tree decl2
= DECL_VALUE_EXPR (new_var
);
12677 gcc_assert (TREE_CODE (decl2
) == MEM_REF
);
12678 decl2
= TREE_OPERAND (decl2
, 0);
12679 gcc_assert (DECL_P (decl2
));
12681 type
= TREE_TYPE (new_var
);
12683 x
= build_receiver_ref (OMP_CLAUSE_DECL (prev
), false, ctx
);
12684 x
= fold_convert_loc (clause_loc
, type
, x
);
12685 if (!integer_zerop (OMP_CLAUSE_SIZE (c
)))
12687 tree bias
= OMP_CLAUSE_SIZE (c
);
12689 bias
= lookup_decl (bias
, ctx
);
12690 bias
= fold_convert_loc (clause_loc
, sizetype
, bias
);
12691 bias
= fold_build1_loc (clause_loc
, NEGATE_EXPR
, sizetype
,
12693 x
= fold_build2_loc (clause_loc
, POINTER_PLUS_EXPR
,
12694 TREE_TYPE (x
), x
, bias
);
12697 x
= fold_convert_loc (clause_loc
, TREE_TYPE (new_var
), x
);
12698 gimplify_expr (&x
, &new_body
, NULL
, is_gimple_val
, fb_rvalue
);
12699 if (is_ref
&& !ref_to_array
)
12701 tree t
= create_tmp_var_raw (type
, get_name (var
));
12702 gimple_add_tmp_var (t
);
12703 TREE_ADDRESSABLE (t
) = 1;
12704 gimple_seq_add_stmt (&new_body
,
12705 gimple_build_assign (t
, x
));
12706 x
= build_fold_addr_expr_loc (clause_loc
, t
);
12708 gimple_seq_add_stmt (&new_body
,
12709 gimple_build_assign (new_var
, x
));
12712 else if (OMP_CLAUSE_CHAIN (c
)
12713 && OMP_CLAUSE_CODE (OMP_CLAUSE_CHAIN (c
))
12715 && (OMP_CLAUSE_MAP_KIND (OMP_CLAUSE_CHAIN (c
))
12716 == GOMP_MAP_FIRSTPRIVATE_POINTER
12717 || (OMP_CLAUSE_MAP_KIND (OMP_CLAUSE_CHAIN (c
))
12718 == GOMP_MAP_FIRSTPRIVATE_REFERENCE
)))
12721 case OMP_CLAUSE_PRIVATE
:
12722 var
= OMP_CLAUSE_DECL (c
);
12723 if (is_variable_sized (var
))
12725 location_t clause_loc
= OMP_CLAUSE_LOCATION (c
);
12726 tree new_var
= lookup_decl (var
, ctx
);
12727 tree pvar
= DECL_VALUE_EXPR (var
);
12728 gcc_assert (TREE_CODE (pvar
) == INDIRECT_REF
);
12729 pvar
= TREE_OPERAND (pvar
, 0);
12730 gcc_assert (DECL_P (pvar
));
12731 tree new_pvar
= lookup_decl (pvar
, ctx
);
12732 tree atmp
= builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN
);
12733 tree al
= size_int (DECL_ALIGN (var
));
12734 tree x
= TYPE_SIZE_UNIT (TREE_TYPE (new_var
));
12735 x
= build_call_expr_loc (clause_loc
, atmp
, 2, x
, al
);
12736 x
= fold_convert_loc (clause_loc
, TREE_TYPE (new_pvar
), x
);
12737 gimplify_expr (&x
, &new_body
, NULL
, is_gimple_val
, fb_rvalue
);
12738 gimple_seq_add_stmt (&new_body
,
12739 gimple_build_assign (new_pvar
, x
));
12741 else if (omp_is_reference (var
) && !is_gimple_omp_oacc (ctx
->stmt
))
12743 location_t clause_loc
= OMP_CLAUSE_LOCATION (c
);
12744 tree new_var
= lookup_decl (var
, ctx
);
12745 tree x
= TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (new_var
)));
12746 if (TREE_CONSTANT (x
))
12751 = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN
);
12752 tree rtype
= TREE_TYPE (TREE_TYPE (new_var
));
12753 tree al
= size_int (TYPE_ALIGN (rtype
));
12754 x
= build_call_expr_loc (clause_loc
, atmp
, 2, x
, al
);
12757 x
= fold_convert_loc (clause_loc
, TREE_TYPE (new_var
), x
);
12758 gimplify_expr (&x
, &new_body
, NULL
, is_gimple_val
, fb_rvalue
);
12759 gimple_seq_add_stmt (&new_body
,
12760 gimple_build_assign (new_var
, x
));
12765 gimple_seq fork_seq
= NULL
;
12766 gimple_seq join_seq
= NULL
;
12768 if (is_oacc_parallel_or_serial (ctx
))
12770 /* If there are reductions on the offloaded region itself, treat
12771 them as a dummy GANG loop. */
12772 tree level
= build_int_cst (integer_type_node
, GOMP_DIM_GANG
);
12774 lower_oacc_reductions (gimple_location (ctx
->stmt
), clauses
, level
,
12775 false, NULL
, NULL
, &fork_seq
, &join_seq
, ctx
);
12778 gimple_seq_add_seq (&new_body
, fork_seq
);
12779 gimple_seq_add_seq (&new_body
, tgt_body
);
12780 gimple_seq_add_seq (&new_body
, join_seq
);
12783 new_body
= maybe_catch_exception (new_body
);
12785 gimple_seq_add_stmt (&new_body
, gimple_build_omp_return (false));
12786 gimple_omp_set_body (stmt
, new_body
);
12789 bind
= gimple_build_bind (NULL
, NULL
,
12790 tgt_bind
? gimple_bind_block (tgt_bind
)
12792 gsi_replace (gsi_p
, dep_bind
? dep_bind
: bind
, true);
12793 gimple_bind_add_seq (bind
, ilist
);
12794 gimple_bind_add_stmt (bind
, stmt
);
12795 gimple_bind_add_seq (bind
, olist
);
12797 pop_gimplify_context (NULL
);
12801 gimple_bind_add_seq (dep_bind
, dep_ilist
);
12802 gimple_bind_add_stmt (dep_bind
, bind
);
12803 gimple_bind_add_seq (dep_bind
, dep_olist
);
12804 pop_gimplify_context (dep_bind
);
/* Expand code for an OpenMP teams directive.  Replaces the GIMPLE_OMP_TEAMS
   statement at *GSI_P with a GIMPLE_BIND that evaluates the num_teams and
   thread_limit clauses, calls the libgomp GOMP_teams entry point, and then
   runs the lowered teams body.  CTX is the omp_context for the construct.  */

static void
lower_omp_teams (gimple_stmt_iterator *gsi_p, omp_context *ctx)
{
  gomp_teams *teams_stmt = as_a <gomp_teams *> (gsi_stmt (*gsi_p));
  push_gimplify_context ();

  /* Wrap everything in a fresh bind which replaces the teams statement
     in the statement stream.  */
  tree block = make_node (BLOCK);
  gbind *bind = gimple_build_bind (NULL, NULL, block);
  gsi_replace (gsi_p, bind, true);
  gimple_seq bind_body = NULL;
  gimple_seq dlist = NULL;
  gimple_seq olist = NULL;

  /* Evaluate num_teams; an absent clause becomes 0, which tells the
     runtime to choose the number of teams itself.  */
  tree num_teams = omp_find_clause (gimple_omp_teams_clauses (teams_stmt),
				    OMP_CLAUSE_NUM_TEAMS);
  if (num_teams == NULL_TREE)
    num_teams = build_int_cst (unsigned_type_node, 0);
  else
    {
      num_teams = OMP_CLAUSE_NUM_TEAMS_EXPR (num_teams);
      num_teams = fold_convert (unsigned_type_node, num_teams);
      gimplify_expr (&num_teams, &bind_body, NULL, is_gimple_val, fb_rvalue);
    }
  /* Likewise for thread_limit; 0 again means "runtime default".  */
  tree thread_limit = omp_find_clause (gimple_omp_teams_clauses (teams_stmt),
				       OMP_CLAUSE_THREAD_LIMIT);
  if (thread_limit == NULL_TREE)
    thread_limit = build_int_cst (unsigned_type_node, 0);
  else
    {
      thread_limit = OMP_CLAUSE_THREAD_LIMIT_EXPR (thread_limit);
      thread_limit = fold_convert (unsigned_type_node, thread_limit);
      gimplify_expr (&thread_limit, &bind_body, NULL, is_gimple_val,
		     fb_rvalue);
    }

  /* Lower data-sharing clauses (ilist goes into BIND_BODY, destructor-like
     code into DLIST) and the construct body, then reductions into OLIST.  */
  lower_rec_input_clauses (gimple_omp_teams_clauses (teams_stmt),
			   &bind_body, &dlist, ctx, NULL);
  lower_omp (gimple_omp_body_ptr (teams_stmt), ctx);
  lower_reduction_clauses (gimple_omp_teams_clauses (teams_stmt), &olist,
			   NULL, ctx);
  gimple_seq_add_stmt (&bind_body, teams_stmt);

  /* Emit the runtime call: GOMP_teams (num_teams, thread_limit).  */
  location_t loc = gimple_location (teams_stmt);
  tree decl = builtin_decl_explicit (BUILT_IN_GOMP_TEAMS);
  gimple *call = gimple_build_call (decl, 2, num_teams, thread_limit);
  gimple_set_location (call, loc);
  gimple_seq_add_stmt (&bind_body, call);

  /* Sequence: teams stmt, its (now detached) body, reductions, cleanup,
     and the closing OMP_RETURN.  */
  gimple_seq_add_seq (&bind_body, gimple_omp_body (teams_stmt));
  gimple_omp_set_body (teams_stmt, NULL);
  gimple_seq_add_seq (&bind_body, olist);
  gimple_seq_add_seq (&bind_body, dlist);
  gimple_seq_add_stmt (&bind_body, gimple_build_omp_return (true));
  gimple_bind_set_body (bind, bind_body);

  pop_gimplify_context (bind);

  /* Hang the variables created during lowering off the new bind/block.  */
  gimple_bind_append_vars (bind, ctx->block_vars);
  BLOCK_VARS (block) = ctx->block_vars;
  if (BLOCK_VARS (block))
    TREE_USED (block) = 1;
}
/* Callback for lower_omp_1.  Return non-NULL if *tp needs to be
   regimplified.  If DATA is non-NULL, lower_omp_1 is outside
   of OMP context, but with task_shared_vars set.  */

static tree
lower_omp_regimplify_p (tree *tp, int *walk_subtrees,
			void *data)
{
  tree t = *tp;

  /* Any variable with DECL_VALUE_EXPR needs to be regimplified.  */
  if (VAR_P (t) && data == NULL && DECL_HAS_VALUE_EXPR_P (t))
    return t;

  /* Variables shared from a task also need regimplification, since
     references to them were remapped.  */
  if (task_shared_vars
      && VAR_P (t)
      && bitmap_bit_p (task_shared_vars, DECL_UID (t)))
    return t;

  /* If a global variable has been privatized, TREE_CONSTANT on
     ADDR_EXPR might be wrong.  */
  if (data == NULL && TREE_CODE (t) == ADDR_EXPR)
    recompute_tree_invariant_for_addr_expr (t);

  /* Types and declarations have no interesting subtrees to walk.  */
  *walk_subtrees = !IS_TYPE_OR_DECL_P (t);
  return NULL_TREE;
}
12901 /* Data to be communicated between lower_omp_regimplify_operands and
12902 lower_omp_regimplify_operands_p. */
12904 struct lower_omp_regimplify_operands_data
12910 /* Helper function for lower_omp_regimplify_operands. Find
12911 omp_member_access_dummy_var vars and adjust temporarily their
12912 DECL_VALUE_EXPRs if needed. */
12915 lower_omp_regimplify_operands_p (tree
*tp
, int *walk_subtrees
,
12918 tree t
= omp_member_access_dummy_var (*tp
);
12921 struct walk_stmt_info
*wi
= (struct walk_stmt_info
*) data
;
12922 lower_omp_regimplify_operands_data
*ldata
12923 = (lower_omp_regimplify_operands_data
*) wi
->info
;
12924 tree o
= maybe_lookup_decl (t
, ldata
->ctx
);
12927 ldata
->decls
->safe_push (DECL_VALUE_EXPR (*tp
));
12928 ldata
->decls
->safe_push (*tp
);
12929 tree v
= unshare_and_remap (DECL_VALUE_EXPR (*tp
), t
, o
);
12930 SET_DECL_VALUE_EXPR (*tp
, v
);
12933 *walk_subtrees
= !IS_TYPE_OR_DECL_P (*tp
);
12937 /* Wrapper around gimple_regimplify_operands that adjusts DECL_VALUE_EXPRs
12938 of omp_member_access_dummy_var vars during regimplification. */
12941 lower_omp_regimplify_operands (omp_context
*ctx
, gimple
*stmt
,
12942 gimple_stmt_iterator
*gsi_p
)
12944 auto_vec
<tree
, 10> decls
;
12947 struct walk_stmt_info wi
;
12948 memset (&wi
, '\0', sizeof (wi
));
12949 struct lower_omp_regimplify_operands_data data
;
12951 data
.decls
= &decls
;
12953 walk_gimple_op (stmt
, lower_omp_regimplify_operands_p
, &wi
);
12955 gimple_regimplify_operands (stmt
, gsi_p
);
12956 while (!decls
.is_empty ())
12958 tree t
= decls
.pop ();
12959 tree v
= decls
.pop ();
12960 SET_DECL_VALUE_EXPR (t
, v
);
12965 lower_omp_1 (gimple_stmt_iterator
*gsi_p
, omp_context
*ctx
)
12967 gimple
*stmt
= gsi_stmt (*gsi_p
);
12968 struct walk_stmt_info wi
;
12971 if (gimple_has_location (stmt
))
12972 input_location
= gimple_location (stmt
);
12974 if (task_shared_vars
)
12975 memset (&wi
, '\0', sizeof (wi
));
12977 /* If we have issued syntax errors, avoid doing any heavy lifting.
12978 Just replace the OMP directives with a NOP to avoid
12979 confusing RTL expansion. */
12980 if (seen_error () && is_gimple_omp (stmt
))
12982 gsi_replace (gsi_p
, gimple_build_nop (), true);
12986 switch (gimple_code (stmt
))
12990 gcond
*cond_stmt
= as_a
<gcond
*> (stmt
);
12991 if ((ctx
|| task_shared_vars
)
12992 && (walk_tree (gimple_cond_lhs_ptr (cond_stmt
),
12993 lower_omp_regimplify_p
,
12994 ctx
? NULL
: &wi
, NULL
)
12995 || walk_tree (gimple_cond_rhs_ptr (cond_stmt
),
12996 lower_omp_regimplify_p
,
12997 ctx
? NULL
: &wi
, NULL
)))
12998 lower_omp_regimplify_operands (ctx
, cond_stmt
, gsi_p
);
13002 lower_omp (gimple_catch_handler_ptr (as_a
<gcatch
*> (stmt
)), ctx
);
13004 case GIMPLE_EH_FILTER
:
13005 lower_omp (gimple_eh_filter_failure_ptr (stmt
), ctx
);
13008 lower_omp (gimple_try_eval_ptr (stmt
), ctx
);
13009 lower_omp (gimple_try_cleanup_ptr (stmt
), ctx
);
13011 case GIMPLE_TRANSACTION
:
13012 lower_omp (gimple_transaction_body_ptr (as_a
<gtransaction
*> (stmt
)),
13016 lower_omp (gimple_bind_body_ptr (as_a
<gbind
*> (stmt
)), ctx
);
13017 maybe_remove_omp_member_access_dummy_vars (as_a
<gbind
*> (stmt
));
13019 case GIMPLE_OMP_PARALLEL
:
13020 case GIMPLE_OMP_TASK
:
13021 ctx
= maybe_lookup_ctx (stmt
);
13023 if (ctx
->cancellable
)
13024 ctx
->cancel_label
= create_artificial_label (UNKNOWN_LOCATION
);
13025 lower_omp_taskreg (gsi_p
, ctx
);
13027 case GIMPLE_OMP_FOR
:
13028 ctx
= maybe_lookup_ctx (stmt
);
13030 if (ctx
->cancellable
)
13031 ctx
->cancel_label
= create_artificial_label (UNKNOWN_LOCATION
);
13032 lower_omp_for (gsi_p
, ctx
);
13034 case GIMPLE_OMP_SECTIONS
:
13035 ctx
= maybe_lookup_ctx (stmt
);
13037 if (ctx
->cancellable
)
13038 ctx
->cancel_label
= create_artificial_label (UNKNOWN_LOCATION
);
13039 lower_omp_sections (gsi_p
, ctx
);
13041 case GIMPLE_OMP_SINGLE
:
13042 ctx
= maybe_lookup_ctx (stmt
);
13044 lower_omp_single (gsi_p
, ctx
);
13046 case GIMPLE_OMP_MASTER
:
13047 ctx
= maybe_lookup_ctx (stmt
);
13049 lower_omp_master (gsi_p
, ctx
);
13051 case GIMPLE_OMP_TASKGROUP
:
13052 ctx
= maybe_lookup_ctx (stmt
);
13054 lower_omp_taskgroup (gsi_p
, ctx
);
13056 case GIMPLE_OMP_ORDERED
:
13057 ctx
= maybe_lookup_ctx (stmt
);
13059 lower_omp_ordered (gsi_p
, ctx
);
13061 case GIMPLE_OMP_SCAN
:
13062 ctx
= maybe_lookup_ctx (stmt
);
13064 lower_omp_scan (gsi_p
, ctx
);
13066 case GIMPLE_OMP_CRITICAL
:
13067 ctx
= maybe_lookup_ctx (stmt
);
13069 lower_omp_critical (gsi_p
, ctx
);
13071 case GIMPLE_OMP_ATOMIC_LOAD
:
13072 if ((ctx
|| task_shared_vars
)
13073 && walk_tree (gimple_omp_atomic_load_rhs_ptr (
13074 as_a
<gomp_atomic_load
*> (stmt
)),
13075 lower_omp_regimplify_p
, ctx
? NULL
: &wi
, NULL
))
13076 lower_omp_regimplify_operands (ctx
, stmt
, gsi_p
);
13078 case GIMPLE_OMP_TARGET
:
13079 ctx
= maybe_lookup_ctx (stmt
);
13081 lower_omp_target (gsi_p
, ctx
);
13083 case GIMPLE_OMP_TEAMS
:
13084 ctx
= maybe_lookup_ctx (stmt
);
13086 if (gimple_omp_teams_host (as_a
<gomp_teams
*> (stmt
)))
13087 lower_omp_taskreg (gsi_p
, ctx
);
13089 lower_omp_teams (gsi_p
, ctx
);
13093 call_stmt
= as_a
<gcall
*> (stmt
);
13094 fndecl
= gimple_call_fndecl (call_stmt
);
13096 && fndecl_built_in_p (fndecl
, BUILT_IN_NORMAL
))
13097 switch (DECL_FUNCTION_CODE (fndecl
))
13099 case BUILT_IN_GOMP_BARRIER
:
13103 case BUILT_IN_GOMP_CANCEL
:
13104 case BUILT_IN_GOMP_CANCELLATION_POINT
:
13107 if (gimple_code (cctx
->stmt
) == GIMPLE_OMP_SECTION
)
13108 cctx
= cctx
->outer
;
13109 gcc_assert (gimple_call_lhs (call_stmt
) == NULL_TREE
);
13110 if (!cctx
->cancellable
)
13112 if (DECL_FUNCTION_CODE (fndecl
)
13113 == BUILT_IN_GOMP_CANCELLATION_POINT
)
13115 stmt
= gimple_build_nop ();
13116 gsi_replace (gsi_p
, stmt
, false);
13120 if (DECL_FUNCTION_CODE (fndecl
) == BUILT_IN_GOMP_BARRIER
)
13122 fndecl
= builtin_decl_explicit (BUILT_IN_GOMP_BARRIER_CANCEL
);
13123 gimple_call_set_fndecl (call_stmt
, fndecl
);
13124 gimple_call_set_fntype (call_stmt
, TREE_TYPE (fndecl
));
13127 lhs
= create_tmp_var (TREE_TYPE (TREE_TYPE (fndecl
)));
13128 gimple_call_set_lhs (call_stmt
, lhs
);
13129 tree fallthru_label
;
13130 fallthru_label
= create_artificial_label (UNKNOWN_LOCATION
);
13132 g
= gimple_build_label (fallthru_label
);
13133 gsi_insert_after (gsi_p
, g
, GSI_SAME_STMT
);
13134 g
= gimple_build_cond (NE_EXPR
, lhs
,
13135 fold_convert (TREE_TYPE (lhs
),
13136 boolean_false_node
),
13137 cctx
->cancel_label
, fallthru_label
);
13138 gsi_insert_after (gsi_p
, g
, GSI_SAME_STMT
);
13145 case GIMPLE_ASSIGN
:
13146 for (omp_context
*up
= ctx
; up
; up
= up
->outer
)
13148 if (gimple_code (up
->stmt
) == GIMPLE_OMP_ORDERED
13149 || gimple_code (up
->stmt
) == GIMPLE_OMP_CRITICAL
13150 || gimple_code (up
->stmt
) == GIMPLE_OMP_TASKGROUP
13151 || gimple_code (up
->stmt
) == GIMPLE_OMP_SECTION
13152 || gimple_code (up
->stmt
) == GIMPLE_OMP_SCAN
13153 || (gimple_code (up
->stmt
) == GIMPLE_OMP_TARGET
13154 && (gimple_omp_target_kind (up
->stmt
)
13155 == GF_OMP_TARGET_KIND_DATA
)))
13157 else if (!up
->lastprivate_conditional_map
)
13159 tree lhs
= get_base_address (gimple_assign_lhs (stmt
));
13160 if (TREE_CODE (lhs
) == MEM_REF
13161 && DECL_P (TREE_OPERAND (lhs
, 0))
13162 && TREE_CODE (TREE_TYPE (TREE_OPERAND (lhs
,
13163 0))) == REFERENCE_TYPE
)
13164 lhs
= TREE_OPERAND (lhs
, 0);
13166 if (tree
*v
= up
->lastprivate_conditional_map
->get (lhs
))
13169 if (up
->combined_into_simd_safelen1
)
13172 if (gimple_code (up
->stmt
) == GIMPLE_OMP_SCAN
)
13175 if (gimple_code (up
->stmt
) == GIMPLE_OMP_FOR
)
13176 clauses
= gimple_omp_for_clauses (up
->stmt
);
13178 clauses
= gimple_omp_sections_clauses (up
->stmt
);
13179 tree c
= omp_find_clause (clauses
, OMP_CLAUSE__CONDTEMP_
);
13180 if (!OMP_CLAUSE__CONDTEMP__ITER (c
))
13181 c
= omp_find_clause (OMP_CLAUSE_CHAIN (c
),
13182 OMP_CLAUSE__CONDTEMP_
);
13183 gcc_assert (OMP_CLAUSE__CONDTEMP__ITER (c
));
13184 gimple
*g
= gimple_build_assign (*v
, OMP_CLAUSE_DECL (c
));
13185 gsi_insert_after (gsi_p
, g
, GSI_SAME_STMT
);
13192 if ((ctx
|| task_shared_vars
)
13193 && walk_gimple_op (stmt
, lower_omp_regimplify_p
,
13196 /* Just remove clobbers, this should happen only if we have
13197 "privatized" local addressable variables in SIMD regions,
13198 the clobber isn't needed in that case and gimplifying address
13199 of the ARRAY_REF into a pointer and creating MEM_REF based
13200 clobber would create worse code than we get with the clobber
13202 if (gimple_clobber_p (stmt
))
13204 gsi_replace (gsi_p
, gimple_build_nop (), true);
13207 lower_omp_regimplify_operands (ctx
, stmt
, gsi_p
);
13214 lower_omp (gimple_seq
*body
, omp_context
*ctx
)
13216 location_t saved_location
= input_location
;
13217 gimple_stmt_iterator gsi
;
13218 for (gsi
= gsi_start (*body
); !gsi_end_p (gsi
); gsi_next (&gsi
))
13219 lower_omp_1 (&gsi
, ctx
);
13220 /* During gimplification, we haven't folded statments inside offloading
13221 or taskreg regions (gimplify.c:maybe_fold_stmt); do that now. */
13222 if (target_nesting_level
|| taskreg_nesting_level
)
13223 for (gsi
= gsi_start (*body
); !gsi_end_p (gsi
); gsi_next (&gsi
))
13225 input_location
= saved_location
;
13228 /* Main entry point. */
13230 static unsigned int
13231 execute_lower_omp (void)
13237 /* This pass always runs, to provide PROP_gimple_lomp.
13238 But often, there is nothing to do. */
13239 if (flag_openacc
== 0 && flag_openmp
== 0
13240 && flag_openmp_simd
== 0)
13243 all_contexts
= splay_tree_new (splay_tree_compare_pointers
, 0,
13244 delete_omp_context
);
13246 body
= gimple_body (current_function_decl
);
13248 scan_omp (&body
, NULL
);
13249 gcc_assert (taskreg_nesting_level
== 0);
13250 FOR_EACH_VEC_ELT (taskreg_contexts
, i
, ctx
)
13251 finish_taskreg_scan (ctx
);
13252 taskreg_contexts
.release ();
13254 if (all_contexts
->root
)
13256 if (task_shared_vars
)
13257 push_gimplify_context ();
13258 lower_omp (&body
, NULL
);
13259 if (task_shared_vars
)
13260 pop_gimplify_context (NULL
);
13265 splay_tree_delete (all_contexts
);
13266 all_contexts
= NULL
;
13268 BITMAP_FREE (task_shared_vars
);
13269 BITMAP_FREE (global_nonaddressable_vars
);
13271 /* If current function is a method, remove artificial dummy VAR_DECL created
13272 for non-static data member privatization, they aren't needed for
13273 debuginfo nor anything else, have been already replaced everywhere in the
13274 IL and cause problems with LTO. */
13275 if (DECL_ARGUMENTS (current_function_decl
)
13276 && DECL_ARTIFICIAL (DECL_ARGUMENTS (current_function_decl
))
13277 && (TREE_CODE (TREE_TYPE (DECL_ARGUMENTS (current_function_decl
)))
13279 remove_member_access_dummy_vars (DECL_INITIAL (current_function_decl
));
13285 const pass_data pass_data_lower_omp
=
13287 GIMPLE_PASS
, /* type */
13288 "omplower", /* name */
13289 OPTGROUP_OMP
, /* optinfo_flags */
13290 TV_NONE
, /* tv_id */
13291 PROP_gimple_any
, /* properties_required */
13292 PROP_gimple_lomp
| PROP_gimple_lomp_dev
, /* properties_provided */
13293 0, /* properties_destroyed */
13294 0, /* todo_flags_start */
13295 0, /* todo_flags_finish */
13298 class pass_lower_omp
: public gimple_opt_pass
13301 pass_lower_omp (gcc::context
*ctxt
)
13302 : gimple_opt_pass (pass_data_lower_omp
, ctxt
)
13305 /* opt_pass methods: */
13306 virtual unsigned int execute (function
*) { return execute_lower_omp (); }
13308 }; // class pass_lower_omp
13310 } // anon namespace
13313 make_pass_lower_omp (gcc::context
*ctxt
)
13315 return new pass_lower_omp (ctxt
);
13318 /* The following is a utility to diagnose structured block violations.
13319 It is not part of the "omplower" pass, as that's invoked too late. It
13320 should be invoked by the respective front ends after gimplification. */
13322 static splay_tree all_labels
;
13324 /* Check for mismatched contexts and generate an error if needed. Return
13325 true if an error is detected. */
13328 diagnose_sb_0 (gimple_stmt_iterator
*gsi_p
,
13329 gimple
*branch_ctx
, gimple
*label_ctx
)
13331 gcc_checking_assert (!branch_ctx
|| is_gimple_omp (branch_ctx
));
13332 gcc_checking_assert (!label_ctx
|| is_gimple_omp (label_ctx
));
13334 if (label_ctx
== branch_ctx
)
13337 const char* kind
= NULL
;
13341 if ((branch_ctx
&& is_gimple_omp_oacc (branch_ctx
))
13342 || (label_ctx
&& is_gimple_omp_oacc (label_ctx
)))
13344 gcc_checking_assert (kind
== NULL
);
13350 gcc_checking_assert (flag_openmp
|| flag_openmp_simd
);
13354 /* Previously we kept track of the label's entire context in diagnose_sb_[12]
13355 so we could traverse it and issue a correct "exit" or "enter" error
13356 message upon a structured block violation.
13358 We built the context by building a list with tree_cons'ing, but there is
13359 no easy counterpart in gimple tuples. It seems like far too much work
13360 for issuing exit/enter error messages. If someone really misses the
13361 distinct error message... patches welcome. */
13364 /* Try to avoid confusing the user by producing and error message
13365 with correct "exit" or "enter" verbiage. We prefer "exit"
13366 unless we can show that LABEL_CTX is nested within BRANCH_CTX. */
13367 if (branch_ctx
== NULL
)
13373 if (TREE_VALUE (label_ctx
) == branch_ctx
)
13378 label_ctx
= TREE_CHAIN (label_ctx
);
13383 error ("invalid exit from %s structured block", kind
);
13385 error ("invalid entry to %s structured block", kind
);
13388 /* If it's obvious we have an invalid entry, be specific about the error. */
13389 if (branch_ctx
== NULL
)
13390 error ("invalid entry to %s structured block", kind
);
13393 /* Otherwise, be vague and lazy, but efficient. */
13394 error ("invalid branch to/from %s structured block", kind
);
13397 gsi_replace (gsi_p
, gimple_build_nop (), false);
13401 /* Pass 1: Create a minimal tree of structured blocks, and record
13402 where each label is found. */
13405 diagnose_sb_1 (gimple_stmt_iterator
*gsi_p
, bool *handled_ops_p
,
13406 struct walk_stmt_info
*wi
)
13408 gimple
*context
= (gimple
*) wi
->info
;
13409 gimple
*inner_context
;
13410 gimple
*stmt
= gsi_stmt (*gsi_p
);
13412 *handled_ops_p
= true;
13414 switch (gimple_code (stmt
))
13418 case GIMPLE_OMP_PARALLEL
:
13419 case GIMPLE_OMP_TASK
:
13420 case GIMPLE_OMP_SECTIONS
:
13421 case GIMPLE_OMP_SINGLE
:
13422 case GIMPLE_OMP_SECTION
:
13423 case GIMPLE_OMP_MASTER
:
13424 case GIMPLE_OMP_ORDERED
:
13425 case GIMPLE_OMP_SCAN
:
13426 case GIMPLE_OMP_CRITICAL
:
13427 case GIMPLE_OMP_TARGET
:
13428 case GIMPLE_OMP_TEAMS
:
13429 case GIMPLE_OMP_TASKGROUP
:
13430 /* The minimal context here is just the current OMP construct. */
13431 inner_context
= stmt
;
13432 wi
->info
= inner_context
;
13433 walk_gimple_seq (gimple_omp_body (stmt
), diagnose_sb_1
, NULL
, wi
);
13434 wi
->info
= context
;
13437 case GIMPLE_OMP_FOR
:
13438 inner_context
= stmt
;
13439 wi
->info
= inner_context
;
13440 /* gimple_omp_for_{index,initial,final} are all DECLs; no need to
13442 walk_gimple_seq (gimple_omp_for_pre_body (stmt
),
13443 diagnose_sb_1
, NULL
, wi
);
13444 walk_gimple_seq (gimple_omp_body (stmt
), diagnose_sb_1
, NULL
, wi
);
13445 wi
->info
= context
;
13449 splay_tree_insert (all_labels
,
13450 (splay_tree_key
) gimple_label_label (
13451 as_a
<glabel
*> (stmt
)),
13452 (splay_tree_value
) context
);
13462 /* Pass 2: Check each branch and see if its context differs from that of
13463 the destination label's context. */
13466 diagnose_sb_2 (gimple_stmt_iterator
*gsi_p
, bool *handled_ops_p
,
13467 struct walk_stmt_info
*wi
)
13469 gimple
*context
= (gimple
*) wi
->info
;
13471 gimple
*stmt
= gsi_stmt (*gsi_p
);
13473 *handled_ops_p
= true;
13475 switch (gimple_code (stmt
))
13479 case GIMPLE_OMP_PARALLEL
:
13480 case GIMPLE_OMP_TASK
:
13481 case GIMPLE_OMP_SECTIONS
:
13482 case GIMPLE_OMP_SINGLE
:
13483 case GIMPLE_OMP_SECTION
:
13484 case GIMPLE_OMP_MASTER
:
13485 case GIMPLE_OMP_ORDERED
:
13486 case GIMPLE_OMP_SCAN
:
13487 case GIMPLE_OMP_CRITICAL
:
13488 case GIMPLE_OMP_TARGET
:
13489 case GIMPLE_OMP_TEAMS
:
13490 case GIMPLE_OMP_TASKGROUP
:
13492 walk_gimple_seq_mod (gimple_omp_body_ptr (stmt
), diagnose_sb_2
, NULL
, wi
);
13493 wi
->info
= context
;
13496 case GIMPLE_OMP_FOR
:
13498 /* gimple_omp_for_{index,initial,final} are all DECLs; no need to
13500 walk_gimple_seq_mod (gimple_omp_for_pre_body_ptr (stmt
),
13501 diagnose_sb_2
, NULL
, wi
);
13502 walk_gimple_seq_mod (gimple_omp_body_ptr (stmt
), diagnose_sb_2
, NULL
, wi
);
13503 wi
->info
= context
;
13508 gcond
*cond_stmt
= as_a
<gcond
*> (stmt
);
13509 tree lab
= gimple_cond_true_label (cond_stmt
);
13512 n
= splay_tree_lookup (all_labels
,
13513 (splay_tree_key
) lab
);
13514 diagnose_sb_0 (gsi_p
, context
,
13515 n
? (gimple
*) n
->value
: NULL
);
13517 lab
= gimple_cond_false_label (cond_stmt
);
13520 n
= splay_tree_lookup (all_labels
,
13521 (splay_tree_key
) lab
);
13522 diagnose_sb_0 (gsi_p
, context
,
13523 n
? (gimple
*) n
->value
: NULL
);
13530 tree lab
= gimple_goto_dest (stmt
);
13531 if (TREE_CODE (lab
) != LABEL_DECL
)
13534 n
= splay_tree_lookup (all_labels
, (splay_tree_key
) lab
);
13535 diagnose_sb_0 (gsi_p
, context
, n
? (gimple
*) n
->value
: NULL
);
13539 case GIMPLE_SWITCH
:
13541 gswitch
*switch_stmt
= as_a
<gswitch
*> (stmt
);
13543 for (i
= 0; i
< gimple_switch_num_labels (switch_stmt
); ++i
)
13545 tree lab
= CASE_LABEL (gimple_switch_label (switch_stmt
, i
));
13546 n
= splay_tree_lookup (all_labels
, (splay_tree_key
) lab
);
13547 if (n
&& diagnose_sb_0 (gsi_p
, context
, (gimple
*) n
->value
))
13553 case GIMPLE_RETURN
:
13554 diagnose_sb_0 (gsi_p
, context
, NULL
);
13564 static unsigned int
13565 diagnose_omp_structured_block_errors (void)
13567 struct walk_stmt_info wi
;
13568 gimple_seq body
= gimple_body (current_function_decl
);
13570 all_labels
= splay_tree_new (splay_tree_compare_pointers
, 0, 0);
13572 memset (&wi
, 0, sizeof (wi
));
13573 walk_gimple_seq (body
, diagnose_sb_1
, NULL
, &wi
);
13575 memset (&wi
, 0, sizeof (wi
));
13576 wi
.want_locations
= true;
13577 walk_gimple_seq_mod (&body
, diagnose_sb_2
, NULL
, &wi
);
13579 gimple_set_body (current_function_decl
, body
);
13581 splay_tree_delete (all_labels
);
13589 const pass_data pass_data_diagnose_omp_blocks
=
13591 GIMPLE_PASS
, /* type */
13592 "*diagnose_omp_blocks", /* name */
13593 OPTGROUP_OMP
, /* optinfo_flags */
13594 TV_NONE
, /* tv_id */
13595 PROP_gimple_any
, /* properties_required */
13596 0, /* properties_provided */
13597 0, /* properties_destroyed */
13598 0, /* todo_flags_start */
13599 0, /* todo_flags_finish */
13602 class pass_diagnose_omp_blocks
: public gimple_opt_pass
13605 pass_diagnose_omp_blocks (gcc::context
*ctxt
)
13606 : gimple_opt_pass (pass_data_diagnose_omp_blocks
, ctxt
)
13609 /* opt_pass methods: */
13610 virtual bool gate (function
*)
13612 return flag_openacc
|| flag_openmp
|| flag_openmp_simd
;
13614 virtual unsigned int execute (function
*)
13616 return diagnose_omp_structured_block_errors ();
13619 }; // class pass_diagnose_omp_blocks
13621 } // anon namespace
13624 make_pass_diagnose_omp_blocks (gcc::context
*ctxt
)
13626 return new pass_diagnose_omp_blocks (ctxt
);
13630 #include "gt-omp-low.h"