d: Force TYPE_MODE of classes and non-POD structs as BLKmode
[gcc.git] / gcc / omp-low.c
1 /* Lowering pass for OMP directives. Converts OMP directives into explicit
2 calls to the runtime library (libgomp), data marshalling to implement data
3 sharing and copying clauses, offloading to accelerators, and more.
4
5 Contributed by Diego Novillo <dnovillo@redhat.com>
6
7 Copyright (C) 2005-2020 Free Software Foundation, Inc.
8
9 This file is part of GCC.
10
11 GCC is free software; you can redistribute it and/or modify it under
12 the terms of the GNU General Public License as published by the Free
13 Software Foundation; either version 3, or (at your option) any later
14 version.
15
16 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
17 WARRANTY; without even the implied warranty of MERCHANTABILITY or
18 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
19 for more details.
20
21 You should have received a copy of the GNU General Public License
22 along with GCC; see the file COPYING3. If not see
23 <http://www.gnu.org/licenses/>. */
24
25 #include "config.h"
26 #include "system.h"
27 #include "coretypes.h"
28 #include "backend.h"
29 #include "target.h"
30 #include "tree.h"
31 #include "gimple.h"
32 #include "tree-pass.h"
33 #include "ssa.h"
34 #include "cgraph.h"
35 #include "pretty-print.h"
36 #include "diagnostic-core.h"
37 #include "fold-const.h"
38 #include "stor-layout.h"
39 #include "internal-fn.h"
40 #include "gimple-fold.h"
41 #include "gimplify.h"
42 #include "gimple-iterator.h"
43 #include "gimplify-me.h"
44 #include "gimple-walk.h"
45 #include "tree-iterator.h"
46 #include "tree-inline.h"
47 #include "langhooks.h"
48 #include "tree-dfa.h"
49 #include "tree-ssa.h"
50 #include "splay-tree.h"
51 #include "omp-general.h"
52 #include "omp-low.h"
53 #include "gimple-low.h"
54 #include "alloc-pool.h"
55 #include "symbol-summary.h"
56 #include "tree-nested.h"
57 #include "context.h"
58 #include "gomp-constants.h"
59 #include "gimple-pretty-print.h"
60 #include "stringpool.h"
61 #include "attribs.h"
62
63 /* Lowering of OMP parallel and workshare constructs proceeds in two
64 phases. The first phase scans the function looking for OMP statements
65 and then for variables that must be replaced to satisfy data sharing
66 clauses. The second phase expands code for the constructs, as well as
67 re-gimplifying things when variables have been replaced with complex
68 expressions.
69
70 Final code generation is done by pass_expand_omp. The flowgraph is
71 scanned for regions which are then moved to a new
72 function, to be invoked by the thread library, or offloaded. */
73
74 /* Context structure. Used to store information about each parallel
75 directive in the code. */
76
struct omp_context
{
  /* This field must be at the beginning, as we do "inheritance":  Some
     callback functions for tree-inline.c (e.g., omp_copy_decl)
     receive a copy_body_data pointer that is up-casted to an
     omp_context pointer.  */
  copy_body_data cb;

  /* The tree of contexts corresponding to the encountered constructs.  */
  struct omp_context *outer;
  /* The OMP statement this context was created for.  */
  gimple *stmt;

  /* Map variables to fields in a structure that allows communication
     between sending and receiving threads.  */
  splay_tree field_map;
  /* The record type holding those fields, and the decls used to refer
     to an instance of it on the sending and receiving sides.  */
  tree record_type;
  tree sender_decl;
  tree receiver_decl;

  /* These are used just by task contexts, if task firstprivate fn is
     needed.  srecord_type is used to communicate from the thread
     that encountered the task construct to task firstprivate fn,
     record_type is allocated by GOMP_task, initialized by task firstprivate
     fn and passed to the task body fn.  */
  splay_tree sfield_map;
  tree srecord_type;

  /* A chain of variables to add to the top-level block surrounding the
     construct.  In the case of a parallel, this is in the child function.  */
  tree block_vars;

  /* Label to which GOMP_cancel{,llation_point} and explicit and implicit
     barriers should jump to during omplower pass.  */
  tree cancel_label;

  /* The sibling GIMPLE_OMP_FOR simd with _simt_ clause or NULL
     otherwise.  */
  gimple *simt_stmt;

  /* For task reductions registered in this context, a vector containing
     the length of the private copies block (if constant, otherwise NULL)
     and then offsets (if constant, otherwise NULL) for each entry.  */
  vec<tree> task_reductions;

  /* A hash map from the reduction clauses to the registered array
     elts.  */
  hash_map<tree, unsigned> *task_reduction_map;

  /* And a hash map from the lastprivate(conditional:) variables to their
     corresponding tracking loop iteration variables.  */
  hash_map<tree, tree> *lastprivate_conditional_map;

  /* And a hash map from the allocate variables to their corresponding
     allocators.  */
  hash_map<tree, tree> *allocate_map;

  /* A tree_list of the reduction clauses in this context.  This is
    only used for checking the consistency of OpenACC reduction
    clauses in scan_omp_for and is not guaranteed to contain a valid
    value outside of this function.  */
  tree local_reduction_clauses;

  /* A tree_list of the reduction clauses in outer contexts.  This is
    only used for checking the consistency of OpenACC reduction
    clauses in scan_omp_for and is not guaranteed to contain a valid
    value outside of this function.  */
  tree outer_reduction_clauses;

  /* Nesting depth of this context.  Used to beautify error messages re
     invalid gotos.  The outermost ctx is depth 1, with depth 0 being
     reserved for the main body of the function.  */
  int depth;

  /* True if this parallel directive is nested within another.  */
  bool is_nested;

  /* True if this construct can be cancelled.  */
  bool cancellable;

  /* True if lower_omp_1 should look up lastprivate conditional in parent
     context.  */
  bool combined_into_simd_safelen1;

  /* True if there is nested scan context with inclusive clause.  */
  bool scan_inclusive;

  /* True if there is nested scan context with exclusive clause.  */
  bool scan_exclusive;

  /* True in the second simd loop of for simd with inscan reductions.  */
  bool for_simd_scan_phase;

  /* True if there is order(concurrent) clause on the construct.  */
  bool order_concurrent;

  /* True if there is bind clause on the construct (i.e. a loop construct).  */
  bool loop_p;
};
175
/* Map from each OMP statement to its omp_context (see new_omp_context).  */
static splay_tree all_contexts;
/* NOTE(review): presumably the current nesting depths of parallel/task
   and target regions during scanning — their users are elsewhere in
   this file; confirm against scan_omp_*.  */
static int taskreg_nesting_level;
static int target_nesting_level;
/* DECL_UIDs of variables whose address a task had to take; such vars
   were not originally addressable (see use_pointer_for_field).  */
static bitmap task_shared_vars;
/* DECL_UIDs of global variables seen as non-addressable when first
   encountered by this pass (see use_pointer_for_field, PR91216).  */
static bitmap global_nonaddressable_vars;
/* Contexts of parallel/task/host-teams regions collected during scanning;
   NOTE(review): consumers are outside this chunk.  */
static vec<omp_context *> taskreg_contexts;

static void scan_omp (gimple_seq *, omp_context *);
static tree scan_omp_1_op (tree *, int *, void *);

/* Statement codes that merely wrap other statements; walk callbacks use
   this to let the walker descend into the wrapped sub-statements.  */
#define WALK_SUBSTMTS \
    case GIMPLE_BIND: \
    case GIMPLE_TRY: \
    case GIMPLE_CATCH: \
    case GIMPLE_EH_FILTER: \
    case GIMPLE_TRANSACTION: \
      /* The sub-statements for these should be walked.  */ \
      *handled_ops_p = false; \
      break;
195
196 /* Return whether CTX represents an OpenACC 'parallel' or 'serial' construct.
197 (This doesn't include OpenACC 'kernels' decomposed parts.) */
198
199 static bool
200 is_oacc_parallel_or_serial (omp_context *ctx)
201 {
202 enum gimple_code outer_type = gimple_code (ctx->stmt);
203 return ((outer_type == GIMPLE_OMP_TARGET)
204 && ((gimple_omp_target_kind (ctx->stmt)
205 == GF_OMP_TARGET_KIND_OACC_PARALLEL)
206 || (gimple_omp_target_kind (ctx->stmt)
207 == GF_OMP_TARGET_KIND_OACC_SERIAL)));
208 }
209
210 /* Return whether CTX represents an OpenACC 'kernels' construct.
211 (This doesn't include OpenACC 'kernels' decomposed parts.) */
212
213 static bool
214 is_oacc_kernels (omp_context *ctx)
215 {
216 enum gimple_code outer_type = gimple_code (ctx->stmt);
217 return ((outer_type == GIMPLE_OMP_TARGET)
218 && (gimple_omp_target_kind (ctx->stmt)
219 == GF_OMP_TARGET_KIND_OACC_KERNELS));
220 }
221
222 /* Return whether CTX represents an OpenACC 'kernels' decomposed part. */
223
224 static bool
225 is_oacc_kernels_decomposed_part (omp_context *ctx)
226 {
227 enum gimple_code outer_type = gimple_code (ctx->stmt);
228 return ((outer_type == GIMPLE_OMP_TARGET)
229 && ((gimple_omp_target_kind (ctx->stmt)
230 == GF_OMP_TARGET_KIND_OACC_PARALLEL_KERNELS_PARALLELIZED)
231 || (gimple_omp_target_kind (ctx->stmt)
232 == GF_OMP_TARGET_KIND_OACC_PARALLEL_KERNELS_GANG_SINGLE)
233 || (gimple_omp_target_kind (ctx->stmt)
234 == GF_OMP_TARGET_KIND_OACC_DATA_KERNELS)));
235 }
236
237 /* Return true if STMT corresponds to an OpenMP target region. */
238 static bool
239 is_omp_target (gimple *stmt)
240 {
241 if (gimple_code (stmt) == GIMPLE_OMP_TARGET)
242 {
243 int kind = gimple_omp_target_kind (stmt);
244 return (kind == GF_OMP_TARGET_KIND_REGION
245 || kind == GF_OMP_TARGET_KIND_DATA
246 || kind == GF_OMP_TARGET_KIND_ENTER_DATA
247 || kind == GF_OMP_TARGET_KIND_EXIT_DATA);
248 }
249 return false;
250 }
251
/* If DECL is the artificial dummy VAR_DECL created for non-static
   data member privatization, return the underlying "this" parameter,
   otherwise return NULL.  */

tree
omp_member_access_dummy_var (tree decl)
{
  /* The dummy is an artificial, debug-ignored VAR_DECL whose value
     expression the language hook wants disregarded.  */
  if (!VAR_P (decl)
      || !DECL_ARTIFICIAL (decl)
      || !DECL_IGNORED_P (decl)
      || !DECL_HAS_VALUE_EXPR_P (decl)
      || !lang_hooks.decls.omp_disregard_value_expr (decl, false))
    return NULL_TREE;

  tree v = DECL_VALUE_EXPR (decl);
  if (TREE_CODE (v) != COMPONENT_REF)
    return NULL_TREE;

  /* Strip the access path (component refs, dereferences, conversions,
     pointer arithmetic) down to its base.  */
  while (1)
    switch (TREE_CODE (v))
      {
      case COMPONENT_REF:
      case MEM_REF:
      case INDIRECT_REF:
      CASE_CONVERT:
      case POINTER_PLUS_EXPR:
	v = TREE_OPERAND (v, 0);
	continue;
      case PARM_DECL:
	/* Accept only an artificial pointer parameter of the current
	   function, i.e. the "this" parameter.  */
	if (DECL_CONTEXT (v) == current_function_decl
	    && DECL_ARTIFICIAL (v)
	    && TREE_CODE (TREE_TYPE (v)) == POINTER_TYPE)
	  return v;
	return NULL_TREE;
      default:
	return NULL_TREE;
      }
}
290
291 /* Helper for unshare_and_remap, called through walk_tree. */
292
293 static tree
294 unshare_and_remap_1 (tree *tp, int *walk_subtrees, void *data)
295 {
296 tree *pair = (tree *) data;
297 if (*tp == pair[0])
298 {
299 *tp = unshare_expr (pair[1]);
300 *walk_subtrees = 0;
301 }
302 else if (IS_TYPE_OR_DECL_P (*tp))
303 *walk_subtrees = 0;
304 return NULL_TREE;
305 }
306
307 /* Return unshare_expr (X) with all occurrences of FROM
308 replaced with TO. */
309
310 static tree
311 unshare_and_remap (tree x, tree from, tree to)
312 {
313 tree pair[2] = { from, to };
314 x = unshare_expr (x);
315 walk_tree (&x, unshare_and_remap_1, pair, NULL);
316 return x;
317 }
318
319 /* Convenience function for calling scan_omp_1_op on tree operands. */
320
321 static inline tree
322 scan_omp_op (tree *tp, omp_context *ctx)
323 {
324 struct walk_stmt_info wi;
325
326 memset (&wi, 0, sizeof (wi));
327 wi.info = ctx;
328 wi.want_locations = true;
329
330 return walk_tree (tp, scan_omp_1_op, &wi, NULL);
331 }
332
333 static void lower_omp (gimple_seq *, omp_context *);
334 static tree lookup_decl_in_outer_ctx (tree, omp_context *);
335 static tree maybe_lookup_decl_in_outer_ctx (tree, omp_context *);
336
337 /* Return true if CTX is for an omp parallel. */
338
339 static inline bool
340 is_parallel_ctx (omp_context *ctx)
341 {
342 return gimple_code (ctx->stmt) == GIMPLE_OMP_PARALLEL;
343 }
344
345
346 /* Return true if CTX is for an omp task. */
347
348 static inline bool
349 is_task_ctx (omp_context *ctx)
350 {
351 return gimple_code (ctx->stmt) == GIMPLE_OMP_TASK;
352 }
353
354
355 /* Return true if CTX is for an omp taskloop. */
356
357 static inline bool
358 is_taskloop_ctx (omp_context *ctx)
359 {
360 return gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
361 && gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_TASKLOOP;
362 }
363
364
365 /* Return true if CTX is for a host omp teams. */
366
367 static inline bool
368 is_host_teams_ctx (omp_context *ctx)
369 {
370 return gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS
371 && gimple_omp_teams_host (as_a <gomp_teams *> (ctx->stmt));
372 }
373
374 /* Return true if CTX is for an omp parallel or omp task or host omp teams
375 (the last one is strictly not a task region in OpenMP speak, but we
376 need to treat it similarly). */
377
378 static inline bool
379 is_taskreg_ctx (omp_context *ctx)
380 {
381 return is_parallel_ctx (ctx) || is_task_ctx (ctx) || is_host_teams_ctx (ctx);
382 }
383
384 /* Return true if EXPR is variable sized. */
385
386 static inline bool
387 is_variable_sized (const_tree expr)
388 {
389 return !TREE_CONSTANT (TYPE_SIZE_UNIT (TREE_TYPE (expr)));
390 }
391
392 /* Lookup variables. The "maybe" form
393 allows for the variable form to not have been entered, otherwise we
394 assert that the variable must have been entered. */
395
396 static inline tree
397 lookup_decl (tree var, omp_context *ctx)
398 {
399 tree *n = ctx->cb.decl_map->get (var);
400 return *n;
401 }
402
403 static inline tree
404 maybe_lookup_decl (const_tree var, omp_context *ctx)
405 {
406 tree *n = ctx->cb.decl_map->get (const_cast<tree> (var));
407 return n ? *n : NULL_TREE;
408 }
409
410 static inline tree
411 lookup_field (tree var, omp_context *ctx)
412 {
413 splay_tree_node n;
414 n = splay_tree_lookup (ctx->field_map, (splay_tree_key) var);
415 return (tree) n->value;
416 }
417
418 static inline tree
419 lookup_sfield (splay_tree_key key, omp_context *ctx)
420 {
421 splay_tree_node n;
422 n = splay_tree_lookup (ctx->sfield_map
423 ? ctx->sfield_map : ctx->field_map, key);
424 return (tree) n->value;
425 }
426
427 static inline tree
428 lookup_sfield (tree var, omp_context *ctx)
429 {
430 return lookup_sfield ((splay_tree_key) var, ctx);
431 }
432
433 static inline tree
434 maybe_lookup_field (splay_tree_key key, omp_context *ctx)
435 {
436 splay_tree_node n;
437 n = splay_tree_lookup (ctx->field_map, key);
438 return n ? (tree) n->value : NULL_TREE;
439 }
440
441 static inline tree
442 maybe_lookup_field (tree var, omp_context *ctx)
443 {
444 return maybe_lookup_field ((splay_tree_key) var, ctx);
445 }
446
/* Return true if DECL should be copied by pointer.  SHARED_CTX is
   the parallel context if DECL is to be shared.  */

static bool
use_pointer_for_field (tree decl, omp_context *shared_ctx)
{
  /* Aggregates and atomics always go by reference.  */
  if (AGGREGATE_TYPE_P (TREE_TYPE (decl))
      || TYPE_ATOMIC (TREE_TYPE (decl)))
    return true;

  /* We can only use copy-in/copy-out semantics for shared variables
     when we know the value is not accessible from an outer scope.  */
  if (shared_ctx)
    {
      gcc_assert (!is_gimple_omp_oacc (shared_ctx->stmt));

      /* ??? Trivially accessible from anywhere.  But why would we even
	 be passing an address in this case?  Should we simply assert
	 this to be false, or should we have a cleanup pass that removes
	 these from the list of mappings?  */
      if (is_global_var (maybe_lookup_decl_in_outer_ctx (decl, shared_ctx)))
	return true;

      /* For variables with DECL_HAS_VALUE_EXPR_P set, we cannot tell
	 without analyzing the expression whether or not its location
	 is accessible to anyone else.  In the case of nested parallel
	 regions it certainly may be.  */
      if (TREE_CODE (decl) != RESULT_DECL && DECL_HAS_VALUE_EXPR_P (decl))
	return true;

      /* Do not use copy-in/copy-out for variables that have their
	 address taken.  */
      if (is_global_var (decl))
	{
	  /* For file scope vars, track whether we've seen them as
	     non-addressable initially and in that case, keep the same
	     answer for the duration of the pass, even when they are made
	     addressable later on e.g. through reduction expansion.  Global
	     variables which weren't addressable before the pass will not
	     have their privatized copies address taken.  See PR91216.  */
	  if (!TREE_ADDRESSABLE (decl))
	    {
	      if (!global_nonaddressable_vars)
		global_nonaddressable_vars = BITMAP_ALLOC (NULL);
	      bitmap_set_bit (global_nonaddressable_vars, DECL_UID (decl));
	    }
	  else if (!global_nonaddressable_vars
		   || !bitmap_bit_p (global_nonaddressable_vars,
				     DECL_UID (decl)))
	    return true;
	}
      else if (TREE_ADDRESSABLE (decl))
	return true;

      /* lower_send_shared_vars only uses copy-in, but not copy-out
	 for these.  */
      if (TREE_READONLY (decl)
	  || ((TREE_CODE (decl) == RESULT_DECL
	       || TREE_CODE (decl) == PARM_DECL)
	      && DECL_BY_REFERENCE (decl)))
	return false;

      /* Disallow copy-in/out in nested parallel if
	 decl is shared in outer parallel, otherwise
	 each thread could store the shared variable
	 in its own copy-in location, making the
	 variable no longer really shared.  */
      if (shared_ctx->is_nested)
	{
	  omp_context *up;

	  /* Find the innermost enclosing taskreg or offloaded target
	     context that already maps DECL.  */
	  for (up = shared_ctx->outer; up; up = up->outer)
	    if ((is_taskreg_ctx (up)
		 || (gimple_code (up->stmt) == GIMPLE_OMP_TARGET
		     && is_gimple_omp_offloaded (up->stmt)))
		&& maybe_lookup_decl (decl, up))
	      break;

	  if (up)
	    {
	      tree c;

	      /* Check whether DECL is actually mapped/shared on that
		 enclosing construct's clauses.  */
	      if (gimple_code (up->stmt) == GIMPLE_OMP_TARGET)
		{
		  for (c = gimple_omp_target_clauses (up->stmt);
		       c; c = OMP_CLAUSE_CHAIN (c))
		    if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
			&& OMP_CLAUSE_DECL (c) == decl)
		      break;
		}
	      else
		for (c = gimple_omp_taskreg_clauses (up->stmt);
		     c; c = OMP_CLAUSE_CHAIN (c))
		  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED
		      && OMP_CLAUSE_DECL (c) == decl)
		    break;

	      if (c)
		goto maybe_mark_addressable_and_ret;
	    }
	}

      /* For tasks avoid using copy-in/out.  As tasks can be
	 deferred or executed in different thread, when GOMP_task
	 returns, the task hasn't necessarily terminated.  */
      if (is_task_ctx (shared_ctx))
	{
	  tree outer;
	  /* Also reached from the nested-parallel case above; both end
	     up passing DECL by pointer.  */
	maybe_mark_addressable_and_ret:
	  outer = maybe_lookup_decl_in_outer_ctx (decl, shared_ctx);
	  if (is_gimple_reg (outer) && !omp_member_access_dummy_var (outer))
	    {
	      /* Taking address of OUTER in lower_send_shared_vars
		 might need regimplification of everything that uses the
		 variable.  */
	      if (!task_shared_vars)
		task_shared_vars = BITMAP_ALLOC (NULL);
	      bitmap_set_bit (task_shared_vars, DECL_UID (outer));
	      TREE_ADDRESSABLE (outer) = 1;
	    }
	  return true;
	}
    }

  return false;
}
573
/* Construct a new automatic decl similar to VAR, named NAME and of type
   TYPE, and chain it onto CTX's block_vars.  */

static tree
omp_copy_decl_2 (tree var, tree name, tree type, omp_context *ctx)
{
  tree copy = copy_var_decl (var, name, type);

  DECL_CONTEXT (copy) = current_function_decl;
  DECL_CHAIN (copy) = ctx->block_vars;
  /* If VAR is listed in task_shared_vars, it means it wasn't
     originally addressable and is so only because a task needed to take
     its address; likewise for globals in global_nonaddressable_vars.
     We don't need to take the address of privatizations
     from such a var.  */
  if (TREE_ADDRESSABLE (var)
      && ((task_shared_vars
	   && bitmap_bit_p (task_shared_vars, DECL_UID (var)))
	  || (global_nonaddressable_vars
	      && bitmap_bit_p (global_nonaddressable_vars, DECL_UID (var)))))
    TREE_ADDRESSABLE (copy) = 0;
  ctx->block_vars = copy;

  return copy;
}
597
598 static tree
599 omp_copy_decl_1 (tree var, omp_context *ctx)
600 {
601 return omp_copy_decl_2 (var, DECL_NAME (var), TREE_TYPE (var), ctx);
602 }
603
604 /* Build COMPONENT_REF and set TREE_THIS_VOLATILE and TREE_READONLY on it
605 as appropriate. */
606 static tree
607 omp_build_component_ref (tree obj, tree field)
608 {
609 tree ret = build3 (COMPONENT_REF, TREE_TYPE (field), obj, field, NULL);
610 if (TREE_THIS_VOLATILE (field))
611 TREE_THIS_VOLATILE (ret) |= 1;
612 if (TREE_READONLY (field))
613 TREE_READONLY (ret) |= 1;
614 return ret;
615 }
616
617 /* Build tree nodes to access the field for VAR on the receiver side. */
618
619 static tree
620 build_receiver_ref (tree var, bool by_ref, omp_context *ctx)
621 {
622 tree x, field = lookup_field (var, ctx);
623
624 /* If the receiver record type was remapped in the child function,
625 remap the field into the new record type. */
626 x = maybe_lookup_field (field, ctx);
627 if (x != NULL)
628 field = x;
629
630 x = build_simple_mem_ref (ctx->receiver_decl);
631 TREE_THIS_NOTRAP (x) = 1;
632 x = omp_build_component_ref (x, field);
633 if (by_ref)
634 {
635 x = build_simple_mem_ref (x);
636 TREE_THIS_NOTRAP (x) = 1;
637 }
638
639 return x;
640 }
641
/* Build tree nodes to access VAR in the scope outer to CTX.  In the case
   of a parallel, this is a component reference; for workshare constructs
   this is some variable.  CODE, if not OMP_CLAUSE_ERROR, is the clause
   kind for which the outer reference is being built.  */

static tree
build_outer_var_ref (tree var, omp_context *ctx,
		     enum omp_clause_code code = OMP_CLAUSE_ERROR)
{
  tree x;
  /* Taskgroup contexts never remap decls; skip past them.  */
  omp_context *outer = ctx->outer;
  while (outer && gimple_code (outer->stmt) == GIMPLE_OMP_TASKGROUP)
    outer = outer->outer;

  if (is_global_var (maybe_lookup_decl_in_outer_ctx (var, ctx)))
    x = var;
  else if (is_variable_sized (var))
    {
      /* For VLAs, recurse on the pointer behind the decl's value
	 expression and dereference the result.  */
      x = TREE_OPERAND (DECL_VALUE_EXPR (var), 0);
      x = build_outer_var_ref (x, ctx, code);
      x = build_simple_mem_ref (x);
    }
  else if (is_taskreg_ctx (ctx))
    {
      bool by_ref = use_pointer_for_field (var, NULL);
      x = build_receiver_ref (var, by_ref, ctx);
    }
  else if ((gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
	    && gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_SIMD)
	   || ctx->loop_p
	   || (code == OMP_CLAUSE_PRIVATE
	       && (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
		   || gimple_code (ctx->stmt) == GIMPLE_OMP_SECTIONS
		   || gimple_code (ctx->stmt) == GIMPLE_OMP_SINGLE)))
    {
      /* #pragma omp simd isn't a worksharing construct, and can reference
	 even private vars in its linear etc. clauses.
	 Similarly for OMP_CLAUSE_PRIVATE with outer ref, that can refer
	 to private vars in all worksharing constructs.  */
      x = NULL_TREE;
      if (outer && is_taskreg_ctx (outer))
	x = lookup_decl (var, outer);
      else if (outer)
	x = maybe_lookup_decl_in_outer_ctx (var, ctx);
      if (x == NULL_TREE)
	x = var;
    }
  else if (code == OMP_CLAUSE_LASTPRIVATE && is_taskloop_ctx (ctx))
    {
      gcc_assert (outer);
      /* Taskloop fields are keyed by &DECL_UID (see install_var_field
	 with mask bit 8).  */
      splay_tree_node n
	= splay_tree_lookup (outer->field_map,
			     (splay_tree_key) &DECL_UID (var));
      if (n == NULL)
	{
	  if (is_global_var (maybe_lookup_decl_in_outer_ctx (var, outer)))
	    x = var;
	  else
	    x = lookup_decl (var, outer);
	}
      else
	{
	  tree field = (tree) n->value;
	  /* If the receiver record type was remapped in the child function,
	     remap the field into the new record type.  */
	  x = maybe_lookup_field (field, outer);
	  if (x != NULL)
	    field = x;

	  x = build_simple_mem_ref (outer->receiver_decl);
	  x = omp_build_component_ref (x, field);
	  if (use_pointer_for_field (var, outer))
	    x = build_simple_mem_ref (x);
	}
    }
  else if (outer)
    x = lookup_decl (var, outer);
  else if (omp_is_reference (var))
    /* This can happen with orphaned constructs.  If var is reference, it is
       possible it is shared and as such valid.  */
    x = var;
  else if (omp_member_access_dummy_var (var))
    x = var;
  else
    gcc_unreachable ();

  if (x == var)
    {
      /* For member-access dummy vars, substitute their value expression,
	 remapping the underlying "this" parameter if it was remapped.  */
      tree t = omp_member_access_dummy_var (var);
      if (t)
	{
	  x = DECL_VALUE_EXPR (var);
	  tree o = maybe_lookup_decl_in_outer_ctx (t, ctx);
	  if (o != t)
	    x = unshare_and_remap (x, t, o);
	  else
	    x = unshare_expr (x);
	}
    }

  if (omp_is_reference (var))
    x = build_simple_mem_ref (x);

  return x;
}
746
747 /* Build tree nodes to access the field for VAR on the sender side. */
748
749 static tree
750 build_sender_ref (splay_tree_key key, omp_context *ctx)
751 {
752 tree field = lookup_sfield (key, ctx);
753 return omp_build_component_ref (ctx->sender_decl, field);
754 }
755
756 static tree
757 build_sender_ref (tree var, omp_context *ctx)
758 {
759 return build_sender_ref ((splay_tree_key) var, ctx);
760 }
761
/* Add a new field for VAR inside the structure CTX->RECORD_TYPE (and/or
   CTX->SRECORD_TYPE, according to MASK).  MASK is a bitmask:
     1  - enter the field into CTX->FIELD_MAP / CTX->RECORD_TYPE.
     2  - enter the field into CTX->SFIELD_MAP / CTX->SRECORD_TYPE.
     4  - VAR is an array; use a pointer to a pointer to its type.
     8  - key the maps by &DECL_UID (VAR) instead of VAR itself.
     16 - key the maps by &DECL_NAME (VAR) and use the language's
	  omp_array_data type for the field.
     32 - together with bit 1, suppress the stripping of reference
	  types below.  */

static void
install_var_field (tree var, bool by_ref, int mask, omp_context *ctx)
{
  tree field, type, sfield = NULL_TREE;
  splay_tree_key key = (splay_tree_key) var;

  if ((mask & 16) != 0)
    {
      key = (splay_tree_key) &DECL_NAME (var);
      gcc_checking_assert (key != (splay_tree_key) var);
    }
  if ((mask & 8) != 0)
    {
      key = (splay_tree_key) &DECL_UID (var);
      gcc_checking_assert (key != (splay_tree_key) var);
    }
  gcc_assert ((mask & 1) == 0
	      || !splay_tree_lookup (ctx->field_map, key));
  gcc_assert ((mask & 2) == 0 || !ctx->sfield_map
	      || !splay_tree_lookup (ctx->sfield_map, key));
  gcc_assert ((mask & 3) == 3
	      || !is_gimple_omp_oacc (ctx->stmt));

  type = TREE_TYPE (var);
  if ((mask & 16) != 0)
    type = lang_hooks.decls.omp_array_data (var, true);

  /* Prevent redeclaring the var in the split-off function with a restrict
     pointer type.  Note that we only clear type itself, restrict qualifiers in
     the pointed-to type will be ignored by points-to analysis.  */
  if (POINTER_TYPE_P (type)
      && TYPE_RESTRICT (type))
    type = build_qualified_type (type, TYPE_QUALS (type) & ~TYPE_QUAL_RESTRICT);

  if (mask & 4)
    {
      gcc_assert (TREE_CODE (type) == ARRAY_TYPE);
      type = build_pointer_type (build_pointer_type (type));
    }
  else if (by_ref)
    type = build_pointer_type (type);
  else if ((mask & (32 | 3)) == 1 && omp_is_reference (var))
    type = TREE_TYPE (type);

  field = build_decl (DECL_SOURCE_LOCATION (var),
		      FIELD_DECL, DECL_NAME (var), type);

  /* Remember what variable this field was created for.  This does have a
     side effect of making dwarf2out ignore this member, so for helpful
     debugging we clear it later in delete_omp_context.  */
  DECL_ABSTRACT_ORIGIN (field) = var;
  if ((mask & 16) == 0 && type == TREE_TYPE (var))
    {
      SET_DECL_ALIGN (field, DECL_ALIGN (var));
      DECL_USER_ALIGN (field) = DECL_USER_ALIGN (var);
      TREE_THIS_VOLATILE (field) = TREE_THIS_VOLATILE (var);
    }
  else
    SET_DECL_ALIGN (field, TYPE_ALIGN (type));

  if ((mask & 3) == 3)
    {
      insert_field_into_struct (ctx->record_type, field);
      if (ctx->srecord_type)
	{
	  sfield = build_decl (DECL_SOURCE_LOCATION (var),
			       FIELD_DECL, DECL_NAME (var), type);
	  DECL_ABSTRACT_ORIGIN (sfield) = var;
	  SET_DECL_ALIGN (sfield, DECL_ALIGN (field));
	  DECL_USER_ALIGN (sfield) = DECL_USER_ALIGN (field);
	  TREE_THIS_VOLATILE (sfield) = TREE_THIS_VOLATILE (field);
	  insert_field_into_struct (ctx->srecord_type, sfield);
	}
    }
  else
    {
      if (ctx->srecord_type == NULL_TREE)
	{
	  tree t;

	  /* Lazily create srecord_type, seeding it with copies of the
	     fields already present in record_type.  */
	  ctx->srecord_type = lang_hooks.types.make_type (RECORD_TYPE);
	  ctx->sfield_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
	  for (t = TYPE_FIELDS (ctx->record_type); t ; t = TREE_CHAIN (t))
	    {
	      sfield = build_decl (DECL_SOURCE_LOCATION (t),
				   FIELD_DECL, DECL_NAME (t), TREE_TYPE (t));
	      DECL_ABSTRACT_ORIGIN (sfield) = DECL_ABSTRACT_ORIGIN (t);
	      insert_field_into_struct (ctx->srecord_type, sfield);
	      splay_tree_insert (ctx->sfield_map,
				 (splay_tree_key) DECL_ABSTRACT_ORIGIN (t),
				 (splay_tree_value) sfield);
	    }
	}
      sfield = field;
      insert_field_into_struct ((mask & 1) ? ctx->record_type
				: ctx->srecord_type, field);
    }

  if (mask & 1)
    splay_tree_insert (ctx->field_map, key, (splay_tree_value) field);
  if ((mask & 2) && ctx->sfield_map)
    splay_tree_insert (ctx->sfield_map, key, (splay_tree_value) sfield);
}
868
869 static tree
870 install_var_local (tree var, omp_context *ctx)
871 {
872 tree new_var = omp_copy_decl_1 (var, ctx);
873 insert_decl_map (&ctx->cb, var, new_var);
874 return new_var;
875 }
876
/* Adjust the replacement for DECL in CTX for the new context.  This means
   copying the DECL_VALUE_EXPR, and fixing up the type.  PRIVATE_DEBUG
   forces the value expression to be remapped even for constant-sized
   decls.  */

static void
fixup_remapped_decl (tree decl, omp_context *ctx, bool private_debug)
{
  tree new_decl, size;

  new_decl = lookup_decl (decl, ctx);

  TREE_TYPE (new_decl) = remap_type (TREE_TYPE (decl), &ctx->cb);

  if ((!TREE_CONSTANT (DECL_SIZE (new_decl)) || private_debug)
      && DECL_HAS_VALUE_EXPR_P (decl))
    {
      /* Remap the value expression into the new context.  */
      tree ve = DECL_VALUE_EXPR (decl);
      walk_tree (&ve, copy_tree_body_r, &ctx->cb, NULL);
      SET_DECL_VALUE_EXPR (new_decl, ve);
      DECL_HAS_VALUE_EXPR_P (new_decl) = 1;
    }

  if (!TREE_CONSTANT (DECL_SIZE (new_decl)))
    {
      /* Variable-sized decl: remap its size trees, falling back to the
	 remapped type's sizes on error.  */
      size = remap_decl (DECL_SIZE (decl), &ctx->cb);
      if (size == error_mark_node)
	size = TYPE_SIZE (TREE_TYPE (new_decl));
      DECL_SIZE (new_decl) = size;

      size = remap_decl (DECL_SIZE_UNIT (decl), &ctx->cb);
      if (size == error_mark_node)
	size = TYPE_SIZE_UNIT (TREE_TYPE (new_decl));
      DECL_SIZE_UNIT (new_decl) = size;
    }
}
911
/* The callback for remap_decl.  Search all containing contexts for a
   mapping of the variable; this avoids having to duplicate the splay
   tree ahead of time.  We know a mapping doesn't already exist in the
   given context.  Create new mappings to implement default semantics.  */

static tree
omp_copy_decl (tree var, copy_body_data *cb)
{
  /* CB is really the omp_context whose cb field it is (see the comment
     on omp_context::cb).  */
  omp_context *ctx = (omp_context *) cb;
  tree new_var;

  if (TREE_CODE (var) == LABEL_DECL)
    {
      /* Forced or nonlocal labels must not be duplicated; others get a
	 fresh artificial label in the current function.  */
      if (FORCED_LABEL (var) || DECL_NONLOCAL (var))
	return var;
      new_var = create_artificial_label (DECL_SOURCE_LOCATION (var));
      DECL_CONTEXT (new_var) = current_function_decl;
      insert_decl_map (&ctx->cb, var, new_var);
      return new_var;
    }

  /* Walk outward through non-taskreg contexts, returning any existing
     mapping found along the way.  */
  while (!is_taskreg_ctx (ctx))
    {
      ctx = ctx->outer;
      if (ctx == NULL)
	return var;
      new_var = maybe_lookup_decl (var, ctx);
      if (new_var)
	return new_var;
    }

  if (is_global_var (var) || decl_function_context (var) != ctx->cb.src_fn)
    return var;

  /* A local of the source function with no mapping anywhere: signal the
     caller that default semantics must create one.  */
  return error_mark_node;
}
948
/* Create a new context, with OUTER_CTX being the surrounding context.
   The new context is registered in ALL_CONTEXTS keyed by STMT.  */

static omp_context *
new_omp_context (gimple *stmt, omp_context *outer_ctx)
{
  omp_context *ctx = XCNEW (omp_context);

  splay_tree_insert (all_contexts, (splay_tree_key) stmt,
		     (splay_tree_value) ctx);
  ctx->stmt = stmt;

  if (outer_ctx)
    {
      /* Inherit the copy_body_data from the enclosing context.  */
      ctx->outer = outer_ctx;
      ctx->cb = outer_ctx->cb;
      ctx->cb.block = NULL;
      ctx->depth = outer_ctx->depth + 1;
    }
  else
    {
      /* Outermost context: initialize the copy_body_data from scratch,
	 copying within the current function.  */
      ctx->cb.src_fn = current_function_decl;
      ctx->cb.dst_fn = current_function_decl;
      ctx->cb.src_node = cgraph_node::get (current_function_decl);
      gcc_checking_assert (ctx->cb.src_node);
      ctx->cb.dst_node = ctx->cb.src_node;
      ctx->cb.src_cfun = cfun;
      ctx->cb.copy_decl = omp_copy_decl;
      ctx->cb.eh_lp_nr = 0;
      ctx->cb.transform_call_graph_edges = CB_CGE_MOVE;
      ctx->cb.adjust_array_error_bounds = true;
      ctx->cb.dont_remap_vla_if_no_change = true;
      ctx->depth = 1;
    }

  /* Each context gets its own decl map (freed in delete_omp_context).  */
  ctx->cb.decl_map = new hash_map<tree, tree>;

  return ctx;
}
987
988 static gimple_seq maybe_catch_exception (gimple_seq);
989
/* Finalize task copyfn: gimplify the copy function of TASK_STMT (if any),
   wrap it for EH if needed, and register it with the callgraph.  */

static void
finalize_task_copyfn (gomp_task *task_stmt)
{
  struct function *child_cfun;
  tree child_fn;
  gimple_seq seq = NULL, new_seq;
  gbind *bind;

  child_fn = gimple_omp_task_copy_fn (task_stmt);
  if (child_fn == NULL_TREE)
    return;

  child_cfun = DECL_STRUCT_FUNCTION (child_fn);
  DECL_STRUCT_FUNCTION (child_fn)->curr_properties = cfun->curr_properties;

  /* Gimplify the copy function's body within its own cfun.  */
  push_cfun (child_cfun);
  bind = gimplify_body (child_fn, false);
  gimple_seq_add_stmt (&seq, bind);
  new_seq = maybe_catch_exception (seq);
  if (new_seq != seq)
    {
      /* EH wrapping added a try/catch; re-wrap it in a bind.  */
      bind = gimple_build_bind (NULL, new_seq, NULL);
      seq = NULL;
      gimple_seq_add_stmt (&seq, bind);
    }
  gimple_set_body (child_fn, seq);
  pop_cfun ();

  /* Inform the callgraph about the new function.  */
  cgraph_node *node = cgraph_node::get_create (child_fn);
  node->parallelized_function = 1;
  cgraph_node::add_new_function (child_fn, false);
}
1025
1026 /* Destroy a omp_context data structures. Called through the splay tree
1027 value delete callback. */
1028
1029 static void
1030 delete_omp_context (splay_tree_value value)
1031 {
1032 omp_context *ctx = (omp_context *) value;
1033
1034 delete ctx->cb.decl_map;
1035
1036 if (ctx->field_map)
1037 splay_tree_delete (ctx->field_map);
1038 if (ctx->sfield_map)
1039 splay_tree_delete (ctx->sfield_map);
1040
1041 /* We hijacked DECL_ABSTRACT_ORIGIN earlier. We need to clear it before
1042 it produces corrupt debug information. */
1043 if (ctx->record_type)
1044 {
1045 tree t;
1046 for (t = TYPE_FIELDS (ctx->record_type); t ; t = DECL_CHAIN (t))
1047 DECL_ABSTRACT_ORIGIN (t) = NULL;
1048 }
1049 if (ctx->srecord_type)
1050 {
1051 tree t;
1052 for (t = TYPE_FIELDS (ctx->srecord_type); t ; t = DECL_CHAIN (t))
1053 DECL_ABSTRACT_ORIGIN (t) = NULL;
1054 }
1055
1056 if (is_task_ctx (ctx))
1057 finalize_task_copyfn (as_a <gomp_task *> (ctx->stmt));
1058
1059 if (ctx->task_reduction_map)
1060 {
1061 ctx->task_reductions.release ();
1062 delete ctx->task_reduction_map;
1063 }
1064
1065 delete ctx->lastprivate_conditional_map;
1066 delete ctx->allocate_map;
1067
1068 XDELETE (ctx);
1069 }
1070
/* Fix up RECEIVER_DECL with a type that has been remapped to the child
   context.  */

static void
fixup_child_record_type (omp_context *ctx)
{
  tree f, type = ctx->record_type;

  if (!ctx->receiver_decl)
    return;
  /* ??? It isn't sufficient to just call remap_type here, because
     variably_modified_type_p doesn't work the way we expect for
     record types.  Testing each field for whether it needs remapping
     and creating a new record by hand works, however.  */
  for (f = TYPE_FIELDS (type); f ; f = DECL_CHAIN (f))
    if (variably_modified_type_p (TREE_TYPE (f), ctx->cb.src_fn))
      break;
  if (f)
    {
      /* At least one field has a variably modified type: build a fresh
	 RECORD_TYPE with every field's type, size, and offset remapped
	 into the child context.  */
      tree name, new_fields = NULL;

      type = lang_hooks.types.make_type (RECORD_TYPE);
      name = DECL_NAME (TYPE_NAME (ctx->record_type));
      name = build_decl (DECL_SOURCE_LOCATION (ctx->receiver_decl),
			 TYPE_DECL, name, type);
      TYPE_NAME (type) = name;

      for (f = TYPE_FIELDS (ctx->record_type); f ; f = DECL_CHAIN (f))
	{
	  tree new_f = copy_node (f);
	  DECL_CONTEXT (new_f) = type;
	  TREE_TYPE (new_f) = remap_type (TREE_TYPE (f), &ctx->cb);
	  /* Fields are prepended here and put back in order by the
	     nreverse below.  */
	  DECL_CHAIN (new_f) = new_fields;
	  /* Size and offset expressions may reference decls that were
	     remapped; rewrite them for the child context too.  */
	  walk_tree (&DECL_SIZE (new_f), copy_tree_body_r, &ctx->cb, NULL);
	  walk_tree (&DECL_SIZE_UNIT (new_f), copy_tree_body_r,
		     &ctx->cb, NULL);
	  walk_tree (&DECL_FIELD_OFFSET (new_f), copy_tree_body_r,
		     &ctx->cb, NULL);
	  new_fields = new_f;

	  /* Arrange to be able to look up the receiver field
	     given the sender field.  */
	  splay_tree_insert (ctx->field_map, (splay_tree_key) f,
			     (splay_tree_value) new_f);
	}
      TYPE_FIELDS (type) = nreverse (new_fields);
      layout_type (type);
    }

  /* In a target region we never modify any of the pointers in *.omp_data_i,
     so attempt to help the optimizers.  */
  if (is_gimple_omp_offloaded (ctx->stmt))
    type = build_qualified_type (type, TYPE_QUAL_CONST);

  /* The receiver decl is a restrict-qualified reference to the (possibly
     rebuilt) record type.  */
  TREE_TYPE (ctx->receiver_decl)
    = build_qualified_type (build_reference_type (type), TYPE_QUAL_RESTRICT);
}
1128
/* Instantiate decls as necessary in CTX to satisfy the data sharing
   specified by CLAUSES.  Three passes: record requested allocators,
   install fields/locals per clause, then fix up remapped decls and
   scan any reduction/lastprivate/linear gimple sequences.  */

static void
scan_sharing_clauses (tree clauses, omp_context *ctx)
{
  tree c, decl;
  bool scan_array_reductions = false;

  /* Pass 1: populate ctx->allocate_map with the allocator requested by
     each 'allocate' clause, skipping the default allocator.  */
  for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
    if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_ALLOCATE
	&& (OMP_CLAUSE_ALLOCATE_ALLOCATOR (c) == NULL_TREE
	    /* omp_default_mem_alloc is 1 */
	    || !integer_onep (OMP_CLAUSE_ALLOCATE_ALLOCATOR (c))))
      {
	if (ctx->allocate_map == NULL)
	  ctx->allocate_map = new hash_map<tree, tree>;
	/* integer_zero_node marks "allocator not constant/known".  */
	ctx->allocate_map->put (OMP_CLAUSE_DECL (c),
				OMP_CLAUSE_ALLOCATE_ALLOCATOR (c)
				? OMP_CLAUSE_ALLOCATE_ALLOCATOR (c)
				: integer_zero_node);
      }

  /* Pass 2: install record fields and local copies for each clause.  */
  for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
    {
      bool by_ref;

      switch (OMP_CLAUSE_CODE (c))
	{
	case OMP_CLAUSE_PRIVATE:
	  decl = OMP_CLAUSE_DECL (c);
	  if (OMP_CLAUSE_PRIVATE_OUTER_REF (c))
	    goto do_private;
	  else if (!is_variable_sized (decl))
	    install_var_local (decl, ctx);
	  break;

	case OMP_CLAUSE_SHARED:
	  decl = OMP_CLAUSE_DECL (c);
	  /* A variable shared here must not use a non-default
	     allocator recorded in pass 1.  */
	  if (ctx->allocate_map && ctx->allocate_map->get (decl))
	    ctx->allocate_map->remove (decl);
	  /* Ignore shared directives in teams construct inside of
	     target construct.  */
	  if (gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS
	      && !is_host_teams_ctx (ctx))
	    {
	      /* Global variables don't need to be copied,
		 the receiver side will use them directly.  */
	      tree odecl = maybe_lookup_decl_in_outer_ctx (decl, ctx);
	      if (is_global_var (odecl))
		break;
	      insert_decl_map (&ctx->cb, decl, odecl);
	      break;
	    }
	  gcc_assert (is_taskreg_ctx (ctx));
	  gcc_assert (!COMPLETE_TYPE_P (TREE_TYPE (decl))
		      || !is_variable_sized (decl));
	  /* Global variables don't need to be copied,
	     the receiver side will use them directly.  */
	  if (is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx)))
	    break;
	  if (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
	    {
	      use_pointer_for_field (decl, ctx);
	      break;
	    }
	  by_ref = use_pointer_for_field (decl, NULL);
	  if ((! TREE_READONLY (decl) && !OMP_CLAUSE_SHARED_READONLY (c))
	      || TREE_ADDRESSABLE (decl)
	      || by_ref
	      || omp_is_reference (decl))
	    {
	      by_ref = use_pointer_for_field (decl, ctx);
	      install_var_field (decl, by_ref, 3, ctx);
	      install_var_local (decl, ctx);
	      break;
	    }
	  /* We don't need to copy const scalar vars back.  */
	  OMP_CLAUSE_SET_CODE (c, OMP_CLAUSE_FIRSTPRIVATE);
	  goto do_private;

	case OMP_CLAUSE_REDUCTION:
	  /* Collect 'reduction' clauses on OpenACC compute construct.  */
	  if (is_gimple_omp_oacc (ctx->stmt)
	      && is_gimple_omp_offloaded (ctx->stmt))
	    {
	      /* No 'reduction' clauses on OpenACC 'kernels'.  */
	      gcc_checking_assert (!is_oacc_kernels (ctx));
	      /* Likewise, on OpenACC 'kernels' decomposed parts.  */
	      gcc_checking_assert (!is_oacc_kernels_decomposed_part (ctx));

	      ctx->local_reduction_clauses
		= tree_cons (NULL, c, ctx->local_reduction_clauses);
	    }
	  /* FALLTHRU */

	case OMP_CLAUSE_IN_REDUCTION:
	  decl = OMP_CLAUSE_DECL (c);
	  if (ctx->allocate_map
	      && ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
		   && (OMP_CLAUSE_REDUCTION_INSCAN (c)
		       || OMP_CLAUSE_REDUCTION_TASK (c)))
		  || is_task_ctx (ctx)))
	    {
	      /* For now.  */
	      if (ctx->allocate_map->get (decl))
		ctx->allocate_map->remove (decl);
	    }
	  if (TREE_CODE (decl) == MEM_REF)
	    {
	      /* Array-section reduction: strip down to the base decl.  */
	      tree t = TREE_OPERAND (decl, 0);
	      if (TREE_CODE (t) == POINTER_PLUS_EXPR)
		t = TREE_OPERAND (t, 0);
	      if (TREE_CODE (t) == INDIRECT_REF
		  || TREE_CODE (t) == ADDR_EXPR)
		t = TREE_OPERAND (t, 0);
	      install_var_local (t, ctx);
	      if (is_taskreg_ctx (ctx)
		  && (!is_global_var (maybe_lookup_decl_in_outer_ctx (t, ctx))
		      || (is_task_ctx (ctx)
			  && (TREE_CODE (TREE_TYPE (t)) == POINTER_TYPE
			      || (TREE_CODE (TREE_TYPE (t)) == REFERENCE_TYPE
				  && (TREE_CODE (TREE_TYPE (TREE_TYPE (t)))
				      == POINTER_TYPE)))))
		  && !is_variable_sized (t)
		  && (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION
		      || (!OMP_CLAUSE_REDUCTION_TASK (c)
			  && !is_task_ctx (ctx))))
		{
		  by_ref = use_pointer_for_field (t, NULL);
		  if (is_task_ctx (ctx)
		      && TREE_CODE (TREE_TYPE (t)) == REFERENCE_TYPE
		      && TREE_CODE (TREE_TYPE (TREE_TYPE (t))) == POINTER_TYPE)
		    {
		      install_var_field (t, false, 1, ctx);
		      install_var_field (t, by_ref, 2, ctx);
		    }
		  else
		    install_var_field (t, by_ref, 3, ctx);
		}
	      break;
	    }
	  if (is_task_ctx (ctx)
	      || (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
		  && OMP_CLAUSE_REDUCTION_TASK (c)
		  && is_parallel_ctx (ctx)))
	    {
	      /* Global variables don't need to be copied,
		 the receiver side will use them directly.  */
	      if (!is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx)))
		{
		  by_ref = use_pointer_for_field (decl, ctx);
		  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_IN_REDUCTION)
		    install_var_field (decl, by_ref, 3, ctx);
		}
	      install_var_local (decl, ctx);
	      break;
	    }
	  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
	      && OMP_CLAUSE_REDUCTION_TASK (c))
	    {
	      install_var_local (decl, ctx);
	      break;
	    }
	  goto do_private;

	case OMP_CLAUSE_LASTPRIVATE:
	  /* Let the corresponding firstprivate clause create
	     the variable.  */
	  if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
	    break;
	  /* FALLTHRU */

	case OMP_CLAUSE_FIRSTPRIVATE:
	case OMP_CLAUSE_LINEAR:
	  decl = OMP_CLAUSE_DECL (c);
	do_private:
	  if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE
	       || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_IS_DEVICE_PTR)
	      && is_gimple_omp_offloaded (ctx->stmt))
	    {
	      if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
		install_var_field (decl, !omp_is_reference (decl), 3, ctx);
	      else if (TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
		install_var_field (decl, true, 3, ctx);
	      else
		install_var_field (decl, false, 3, ctx);
	    }
	  if (is_variable_sized (decl))
	    {
	      if (is_task_ctx (ctx))
		{
		  if (ctx->allocate_map
		      && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
		    {
		      /* For now.  */
		      if (ctx->allocate_map->get (decl))
			ctx->allocate_map->remove (decl);
		    }
		  install_var_field (decl, false, 1, ctx);
		}
	      break;
	    }
	  else if (is_taskreg_ctx (ctx))
	    {
	      bool global
		= is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx));
	      by_ref = use_pointer_for_field (decl, NULL);

	      if (is_task_ctx (ctx)
		  && (global || by_ref || omp_is_reference (decl)))
		{
		  if (ctx->allocate_map
		      && ctx->allocate_map->get (decl))
		    install_var_field (decl, by_ref, 32 | 1, ctx);
		  else
		    install_var_field (decl, false, 1, ctx);
		  if (!global)
		    install_var_field (decl, by_ref, 2, ctx);
		}
	      else if (!global)
		install_var_field (decl, by_ref, 3, ctx);
	    }
	  install_var_local (decl, ctx);
	  break;

	case OMP_CLAUSE_USE_DEVICE_PTR:
	case OMP_CLAUSE_USE_DEVICE_ADDR:
	  decl = OMP_CLAUSE_DECL (c);

	  /* Fortran array descriptors.  */
	  if (lang_hooks.decls.omp_array_data (decl, true))
	    install_var_field (decl, false, 19, ctx);
	  else if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_USE_DEVICE_ADDR
		    && !omp_is_reference (decl)
		    && !omp_is_allocatable_or_ptr (decl))
		   || TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
	    install_var_field (decl, true, 11, ctx);
	  else
	    install_var_field (decl, false, 11, ctx);
	  if (DECL_SIZE (decl)
	      && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
	    {
	      /* Variable-sized decl: also install the underlying
		 pointer from its DECL_VALUE_EXPR.  */
	      tree decl2 = DECL_VALUE_EXPR (decl);
	      gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
	      decl2 = TREE_OPERAND (decl2, 0);
	      gcc_assert (DECL_P (decl2));
	      install_var_local (decl2, ctx);
	    }
	  install_var_local (decl, ctx);
	  break;

	case OMP_CLAUSE_IS_DEVICE_PTR:
	  decl = OMP_CLAUSE_DECL (c);
	  goto do_private;

	case OMP_CLAUSE__LOOPTEMP_:
	case OMP_CLAUSE__REDUCTEMP_:
	  gcc_assert (is_taskreg_ctx (ctx));
	  decl = OMP_CLAUSE_DECL (c);
	  install_var_field (decl, false, 3, ctx);
	  install_var_local (decl, ctx);
	  break;

	case OMP_CLAUSE_COPYPRIVATE:
	case OMP_CLAUSE_COPYIN:
	  decl = OMP_CLAUSE_DECL (c);
	  by_ref = use_pointer_for_field (decl, NULL);
	  install_var_field (decl, by_ref, 3, ctx);
	  break;

	case OMP_CLAUSE_FINAL:
	case OMP_CLAUSE_IF:
	case OMP_CLAUSE_NUM_THREADS:
	case OMP_CLAUSE_NUM_TEAMS:
	case OMP_CLAUSE_THREAD_LIMIT:
	case OMP_CLAUSE_DEVICE:
	case OMP_CLAUSE_SCHEDULE:
	case OMP_CLAUSE_DIST_SCHEDULE:
	case OMP_CLAUSE_DEPEND:
	case OMP_CLAUSE_PRIORITY:
	case OMP_CLAUSE_GRAINSIZE:
	case OMP_CLAUSE_NUM_TASKS:
	case OMP_CLAUSE_NUM_GANGS:
	case OMP_CLAUSE_NUM_WORKERS:
	case OMP_CLAUSE_VECTOR_LENGTH:
	  /* Operand expressions are evaluated in the enclosing context.  */
	  if (ctx->outer)
	    scan_omp_op (&OMP_CLAUSE_OPERAND (c, 0), ctx->outer);
	  break;

	case OMP_CLAUSE_TO:
	case OMP_CLAUSE_FROM:
	case OMP_CLAUSE_MAP:
	  if (ctx->outer)
	    scan_omp_op (&OMP_CLAUSE_SIZE (c), ctx->outer);
	  decl = OMP_CLAUSE_DECL (c);
	  /* Global variables with "omp declare target" attribute
	     don't need to be copied, the receiver side will use them
	     directly.  However, global variables with "omp declare target link"
	     attribute need to be copied.  Or when ALWAYS modifier is used.  */
	  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
	      && DECL_P (decl)
	      && ((OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_FIRSTPRIVATE_POINTER
		   && (OMP_CLAUSE_MAP_KIND (c)
		       != GOMP_MAP_FIRSTPRIVATE_REFERENCE)
		   && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_ATTACH
		   && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_DETACH)
		  || TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
	      && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_ALWAYS_TO
	      && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_ALWAYS_FROM
	      && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_ALWAYS_TOFROM
	      && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_TO_PSET
	      && is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx))
	      && varpool_node::get_create (decl)->offloadable
	      && !lookup_attribute ("omp declare target link",
				    DECL_ATTRIBUTES (decl)))
	    break;
	  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
	      && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_POINTER)
	    {
	      /* Ignore GOMP_MAP_POINTER kind for arrays in regions that are
		 not offloaded; there is nothing to map for those.  */
	      if (!is_gimple_omp_offloaded (ctx->stmt)
		  && !POINTER_TYPE_P (TREE_TYPE (decl))
		  && !OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c))
		break;
	    }
	  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
	      && DECL_P (decl)
	      && (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ATTACH
		  || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_DETACH)
	      && is_omp_target (ctx->stmt))
	    {
	      /* If this is an offloaded region, an attach operation should
		 only exist when the pointer variable is mapped in a prior
		 clause.  */
	      if (is_gimple_omp_offloaded (ctx->stmt))
		gcc_assert
		  (maybe_lookup_decl (decl, ctx)
		   || (is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx))
		       && lookup_attribute ("omp declare target",
					    DECL_ATTRIBUTES (decl))));

	      /* By itself, attach/detach is generated as part of pointer
		 variable mapping and should not create new variables in the
		 offloaded region, however sender refs for it must be created
		 for its address to be passed to the runtime.  */
	      tree field
		= build_decl (OMP_CLAUSE_LOCATION (c),
			      FIELD_DECL, NULL_TREE, ptr_type_node);
	      SET_DECL_ALIGN (field, TYPE_ALIGN (ptr_type_node));
	      insert_field_into_struct (ctx->record_type, field);
	      /* To not clash with a map of the pointer variable itself,
		 attach/detach maps have their field looked up by the *clause*
		 tree expression, not the decl.  */
	      gcc_assert (!splay_tree_lookup (ctx->field_map,
					      (splay_tree_key) c));
	      splay_tree_insert (ctx->field_map, (splay_tree_key) c,
				 (splay_tree_value) field);
	      break;
	    }
	  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
	      && (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER
		  || (OMP_CLAUSE_MAP_KIND (c)
		      == GOMP_MAP_FIRSTPRIVATE_REFERENCE)))
	    {
	      if (TREE_CODE (decl) == COMPONENT_REF
		  || (TREE_CODE (decl) == INDIRECT_REF
		      && TREE_CODE (TREE_OPERAND (decl, 0)) == COMPONENT_REF
		      && (TREE_CODE (TREE_TYPE (TREE_OPERAND (decl, 0)))
			  == REFERENCE_TYPE)))
		break;
	      if (DECL_SIZE (decl)
		  && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
		{
		  tree decl2 = DECL_VALUE_EXPR (decl);
		  gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
		  decl2 = TREE_OPERAND (decl2, 0);
		  gcc_assert (DECL_P (decl2));
		  install_var_local (decl2, ctx);
		}
	      install_var_local (decl, ctx);
	      break;
	    }
	  if (DECL_P (decl))
	    {
	      if (DECL_SIZE (decl)
		  && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
		{
		  tree decl2 = DECL_VALUE_EXPR (decl);
		  gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
		  decl2 = TREE_OPERAND (decl2, 0);
		  gcc_assert (DECL_P (decl2));
		  install_var_field (decl2, true, 3, ctx);
		  install_var_local (decl2, ctx);
		  install_var_local (decl, ctx);
		}
	      else
		{
		  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
		      && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_POINTER
		      && !OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c)
		      && TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
		    install_var_field (decl, true, 7, ctx);
		  else
		    install_var_field (decl, true, 3, ctx);
		  if (is_gimple_omp_offloaded (ctx->stmt)
		      && !OMP_CLAUSE_MAP_IN_REDUCTION (c))
		    install_var_local (decl, ctx);
		}
	    }
	  else
	    {
	      /* Non-decl map operand (e.g. an array section).  */
	      tree base = get_base_address (decl);
	      tree nc = OMP_CLAUSE_CHAIN (c);
	      if (DECL_P (base)
		  && nc != NULL_TREE
		  && OMP_CLAUSE_CODE (nc) == OMP_CLAUSE_MAP
		  && OMP_CLAUSE_DECL (nc) == base
		  && OMP_CLAUSE_MAP_KIND (nc) == GOMP_MAP_POINTER
		  && integer_zerop (OMP_CLAUSE_SIZE (nc)))
		{
		  OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c) = 1;
		  OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (nc) = 1;
		}
	      else
		{
		  if (ctx->outer)
		    {
		      scan_omp_op (&OMP_CLAUSE_DECL (c), ctx->outer);
		      decl = OMP_CLAUSE_DECL (c);
		    }
		  gcc_assert (!splay_tree_lookup (ctx->field_map,
						  (splay_tree_key) decl));
		  tree field
		    = build_decl (OMP_CLAUSE_LOCATION (c),
				  FIELD_DECL, NULL_TREE, ptr_type_node);
		  SET_DECL_ALIGN (field, TYPE_ALIGN (ptr_type_node));
		  insert_field_into_struct (ctx->record_type, field);
		  splay_tree_insert (ctx->field_map, (splay_tree_key) decl,
				     (splay_tree_value) field);
		}
	    }
	  break;

	case OMP_CLAUSE_ORDER:
	  ctx->order_concurrent = true;
	  break;

	case OMP_CLAUSE_BIND:
	  ctx->loop_p = true;
	  break;

	case OMP_CLAUSE_NOWAIT:
	case OMP_CLAUSE_ORDERED:
	case OMP_CLAUSE_COLLAPSE:
	case OMP_CLAUSE_UNTIED:
	case OMP_CLAUSE_MERGEABLE:
	case OMP_CLAUSE_PROC_BIND:
	case OMP_CLAUSE_SAFELEN:
	case OMP_CLAUSE_SIMDLEN:
	case OMP_CLAUSE_THREADS:
	case OMP_CLAUSE_SIMD:
	case OMP_CLAUSE_NOGROUP:
	case OMP_CLAUSE_DEFAULTMAP:
	case OMP_CLAUSE_ASYNC:
	case OMP_CLAUSE_WAIT:
	case OMP_CLAUSE_GANG:
	case OMP_CLAUSE_WORKER:
	case OMP_CLAUSE_VECTOR:
	case OMP_CLAUSE_INDEPENDENT:
	case OMP_CLAUSE_AUTO:
	case OMP_CLAUSE_SEQ:
	case OMP_CLAUSE_TILE:
	case OMP_CLAUSE__SIMT_:
	case OMP_CLAUSE_DEFAULT:
	case OMP_CLAUSE_NONTEMPORAL:
	case OMP_CLAUSE_IF_PRESENT:
	case OMP_CLAUSE_FINALIZE:
	case OMP_CLAUSE_TASK_REDUCTION:
	case OMP_CLAUSE_ALLOCATE:
	  break;

	case OMP_CLAUSE_ALIGNED:
	  decl = OMP_CLAUSE_DECL (c);
	  if (is_global_var (decl)
	      && TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
	    install_var_local (decl, ctx);
	  break;

	case OMP_CLAUSE__CONDTEMP_:
	  decl = OMP_CLAUSE_DECL (c);
	  if (is_parallel_ctx (ctx))
	    {
	      install_var_field (decl, false, 3, ctx);
	      install_var_local (decl, ctx);
	    }
	  else if (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
		   && gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_SIMD
		   && !OMP_CLAUSE__CONDTEMP__ITER (c))
	    install_var_local (decl, ctx);
	  break;

	case OMP_CLAUSE__CACHE_:
	default:
	  gcc_unreachable ();
	}
    }

  /* Pass 3: fix up decls whose types were remapped during pass 2, and
     note which clauses carry gimple sequences that still need scanning.  */
  for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
    {
      switch (OMP_CLAUSE_CODE (c))
	{
	case OMP_CLAUSE_LASTPRIVATE:
	  /* Let the corresponding firstprivate clause create
	     the variable.  */
	  if (OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c))
	    scan_array_reductions = true;
	  if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
	    break;
	  /* FALLTHRU */

	case OMP_CLAUSE_FIRSTPRIVATE:
	case OMP_CLAUSE_PRIVATE:
	case OMP_CLAUSE_LINEAR:
	case OMP_CLAUSE_IS_DEVICE_PTR:
	  decl = OMP_CLAUSE_DECL (c);
	  if (is_variable_sized (decl))
	    {
	      if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE
		   || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_IS_DEVICE_PTR)
		  && is_gimple_omp_offloaded (ctx->stmt))
		{
		  tree decl2 = DECL_VALUE_EXPR (decl);
		  gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
		  decl2 = TREE_OPERAND (decl2, 0);
		  gcc_assert (DECL_P (decl2));
		  install_var_local (decl2, ctx);
		  fixup_remapped_decl (decl2, ctx, false);
		}
	      install_var_local (decl, ctx);
	    }
	  fixup_remapped_decl (decl, ctx,
			       OMP_CLAUSE_CODE (c) == OMP_CLAUSE_PRIVATE
			       && OMP_CLAUSE_PRIVATE_DEBUG (c));
	  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
	      && OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c))
	    scan_array_reductions = true;
	  break;

	case OMP_CLAUSE_REDUCTION:
	case OMP_CLAUSE_IN_REDUCTION:
	  decl = OMP_CLAUSE_DECL (c);
	  if (TREE_CODE (decl) != MEM_REF)
	    {
	      if (is_variable_sized (decl))
		install_var_local (decl, ctx);
	      fixup_remapped_decl (decl, ctx, false);
	    }
	  if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
	    scan_array_reductions = true;
	  break;

	case OMP_CLAUSE_TASK_REDUCTION:
	  if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
	    scan_array_reductions = true;
	  break;

	case OMP_CLAUSE_SHARED:
	  /* Ignore shared directives in teams construct inside of
	     target construct.  */
	  if (gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS
	      && !is_host_teams_ctx (ctx))
	    break;
	  decl = OMP_CLAUSE_DECL (c);
	  if (is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx)))
	    break;
	  if (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
	    {
	      if (is_global_var (maybe_lookup_decl_in_outer_ctx (decl,
								 ctx->outer)))
		break;
	      bool by_ref = use_pointer_for_field (decl, ctx);
	      install_var_field (decl, by_ref, 11, ctx);
	      break;
	    }
	  fixup_remapped_decl (decl, ctx, false);
	  break;

	case OMP_CLAUSE_MAP:
	  if (!is_gimple_omp_offloaded (ctx->stmt))
	    break;
	  decl = OMP_CLAUSE_DECL (c);
	  if (DECL_P (decl)
	      && ((OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_FIRSTPRIVATE_POINTER
		   && (OMP_CLAUSE_MAP_KIND (c)
		       != GOMP_MAP_FIRSTPRIVATE_REFERENCE))
		  || TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
	      && is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx))
	      && varpool_node::get_create (decl)->offloadable)
	    break;
	  if ((OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ATTACH
	       || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_DETACH)
	      && is_omp_target (ctx->stmt)
	      && !is_gimple_omp_offloaded (ctx->stmt))
	    break;
	  if (DECL_P (decl))
	    {
	      if ((OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_POINTER
		   || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER)
		  && TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE
		  && !COMPLETE_TYPE_P (TREE_TYPE (decl)))
		{
		  tree new_decl = lookup_decl (decl, ctx);
		  TREE_TYPE (new_decl)
		    = remap_type (TREE_TYPE (decl), &ctx->cb);
		}
	      else if (DECL_SIZE (decl)
		       && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
		{
		  tree decl2 = DECL_VALUE_EXPR (decl);
		  gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
		  decl2 = TREE_OPERAND (decl2, 0);
		  gcc_assert (DECL_P (decl2));
		  fixup_remapped_decl (decl2, ctx, false);
		  fixup_remapped_decl (decl, ctx, true);
		}
	      else
		fixup_remapped_decl (decl, ctx, false);
	    }
	  break;

	case OMP_CLAUSE_COPYPRIVATE:
	case OMP_CLAUSE_COPYIN:
	case OMP_CLAUSE_DEFAULT:
	case OMP_CLAUSE_IF:
	case OMP_CLAUSE_NUM_THREADS:
	case OMP_CLAUSE_NUM_TEAMS:
	case OMP_CLAUSE_THREAD_LIMIT:
	case OMP_CLAUSE_DEVICE:
	case OMP_CLAUSE_SCHEDULE:
	case OMP_CLAUSE_DIST_SCHEDULE:
	case OMP_CLAUSE_NOWAIT:
	case OMP_CLAUSE_ORDERED:
	case OMP_CLAUSE_COLLAPSE:
	case OMP_CLAUSE_UNTIED:
	case OMP_CLAUSE_FINAL:
	case OMP_CLAUSE_MERGEABLE:
	case OMP_CLAUSE_PROC_BIND:
	case OMP_CLAUSE_SAFELEN:
	case OMP_CLAUSE_SIMDLEN:
	case OMP_CLAUSE_ALIGNED:
	case OMP_CLAUSE_DEPEND:
	case OMP_CLAUSE_ALLOCATE:
	case OMP_CLAUSE__LOOPTEMP_:
	case OMP_CLAUSE__REDUCTEMP_:
	case OMP_CLAUSE_TO:
	case OMP_CLAUSE_FROM:
	case OMP_CLAUSE_PRIORITY:
	case OMP_CLAUSE_GRAINSIZE:
	case OMP_CLAUSE_NUM_TASKS:
	case OMP_CLAUSE_THREADS:
	case OMP_CLAUSE_SIMD:
	case OMP_CLAUSE_NOGROUP:
	case OMP_CLAUSE_DEFAULTMAP:
	case OMP_CLAUSE_ORDER:
	case OMP_CLAUSE_BIND:
	case OMP_CLAUSE_USE_DEVICE_PTR:
	case OMP_CLAUSE_USE_DEVICE_ADDR:
	case OMP_CLAUSE_NONTEMPORAL:
	case OMP_CLAUSE_ASYNC:
	case OMP_CLAUSE_WAIT:
	case OMP_CLAUSE_NUM_GANGS:
	case OMP_CLAUSE_NUM_WORKERS:
	case OMP_CLAUSE_VECTOR_LENGTH:
	case OMP_CLAUSE_GANG:
	case OMP_CLAUSE_WORKER:
	case OMP_CLAUSE_VECTOR:
	case OMP_CLAUSE_INDEPENDENT:
	case OMP_CLAUSE_AUTO:
	case OMP_CLAUSE_SEQ:
	case OMP_CLAUSE_TILE:
	case OMP_CLAUSE__SIMT_:
	case OMP_CLAUSE_IF_PRESENT:
	case OMP_CLAUSE_FINALIZE:
	case OMP_CLAUSE__CONDTEMP_:
	  break;

	case OMP_CLAUSE__CACHE_:
	default:
	  gcc_unreachable ();
	}
    }

  gcc_checking_assert (!scan_array_reductions
		       || !is_gimple_omp_oacc (ctx->stmt));
  if (scan_array_reductions)
    {
      /* Scan the gimple sequences attached to reduction, lastprivate
	 and linear clauses found above.  */
      for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
	if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
	     || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_IN_REDUCTION
	     || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_TASK_REDUCTION)
	    && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
	  {
	    scan_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c), ctx);
	    scan_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c), ctx);
	  }
	else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
		 && OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c))
	  scan_omp (&OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c), ctx);
	else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
		 && OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c))
	  scan_omp (&OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c), ctx);
    }
}
1844
1845 /* Create a new name for omp child function. Returns an identifier. */
1846
1847 static tree
1848 create_omp_child_function_name (bool task_copy)
1849 {
1850 return clone_function_name_numbered (current_function_decl,
1851 task_copy ? "_omp_cpyfn" : "_omp_fn");
1852 }
1853
1854 /* Return true if CTX may belong to offloaded code: either if current function
1855 is offloaded, or any enclosing context corresponds to a target region. */
1856
1857 static bool
1858 omp_maybe_offloaded_ctx (omp_context *ctx)
1859 {
1860 if (cgraph_node::get (current_function_decl)->offloadable)
1861 return true;
1862 for (; ctx; ctx = ctx->outer)
1863 if (is_gimple_omp_offloaded (ctx->stmt))
1864 return true;
1865 return false;
1866 }
1867
/* Build a decl for the omp child function.  It'll not contain a body
   yet, just the bare decl.  For a task copy function (TASK_COPY) the
   decl takes two pointer arguments, otherwise a single .omp_data_i
   pointer.  */

static void
create_omp_child_function (omp_context *ctx, bool task_copy)
{
  tree decl, type, name, t;

  name = create_omp_child_function_name (task_copy);
  if (task_copy)
    type = build_function_type_list (void_type_node, ptr_type_node,
				     ptr_type_node, NULL_TREE);
  else
    type = build_function_type_list (void_type_node, ptr_type_node, NULL_TREE);

  decl = build_decl (gimple_location (ctx->stmt), FUNCTION_DECL, name, type);

  gcc_checking_assert (!is_gimple_omp_oacc (ctx->stmt)
		       || !task_copy);
  if (!task_copy)
    ctx->cb.dst_fn = decl;
  else
    gimple_omp_task_set_copy_fn (ctx->stmt, decl);

  /* The child function is a local, non-inlinable, artificial function.  */
  TREE_STATIC (decl) = 1;
  TREE_USED (decl) = 1;
  DECL_ARTIFICIAL (decl) = 1;
  DECL_IGNORED_P (decl) = 0;
  TREE_PUBLIC (decl) = 0;
  DECL_UNINLINABLE (decl) = 1;
  DECL_EXTERNAL (decl) = 0;
  DECL_CONTEXT (decl) = NULL_TREE;
  DECL_INITIAL (decl) = make_node (BLOCK);
  BLOCK_SUPERCONTEXT (DECL_INITIAL (decl)) = decl;
  DECL_ATTRIBUTES (decl) = DECL_ATTRIBUTES (current_function_decl);
  /* Remove omp declare simd attribute from the new attributes.  */
  if (tree a = lookup_attribute ("omp declare simd", DECL_ATTRIBUTES (decl)))
    {
      /* Find the last "omp declare simd" attribute; everything before and
	 including it must be filtered, copying shared nodes as needed.  */
      while (tree a2 = lookup_attribute ("omp declare simd", TREE_CHAIN (a)))
	a = a2;
      a = TREE_CHAIN (a);
      for (tree *p = &DECL_ATTRIBUTES (decl); *p != a;)
	if (is_attribute_p ("omp declare simd", get_attribute_name (*p)))
	  *p = TREE_CHAIN (*p);
	else
	  {
	    tree chain = TREE_CHAIN (*p);
	    *p = copy_node (*p);
	    p = &TREE_CHAIN (*p);
	    *p = chain;
	  }
    }
  /* Inherit optimization/target options and versioning state from the
     containing function.  */
  DECL_FUNCTION_SPECIFIC_OPTIMIZATION (decl)
    = DECL_FUNCTION_SPECIFIC_OPTIMIZATION (current_function_decl);
  DECL_FUNCTION_SPECIFIC_TARGET (decl)
    = DECL_FUNCTION_SPECIFIC_TARGET (current_function_decl);
  DECL_FUNCTION_VERSIONED (decl)
    = DECL_FUNCTION_VERSIONED (current_function_decl);

  if (omp_maybe_offloaded_ctx (ctx))
    {
      cgraph_node::get_create (decl)->offloadable = 1;
      if (ENABLE_OFFLOADING)
	g->have_offload = true;
    }

  if (cgraph_node::get_create (decl)->offloadable
      && !lookup_attribute ("omp declare target",
			    DECL_ATTRIBUTES (current_function_decl)))
    {
      const char *target_attr = (is_gimple_omp_offloaded (ctx->stmt)
				 ? "omp target entrypoint"
				 : "omp declare target");
      DECL_ATTRIBUTES (decl)
	= tree_cons (get_identifier (target_attr),
		     NULL_TREE, DECL_ATTRIBUTES (decl));
    }

  /* The function returns void.  */
  t = build_decl (DECL_SOURCE_LOCATION (decl),
		  RESULT_DECL, NULL_TREE, void_type_node);
  DECL_ARTIFICIAL (t) = 1;
  DECL_IGNORED_P (t) = 1;
  DECL_CONTEXT (t) = decl;
  DECL_RESULT (decl) = t;

  /* The incoming data-block pointer; for non-copy functions this is
     also recorded as the context's receiver decl.  */
  tree data_name = get_identifier (".omp_data_i");
  t = build_decl (DECL_SOURCE_LOCATION (decl), PARM_DECL, data_name,
		  ptr_type_node);
  DECL_ARTIFICIAL (t) = 1;
  DECL_NAMELESS (t) = 1;
  DECL_ARG_TYPE (t) = ptr_type_node;
  DECL_CONTEXT (t) = current_function_decl;
  TREE_USED (t) = 1;
  TREE_READONLY (t) = 1;
  DECL_ARGUMENTS (decl) = t;
  if (!task_copy)
    ctx->receiver_decl = t;
  else
    {
      /* Task copy functions take a second, addressable output pointer
	 prepended to the argument list.  */
      t = build_decl (DECL_SOURCE_LOCATION (decl),
		      PARM_DECL, get_identifier (".omp_data_o"),
		      ptr_type_node);
      DECL_ARTIFICIAL (t) = 1;
      DECL_NAMELESS (t) = 1;
      DECL_ARG_TYPE (t) = ptr_type_node;
      DECL_CONTEXT (t) = current_function_decl;
      TREE_USED (t) = 1;
      TREE_ADDRESSABLE (t) = 1;
      DECL_CHAIN (t) = DECL_ARGUMENTS (decl);
      DECL_ARGUMENTS (decl) = t;
    }

  /* Allocate memory for the function structure.  The call to
     allocate_struct_function clobbers CFUN, so we need to restore
     it afterward.  */
  push_struct_function (decl);
  cfun->function_end_locus = gimple_location (ctx->stmt);
  init_tree_ssa (cfun);
  pop_cfun ();
}
1988
/* Callback for walk_gimple_seq.  Check if combined parallel
   contains gimple_omp_for_combined_into_p OMP_FOR.  On entry WI->INFO
   points to the gf_mask loop kind being searched for; on a match it is
   overwritten with the found GIMPLE_OMP_FOR statement and the walk is
   stopped by returning a non-NULL tree.  */

tree
omp_find_combined_for (gimple_stmt_iterator *gsi_p,
		       bool *handled_ops_p,
		       struct walk_stmt_info *wi)
{
  gimple *stmt = gsi_stmt (*gsi_p);

  *handled_ops_p = true;
  switch (gimple_code (stmt))
    {
    WALK_SUBSTMTS;

    case GIMPLE_OMP_FOR:
      if (gimple_omp_for_combined_into_p (stmt)
	  && gimple_omp_for_kind (stmt)
	     == *(const enum gf_mask *) (wi->info))
	{
	  /* Hand the matched statement back through WI->INFO and
	     terminate the walk.  */
	  wi->info = stmt;
	  return integer_zero_node;
	}
      break;
    default:
      break;
    }
  return NULL;
}
2018
/* Add _LOOPTEMP_/_REDUCTEMP_ clauses on OpenMP parallel or task STMT
   (combined with an inner worksharing or taskloop construct), so the
   inner loop's bounds/iteration temporaries can be communicated through
   the taskreg data record.  MSK selects which inner loop kind to look
   for; OUTER_CTX is the enclosing context whose decl map the new
   temporaries are registered in.  */

static void
add_taskreg_looptemp_clauses (enum gf_mask msk, gimple *stmt,
			      omp_context *outer_ctx)
{
  struct walk_stmt_info wi;

  /* Locate the inner combined GIMPLE_OMP_FOR of kind MSK; on success
     omp_find_combined_for replaces wi.info with the statement.  */
  memset (&wi, 0, sizeof (wi));
  wi.val_only = true;
  wi.info = (void *) &msk;
  walk_gimple_seq (gimple_omp_body (stmt), omp_find_combined_for, NULL, &wi);
  if (wi.info != (void *) &msk)
    {
      gomp_for *for_stmt = as_a <gomp_for *> ((gimple *) wi.info);
      struct omp_for_data fd;
      omp_extract_for_data (for_stmt, &fd, NULL);
      /* We need two temporaries with fd.loop.v type (istart/iend)
	 and then (fd.collapse - 1) temporaries with the same
	 type for count2 ... countN-1 vars if not constant.  */
      size_t count = 2, i;
      tree type = fd.iter_type;
      if (fd.collapse > 1
	  && TREE_CODE (fd.loop.n2) != INTEGER_CST)
	{
	  count += fd.collapse - 1;
	  /* If there are lastprivate clauses on the inner
	     GIMPLE_OMP_FOR, add one more temporaries for the total number
	     of iterations (product of count1 ... countN-1).  */
	  if (omp_find_clause (gimple_omp_for_clauses (for_stmt),
			       OMP_CLAUSE_LASTPRIVATE)
	      || (msk == GF_OMP_FOR_KIND_FOR
		  && omp_find_clause (gimple_omp_parallel_clauses (stmt),
				      OMP_CLAUSE_LASTPRIVATE)))
	    {
	      tree temp = create_tmp_var (type);
	      tree c = build_omp_clause (UNKNOWN_LOCATION,
					 OMP_CLAUSE__LOOPTEMP_);
	      insert_decl_map (&outer_ctx->cb, temp, temp);
	      OMP_CLAUSE_DECL (c) = temp;
	      OMP_CLAUSE_CHAIN (c) = gimple_omp_taskreg_clauses (stmt);
	      gimple_omp_taskreg_set_clauses (stmt, c);
	    }
	  /* For a signed triangular (non-rectangular) loop nest, add
	     three extra temporaries of the outer index type plus one
	     more of the iterator type below.  */
	  if (fd.non_rect
	      && fd.last_nonrect == fd.first_nonrect + 1)
	    if (tree v = gimple_omp_for_index (for_stmt, fd.last_nonrect))
	      if (!TYPE_UNSIGNED (TREE_TYPE (v)))
		{
		  v = gimple_omp_for_index (for_stmt, fd.first_nonrect);
		  tree type2 = TREE_TYPE (v);
		  count++;
		  for (i = 0; i < 3; i++)
		    {
		      tree temp = create_tmp_var (type2);
		      tree c = build_omp_clause (UNKNOWN_LOCATION,
						 OMP_CLAUSE__LOOPTEMP_);
		      insert_decl_map (&outer_ctx->cb, temp, temp);
		      OMP_CLAUSE_DECL (c) = temp;
		      OMP_CLAUSE_CHAIN (c) = gimple_omp_taskreg_clauses (stmt);
		      gimple_omp_taskreg_set_clauses (stmt, c);
		    }
		}
	}
      /* Prepend COUNT _LOOPTEMP_ clauses of the iterator type.  */
      for (i = 0; i < count; i++)
	{
	  tree temp = create_tmp_var (type);
	  tree c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__LOOPTEMP_);
	  insert_decl_map (&outer_ctx->cb, temp, temp);
	  OMP_CLAUSE_DECL (c) = temp;
	  OMP_CLAUSE_CHAIN (c) = gimple_omp_taskreg_clauses (stmt);
	  gimple_omp_taskreg_set_clauses (stmt, c);
	}
    }
  /* A taskloop with reductions additionally gets a _REDUCTEMP_ clause
     holding a pointer-sized temporary.  */
  if (msk == GF_OMP_FOR_KIND_TASKLOOP
      && omp_find_clause (gimple_omp_task_clauses (stmt),
			  OMP_CLAUSE_REDUCTION))
    {
      tree type = build_pointer_type (pointer_sized_int_node);
      tree temp = create_tmp_var (type);
      tree c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__REDUCTEMP_);
      insert_decl_map (&outer_ctx->cb, temp, temp);
      OMP_CLAUSE_DECL (c) = temp;
      OMP_CLAUSE_CHAIN (c) = gimple_omp_task_clauses (stmt);
      gimple_omp_task_set_clauses (stmt, c);
    }
}
2105
/* Scan an OpenMP parallel directive.  Creates the omp_context, the
   .omp_data_s record type used to pass shared data, and the outlined
   child function, then scans the clauses and body.  */

static void
scan_omp_parallel (gimple_stmt_iterator *gsi, omp_context *outer_ctx)
{
  omp_context *ctx;
  tree name;
  gomp_parallel *stmt = as_a <gomp_parallel *> (gsi_stmt (*gsi));

  /* Ignore parallel directives with empty bodies, unless there
     are copyin clauses.  */
  if (optimize > 0
      && empty_body_p (gimple_omp_body (stmt))
      && omp_find_clause (gimple_omp_parallel_clauses (stmt),
			  OMP_CLAUSE_COPYIN) == NULL)
    {
      gsi_replace (gsi, gimple_build_nop (), false);
      return;
    }

  if (gimple_omp_parallel_combined_p (stmt))
    add_taskreg_looptemp_clauses (GF_OMP_FOR_KIND_FOR, stmt, outer_ctx);
  /* If any reduction clause is a task reduction, prepend a _REDUCTEMP_
     clause with a fresh pointer-sized temporary.  */
  for (tree c = omp_find_clause (gimple_omp_parallel_clauses (stmt),
				 OMP_CLAUSE_REDUCTION);
       c; c = omp_find_clause (OMP_CLAUSE_CHAIN (c), OMP_CLAUSE_REDUCTION))
    if (OMP_CLAUSE_REDUCTION_TASK (c))
      {
	tree type = build_pointer_type (pointer_sized_int_node);
	tree temp = create_tmp_var (type);
	tree c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__REDUCTEMP_);
	if (outer_ctx)
	  insert_decl_map (&outer_ctx->cb, temp, temp);
	OMP_CLAUSE_DECL (c) = temp;
	OMP_CLAUSE_CHAIN (c) = gimple_omp_parallel_clauses (stmt);
	gimple_omp_parallel_set_clauses (stmt, c);
	break;
      }
    else if (OMP_CLAUSE_CHAIN (c) == NULL_TREE)
      break;

  ctx = new_omp_context (stmt, outer_ctx);
  taskreg_contexts.safe_push (ctx);
  if (taskreg_nesting_level > 1)
    ctx->is_nested = true;
  ctx->field_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
  /* Build the artificial .omp_data_s record that will carry the shared
     variables into the child function.  */
  ctx->record_type = lang_hooks.types.make_type (RECORD_TYPE);
  name = create_tmp_var_name (".omp_data_s");
  name = build_decl (gimple_location (stmt),
		     TYPE_DECL, name, ctx->record_type);
  DECL_ARTIFICIAL (name) = 1;
  DECL_NAMELESS (name) = 1;
  TYPE_NAME (ctx->record_type) = name;
  TYPE_ARTIFICIAL (ctx->record_type) = 1;
  create_omp_child_function (ctx, false);
  gimple_omp_parallel_set_child_fn (stmt, ctx->cb.dst_fn);

  scan_sharing_clauses (gimple_omp_parallel_clauses (stmt), ctx);
  scan_omp (gimple_omp_body_ptr (stmt), ctx);

  /* If nothing was added to the record, no receiver argument is
     needed in the child function.  */
  if (TYPE_FIELDS (ctx->record_type) == NULL)
    ctx->record_type = ctx->receiver_decl = NULL;
}
2168
/* Scan an OpenMP task directive.  Like scan_omp_parallel, but also
   handles the optional sender-side record (.omp_data_a) with its
   task_copy child function, and sets arg_size/arg_align when the
   record ends up empty.  */

static void
scan_omp_task (gimple_stmt_iterator *gsi, omp_context *outer_ctx)
{
  omp_context *ctx;
  tree name, t;
  gomp_task *stmt = as_a <gomp_task *> (gsi_stmt (*gsi));

  /* Ignore task directives with empty bodies, unless they have depend
     clause.  */
  if (optimize > 0
      && gimple_omp_body (stmt)
      && empty_body_p (gimple_omp_body (stmt))
      && !omp_find_clause (gimple_omp_task_clauses (stmt), OMP_CLAUSE_DEPEND))
    {
      gsi_replace (gsi, gimple_build_nop (), false);
      return;
    }

  if (gimple_omp_task_taskloop_p (stmt))
    add_taskreg_looptemp_clauses (GF_OMP_FOR_KIND_TASKLOOP, stmt, outer_ctx);

  ctx = new_omp_context (stmt, outer_ctx);

  if (gimple_omp_task_taskwait_p (stmt))
    {
      /* A taskwait needs no outlined function or data record; just
	 scan the clauses and return.  */
      scan_sharing_clauses (gimple_omp_task_clauses (stmt), ctx);
      return;
    }

  taskreg_contexts.safe_push (ctx);
  if (taskreg_nesting_level > 1)
    ctx->is_nested = true;
  ctx->field_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
  ctx->record_type = lang_hooks.types.make_type (RECORD_TYPE);
  name = create_tmp_var_name (".omp_data_s");
  name = build_decl (gimple_location (stmt),
		     TYPE_DECL, name, ctx->record_type);
  DECL_ARTIFICIAL (name) = 1;
  DECL_NAMELESS (name) = 1;
  TYPE_NAME (ctx->record_type) = name;
  TYPE_ARTIFICIAL (ctx->record_type) = 1;
  create_omp_child_function (ctx, false);
  gimple_omp_task_set_child_fn (stmt, ctx->cb.dst_fn);

  scan_sharing_clauses (gimple_omp_task_clauses (stmt), ctx);

  if (ctx->srecord_type)
    {
      /* scan_sharing_clauses created a separate sender-side record;
	 name it .omp_data_a and build the task_copy variant of the
	 child function for it.  */
      name = create_tmp_var_name (".omp_data_a");
      name = build_decl (gimple_location (stmt),
			 TYPE_DECL, name, ctx->srecord_type);
      DECL_ARTIFICIAL (name) = 1;
      DECL_NAMELESS (name) = 1;
      TYPE_NAME (ctx->srecord_type) = name;
      TYPE_ARTIFICIAL (ctx->srecord_type) = 1;
      create_omp_child_function (ctx, true);
    }

  scan_omp (gimple_omp_body_ptr (stmt), ctx);

  if (TYPE_FIELDS (ctx->record_type) == NULL)
    {
      /* Empty data record: drop it and tell the runtime to allocate
	 a zero-sized, byte-aligned argument block.  */
      ctx->record_type = ctx->receiver_decl = NULL;
      t = build_int_cst (long_integer_type_node, 0);
      gimple_omp_task_set_arg_size (stmt, t);
      t = build_int_cst (long_integer_type_node, 1);
      gimple_omp_task_set_arg_align (stmt, t);
    }
}
2240
2241 /* Helper function for finish_taskreg_scan, called through walk_tree.
2242 If maybe_lookup_decl_in_outer_context returns non-NULL for some
2243 tree, replace it in the expression. */
2244
2245 static tree
2246 finish_taskreg_remap (tree *tp, int *walk_subtrees, void *data)
2247 {
2248 if (VAR_P (*tp))
2249 {
2250 omp_context *ctx = (omp_context *) data;
2251 tree t = maybe_lookup_decl_in_outer_ctx (*tp, ctx);
2252 if (t != *tp)
2253 {
2254 if (DECL_HAS_VALUE_EXPR_P (t))
2255 t = unshare_expr (DECL_VALUE_EXPR (t));
2256 *tp = t;
2257 }
2258 *walk_subtrees = 0;
2259 }
2260 else if (IS_TYPE_OR_DECL_P (*tp))
2261 *walk_subtrees = 0;
2262 return NULL_TREE;
2263 }
2264
/* If any decls have been made addressable during scan_omp,
   adjust their fields if needed, and layout record types
   of parallel/task constructs.  */

static void
finish_taskreg_scan (omp_context *ctx)
{
  if (ctx->record_type == NULL_TREE)
    return;

  /* If any task_shared_vars were needed, verify all
     OMP_CLAUSE_SHARED clauses on GIMPLE_OMP_{PARALLEL,TASK,TEAMS}
     statements if use_pointer_for_field hasn't changed
     because of that.  If it did, update field types now.  */
  if (task_shared_vars)
    {
      tree c;

      for (c = gimple_omp_taskreg_clauses (ctx->stmt);
	   c; c = OMP_CLAUSE_CHAIN (c))
	if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED
	    && !OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
	  {
	    tree decl = OMP_CLAUSE_DECL (c);

	    /* Global variables don't need to be copied,
	       the receiver side will use them directly.  */
	    if (is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx)))
	      continue;
	    if (!bitmap_bit_p (task_shared_vars, DECL_UID (decl))
		|| !use_pointer_for_field (decl, ctx))
	      continue;
	    tree field = lookup_field (decl, ctx);
	    /* Already a pointer to the decl's type: nothing to do.  */
	    if (TREE_CODE (TREE_TYPE (field)) == POINTER_TYPE
		&& TREE_TYPE (TREE_TYPE (field)) == TREE_TYPE (decl))
	      continue;
	    /* Switch the field to pass-by-reference and re-derive its
	       alignment (and the record's) from the pointer type.  */
	    TREE_TYPE (field) = build_pointer_type (TREE_TYPE (decl));
	    TREE_THIS_VOLATILE (field) = 0;
	    DECL_USER_ALIGN (field) = 0;
	    SET_DECL_ALIGN (field, TYPE_ALIGN (TREE_TYPE (field)));
	    if (TYPE_ALIGN (ctx->record_type) < DECL_ALIGN (field))
	      SET_TYPE_ALIGN (ctx->record_type, DECL_ALIGN (field));
	    if (ctx->srecord_type)
	      {
		/* Keep the sender-side field in sync.  */
		tree sfield = lookup_sfield (decl, ctx);
		TREE_TYPE (sfield) = TREE_TYPE (field);
		TREE_THIS_VOLATILE (sfield) = 0;
		DECL_USER_ALIGN (sfield) = 0;
		SET_DECL_ALIGN (sfield, DECL_ALIGN (field));
		if (TYPE_ALIGN (ctx->srecord_type) < DECL_ALIGN (sfield))
		  SET_TYPE_ALIGN (ctx->srecord_type, DECL_ALIGN (sfield));
	      }
	  }
    }

  if (gimple_code (ctx->stmt) == GIMPLE_OMP_PARALLEL)
    {
      tree clauses = gimple_omp_parallel_clauses (ctx->stmt);
      tree c = omp_find_clause (clauses, OMP_CLAUSE__REDUCTEMP_);
      if (c)
	{
	  /* Move the _reductemp_ clause first.  GOMP_parallel_reductions
	     expects to find it at the start of data.  */
	  tree f = lookup_field (OMP_CLAUSE_DECL (c), ctx);
	  tree *p = &TYPE_FIELDS (ctx->record_type);
	  while (*p)
	    if (*p == f)
	      {
		*p = DECL_CHAIN (*p);
		break;
	      }
	    else
	      p = &DECL_CHAIN (*p);
	  DECL_CHAIN (f) = TYPE_FIELDS (ctx->record_type);
	  TYPE_FIELDS (ctx->record_type) = f;
	}
      layout_type (ctx->record_type);
      fixup_child_record_type (ctx);
    }
  else if (gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS)
    {
      layout_type (ctx->record_type);
      fixup_child_record_type (ctx);
    }
  else
    {
      /* GIMPLE_OMP_TASK case.  */
      location_t loc = gimple_location (ctx->stmt);
      tree *p, vla_fields = NULL_TREE, *q = &vla_fields;
      /* Move VLA fields to the end.  */
      p = &TYPE_FIELDS (ctx->record_type);
      while (*p)
	if (!TYPE_SIZE_UNIT (TREE_TYPE (*p))
	    || ! TREE_CONSTANT (TYPE_SIZE_UNIT (TREE_TYPE (*p))))
	  {
	    /* Splice *p out of the chain and append it to vla_fields.  */
	    *q = *p;
	    *p = TREE_CHAIN (*p);
	    TREE_CHAIN (*q) = NULL_TREE;
	    q = &TREE_CHAIN (*q);
	  }
	else
	  p = &DECL_CHAIN (*p);
      *p = vla_fields;
      if (gimple_omp_task_taskloop_p (ctx->stmt))
	{
	  /* Move fields corresponding to first and second _looptemp_
	     clause first.  There are filled by GOMP_taskloop
	     and thus need to be in specific positions.  */
	  tree clauses = gimple_omp_task_clauses (ctx->stmt);
	  tree c1 = omp_find_clause (clauses, OMP_CLAUSE__LOOPTEMP_);
	  tree c2 = omp_find_clause (OMP_CLAUSE_CHAIN (c1),
				     OMP_CLAUSE__LOOPTEMP_);
	  tree c3 = omp_find_clause (clauses, OMP_CLAUSE__REDUCTEMP_);
	  tree f1 = lookup_field (OMP_CLAUSE_DECL (c1), ctx);
	  tree f2 = lookup_field (OMP_CLAUSE_DECL (c2), ctx);
	  tree f3 = c3 ? lookup_field (OMP_CLAUSE_DECL (c3), ctx) : NULL_TREE;
	  /* Unlink f1/f2/f3 from the chain, then relink them at the
	     head in order f1, f2[, f3].  */
	  p = &TYPE_FIELDS (ctx->record_type);
	  while (*p)
	    if (*p == f1 || *p == f2 || *p == f3)
	      *p = DECL_CHAIN (*p);
	    else
	      p = &DECL_CHAIN (*p);
	  DECL_CHAIN (f1) = f2;
	  if (c3)
	    {
	      DECL_CHAIN (f2) = f3;
	      DECL_CHAIN (f3) = TYPE_FIELDS (ctx->record_type);
	    }
	  else
	    DECL_CHAIN (f2) = TYPE_FIELDS (ctx->record_type);
	  TYPE_FIELDS (ctx->record_type) = f1;
	  if (ctx->srecord_type)
	    {
	      /* Mirror the same reordering in the sender-side record.  */
	      f1 = lookup_sfield (OMP_CLAUSE_DECL (c1), ctx);
	      f2 = lookup_sfield (OMP_CLAUSE_DECL (c2), ctx);
	      if (c3)
		f3 = lookup_sfield (OMP_CLAUSE_DECL (c3), ctx);
	      p = &TYPE_FIELDS (ctx->srecord_type);
	      while (*p)
		if (*p == f1 || *p == f2 || *p == f3)
		  *p = DECL_CHAIN (*p);
		else
		  p = &DECL_CHAIN (*p);
	      DECL_CHAIN (f1) = f2;
	      DECL_CHAIN (f2) = TYPE_FIELDS (ctx->srecord_type);
	      if (c3)
		{
		  DECL_CHAIN (f2) = f3;
		  DECL_CHAIN (f3) = TYPE_FIELDS (ctx->srecord_type);
		}
	      else
		DECL_CHAIN (f2) = TYPE_FIELDS (ctx->srecord_type);
	      TYPE_FIELDS (ctx->srecord_type) = f1;
	    }
	}
      layout_type (ctx->record_type);
      fixup_child_record_type (ctx);
      if (ctx->srecord_type)
	layout_type (ctx->srecord_type);
      /* Record the size and alignment of the data block on the task
	 statement.  A non-constant size (VLA fields) must be remapped
	 to refer to decls valid in the outer context.  */
      tree t = fold_convert_loc (loc, long_integer_type_node,
				 TYPE_SIZE_UNIT (ctx->record_type));
      if (TREE_CODE (t) != INTEGER_CST)
	{
	  t = unshare_expr (t);
	  walk_tree (&t, finish_taskreg_remap, ctx, NULL);
	}
      gimple_omp_task_set_arg_size (ctx->stmt, t);
      t = build_int_cst (long_integer_type_node,
			 TYPE_ALIGN_UNIT (ctx->record_type));
      gimple_omp_task_set_arg_align (ctx->stmt, t);
    }
}
2436
2437 /* Find the enclosing offload context. */
2438
2439 static omp_context *
2440 enclosing_target_ctx (omp_context *ctx)
2441 {
2442 for (; ctx; ctx = ctx->outer)
2443 if (gimple_code (ctx->stmt) == GIMPLE_OMP_TARGET)
2444 break;
2445
2446 return ctx;
2447 }
2448
2449 /* Return whether CTX's parent compute construct is an OpenACC 'kernels'
2450 construct.
2451 (This doesn't include OpenACC 'kernels' decomposed parts.) */
2452
2453 static bool
2454 ctx_in_oacc_kernels_region (omp_context *ctx)
2455 {
2456 for (;ctx != NULL; ctx = ctx->outer)
2457 {
2458 gimple *stmt = ctx->stmt;
2459 if (gimple_code (stmt) == GIMPLE_OMP_TARGET
2460 && gimple_omp_target_kind (stmt) == GF_OMP_TARGET_KIND_OACC_KERNELS)
2461 return true;
2462 }
2463
2464 return false;
2465 }
2466
/* Check the parallelism clauses inside a OpenACC 'kernels' region.
   (This doesn't include OpenACC 'kernels' decomposed parts.)
   Until kernels handling moves to use the same loop indirection
   scheme as parallel, we need to do this checking early.
   Returns the union of the gang/worker/vector dimension masks used
   by STMT's loop and all enclosing loops.  When STMT is NULL (the
   recursive invocations), only the masks are collected and no
   diagnostics are emitted.  */

static unsigned
check_oacc_kernel_gwv (gomp_for *stmt, omp_context *ctx)
{
  bool checking = true;
  unsigned outer_mask = 0;
  unsigned this_mask = 0;
  bool has_seq = false, has_auto = false;

  /* First collect the parallelism already used by enclosing loops.  */
  if (ctx->outer)
    outer_mask = check_oacc_kernel_gwv (NULL, ctx->outer);
  if (!stmt)
    {
      checking = false;
      if (gimple_code (ctx->stmt) != GIMPLE_OMP_FOR)
	return outer_mask;
      stmt = as_a <gomp_for *> (ctx->stmt);
    }

  /* Gather this loop's gang/worker/vector/seq/auto clauses.  */
  for (tree c = gimple_omp_for_clauses (stmt); c; c = OMP_CLAUSE_CHAIN (c))
    {
      switch (OMP_CLAUSE_CODE (c))
	{
	case OMP_CLAUSE_GANG:
	  this_mask |= GOMP_DIM_MASK (GOMP_DIM_GANG);
	  break;
	case OMP_CLAUSE_WORKER:
	  this_mask |= GOMP_DIM_MASK (GOMP_DIM_WORKER);
	  break;
	case OMP_CLAUSE_VECTOR:
	  this_mask |= GOMP_DIM_MASK (GOMP_DIM_VECTOR);
	  break;
	case OMP_CLAUSE_SEQ:
	  has_seq = true;
	  break;
	case OMP_CLAUSE_AUTO:
	  has_auto = true;
	  break;
	default:
	  break;
	}
    }

  if (checking)
    {
      /* seq/auto conflict with explicit parallelism clauses, and a
	 loop may not reuse a dimension of an enclosing loop.  */
      if (has_seq && (this_mask || has_auto))
	error_at (gimple_location (stmt), "%<seq%> overrides other"
		  " OpenACC loop specifiers");
      else if (has_auto && this_mask)
	error_at (gimple_location (stmt), "%<auto%> conflicts with other"
		  " OpenACC loop specifiers");

      if (this_mask & outer_mask)
	error_at (gimple_location (stmt), "inner loop uses same"
		  " OpenACC parallelism as containing loop");
    }

  return outer_mask | this_mask;
}
2530
/* Scan a GIMPLE_OMP_FOR.  Creates and returns the new omp_context.
   For OpenACC loops this additionally diagnoses invalid gang/worker/
   vector clause arguments, checks 'kernels' parallelism nesting, and
   validates/strips reduction clauses.  */

static omp_context *
scan_omp_for (gomp_for *stmt, omp_context *outer_ctx)
{
  omp_context *ctx;
  size_t i;
  tree clauses = gimple_omp_for_clauses (stmt);

  ctx = new_omp_context (stmt, outer_ctx);

  if (is_gimple_omp_oacc (stmt))
    {
      omp_context *tgt = enclosing_target_ctx (outer_ctx);

      /* Outside of 'kernels', gang/worker/vector clauses may not carry
	 an argument expression on loops; diagnose if one is present.  */
      if (!(tgt && is_oacc_kernels (tgt)))
	for (tree c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
	  {
	    tree c_op0;
	    switch (OMP_CLAUSE_CODE (c))
	      {
	      case OMP_CLAUSE_GANG:
		c_op0 = OMP_CLAUSE_GANG_EXPR (c);
		break;

	      case OMP_CLAUSE_WORKER:
		c_op0 = OMP_CLAUSE_WORKER_EXPR (c);
		break;

	      case OMP_CLAUSE_VECTOR:
		c_op0 = OMP_CLAUSE_VECTOR_EXPR (c);
		break;

	      default:
		continue;
	      }

	    if (c_op0)
	      {
		/* By construction, this is impossible for OpenACC 'kernels'
		   decomposed parts.  */
		gcc_assert (!(tgt && is_oacc_kernels_decomposed_part (tgt)));

		error_at (OMP_CLAUSE_LOCATION (c),
			  "argument not permitted on %qs clause",
			  omp_clause_code_name[OMP_CLAUSE_CODE (c)]);
		if (tgt)
		  inform (gimple_location (tgt->stmt),
			  "enclosing parent compute construct");
		else if (oacc_get_fn_attrib (current_function_decl))
		  inform (DECL_SOURCE_LOCATION (current_function_decl),
			  "enclosing routine");
		else
		  gcc_unreachable ();
	      }
	  }

      if (tgt && is_oacc_kernels (tgt))
	check_oacc_kernel_gwv (stmt, ctx);

      /* Collect all variables named in reductions on this loop.  Ensure
	 that, if this loop has a reduction on some variable v, and there is
	 a reduction on v somewhere in an outer context, then there is a
	 reduction on v on all intervening loops as well.  */
      tree local_reduction_clauses = NULL;
      for (tree c = gimple_omp_for_clauses (stmt); c; c = OMP_CLAUSE_CHAIN (c))
	{
	  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION)
	    local_reduction_clauses
	      = tree_cons (NULL, c, local_reduction_clauses);
	}
      /* Lazily inherit the outer context's accumulated reduction list.  */
      if (ctx->outer_reduction_clauses == NULL && ctx->outer != NULL)
	ctx->outer_reduction_clauses
	  = chainon (unshare_expr (ctx->outer->local_reduction_clauses),
		     ctx->outer->outer_reduction_clauses);
      tree outer_reduction_clauses = ctx->outer_reduction_clauses;
      tree local_iter = local_reduction_clauses;
      for (; local_iter; local_iter = TREE_CHAIN (local_iter))
	{
	  tree local_clause = TREE_VALUE (local_iter);
	  tree local_var = OMP_CLAUSE_DECL (local_clause);
	  tree_code local_op = OMP_CLAUSE_REDUCTION_CODE (local_clause);
	  bool have_outer_reduction = false;
	  tree ctx_iter = outer_reduction_clauses;
	  for (; ctx_iter; ctx_iter = TREE_CHAIN (ctx_iter))
	    {
	      tree outer_clause = TREE_VALUE (ctx_iter);
	      tree outer_var = OMP_CLAUSE_DECL (outer_clause);
	      tree_code outer_op = OMP_CLAUSE_REDUCTION_CODE (outer_clause);
	      /* Same variable reduced with a different operation in an
		 enclosing loop: warn.  */
	      if (outer_var == local_var && outer_op != local_op)
		{
		  warning_at (OMP_CLAUSE_LOCATION (local_clause), 0,
			      "conflicting reduction operations for %qE",
			      local_var);
		  inform (OMP_CLAUSE_LOCATION (outer_clause),
			  "location of the previous reduction for %qE",
			  outer_var);
		}
	      if (outer_var == local_var)
		{
		  have_outer_reduction = true;
		  break;
		}
	    }
	  if (have_outer_reduction)
	    {
	      /* There is a reduction on outer_var both on this loop and on
		 some enclosing loop.  Walk up the context tree until such a
		 loop with a reduction on outer_var is found, and complain
		 about all intervening loops that do not have such a
		 reduction.  */
	      struct omp_context *curr_loop = ctx->outer;
	      bool found = false;
	      while (curr_loop != NULL)
		{
		  tree curr_iter = curr_loop->local_reduction_clauses;
		  for (; curr_iter; curr_iter = TREE_CHAIN (curr_iter))
		    {
		      tree curr_clause = TREE_VALUE (curr_iter);
		      tree curr_var = OMP_CLAUSE_DECL (curr_clause);
		      if (curr_var == local_var)
			{
			  found = true;
			  break;
			}
		    }
		  if (!found)
		    warning_at (gimple_location (curr_loop->stmt), 0,
				"nested loop in reduction needs "
				"reduction clause for %qE",
				local_var);
		  else
		    break;
		  curr_loop = curr_loop->outer;
		}
	    }
	}
      /* Publish this loop's reductions for inner loops to inherit.  */
      ctx->local_reduction_clauses = local_reduction_clauses;
      ctx->outer_reduction_clauses
	= chainon (unshare_expr (ctx->local_reduction_clauses),
		   ctx->outer_reduction_clauses);

      if (tgt && is_oacc_kernels (tgt))
	{
	  /* Strip out reductions, as they are not handled yet.  */
	  tree *prev_ptr = &clauses;

	  while (tree probe = *prev_ptr)
	    {
	      tree *next_ptr = &OMP_CLAUSE_CHAIN (probe);

	      if (OMP_CLAUSE_CODE (probe) == OMP_CLAUSE_REDUCTION)
		*prev_ptr = *next_ptr;
	      else
		prev_ptr = next_ptr;
	    }

	  gimple_omp_for_set_clauses (stmt, clauses);
	}
    }

  scan_sharing_clauses (clauses, ctx);

  /* Scan the pre-body and each dimension's index/bounds/step, then
     the loop body, all within the new context.  */
  scan_omp (gimple_omp_for_pre_body_ptr (stmt), ctx);
  for (i = 0; i < gimple_omp_for_collapse (stmt); i++)
    {
      scan_omp_op (gimple_omp_for_index_ptr (stmt, i), ctx);
      scan_omp_op (gimple_omp_for_initial_ptr (stmt, i), ctx);
      scan_omp_op (gimple_omp_for_final_ptr (stmt, i), ctx);
      scan_omp_op (gimple_omp_for_incr_ptr (stmt, i), ctx);
    }
  scan_omp (gimple_omp_body_ptr (stmt), ctx);
  return ctx;
}
2705
/* Duplicate #pragma omp simd, one for SIMT, another one for SIMD.
   Replaces STMT at *GSI with a bind of the shape:

     cond = IFN_GOMP_USE_SIMT ();
     if (cond != 0) goto lab1; else goto lab2;
     lab1: <copy of the loop with an added _simt_ clause>; goto lab3;
     lab2: <original loop>;
     lab3:

   then scans both copies, linking the SIMT copy to the original via
   simt_stmt.  */

static void
scan_omp_simd (gimple_stmt_iterator *gsi, gomp_for *stmt,
	       omp_context *outer_ctx)
{
  gbind *bind = gimple_build_bind (NULL, NULL, NULL);
  gsi_replace (gsi, bind, false);
  gimple_seq seq = NULL;
  /* cond = IFN_GOMP_USE_SIMT () selects between the two copies at
     run/offload time.  */
  gimple *g = gimple_build_call_internal (IFN_GOMP_USE_SIMT, 0);
  tree cond = create_tmp_var_raw (integer_type_node);
  DECL_CONTEXT (cond) = current_function_decl;
  DECL_SEEN_IN_BIND_EXPR_P (cond) = 1;
  gimple_bind_set_vars (bind, cond);
  gimple_call_set_lhs (g, cond);
  gimple_seq_add_stmt (&seq, g);
  tree lab1 = create_artificial_label (UNKNOWN_LOCATION);
  tree lab2 = create_artificial_label (UNKNOWN_LOCATION);
  tree lab3 = create_artificial_label (UNKNOWN_LOCATION);
  g = gimple_build_cond (NE_EXPR, cond, integer_zero_node, lab1, lab2);
  gimple_seq_add_stmt (&seq, g);
  g = gimple_build_label (lab1);
  gimple_seq_add_stmt (&seq, g);
  /* The SIMT copy is a deep copy of the loop with a _simt_ clause
     prepended.  */
  gimple_seq new_seq = copy_gimple_seq_and_replace_locals (stmt);
  gomp_for *new_stmt = as_a <gomp_for *> (new_seq);
  tree clause = build_omp_clause (gimple_location (stmt), OMP_CLAUSE__SIMT_);
  OMP_CLAUSE_CHAIN (clause) = gimple_omp_for_clauses (new_stmt);
  gimple_omp_for_set_clauses (new_stmt, clause);
  gimple_seq_add_stmt (&seq, new_stmt);
  g = gimple_build_goto (lab3);
  gimple_seq_add_stmt (&seq, g);
  g = gimple_build_label (lab2);
  gimple_seq_add_stmt (&seq, g);
  gimple_seq_add_stmt (&seq, stmt);
  g = gimple_build_label (lab3);
  gimple_seq_add_stmt (&seq, g);
  gimple_bind_set_body (bind, seq);
  update_stmt (bind);
  scan_omp_for (new_stmt, outer_ctx);
  scan_omp_for (stmt, outer_ctx)->simt_stmt = new_stmt;
}
2747
2748 static tree omp_find_scan (gimple_stmt_iterator *, bool *,
2749 struct walk_stmt_info *);
2750 static omp_context *maybe_lookup_ctx (gimple *);
2751
/* Duplicate #pragma omp simd, one for the scan input phase loop and one
   for scan phase loop.  STMT becomes the input phase loop, wrapped in a
   GIMPLE_OMP_SCAN; a deep copy becomes the scan phase loop in a second
   GIMPLE_OMP_SCAN carrying an inclusive clause.  */

static void
scan_omp_simd_scan (gimple_stmt_iterator *gsi, gomp_for *stmt,
		    omp_context *outer_ctx)
{
  /* The only change between inclusive and exclusive scan will be
     within the first simd loop, so just use inclusive in the
     worksharing loop.  */
  outer_ctx->scan_inclusive = true;
  tree c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE_INCLUSIVE);
  OMP_CLAUSE_DECL (c) = integer_zero_node;

  /* Replace STMT with an input-phase GIMPLE_OMP_SCAN wrapping it, and
     insert a scan-phase GIMPLE_OMP_SCAN right after.  */
  gomp_scan *input_stmt = gimple_build_omp_scan (NULL, NULL_TREE);
  gomp_scan *scan_stmt = gimple_build_omp_scan (NULL, c);
  gsi_replace (gsi, input_stmt, false);
  gimple_seq input_body = NULL;
  gimple_seq_add_stmt (&input_body, stmt);
  gsi_insert_after (gsi, scan_stmt, GSI_NEW_STMT);

  /* Find the inner GIMPLE_OMP_SCAN pair inside the original loop body.  */
  gimple_stmt_iterator input1_gsi = gsi_none ();
  struct walk_stmt_info wi;
  memset (&wi, 0, sizeof (wi));
  wi.val_only = true;
  wi.info = (void *) &input1_gsi;
  walk_gimple_seq_mod (gimple_omp_body_ptr (stmt), omp_find_scan, NULL, &wi);
  gcc_assert (!gsi_end_p (input1_gsi));

  gimple *input_stmt1 = gsi_stmt (input1_gsi);
  gsi_next (&input1_gsi);
  gimple *scan_stmt1 = gsi_stmt (input1_gsi);
  gcc_assert (scan_stmt1 && gimple_code (scan_stmt1) == GIMPLE_OMP_SCAN);
  c = gimple_omp_scan_clauses (as_a <gomp_scan *> (scan_stmt1));
  /* For exclusive scan the roles of the two inner scans are swapped.  */
  if (c && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_EXCLUSIVE)
    std::swap (input_stmt1, scan_stmt1);

  /* Temporarily detach the input phase body so the deep copy below
     copies only what the scan phase loop needs, then restore it.  */
  gimple_seq input_body1 = gimple_omp_body (input_stmt1);
  gimple_omp_set_body (input_stmt1, NULL);

  gimple_seq scan_body = copy_gimple_seq_and_replace_locals (stmt);
  gomp_for *new_stmt = as_a <gomp_for *> (scan_body);

  gimple_omp_set_body (input_stmt1, input_body1);
  gimple_omp_set_body (scan_stmt1, NULL);

  /* Locate the corresponding inner scan pair inside the copy and clear
     its input phase body.  */
  gimple_stmt_iterator input2_gsi = gsi_none ();
  memset (&wi, 0, sizeof (wi));
  wi.val_only = true;
  wi.info = (void *) &input2_gsi;
  walk_gimple_seq_mod (gimple_omp_body_ptr (new_stmt), omp_find_scan,
		       NULL, &wi);
  gcc_assert (!gsi_end_p (input2_gsi));

  gimple *input_stmt2 = gsi_stmt (input2_gsi);
  gsi_next (&input2_gsi);
  gimple *scan_stmt2 = gsi_stmt (input2_gsi);
  gcc_assert (scan_stmt2 && gimple_code (scan_stmt2) == GIMPLE_OMP_SCAN);
  if (c && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_EXCLUSIVE)
    std::swap (input_stmt2, scan_stmt2);

  gimple_omp_set_body (input_stmt2, NULL);

  gimple_omp_set_body (input_stmt, input_body);
  gimple_omp_set_body (scan_stmt, scan_body);

  /* Scan both phases in their own contexts.  */
  omp_context *ctx = new_omp_context (input_stmt, outer_ctx);
  scan_omp (gimple_omp_body_ptr (input_stmt), ctx);

  ctx = new_omp_context (scan_stmt, outer_ctx);
  scan_omp (gimple_omp_body_ptr (scan_stmt), ctx);

  maybe_lookup_ctx (new_stmt)->for_simd_scan_phase = true;
}
2826
2827 /* Scan an OpenMP sections directive. */
2828
2829 static void
2830 scan_omp_sections (gomp_sections *stmt, omp_context *outer_ctx)
2831 {
2832 omp_context *ctx;
2833
2834 ctx = new_omp_context (stmt, outer_ctx);
2835 scan_sharing_clauses (gimple_omp_sections_clauses (stmt), ctx);
2836 scan_omp (gimple_omp_body_ptr (stmt), ctx);
2837 }
2838
/* Scan an OpenMP single directive.  Builds a .omp_copy_s record to
   hold any fields added by scan_sharing_clauses (NOTE(review): the
   name suggests this carries copyprivate data — confirm); the record
   is dropped if it ends up empty.  */

static void
scan_omp_single (gomp_single *stmt, omp_context *outer_ctx)
{
  omp_context *ctx;
  tree name;

  ctx = new_omp_context (stmt, outer_ctx);
  ctx->field_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
  ctx->record_type = lang_hooks.types.make_type (RECORD_TYPE);
  name = create_tmp_var_name (".omp_copy_s");
  name = build_decl (gimple_location (stmt),
		     TYPE_DECL, name, ctx->record_type);
  TYPE_NAME (ctx->record_type) = name;

  scan_sharing_clauses (gimple_omp_single_clauses (stmt), ctx);
  scan_omp (gimple_omp_body_ptr (stmt), ctx);

  if (TYPE_FIELDS (ctx->record_type) == NULL)
    ctx->record_type = NULL;
  else
    layout_type (ctx->record_type);
}
2863
/* Scan a GIMPLE_OMP_TARGET.  Builds the .omp_data_t record describing
   mapped data; for offloaded constructs also creates the outlined
   child function and fixes up its record type.  */

static void
scan_omp_target (gomp_target *stmt, omp_context *outer_ctx)
{
  omp_context *ctx;
  tree name;
  bool offloaded = is_gimple_omp_offloaded (stmt);
  tree clauses = gimple_omp_target_clauses (stmt);

  ctx = new_omp_context (stmt, outer_ctx);
  ctx->field_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
  ctx->record_type = lang_hooks.types.make_type (RECORD_TYPE);
  name = create_tmp_var_name (".omp_data_t");
  name = build_decl (gimple_location (stmt),
		     TYPE_DECL, name, ctx->record_type);
  DECL_ARTIFICIAL (name) = 1;
  DECL_NAMELESS (name) = 1;
  TYPE_NAME (ctx->record_type) = name;
  TYPE_ARTIFICIAL (ctx->record_type) = 1;

  if (offloaded)
    {
      create_omp_child_function (ctx, false);
      gimple_omp_target_set_child_fn (stmt, ctx->cb.dst_fn);
    }

  scan_sharing_clauses (clauses, ctx);
  scan_omp (gimple_omp_body_ptr (stmt), ctx);

  if (TYPE_FIELDS (ctx->record_type) == NULL)
    ctx->record_type = ctx->receiver_decl = NULL;
  else
    {
      /* Fields were prepended during scanning; restore clause order.  */
      TYPE_FIELDS (ctx->record_type)
	= nreverse (TYPE_FIELDS (ctx->record_type));
      if (flag_checking)
	{
	  /* All fields of the mapping record must share one alignment.  */
	  unsigned int align = DECL_ALIGN (TYPE_FIELDS (ctx->record_type));
	  for (tree field = TYPE_FIELDS (ctx->record_type);
	       field;
	       field = DECL_CHAIN (field))
	    gcc_assert (DECL_ALIGN (field) == align);
	}
      layout_type (ctx->record_type);
      if (offloaded)
	fixup_child_record_type (ctx);
    }
}
2913
/* Scan an OpenMP teams directive.  A non-host teams just gets its
   clauses and body scanned; a host teams is treated like a taskreg
   construct with its own .omp_data_s record and child function.  */

static void
scan_omp_teams (gomp_teams *stmt, omp_context *outer_ctx)
{
  omp_context *ctx = new_omp_context (stmt, outer_ctx);

  if (!gimple_omp_teams_host (stmt))
    {
      scan_sharing_clauses (gimple_omp_teams_clauses (stmt), ctx);
      scan_omp (gimple_omp_body_ptr (stmt), ctx);
      return;
    }
  taskreg_contexts.safe_push (ctx);
  gcc_assert (taskreg_nesting_level == 1);
  ctx->field_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
  ctx->record_type = lang_hooks.types.make_type (RECORD_TYPE);
  tree name = create_tmp_var_name (".omp_data_s");
  name = build_decl (gimple_location (stmt),
		     TYPE_DECL, name, ctx->record_type);
  DECL_ARTIFICIAL (name) = 1;
  DECL_NAMELESS (name) = 1;
  TYPE_NAME (ctx->record_type) = name;
  TYPE_ARTIFICIAL (ctx->record_type) = 1;
  create_omp_child_function (ctx, false);
  gimple_omp_teams_set_child_fn (stmt, ctx->cb.dst_fn);

  scan_sharing_clauses (gimple_omp_teams_clauses (stmt), ctx);
  scan_omp (gimple_omp_body_ptr (stmt), ctx);

  /* Empty record: no receiver argument needed.  */
  if (TYPE_FIELDS (ctx->record_type) == NULL)
    ctx->record_type = ctx->receiver_decl = NULL;
}
2947
/* Check nesting restrictions.  STMT is the OMP directive (or GOMP builtin
   call) being scanned, CTX the context of the innermost enclosing OMP
   construct (NULL if none).  Emit a diagnostic and return false when the
   nesting is invalid; return true when STMT may stay where it is.  */
static bool
check_omp_nesting_restrictions (gimple *stmt, omp_context *ctx)
{
  tree c;

  /* No nesting of non-OpenACC STMT (that is, an OpenMP one, or a GOMP builtin)
     inside an OpenACC CTX.  */
  if (!(is_gimple_omp (stmt)
	&& is_gimple_omp_oacc (stmt))
      /* Except for atomic codes that we share with OpenMP.  */
      && !(gimple_code (stmt) == GIMPLE_OMP_ATOMIC_LOAD
	   || gimple_code (stmt) == GIMPLE_OMP_ATOMIC_STORE))
    {
      if (oacc_get_fn_attrib (cfun->decl) != NULL)
	{
	  error_at (gimple_location (stmt),
		    "non-OpenACC construct inside of OpenACC routine");
	  return false;
	}
      else
	/* Walk outwards looking for any enclosing OpenACC construct.  */
	for (omp_context *octx = ctx; octx != NULL; octx = octx->outer)
	  if (is_gimple_omp (octx->stmt)
	      && is_gimple_omp_oacc (octx->stmt))
	    {
	      error_at (gimple_location (stmt),
			"non-OpenACC construct inside of OpenACC region");
	      return false;
	    }
    }

  if (ctx != NULL)
    {
      /* A scan region directly inside a simd/for is checked against the
	 enclosing loop's context instead of its own.  */
      if (gimple_code (ctx->stmt) == GIMPLE_OMP_SCAN
	  && ctx->outer
	  && gimple_code (ctx->outer->stmt) == GIMPLE_OMP_FOR)
	ctx = ctx->outer;
      if (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
	  && gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_SIMD
	  && !ctx->loop_p)
	{
	  /* Inside a simd region only a very limited set of constructs is
	     allowed: ordered simd, atomics, scan, and nested simd.  */
	  c = NULL_TREE;
	  if (ctx->order_concurrent
	      && (gimple_code (stmt) == GIMPLE_OMP_ORDERED
		  || gimple_code (stmt) == GIMPLE_OMP_ATOMIC_LOAD
		  || gimple_code (stmt) == GIMPLE_OMP_ATOMIC_STORE))
	    {
	      error_at (gimple_location (stmt),
			"OpenMP constructs other than %<parallel%>, %<loop%>"
			" or %<simd%> may not be nested inside a region with"
			" the %<order(concurrent)%> clause");
	      return false;
	    }
	  if (gimple_code (stmt) == GIMPLE_OMP_ORDERED)
	    {
	      c = gimple_omp_ordered_clauses (as_a <gomp_ordered *> (stmt));
	      if (omp_find_clause (c, OMP_CLAUSE_SIMD))
		{
		  /* ordered(simd, threads) additionally requires a combined
		     for simd parent.  */
		  if (omp_find_clause (c, OMP_CLAUSE_THREADS)
		      && (ctx->outer == NULL
			  || !gimple_omp_for_combined_into_p (ctx->stmt)
			  || gimple_code (ctx->outer->stmt) != GIMPLE_OMP_FOR
			  || (gimple_omp_for_kind (ctx->outer->stmt)
			      != GF_OMP_FOR_KIND_FOR)
			  || !gimple_omp_for_combined_p (ctx->outer->stmt)))
		    {
		      error_at (gimple_location (stmt),
				"%<ordered simd threads%> must be closely "
				"nested inside of %<%s simd%> region",
				lang_GNU_Fortran () ? "do" : "for");
		      return false;
		    }
		  return true;
		}
	    }
	  else if (gimple_code (stmt) == GIMPLE_OMP_ATOMIC_LOAD
		   || gimple_code (stmt) == GIMPLE_OMP_ATOMIC_STORE
		   || gimple_code (stmt) == GIMPLE_OMP_SCAN)
	    return true;
	  else if (gimple_code (stmt) == GIMPLE_OMP_FOR
		   && gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_SIMD)
	    return true;
	  error_at (gimple_location (stmt),
		    "OpenMP constructs other than "
		    "%<ordered simd%>, %<simd%>, %<loop%> or %<atomic%> may "
		    "not be nested inside %<simd%> region");
	  return false;
	}
      else if (gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS)
	{
	  /* Only distribute, parallel, or loop (for with a bind clause) may
	     be strictly nested in teams.  */
	  if ((gimple_code (stmt) != GIMPLE_OMP_FOR
	       || (gimple_omp_for_kind (stmt) != GF_OMP_FOR_KIND_DISTRIBUTE
		   && omp_find_clause (gimple_omp_for_clauses (stmt),
				       OMP_CLAUSE_BIND) == NULL_TREE))
	      && gimple_code (stmt) != GIMPLE_OMP_PARALLEL)
	    {
	      error_at (gimple_location (stmt),
			"only %<distribute%>, %<parallel%> or %<loop%> "
			"regions are allowed to be strictly nested inside "
			"%<teams%> region");
	      return false;
	    }
	}
      else if (ctx->order_concurrent
	       && gimple_code (stmt) != GIMPLE_OMP_PARALLEL
	       && (gimple_code (stmt) != GIMPLE_OMP_FOR
		   || gimple_omp_for_kind (stmt) != GF_OMP_FOR_KIND_SIMD)
	       && gimple_code (stmt) != GIMPLE_OMP_SCAN)
	{
	  if (ctx->loop_p)
	    error_at (gimple_location (stmt),
		      "OpenMP constructs other than %<parallel%>, %<loop%> or "
		      "%<simd%> may not be nested inside a %<loop%> region");
	  else
	    error_at (gimple_location (stmt),
		      "OpenMP constructs other than %<parallel%>, %<loop%> or "
		      "%<simd%> may not be nested inside a region with "
		      "the %<order(concurrent)%> clause");
	  return false;
	}
    }
  /* Per-construct nesting checks.  */
  switch (gimple_code (stmt))
    {
    case GIMPLE_OMP_FOR:
      if (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_SIMD)
	return true;
      if (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_DISTRIBUTE)
	{
	  if (ctx != NULL && gimple_code (ctx->stmt) != GIMPLE_OMP_TEAMS)
	    {
	      error_at (gimple_location (stmt),
			"%<distribute%> region must be strictly nested "
			"inside %<teams%> construct");
	      return false;
	    }
	  return true;
	}
      /* We split taskloop into task and nested taskloop in it.  */
      if (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_TASKLOOP)
	return true;
      /* For now, hope this will change and loop bind(parallel) will not
	 be allowed in lots of contexts.  */
      if (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_FOR
	  && omp_find_clause (gimple_omp_for_clauses (stmt), OMP_CLAUSE_BIND))
	return true;
      if (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_OACC_LOOP)
	{
	  /* An OpenACC loop must be lexically inside another OpenACC loop,
	     an OpenACC compute construct, or an OpenACC routine.  */
	  bool ok = false;

	  if (ctx)
	    switch (gimple_code (ctx->stmt))
	      {
	      case GIMPLE_OMP_FOR:
		ok = (gimple_omp_for_kind (ctx->stmt)
		      == GF_OMP_FOR_KIND_OACC_LOOP);
		break;

	      case GIMPLE_OMP_TARGET:
		switch (gimple_omp_target_kind (ctx->stmt))
		  {
		  case GF_OMP_TARGET_KIND_OACC_PARALLEL:
		  case GF_OMP_TARGET_KIND_OACC_KERNELS:
		  case GF_OMP_TARGET_KIND_OACC_SERIAL:
		  case GF_OMP_TARGET_KIND_OACC_PARALLEL_KERNELS_PARALLELIZED:
		  case GF_OMP_TARGET_KIND_OACC_PARALLEL_KERNELS_GANG_SINGLE:
		    ok = true;
		    break;

		  default:
		    break;
		  }

	      default:
		break;
	      }
	  else if (oacc_get_fn_attrib (current_function_decl))
	    ok = true;
	  if (!ok)
	    {
	      error_at (gimple_location (stmt),
			"OpenACC loop directive must be associated with"
			" an OpenACC compute region");
	      return false;
	    }
	}
      /* FALLTHRU */
    case GIMPLE_CALL:
      /* The GIMPLE_CALL case handles the GOMP_cancel and
	 GOMP_cancellation_point builtins; arg 0 selects the construct kind
	 (1 parallel, 2 for, 4 sections, 8 taskgroup) and arg 1 the if
	 expression for cancel.  */
      if (is_gimple_call (stmt)
	  && (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
	      == BUILT_IN_GOMP_CANCEL
	      || DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
		 == BUILT_IN_GOMP_CANCELLATION_POINT))
	{
	  const char *bad = NULL;
	  const char *kind = NULL;
	  const char *construct
	    = (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
	       == BUILT_IN_GOMP_CANCEL)
	      ? "cancel"
	      : "cancellation point";
	  if (ctx == NULL)
	    {
	      error_at (gimple_location (stmt), "orphaned %qs construct",
			construct);
	      return false;
	    }
	  switch (tree_fits_shwi_p (gimple_call_arg (stmt, 0))
		  ? tree_to_shwi (gimple_call_arg (stmt, 0))
		  : 0)
	    {
	    case 1:
	      if (gimple_code (ctx->stmt) != GIMPLE_OMP_PARALLEL)
		bad = "parallel";
	      else if (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
		       == BUILT_IN_GOMP_CANCEL
		       && !integer_zerop (gimple_call_arg (stmt, 1)))
		ctx->cancellable = true;
	      kind = "parallel";
	      break;
	    case 2:
	      if (gimple_code (ctx->stmt) != GIMPLE_OMP_FOR
		  || gimple_omp_for_kind (ctx->stmt) != GF_OMP_FOR_KIND_FOR)
		bad = "for";
	      else if (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
		       == BUILT_IN_GOMP_CANCEL
		       && !integer_zerop (gimple_call_arg (stmt, 1)))
		{
		  ctx->cancellable = true;
		  if (omp_find_clause (gimple_omp_for_clauses (ctx->stmt),
				       OMP_CLAUSE_NOWAIT))
		    warning_at (gimple_location (stmt), 0,
				"%<cancel for%> inside "
				"%<nowait%> for construct");
		  if (omp_find_clause (gimple_omp_for_clauses (ctx->stmt),
				       OMP_CLAUSE_ORDERED))
		    warning_at (gimple_location (stmt), 0,
				"%<cancel for%> inside "
				"%<ordered%> for construct");
		}
	      kind = "for";
	      break;
	    case 4:
	      if (gimple_code (ctx->stmt) != GIMPLE_OMP_SECTIONS
		  && gimple_code (ctx->stmt) != GIMPLE_OMP_SECTION)
		bad = "sections";
	      else if (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
		       == BUILT_IN_GOMP_CANCEL
		       && !integer_zerop (gimple_call_arg (stmt, 1)))
		{
		  if (gimple_code (ctx->stmt) == GIMPLE_OMP_SECTIONS)
		    {
		      ctx->cancellable = true;
		      if (omp_find_clause (gimple_omp_sections_clauses
								(ctx->stmt),
					   OMP_CLAUSE_NOWAIT))
			warning_at (gimple_location (stmt), 0,
				    "%<cancel sections%> inside "
				    "%<nowait%> sections construct");
		    }
		  else
		    {
		      /* Inside a section the cancellable flag goes on the
			 enclosing sections context.  */
		      gcc_assert (ctx->outer
				  && gimple_code (ctx->outer->stmt)
				     == GIMPLE_OMP_SECTIONS);
		      ctx->outer->cancellable = true;
		      if (omp_find_clause (gimple_omp_sections_clauses
							(ctx->outer->stmt),
					   OMP_CLAUSE_NOWAIT))
			warning_at (gimple_location (stmt), 0,
				    "%<cancel sections%> inside "
				    "%<nowait%> sections construct");
		    }
		}
	      kind = "sections";
	      break;
	    case 8:
	      if (!is_task_ctx (ctx)
		  && (!is_taskloop_ctx (ctx)
		      || ctx->outer == NULL
		      || !is_task_ctx (ctx->outer)))
		bad = "task";
	      else
		{
		  for (omp_context *octx = ctx->outer;
		       octx; octx = octx->outer)
		    {
		      switch (gimple_code (octx->stmt))
			{
			case GIMPLE_OMP_TASKGROUP:
			  break;
			case GIMPLE_OMP_TARGET:
			  if (gimple_omp_target_kind (octx->stmt)
			      != GF_OMP_TARGET_KIND_REGION)
			    continue;
			  /* FALLTHRU */
			case GIMPLE_OMP_PARALLEL:
			case GIMPLE_OMP_TEAMS:
			  error_at (gimple_location (stmt),
				    "%<%s taskgroup%> construct not closely "
				    "nested inside of %<taskgroup%> region",
				    construct);
			  return false;
			case GIMPLE_OMP_TASK:
			  if (gimple_omp_task_taskloop_p (octx->stmt)
			      && octx->outer
			      && is_taskloop_ctx (octx->outer))
			    {
			      tree clauses
				= gimple_omp_for_clauses (octx->outer->stmt);
			      if (!omp_find_clause (clauses, OMP_CLAUSE_NOGROUP))
				break;
			    }
			  continue;
			default:
			  continue;
			}
		      break;
		    }
		  ctx->cancellable = true;
		}
	      kind = "taskgroup";
	      break;
	    default:
	      error_at (gimple_location (stmt), "invalid arguments");
	      return false;
	    }
	  if (bad)
	    {
	      error_at (gimple_location (stmt),
			"%<%s %s%> construct not closely nested inside of %qs",
			construct, kind, bad);
	      return false;
	    }
	}
      /* FALLTHRU */
    case GIMPLE_OMP_SECTIONS:
    case GIMPLE_OMP_SINGLE:
      for (; ctx != NULL; ctx = ctx->outer)
	switch (gimple_code (ctx->stmt))
	  {
	  case GIMPLE_OMP_FOR:
	    if (gimple_omp_for_kind (ctx->stmt) != GF_OMP_FOR_KIND_FOR
		&& gimple_omp_for_kind (ctx->stmt) != GF_OMP_FOR_KIND_TASKLOOP)
	      break;
	    /* FALLTHRU */
	  case GIMPLE_OMP_SECTIONS:
	  case GIMPLE_OMP_SINGLE:
	  case GIMPLE_OMP_ORDERED:
	  case GIMPLE_OMP_MASTER:
	  case GIMPLE_OMP_TASK:
	  case GIMPLE_OMP_CRITICAL:
	    if (is_gimple_call (stmt))
	      {
		/* Only GOMP_barrier is restricted here; other builtins that
		   reach this path are fine.  */
		if (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
		    != BUILT_IN_GOMP_BARRIER)
		  return true;
		error_at (gimple_location (stmt),
			  "barrier region may not be closely nested inside "
			  "of work-sharing, %<loop%>, %<critical%>, "
			  "%<ordered%>, %<master%>, explicit %<task%> or "
			  "%<taskloop%> region");
		return false;
	      }
	    error_at (gimple_location (stmt),
		      "work-sharing region may not be closely nested inside "
		      "of work-sharing, %<loop%>, %<critical%>, %<ordered%>, "
		      "%<master%>, explicit %<task%> or %<taskloop%> region");
	    return false;
	  case GIMPLE_OMP_PARALLEL:
	  case GIMPLE_OMP_TEAMS:
	    return true;
	  case GIMPLE_OMP_TARGET:
	    if (gimple_omp_target_kind (ctx->stmt)
		== GF_OMP_TARGET_KIND_REGION)
	      return true;
	    break;
	  default:
	    break;
	  }
      break;
    case GIMPLE_OMP_MASTER:
      for (; ctx != NULL; ctx = ctx->outer)
	switch (gimple_code (ctx->stmt))
	  {
	  case GIMPLE_OMP_FOR:
	    if (gimple_omp_for_kind (ctx->stmt) != GF_OMP_FOR_KIND_FOR
		&& gimple_omp_for_kind (ctx->stmt) != GF_OMP_FOR_KIND_TASKLOOP)
	      break;
	    /* FALLTHRU */
	  case GIMPLE_OMP_SECTIONS:
	  case GIMPLE_OMP_SINGLE:
	  case GIMPLE_OMP_TASK:
	    error_at (gimple_location (stmt),
		      "%<master%> region may not be closely nested inside "
		      "of work-sharing, %<loop%>, explicit %<task%> or "
		      "%<taskloop%> region");
	    return false;
	  case GIMPLE_OMP_PARALLEL:
	  case GIMPLE_OMP_TEAMS:
	    return true;
	  case GIMPLE_OMP_TARGET:
	    if (gimple_omp_target_kind (ctx->stmt)
		== GF_OMP_TARGET_KIND_REGION)
	      return true;
	    break;
	  default:
	    break;
	  }
      break;
    case GIMPLE_OMP_TASK:
      /* depend(source)/depend(sink:...) are only meaningful on ordered.  */
      for (c = gimple_omp_task_clauses (stmt); c; c = OMP_CLAUSE_CHAIN (c))
	if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND
	    && (OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SOURCE
		|| OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SINK))
	  {
	    enum omp_clause_depend_kind kind = OMP_CLAUSE_DEPEND_KIND (c);
	    error_at (OMP_CLAUSE_LOCATION (c),
		      "%<depend(%s)%> is only allowed in %<omp ordered%>",
		      kind == OMP_CLAUSE_DEPEND_SOURCE ? "source" : "sink");
	    return false;
	  }
      break;
    case GIMPLE_OMP_ORDERED:
      for (c = gimple_omp_ordered_clauses (as_a <gomp_ordered *> (stmt));
	   c; c = OMP_CLAUSE_CHAIN (c))
	{
	  if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_DEPEND)
	    {
	      gcc_assert (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_THREADS
			  || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SIMD);
	      continue;
	    }
	  enum omp_clause_depend_kind kind = OMP_CLAUSE_DEPEND_KIND (c);
	  if (kind == OMP_CLAUSE_DEPEND_SOURCE
	      || kind == OMP_CLAUSE_DEPEND_SINK)
	    {
	      tree oclause;
	      /* Look for containing ordered(N) loop.  */
	      if (ctx == NULL
		  || gimple_code (ctx->stmt) != GIMPLE_OMP_FOR
		  || (oclause
		      = omp_find_clause (gimple_omp_for_clauses (ctx->stmt),
					 OMP_CLAUSE_ORDERED)) == NULL_TREE)
		{
		  error_at (OMP_CLAUSE_LOCATION (c),
			    "%<ordered%> construct with %<depend%> clause "
			    "must be closely nested inside an %<ordered%> "
			    "loop");
		  return false;
		}
	      else if (OMP_CLAUSE_ORDERED_EXPR (oclause) == NULL_TREE)
		{
		  error_at (OMP_CLAUSE_LOCATION (c),
			    "%<ordered%> construct with %<depend%> clause "
			    "must be closely nested inside a loop with "
			    "%<ordered%> clause with a parameter");
		  return false;
		}
	    }
	  else
	    {
	      error_at (OMP_CLAUSE_LOCATION (c),
			"invalid depend kind in omp %<ordered%> %<depend%>");
	      return false;
	    }
	}
      c = gimple_omp_ordered_clauses (as_a <gomp_ordered *> (stmt));
      if (omp_find_clause (c, OMP_CLAUSE_SIMD))
	{
	  /* ordered simd must be closely nested inside of simd region,
	     and simd region must not encounter constructs other than
	     ordered simd, therefore ordered simd may be either orphaned,
	     or ctx->stmt must be simd.  The latter case is handled already
	     earlier.  */
	  if (ctx != NULL)
	    {
	      error_at (gimple_location (stmt),
			"%<ordered%> %<simd%> must be closely nested inside "
			"%<simd%> region");
	      return false;
	    }
	}
      for (; ctx != NULL; ctx = ctx->outer)
	switch (gimple_code (ctx->stmt))
	  {
	  case GIMPLE_OMP_CRITICAL:
	  case GIMPLE_OMP_TASK:
	  case GIMPLE_OMP_ORDERED:
	  ordered_in_taskloop:
	    error_at (gimple_location (stmt),
		      "%<ordered%> region may not be closely nested inside "
		      "of %<critical%>, %<ordered%>, explicit %<task%> or "
		      "%<taskloop%> region");
	    return false;
	  case GIMPLE_OMP_FOR:
	    if (gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_TASKLOOP)
	      goto ordered_in_taskloop;
	    tree o;
	    o = omp_find_clause (gimple_omp_for_clauses (ctx->stmt),
				 OMP_CLAUSE_ORDERED);
	    if (o == NULL)
	      {
		error_at (gimple_location (stmt),
			  "%<ordered%> region must be closely nested inside "
			  "a loop region with an %<ordered%> clause");
		return false;
	      }
	    if (OMP_CLAUSE_ORDERED_EXPR (o) != NULL_TREE
		&& omp_find_clause (c, OMP_CLAUSE_DEPEND) == NULL_TREE)
	      {
		error_at (gimple_location (stmt),
			  "%<ordered%> region without %<depend%> clause may "
			  "not be closely nested inside a loop region with "
			  "an %<ordered%> clause with a parameter");
		return false;
	      }
	    return true;
	  case GIMPLE_OMP_TARGET:
	    if (gimple_omp_target_kind (ctx->stmt)
		!= GF_OMP_TARGET_KIND_REGION)
	      break;
	    /* FALLTHRU */
	  case GIMPLE_OMP_PARALLEL:
	  case GIMPLE_OMP_TEAMS:
	    error_at (gimple_location (stmt),
		      "%<ordered%> region must be closely nested inside "
		      "a loop region with an %<ordered%> clause");
	    return false;
	  default:
	    break;
	  }
      break;
    case GIMPLE_OMP_CRITICAL:
      {
	/* Reject nesting of two critical regions with the same name.  */
	tree this_stmt_name
	  = gimple_omp_critical_name (as_a <gomp_critical *> (stmt));
	for (; ctx != NULL; ctx = ctx->outer)
	  if (gomp_critical *other_crit
	      = dyn_cast <gomp_critical *> (ctx->stmt))
	    if (this_stmt_name == gimple_omp_critical_name (other_crit))
	      {
		error_at (gimple_location (stmt),
			  "%<critical%> region may not be nested inside "
			  "a %<critical%> region with the same name");
		return false;
	      }
      }
      break;
    case GIMPLE_OMP_TEAMS:
      if (ctx == NULL)
	break;
      else if (gimple_code (ctx->stmt) != GIMPLE_OMP_TARGET
	       || (gimple_omp_target_kind (ctx->stmt)
		   != GF_OMP_TARGET_KIND_REGION))
	{
	  /* Teams construct can appear either strictly nested inside of
	     target construct with no intervening stmts, or can be encountered
	     only by initial task (so must not appear inside any OpenMP
	     construct.  */
	  error_at (gimple_location (stmt),
		    "%<teams%> construct must be closely nested inside of "
		    "%<target%> construct or not nested in any OpenMP "
		    "construct");
	  return false;
	}
      break;
    case GIMPLE_OMP_TARGET:
      for (c = gimple_omp_target_clauses (stmt); c; c = OMP_CLAUSE_CHAIN (c))
	if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND
	    && (OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SOURCE
		|| OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SINK))
	  {
	    enum omp_clause_depend_kind kind = OMP_CLAUSE_DEPEND_KIND (c);
	    error_at (OMP_CLAUSE_LOCATION (c),
		      "%<depend(%s)%> is only allowed in %<omp ordered%>",
		      kind == OMP_CLAUSE_DEPEND_SOURCE ? "source" : "sink");
	    return false;
	  }
      if (is_gimple_omp_offloaded (stmt)
	  && oacc_get_fn_attrib (cfun->decl) != NULL)
	{
	  error_at (gimple_location (stmt),
		    "OpenACC region inside of OpenACC routine, nested "
		    "parallelism not supported yet");
	  return false;
	}
      for (; ctx != NULL; ctx = ctx->outer)
	{
	  if (gimple_code (ctx->stmt) != GIMPLE_OMP_TARGET)
	    {
	      if (is_gimple_omp (stmt)
		  && is_gimple_omp_oacc (stmt)
		  && is_gimple_omp (ctx->stmt))
		{
		  error_at (gimple_location (stmt),
			    "OpenACC construct inside of non-OpenACC region");
		  return false;
		}
	      continue;
	    }

	  /* Both STMT and the enclosing construct are target-class; name
	     them for the diagnostics below.  */
	  const char *stmt_name, *ctx_stmt_name;
	  switch (gimple_omp_target_kind (stmt))
	    {
	    case GF_OMP_TARGET_KIND_REGION: stmt_name = "target"; break;
	    case GF_OMP_TARGET_KIND_DATA: stmt_name = "target data"; break;
	    case GF_OMP_TARGET_KIND_UPDATE: stmt_name = "target update"; break;
	    case GF_OMP_TARGET_KIND_ENTER_DATA:
	      stmt_name = "target enter data"; break;
	    case GF_OMP_TARGET_KIND_EXIT_DATA:
	      stmt_name = "target exit data"; break;
	    case GF_OMP_TARGET_KIND_OACC_PARALLEL: stmt_name = "parallel"; break;
	    case GF_OMP_TARGET_KIND_OACC_KERNELS: stmt_name = "kernels"; break;
	    case GF_OMP_TARGET_KIND_OACC_SERIAL: stmt_name = "serial"; break;
	    case GF_OMP_TARGET_KIND_OACC_DATA: stmt_name = "data"; break;
	    case GF_OMP_TARGET_KIND_OACC_UPDATE: stmt_name = "update"; break;
	    case GF_OMP_TARGET_KIND_OACC_ENTER_EXIT_DATA:
	      stmt_name = "enter/exit data"; break;
	    case GF_OMP_TARGET_KIND_OACC_DECLARE: stmt_name = "declare"; break;
	    case GF_OMP_TARGET_KIND_OACC_HOST_DATA: stmt_name = "host_data";
	      break;
	    case GF_OMP_TARGET_KIND_OACC_PARALLEL_KERNELS_PARALLELIZED:
	    case GF_OMP_TARGET_KIND_OACC_PARALLEL_KERNELS_GANG_SINGLE:
	    case GF_OMP_TARGET_KIND_OACC_DATA_KERNELS:
	      /* OpenACC 'kernels' decomposed parts.  */
	      stmt_name = "kernels"; break;
	    default: gcc_unreachable ();
	    }
	  switch (gimple_omp_target_kind (ctx->stmt))
	    {
	    case GF_OMP_TARGET_KIND_REGION: ctx_stmt_name = "target"; break;
	    case GF_OMP_TARGET_KIND_DATA: ctx_stmt_name = "target data"; break;
	    case GF_OMP_TARGET_KIND_OACC_PARALLEL:
	      ctx_stmt_name = "parallel"; break;
	    case GF_OMP_TARGET_KIND_OACC_KERNELS:
	      ctx_stmt_name = "kernels"; break;
	    case GF_OMP_TARGET_KIND_OACC_SERIAL:
	      ctx_stmt_name = "serial"; break;
	    case GF_OMP_TARGET_KIND_OACC_DATA: ctx_stmt_name = "data"; break;
	    case GF_OMP_TARGET_KIND_OACC_HOST_DATA:
	      ctx_stmt_name = "host_data"; break;
	    case GF_OMP_TARGET_KIND_OACC_PARALLEL_KERNELS_PARALLELIZED:
	    case GF_OMP_TARGET_KIND_OACC_PARALLEL_KERNELS_GANG_SINGLE:
	    case GF_OMP_TARGET_KIND_OACC_DATA_KERNELS:
	      /* OpenACC 'kernels' decomposed parts.  */
	      ctx_stmt_name = "kernels"; break;
	    default: gcc_unreachable ();
	    }

	  /* OpenACC/OpenMP mismatch?  */
	  if (is_gimple_omp_oacc (stmt)
	      != is_gimple_omp_oacc (ctx->stmt))
	    {
	      error_at (gimple_location (stmt),
			"%s %qs construct inside of %s %qs region",
			(is_gimple_omp_oacc (stmt)
			 ? "OpenACC" : "OpenMP"), stmt_name,
			(is_gimple_omp_oacc (ctx->stmt)
			 ? "OpenACC" : "OpenMP"), ctx_stmt_name);
	      return false;
	    }
	  if (is_gimple_omp_offloaded (ctx->stmt))
	    {
	      /* No GIMPLE_OMP_TARGET inside offloaded OpenACC CTX.  */
	      if (is_gimple_omp_oacc (ctx->stmt))
		{
		  error_at (gimple_location (stmt),
			    "%qs construct inside of %qs region",
			    stmt_name, ctx_stmt_name);
		  return false;
		}
	      else
		{
		  warning_at (gimple_location (stmt), 0,
			      "%qs construct inside of %qs region",
			      stmt_name, ctx_stmt_name);
		}
	    }
	}
      break;
    default:
      break;
    }
  return true;
}
3633
3634
3635 /* Helper function scan_omp.
3636
3637 Callback for walk_tree or operators in walk_gimple_stmt used to
3638 scan for OMP directives in TP. */
3639
3640 static tree
3641 scan_omp_1_op (tree *tp, int *walk_subtrees, void *data)
3642 {
3643 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
3644 omp_context *ctx = (omp_context *) wi->info;
3645 tree t = *tp;
3646
3647 switch (TREE_CODE (t))
3648 {
3649 case VAR_DECL:
3650 case PARM_DECL:
3651 case LABEL_DECL:
3652 case RESULT_DECL:
3653 if (ctx)
3654 {
3655 tree repl = remap_decl (t, &ctx->cb);
3656 gcc_checking_assert (TREE_CODE (repl) != ERROR_MARK);
3657 *tp = repl;
3658 }
3659 break;
3660
3661 default:
3662 if (ctx && TYPE_P (t))
3663 *tp = remap_type (t, &ctx->cb);
3664 else if (!DECL_P (t))
3665 {
3666 *walk_subtrees = 1;
3667 if (ctx)
3668 {
3669 tree tem = remap_type (TREE_TYPE (t), &ctx->cb);
3670 if (tem != TREE_TYPE (t))
3671 {
3672 if (TREE_CODE (t) == INTEGER_CST)
3673 *tp = wide_int_to_tree (tem, wi::to_wide (t));
3674 else
3675 TREE_TYPE (t) = tem;
3676 }
3677 }
3678 }
3679 break;
3680 }
3681
3682 return NULL_TREE;
3683 }
3684
3685 /* Return true if FNDECL is a setjmp or a longjmp. */
3686
3687 static bool
3688 setjmp_or_longjmp_p (const_tree fndecl)
3689 {
3690 if (fndecl_built_in_p (fndecl, BUILT_IN_SETJMP)
3691 || fndecl_built_in_p (fndecl, BUILT_IN_LONGJMP))
3692 return true;
3693
3694 tree declname = DECL_NAME (fndecl);
3695 if (!declname
3696 || (DECL_CONTEXT (fndecl) != NULL_TREE
3697 && TREE_CODE (DECL_CONTEXT (fndecl)) != TRANSLATION_UNIT_DECL)
3698 || !TREE_PUBLIC (fndecl))
3699 return false;
3700
3701 const char *name = IDENTIFIER_POINTER (declname);
3702 return !strcmp (name, "setjmp") || !strcmp (name, "longjmp");
3703 }
3704
/* Return true if FNDECL is an omp_* runtime API call.  Matches public
   decls at translation-unit scope whose name starts with "omp_" and whose
   remainder is one of the known API names, optionally with the Fortran
   "_" or "_8_" suffixes depending on which table section it is in.  */

static bool
omp_runtime_api_call (const_tree fndecl)
{
  tree declname = DECL_NAME (fndecl);
  if (!declname
      || (DECL_CONTEXT (fndecl) != NULL_TREE
	  && TREE_CODE (DECL_CONTEXT (fndecl)) != TRANSLATION_UNIT_DECL)
      || !TREE_PUBLIC (fndecl))
    return false;

  const char *name = IDENTIFIER_POINTER (declname);
  if (strncmp (name, "omp_", 4) != 0)
    return false;

  static const char *omp_runtime_apis[] =
    {
      /* This array has 3 sections.  First omp_* calls that don't
	 have any suffixes.  */
      "target_alloc",
      "target_associate_ptr",
      "target_disassociate_ptr",
      "target_free",
      "target_is_present",
      "target_memcpy",
      "target_memcpy_rect",
      NULL,
      /* Now omp_* calls that are available as omp_* and omp_*_.  */
      "capture_affinity",
      "destroy_lock",
      "destroy_nest_lock",
      "display_affinity",
      "get_active_level",
      "get_affinity_format",
      "get_cancellation",
      "get_default_device",
      "get_dynamic",
      "get_initial_device",
      "get_level",
      "get_max_active_levels",
      "get_max_task_priority",
      "get_max_threads",
      "get_nested",
      "get_num_devices",
      "get_num_places",
      "get_num_procs",
      "get_num_teams",
      "get_num_threads",
      "get_partition_num_places",
      "get_place_num",
      "get_proc_bind",
      "get_team_num",
      "get_thread_limit",
      "get_thread_num",
      "get_wtick",
      "get_wtime",
      "in_final",
      "in_parallel",
      "init_lock",
      "init_nest_lock",
      "is_initial_device",
      "pause_resource",
      "pause_resource_all",
      "set_affinity_format",
      "set_lock",
      "set_nest_lock",
      "test_lock",
      "test_nest_lock",
      "unset_lock",
      "unset_nest_lock",
      NULL,
      /* And finally calls available as omp_*, omp_*_ and omp_*_8_.  */
      "get_ancestor_thread_num",
      "get_partition_place_nums",
      "get_place_num_procs",
      "get_place_proc_ids",
      "get_schedule",
      "get_team_size",
      "set_default_device",
      "set_dynamic",
      "set_max_active_levels",
      "set_nested",
      "set_num_threads",
      "set_schedule"
    };

  /* MODE counts the NULL separators seen so far: 0 = exact match only,
     1 = also allow a trailing "_", 2 = also allow a trailing "_8_".  */
  int mode = 0;
  for (unsigned i = 0; i < ARRAY_SIZE (omp_runtime_apis); i++)
    {
      if (omp_runtime_apis[i] == NULL)
	{
	  mode++;
	  continue;
	}
      size_t len = strlen (omp_runtime_apis[i]);
      /* NAME + 4 skips the "omp_" prefix checked above.  */
      if (strncmp (name + 4, omp_runtime_apis[i], len) == 0
	  && (name[4 + len] == '\0'
	      || (mode > 0
		  && name[4 + len] == '_'
		  && (name[4 + len + 1] == '\0'
		      || (mode > 1
			  && strcmp (name + 4 + len + 1, "8_") == 0)))))
	return true;
    }
  return false;
}
3812
/* Helper function for scan_omp.

   Callback for walk_gimple_stmt used to scan for OMP directives in
   the current statement in GSI.  First validates nesting restrictions
   (replacing invalid statements with a nop), then dispatches on the
   statement code to the construct-specific scanners, which create new
   omp contexts for their bodies.  */

static tree
scan_omp_1_stmt (gimple_stmt_iterator *gsi, bool *handled_ops_p,
		 struct walk_stmt_info *wi)
{
  gimple *stmt = gsi_stmt (*gsi);
  omp_context *ctx = (omp_context *) wi->info;

  if (gimple_has_location (stmt))
    input_location = gimple_location (stmt);

  /* Check the nesting restrictions.  */
  bool remove = false;
  if (is_gimple_omp (stmt))
    remove = !check_omp_nesting_restrictions (stmt, ctx);
  else if (is_gimple_call (stmt))
    {
      tree fndecl = gimple_call_fndecl (stmt);
      if (fndecl)
	{
	  /* setjmp/longjmp may not appear inside a simd region.  */
	  if (ctx
	      && gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
	      && gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_SIMD
	      && setjmp_or_longjmp_p (fndecl)
	      && !ctx->loop_p)
	    {
	      remove = true;
	      error_at (gimple_location (stmt),
			"setjmp/longjmp inside %<simd%> construct");
	    }
	  else if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
	    /* The GOMP synchronization/cancellation builtins obey the
	       same nesting rules as explicit directives.  */
	    switch (DECL_FUNCTION_CODE (fndecl))
	      {
	      case BUILT_IN_GOMP_BARRIER:
	      case BUILT_IN_GOMP_CANCEL:
	      case BUILT_IN_GOMP_CANCELLATION_POINT:
	      case BUILT_IN_GOMP_TASKYIELD:
	      case BUILT_IN_GOMP_TASKWAIT:
	      case BUILT_IN_GOMP_TASKGROUP_START:
	      case BUILT_IN_GOMP_TASKGROUP_END:
		remove = !check_omp_nesting_restrictions (stmt, ctx);
		break;
	      default:
		break;
	      }
	  else if (ctx)
	    {
	      /* OpenMP runtime API calls are not allowed inside
		 order(concurrent) regions.  */
	      omp_context *octx = ctx;
	      if (gimple_code (ctx->stmt) == GIMPLE_OMP_SCAN && ctx->outer)
		octx = ctx->outer;
	      if (octx->order_concurrent && omp_runtime_api_call (fndecl))
		{
		  remove = true;
		  error_at (gimple_location (stmt),
			    "OpenMP runtime API call %qD in a region with "
			    "%<order(concurrent)%> clause", fndecl);
		}
	    }
	}
    }
  if (remove)
    {
      /* Invalid nesting: drop the statement but keep walking.  */
      stmt = gimple_build_nop ();
      gsi_replace (gsi, stmt, false);
    }

  *handled_ops_p = true;

  switch (gimple_code (stmt))
    {
    case GIMPLE_OMP_PARALLEL:
      taskreg_nesting_level++;
      scan_omp_parallel (gsi, ctx);
      taskreg_nesting_level--;
      break;

    case GIMPLE_OMP_TASK:
      taskreg_nesting_level++;
      scan_omp_task (gsi, ctx);
      taskreg_nesting_level--;
      break;

    case GIMPLE_OMP_FOR:
      /* A combined simd with an inscan reduction gets the dedicated
	 scan handling.  */
      if ((gimple_omp_for_kind (as_a <gomp_for *> (stmt))
	   == GF_OMP_FOR_KIND_SIMD)
	  && gimple_omp_for_combined_into_p (stmt)
	  && gimple_code (ctx->stmt) != GIMPLE_OMP_SCAN)
	{
	  tree clauses = gimple_omp_for_clauses (as_a <gomp_for *> (stmt));
	  tree c = omp_find_clause (clauses, OMP_CLAUSE_REDUCTION);
	  if (c && OMP_CLAUSE_REDUCTION_INSCAN (c) && !seen_error ())
	    {
	      scan_omp_simd_scan (gsi, as_a <gomp_for *> (stmt), ctx);
	      break;
	    }
	}
      /* A non-collapsed simd that might be offloaded to a SIMT device
	 gets the SIMT-aware scanner.  */
      if ((gimple_omp_for_kind (as_a <gomp_for *> (stmt))
	   == GF_OMP_FOR_KIND_SIMD)
	  && omp_maybe_offloaded_ctx (ctx)
	  && omp_max_simt_vf ()
	  && gimple_omp_for_collapse (stmt) == 1)
	scan_omp_simd (gsi, as_a <gomp_for *> (stmt), ctx);
      else
	scan_omp_for (as_a <gomp_for *> (stmt), ctx);
      break;

    case GIMPLE_OMP_SECTIONS:
      scan_omp_sections (as_a <gomp_sections *> (stmt), ctx);
      break;

    case GIMPLE_OMP_SINGLE:
      scan_omp_single (as_a <gomp_single *> (stmt), ctx);
      break;

    case GIMPLE_OMP_SCAN:
      /* Record whether the scan is inclusive or exclusive on the
	 enclosing context before falling through to body scanning.  */
      if (tree clauses = gimple_omp_scan_clauses (as_a <gomp_scan *> (stmt)))
	{
	  if (OMP_CLAUSE_CODE (clauses) == OMP_CLAUSE_INCLUSIVE)
	    ctx->scan_inclusive = true;
	  else if (OMP_CLAUSE_CODE (clauses) == OMP_CLAUSE_EXCLUSIVE)
	    ctx->scan_exclusive = true;
	}
      /* FALLTHRU */
    case GIMPLE_OMP_SECTION:
    case GIMPLE_OMP_MASTER:
    case GIMPLE_OMP_ORDERED:
    case GIMPLE_OMP_CRITICAL:
      ctx = new_omp_context (stmt, ctx);
      scan_omp (gimple_omp_body_ptr (stmt), ctx);
      break;

    case GIMPLE_OMP_TASKGROUP:
      ctx = new_omp_context (stmt, ctx);
      scan_sharing_clauses (gimple_omp_taskgroup_clauses (stmt), ctx);
      scan_omp (gimple_omp_body_ptr (stmt), ctx);
      break;

    case GIMPLE_OMP_TARGET:
      if (is_gimple_omp_offloaded (stmt))
	{
	  taskreg_nesting_level++;
	  scan_omp_target (as_a <gomp_target *> (stmt), ctx);
	  taskreg_nesting_level--;
	}
      else
	scan_omp_target (as_a <gomp_target *> (stmt), ctx);
      break;

    case GIMPLE_OMP_TEAMS:
      if (gimple_omp_teams_host (as_a <gomp_teams *> (stmt)))
	{
	  /* Host teams is outlined like a taskreg region.  */
	  taskreg_nesting_level++;
	  scan_omp_teams (as_a <gomp_teams *> (stmt), ctx);
	  taskreg_nesting_level--;
	}
      else
	scan_omp_teams (as_a <gomp_teams *> (stmt), ctx);
      break;

    case GIMPLE_BIND:
      {
	tree var;

	/* Map bind-local variables to themselves so remap_decl leaves
	   them alone, then let the walker descend into the body.  */
	*handled_ops_p = false;
	if (ctx)
	  for (var = gimple_bind_vars (as_a <gbind *> (stmt));
	       var ;
	       var = DECL_CHAIN (var))
	    insert_decl_map (&ctx->cb, var, var);
      }
      break;
    default:
      *handled_ops_p = false;
      break;
    }

  return NULL_TREE;
}
3995
3996
3997 /* Scan all the statements starting at the current statement. CTX
3998 contains context information about the OMP directives and
3999 clauses found during the scan. */
4000
4001 static void
4002 scan_omp (gimple_seq *body_p, omp_context *ctx)
4003 {
4004 location_t saved_location;
4005 struct walk_stmt_info wi;
4006
4007 memset (&wi, 0, sizeof (wi));
4008 wi.info = ctx;
4009 wi.want_locations = true;
4010
4011 saved_location = input_location;
4012 walk_gimple_seq_mod (body_p, scan_omp_1_stmt, scan_omp_1_op, &wi);
4013 input_location = saved_location;
4014 }
4015 \f
4016 /* Re-gimplification and code generation routines. */
4017
4018 /* Remove omp_member_access_dummy_var variables from gimple_bind_vars
4019 of BIND if in a method. */
4020
4021 static void
4022 maybe_remove_omp_member_access_dummy_vars (gbind *bind)
4023 {
4024 if (DECL_ARGUMENTS (current_function_decl)
4025 && DECL_ARTIFICIAL (DECL_ARGUMENTS (current_function_decl))
4026 && (TREE_CODE (TREE_TYPE (DECL_ARGUMENTS (current_function_decl)))
4027 == POINTER_TYPE))
4028 {
4029 tree vars = gimple_bind_vars (bind);
4030 for (tree *pvar = &vars; *pvar; )
4031 if (omp_member_access_dummy_var (*pvar))
4032 *pvar = DECL_CHAIN (*pvar);
4033 else
4034 pvar = &DECL_CHAIN (*pvar);
4035 gimple_bind_set_vars (bind, vars);
4036 }
4037 }
4038
4039 /* Remove omp_member_access_dummy_var variables from BLOCK_VARS of
4040 block and its subblocks. */
4041
4042 static void
4043 remove_member_access_dummy_vars (tree block)
4044 {
4045 for (tree *pvar = &BLOCK_VARS (block); *pvar; )
4046 if (omp_member_access_dummy_var (*pvar))
4047 *pvar = DECL_CHAIN (*pvar);
4048 else
4049 pvar = &DECL_CHAIN (*pvar);
4050
4051 for (block = BLOCK_SUBBLOCKS (block); block; block = BLOCK_CHAIN (block))
4052 remove_member_access_dummy_vars (block);
4053 }
4054
4055 /* If a context was created for STMT when it was scanned, return it. */
4056
4057 static omp_context *
4058 maybe_lookup_ctx (gimple *stmt)
4059 {
4060 splay_tree_node n;
4061 n = splay_tree_lookup (all_contexts, (splay_tree_key) stmt);
4062 return n ? (omp_context *) n->value : NULL;
4063 }
4064
4065
4066 /* Find the mapping for DECL in CTX or the immediately enclosing
4067 context that has a mapping for DECL.
4068
4069 If CTX is a nested parallel directive, we may have to use the decl
4070 mappings created in CTX's parent context. Suppose that we have the
4071 following parallel nesting (variable UIDs showed for clarity):
4072
4073 iD.1562 = 0;
4074 #omp parallel shared(iD.1562) -> outer parallel
4075 iD.1562 = iD.1562 + 1;
4076
4077 #omp parallel shared (iD.1562) -> inner parallel
4078 iD.1562 = iD.1562 - 1;
4079
4080 Each parallel structure will create a distinct .omp_data_s structure
4081 for copying iD.1562 in/out of the directive:
4082
4083 outer parallel .omp_data_s.1.i -> iD.1562
4084 inner parallel .omp_data_s.2.i -> iD.1562
4085
4086 A shared variable mapping will produce a copy-out operation before
4087 the parallel directive and a copy-in operation after it. So, in
4088 this case we would have:
4089
4090 iD.1562 = 0;
4091 .omp_data_o.1.i = iD.1562;
4092 #omp parallel shared(iD.1562) -> outer parallel
4093 .omp_data_i.1 = &.omp_data_o.1
4094 .omp_data_i.1->i = .omp_data_i.1->i + 1;
4095
4096 .omp_data_o.2.i = iD.1562; -> **
4097 #omp parallel shared(iD.1562) -> inner parallel
4098 .omp_data_i.2 = &.omp_data_o.2
4099 .omp_data_i.2->i = .omp_data_i.2->i - 1;
4100
4101
4102 ** This is a problem. The symbol iD.1562 cannot be referenced
4103 inside the body of the outer parallel region. But since we are
4104 emitting this copy operation while expanding the inner parallel
4105 directive, we need to access the CTX structure of the outer
4106 parallel directive to get the correct mapping:
4107
4108 .omp_data_o.2.i = .omp_data_i.1->i
4109
4110 Since there may be other workshare or parallel directives enclosing
4111 the parallel directive, it may be necessary to walk up the context
4112 parent chain. This is not a problem in general because nested
4113 parallelism happens only rarely. */
4114
4115 static tree
4116 lookup_decl_in_outer_ctx (tree decl, omp_context *ctx)
4117 {
4118 tree t;
4119 omp_context *up;
4120
4121 for (up = ctx->outer, t = NULL; up && t == NULL; up = up->outer)
4122 t = maybe_lookup_decl (decl, up);
4123
4124 gcc_assert (!ctx->is_nested || t || is_global_var (decl));
4125
4126 return t ? t : decl;
4127 }
4128
4129
4130 /* Similar to lookup_decl_in_outer_ctx, but return DECL if not found
4131 in outer contexts. */
4132
4133 static tree
4134 maybe_lookup_decl_in_outer_ctx (tree decl, omp_context *ctx)
4135 {
4136 tree t = NULL;
4137 omp_context *up;
4138
4139 for (up = ctx->outer, t = NULL; up && t == NULL; up = up->outer)
4140 t = maybe_lookup_decl (decl, up);
4141
4142 return t ? t : decl;
4143 }
4144
4145
4146 /* Construct the initialization value for reduction operation OP. */
4147
4148 tree
4149 omp_reduction_init_op (location_t loc, enum tree_code op, tree type)
4150 {
4151 switch (op)
4152 {
4153 case PLUS_EXPR:
4154 case MINUS_EXPR:
4155 case BIT_IOR_EXPR:
4156 case BIT_XOR_EXPR:
4157 case TRUTH_OR_EXPR:
4158 case TRUTH_ORIF_EXPR:
4159 case TRUTH_XOR_EXPR:
4160 case NE_EXPR:
4161 return build_zero_cst (type);
4162
4163 case MULT_EXPR:
4164 case TRUTH_AND_EXPR:
4165 case TRUTH_ANDIF_EXPR:
4166 case EQ_EXPR:
4167 return fold_convert_loc (loc, type, integer_one_node);
4168
4169 case BIT_AND_EXPR:
4170 return fold_convert_loc (loc, type, integer_minus_one_node);
4171
4172 case MAX_EXPR:
4173 if (SCALAR_FLOAT_TYPE_P (type))
4174 {
4175 REAL_VALUE_TYPE max, min;
4176 if (HONOR_INFINITIES (type))
4177 {
4178 real_inf (&max);
4179 real_arithmetic (&min, NEGATE_EXPR, &max, NULL);
4180 }
4181 else
4182 real_maxval (&min, 1, TYPE_MODE (type));
4183 return build_real (type, min);
4184 }
4185 else if (POINTER_TYPE_P (type))
4186 {
4187 wide_int min
4188 = wi::min_value (TYPE_PRECISION (type), TYPE_SIGN (type));
4189 return wide_int_to_tree (type, min);
4190 }
4191 else
4192 {
4193 gcc_assert (INTEGRAL_TYPE_P (type));
4194 return TYPE_MIN_VALUE (type);
4195 }
4196
4197 case MIN_EXPR:
4198 if (SCALAR_FLOAT_TYPE_P (type))
4199 {
4200 REAL_VALUE_TYPE max;
4201 if (HONOR_INFINITIES (type))
4202 real_inf (&max);
4203 else
4204 real_maxval (&max, 0, TYPE_MODE (type));
4205 return build_real (type, max);
4206 }
4207 else if (POINTER_TYPE_P (type))
4208 {
4209 wide_int max
4210 = wi::max_value (TYPE_PRECISION (type), TYPE_SIGN (type));
4211 return wide_int_to_tree (type, max);
4212 }
4213 else
4214 {
4215 gcc_assert (INTEGRAL_TYPE_P (type));
4216 return TYPE_MAX_VALUE (type);
4217 }
4218
4219 default:
4220 gcc_unreachable ();
4221 }
4222 }
4223
4224 /* Construct the initialization value for reduction CLAUSE. */
4225
4226 tree
4227 omp_reduction_init (tree clause, tree type)
4228 {
4229 return omp_reduction_init_op (OMP_CLAUSE_LOCATION (clause),
4230 OMP_CLAUSE_REDUCTION_CODE (clause), type);
4231 }
4232
/* Return alignment to be assumed for var in CLAUSE, which should be
   OMP_CLAUSE_ALIGNED.  */

static tree
omp_clause_aligned_alignment (tree clause)
{
  /* An alignment explicitly given on the clause wins.  */
  if (OMP_CLAUSE_ALIGNED_ALIGNMENT (clause))
    return OMP_CLAUSE_ALIGNED_ALIGNMENT (clause);

  /* Otherwise return implementation defined alignment.  */
  unsigned int al = 1;
  opt_scalar_mode mode_iter;
  auto_vector_modes modes;
  targetm.vectorize.autovectorize_vector_modes (&modes, true);
  /* Pairs of mode classes: each even index is a scalar class, the
     following odd index the corresponding vector class.  */
  static enum mode_class classes[]
    = { MODE_INT, MODE_VECTOR_INT, MODE_FLOAT, MODE_VECTOR_FLOAT };
  for (int i = 0; i < 4; i += 2)
    /* The for loop above dictates that we only walk through scalar classes.  */
    FOR_EACH_MODE_IN_CLASS (mode_iter, classes[i])
      {
	scalar_mode mode = mode_iter.require ();
	/* Skip scalar modes whose preferred SIMD mode is not of the
	   paired vector class.  */
	machine_mode vmode = targetm.vectorize.preferred_simd_mode (mode);
	if (GET_MODE_CLASS (vmode) != classes[i + 1])
	  continue;
	/* Prefer the widest related autovectorization vector mode.  */
	machine_mode alt_vmode;
	for (unsigned int j = 0; j < modes.length (); ++j)
	  if (related_vector_mode (modes[j], mode).exists (&alt_vmode)
	      && known_ge (GET_MODE_SIZE (alt_vmode), GET_MODE_SIZE (vmode)))
	    vmode = alt_vmode;

	/* Build the vector type for this scalar/vector mode pair and
	   skip it if the front end cannot represent either mode.  */
	tree type = lang_hooks.types.type_for_mode (mode, 1);
	if (type == NULL_TREE || TYPE_MODE (type) != mode)
	  continue;
	type = build_vector_type_for_mode (type, vmode);
	if (TYPE_MODE (type) != vmode)
	  continue;
	/* Track the maximum alignment over all candidate vector types.  */
	if (TYPE_ALIGN_UNIT (type) > al)
	  al = TYPE_ALIGN_UNIT (type);
      }
  return build_int_cst (integer_type_node, al);
}
4274
4275
/* This structure is part of the interface between lower_rec_simd_input_clauses
   and lower_rec_input_clauses.  */

class omplow_simd_context {
public:
  /* Zero-initialize every member; the members below are all plain
     data, so memset is used as a shorthand.  */
  omplow_simd_context () { memset (this, 0, sizeof (*this)); }
  /* Index used for ARRAY_REFs into the "omp simd array" temporaries.  */
  tree idx;
  /* Lane variable used for the per-lane (lvar) ARRAY_REFs.  */
  tree lane;
  /* Lane index used for the inscan-reduction result ARRAY_REF;
     created lazily in lower_rec_simd_input_clauses.  */
  tree lastlane;
  /* Addresses of SIMT-privatized variables (plus a placeholder for the
     simduid) collected for the SIMT entry arguments.  */
  vec<tree, va_heap> simt_eargs;
  /* Statement sequence receiving clobbers of SIMT-privatized vars.  */
  gimple_seq simt_dlist;
  /* Maximum vectorization factor; 0 means not computed yet, 1 means
     per-lane privatization is disabled.  */
  poly_uint64_pod max_vf;
  /* True when lowering for SIMT rather than plain SIMD.  */
  bool is_simt;
};
4290
/* Helper function of lower_rec_input_clauses, used for #pragma omp simd
   privatization.  On success IVAR is set to the per-iteration reference
   for NEW_VAR and LVAR to the per-lane reference; for inscan reductions
   *RVAR (and for exclusive scan *RVAR2) receive additional array
   references.  Returns false when max_vf turns out to be 1 and no
   per-lane privatization is performed.  */

static bool
lower_rec_simd_input_clauses (tree new_var, omp_context *ctx,
			      omplow_simd_context *sctx, tree &ivar,
			      tree &lvar, tree *rvar = NULL,
			      tree *rvar2 = NULL)
{
  /* Compute max_vf lazily on the first call, clamping it by any
     safelen clause on the loop.  */
  if (known_eq (sctx->max_vf, 0U))
    {
      sctx->max_vf = sctx->is_simt ? omp_max_simt_vf () : omp_max_vf ();
      if (maybe_gt (sctx->max_vf, 1U))
	{
	  tree c = omp_find_clause (gimple_omp_for_clauses (ctx->stmt),
				    OMP_CLAUSE_SAFELEN);
	  if (c)
	    {
	      poly_uint64 safe_len;
	      /* A non-constant or sub-1 safelen disables privatization.  */
	      if (!poly_int_tree_p (OMP_CLAUSE_SAFELEN_EXPR (c), &safe_len)
		  || maybe_lt (safe_len, 1U))
		sctx->max_vf = 1;
	      else
		sctx->max_vf = lower_bound (sctx->max_vf, safe_len);
	    }
	}
      if (maybe_gt (sctx->max_vf, 1U))
	{
	  sctx->idx = create_tmp_var (unsigned_type_node);
	  sctx->lane = create_tmp_var (unsigned_type_node);
	}
    }
  if (known_eq (sctx->max_vf, 1U))
    return false;

  if (sctx->is_simt)
    {
      /* For SIMT, registers are already per-lane; only addressable
	 variables need a marked private copy.  */
      if (is_gimple_reg (new_var))
	{
	  ivar = lvar = new_var;
	  return true;
	}
      tree type = TREE_TYPE (new_var), ptype = build_pointer_type (type);
      ivar = lvar = create_tmp_var (type);
      TREE_ADDRESSABLE (ivar) = 1;
      DECL_ATTRIBUTES (ivar) = tree_cons (get_identifier ("omp simt private"),
					  NULL, DECL_ATTRIBUTES (ivar));
      sctx->simt_eargs.safe_push (build1 (ADDR_EXPR, ptype, ivar));
      /* Clobber the private copy at the end of the region.  */
      tree clobber = build_clobber (type);
      gimple *g = gimple_build_assign (ivar, clobber);
      gimple_seq_add_stmt (&sctx->simt_dlist, g);
    }
  else
    {
      /* For SIMD, privatize NEW_VAR into a max_vf-element "omp simd
	 array" indexed by lane.  */
      tree atype = build_array_type_nelts (TREE_TYPE (new_var), sctx->max_vf);
      tree avar = create_tmp_var_raw (atype);
      if (TREE_ADDRESSABLE (new_var))
	TREE_ADDRESSABLE (avar) = 1;
      DECL_ATTRIBUTES (avar)
	= tree_cons (get_identifier ("omp simd array"), NULL,
		     DECL_ATTRIBUTES (avar));
      gimple_add_tmp_var (avar);
      tree iavar = avar;
      if (rvar && !ctx->for_simd_scan_phase)
	{
	  /* For inscan reductions, create another array temporary,
	     which will hold the reduced value.  */
	  iavar = create_tmp_var_raw (atype);
	  if (TREE_ADDRESSABLE (new_var))
	    TREE_ADDRESSABLE (iavar) = 1;
	  DECL_ATTRIBUTES (iavar)
	    = tree_cons (get_identifier ("omp simd array"), NULL,
			 tree_cons (get_identifier ("omp simd inscan"), NULL,
				    DECL_ATTRIBUTES (iavar)));
	  gimple_add_tmp_var (iavar);
	  ctx->cb.decl_map->put (avar, iavar);
	  if (sctx->lastlane == NULL_TREE)
	    sctx->lastlane = create_tmp_var (unsigned_type_node);
	  /* *RVAR reads the reduced value from the lastlane element.  */
	  *rvar = build4 (ARRAY_REF, TREE_TYPE (new_var), iavar,
			  sctx->lastlane, NULL_TREE, NULL_TREE);
	  TREE_THIS_NOTRAP (*rvar) = 1;

	  if (ctx->scan_exclusive)
	    {
	      /* And for exclusive scan yet another one, which will
		 hold the value during the scan phase.  */
	      tree savar = create_tmp_var_raw (atype);
	      if (TREE_ADDRESSABLE (new_var))
		TREE_ADDRESSABLE (savar) = 1;
	      DECL_ATTRIBUTES (savar)
		= tree_cons (get_identifier ("omp simd array"), NULL,
			     tree_cons (get_identifier ("omp simd inscan "
							"exclusive"), NULL,
					DECL_ATTRIBUTES (savar)));
	      gimple_add_tmp_var (savar);
	      ctx->cb.decl_map->put (iavar, savar);
	      *rvar2 = build4 (ARRAY_REF, TREE_TYPE (new_var), savar,
			       sctx->idx, NULL_TREE, NULL_TREE);
	      TREE_THIS_NOTRAP (*rvar2) = 1;
	    }
	}
      /* Per-iteration and per-lane references into the arrays.  */
      ivar = build4 (ARRAY_REF, TREE_TYPE (new_var), iavar, sctx->idx,
		     NULL_TREE, NULL_TREE);
      lvar = build4 (ARRAY_REF, TREE_TYPE (new_var), avar, sctx->lane,
		     NULL_TREE, NULL_TREE);
      TREE_THIS_NOTRAP (ivar) = 1;
      TREE_THIS_NOTRAP (lvar) = 1;
    }
  /* Redirect uses of NEW_VAR to the per-lane reference.  */
  if (DECL_P (new_var))
    {
      SET_DECL_VALUE_EXPR (new_var, lvar);
      DECL_HAS_VALUE_EXPR_P (new_var) = 1;
    }
  return true;
}
4406
4407 /* Helper function of lower_rec_input_clauses. For a reference
4408 in simd reduction, add an underlying variable it will reference. */
4409
4410 static void
4411 handle_simd_reference (location_t loc, tree new_vard, gimple_seq *ilist)
4412 {
4413 tree z = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (new_vard)));
4414 if (TREE_CONSTANT (z))
4415 {
4416 z = create_tmp_var_raw (TREE_TYPE (TREE_TYPE (new_vard)),
4417 get_name (new_vard));
4418 gimple_add_tmp_var (z);
4419 TREE_ADDRESSABLE (z) = 1;
4420 z = build_fold_addr_expr_loc (loc, z);
4421 gimplify_assign (new_vard, z, ilist);
4422 }
4423 }
4424
4425 /* Helper function for lower_rec_input_clauses. Emit into ilist sequence
4426 code to emit (type) (tskred_temp[idx]). */
4427
4428 static tree
4429 task_reduction_read (gimple_seq *ilist, tree tskred_temp, tree type,
4430 unsigned idx)
4431 {
4432 unsigned HOST_WIDE_INT sz
4433 = tree_to_uhwi (TYPE_SIZE_UNIT (pointer_sized_int_node));
4434 tree r = build2 (MEM_REF, pointer_sized_int_node,
4435 tskred_temp, build_int_cst (TREE_TYPE (tskred_temp),
4436 idx * sz));
4437 tree v = create_tmp_var (pointer_sized_int_node);
4438 gimple *g = gimple_build_assign (v, r);
4439 gimple_seq_add_stmt (ilist, g);
4440 if (!useless_type_conversion_p (type, pointer_sized_int_node))
4441 {
4442 v = create_tmp_var (type);
4443 g = gimple_build_assign (v, NOP_EXPR, gimple_assign_lhs (g));
4444 gimple_seq_add_stmt (ilist, g);
4445 }
4446 return v;
4447 }
4448
4449 /* Lower early initialization of privatized variable NEW_VAR
4450 if it needs an allocator (has allocate clause). */
4451
4452 static bool
4453 lower_private_allocate (tree var, tree new_var, tree &allocator,
4454 tree &allocate_ptr, gimple_seq *ilist,
4455 omp_context *ctx, bool is_ref, tree size)
4456 {
4457 if (allocator)
4458 return false;
4459 gcc_assert (allocate_ptr == NULL_TREE);
4460 if (ctx->allocate_map
4461 && (DECL_P (new_var) || (TYPE_P (new_var) && size)))
4462 if (tree *allocatorp = ctx->allocate_map->get (var))
4463 allocator = *allocatorp;
4464 if (allocator == NULL_TREE)
4465 return false;
4466 if (!is_ref && omp_is_reference (var))
4467 {
4468 allocator = NULL_TREE;
4469 return false;
4470 }
4471
4472 if (TREE_CODE (allocator) != INTEGER_CST)
4473 allocator = build_outer_var_ref (allocator, ctx);
4474 allocator = fold_convert (pointer_sized_int_node, allocator);
4475 if (TREE_CODE (allocator) != INTEGER_CST)
4476 {
4477 tree var = create_tmp_var (TREE_TYPE (allocator));
4478 gimplify_assign (var, allocator, ilist);
4479 allocator = var;
4480 }
4481
4482 tree ptr_type, align, sz = size;
4483 if (TYPE_P (new_var))
4484 {
4485 ptr_type = build_pointer_type (new_var);
4486 align = build_int_cst (size_type_node, TYPE_ALIGN_UNIT (new_var));
4487 }
4488 else if (is_ref)
4489 {
4490 ptr_type = build_pointer_type (TREE_TYPE (TREE_TYPE (new_var)));
4491 align = build_int_cst (size_type_node,
4492 TYPE_ALIGN_UNIT (TREE_TYPE (ptr_type)));
4493 }
4494 else
4495 {
4496 ptr_type = build_pointer_type (TREE_TYPE (new_var));
4497 align = build_int_cst (size_type_node, DECL_ALIGN_UNIT (new_var));
4498 if (sz == NULL_TREE)
4499 sz = fold_convert (size_type_node, DECL_SIZE_UNIT (new_var));
4500 }
4501 if (TREE_CODE (sz) != INTEGER_CST)
4502 {
4503 tree szvar = create_tmp_var (size_type_node);
4504 gimplify_assign (szvar, sz, ilist);
4505 sz = szvar;
4506 }
4507 allocate_ptr = create_tmp_var (ptr_type);
4508 tree a = builtin_decl_explicit (BUILT_IN_GOMP_ALLOC);
4509 gimple *g = gimple_build_call (a, 3, align, sz, allocator);
4510 gimple_call_set_lhs (g, allocate_ptr);
4511 gimple_seq_add_stmt (ilist, g);
4512 if (!is_ref)
4513 {
4514 tree x = build_simple_mem_ref (allocate_ptr);
4515 TREE_THIS_NOTRAP (x) = 1;
4516 SET_DECL_VALUE_EXPR (new_var, x);
4517 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
4518 }
4519 return true;
4520 }
4521
4522 /* Generate code to implement the input clauses, FIRSTPRIVATE and COPYIN,
4523 from the receiver (aka child) side and initializers for REFERENCE_TYPE
4524 private variables. Initialization statements go in ILIST, while calls
4525 to destructors go in DLIST. */
4526
4527 static void
4528 lower_rec_input_clauses (tree clauses, gimple_seq *ilist, gimple_seq *dlist,
4529 omp_context *ctx, struct omp_for_data *fd)
4530 {
4531 tree c, copyin_seq, x, ptr;
4532 bool copyin_by_ref = false;
4533 bool lastprivate_firstprivate = false;
4534 bool reduction_omp_orig_ref = false;
4535 int pass;
4536 bool is_simd = (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
4537 && gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_SIMD);
4538 omplow_simd_context sctx = omplow_simd_context ();
4539 tree simt_lane = NULL_TREE, simtrec = NULL_TREE;
4540 tree ivar = NULL_TREE, lvar = NULL_TREE, uid = NULL_TREE;
4541 gimple_seq llist[4] = { };
4542 tree nonconst_simd_if = NULL_TREE;
4543
4544 copyin_seq = NULL;
4545 sctx.is_simt = is_simd && omp_find_clause (clauses, OMP_CLAUSE__SIMT_);
4546
4547 /* Set max_vf=1 (which will later enforce safelen=1) in simd loops
4548 with data sharing clauses referencing variable sized vars. That
4549 is unnecessarily hard to support and very unlikely to result in
4550 vectorized code anyway. */
4551 if (is_simd)
4552 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
4553 switch (OMP_CLAUSE_CODE (c))
4554 {
4555 case OMP_CLAUSE_LINEAR:
4556 if (OMP_CLAUSE_LINEAR_ARRAY (c))
4557 sctx.max_vf = 1;
4558 /* FALLTHRU */
4559 case OMP_CLAUSE_PRIVATE:
4560 case OMP_CLAUSE_FIRSTPRIVATE:
4561 case OMP_CLAUSE_LASTPRIVATE:
4562 if (is_variable_sized (OMP_CLAUSE_DECL (c)))
4563 sctx.max_vf = 1;
4564 else if (omp_is_reference (OMP_CLAUSE_DECL (c)))
4565 {
4566 tree rtype = TREE_TYPE (TREE_TYPE (OMP_CLAUSE_DECL (c)));
4567 if (!TREE_CONSTANT (TYPE_SIZE_UNIT (rtype)))
4568 sctx.max_vf = 1;
4569 }
4570 break;
4571 case OMP_CLAUSE_REDUCTION:
4572 case OMP_CLAUSE_IN_REDUCTION:
4573 if (TREE_CODE (OMP_CLAUSE_DECL (c)) == MEM_REF
4574 || is_variable_sized (OMP_CLAUSE_DECL (c)))
4575 sctx.max_vf = 1;
4576 else if (omp_is_reference (OMP_CLAUSE_DECL (c)))
4577 {
4578 tree rtype = TREE_TYPE (TREE_TYPE (OMP_CLAUSE_DECL (c)));
4579 if (!TREE_CONSTANT (TYPE_SIZE_UNIT (rtype)))
4580 sctx.max_vf = 1;
4581 }
4582 break;
4583 case OMP_CLAUSE_IF:
4584 if (integer_zerop (OMP_CLAUSE_IF_EXPR (c)))
4585 sctx.max_vf = 1;
4586 else if (TREE_CODE (OMP_CLAUSE_IF_EXPR (c)) != INTEGER_CST)
4587 nonconst_simd_if = OMP_CLAUSE_IF_EXPR (c);
4588 break;
4589 case OMP_CLAUSE_SIMDLEN:
4590 if (integer_onep (OMP_CLAUSE_SIMDLEN_EXPR (c)))
4591 sctx.max_vf = 1;
4592 break;
4593 case OMP_CLAUSE__CONDTEMP_:
4594 /* FIXME: lastprivate(conditional:) not handled for SIMT yet. */
4595 if (sctx.is_simt)
4596 sctx.max_vf = 1;
4597 break;
4598 default:
4599 continue;
4600 }
4601
4602 /* Add a placeholder for simduid. */
4603 if (sctx.is_simt && maybe_ne (sctx.max_vf, 1U))
4604 sctx.simt_eargs.safe_push (NULL_TREE);
4605
4606 unsigned task_reduction_cnt = 0;
4607 unsigned task_reduction_cntorig = 0;
4608 unsigned task_reduction_cnt_full = 0;
4609 unsigned task_reduction_cntorig_full = 0;
4610 unsigned task_reduction_other_cnt = 0;
4611 tree tskred_atype = NULL_TREE, tskred_avar = NULL_TREE;
4612 tree tskred_base = NULL_TREE, tskred_temp = NULL_TREE;
4613 /* Do all the fixed sized types in the first pass, and the variable sized
4614 types in the second pass. This makes sure that the scalar arguments to
4615 the variable sized types are processed before we use them in the
4616 variable sized operations. For task reductions we use 4 passes, in the
4617 first two we ignore them, in the third one gather arguments for
4618 GOMP_task_reduction_remap call and in the last pass actually handle
4619 the task reductions. */
4620 for (pass = 0; pass < ((task_reduction_cnt || task_reduction_other_cnt)
4621 ? 4 : 2); ++pass)
4622 {
4623 if (pass == 2 && task_reduction_cnt)
4624 {
4625 tskred_atype
4626 = build_array_type_nelts (ptr_type_node, task_reduction_cnt
4627 + task_reduction_cntorig);
4628 tskred_avar = create_tmp_var_raw (tskred_atype);
4629 gimple_add_tmp_var (tskred_avar);
4630 TREE_ADDRESSABLE (tskred_avar) = 1;
4631 task_reduction_cnt_full = task_reduction_cnt;
4632 task_reduction_cntorig_full = task_reduction_cntorig;
4633 }
4634 else if (pass == 3 && task_reduction_cnt)
4635 {
4636 x = builtin_decl_explicit (BUILT_IN_GOMP_TASK_REDUCTION_REMAP);
4637 gimple *g
4638 = gimple_build_call (x, 3, size_int (task_reduction_cnt),
4639 size_int (task_reduction_cntorig),
4640 build_fold_addr_expr (tskred_avar));
4641 gimple_seq_add_stmt (ilist, g);
4642 }
4643 if (pass == 3 && task_reduction_other_cnt)
4644 {
4645 /* For reduction clauses, build
4646 tskred_base = (void *) tskred_temp[2]
4647 + omp_get_thread_num () * tskred_temp[1]
4648 or if tskred_temp[1] is known to be constant, that constant
4649 directly. This is the start of the private reduction copy block
4650 for the current thread. */
4651 tree v = create_tmp_var (integer_type_node);
4652 x = builtin_decl_explicit (BUILT_IN_OMP_GET_THREAD_NUM);
4653 gimple *g = gimple_build_call (x, 0);
4654 gimple_call_set_lhs (g, v);
4655 gimple_seq_add_stmt (ilist, g);
4656 c = omp_find_clause (clauses, OMP_CLAUSE__REDUCTEMP_);
4657 tskred_temp = OMP_CLAUSE_DECL (c);
4658 if (is_taskreg_ctx (ctx))
4659 tskred_temp = lookup_decl (tskred_temp, ctx);
4660 tree v2 = create_tmp_var (sizetype);
4661 g = gimple_build_assign (v2, NOP_EXPR, v);
4662 gimple_seq_add_stmt (ilist, g);
4663 if (ctx->task_reductions[0])
4664 v = fold_convert (sizetype, ctx->task_reductions[0]);
4665 else
4666 v = task_reduction_read (ilist, tskred_temp, sizetype, 1);
4667 tree v3 = create_tmp_var (sizetype);
4668 g = gimple_build_assign (v3, MULT_EXPR, v2, v);
4669 gimple_seq_add_stmt (ilist, g);
4670 v = task_reduction_read (ilist, tskred_temp, ptr_type_node, 2);
4671 tskred_base = create_tmp_var (ptr_type_node);
4672 g = gimple_build_assign (tskred_base, POINTER_PLUS_EXPR, v, v3);
4673 gimple_seq_add_stmt (ilist, g);
4674 }
4675 task_reduction_cnt = 0;
4676 task_reduction_cntorig = 0;
4677 task_reduction_other_cnt = 0;
4678 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
4679 {
4680 enum omp_clause_code c_kind = OMP_CLAUSE_CODE (c);
4681 tree var, new_var;
4682 bool by_ref;
4683 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
4684 bool task_reduction_p = false;
4685 bool task_reduction_needs_orig_p = false;
4686 tree cond = NULL_TREE;
4687 tree allocator, allocate_ptr;
4688
4689 switch (c_kind)
4690 {
4691 case OMP_CLAUSE_PRIVATE:
4692 if (OMP_CLAUSE_PRIVATE_DEBUG (c))
4693 continue;
4694 break;
4695 case OMP_CLAUSE_SHARED:
4696 /* Ignore shared directives in teams construct inside
4697 of target construct. */
4698 if (gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS
4699 && !is_host_teams_ctx (ctx))
4700 continue;
4701 if (maybe_lookup_decl (OMP_CLAUSE_DECL (c), ctx) == NULL)
4702 {
4703 gcc_assert (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c)
4704 || is_global_var (OMP_CLAUSE_DECL (c)));
4705 continue;
4706 }
4707 case OMP_CLAUSE_FIRSTPRIVATE:
4708 case OMP_CLAUSE_COPYIN:
4709 break;
4710 case OMP_CLAUSE_LINEAR:
4711 if (!OMP_CLAUSE_LINEAR_NO_COPYIN (c)
4712 && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c))
4713 lastprivate_firstprivate = true;
4714 break;
4715 case OMP_CLAUSE_REDUCTION:
4716 case OMP_CLAUSE_IN_REDUCTION:
4717 if (is_task_ctx (ctx) || OMP_CLAUSE_REDUCTION_TASK (c))
4718 {
4719 task_reduction_p = true;
4720 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION)
4721 {
4722 task_reduction_other_cnt++;
4723 if (pass == 2)
4724 continue;
4725 }
4726 else
4727 task_reduction_cnt++;
4728 if (OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c))
4729 {
4730 var = OMP_CLAUSE_DECL (c);
4731 /* If var is a global variable that isn't privatized
4732 in outer contexts, we don't need to look up the
4733 original address, it is always the address of the
4734 global variable itself. */
4735 if (!DECL_P (var)
4736 || omp_is_reference (var)
4737 || !is_global_var
4738 (maybe_lookup_decl_in_outer_ctx (var, ctx)))
4739 {
4740 task_reduction_needs_orig_p = true;
4741 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION)
4742 task_reduction_cntorig++;
4743 }
4744 }
4745 }
4746 else if (OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c))
4747 reduction_omp_orig_ref = true;
4748 break;
4749 case OMP_CLAUSE__REDUCTEMP_:
4750 if (!is_taskreg_ctx (ctx))
4751 continue;
4752 /* FALLTHRU */
4753 case OMP_CLAUSE__LOOPTEMP_:
4754 /* Handle _looptemp_/_reductemp_ clauses only on
4755 parallel/task. */
4756 if (fd)
4757 continue;
4758 break;
4759 case OMP_CLAUSE_LASTPRIVATE:
4760 if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
4761 {
4762 lastprivate_firstprivate = true;
4763 if (pass != 0 || is_taskloop_ctx (ctx))
4764 continue;
4765 }
4766 /* Even without corresponding firstprivate, if
4767 decl is Fortran allocatable, it needs outer var
4768 reference. */
4769 else if (pass == 0
4770 && lang_hooks.decls.omp_private_outer_ref
4771 (OMP_CLAUSE_DECL (c)))
4772 lastprivate_firstprivate = true;
4773 break;
4774 case OMP_CLAUSE_ALIGNED:
4775 if (pass != 1)
4776 continue;
4777 var = OMP_CLAUSE_DECL (c);
4778 if (TREE_CODE (TREE_TYPE (var)) == POINTER_TYPE
4779 && !is_global_var (var))
4780 {
4781 new_var = maybe_lookup_decl (var, ctx);
4782 if (new_var == NULL_TREE)
4783 new_var = maybe_lookup_decl_in_outer_ctx (var, ctx);
4784 x = builtin_decl_explicit (BUILT_IN_ASSUME_ALIGNED);
4785 tree alarg = omp_clause_aligned_alignment (c);
4786 alarg = fold_convert_loc (clause_loc, size_type_node, alarg);
4787 x = build_call_expr_loc (clause_loc, x, 2, new_var, alarg);
4788 x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x);
4789 x = build2 (MODIFY_EXPR, TREE_TYPE (new_var), new_var, x);
4790 gimplify_and_add (x, ilist);
4791 }
4792 else if (TREE_CODE (TREE_TYPE (var)) == ARRAY_TYPE
4793 && is_global_var (var))
4794 {
4795 tree ptype = build_pointer_type (TREE_TYPE (var)), t, t2;
4796 new_var = lookup_decl (var, ctx);
4797 t = maybe_lookup_decl_in_outer_ctx (var, ctx);
4798 t = build_fold_addr_expr_loc (clause_loc, t);
4799 t2 = builtin_decl_explicit (BUILT_IN_ASSUME_ALIGNED);
4800 tree alarg = omp_clause_aligned_alignment (c);
4801 alarg = fold_convert_loc (clause_loc, size_type_node, alarg);
4802 t = build_call_expr_loc (clause_loc, t2, 2, t, alarg);
4803 t = fold_convert_loc (clause_loc, ptype, t);
4804 x = create_tmp_var (ptype);
4805 t = build2 (MODIFY_EXPR, ptype, x, t);
4806 gimplify_and_add (t, ilist);
4807 t = build_simple_mem_ref_loc (clause_loc, x);
4808 SET_DECL_VALUE_EXPR (new_var, t);
4809 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
4810 }
4811 continue;
4812 case OMP_CLAUSE__CONDTEMP_:
4813 if (is_parallel_ctx (ctx)
4814 || (is_simd && !OMP_CLAUSE__CONDTEMP__ITER (c)))
4815 break;
4816 continue;
4817 default:
4818 continue;
4819 }
4820
4821 if (task_reduction_p != (pass >= 2))
4822 continue;
4823
4824 allocator = NULL_TREE;
4825 allocate_ptr = NULL_TREE;
4826 new_var = var = OMP_CLAUSE_DECL (c);
4827 if ((c_kind == OMP_CLAUSE_REDUCTION
4828 || c_kind == OMP_CLAUSE_IN_REDUCTION)
4829 && TREE_CODE (var) == MEM_REF)
4830 {
4831 var = TREE_OPERAND (var, 0);
4832 if (TREE_CODE (var) == POINTER_PLUS_EXPR)
4833 var = TREE_OPERAND (var, 0);
4834 if (TREE_CODE (var) == INDIRECT_REF
4835 || TREE_CODE (var) == ADDR_EXPR)
4836 var = TREE_OPERAND (var, 0);
4837 if (is_variable_sized (var))
4838 {
4839 gcc_assert (DECL_HAS_VALUE_EXPR_P (var));
4840 var = DECL_VALUE_EXPR (var);
4841 gcc_assert (TREE_CODE (var) == INDIRECT_REF);
4842 var = TREE_OPERAND (var, 0);
4843 gcc_assert (DECL_P (var));
4844 }
4845 new_var = var;
4846 }
4847 if (c_kind != OMP_CLAUSE_COPYIN)
4848 new_var = lookup_decl (var, ctx);
4849
4850 if (c_kind == OMP_CLAUSE_SHARED || c_kind == OMP_CLAUSE_COPYIN)
4851 {
4852 if (pass != 0)
4853 continue;
4854 }
4855 /* C/C++ array section reductions. */
4856 else if ((c_kind == OMP_CLAUSE_REDUCTION
4857 || c_kind == OMP_CLAUSE_IN_REDUCTION)
4858 && var != OMP_CLAUSE_DECL (c))
4859 {
4860 if (pass == 0)
4861 continue;
4862
4863 tree bias = TREE_OPERAND (OMP_CLAUSE_DECL (c), 1);
4864 tree orig_var = TREE_OPERAND (OMP_CLAUSE_DECL (c), 0);
4865
4866 if (TREE_CODE (orig_var) == POINTER_PLUS_EXPR)
4867 {
4868 tree b = TREE_OPERAND (orig_var, 1);
4869 b = maybe_lookup_decl (b, ctx);
4870 if (b == NULL)
4871 {
4872 b = TREE_OPERAND (orig_var, 1);
4873 b = maybe_lookup_decl_in_outer_ctx (b, ctx);
4874 }
4875 if (integer_zerop (bias))
4876 bias = b;
4877 else
4878 {
4879 bias = fold_convert_loc (clause_loc,
4880 TREE_TYPE (b), bias);
4881 bias = fold_build2_loc (clause_loc, PLUS_EXPR,
4882 TREE_TYPE (b), b, bias);
4883 }
4884 orig_var = TREE_OPERAND (orig_var, 0);
4885 }
4886 if (pass == 2)
4887 {
4888 tree out = maybe_lookup_decl_in_outer_ctx (var, ctx);
4889 if (is_global_var (out)
4890 && TREE_CODE (TREE_TYPE (out)) != POINTER_TYPE
4891 && (TREE_CODE (TREE_TYPE (out)) != REFERENCE_TYPE
4892 || (TREE_CODE (TREE_TYPE (TREE_TYPE (out)))
4893 != POINTER_TYPE)))
4894 x = var;
4895 else
4896 {
4897 bool by_ref = use_pointer_for_field (var, NULL);
4898 x = build_receiver_ref (var, by_ref, ctx);
4899 if (TREE_CODE (TREE_TYPE (var)) == REFERENCE_TYPE
4900 && (TREE_CODE (TREE_TYPE (TREE_TYPE (var)))
4901 == POINTER_TYPE))
4902 x = build_fold_addr_expr (x);
4903 }
4904 if (TREE_CODE (orig_var) == INDIRECT_REF)
4905 x = build_simple_mem_ref (x);
4906 else if (TREE_CODE (orig_var) == ADDR_EXPR)
4907 {
4908 if (var == TREE_OPERAND (orig_var, 0))
4909 x = build_fold_addr_expr (x);
4910 }
4911 bias = fold_convert (sizetype, bias);
4912 x = fold_convert (ptr_type_node, x);
4913 x = fold_build2_loc (clause_loc, POINTER_PLUS_EXPR,
4914 TREE_TYPE (x), x, bias);
4915 unsigned cnt = task_reduction_cnt - 1;
4916 if (!task_reduction_needs_orig_p)
4917 cnt += (task_reduction_cntorig_full
4918 - task_reduction_cntorig);
4919 else
4920 cnt = task_reduction_cntorig - 1;
4921 tree r = build4 (ARRAY_REF, ptr_type_node, tskred_avar,
4922 size_int (cnt), NULL_TREE, NULL_TREE);
4923 gimplify_assign (r, x, ilist);
4924 continue;
4925 }
4926
4927 if (TREE_CODE (orig_var) == INDIRECT_REF
4928 || TREE_CODE (orig_var) == ADDR_EXPR)
4929 orig_var = TREE_OPERAND (orig_var, 0);
4930 tree d = OMP_CLAUSE_DECL (c);
4931 tree type = TREE_TYPE (d);
4932 gcc_assert (TREE_CODE (type) == ARRAY_TYPE);
4933 tree v = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
4934 tree sz = v;
4935 const char *name = get_name (orig_var);
4936 if (pass != 3 && !TREE_CONSTANT (v))
4937 {
4938 tree t = maybe_lookup_decl (v, ctx);
4939 if (t)
4940 v = t;
4941 else
4942 v = maybe_lookup_decl_in_outer_ctx (v, ctx);
4943 gimplify_expr (&v, ilist, NULL, is_gimple_val, fb_rvalue);
4944 t = fold_build2_loc (clause_loc, PLUS_EXPR,
4945 TREE_TYPE (v), v,
4946 build_int_cst (TREE_TYPE (v), 1));
4947 sz = fold_build2_loc (clause_loc, MULT_EXPR,
4948 TREE_TYPE (v), t,
4949 TYPE_SIZE_UNIT (TREE_TYPE (type)));
4950 }
4951 if (pass == 3)
4952 {
4953 tree xv = create_tmp_var (ptr_type_node);
4954 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION)
4955 {
4956 unsigned cnt = task_reduction_cnt - 1;
4957 if (!task_reduction_needs_orig_p)
4958 cnt += (task_reduction_cntorig_full
4959 - task_reduction_cntorig);
4960 else
4961 cnt = task_reduction_cntorig - 1;
4962 x = build4 (ARRAY_REF, ptr_type_node, tskred_avar,
4963 size_int (cnt), NULL_TREE, NULL_TREE);
4964
4965 gimple *g = gimple_build_assign (xv, x);
4966 gimple_seq_add_stmt (ilist, g);
4967 }
4968 else
4969 {
4970 unsigned int idx = *ctx->task_reduction_map->get (c);
4971 tree off;
4972 if (ctx->task_reductions[1 + idx])
4973 off = fold_convert (sizetype,
4974 ctx->task_reductions[1 + idx]);
4975 else
4976 off = task_reduction_read (ilist, tskred_temp, sizetype,
4977 7 + 3 * idx + 1);
4978 gimple *g = gimple_build_assign (xv, POINTER_PLUS_EXPR,
4979 tskred_base, off);
4980 gimple_seq_add_stmt (ilist, g);
4981 }
4982 x = fold_convert (build_pointer_type (boolean_type_node),
4983 xv);
4984 if (TREE_CONSTANT (v))
4985 x = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (x), x,
4986 TYPE_SIZE_UNIT (type));
4987 else
4988 {
4989 tree t = maybe_lookup_decl (v, ctx);
4990 if (t)
4991 v = t;
4992 else
4993 v = maybe_lookup_decl_in_outer_ctx (v, ctx);
4994 gimplify_expr (&v, ilist, NULL, is_gimple_val,
4995 fb_rvalue);
4996 t = fold_build2_loc (clause_loc, PLUS_EXPR,
4997 TREE_TYPE (v), v,
4998 build_int_cst (TREE_TYPE (v), 1));
4999 t = fold_build2_loc (clause_loc, MULT_EXPR,
5000 TREE_TYPE (v), t,
5001 TYPE_SIZE_UNIT (TREE_TYPE (type)));
5002 x = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (x), x, t);
5003 }
5004 cond = create_tmp_var (TREE_TYPE (x));
5005 gimplify_assign (cond, x, ilist);
5006 x = xv;
5007 }
5008 else if (lower_private_allocate (var, type, allocator,
5009 allocate_ptr, ilist, ctx,
5010 true,
5011 TREE_CONSTANT (v)
5012 ? TYPE_SIZE_UNIT (type)
5013 : sz))
5014 x = allocate_ptr;
5015 else if (TREE_CONSTANT (v))
5016 {
5017 x = create_tmp_var_raw (type, name);
5018 gimple_add_tmp_var (x);
5019 TREE_ADDRESSABLE (x) = 1;
5020 x = build_fold_addr_expr_loc (clause_loc, x);
5021 }
5022 else
5023 {
5024 tree atmp
5025 = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
5026 tree al = size_int (TYPE_ALIGN (TREE_TYPE (type)));
5027 x = build_call_expr_loc (clause_loc, atmp, 2, sz, al);
5028 }
5029
5030 tree ptype = build_pointer_type (TREE_TYPE (type));
5031 x = fold_convert_loc (clause_loc, ptype, x);
5032 tree y = create_tmp_var (ptype, name);
5033 gimplify_assign (y, x, ilist);
5034 x = y;
5035 tree yb = y;
5036
5037 if (!integer_zerop (bias))
5038 {
5039 bias = fold_convert_loc (clause_loc, pointer_sized_int_node,
5040 bias);
5041 yb = fold_convert_loc (clause_loc, pointer_sized_int_node,
5042 x);
5043 yb = fold_build2_loc (clause_loc, MINUS_EXPR,
5044 pointer_sized_int_node, yb, bias);
5045 x = fold_convert_loc (clause_loc, TREE_TYPE (x), yb);
5046 yb = create_tmp_var (ptype, name);
5047 gimplify_assign (yb, x, ilist);
5048 x = yb;
5049 }
5050
5051 d = TREE_OPERAND (d, 0);
5052 if (TREE_CODE (d) == POINTER_PLUS_EXPR)
5053 d = TREE_OPERAND (d, 0);
5054 if (TREE_CODE (d) == ADDR_EXPR)
5055 {
5056 if (orig_var != var)
5057 {
5058 gcc_assert (is_variable_sized (orig_var));
5059 x = fold_convert_loc (clause_loc, TREE_TYPE (new_var),
5060 x);
5061 gimplify_assign (new_var, x, ilist);
5062 tree new_orig_var = lookup_decl (orig_var, ctx);
5063 tree t = build_fold_indirect_ref (new_var);
5064 DECL_IGNORED_P (new_var) = 0;
5065 TREE_THIS_NOTRAP (t) = 1;
5066 SET_DECL_VALUE_EXPR (new_orig_var, t);
5067 DECL_HAS_VALUE_EXPR_P (new_orig_var) = 1;
5068 }
5069 else
5070 {
5071 x = build2 (MEM_REF, TREE_TYPE (new_var), x,
5072 build_int_cst (ptype, 0));
5073 SET_DECL_VALUE_EXPR (new_var, x);
5074 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
5075 }
5076 }
5077 else
5078 {
5079 gcc_assert (orig_var == var);
5080 if (TREE_CODE (d) == INDIRECT_REF)
5081 {
5082 x = create_tmp_var (ptype, name);
5083 TREE_ADDRESSABLE (x) = 1;
5084 gimplify_assign (x, yb, ilist);
5085 x = build_fold_addr_expr_loc (clause_loc, x);
5086 }
5087 x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x);
5088 gimplify_assign (new_var, x, ilist);
5089 }
5090 /* GOMP_taskgroup_reduction_register memsets the whole
5091 array to zero. If the initializer is zero, we don't
5092 need to initialize it again, just mark it as ever
5093 used unconditionally, i.e. cond = true. */
5094 if (cond
5095 && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) == NULL_TREE
5096 && initializer_zerop (omp_reduction_init (c,
5097 TREE_TYPE (type))))
5098 {
5099 gimple *g = gimple_build_assign (build_simple_mem_ref (cond),
5100 boolean_true_node);
5101 gimple_seq_add_stmt (ilist, g);
5102 continue;
5103 }
5104 tree end = create_artificial_label (UNKNOWN_LOCATION);
5105 if (cond)
5106 {
5107 gimple *g;
5108 if (!is_parallel_ctx (ctx))
5109 {
5110 tree condv = create_tmp_var (boolean_type_node);
5111 g = gimple_build_assign (condv,
5112 build_simple_mem_ref (cond));
5113 gimple_seq_add_stmt (ilist, g);
5114 tree lab1 = create_artificial_label (UNKNOWN_LOCATION);
5115 g = gimple_build_cond (NE_EXPR, condv,
5116 boolean_false_node, end, lab1);
5117 gimple_seq_add_stmt (ilist, g);
5118 gimple_seq_add_stmt (ilist, gimple_build_label (lab1));
5119 }
5120 g = gimple_build_assign (build_simple_mem_ref (cond),
5121 boolean_true_node);
5122 gimple_seq_add_stmt (ilist, g);
5123 }
5124
5125 tree y1 = create_tmp_var (ptype);
5126 gimplify_assign (y1, y, ilist);
5127 tree i2 = NULL_TREE, y2 = NULL_TREE;
5128 tree body2 = NULL_TREE, end2 = NULL_TREE;
5129 tree y3 = NULL_TREE, y4 = NULL_TREE;
5130 if (task_reduction_needs_orig_p)
5131 {
5132 y3 = create_tmp_var (ptype);
5133 tree ref;
5134 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION)
5135 ref = build4 (ARRAY_REF, ptr_type_node, tskred_avar,
5136 size_int (task_reduction_cnt_full
5137 + task_reduction_cntorig - 1),
5138 NULL_TREE, NULL_TREE);
5139 else
5140 {
5141 unsigned int idx = *ctx->task_reduction_map->get (c);
5142 ref = task_reduction_read (ilist, tskred_temp, ptype,
5143 7 + 3 * idx);
5144 }
5145 gimplify_assign (y3, ref, ilist);
5146 }
5147 else if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) || is_simd)
5148 {
5149 if (pass != 3)
5150 {
5151 y2 = create_tmp_var (ptype);
5152 gimplify_assign (y2, y, ilist);
5153 }
5154 if (is_simd || OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c))
5155 {
5156 tree ref = build_outer_var_ref (var, ctx);
5157 /* For ref build_outer_var_ref already performs this. */
5158 if (TREE_CODE (d) == INDIRECT_REF)
5159 gcc_assert (omp_is_reference (var));
5160 else if (TREE_CODE (d) == ADDR_EXPR)
5161 ref = build_fold_addr_expr (ref);
5162 else if (omp_is_reference (var))
5163 ref = build_fold_addr_expr (ref);
5164 ref = fold_convert_loc (clause_loc, ptype, ref);
5165 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c)
5166 && OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c))
5167 {
5168 y3 = create_tmp_var (ptype);
5169 gimplify_assign (y3, unshare_expr (ref), ilist);
5170 }
5171 if (is_simd)
5172 {
5173 y4 = create_tmp_var (ptype);
5174 gimplify_assign (y4, ref, dlist);
5175 }
5176 }
5177 }
5178 tree i = create_tmp_var (TREE_TYPE (v));
5179 gimplify_assign (i, build_int_cst (TREE_TYPE (v), 0), ilist);
5180 tree body = create_artificial_label (UNKNOWN_LOCATION);
5181 gimple_seq_add_stmt (ilist, gimple_build_label (body));
5182 if (y2)
5183 {
5184 i2 = create_tmp_var (TREE_TYPE (v));
5185 gimplify_assign (i2, build_int_cst (TREE_TYPE (v), 0), dlist);
5186 body2 = create_artificial_label (UNKNOWN_LOCATION);
5187 end2 = create_artificial_label (UNKNOWN_LOCATION);
5188 gimple_seq_add_stmt (dlist, gimple_build_label (body2));
5189 }
5190 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
5191 {
5192 tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
5193 tree decl_placeholder
5194 = OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c);
5195 SET_DECL_VALUE_EXPR (decl_placeholder,
5196 build_simple_mem_ref (y1));
5197 DECL_HAS_VALUE_EXPR_P (decl_placeholder) = 1;
5198 SET_DECL_VALUE_EXPR (placeholder,
5199 y3 ? build_simple_mem_ref (y3)
5200 : error_mark_node);
5201 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
5202 x = lang_hooks.decls.omp_clause_default_ctor
5203 (c, build_simple_mem_ref (y1),
5204 y3 ? build_simple_mem_ref (y3) : NULL_TREE);
5205 if (x)
5206 gimplify_and_add (x, ilist);
5207 if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c))
5208 {
5209 gimple_seq tseq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c);
5210 lower_omp (&tseq, ctx);
5211 gimple_seq_add_seq (ilist, tseq);
5212 }
5213 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL;
5214 if (is_simd)
5215 {
5216 SET_DECL_VALUE_EXPR (decl_placeholder,
5217 build_simple_mem_ref (y2));
5218 SET_DECL_VALUE_EXPR (placeholder,
5219 build_simple_mem_ref (y4));
5220 gimple_seq tseq = OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c);
5221 lower_omp (&tseq, ctx);
5222 gimple_seq_add_seq (dlist, tseq);
5223 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
5224 }
5225 DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
5226 DECL_HAS_VALUE_EXPR_P (decl_placeholder) = 0;
5227 if (y2)
5228 {
5229 x = lang_hooks.decls.omp_clause_dtor
5230 (c, build_simple_mem_ref (y2));
5231 if (x)
5232 gimplify_and_add (x, dlist);
5233 }
5234 }
5235 else
5236 {
5237 x = omp_reduction_init (c, TREE_TYPE (type));
5238 enum tree_code code = OMP_CLAUSE_REDUCTION_CODE (c);
5239
5240 /* reduction(-:var) sums up the partial results, so it
5241 acts identically to reduction(+:var). */
5242 if (code == MINUS_EXPR)
5243 code = PLUS_EXPR;
5244
5245 gimplify_assign (build_simple_mem_ref (y1), x, ilist);
5246 if (is_simd)
5247 {
5248 x = build2 (code, TREE_TYPE (type),
5249 build_simple_mem_ref (y4),
5250 build_simple_mem_ref (y2));
5251 gimplify_assign (build_simple_mem_ref (y4), x, dlist);
5252 }
5253 }
5254 gimple *g
5255 = gimple_build_assign (y1, POINTER_PLUS_EXPR, y1,
5256 TYPE_SIZE_UNIT (TREE_TYPE (type)));
5257 gimple_seq_add_stmt (ilist, g);
5258 if (y3)
5259 {
5260 g = gimple_build_assign (y3, POINTER_PLUS_EXPR, y3,
5261 TYPE_SIZE_UNIT (TREE_TYPE (type)));
5262 gimple_seq_add_stmt (ilist, g);
5263 }
5264 g = gimple_build_assign (i, PLUS_EXPR, i,
5265 build_int_cst (TREE_TYPE (i), 1));
5266 gimple_seq_add_stmt (ilist, g);
5267 g = gimple_build_cond (LE_EXPR, i, v, body, end);
5268 gimple_seq_add_stmt (ilist, g);
5269 gimple_seq_add_stmt (ilist, gimple_build_label (end));
5270 if (y2)
5271 {
5272 g = gimple_build_assign (y2, POINTER_PLUS_EXPR, y2,
5273 TYPE_SIZE_UNIT (TREE_TYPE (type)));
5274 gimple_seq_add_stmt (dlist, g);
5275 if (y4)
5276 {
5277 g = gimple_build_assign
5278 (y4, POINTER_PLUS_EXPR, y4,
5279 TYPE_SIZE_UNIT (TREE_TYPE (type)));
5280 gimple_seq_add_stmt (dlist, g);
5281 }
5282 g = gimple_build_assign (i2, PLUS_EXPR, i2,
5283 build_int_cst (TREE_TYPE (i2), 1));
5284 gimple_seq_add_stmt (dlist, g);
5285 g = gimple_build_cond (LE_EXPR, i2, v, body2, end2);
5286 gimple_seq_add_stmt (dlist, g);
5287 gimple_seq_add_stmt (dlist, gimple_build_label (end2));
5288 }
5289 if (allocator)
5290 {
5291 tree f = builtin_decl_explicit (BUILT_IN_GOMP_FREE);
5292 g = gimple_build_call (f, 2, allocate_ptr, allocator);
5293 gimple_seq_add_stmt (dlist, g);
5294 }
5295 continue;
5296 }
5297 else if (pass == 2)
5298 {
5299 if (is_global_var (maybe_lookup_decl_in_outer_ctx (var, ctx)))
5300 x = var;
5301 else
5302 {
5303 bool by_ref = use_pointer_for_field (var, ctx);
5304 x = build_receiver_ref (var, by_ref, ctx);
5305 }
5306 if (!omp_is_reference (var))
5307 x = build_fold_addr_expr (x);
5308 x = fold_convert (ptr_type_node, x);
5309 unsigned cnt = task_reduction_cnt - 1;
5310 if (!task_reduction_needs_orig_p)
5311 cnt += task_reduction_cntorig_full - task_reduction_cntorig;
5312 else
5313 cnt = task_reduction_cntorig - 1;
5314 tree r = build4 (ARRAY_REF, ptr_type_node, tskred_avar,
5315 size_int (cnt), NULL_TREE, NULL_TREE);
5316 gimplify_assign (r, x, ilist);
5317 continue;
5318 }
5319 else if (pass == 3)
5320 {
5321 tree type = TREE_TYPE (new_var);
5322 if (!omp_is_reference (var))
5323 type = build_pointer_type (type);
5324 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION)
5325 {
5326 unsigned cnt = task_reduction_cnt - 1;
5327 if (!task_reduction_needs_orig_p)
5328 cnt += (task_reduction_cntorig_full
5329 - task_reduction_cntorig);
5330 else
5331 cnt = task_reduction_cntorig - 1;
5332 x = build4 (ARRAY_REF, ptr_type_node, tskred_avar,
5333 size_int (cnt), NULL_TREE, NULL_TREE);
5334 }
5335 else
5336 {
5337 unsigned int idx = *ctx->task_reduction_map->get (c);
5338 tree off;
5339 if (ctx->task_reductions[1 + idx])
5340 off = fold_convert (sizetype,
5341 ctx->task_reductions[1 + idx]);
5342 else
5343 off = task_reduction_read (ilist, tskred_temp, sizetype,
5344 7 + 3 * idx + 1);
5345 x = fold_build2 (POINTER_PLUS_EXPR, ptr_type_node,
5346 tskred_base, off);
5347 }
5348 x = fold_convert (type, x);
5349 tree t;
5350 if (omp_is_reference (var))
5351 {
5352 gimplify_assign (new_var, x, ilist);
5353 t = new_var;
5354 new_var = build_simple_mem_ref (new_var);
5355 }
5356 else
5357 {
5358 t = create_tmp_var (type);
5359 gimplify_assign (t, x, ilist);
5360 SET_DECL_VALUE_EXPR (new_var, build_simple_mem_ref (t));
5361 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
5362 }
5363 t = fold_convert (build_pointer_type (boolean_type_node), t);
5364 t = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (t), t,
5365 TYPE_SIZE_UNIT (TREE_TYPE (type)));
5366 cond = create_tmp_var (TREE_TYPE (t));
5367 gimplify_assign (cond, t, ilist);
5368 }
5369 else if (is_variable_sized (var))
5370 {
5371 /* For variable sized types, we need to allocate the
5372 actual storage here. Call alloca and store the
5373 result in the pointer decl that we created elsewhere. */
5374 if (pass == 0)
5375 continue;
5376
5377 if (c_kind != OMP_CLAUSE_FIRSTPRIVATE || !is_task_ctx (ctx))
5378 {
5379 tree tmp;
5380
5381 ptr = DECL_VALUE_EXPR (new_var);
5382 gcc_assert (TREE_CODE (ptr) == INDIRECT_REF);
5383 ptr = TREE_OPERAND (ptr, 0);
5384 gcc_assert (DECL_P (ptr));
5385 x = TYPE_SIZE_UNIT (TREE_TYPE (new_var));
5386
5387 if (lower_private_allocate (var, new_var, allocator,
5388 allocate_ptr, ilist, ctx,
5389 false, x))
5390 tmp = allocate_ptr;
5391 else
5392 {
5393 /* void *tmp = __builtin_alloca */
5394 tree atmp
5395 = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
5396 gcall *stmt
5397 = gimple_build_call (atmp, 2, x,
5398 size_int (DECL_ALIGN (var)));
5399 cfun->calls_alloca = 1;
5400 tmp = create_tmp_var_raw (ptr_type_node);
5401 gimple_add_tmp_var (tmp);
5402 gimple_call_set_lhs (stmt, tmp);
5403
5404 gimple_seq_add_stmt (ilist, stmt);
5405 }
5406
5407 x = fold_convert_loc (clause_loc, TREE_TYPE (ptr), tmp);
5408 gimplify_assign (ptr, x, ilist);
5409 }
5410 }
5411 else if (omp_is_reference (var)
5412 && (c_kind != OMP_CLAUSE_FIRSTPRIVATE
5413 || !OMP_CLAUSE_FIRSTPRIVATE_NO_REFERENCE (c)))
5414 {
5415 /* For references that are being privatized for Fortran,
5416 allocate new backing storage for the new pointer
5417 variable. This allows us to avoid changing all the
5418 code that expects a pointer to something that expects
5419 a direct variable. */
5420 if (pass == 0)
5421 continue;
5422
5423 x = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (new_var)));
5424 if (c_kind == OMP_CLAUSE_FIRSTPRIVATE && is_task_ctx (ctx))
5425 {
5426 x = build_receiver_ref (var, false, ctx);
5427 if (ctx->allocate_map)
5428 if (tree *allocatep = ctx->allocate_map->get (var))
5429 {
5430 allocator = *allocatep;
5431 if (TREE_CODE (allocator) != INTEGER_CST)
5432 allocator = build_outer_var_ref (allocator, ctx);
5433 allocator = fold_convert (pointer_sized_int_node,
5434 allocator);
5435 allocate_ptr = unshare_expr (x);
5436 }
5437 if (allocator == NULL_TREE)
5438 x = build_fold_addr_expr_loc (clause_loc, x);
5439 }
5440 else if (lower_private_allocate (var, new_var, allocator,
5441 allocate_ptr,
5442 ilist, ctx, true, x))
5443 x = allocate_ptr;
5444 else if (TREE_CONSTANT (x))
5445 {
5446 /* For reduction in SIMD loop, defer adding the
5447 initialization of the reference, because if we decide
5448 to use SIMD array for it, the initilization could cause
5449 expansion ICE. Ditto for other privatization clauses. */
5450 if (is_simd)
5451 x = NULL_TREE;
5452 else
5453 {
5454 x = create_tmp_var_raw (TREE_TYPE (TREE_TYPE (new_var)),
5455 get_name (var));
5456 gimple_add_tmp_var (x);
5457 TREE_ADDRESSABLE (x) = 1;
5458 x = build_fold_addr_expr_loc (clause_loc, x);
5459 }
5460 }
5461 else
5462 {
5463 tree atmp
5464 = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
5465 tree rtype = TREE_TYPE (TREE_TYPE (new_var));
5466 tree al = size_int (TYPE_ALIGN (rtype));
5467 x = build_call_expr_loc (clause_loc, atmp, 2, x, al);
5468 }
5469
5470 if (x)
5471 {
5472 x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x);
5473 gimplify_assign (new_var, x, ilist);
5474 }
5475
5476 new_var = build_simple_mem_ref_loc (clause_loc, new_var);
5477 }
5478 else if ((c_kind == OMP_CLAUSE_REDUCTION
5479 || c_kind == OMP_CLAUSE_IN_REDUCTION)
5480 && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
5481 {
5482 if (pass == 0)
5483 continue;
5484 }
5485 else if (pass != 0)
5486 continue;
5487
5488 switch (OMP_CLAUSE_CODE (c))
5489 {
5490 case OMP_CLAUSE_SHARED:
5491 /* Ignore shared directives in teams construct inside
5492 target construct. */
5493 if (gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS
5494 && !is_host_teams_ctx (ctx))
5495 continue;
5496 /* Shared global vars are just accessed directly. */
5497 if (is_global_var (new_var))
5498 break;
5499 /* For taskloop firstprivate/lastprivate, represented
5500 as firstprivate and shared clause on the task, new_var
5501 is the firstprivate var. */
5502 if (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
5503 break;
5504 /* Set up the DECL_VALUE_EXPR for shared variables now. This
5505 needs to be delayed until after fixup_child_record_type so
5506 that we get the correct type during the dereference. */
5507 by_ref = use_pointer_for_field (var, ctx);
5508 x = build_receiver_ref (var, by_ref, ctx);
5509 SET_DECL_VALUE_EXPR (new_var, x);
5510 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
5511
5512 /* ??? If VAR is not passed by reference, and the variable
5513 hasn't been initialized yet, then we'll get a warning for
5514 the store into the omp_data_s structure. Ideally, we'd be
5515 able to notice this and not store anything at all, but
5516 we're generating code too early. Suppress the warning. */
5517 if (!by_ref)
5518 TREE_NO_WARNING (var) = 1;
5519 break;
5520
5521 case OMP_CLAUSE__CONDTEMP_:
5522 if (is_parallel_ctx (ctx))
5523 {
5524 x = build_receiver_ref (var, false, ctx);
5525 SET_DECL_VALUE_EXPR (new_var, x);
5526 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
5527 }
5528 else if (is_simd && !OMP_CLAUSE__CONDTEMP__ITER (c))
5529 {
5530 x = build_zero_cst (TREE_TYPE (var));
5531 goto do_private;
5532 }
5533 break;
5534
5535 case OMP_CLAUSE_LASTPRIVATE:
5536 if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
5537 break;
5538 /* FALLTHRU */
5539
5540 case OMP_CLAUSE_PRIVATE:
5541 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_PRIVATE)
5542 x = build_outer_var_ref (var, ctx);
5543 else if (OMP_CLAUSE_PRIVATE_OUTER_REF (c))
5544 {
5545 if (is_task_ctx (ctx))
5546 x = build_receiver_ref (var, false, ctx);
5547 else
5548 x = build_outer_var_ref (var, ctx, OMP_CLAUSE_PRIVATE);
5549 }
5550 else
5551 x = NULL;
5552 do_private:
5553 tree nx;
5554 bool copy_ctor;
5555 copy_ctor = false;
5556 lower_private_allocate (var, new_var, allocator, allocate_ptr,
5557 ilist, ctx, false, NULL_TREE);
5558 nx = unshare_expr (new_var);
5559 if (is_simd
5560 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
5561 && OMP_CLAUSE_LASTPRIVATE_LOOP_IV (c))
5562 copy_ctor = true;
5563 if (copy_ctor)
5564 nx = lang_hooks.decls.omp_clause_copy_ctor (c, nx, x);
5565 else
5566 nx = lang_hooks.decls.omp_clause_default_ctor (c, nx, x);
5567 if (is_simd)
5568 {
5569 tree y = lang_hooks.decls.omp_clause_dtor (c, new_var);
5570 if ((TREE_ADDRESSABLE (new_var) || nx || y
5571 || (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
5572 && (gimple_omp_for_collapse (ctx->stmt) != 1
5573 || (gimple_omp_for_index (ctx->stmt, 0)
5574 != new_var)))
5575 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE__CONDTEMP_
5576 || omp_is_reference (var))
5577 && lower_rec_simd_input_clauses (new_var, ctx, &sctx,
5578 ivar, lvar))
5579 {
5580 if (omp_is_reference (var))
5581 {
5582 gcc_assert (TREE_CODE (new_var) == MEM_REF);
5583 tree new_vard = TREE_OPERAND (new_var, 0);
5584 gcc_assert (DECL_P (new_vard));
5585 SET_DECL_VALUE_EXPR (new_vard,
5586 build_fold_addr_expr (lvar));
5587 DECL_HAS_VALUE_EXPR_P (new_vard) = 1;
5588 }
5589
5590 if (nx)
5591 {
5592 tree iv = unshare_expr (ivar);
5593 if (copy_ctor)
5594 x = lang_hooks.decls.omp_clause_copy_ctor (c, iv,
5595 x);
5596 else
5597 x = lang_hooks.decls.omp_clause_default_ctor (c,
5598 iv,
5599 x);
5600 }
5601 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE__CONDTEMP_)
5602 {
5603 x = build2 (MODIFY_EXPR, TREE_TYPE (ivar),
5604 unshare_expr (ivar), x);
5605 nx = x;
5606 }
5607 if (nx && x)
5608 gimplify_and_add (x, &llist[0]);
5609 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
5610 && OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c))
5611 {
5612 tree v = new_var;
5613 if (!DECL_P (v))
5614 {
5615 gcc_assert (TREE_CODE (v) == MEM_REF);
5616 v = TREE_OPERAND (v, 0);
5617 gcc_assert (DECL_P (v));
5618 }
5619 v = *ctx->lastprivate_conditional_map->get (v);
5620 tree t = create_tmp_var (TREE_TYPE (v));
5621 tree z = build_zero_cst (TREE_TYPE (v));
5622 tree orig_v
5623 = build_outer_var_ref (var, ctx,
5624 OMP_CLAUSE_LASTPRIVATE);
5625 gimple_seq_add_stmt (dlist,
5626 gimple_build_assign (t, z));
5627 gcc_assert (DECL_HAS_VALUE_EXPR_P (v));
5628 tree civar = DECL_VALUE_EXPR (v);
5629 gcc_assert (TREE_CODE (civar) == ARRAY_REF);
5630 civar = unshare_expr (civar);
5631 TREE_OPERAND (civar, 1) = sctx.idx;
5632 x = build2 (MODIFY_EXPR, TREE_TYPE (t), t,
5633 unshare_expr (civar));
5634 x = build2 (COMPOUND_EXPR, TREE_TYPE (orig_v), x,
5635 build2 (MODIFY_EXPR, TREE_TYPE (orig_v),
5636 orig_v, unshare_expr (ivar)));
5637 tree cond = build2 (LT_EXPR, boolean_type_node, t,
5638 civar);
5639 x = build3 (COND_EXPR, void_type_node, cond, x,
5640 void_node);
5641 gimple_seq tseq = NULL;
5642 gimplify_and_add (x, &tseq);
5643 if (ctx->outer)
5644 lower_omp (&tseq, ctx->outer);
5645 gimple_seq_add_seq (&llist[1], tseq);
5646 }
5647 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
5648 && ctx->for_simd_scan_phase)
5649 {
5650 x = unshare_expr (ivar);
5651 tree orig_v
5652 = build_outer_var_ref (var, ctx,
5653 OMP_CLAUSE_LASTPRIVATE);
5654 x = lang_hooks.decls.omp_clause_assign_op (c, x,
5655 orig_v);
5656 gimplify_and_add (x, &llist[0]);
5657 }
5658 if (y)
5659 {
5660 y = lang_hooks.decls.omp_clause_dtor (c, ivar);
5661 if (y)
5662 gimplify_and_add (y, &llist[1]);
5663 }
5664 break;
5665 }
5666 if (omp_is_reference (var))
5667 {
5668 gcc_assert (TREE_CODE (new_var) == MEM_REF);
5669 tree new_vard = TREE_OPERAND (new_var, 0);
5670 gcc_assert (DECL_P (new_vard));
5671 tree type = TREE_TYPE (TREE_TYPE (new_vard));
5672 x = TYPE_SIZE_UNIT (type);
5673 if (TREE_CONSTANT (x))
5674 {
5675 x = create_tmp_var_raw (type, get_name (var));
5676 gimple_add_tmp_var (x);
5677 TREE_ADDRESSABLE (x) = 1;
5678 x = build_fold_addr_expr_loc (clause_loc, x);
5679 x = fold_convert_loc (clause_loc,
5680 TREE_TYPE (new_vard), x);
5681 gimplify_assign (new_vard, x, ilist);
5682 }
5683 }
5684 }
5685 if (nx)
5686 gimplify_and_add (nx, ilist);
5687 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
5688 && is_simd
5689 && ctx->for_simd_scan_phase)
5690 {
5691 tree orig_v = build_outer_var_ref (var, ctx,
5692 OMP_CLAUSE_LASTPRIVATE);
5693 x = lang_hooks.decls.omp_clause_assign_op (c, new_var,
5694 orig_v);
5695 gimplify_and_add (x, ilist);
5696 }
5697 /* FALLTHRU */
5698
5699 do_dtor:
5700 x = lang_hooks.decls.omp_clause_dtor (c, new_var);
5701 if (x)
5702 gimplify_and_add (x, dlist);
5703 if (allocator)
5704 {
5705 if (!is_gimple_val (allocator))
5706 {
5707 tree avar = create_tmp_var (TREE_TYPE (allocator));
5708 gimplify_assign (avar, allocator, dlist);
5709 allocator = avar;
5710 }
5711 if (!is_gimple_val (allocate_ptr))
5712 {
5713 tree apvar = create_tmp_var (TREE_TYPE (allocate_ptr));
5714 gimplify_assign (apvar, allocate_ptr, dlist);
5715 allocate_ptr = apvar;
5716 }
5717 tree f = builtin_decl_explicit (BUILT_IN_GOMP_FREE);
5718 gimple *g
5719 = gimple_build_call (f, 2, allocate_ptr, allocator);
5720 gimple_seq_add_stmt (dlist, g);
5721 }
5722 break;
5723
5724 case OMP_CLAUSE_LINEAR:
5725 if (!OMP_CLAUSE_LINEAR_NO_COPYIN (c))
5726 goto do_firstprivate;
5727 if (OMP_CLAUSE_LINEAR_NO_COPYOUT (c))
5728 x = NULL;
5729 else
5730 x = build_outer_var_ref (var, ctx);
5731 goto do_private;
5732
5733 case OMP_CLAUSE_FIRSTPRIVATE:
5734 if (is_task_ctx (ctx))
5735 {
5736 if ((omp_is_reference (var)
5737 && !OMP_CLAUSE_FIRSTPRIVATE_NO_REFERENCE (c))
5738 || is_variable_sized (var))
5739 goto do_dtor;
5740 else if (is_global_var (maybe_lookup_decl_in_outer_ctx (var,
5741 ctx))
5742 || use_pointer_for_field (var, NULL))
5743 {
5744 x = build_receiver_ref (var, false, ctx);
5745 if (ctx->allocate_map)
5746 if (tree *allocatep = ctx->allocate_map->get (var))
5747 {
5748 allocator = *allocatep;
5749 if (TREE_CODE (allocator) != INTEGER_CST)
5750 allocator = build_outer_var_ref (allocator, ctx);
5751 allocator = fold_convert (pointer_sized_int_node,
5752 allocator);
5753 allocate_ptr = unshare_expr (x);
5754 x = build_simple_mem_ref (x);
5755 TREE_THIS_NOTRAP (x) = 1;
5756 }
5757 SET_DECL_VALUE_EXPR (new_var, x);
5758 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
5759 goto do_dtor;
5760 }
5761 }
5762 if (OMP_CLAUSE_FIRSTPRIVATE_NO_REFERENCE (c)
5763 && omp_is_reference (var))
5764 {
5765 x = build_outer_var_ref (var, ctx);
5766 gcc_assert (TREE_CODE (x) == MEM_REF
5767 && integer_zerop (TREE_OPERAND (x, 1)));
5768 x = TREE_OPERAND (x, 0);
5769 x = lang_hooks.decls.omp_clause_copy_ctor
5770 (c, unshare_expr (new_var), x);
5771 gimplify_and_add (x, ilist);
5772 goto do_dtor;
5773 }
5774 do_firstprivate:
5775 lower_private_allocate (var, new_var, allocator, allocate_ptr,
5776 ilist, ctx, false, NULL_TREE);
5777 x = build_outer_var_ref (var, ctx);
5778 if (is_simd)
5779 {
5780 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
5781 && gimple_omp_for_combined_into_p (ctx->stmt))
5782 {
5783 tree t = OMP_CLAUSE_LINEAR_STEP (c);
5784 tree stept = TREE_TYPE (t);
5785 tree ct = omp_find_clause (clauses,
5786 OMP_CLAUSE__LOOPTEMP_);
5787 gcc_assert (ct);
5788 tree l = OMP_CLAUSE_DECL (ct);
5789 tree n1 = fd->loop.n1;
5790 tree step = fd->loop.step;
5791 tree itype = TREE_TYPE (l);
5792 if (POINTER_TYPE_P (itype))
5793 itype = signed_type_for (itype);
5794 l = fold_build2 (MINUS_EXPR, itype, l, n1);
5795 if (TYPE_UNSIGNED (itype)
5796 && fd->loop.cond_code == GT_EXPR)
5797 l = fold_build2 (TRUNC_DIV_EXPR, itype,
5798 fold_build1 (NEGATE_EXPR, itype, l),
5799 fold_build1 (NEGATE_EXPR,
5800 itype, step));
5801 else
5802 l = fold_build2 (TRUNC_DIV_EXPR, itype, l, step);
5803 t = fold_build2 (MULT_EXPR, stept,
5804 fold_convert (stept, l), t);
5805
5806 if (OMP_CLAUSE_LINEAR_ARRAY (c))
5807 {
5808 if (omp_is_reference (var))
5809 {
5810 gcc_assert (TREE_CODE (new_var) == MEM_REF);
5811 tree new_vard = TREE_OPERAND (new_var, 0);
5812 gcc_assert (DECL_P (new_vard));
5813 tree type = TREE_TYPE (TREE_TYPE (new_vard));
5814 nx = TYPE_SIZE_UNIT (type);
5815 if (TREE_CONSTANT (nx))
5816 {
5817 nx = create_tmp_var_raw (type,
5818 get_name (var));
5819 gimple_add_tmp_var (nx);
5820 TREE_ADDRESSABLE (nx) = 1;
5821 nx = build_fold_addr_expr_loc (clause_loc,
5822 nx);
5823 nx = fold_convert_loc (clause_loc,
5824 TREE_TYPE (new_vard),
5825 nx);
5826 gimplify_assign (new_vard, nx, ilist);
5827 }
5828 }
5829
5830 x = lang_hooks.decls.omp_clause_linear_ctor
5831 (c, new_var, x, t);
5832 gimplify_and_add (x, ilist);
5833 goto do_dtor;
5834 }
5835
5836 if (POINTER_TYPE_P (TREE_TYPE (x)))
5837 x = fold_build2 (POINTER_PLUS_EXPR,
5838 TREE_TYPE (x), x, t);
5839 else
5840 x = fold_build2 (PLUS_EXPR, TREE_TYPE (x), x, t);
5841 }
5842
5843 if ((OMP_CLAUSE_CODE (c) != OMP_CLAUSE_LINEAR
5844 || TREE_ADDRESSABLE (new_var)
5845 || omp_is_reference (var))
5846 && lower_rec_simd_input_clauses (new_var, ctx, &sctx,
5847 ivar, lvar))
5848 {
5849 if (omp_is_reference (var))
5850 {
5851 gcc_assert (TREE_CODE (new_var) == MEM_REF);
5852 tree new_vard = TREE_OPERAND (new_var, 0);
5853 gcc_assert (DECL_P (new_vard));
5854 SET_DECL_VALUE_EXPR (new_vard,
5855 build_fold_addr_expr (lvar));
5856 DECL_HAS_VALUE_EXPR_P (new_vard) = 1;
5857 }
5858 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR)
5859 {
5860 tree iv = create_tmp_var (TREE_TYPE (new_var));
5861 x = lang_hooks.decls.omp_clause_copy_ctor (c, iv, x);
5862 gimplify_and_add (x, ilist);
5863 gimple_stmt_iterator gsi
5864 = gsi_start_1 (gimple_omp_body_ptr (ctx->stmt));
5865 gassign *g
5866 = gimple_build_assign (unshare_expr (lvar), iv);
5867 gsi_insert_before_without_update (&gsi, g,
5868 GSI_SAME_STMT);
5869 tree t = OMP_CLAUSE_LINEAR_STEP (c);
5870 enum tree_code code = PLUS_EXPR;
5871 if (POINTER_TYPE_P (TREE_TYPE (new_var)))
5872 code = POINTER_PLUS_EXPR;
5873 g = gimple_build_assign (iv, code, iv, t);
5874 gsi_insert_before_without_update (&gsi, g,
5875 GSI_SAME_STMT);
5876 break;
5877 }
5878 x = lang_hooks.decls.omp_clause_copy_ctor
5879 (c, unshare_expr (ivar), x);
5880 gimplify_and_add (x, &llist[0]);
5881 x = lang_hooks.decls.omp_clause_dtor (c, ivar);
5882 if (x)
5883 gimplify_and_add (x, &llist[1]);
5884 break;
5885 }
5886 if (omp_is_reference (var))
5887 {
5888 gcc_assert (TREE_CODE (new_var) == MEM_REF);
5889 tree new_vard = TREE_OPERAND (new_var, 0);
5890 gcc_assert (DECL_P (new_vard));
5891 tree type = TREE_TYPE (TREE_TYPE (new_vard));
5892 nx = TYPE_SIZE_UNIT (type);
5893 if (TREE_CONSTANT (nx))
5894 {
5895 nx = create_tmp_var_raw (type, get_name (var));
5896 gimple_add_tmp_var (nx);
5897 TREE_ADDRESSABLE (nx) = 1;
5898 nx = build_fold_addr_expr_loc (clause_loc, nx);
5899 nx = fold_convert_loc (clause_loc,
5900 TREE_TYPE (new_vard), nx);
5901 gimplify_assign (new_vard, nx, ilist);
5902 }
5903 }
5904 }
5905 x = lang_hooks.decls.omp_clause_copy_ctor
5906 (c, unshare_expr (new_var), x);
5907 gimplify_and_add (x, ilist);
5908 goto do_dtor;
5909
5910 case OMP_CLAUSE__LOOPTEMP_:
5911 case OMP_CLAUSE__REDUCTEMP_:
5912 gcc_assert (is_taskreg_ctx (ctx));
5913 x = build_outer_var_ref (var, ctx);
5914 x = build2 (MODIFY_EXPR, TREE_TYPE (new_var), new_var, x);
5915 gimplify_and_add (x, ilist);
5916 break;
5917
5918 case OMP_CLAUSE_COPYIN:
5919 by_ref = use_pointer_for_field (var, NULL);
5920 x = build_receiver_ref (var, by_ref, ctx);
5921 x = lang_hooks.decls.omp_clause_assign_op (c, new_var, x);
5922 append_to_statement_list (x, &copyin_seq);
5923 copyin_by_ref |= by_ref;
5924 break;
5925
5926 case OMP_CLAUSE_REDUCTION:
5927 case OMP_CLAUSE_IN_REDUCTION:
5928 /* OpenACC reductions are initialized using the
5929 GOACC_REDUCTION internal function. */
5930 if (is_gimple_omp_oacc (ctx->stmt))
5931 break;
5932 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
5933 {
5934 tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
5935 gimple *tseq;
5936 tree ptype = TREE_TYPE (placeholder);
5937 if (cond)
5938 {
5939 x = error_mark_node;
5940 if (OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c)
5941 && !task_reduction_needs_orig_p)
5942 x = var;
5943 else if (OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c))
5944 {
5945 tree pptype = build_pointer_type (ptype);
5946 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION)
5947 x = build4 (ARRAY_REF, ptr_type_node, tskred_avar,
5948 size_int (task_reduction_cnt_full
5949 + task_reduction_cntorig - 1),
5950 NULL_TREE, NULL_TREE);
5951 else
5952 {
5953 unsigned int idx
5954 = *ctx->task_reduction_map->get (c);
5955 x = task_reduction_read (ilist, tskred_temp,
5956 pptype, 7 + 3 * idx);
5957 }
5958 x = fold_convert (pptype, x);
5959 x = build_simple_mem_ref (x);
5960 }
5961 }
5962 else
5963 {
5964 lower_private_allocate (var, new_var, allocator,
5965 allocate_ptr, ilist, ctx, false,
5966 NULL_TREE);
5967 x = build_outer_var_ref (var, ctx);
5968
5969 if (omp_is_reference (var)
5970 && !useless_type_conversion_p (ptype, TREE_TYPE (x)))
5971 x = build_fold_addr_expr_loc (clause_loc, x);
5972 }
5973 SET_DECL_VALUE_EXPR (placeholder, x);
5974 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
5975 tree new_vard = new_var;
5976 if (omp_is_reference (var))
5977 {
5978 gcc_assert (TREE_CODE (new_var) == MEM_REF);
5979 new_vard = TREE_OPERAND (new_var, 0);
5980 gcc_assert (DECL_P (new_vard));
5981 }
5982 tree rvar = NULL_TREE, *rvarp = NULL, rvar2 = NULL_TREE;
5983 if (is_simd
5984 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
5985 && OMP_CLAUSE_REDUCTION_INSCAN (c))
5986 rvarp = &rvar;
5987 if (is_simd
5988 && lower_rec_simd_input_clauses (new_var, ctx, &sctx,
5989 ivar, lvar, rvarp,
5990 &rvar2))
5991 {
5992 if (new_vard == new_var)
5993 {
5994 gcc_assert (DECL_VALUE_EXPR (new_var) == lvar);
5995 SET_DECL_VALUE_EXPR (new_var, ivar);
5996 }
5997 else
5998 {
5999 SET_DECL_VALUE_EXPR (new_vard,
6000 build_fold_addr_expr (ivar));
6001 DECL_HAS_VALUE_EXPR_P (new_vard) = 1;
6002 }
6003 x = lang_hooks.decls.omp_clause_default_ctor
6004 (c, unshare_expr (ivar),
6005 build_outer_var_ref (var, ctx));
6006 if (rvarp && ctx->for_simd_scan_phase)
6007 {
6008 if (x)
6009 gimplify_and_add (x, &llist[0]);
6010 x = lang_hooks.decls.omp_clause_dtor (c, ivar);
6011 if (x)
6012 gimplify_and_add (x, &llist[1]);
6013 break;
6014 }
6015 else if (rvarp)
6016 {
6017 if (x)
6018 {
6019 gimplify_and_add (x, &llist[0]);
6020
6021 tree ivar2 = unshare_expr (lvar);
6022 TREE_OPERAND (ivar2, 1) = sctx.idx;
6023 x = lang_hooks.decls.omp_clause_default_ctor
6024 (c, ivar2, build_outer_var_ref (var, ctx));
6025 gimplify_and_add (x, &llist[0]);
6026
6027 if (rvar2)
6028 {
6029 x = lang_hooks.decls.omp_clause_default_ctor
6030 (c, unshare_expr (rvar2),
6031 build_outer_var_ref (var, ctx));
6032 gimplify_and_add (x, &llist[0]);
6033 }
6034
6035 /* For types that need construction, add another
6036 private var which will be default constructed
6037 and optionally initialized with
6038 OMP_CLAUSE_REDUCTION_GIMPLE_INIT, as in the
6039 loop we want to assign this value instead of
6040 constructing and destructing it in each
6041 iteration. */
6042 tree nv = create_tmp_var_raw (TREE_TYPE (ivar));
6043 gimple_add_tmp_var (nv);
6044 ctx->cb.decl_map->put (TREE_OPERAND (rvar2
6045 ? rvar2
6046 : ivar, 0),
6047 nv);
6048 x = lang_hooks.decls.omp_clause_default_ctor
6049 (c, nv, build_outer_var_ref (var, ctx));
6050 gimplify_and_add (x, ilist);
6051
6052 if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c))
6053 {
6054 tseq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c);
6055 x = DECL_VALUE_EXPR (new_vard);
6056 tree vexpr = nv;
6057 if (new_vard != new_var)
6058 vexpr = build_fold_addr_expr (nv);
6059 SET_DECL_VALUE_EXPR (new_vard, vexpr);
6060 lower_omp (&tseq, ctx);
6061 SET_DECL_VALUE_EXPR (new_vard, x);
6062 gimple_seq_add_seq (ilist, tseq);
6063 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL;
6064 }
6065
6066 x = lang_hooks.decls.omp_clause_dtor (c, nv);
6067 if (x)
6068 gimplify_and_add (x, dlist);
6069 }
6070
6071 tree ref = build_outer_var_ref (var, ctx);
6072 x = unshare_expr (ivar);
6073 x = lang_hooks.decls.omp_clause_assign_op (c, x,
6074 ref);
6075 gimplify_and_add (x, &llist[0]);
6076
6077 ref = build_outer_var_ref (var, ctx);
6078 x = lang_hooks.decls.omp_clause_assign_op (c, ref,
6079 rvar);
6080 gimplify_and_add (x, &llist[3]);
6081
6082 DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
6083 if (new_vard == new_var)
6084 SET_DECL_VALUE_EXPR (new_var, lvar);
6085 else
6086 SET_DECL_VALUE_EXPR (new_vard,
6087 build_fold_addr_expr (lvar));
6088
6089 x = lang_hooks.decls.omp_clause_dtor (c, ivar);
6090 if (x)
6091 gimplify_and_add (x, &llist[1]);
6092
6093 tree ivar2 = unshare_expr (lvar);
6094 TREE_OPERAND (ivar2, 1) = sctx.idx;
6095 x = lang_hooks.decls.omp_clause_dtor (c, ivar2);
6096 if (x)
6097 gimplify_and_add (x, &llist[1]);
6098
6099 if (rvar2)
6100 {
6101 x = lang_hooks.decls.omp_clause_dtor (c, rvar2);
6102 if (x)
6103 gimplify_and_add (x, &llist[1]);
6104 }
6105 break;
6106 }
6107 if (x)
6108 gimplify_and_add (x, &llist[0]);
6109 if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c))
6110 {
6111 tseq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c);
6112 lower_omp (&tseq, ctx);
6113 gimple_seq_add_seq (&llist[0], tseq);
6114 }
6115 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL;
6116 tseq = OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c);
6117 lower_omp (&tseq, ctx);
6118 gimple_seq_add_seq (&llist[1], tseq);
6119 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
6120 DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
6121 if (new_vard == new_var)
6122 SET_DECL_VALUE_EXPR (new_var, lvar);
6123 else
6124 SET_DECL_VALUE_EXPR (new_vard,
6125 build_fold_addr_expr (lvar));
6126 x = lang_hooks.decls.omp_clause_dtor (c, ivar);
6127 if (x)
6128 gimplify_and_add (x, &llist[1]);
6129 break;
6130 }
6131 /* If this is a reference to constant size reduction var
6132 with placeholder, we haven't emitted the initializer
6133 for it because it is undesirable if SIMD arrays are used.
6134 But if they aren't used, we need to emit the deferred
6135 initialization now. */
6136 else if (omp_is_reference (var) && is_simd)
6137 handle_simd_reference (clause_loc, new_vard, ilist);
6138
6139 tree lab2 = NULL_TREE;
6140 if (cond)
6141 {
6142 gimple *g;
6143 if (!is_parallel_ctx (ctx))
6144 {
6145 tree condv = create_tmp_var (boolean_type_node);
6146 tree m = build_simple_mem_ref (cond);
6147 g = gimple_build_assign (condv, m);
6148 gimple_seq_add_stmt (ilist, g);
6149 tree lab1
6150 = create_artificial_label (UNKNOWN_LOCATION);
6151 lab2 = create_artificial_label (UNKNOWN_LOCATION);
6152 g = gimple_build_cond (NE_EXPR, condv,
6153 boolean_false_node,
6154 lab2, lab1);
6155 gimple_seq_add_stmt (ilist, g);
6156 gimple_seq_add_stmt (ilist,
6157 gimple_build_label (lab1));
6158 }
6159 g = gimple_build_assign (build_simple_mem_ref (cond),
6160 boolean_true_node);
6161 gimple_seq_add_stmt (ilist, g);
6162 }
6163 x = lang_hooks.decls.omp_clause_default_ctor
6164 (c, unshare_expr (new_var),
6165 cond ? NULL_TREE
6166 : build_outer_var_ref (var, ctx));
6167 if (x)
6168 gimplify_and_add (x, ilist);
6169
6170 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
6171 && OMP_CLAUSE_REDUCTION_INSCAN (c))
6172 {
6173 if (ctx->for_simd_scan_phase)
6174 goto do_dtor;
6175 if (x || (!is_simd
6176 && OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c)))
6177 {
6178 tree nv = create_tmp_var_raw (TREE_TYPE (new_var));
6179 gimple_add_tmp_var (nv);
6180 ctx->cb.decl_map->put (new_vard, nv);
6181 x = lang_hooks.decls.omp_clause_default_ctor
6182 (c, nv, build_outer_var_ref (var, ctx));
6183 if (x)
6184 gimplify_and_add (x, ilist);
6185 if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c))
6186 {
6187 tseq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c);
6188 tree vexpr = nv;
6189 if (new_vard != new_var)
6190 vexpr = build_fold_addr_expr (nv);
6191 SET_DECL_VALUE_EXPR (new_vard, vexpr);
6192 DECL_HAS_VALUE_EXPR_P (new_vard) = 1;
6193 lower_omp (&tseq, ctx);
6194 SET_DECL_VALUE_EXPR (new_vard, NULL_TREE);
6195 DECL_HAS_VALUE_EXPR_P (new_vard) = 0;
6196 gimple_seq_add_seq (ilist, tseq);
6197 }
6198 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL;
6199 if (is_simd && ctx->scan_exclusive)
6200 {
6201 tree nv2
6202 = create_tmp_var_raw (TREE_TYPE (new_var));
6203 gimple_add_tmp_var (nv2);
6204 ctx->cb.decl_map->put (nv, nv2);
6205 x = lang_hooks.decls.omp_clause_default_ctor
6206 (c, nv2, build_outer_var_ref (var, ctx));
6207 gimplify_and_add (x, ilist);
6208 x = lang_hooks.decls.omp_clause_dtor (c, nv2);
6209 if (x)
6210 gimplify_and_add (x, dlist);
6211 }
6212 x = lang_hooks.decls.omp_clause_dtor (c, nv);
6213 if (x)
6214 gimplify_and_add (x, dlist);
6215 }
6216 else if (is_simd
6217 && ctx->scan_exclusive
6218 && TREE_ADDRESSABLE (TREE_TYPE (new_var)))
6219 {
6220 tree nv2 = create_tmp_var_raw (TREE_TYPE (new_var));
6221 gimple_add_tmp_var (nv2);
6222 ctx->cb.decl_map->put (new_vard, nv2);
6223 x = lang_hooks.decls.omp_clause_dtor (c, nv2);
6224 if (x)
6225 gimplify_and_add (x, dlist);
6226 }
6227 DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
6228 goto do_dtor;
6229 }
6230
6231 if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c))
6232 {
6233 tseq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c);
6234 lower_omp (&tseq, ctx);
6235 gimple_seq_add_seq (ilist, tseq);
6236 }
6237 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL;
6238 if (is_simd)
6239 {
6240 tseq = OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c);
6241 lower_omp (&tseq, ctx);
6242 gimple_seq_add_seq (dlist, tseq);
6243 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
6244 }
6245 DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
6246 if (cond)
6247 {
6248 if (lab2)
6249 gimple_seq_add_stmt (ilist, gimple_build_label (lab2));
6250 break;
6251 }
6252 goto do_dtor;
6253 }
6254 else
6255 {
6256 x = omp_reduction_init (c, TREE_TYPE (new_var));
6257 gcc_assert (TREE_CODE (TREE_TYPE (new_var)) != ARRAY_TYPE);
6258 enum tree_code code = OMP_CLAUSE_REDUCTION_CODE (c);
6259
6260 if (cond)
6261 {
6262 gimple *g;
6263 tree lab2 = NULL_TREE;
6264 /* GOMP_taskgroup_reduction_register memsets the whole
6265 array to zero. If the initializer is zero, we don't
6266 need to initialize it again, just mark it as ever
6267 used unconditionally, i.e. cond = true. */
6268 if (initializer_zerop (x))
6269 {
6270 g = gimple_build_assign (build_simple_mem_ref (cond),
6271 boolean_true_node);
6272 gimple_seq_add_stmt (ilist, g);
6273 break;
6274 }
6275
6276 /* Otherwise, emit
6277 if (!cond) { cond = true; new_var = x; } */
6278 if (!is_parallel_ctx (ctx))
6279 {
6280 tree condv = create_tmp_var (boolean_type_node);
6281 tree m = build_simple_mem_ref (cond);
6282 g = gimple_build_assign (condv, m);
6283 gimple_seq_add_stmt (ilist, g);
6284 tree lab1
6285 = create_artificial_label (UNKNOWN_LOCATION);
6286 lab2 = create_artificial_label (UNKNOWN_LOCATION);
6287 g = gimple_build_cond (NE_EXPR, condv,
6288 boolean_false_node,
6289 lab2, lab1);
6290 gimple_seq_add_stmt (ilist, g);
6291 gimple_seq_add_stmt (ilist,
6292 gimple_build_label (lab1));
6293 }
6294 g = gimple_build_assign (build_simple_mem_ref (cond),
6295 boolean_true_node);
6296 gimple_seq_add_stmt (ilist, g);
6297 gimplify_assign (new_var, x, ilist);
6298 if (lab2)
6299 gimple_seq_add_stmt (ilist, gimple_build_label (lab2));
6300 break;
6301 }
6302
6303 /* reduction(-:var) sums up the partial results, so it
6304 acts identically to reduction(+:var). */
6305 if (code == MINUS_EXPR)
6306 code = PLUS_EXPR;
6307
6308 tree new_vard = new_var;
6309 if (is_simd && omp_is_reference (var))
6310 {
6311 gcc_assert (TREE_CODE (new_var) == MEM_REF);
6312 new_vard = TREE_OPERAND (new_var, 0);
6313 gcc_assert (DECL_P (new_vard));
6314 }
6315 tree rvar = NULL_TREE, *rvarp = NULL, rvar2 = NULL_TREE;
6316 if (is_simd
6317 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
6318 && OMP_CLAUSE_REDUCTION_INSCAN (c))
6319 rvarp = &rvar;
6320 if (is_simd
6321 && lower_rec_simd_input_clauses (new_var, ctx, &sctx,
6322 ivar, lvar, rvarp,
6323 &rvar2))
6324 {
6325 if (new_vard != new_var)
6326 {
6327 SET_DECL_VALUE_EXPR (new_vard,
6328 build_fold_addr_expr (lvar));
6329 DECL_HAS_VALUE_EXPR_P (new_vard) = 1;
6330 }
6331
6332 tree ref = build_outer_var_ref (var, ctx);
6333
6334 if (rvarp)
6335 {
6336 if (ctx->for_simd_scan_phase)
6337 break;
6338 gimplify_assign (ivar, ref, &llist[0]);
6339 ref = build_outer_var_ref (var, ctx);
6340 gimplify_assign (ref, rvar, &llist[3]);
6341 break;
6342 }
6343
6344 gimplify_assign (unshare_expr (ivar), x, &llist[0]);
6345
6346 if (sctx.is_simt)
6347 {
6348 if (!simt_lane)
6349 simt_lane = create_tmp_var (unsigned_type_node);
6350 x = build_call_expr_internal_loc
6351 (UNKNOWN_LOCATION, IFN_GOMP_SIMT_XCHG_BFLY,
6352 TREE_TYPE (ivar), 2, ivar, simt_lane);
6353 x = build2 (code, TREE_TYPE (ivar), ivar, x);
6354 gimplify_assign (ivar, x, &llist[2]);
6355 }
6356 x = build2 (code, TREE_TYPE (ref), ref, ivar);
6357 ref = build_outer_var_ref (var, ctx);
6358 gimplify_assign (ref, x, &llist[1]);
6359
6360 }
6361 else
6362 {
6363 lower_private_allocate (var, new_var, allocator,
6364 allocate_ptr, ilist, ctx,
6365 false, NULL_TREE);
6366 if (omp_is_reference (var) && is_simd)
6367 handle_simd_reference (clause_loc, new_vard, ilist);
6368 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
6369 && OMP_CLAUSE_REDUCTION_INSCAN (c))
6370 break;
6371 gimplify_assign (new_var, x, ilist);
6372 if (is_simd)
6373 {
6374 tree ref = build_outer_var_ref (var, ctx);
6375
6376 x = build2 (code, TREE_TYPE (ref), ref, new_var);
6377 ref = build_outer_var_ref (var, ctx);
6378 gimplify_assign (ref, x, dlist);
6379 }
6380 if (allocator)
6381 goto do_dtor;
6382 }
6383 }
6384 break;
6385
6386 default:
6387 gcc_unreachable ();
6388 }
6389 }
6390 }
6391 if (tskred_avar)
6392 {
6393 tree clobber = build_clobber (TREE_TYPE (tskred_avar));
6394 gimple_seq_add_stmt (ilist, gimple_build_assign (tskred_avar, clobber));
6395 }
6396
6397 if (known_eq (sctx.max_vf, 1U))
6398 {
6399 sctx.is_simt = false;
6400 if (ctx->lastprivate_conditional_map)
6401 {
6402 if (gimple_omp_for_combined_into_p (ctx->stmt))
6403 {
6404 /* Signal to lower_omp_1 that it should use parent context. */
6405 ctx->combined_into_simd_safelen1 = true;
6406 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
6407 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
6408 && OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c))
6409 {
6410 tree o = lookup_decl (OMP_CLAUSE_DECL (c), ctx);
6411 omp_context *outer = ctx->outer;
6412 if (gimple_code (outer->stmt) == GIMPLE_OMP_SCAN)
6413 outer = outer->outer;
6414 tree *v = ctx->lastprivate_conditional_map->get (o);
6415 tree po = lookup_decl (OMP_CLAUSE_DECL (c), outer);
6416 tree *pv = outer->lastprivate_conditional_map->get (po);
6417 *v = *pv;
6418 }
6419 }
6420 else
6421 {
6422 /* When not vectorized, treat lastprivate(conditional:) like
6423 normal lastprivate, as there will be just one simd lane
6424 writing the privatized variable. */
6425 delete ctx->lastprivate_conditional_map;
6426 ctx->lastprivate_conditional_map = NULL;
6427 }
6428 }
6429 }
6430
6431 if (nonconst_simd_if)
6432 {
6433 if (sctx.lane == NULL_TREE)
6434 {
6435 sctx.idx = create_tmp_var (unsigned_type_node);
6436 sctx.lane = create_tmp_var (unsigned_type_node);
6437 }
6438 /* FIXME: For now. */
6439 sctx.is_simt = false;
6440 }
6441
6442 if (sctx.lane || sctx.is_simt)
6443 {
6444 uid = create_tmp_var (ptr_type_node, "simduid");
6445 /* Don't want uninit warnings on simduid, it is always uninitialized,
6446 but we use it not for the value, but for the DECL_UID only. */
6447 TREE_NO_WARNING (uid) = 1;
6448 c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__SIMDUID_);
6449 OMP_CLAUSE__SIMDUID__DECL (c) = uid;
6450 OMP_CLAUSE_CHAIN (c) = gimple_omp_for_clauses (ctx->stmt);
6451 gimple_omp_for_set_clauses (ctx->stmt, c);
6452 }
6453 /* Emit calls denoting privatized variables and initializing a pointer to
6454 structure that holds private variables as fields after ompdevlow pass. */
6455 if (sctx.is_simt)
6456 {
6457 sctx.simt_eargs[0] = uid;
6458 gimple *g
6459 = gimple_build_call_internal_vec (IFN_GOMP_SIMT_ENTER, sctx.simt_eargs);
6460 gimple_call_set_lhs (g, uid);
6461 gimple_seq_add_stmt (ilist, g);
6462 sctx.simt_eargs.release ();
6463
6464 simtrec = create_tmp_var (ptr_type_node, ".omp_simt");
6465 g = gimple_build_call_internal (IFN_GOMP_SIMT_ENTER_ALLOC, 1, uid);
6466 gimple_call_set_lhs (g, simtrec);
6467 gimple_seq_add_stmt (ilist, g);
6468 }
6469 if (sctx.lane)
6470 {
6471 gimple *g = gimple_build_call_internal (IFN_GOMP_SIMD_LANE,
6472 2 + (nonconst_simd_if != NULL),
6473 uid, integer_zero_node,
6474 nonconst_simd_if);
6475 gimple_call_set_lhs (g, sctx.lane);
6476 gimple_stmt_iterator gsi = gsi_start_1 (gimple_omp_body_ptr (ctx->stmt));
6477 gsi_insert_before_without_update (&gsi, g, GSI_SAME_STMT);
6478 g = gimple_build_assign (sctx.lane, INTEGER_CST,
6479 build_int_cst (unsigned_type_node, 0));
6480 gimple_seq_add_stmt (ilist, g);
6481 if (sctx.lastlane)
6482 {
6483 g = gimple_build_call_internal (IFN_GOMP_SIMD_LAST_LANE,
6484 2, uid, sctx.lane);
6485 gimple_call_set_lhs (g, sctx.lastlane);
6486 gimple_seq_add_stmt (dlist, g);
6487 gimple_seq_add_seq (dlist, llist[3]);
6488 }
6489 /* Emit reductions across SIMT lanes in log_2(simt_vf) steps. */
6490 if (llist[2])
6491 {
6492 tree simt_vf = create_tmp_var (unsigned_type_node);
6493 g = gimple_build_call_internal (IFN_GOMP_SIMT_VF, 0);
6494 gimple_call_set_lhs (g, simt_vf);
6495 gimple_seq_add_stmt (dlist, g);
6496
6497 tree t = build_int_cst (unsigned_type_node, 1);
6498 g = gimple_build_assign (simt_lane, INTEGER_CST, t);
6499 gimple_seq_add_stmt (dlist, g);
6500
6501 t = build_int_cst (unsigned_type_node, 0);
6502 g = gimple_build_assign (sctx.idx, INTEGER_CST, t);
6503 gimple_seq_add_stmt (dlist, g);
6504
6505 tree body = create_artificial_label (UNKNOWN_LOCATION);
6506 tree header = create_artificial_label (UNKNOWN_LOCATION);
6507 tree end = create_artificial_label (UNKNOWN_LOCATION);
6508 gimple_seq_add_stmt (dlist, gimple_build_goto (header));
6509 gimple_seq_add_stmt (dlist, gimple_build_label (body));
6510
6511 gimple_seq_add_seq (dlist, llist[2]);
6512
6513 g = gimple_build_assign (simt_lane, LSHIFT_EXPR, simt_lane, integer_one_node);
6514 gimple_seq_add_stmt (dlist, g);
6515
6516 gimple_seq_add_stmt (dlist, gimple_build_label (header));
6517 g = gimple_build_cond (LT_EXPR, simt_lane, simt_vf, body, end);
6518 gimple_seq_add_stmt (dlist, g);
6519
6520 gimple_seq_add_stmt (dlist, gimple_build_label (end));
6521 }
6522 for (int i = 0; i < 2; i++)
6523 if (llist[i])
6524 {
6525 tree vf = create_tmp_var (unsigned_type_node);
6526 g = gimple_build_call_internal (IFN_GOMP_SIMD_VF, 1, uid);
6527 gimple_call_set_lhs (g, vf);
6528 gimple_seq *seq = i == 0 ? ilist : dlist;
6529 gimple_seq_add_stmt (seq, g);
6530 tree t = build_int_cst (unsigned_type_node, 0);
6531 g = gimple_build_assign (sctx.idx, INTEGER_CST, t);
6532 gimple_seq_add_stmt (seq, g);
6533 tree body = create_artificial_label (UNKNOWN_LOCATION);
6534 tree header = create_artificial_label (UNKNOWN_LOCATION);
6535 tree end = create_artificial_label (UNKNOWN_LOCATION);
6536 gimple_seq_add_stmt (seq, gimple_build_goto (header));
6537 gimple_seq_add_stmt (seq, gimple_build_label (body));
6538 gimple_seq_add_seq (seq, llist[i]);
6539 t = build_int_cst (unsigned_type_node, 1);
6540 g = gimple_build_assign (sctx.idx, PLUS_EXPR, sctx.idx, t);
6541 gimple_seq_add_stmt (seq, g);
6542 gimple_seq_add_stmt (seq, gimple_build_label (header));
6543 g = gimple_build_cond (LT_EXPR, sctx.idx, vf, body, end);
6544 gimple_seq_add_stmt (seq, g);
6545 gimple_seq_add_stmt (seq, gimple_build_label (end));
6546 }
6547 }
6548 if (sctx.is_simt)
6549 {
6550 gimple_seq_add_seq (dlist, sctx.simt_dlist);
6551 gimple *g
6552 = gimple_build_call_internal (IFN_GOMP_SIMT_EXIT, 1, simtrec);
6553 gimple_seq_add_stmt (dlist, g);
6554 }
6555
6556 /* The copyin sequence is not to be executed by the main thread, since
6557 that would result in self-copies. Perhaps not visible to scalars,
6558 but it certainly is to C++ operator=. */
6559 if (copyin_seq)
6560 {
6561 x = build_call_expr (builtin_decl_explicit (BUILT_IN_OMP_GET_THREAD_NUM),
6562 0);
6563 x = build2 (NE_EXPR, boolean_type_node, x,
6564 build_int_cst (TREE_TYPE (x), 0));
6565 x = build3 (COND_EXPR, void_type_node, x, copyin_seq, NULL);
6566 gimplify_and_add (x, ilist);
6567 }
6568
6569 /* If any copyin variable is passed by reference, we must ensure the
6570 master thread doesn't modify it before it is copied over in all
6571 threads. Similarly for variables in both firstprivate and
6572 lastprivate clauses we need to ensure the lastprivate copying
6573 happens after firstprivate copying in all threads. And similarly
6574 for UDRs if initializer expression refers to omp_orig. */
6575 if (copyin_by_ref || lastprivate_firstprivate
6576 || (reduction_omp_orig_ref
6577 && !ctx->scan_inclusive
6578 && !ctx->scan_exclusive))
6579 {
6580 /* Don't add any barrier for #pragma omp simd or
6581 #pragma omp distribute. */
6582 if (!is_task_ctx (ctx)
6583 && (gimple_code (ctx->stmt) != GIMPLE_OMP_FOR
6584 || gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_FOR))
6585 gimple_seq_add_stmt (ilist, omp_build_barrier (NULL_TREE));
6586 }
6587
6588 /* If max_vf is non-zero, then we can use only a vectorization factor
6589 up to the max_vf we chose. So stick it into the safelen clause. */
6590 if (maybe_ne (sctx.max_vf, 0U))
6591 {
6592 tree c = omp_find_clause (gimple_omp_for_clauses (ctx->stmt),
6593 OMP_CLAUSE_SAFELEN);
6594 poly_uint64 safe_len;
6595 if (c == NULL_TREE
6596 || (poly_int_tree_p (OMP_CLAUSE_SAFELEN_EXPR (c), &safe_len)
6597 && maybe_gt (safe_len, sctx.max_vf)))
6598 {
6599 c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE_SAFELEN);
6600 OMP_CLAUSE_SAFELEN_EXPR (c) = build_int_cst (integer_type_node,
6601 sctx.max_vf);
6602 OMP_CLAUSE_CHAIN (c) = gimple_omp_for_clauses (ctx->stmt);
6603 gimple_omp_for_set_clauses (ctx->stmt, c);
6604 }
6605 }
6606 }
6607
/* Create temporary variables for lastprivate(conditional:) implementation
   in context CTX with CLAUSES.

   For each lastprivate(conditional:) clause this allocates a per-variable
   iteration-counter temporary (of ITER_TYPE) and records the mapping from
   the privatized decl to that counter in CTX->lastprivate_conditional_map.
   On the first conditional clause encountered it also sets up the shared
   bookkeeping: an iterator _condtemp_ clause (and, for non-simd constructs,
   a _condtemp_ clause holding COND_PTR, a pointer to / array of counters
   shared between threads).  The new artificial clauses are spliced into
   *CLAUSES in place, which is why CLAUSES is passed by reference.  */

static void
lower_lastprivate_conditional_clauses (tree *clauses, omp_context *ctx)
{
  tree iter_type = NULL_TREE;
  tree cond_ptr = NULL_TREE;
  tree iter_var = NULL_TREE;
  bool is_simd = (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
		  && gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_SIMD);
  /* For simd, scanning cursor into the clause chain: each conditional
     lastprivate has a matching _condtemp_ clause somewhere after the
     previously found one.  */
  tree next = *clauses;
  for (tree c = *clauses; c; c = OMP_CLAUSE_CHAIN (c))
    if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
	&& OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c))
      {
	if (is_simd)
	  {
	    /* On simd constructs a _condtemp_ clause has already been
	       added for each conditional lastprivate; find the one
	       pairing with this clause.  */
	    tree cc = omp_find_clause (next, OMP_CLAUSE__CONDTEMP_);
	    gcc_assert (cc);
	    if (iter_type == NULL_TREE)
	      {
		/* First conditional clause: create the shared iterator
		   temporary (marked with OMP_CLAUSE__CONDTEMP__ITER) and
		   chain it in front of *CLAUSES.  */
		iter_type = TREE_TYPE (OMP_CLAUSE_DECL (cc));
		iter_var = create_tmp_var_raw (iter_type);
		DECL_CONTEXT (iter_var) = current_function_decl;
		DECL_SEEN_IN_BIND_EXPR_P (iter_var) = 1;
		/* Register the temporary in the context's BLOCK vars so it
		   gets a proper binding.  */
		DECL_CHAIN (iter_var) = ctx->block_vars;
		ctx->block_vars = iter_var;
		tree c3
		  = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__CONDTEMP_);
		OMP_CLAUSE__CONDTEMP__ITER (c3) = 1;
		OMP_CLAUSE_DECL (c3) = iter_var;
		OMP_CLAUSE_CHAIN (c3) = *clauses;
		*clauses = c3;
		ctx->lastprivate_conditional_map = new hash_map<tree, tree>;
	      }
	    /* Resume the _condtemp_ search after the one just consumed.  */
	    next = OMP_CLAUSE_CHAIN (cc);
	    tree o = lookup_decl (OMP_CLAUSE_DECL (c), ctx);
	    tree v = lookup_decl (OMP_CLAUSE_DECL (cc), ctx);
	    ctx->lastprivate_conditional_map->put (o, v);
	    continue;
	  }
	if (iter_type == NULL)
	  {
	    /* First conditional clause on a non-simd construct: pick the
	       counter type from the loop's iteration variable type (for
	       GIMPLE_OMP_FOR) or unsigned int (for sections).  */
	    if (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR)
	      {
		struct omp_for_data fd;
		omp_extract_for_data (as_a <gomp_for *> (ctx->stmt), &fd,
				      NULL);
		iter_type = unsigned_type_for (fd.iter_type);
	      }
	    else if (gimple_code (ctx->stmt) == GIMPLE_OMP_SECTIONS)
	      iter_type = unsigned_type_node;
	    tree c2 = omp_find_clause (*clauses, OMP_CLAUSE__CONDTEMP_);
	    if (c2)
	      {
		/* A _condtemp_ clause already exists; redirect its decl
		   to the version of the pointer from the outer context.  */
		cond_ptr
		  = lookup_decl_in_outer_ctx (OMP_CLAUSE_DECL (c2), ctx);
		OMP_CLAUSE_DECL (c2) = cond_ptr;
	      }
	    else
	      {
		/* Otherwise create the pointer-to-counters temporary and a
		   _condtemp_ clause for it, chained in front of *CLAUSES.  */
		cond_ptr = create_tmp_var_raw (build_pointer_type (iter_type));
		DECL_CONTEXT (cond_ptr) = current_function_decl;
		DECL_SEEN_IN_BIND_EXPR_P (cond_ptr) = 1;
		DECL_CHAIN (cond_ptr) = ctx->block_vars;
		ctx->block_vars = cond_ptr;
		c2 = build_omp_clause (UNKNOWN_LOCATION,
				       OMP_CLAUSE__CONDTEMP_);
		OMP_CLAUSE_DECL (c2) = cond_ptr;
		OMP_CLAUSE_CHAIN (c2) = *clauses;
		*clauses = c2;
	      }
	    /* The iterator temporary; its clause is spliced immediately
	       after the cond_ptr clause C2 so the pair stays adjacent.  */
	    iter_var = create_tmp_var_raw (iter_type);
	    DECL_CONTEXT (iter_var) = current_function_decl;
	    DECL_SEEN_IN_BIND_EXPR_P (iter_var) = 1;
	    DECL_CHAIN (iter_var) = ctx->block_vars;
	    ctx->block_vars = iter_var;
	    tree c3
	      = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__CONDTEMP_);
	    OMP_CLAUSE__CONDTEMP__ITER (c3) = 1;
	    OMP_CLAUSE_DECL (c3) = iter_var;
	    OMP_CLAUSE_CHAIN (c3) = OMP_CLAUSE_CHAIN (c2);
	    OMP_CLAUSE_CHAIN (c2) = c3;
	    ctx->lastprivate_conditional_map = new hash_map<tree, tree>;
	  }
	/* Per-variable counter temporary, mapped from the privatized
	   decl so lower_lastprivate_clauses can find it later.  */
	tree v = create_tmp_var_raw (iter_type);
	DECL_CONTEXT (v) = current_function_decl;
	DECL_SEEN_IN_BIND_EXPR_P (v) = 1;
	DECL_CHAIN (v) = ctx->block_vars;
	ctx->block_vars = v;
	tree o = lookup_decl (OMP_CLAUSE_DECL (c), ctx);
	ctx->lastprivate_conditional_map->put (o, v);
      }
}
6703
6704
/* Generate code to implement the LASTPRIVATE clauses.  This is used for
   both parallel and workshare constructs.  PREDICATE may be NULL if it's
   always true.  BODY_P is the sequence to insert early initialization
   if needed, STMT_LIST is where the non-conditional lastprivate handling
   goes into and CSTMT_LIST is a sequence that needs to be run in a critical
   section.

   CLAUSES is the clause chain of the construct in CTX; if no lastprivate
   (or copied-out linear) clause is present there, the clauses of a
   combined enclosing parallel are processed instead.  The generated code
   copies each privatized variable back to the original ("outer") variable,
   guarded by PREDICATE (typically "this is the last iteration").  */

static void
lower_lastprivate_clauses (tree clauses, tree predicate, gimple_seq *body_p,
			   gimple_seq *stmt_list, gimple_seq *cstmt_list,
			   omp_context *ctx)
{
  tree x, c, label = NULL, orig_clauses = clauses;
  bool par_clauses = false;
  tree simduid = NULL, lastlane = NULL, simtcond = NULL, simtlast = NULL;
  /* Running offset into the shared conditional-counter array pointed to
     by the _condtemp_ decl; bytes for the pointer form, elements for the
     array form.  */
  unsigned HOST_WIDE_INT conditional_off = 0;
  /* Statements to emit after LABEL, i.e. outside the PREDICATE guard,
     used for lastprivate(conditional:) when combined into safelen(1)
     simd.  */
  gimple_seq post_stmt_list = NULL;

  /* Early exit if there are no lastprivate or linear clauses.  */
  for (; clauses ; clauses = OMP_CLAUSE_CHAIN (clauses))
    if (OMP_CLAUSE_CODE (clauses) == OMP_CLAUSE_LASTPRIVATE
	|| (OMP_CLAUSE_CODE (clauses) == OMP_CLAUSE_LINEAR
	    && !OMP_CLAUSE_LINEAR_NO_COPYOUT (clauses)))
      break;
  if (clauses == NULL)
    {
      /* If this was a workshare clause, see if it had been combined
	 with its parallel.  In that case, look for the clauses on the
	 parallel statement itself.  */
      if (is_parallel_ctx (ctx))
	return;

      ctx = ctx->outer;
      if (ctx == NULL || !is_parallel_ctx (ctx))
	return;

      clauses = omp_find_clause (gimple_omp_parallel_clauses (ctx->stmt),
				 OMP_CLAUSE_LASTPRIVATE);
      if (clauses == NULL)
	return;
      par_clauses = true;
    }

  /* On a simd loop, detect whether it may be executed in SIMT mode and
     fetch the simduid decl used to communicate with the vectorizer.  */
  bool maybe_simt = false;
  if (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
      && gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_SIMD)
    {
      maybe_simt = omp_find_clause (orig_clauses, OMP_CLAUSE__SIMT_);
      simduid = omp_find_clause (orig_clauses, OMP_CLAUSE__SIMDUID_);
      if (simduid)
	simduid = OMP_CLAUSE__SIMDUID__DECL (simduid);
    }

  if (predicate)
    {
      /* Emit "if (PREDICATE) goto label_true; else goto label;" so that
	 the copy-out statements run only on the last iteration.  */
      gcond *stmt;
      tree label_true, arm1, arm2;
      enum tree_code pred_code = TREE_CODE (predicate);

      label = create_artificial_label (UNKNOWN_LOCATION);
      label_true = create_artificial_label (UNKNOWN_LOCATION);
      if (TREE_CODE_CLASS (pred_code) == tcc_comparison)
	{
	  /* Use the comparison's operands directly as the condition.  */
	  arm1 = TREE_OPERAND (predicate, 0);
	  arm2 = TREE_OPERAND (predicate, 1);
	  gimplify_expr (&arm1, stmt_list, NULL, is_gimple_val, fb_rvalue);
	  gimplify_expr (&arm2, stmt_list, NULL, is_gimple_val, fb_rvalue);
	}
      else
	{
	  /* Non-comparison predicate: test it against false.  */
	  arm1 = predicate;
	  gimplify_expr (&arm1, stmt_list, NULL, is_gimple_val, fb_rvalue);
	  arm2 = boolean_false_node;
	  pred_code = NE_EXPR;
	}
      if (maybe_simt)
	{
	  /* Under SIMT, decide collectively: any lane for which the
	     predicate holds makes the whole warp take the branch
	     (GOMP_SIMT_VOTE_ANY); SIMTCOND is kept for later use by
	     GOMP_SIMT_LAST_LANE.  */
	  c = build2 (pred_code, boolean_type_node, arm1, arm2);
	  c = fold_convert (integer_type_node, c);
	  simtcond = create_tmp_var (integer_type_node);
	  gimplify_assign (simtcond, c, stmt_list);
	  gcall *g = gimple_build_call_internal (IFN_GOMP_SIMT_VOTE_ANY,
						 1, simtcond);
	  c = create_tmp_var (integer_type_node);
	  gimple_call_set_lhs (g, c);
	  gimple_seq_add_stmt (stmt_list, g);
	  stmt = gimple_build_cond (NE_EXPR, c, integer_zero_node,
				    label_true, label);
	}
      else
	stmt = gimple_build_cond (pred_code, arm1, arm2, label_true, label);
      gimple_seq_add_stmt (stmt_list, stmt);
      gimple_seq_add_stmt (stmt_list, gimple_build_label (label_true));
    }

  tree cond_ptr = NULL_TREE;
  for (c = clauses; c ;)
    {
      tree var, new_var;
      location_t clause_loc = OMP_CLAUSE_LOCATION (c);
      /* Where the copy-out for this clause goes; may be redirected to
	 CSTMT_LIST (critical section) or POST_STMT_LIST below.  */
      gimple_seq *this_stmt_list = stmt_list;
      tree lab2 = NULL_TREE;

      if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
	  && OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c)
	  && ctx->lastprivate_conditional_map
	  && !ctx->combined_into_simd_safelen1)
	{
	  gcc_assert (body_p);
	  /* On simd constructs the conditional handling is done via the
	     "omp simd array" machinery instead; skip this clause.  */
	  if (simduid)
	    goto next;
	  if (cond_ptr == NULL_TREE)
	    {
	      cond_ptr = omp_find_clause (orig_clauses, OMP_CLAUSE__CONDTEMP_);
	      cond_ptr = OMP_CLAUSE_DECL (cond_ptr);
	    }
	  tree type = TREE_TYPE (TREE_TYPE (cond_ptr));
	  tree o = lookup_decl (OMP_CLAUSE_DECL (c), ctx);
	  /* V is this variable's per-thread iteration counter, created by
	     lower_lastprivate_conditional_clauses; zero it early on in
	     BODY_P.  */
	  tree v = *ctx->lastprivate_conditional_map->get (o);
	  gimplify_assign (v, build_zero_cst (type), body_p);
	  /* The compare-and-store below races between threads, so it must
	     run inside the critical section sequence.  */
	  this_stmt_list = cstmt_list;
	  tree mem;
	  if (POINTER_TYPE_P (TREE_TYPE (cond_ptr)))
	    {
	      /* cond_ptr is a pointer into a shared buffer; index it by
		 a byte offset accumulated across clauses.  */
	      mem = build2 (MEM_REF, type, cond_ptr,
			    build_int_cst (TREE_TYPE (cond_ptr),
					   conditional_off));
	      conditional_off += tree_to_uhwi (TYPE_SIZE_UNIT (type));
	    }
	  else
	    /* cond_ptr is an array; index by element.  */
	    mem = build4 (ARRAY_REF, type, cond_ptr,
			  size_int (conditional_off++), NULL_TREE, NULL_TREE);
	  /* MEM2 is a second copy of the reference for the store, MEM is
	     gimplified for the comparison.  */
	  tree mem2 = copy_node (mem);
	  gimple_seq seq = NULL;
	  mem = force_gimple_operand (mem, &seq, true, NULL_TREE);
	  gimple_seq_add_seq (this_stmt_list, seq);
	  tree lab1 = create_artificial_label (UNKNOWN_LOCATION);
	  lab2 = create_artificial_label (UNKNOWN_LOCATION);
	  /* Emit: if (v > *mem) { *mem = v; <copy-out below>; }, i.e. the
	     thread that saw the highest iteration wins.  LAB2 is emitted
	     after the copy-out at the end of this clause's handling.  */
	  gimple *g = gimple_build_cond (GT_EXPR, v, mem, lab1, lab2);
	  gimple_seq_add_stmt (this_stmt_list, g);
	  gimple_seq_add_stmt (this_stmt_list, gimple_build_label (lab1));
	  gimplify_assign (mem2, v, this_stmt_list);
	}
      else if (predicate
	       && ctx->combined_into_simd_safelen1
	       && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
	       && OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c)
	       && ctx->lastprivate_conditional_map)
	/* For safelen(1) combined simd the conditional copy-out must
	   happen after the predicate-guarded region; defer it.  */
	this_stmt_list = &post_stmt_list;

      if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
	  || (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
	      && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c)))
	{
	  var = OMP_CLAUSE_DECL (c);
	  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
	      && OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c)
	      && is_taskloop_ctx (ctx))
	    {
	      /* Taskloop firstprivate+lastprivate: the privatized copy
		 lives in the enclosing task context.  */
	      gcc_checking_assert (ctx->outer && is_task_ctx (ctx->outer));
	      new_var = lookup_decl (var, ctx->outer);
	    }
	  else
	    {
	      new_var = lookup_decl (var, ctx);
	      /* Avoid uninitialized warnings for lastprivate and
		 for linear iterators.  */
	      if (predicate
		  && (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
		      || OMP_CLAUSE_LINEAR_NO_COPYIN (c)))
		TREE_NO_WARNING (new_var) = 1;
	    }

	  if (!maybe_simt && simduid && DECL_HAS_VALUE_EXPR_P (new_var))
	    {
	      /* The variable was privatized into an "omp simd array"
		 element; copy out from the element belonging to the last
		 executed lane, obtained via GOMP_SIMD_LAST_LANE (computed
		 once and reused for all such clauses).  */
	      tree val = DECL_VALUE_EXPR (new_var);
	      if (TREE_CODE (val) == ARRAY_REF
		  && VAR_P (TREE_OPERAND (val, 0))
		  && lookup_attribute ("omp simd array",
				       DECL_ATTRIBUTES (TREE_OPERAND (val,
								      0))))
		{
		  if (lastlane == NULL)
		    {
		      lastlane = create_tmp_var (unsigned_type_node);
		      gcall *g
			= gimple_build_call_internal (IFN_GOMP_SIMD_LAST_LANE,
						      2, simduid,
						      TREE_OPERAND (val, 1));
		      gimple_call_set_lhs (g, lastlane);
		      gimple_seq_add_stmt (this_stmt_list, g);
		    }
		  new_var = build4 (ARRAY_REF, TREE_TYPE (val),
				    TREE_OPERAND (val, 0), lastlane,
				    NULL_TREE, NULL_TREE);
		  /* The index is known to be in range.  */
		  TREE_THIS_NOTRAP (new_var) = 1;
		}
	    }
	  else if (maybe_simt)
	    {
	      /* Under SIMT, fetch the value from the last active lane via
		 GOMP_SIMT_LAST_LANE (computed once from SIMTCOND) and a
		 cross-lane exchange, then store it in this lane's copy.  */
	      tree val = (DECL_HAS_VALUE_EXPR_P (new_var)
			  ? DECL_VALUE_EXPR (new_var)
			  : new_var);
	      if (simtlast == NULL)
		{
		  simtlast = create_tmp_var (unsigned_type_node);
		  gcall *g = gimple_build_call_internal
		    (IFN_GOMP_SIMT_LAST_LANE, 1, simtcond);
		  gimple_call_set_lhs (g, simtlast);
		  gimple_seq_add_stmt (this_stmt_list, g);
		}
	      x = build_call_expr_internal_loc
		(UNKNOWN_LOCATION, IFN_GOMP_SIMT_XCHG_IDX,
		 TREE_TYPE (val), 2, val, simtlast);
	      new_var = unshare_expr (new_var);
	      gimplify_assign (new_var, x, this_stmt_list);
	      new_var = unshare_expr (new_var);
	    }

	  /* Emit any deferred statement sequence attached to the clause
	     (e.g. class copy assignment or linear update), lowered in
	     this context, then clear it so it is not emitted twice.  */
	  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
	      && OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c))
	    {
	      lower_omp (&OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c), ctx);
	      gimple_seq_add_seq (this_stmt_list,
				  OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c));
	      OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c) = NULL;
	    }
	  else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
		   && OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c))
	    {
	      lower_omp (&OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c), ctx);
	      gimple_seq_add_seq (this_stmt_list,
				  OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c));
	      OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c) = NULL;
	    }

	  /* Determine X, the destination of the copy-out: normally a
	     reference to the variable in the outer context; for a
	     taskloop IV that is a global var, the global itself.  */
	  x = NULL_TREE;
	  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
	      && OMP_CLAUSE_LASTPRIVATE_LOOP_IV (c)
	      && is_taskloop_ctx (ctx))
	    {
	      tree ovar = maybe_lookup_decl_in_outer_ctx (var,
							  ctx->outer->outer);
	      if (is_global_var (ovar))
		x = ovar;
	    }
	  if (!x)
	    x = build_outer_var_ref (var, ctx, OMP_CLAUSE_LASTPRIVATE);
	  if (omp_is_reference (var))
	    new_var = build_simple_mem_ref_loc (clause_loc, new_var);
	  /* The actual copy-out assignment, using the language hook so
	     e.g. C++ copy assignment operators are honored.  */
	  x = lang_hooks.decls.omp_clause_assign_op (c, x, new_var);
	  gimplify_and_add (x, this_stmt_list);

	  /* Close the conditional-lastprivate guard opened above.  */
	  if (lab2)
	    gimple_seq_add_stmt (this_stmt_list, gimple_build_label (lab2));
	}

     next:
      c = OMP_CLAUSE_CHAIN (c);
      if (c == NULL && !par_clauses)
	{
	  /* If this was a workshare clause, see if it had been combined
	     with its parallel.  In that case, continue looking for the
	     clauses also on the parallel statement itself.  */
	  if (is_parallel_ctx (ctx))
	    break;

	  ctx = ctx->outer;
	  if (ctx == NULL || !is_parallel_ctx (ctx))
	    break;

	  c = omp_find_clause (gimple_omp_parallel_clauses (ctx->stmt),
			       OMP_CLAUSE_LASTPRIVATE);
	  par_clauses = true;
	}
    }

  /* LABEL is the "skip the copy-out" target of the PREDICATE branch;
     statements deferred past the guard follow it.  */
  if (label)
    gimple_seq_add_stmt (stmt_list, gimple_build_label (label));
  gimple_seq_add_seq (stmt_list, post_stmt_list);
}
6986
6987 /* Lower the OpenACC reductions of CLAUSES for compute axis LEVEL
6988 (which might be a placeholder). INNER is true if this is an inner
6989 axis of a multi-axis loop. FORK and JOIN are (optional) fork and
6990 join markers. Generate the before-loop forking sequence in
6991 FORK_SEQ and the after-loop joining sequence to JOIN_SEQ. The
6992 general form of these sequences is
6993
6994 GOACC_REDUCTION_SETUP
6995 GOACC_FORK
6996 GOACC_REDUCTION_INIT
6997 ...
6998 GOACC_REDUCTION_FINI
6999 GOACC_JOIN
7000 GOACC_REDUCTION_TEARDOWN. */
7001
static void
lower_oacc_reductions (location_t loc, tree clauses, tree level, bool inner,
		       gcall *fork, gcall *join, gimple_seq *fork_seq,
		       gimple_seq *join_seq, omp_context *ctx)
{
  /* Four sub-sequences, stitched together at the end: SETUP calls go
     before the fork, INIT after it; FINI before the join, TEARDOWN
     after it.  */
  gimple_seq before_fork = NULL;
  gimple_seq after_fork = NULL;
  gimple_seq before_join = NULL;
  gimple_seq after_join = NULL;
  tree init_code = NULL_TREE, fini_code = NULL_TREE,
    setup_code = NULL_TREE, teardown_code = NULL_TREE;
  unsigned offset = 0;

  for (tree c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
    if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION)
      {
	/* No 'reduction' clauses on OpenACC 'kernels'.  */
	gcc_checking_assert (!is_oacc_kernels (ctx));
	/* Likewise, on OpenACC 'kernels' decomposed parts.  */
	gcc_checking_assert (!is_oacc_kernels_decomposed_part (ctx));

	tree orig = OMP_CLAUSE_DECL (c);
	tree var = maybe_lookup_decl (orig, ctx);
	tree ref_to_res = NULL_TREE;
	tree incoming, outgoing, v1, v2, v3;
	bool is_private = false;

	/* Canonicalize the reduction operator: '-' reduces by summing
	   partial results, and the short-circuit logical operators map
	   to their bitwise counterparts.  */
	enum tree_code rcode = OMP_CLAUSE_REDUCTION_CODE (c);
	if (rcode == MINUS_EXPR)
	  rcode = PLUS_EXPR;
	else if (rcode == TRUTH_ANDIF_EXPR)
	  rcode = BIT_AND_EXPR;
	else if (rcode == TRUTH_ORIF_EXPR)
	  rcode = BIT_IOR_EXPR;
	tree op = build_int_cst (unsigned_type_node, rcode);

	if (!var)
	  var = orig;

	incoming = outgoing = var;

	if (!inner)
	  {
	    /* See if an outer construct also reduces this variable.  */
	    omp_context *outer = ctx;

	    while (omp_context *probe = outer->outer)
	      {
		enum gimple_code type = gimple_code (probe->stmt);
		tree cls;

		switch (type)
		  {
		  case GIMPLE_OMP_FOR:
		    cls = gimple_omp_for_clauses (probe->stmt);
		    break;

		  case GIMPLE_OMP_TARGET:
		    /* No 'reduction' clauses inside OpenACC 'kernels'
		       regions.  */
		    gcc_checking_assert (!is_oacc_kernels (probe));

		    if (!is_gimple_omp_offloaded (probe->stmt))
		      goto do_lookup;

		    cls = gimple_omp_target_clauses (probe->stmt);
		    break;

		  default:
		    goto do_lookup;
		  }

		outer = probe;
		for (; cls;  cls = OMP_CLAUSE_CHAIN (cls))
		  if (OMP_CLAUSE_CODE (cls) == OMP_CLAUSE_REDUCTION
		      && orig == OMP_CLAUSE_DECL (cls))
		    {
		      /* The outer construct reduces this variable too;
			 chain to its copy.  */
		      incoming = outgoing = lookup_decl (orig, probe);
		      goto has_outer_reduction;
		    }
		  else if ((OMP_CLAUSE_CODE (cls) == OMP_CLAUSE_FIRSTPRIVATE
			    || OMP_CLAUSE_CODE (cls) == OMP_CLAUSE_PRIVATE)
			   && orig == OMP_CLAUSE_DECL (cls))
		    {
		      is_private = true;
		      goto do_lookup;
		    }
	      }

	  do_lookup:
	    /* This is the outermost construct with this reduction,
	       see if there's a mapping for it.  */
	    if (gimple_code (outer->stmt) == GIMPLE_OMP_TARGET
		&& maybe_lookup_field (orig, outer) && !is_private)
	      {
		ref_to_res = build_receiver_ref (orig, false, outer);
		if (omp_is_reference (orig))
		  ref_to_res = build_simple_mem_ref (ref_to_res);

		tree type = TREE_TYPE (var);
		if (POINTER_TYPE_P (type))
		  type = TREE_TYPE (type);

		outgoing = var;
		incoming = omp_reduction_init_op (loc, rcode, type);
	      }
	    else
	      {
		/* Try to look at enclosing contexts for reduction var,
		   use original if no mapping found.  */
		tree t = NULL_TREE;
		omp_context *c = ctx->outer;
		while (c && !t)
		  {
		    t = maybe_lookup_decl (orig, c);
		    c = c->outer;
		  }
		incoming = outgoing = (t ? t : orig);
	      }

	  has_outer_reduction:;
	  }

	if (!ref_to_res)
	  ref_to_res = integer_zero_node;

	if (omp_is_reference (orig))
	  {
	    /* By-reference reduction: materialize a local object for the
	       reference to point at, then work through dereferences of
	       VAR and of the three temporaries below.  */
	    tree type = TREE_TYPE (var);
	    const char *id = IDENTIFIER_POINTER (DECL_NAME (var));

	    if (!inner)
	      {
		tree x = create_tmp_var (TREE_TYPE (type), id);
		gimplify_assign (var, build_fold_addr_expr (x), fork_seq);
	      }

	    v1 = create_tmp_var (type, id);
	    v2 = create_tmp_var (type, id);
	    v3 = create_tmp_var (type, id);

	    gimplify_assign (v1, var, fork_seq);
	    gimplify_assign (v2, var, fork_seq);
	    gimplify_assign (v3, var, fork_seq);

	    var = build_simple_mem_ref (var);
	    v1 = build_simple_mem_ref (v1);
	    v2 = build_simple_mem_ref (v2);
	    v3 = build_simple_mem_ref (v3);
	    outgoing = build_simple_mem_ref (outgoing);

	    if (!TREE_CONSTANT (incoming))
	      incoming = build_simple_mem_ref (incoming);
	  }
	else
	  /* By value: all four stages operate on VAR directly.  */
	  v1 = v2 = v3 = var;

	/* Determine position in reduction buffer, which may be used
	   by target.  The parser has ensured that this is not a
	   variable-sized type.  */
	fixed_size_mode mode
	  = as_a <fixed_size_mode> (TYPE_MODE (TREE_TYPE (var)));
	unsigned align = GET_MODE_ALIGNMENT (mode) /  BITS_PER_UNIT;
	offset = (offset + align - 1) & ~(align - 1);
	tree off = build_int_cst (sizetype, offset);
	offset += GET_MODE_SIZE (mode);

	/* The stage selector constants are shared by every clause; build
	   them lazily on the first reduction seen.  */
	if (!init_code)
	  {
	    init_code = build_int_cst (integer_type_node,
				       IFN_GOACC_REDUCTION_INIT);
	    fini_code = build_int_cst (integer_type_node,
				       IFN_GOACC_REDUCTION_FINI);
	    setup_code = build_int_cst (integer_type_node,
					IFN_GOACC_REDUCTION_SETUP);
	    teardown_code = build_int_cst (integer_type_node,
					   IFN_GOACC_REDUCTION_TEARDOWN);
	  }

	tree setup_call
	  = build_call_expr_internal_loc (loc, IFN_GOACC_REDUCTION,
					  TREE_TYPE (var), 6, setup_code,
					  unshare_expr (ref_to_res),
					  incoming, level, op, off);
	tree init_call
	  = build_call_expr_internal_loc (loc, IFN_GOACC_REDUCTION,
					  TREE_TYPE (var), 6, init_code,
					  unshare_expr (ref_to_res),
					  v1, level, op, off);
	tree fini_call
	  = build_call_expr_internal_loc (loc, IFN_GOACC_REDUCTION,
					  TREE_TYPE (var), 6, fini_code,
					  unshare_expr (ref_to_res),
					  v2, level, op, off);
	tree teardown_call
	  = build_call_expr_internal_loc (loc, IFN_GOACC_REDUCTION,
					  TREE_TYPE (var), 6, teardown_code,
					  ref_to_res, v3, level, op, off);

	gimplify_assign (v1, setup_call, &before_fork);
	gimplify_assign (v2, init_call, &after_fork);
	gimplify_assign (v3, fini_call, &before_join);
	gimplify_assign (outgoing, teardown_call, &after_join);
      }

  /* Now stitch things together.  */
  gimple_seq_add_seq (fork_seq, before_fork);
  if (fork)
    gimple_seq_add_stmt (fork_seq, fork);
  gimple_seq_add_seq (fork_seq, after_fork);

  gimple_seq_add_seq (join_seq, before_join);
  if (join)
    gimple_seq_add_stmt (join_seq, join);
  gimple_seq_add_seq (join_seq, after_join);
}
7218
7219 /* Generate code to implement the REDUCTION clauses, append it
7220 to STMT_SEQP. CLIST if non-NULL is a pointer to a sequence
7221 that should be emitted also inside of the critical section,
7222 in that case clear *CLIST afterwards, otherwise leave it as is
7223 and let the caller emit it itself. */
7224
static void
lower_reduction_clauses (tree clauses, gimple_seq *stmt_seqp,
			 gimple_seq *clist, omp_context *ctx)
{
  gimple_seq sub_seq = NULL;
  gimple *stmt;
  tree x, c;
  int count = 0;

  /* OpenACC loop reductions are handled elsewhere.  */
  if (is_gimple_omp_oacc (ctx->stmt))
    return;

  /* SIMD reductions are handled in lower_rec_input_clauses.  */
  if (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
      && gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_SIMD)
    return;

  /* inscan reductions are handled elsewhere.  */
  if (ctx->scan_inclusive || ctx->scan_exclusive)
    return;

  /* First see if there is exactly one reduction clause.  Use OMP_ATOMIC
     update in that case, otherwise use a lock.  */
  for (c = clauses; c && count < 2; c = OMP_CLAUSE_CHAIN (c))
    if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
	&& !OMP_CLAUSE_REDUCTION_TASK (c))
      {
	if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c)
	    || TREE_CODE (OMP_CLAUSE_DECL (c)) == MEM_REF)
	  {
	    /* Never use OMP_ATOMIC for array reductions or UDRs.  */
	    count = -1;
	    break;
	  }
	count++;
      }

  if (count == 0)
    return;

  /* Second pass: emit the merge code for each reduction clause.  With
     COUNT == 1 a single atomic update is emitted directly into
     STMT_SEQP; otherwise the merges accumulate in SUB_SEQ, which is
     wrapped in GOMP_atomic_start/end at the end.  */
  for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
    {
      tree var, ref, new_var, orig_var;
      enum tree_code code;
      location_t clause_loc = OMP_CLAUSE_LOCATION (c);

      if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION
	  || OMP_CLAUSE_REDUCTION_TASK (c))
	continue;

      enum omp_clause_code ccode = OMP_CLAUSE_REDUCTION;
      orig_var = var = OMP_CLAUSE_DECL (c);
      if (TREE_CODE (var) == MEM_REF)
	{
	  /* Array section: peel MEM_REF/POINTER_PLUS_EXPR/ADDR_EXPR
	     wrappers to reach the underlying decl.  */
	  var = TREE_OPERAND (var, 0);
	  if (TREE_CODE (var) == POINTER_PLUS_EXPR)
	    var = TREE_OPERAND (var, 0);
	  if (TREE_CODE (var) == ADDR_EXPR)
	    var = TREE_OPERAND (var, 0);
	  else
	    {
	      /* If this is a pointer or referenced based array
		 section, the var could be private in the outer
		 context e.g. on orphaned loop construct.  Pretend this
		 is private variable's outer reference.  */
	      ccode = OMP_CLAUSE_PRIVATE;
	      if (TREE_CODE (var) == INDIRECT_REF)
		var = TREE_OPERAND (var, 0);
	    }
	  orig_var = var;
	  if (is_variable_sized (var))
	    {
	      gcc_assert (DECL_HAS_VALUE_EXPR_P (var));
	      var = DECL_VALUE_EXPR (var);
	      gcc_assert (TREE_CODE (var) == INDIRECT_REF);
	      var = TREE_OPERAND (var, 0);
	      gcc_assert (DECL_P (var));
	    }
	}
      new_var = lookup_decl (var, ctx);
      if (var == OMP_CLAUSE_DECL (c) && omp_is_reference (var))
	new_var = build_simple_mem_ref_loc (clause_loc, new_var);
      ref = build_outer_var_ref (var, ctx, ccode);
      code = OMP_CLAUSE_REDUCTION_CODE (c);

      /* reduction(-:var) sums up the partial results, so it acts
	 identically to reduction(+:var).  */
      if (code == MINUS_EXPR)
	code = PLUS_EXPR;

      if (count == 1)
	{
	  /* Single clause: merge with one relaxed OMP_ATOMIC update and
	     return immediately (no lock needed).  */
	  tree addr = build_fold_addr_expr_loc (clause_loc, ref);

	  addr = save_expr (addr);
	  ref = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (addr)), addr);
	  x = fold_build2_loc (clause_loc, code, TREE_TYPE (ref), ref, new_var);
	  x = build2 (OMP_ATOMIC, void_type_node, addr, x);
	  OMP_ATOMIC_MEMORY_ORDER (x) = OMP_MEMORY_ORDER_RELAXED;
	  gimplify_and_add (x, stmt_seqp);
	  return;
	}
      else if (TREE_CODE (OMP_CLAUSE_DECL (c)) == MEM_REF)
	{
	  /* Array section reduction: emit a loop with index I running
	     from 0 to V (the maximum index), merging each private
	     element into the outer array and stepping both pointers by
	     the element size.  */
	  tree d = OMP_CLAUSE_DECL (c);
	  tree type = TREE_TYPE (d);
	  tree v = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
	  tree i = create_tmp_var (TREE_TYPE (v));
	  tree ptype = build_pointer_type (TREE_TYPE (type));
	  tree bias = TREE_OPERAND (d, 1);
	  d = TREE_OPERAND (d, 0);
	  if (TREE_CODE (d) == POINTER_PLUS_EXPR)
	    {
	      tree b = TREE_OPERAND (d, 1);
	      b = maybe_lookup_decl (b, ctx);
	      if (b == NULL)
		{
		  b = TREE_OPERAND (d, 1);
		  b = maybe_lookup_decl_in_outer_ctx (b, ctx);
		}
	      if (integer_zerop (bias))
		bias = b;
	      else
		{
		  bias = fold_convert_loc (clause_loc, TREE_TYPE (b), bias);
		  bias = fold_build2_loc (clause_loc, PLUS_EXPR,
					  TREE_TYPE (b), b, bias);
		}
	      d = TREE_OPERAND (d, 0);
	    }
	  /* For ref build_outer_var_ref already performs this, so
	     only new_var needs a dereference.  */
	  if (TREE_CODE (d) == INDIRECT_REF)
	    {
	      new_var = build_simple_mem_ref_loc (clause_loc, new_var);
	      gcc_assert (omp_is_reference (var) && var == orig_var);
	    }
	  else if (TREE_CODE (d) == ADDR_EXPR)
	    {
	      if (orig_var == var)
		{
		  new_var = build_fold_addr_expr (new_var);
		  ref = build_fold_addr_expr (ref);
		}
	    }
	  else
	    {
	      gcc_assert (orig_var == var);
	      if (omp_is_reference (var))
		ref = build_fold_addr_expr (ref);
	    }
	  if (DECL_P (v))
	    {
	      tree t = maybe_lookup_decl (v, ctx);
	      if (t)
		v = t;
	      else
		v = maybe_lookup_decl_in_outer_ctx (v, ctx);
	      gimplify_expr (&v, stmt_seqp, NULL, is_gimple_val, fb_rvalue);
	    }
	  if (!integer_zerop (bias))
	    {
	      bias = fold_convert_loc (clause_loc, sizetype, bias);
	      new_var = fold_build2_loc (clause_loc, POINTER_PLUS_EXPR,
					 TREE_TYPE (new_var), new_var,
					 unshare_expr (bias));
	      ref = fold_build2_loc (clause_loc, POINTER_PLUS_EXPR,
				     TREE_TYPE (ref), ref, bias);
	    }
	  new_var = fold_convert_loc (clause_loc, ptype, new_var);
	  ref = fold_convert_loc (clause_loc, ptype, ref);
	  tree m = create_tmp_var (ptype);
	  gimplify_assign (m, new_var, stmt_seqp);
	  new_var = m;
	  m = create_tmp_var (ptype);
	  gimplify_assign (m, ref, stmt_seqp);
	  ref = m;
	  gimplify_assign (i, build_int_cst (TREE_TYPE (v), 0), stmt_seqp);
	  tree body = create_artificial_label (UNKNOWN_LOCATION);
	  tree end = create_artificial_label (UNKNOWN_LOCATION);
	  gimple_seq_add_stmt (&sub_seq, gimple_build_label (body));
	  tree priv = build_simple_mem_ref_loc (clause_loc, new_var);
	  tree out = build_simple_mem_ref_loc (clause_loc, ref);
	  if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
	    {
	      /* User-defined reduction: splice in the lowered merge
		 sequence, binding the placeholders to the current
		 elements via DECL_VALUE_EXPR.  */
	      tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
	      tree decl_placeholder
		= OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c);
	      SET_DECL_VALUE_EXPR (placeholder, out);
	      DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
	      SET_DECL_VALUE_EXPR (decl_placeholder, priv);
	      DECL_HAS_VALUE_EXPR_P (decl_placeholder) = 1;
	      lower_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c), ctx);
	      gimple_seq_add_seq (&sub_seq,
				  OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c));
	      OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
	      OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) = NULL;
	      OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c) = NULL;
	    }
	  else
	    {
	      x = build2 (code, TREE_TYPE (out), out, priv);
	      out = unshare_expr (out);
	      gimplify_assign (out, x, &sub_seq);
	    }
	  gimple *g = gimple_build_assign (new_var, POINTER_PLUS_EXPR, new_var,
					  TYPE_SIZE_UNIT (TREE_TYPE (type)));
	  gimple_seq_add_stmt (&sub_seq, g);
	  g = gimple_build_assign (ref, POINTER_PLUS_EXPR, ref,
				   TYPE_SIZE_UNIT (TREE_TYPE (type)));
	  gimple_seq_add_stmt (&sub_seq, g);
	  g = gimple_build_assign (i, PLUS_EXPR, i,
				   build_int_cst (TREE_TYPE (i), 1));
	  gimple_seq_add_stmt (&sub_seq, g);
	  g = gimple_build_cond (LE_EXPR, i, v, body, end);
	  gimple_seq_add_stmt (&sub_seq, g);
	  gimple_seq_add_stmt (&sub_seq, gimple_build_label (end));
	}
      else if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
	{
	  /* Scalar user-defined reduction: bind the placeholder to the
	     outer ref and splice in the lowered merge sequence.  */
	  tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);

	  if (omp_is_reference (var)
	      && !useless_type_conversion_p (TREE_TYPE (placeholder),
					     TREE_TYPE (ref)))
	    ref = build_fold_addr_expr_loc (clause_loc, ref);
	  SET_DECL_VALUE_EXPR (placeholder, ref);
	  DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
	  lower_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c), ctx);
	  gimple_seq_add_seq (&sub_seq, OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c));
	  OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
	  OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) = NULL;
	}
      else
	{
	  /* Plain scalar reduction: outer = outer OP private.  */
	  x = build2 (code, TREE_TYPE (ref), ref, new_var);
	  ref = build_outer_var_ref (var, ctx);
	  gimplify_assign (ref, x, &sub_seq);
	}
    }

  /* Serialize all the merges between GOMP_atomic_start/end.  */
  stmt = gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_ATOMIC_START),
			    0);
  gimple_seq_add_stmt (stmt_seqp, stmt);

  gimple_seq_add_seq (stmt_seqp, sub_seq);

  if (clist)
    {
      gimple_seq_add_seq (stmt_seqp, *clist);
      *clist = NULL;
    }

  stmt = gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_ATOMIC_END),
			    0);
  gimple_seq_add_stmt (stmt_seqp, stmt);
}
7483
7484
7485 /* Generate code to implement the COPYPRIVATE clauses. */
7486
static void
lower_copyprivate_clauses (tree clauses, gimple_seq *slist, gimple_seq *rlist,
			   omp_context *ctx)
{
  tree c;

  /* For each COPYPRIVATE clause: the broadcasting side stores VAR (or
     its address, when passed by reference) into the sender record
     (SLIST); the receiving side then copies the value back out of the
     receiver record (RLIST).  */
  for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
    {
      tree var, new_var, ref, x;
      bool by_ref;
      location_t clause_loc = OMP_CLAUSE_LOCATION (c);

      if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_COPYPRIVATE)
	continue;

      var = OMP_CLAUSE_DECL (c);
      by_ref = use_pointer_for_field (var, NULL);

      /* Sender side: fill in the field, taking the address if the
	 field holds a pointer.  */
      ref = build_sender_ref (var, ctx);
      x = new_var = lookup_decl_in_outer_ctx (var, ctx);
      if (by_ref)
	{
	  x = build_fold_addr_expr_loc (clause_loc, new_var);
	  x = fold_convert_loc (clause_loc, TREE_TYPE (ref), x);
	}
      gimplify_assign (ref, x, slist);

      /* Receiver side: read the field back, undoing the indirection
	 for by-ref fields and reference-typed variables.  */
      ref = build_receiver_ref (var, false, ctx);
      if (by_ref)
	{
	  ref = fold_convert_loc (clause_loc,
				  build_pointer_type (TREE_TYPE (new_var)),
				  ref);
	  ref = build_fold_indirect_ref_loc (clause_loc, ref);
	}
      if (omp_is_reference (var))
	{
	  ref = fold_convert_loc (clause_loc, TREE_TYPE (new_var), ref);
	  ref = build_simple_mem_ref_loc (clause_loc, ref);
	  new_var = build_simple_mem_ref_loc (clause_loc, new_var);
	}
      /* Let the frontend emit the (possibly non-trivial) assignment.  */
      x = lang_hooks.decls.omp_clause_assign_op (c, new_var, ref);
      gimplify_and_add (x, rlist);
    }
}
7532
7533
7534 /* Generate code to implement the clauses, FIRSTPRIVATE, COPYIN, LASTPRIVATE,
7535 and REDUCTION from the sender (aka parent) side. */
7536
static void
lower_send_clauses (tree clauses, gimple_seq *ilist, gimple_seq *olist,
		    omp_context *ctx)
{
  tree c, t;
  int ignored_looptemp = 0;
  bool is_taskloop = false;

  /* For taskloop, ignore first two _looptemp_ clauses, those are initialized
     by GOMP_taskloop.  */
  if (is_task_ctx (ctx) && gimple_omp_task_taskloop_p (ctx->stmt))
    {
      ignored_looptemp = 2;
      is_taskloop = true;
    }

  for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
    {
      tree val, ref, x, var;
      bool by_ref, do_in = false, do_out = false;
      location_t clause_loc = OMP_CLAUSE_LOCATION (c);

      /* Filter: 'break' falls through to handle the clause below,
	 'continue' skips clauses that need no sender-side code.  */
      switch (OMP_CLAUSE_CODE (c))
	{
	case OMP_CLAUSE_PRIVATE:
	  if (OMP_CLAUSE_PRIVATE_OUTER_REF (c))
	    break;
	  continue;
	case OMP_CLAUSE_FIRSTPRIVATE:
	case OMP_CLAUSE_COPYIN:
	case OMP_CLAUSE_LASTPRIVATE:
	case OMP_CLAUSE_IN_REDUCTION:
	case OMP_CLAUSE__REDUCTEMP_:
	  break;
	case OMP_CLAUSE_REDUCTION:
	  if (is_task_ctx (ctx) || OMP_CLAUSE_REDUCTION_TASK (c))
	    continue;
	  break;
	case OMP_CLAUSE_SHARED:
	  if (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
	    break;
	  continue;
	case OMP_CLAUSE__LOOPTEMP_:
	  if (ignored_looptemp)
	    {
	      ignored_looptemp--;
	      continue;
	    }
	  break;
	default:
	  continue;
	}

      val = OMP_CLAUSE_DECL (c);
      if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
	   || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_IN_REDUCTION)
	  && TREE_CODE (val) == MEM_REF)
	{
	  /* Array-section reduction: strip wrappers to reach the base
	     decl; variable-sized bases are handled elsewhere.  */
	  val = TREE_OPERAND (val, 0);
	  if (TREE_CODE (val) == POINTER_PLUS_EXPR)
	    val = TREE_OPERAND (val, 0);
	  if (TREE_CODE (val) == INDIRECT_REF
	      || TREE_CODE (val) == ADDR_EXPR)
	    val = TREE_OPERAND (val, 0);
	  if (is_variable_sized (val))
	    continue;
	}

      /* For OMP_CLAUSE_SHARED_FIRSTPRIVATE, look beyond the
	 outer taskloop region.  */
      omp_context *ctx_for_o = ctx;
      if (is_taskloop
	  && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED
	  && OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
	ctx_for_o = ctx->outer;

      var = lookup_decl_in_outer_ctx (val, ctx_for_o);

      if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_COPYIN
	  && is_global_var (var)
	  && (val == OMP_CLAUSE_DECL (c)
	      || !is_task_ctx (ctx)
	      || (TREE_CODE (TREE_TYPE (val)) != POINTER_TYPE
		  && (TREE_CODE (TREE_TYPE (val)) != REFERENCE_TYPE
		      || (TREE_CODE (TREE_TYPE (TREE_TYPE (val)))
			  != POINTER_TYPE)))))
	continue;

      t = omp_member_access_dummy_var (var);
      if (t)
	{
	  /* VAR stands for a member access (this->field); remap its
	     value expr to the outer context's base object.  */
	  var = DECL_VALUE_EXPR (var);
	  tree o = maybe_lookup_decl_in_outer_ctx (t, ctx_for_o);
	  if (o != t)
	    var = unshare_and_remap (var, t, o);
	  else
	    var = unshare_expr (var);
	}

      if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED)
	{
	  /* Handle taskloop firstprivate/lastprivate, where the
	     lastprivate on GIMPLE_OMP_TASK is represented as
	     OMP_CLAUSE_SHARED_FIRSTPRIVATE.  */
	  tree f = lookup_sfield ((splay_tree_key) &DECL_UID (val), ctx);
	  x = omp_build_component_ref (ctx->sender_decl, f);
	  if (use_pointer_for_field (val, ctx))
	    var = build_fold_addr_expr (var);
	  gimplify_assign (x, var, ilist);
	  DECL_ABSTRACT_ORIGIN (f) = NULL;
	  continue;
	}

      if (((OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION
	    && OMP_CLAUSE_CODE (c) != OMP_CLAUSE_IN_REDUCTION)
	   || val == OMP_CLAUSE_DECL (c))
	  && is_variable_sized (val))
	continue;
      by_ref = use_pointer_for_field (val, NULL);

      /* Decide the direction of the copy: DO_IN sends the value into
	 the child (ILIST), DO_OUT copies it back out (OLIST).  */
      switch (OMP_CLAUSE_CODE (c))
	{
	case OMP_CLAUSE_FIRSTPRIVATE:
	  if (OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (c)
	      && !by_ref
	      && is_task_ctx (ctx))
	    TREE_NO_WARNING (var) = 1;
	  do_in = true;
	  break;

	case OMP_CLAUSE_PRIVATE:
	case OMP_CLAUSE_COPYIN:
	case OMP_CLAUSE__LOOPTEMP_:
	case OMP_CLAUSE__REDUCTEMP_:
	  do_in = true;
	  break;

	case OMP_CLAUSE_LASTPRIVATE:
	  if (by_ref || omp_is_reference (val))
	    {
	      if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
		continue;
	      do_in = true;
	    }
	  else
	    {
	      do_out = true;
	      if (lang_hooks.decls.omp_private_outer_ref (val))
		do_in = true;
	    }
	  break;

	case OMP_CLAUSE_REDUCTION:
	case OMP_CLAUSE_IN_REDUCTION:
	  do_in = true;
	  if (val == OMP_CLAUSE_DECL (c))
	    {
	      if (is_task_ctx (ctx))
		by_ref = use_pointer_for_field (val, ctx);
	      else
		do_out = !(by_ref || omp_is_reference (val));
	    }
	  else
	    by_ref = TREE_CODE (TREE_TYPE (val)) == ARRAY_TYPE;
	  break;

	default:
	  gcc_unreachable ();
	}

      if (do_in)
	{
	  ref = build_sender_ref (val, ctx);
	  x = by_ref ? build_fold_addr_expr_loc (clause_loc, var) : var;
	  gimplify_assign (ref, x, ilist);
	  if (is_task_ctx (ctx))
	    DECL_ABSTRACT_ORIGIN (TREE_OPERAND (ref, 1)) = NULL;
	}

      if (do_out)
	{
	  ref = build_sender_ref (val, ctx);
	  gimplify_assign (var, ref, olist);
	}
    }
}
7723
7724 /* Generate code to implement SHARED from the sender (aka parent)
7725 side. This is trickier, since GIMPLE_OMP_PARALLEL_CLAUSES doesn't
7726 list things that got automatically shared. */
7727
static void
lower_send_shared_vars (gimple_seq *ilist, gimple_seq *olist, omp_context *ctx)
{
  tree var, ovar, nvar, t, f, x, record_type;

  if (ctx->record_type == NULL)
    return;

  /* Walk the fields of the sender record; each field's abstract origin
     points back at the shared variable it transports.  */
  record_type = ctx->srecord_type ? ctx->srecord_type : ctx->record_type;
  for (f = TYPE_FIELDS (record_type); f ; f = DECL_CHAIN (f))
    {
      ovar = DECL_ABSTRACT_ORIGIN (f);
      if (!ovar || TREE_CODE (ovar) == FIELD_DECL)
	continue;

      /* Skip variables with no remapped copy, no value expr, or ones
	 handled via the 'allocate' clause map.  */
      nvar = maybe_lookup_decl (ovar, ctx);
      if (!nvar
	  || !DECL_HAS_VALUE_EXPR_P (nvar)
	  || (ctx->allocate_map
	      && ctx->allocate_map->get (ovar)))
	continue;

      /* If CTX is a nested parallel directive.  Find the immediately
	 enclosing parallel or workshare construct that contains a
	 mapping for OVAR.  */
      var = lookup_decl_in_outer_ctx (ovar, ctx);

      t = omp_member_access_dummy_var (var);
      if (t)
	{
	  var = DECL_VALUE_EXPR (var);
	  tree o = maybe_lookup_decl_in_outer_ctx (t, ctx);
	  if (o != t)
	    var = unshare_and_remap (var, t, o);
	  else
	    var = unshare_expr (var);
	}

      if (use_pointer_for_field (ovar, ctx))
	{
	  /* Pointer field: send the address (or an empty constructor
	     for the artificial _condtemp_ array).  */
	  x = build_sender_ref (ovar, ctx);
	  if (TREE_CODE (TREE_TYPE (f)) == ARRAY_TYPE
	      && TREE_TYPE (f) == TREE_TYPE (ovar))
	    {
	      gcc_assert (is_parallel_ctx (ctx)
			  && DECL_ARTIFICIAL (ovar));
	      /* _condtemp_ clause.  */
	      var = build_constructor (TREE_TYPE (x), NULL);
	    }
	  else
	    var = build_fold_addr_expr (var);
	  gimplify_assign (x, var, ilist);
	}
      else
	{
	  /* Value field: copy in before the region, and copy back out
	     afterwards unless the variable cannot have changed.  */
	  x = build_sender_ref (ovar, ctx);
	  gimplify_assign (x, var, ilist);

	  if (!TREE_READONLY (var)
	      /* We don't need to receive a new reference to a result
		 or parm decl.  In fact we may not store to it as we will
		 invalidate any pending RSO and generate wrong gimple
		 during inlining.  */
	      && !((TREE_CODE (var) == RESULT_DECL
		    || TREE_CODE (var) == PARM_DECL)
		   && DECL_BY_REFERENCE (var)))
	    {
	      x = build_sender_ref (ovar, ctx);
	      gimplify_assign (var, x, olist);
	    }
	}
    }
}
7801
/* Emit an OpenACC head marker call, encapsulating the partitioning and
7803 other information that must be processed by the target compiler.
7804 Return the maximum number of dimensions the associated loop might
7805 be partitioned over. */
7806
static unsigned
lower_oacc_head_mark (location_t loc, tree ddvar, tree clauses,
		      gimple_seq *seq, omp_context *ctx)
{
  unsigned levels = 0;
  unsigned tag = 0;
  tree gang_static = NULL_TREE;
  auto_vec<tree, 5> args;

  args.quick_push (build_int_cst
		   (integer_type_node, IFN_UNIQUE_OACC_HEAD_MARK));
  args.quick_push (ddvar);
  /* Accumulate the partitioning TAG bits and count the explicitly
     requested LEVELS from the loop clauses.  */
  for (tree c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
    {
      switch (OMP_CLAUSE_CODE (c))
	{
	case OMP_CLAUSE_GANG:
	  tag |= OLF_DIM_GANG;
	  gang_static = OMP_CLAUSE_GANG_STATIC_EXPR (c);
	  /* static:* is represented by -1, and we can ignore it, as
	     scheduling is always static.  */
	  if (gang_static && integer_minus_onep (gang_static))
	    gang_static = NULL_TREE;
	  levels++;
	  break;

	case OMP_CLAUSE_WORKER:
	  tag |= OLF_DIM_WORKER;
	  levels++;
	  break;

	case OMP_CLAUSE_VECTOR:
	  tag |= OLF_DIM_VECTOR;
	  levels++;
	  break;

	case OMP_CLAUSE_SEQ:
	  tag |= OLF_SEQ;
	  break;

	case OMP_CLAUSE_AUTO:
	  tag |= OLF_AUTO;
	  break;

	case OMP_CLAUSE_INDEPENDENT:
	  tag |= OLF_INDEPENDENT;
	  break;

	case OMP_CLAUSE_TILE:
	  tag |= OLF_TILE;
	  break;

	default:
	  continue;
	}
    }

  if (gang_static)
    {
      if (DECL_P (gang_static))
	gang_static = build_outer_var_ref (gang_static, ctx);
      tag |= OLF_GANG_STATIC;
    }

  /* Sanity-check the enclosing offload region kind.  */
  omp_context *tgt = enclosing_target_ctx (ctx);
  if (!tgt || is_oacc_parallel_or_serial (tgt))
    ;
  else if (is_oacc_kernels (tgt))
    /* This loop handling is not used inside OpenACC 'kernels' regions.  */
    gcc_unreachable ();
  else if (is_oacc_kernels_decomposed_part (tgt))
    ;
  else
    gcc_unreachable ();

  /* In a parallel region, loops are implicitly INDEPENDENT.  */
  if (!tgt || is_oacc_parallel_or_serial (tgt))
    tag |= OLF_INDEPENDENT;

  /* Loops inside OpenACC 'kernels' decomposed parts' regions are expected to
     have an explicit 'seq' or 'independent' clause, and no 'auto' clause.  */
  if (tgt && is_oacc_kernels_decomposed_part (tgt))
    {
      gcc_assert (tag & (OLF_SEQ | OLF_INDEPENDENT));
      gcc_assert (!(tag & OLF_AUTO));
    }

  if (tag & OLF_TILE)
    /* Tiling could use all 3 levels.  */
    levels = 3;
  else
    {
      /* A loop lacking SEQ, GANG, WORKER and/or VECTOR could be AUTO.
	 Ensure at least one level, or 2 for possible auto
	 partitioning */
      bool maybe_auto = !(tag & (((GOMP_DIM_MASK (GOMP_DIM_MAX) - 1)
				  << OLF_DIM_BASE) | OLF_SEQ));

      if (levels < 1u + maybe_auto)
	levels = 1u + maybe_auto;
    }

  args.quick_push (build_int_cst (integer_type_node, levels));
  args.quick_push (build_int_cst (integer_type_node, tag));
  if (gang_static)
    args.quick_push (gang_static);

  /* Emit the IFN_UNIQUE head-marker call for the target compiler.  */
  gcall *call = gimple_build_call_internal_vec (IFN_UNIQUE, args);
  gimple_set_location (call, loc);
  gimple_set_lhs (call, ddvar);
  gimple_seq_add_stmt (seq, call);

  return levels;
}
7921
/* Emit an OpenACC loop head or tail marker to SEQ.  LEVEL is the
   partitioning level of the enclosed region.  */
7924
7925 static void
7926 lower_oacc_loop_marker (location_t loc, tree ddvar, bool head,
7927 tree tofollow, gimple_seq *seq)
7928 {
7929 int marker_kind = (head ? IFN_UNIQUE_OACC_HEAD_MARK
7930 : IFN_UNIQUE_OACC_TAIL_MARK);
7931 tree marker = build_int_cst (integer_type_node, marker_kind);
7932 int nargs = 2 + (tofollow != NULL_TREE);
7933 gcall *call = gimple_build_call_internal (IFN_UNIQUE, nargs,
7934 marker, ddvar, tofollow);
7935 gimple_set_location (call, loc);
7936 gimple_set_lhs (call, ddvar);
7937 gimple_seq_add_stmt (seq, call);
7938 }
7939
7940 /* Generate the before and after OpenACC loop sequences. CLAUSES are
7941 the loop clauses, from which we extract reductions. Initialize
7942 HEAD and TAIL. */
7943
static void
lower_oacc_head_tail (location_t loc, tree clauses,
		      gimple_seq *head, gimple_seq *tail, omp_context *ctx)
{
  bool inner = false;
  /* DDVAR threads a data dependence through the marker calls.  */
  tree ddvar = create_tmp_var (integer_type_node, ".data_dep");
  gimple_seq_add_stmt (head, gimple_build_assign (ddvar, integer_zero_node));

  unsigned count = lower_oacc_head_mark (loc, ddvar, clauses, head, ctx);
  tree fork_kind = build_int_cst (unsigned_type_node, IFN_UNIQUE_OACC_FORK);
  tree join_kind = build_int_cst (unsigned_type_node, IFN_UNIQUE_OACC_JOIN);

  gcc_assert (count);
  /* Emit one fork/join pair per partitioning level: HEAD grows
     outermost-first, while TAIL is built by prepending so it unwinds in
     the reverse order.  */
  for (unsigned done = 1; count; count--, done++)
    {
      gimple_seq fork_seq = NULL;
      gimple_seq join_seq = NULL;

      tree place = build_int_cst (integer_type_node, -1);
      gcall *fork = gimple_build_call_internal (IFN_UNIQUE, 3,
						fork_kind, ddvar, place);
      gimple_set_location (fork, loc);
      gimple_set_lhs (fork, ddvar);

      gcall *join = gimple_build_call_internal (IFN_UNIQUE, 3,
						join_kind, ddvar, place);
      gimple_set_location (join, loc);
      gimple_set_lhs (join, ddvar);

      /* Mark the beginning of this level sequence.  */
      if (inner)
	lower_oacc_loop_marker (loc, ddvar, true,
				build_int_cst (integer_type_node, count),
				&fork_seq);
      lower_oacc_loop_marker (loc, ddvar, false,
			      build_int_cst (integer_type_node, done),
			      &join_seq);

      lower_oacc_reductions (loc, clauses, place, inner,
			     fork, join, &fork_seq, &join_seq, ctx);

      /* Append this level to head.  */
      gimple_seq_add_seq (head, fork_seq);
      /* Prepend it to tail.  */
      gimple_seq_add_seq (&join_seq, *tail);
      *tail = join_seq;

      inner = true;
    }

  /* Mark the end of the sequence.  */
  lower_oacc_loop_marker (loc, ddvar, true, NULL_TREE, head);
  lower_oacc_loop_marker (loc, ddvar, false, NULL_TREE, tail);
}
7998
7999 /* If exceptions are enabled, wrap the statements in BODY in a MUST_NOT_THROW
8000 catch handler and return it. This prevents programs from violating the
8001 structured block semantics with throws. */
8002
8003 static gimple_seq
8004 maybe_catch_exception (gimple_seq body)
8005 {
8006 gimple *g;
8007 tree decl;
8008
8009 if (!flag_exceptions)
8010 return body;
8011
8012 if (lang_hooks.eh_protect_cleanup_actions != NULL)
8013 decl = lang_hooks.eh_protect_cleanup_actions ();
8014 else
8015 decl = builtin_decl_explicit (BUILT_IN_TRAP);
8016
8017 g = gimple_build_eh_must_not_throw (decl);
8018 g = gimple_build_try (body, gimple_seq_alloc_with_stmt (g),
8019 GIMPLE_TRY_CATCH);
8020
8021 return gimple_seq_alloc_with_stmt (g);
8022 }
8023
8024 \f
8025 /* Routines to lower OMP directives into OMP-GIMPLE. */
8026
/* If ctx is a worksharing context inside of a cancellable parallel
   region and it isn't nowait, add lhs to its GIMPLE_OMP_RETURN
   and conditional branch to parallel's cancel_label to handle
   cancellation in the implicit barrier.  */

static void
maybe_add_implicit_barrier_cancel (omp_context *ctx, gimple *omp_return,
				   gimple_seq *body)
{
  gcc_assert (gimple_code (omp_return) == GIMPLE_OMP_RETURN);
  /* A nowait construct has no implicit barrier, hence nothing that
     could observe cancellation.  */
  if (gimple_omp_return_nowait_p (omp_return))
    return;
  /* Walk outwards; only taskgroups may sit between this construct and
     the parallel (anything else and we stop looking).  */
  for (omp_context *outer = ctx->outer; outer; outer = outer->outer)
    if (gimple_code (outer->stmt) == GIMPLE_OMP_PARALLEL
	&& outer->cancellable)
      {
	tree fndecl = builtin_decl_explicit (BUILT_IN_GOMP_CANCEL);
	/* Use GOMP_cancel's return type as the barrier result type.  */
	tree c_bool_type = TREE_TYPE (TREE_TYPE (fndecl));
	tree lhs = create_tmp_var (c_bool_type);
	gimple_omp_return_set_lhs (omp_return, lhs);
	tree fallthru_label = create_artificial_label (UNKNOWN_LOCATION);
	/* Branch to the parallel's cancel_label when the barrier reports
	   cancellation (LHS != false), otherwise fall through.  */
	gimple *g = gimple_build_cond (NE_EXPR, lhs,
				       fold_convert (c_bool_type,
						     boolean_false_node),
				       outer->cancel_label, fallthru_label);
	gimple_seq_add_stmt (body, g);
	gimple_seq_add_stmt (body, gimple_build_label (fallthru_label));
      }
    else if (gimple_code (outer->stmt) != GIMPLE_OMP_TASKGROUP)
      return;
}
8058
8059 /* Find the first task_reduction or reduction clause or return NULL
8060 if there are none. */
8061
8062 static inline tree
8063 omp_task_reductions_find_first (tree clauses, enum tree_code code,
8064 enum omp_clause_code ccode)
8065 {
8066 while (1)
8067 {
8068 clauses = omp_find_clause (clauses, ccode);
8069 if (clauses == NULL_TREE)
8070 return NULL_TREE;
8071 if (ccode != OMP_CLAUSE_REDUCTION
8072 || code == OMP_TASKLOOP
8073 || OMP_CLAUSE_REDUCTION_TASK (clauses))
8074 return clauses;
8075 clauses = OMP_CLAUSE_CHAIN (clauses);
8076 }
8077 }
8078
/* Forward declaration; defined below after its users.  */
static void lower_omp_task_reductions (omp_context *, enum tree_code, tree,
				       gimple_seq *, gimple_seq *);
8081
/* Lower the OpenMP sections directive in the current statement in GSI_P.
   CTX is the enclosing OMP context for the current statement.  */

static void
lower_omp_sections (gimple_stmt_iterator *gsi_p, omp_context *ctx)
{
  tree block, control;
  gimple_stmt_iterator tgsi;
  gomp_sections *stmt;
  gimple *t;
  gbind *new_stmt, *bind;
  gimple_seq ilist, dlist, olist, tred_dlist = NULL, clist = NULL, new_body;

  stmt = as_a <gomp_sections *> (gsi_stmt (*gsi_p));

  push_gimplify_context ();

  dlist = NULL;
  ilist = NULL;

  /* Handle task reductions: if any reduction clause qualifies, prepend
     an artificial _REDUCTEMP_ clause holding the registration data and
     emit the register/unregister code into ILIST/TRED_DLIST.  */
  tree rclauses
    = omp_task_reductions_find_first (gimple_omp_sections_clauses (stmt),
				      OMP_SECTIONS, OMP_CLAUSE_REDUCTION);
  tree rtmp = NULL_TREE;
  if (rclauses)
    {
      tree type = build_pointer_type (pointer_sized_int_node);
      tree temp = create_tmp_var (type);
      tree c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__REDUCTEMP_);
      OMP_CLAUSE_DECL (c) = temp;
      OMP_CLAUSE_CHAIN (c) = gimple_omp_sections_clauses (stmt);
      gimple_omp_sections_set_clauses (stmt, c);
      lower_omp_task_reductions (ctx, OMP_SECTIONS,
				 gimple_omp_sections_clauses (stmt),
				 &ilist, &tred_dlist);
      rclauses = c;
      /* Snapshot TEMP into an SSA name; it replaces the clause decl at
	 the end of this function.  */
      rtmp = make_ssa_name (type);
      gimple_seq_add_stmt (&ilist, gimple_build_assign (rtmp, temp));
    }

  tree *clauses_ptr = gimple_omp_sections_clauses_ptr (stmt);
  lower_lastprivate_conditional_clauses (clauses_ptr, ctx);

  /* Emit privatization/initialization (ILIST) and destruction (DLIST)
     code for the data-sharing clauses.  */
  lower_rec_input_clauses (gimple_omp_sections_clauses (stmt),
			   &ilist, &dlist, ctx, NULL);

  /* CONTROL is the switch variable selecting which section to run.  */
  control = create_tmp_var (unsigned_type_node, ".section");
  gimple_omp_sections_set_control (stmt, control);

  /* Lower each GIMPLE_OMP_SECTION body in place, terminating each with
     a GIMPLE_OMP_RETURN.  */
  new_body = gimple_omp_body (stmt);
  gimple_omp_set_body (stmt, NULL);
  tgsi = gsi_start (new_body);
  for (; !gsi_end_p (tgsi); gsi_next (&tgsi))
    {
      omp_context *sctx;
      gimple *sec_start;

      sec_start = gsi_stmt (tgsi);
      sctx = maybe_lookup_ctx (sec_start);
      gcc_assert (sctx);

      lower_omp (gimple_omp_body_ptr (sec_start), sctx);
      gsi_insert_seq_after (&tgsi, gimple_omp_body (sec_start),
			    GSI_CONTINUE_LINKING);
      gimple_omp_set_body (sec_start, NULL);

      /* Lastprivate copy-out belongs after the final section only.  */
      if (gsi_one_before_end_p (tgsi))
	{
	  gimple_seq l = NULL;
	  lower_lastprivate_clauses (gimple_omp_sections_clauses (stmt), NULL,
				     &ilist, &l, &clist, ctx);
	  gsi_insert_seq_after (&tgsi, l, GSI_CONTINUE_LINKING);
	  gimple_omp_section_set_last (sec_start);
	}

      gsi_insert_after (&tgsi, gimple_build_omp_return (false),
			GSI_CONTINUE_LINKING);
    }

  block = make_node (BLOCK);
  bind = gimple_build_bind (NULL, new_body, block);

  /* Reduction merge code; CLIST collects pieces that must run inside a
     single GOMP_atomic_start/end critical region.  */
  olist = NULL;
  lower_reduction_clauses (gimple_omp_sections_clauses (stmt), &olist,
			   &clist, ctx);
  if (clist)
    {
      tree fndecl = builtin_decl_explicit (BUILT_IN_GOMP_ATOMIC_START);
      gcall *g = gimple_build_call (fndecl, 0);
      gimple_seq_add_stmt (&olist, g);
      gimple_seq_add_seq (&olist, clist);
      fndecl = builtin_decl_explicit (BUILT_IN_GOMP_ATOMIC_END);
      g = gimple_build_call (fndecl, 0);
      gimple_seq_add_stmt (&olist, g);
    }

  /* Replace the directive with an outer bind that will hold the whole
     lowered construct.  */
  block = make_node (BLOCK);
  new_stmt = gimple_build_bind (NULL, NULL, block);
  gsi_replace (gsi_p, new_stmt, true);

  pop_gimplify_context (new_stmt);
  gimple_bind_append_vars (new_stmt, ctx->block_vars);
  BLOCK_VARS (block) = gimple_bind_vars (bind);
  if (BLOCK_VARS (block))
    TREE_USED (block) = 1;

  /* Assemble the lowered form: init code, the sections statement, the
     dispatch switch, the sections bodies, then continue/reductions/
     destructors and the closing return.  */
  new_body = NULL;
  gimple_seq_add_seq (&new_body, ilist);
  gimple_seq_add_stmt (&new_body, stmt);
  gimple_seq_add_stmt (&new_body, gimple_build_omp_sections_switch ());
  gimple_seq_add_stmt (&new_body, bind);

  t = gimple_build_omp_continue (control, control);
  gimple_seq_add_stmt (&new_body, t);

  gimple_seq_add_seq (&new_body, olist);
  if (ctx->cancellable)
    gimple_seq_add_stmt (&new_body, gimple_build_label (ctx->cancel_label));
  gimple_seq_add_seq (&new_body, dlist);

  new_body = maybe_catch_exception (new_body);

  bool nowait = omp_find_clause (gimple_omp_sections_clauses (stmt),
				 OMP_CLAUSE_NOWAIT) != NULL_TREE;
  t = gimple_build_omp_return (nowait);
  gimple_seq_add_stmt (&new_body, t);
  gimple_seq_add_seq (&new_body, tred_dlist);
  maybe_add_implicit_barrier_cancel (ctx, t, &new_body);

  if (rclauses)
    OMP_CLAUSE_DECL (rclauses) = rtmp;

  gimple_bind_set_body (new_stmt, new_body);
}
8216
8217
8218 /* A subroutine of lower_omp_single. Expand the simple form of
8219 a GIMPLE_OMP_SINGLE, without a copyprivate clause:
8220
8221 if (GOMP_single_start ())
8222 BODY;
8223 [ GOMP_barrier (); ] -> unless 'nowait' is present.
8224
8225 FIXME. It may be better to delay expanding the logic of this until
8226 pass_expand_omp. The expanded logic may make the job more difficult
8227 to a synchronization analysis pass. */
8228
8229 static void
8230 lower_omp_single_simple (gomp_single *single_stmt, gimple_seq *pre_p)
8231 {
8232 location_t loc = gimple_location (single_stmt);
8233 tree tlabel = create_artificial_label (loc);
8234 tree flabel = create_artificial_label (loc);
8235 gimple *call, *cond;
8236 tree lhs, decl;
8237
8238 decl = builtin_decl_explicit (BUILT_IN_GOMP_SINGLE_START);
8239 lhs = create_tmp_var (TREE_TYPE (TREE_TYPE (decl)));
8240 call = gimple_build_call (decl, 0);
8241 gimple_call_set_lhs (call, lhs);
8242 gimple_seq_add_stmt (pre_p, call);
8243
8244 cond = gimple_build_cond (EQ_EXPR, lhs,
8245 fold_convert_loc (loc, TREE_TYPE (lhs),
8246 boolean_true_node),
8247 tlabel, flabel);
8248 gimple_seq_add_stmt (pre_p, cond);
8249 gimple_seq_add_stmt (pre_p, gimple_build_label (tlabel));
8250 gimple_seq_add_seq (pre_p, gimple_omp_body (single_stmt));
8251 gimple_seq_add_stmt (pre_p, gimple_build_label (flabel));
8252 }
8253
8254
/* A subroutine of lower_omp_single.  Expand the simple form of
   a GIMPLE_OMP_SINGLE, with a copyprivate clause:

	#pragma omp single copyprivate (a, b, c)

   Create a new structure to hold copies of 'a', 'b' and 'c' and emit:

	{
	  if ((copyout_p = GOMP_single_copy_start ()) == NULL)
	    {
	      BODY;
	      copyout.a = a;
	      copyout.b = b;
	      copyout.c = c;
	      GOMP_single_copy_end (&copyout);
	    }
	  else
	    {
	      a = copyout_p->a;
	      b = copyout_p->b;
	      c = copyout_p->c;
	    }
	  GOMP_barrier ();
	}

  FIXME.  It may be better to delay expanding the logic of this until
  pass_expand_omp.  The expanded logic may make the job more difficult
  to a synchronization analysis pass.  */

static void
lower_omp_single_copy (gomp_single *single_stmt, gimple_seq *pre_p,
		       omp_context *ctx)
{
  tree ptr_type, t, l0, l1, l2, bfn_decl;
  gimple_seq copyin_seq;
  location_t loc = gimple_location (single_stmt);

  /* The "copyout" struct instance written by the executing thread.  */
  ctx->sender_decl = create_tmp_var (ctx->record_type, ".omp_copy_o");

  /* The "copyout_p" pointer read by the other threads.  */
  ptr_type = build_pointer_type (ctx->record_type);
  ctx->receiver_decl = create_tmp_var (ptr_type, ".omp_copy_i");

  /* l0: executing-thread path; l1: other-threads path; l2: rejoin.  */
  l0 = create_artificial_label (loc);
  l1 = create_artificial_label (loc);
  l2 = create_artificial_label (loc);

  /* copyout_p = GOMP_single_copy_start ();  */
  bfn_decl = builtin_decl_explicit (BUILT_IN_GOMP_SINGLE_COPY_START);
  t = build_call_expr_loc (loc, bfn_decl, 0);
  t = fold_convert_loc (loc, ptr_type, t);
  gimplify_assign (ctx->receiver_decl, t, pre_p);

  /* A NULL result selects the executing thread.  */
  t = build2 (EQ_EXPR, boolean_type_node, ctx->receiver_decl,
	      build_int_cst (ptr_type, 0));
  t = build3 (COND_EXPR, void_type_node, t,
	      build_and_jump (&l0), build_and_jump (&l1));
  gimplify_and_add (t, pre_p);

  gimple_seq_add_stmt (pre_p, gimple_build_label (l0));

  gimple_seq_add_seq (pre_p, gimple_omp_body (single_stmt));

  /* Lowering the copyprivate clauses appends the copy-out assignments
     to PRE_P here, and collects the copy-in side into COPYIN_SEQ.  */
  copyin_seq = NULL;
  lower_copyprivate_clauses (gimple_omp_single_clauses (single_stmt), pre_p,
			      &copyin_seq, ctx);

  /* GOMP_single_copy_end (&copyout);  */
  t = build_fold_addr_expr_loc (loc, ctx->sender_decl);
  bfn_decl = builtin_decl_explicit (BUILT_IN_GOMP_SINGLE_COPY_END);
  t = build_call_expr_loc (loc, bfn_decl, 1, t);
  gimplify_and_add (t, pre_p);

  /* Skip the copy-in code on the executing thread.  */
  t = build_and_jump (&l2);
  gimplify_and_add (t, pre_p);

  gimple_seq_add_stmt (pre_p, gimple_build_label (l1));

  gimple_seq_add_seq (pre_p, copyin_seq);

  gimple_seq_add_stmt (pre_p, gimple_build_label (l2));
}
8334
8335
/* Expand code for an OpenMP single directive.  */

static void
lower_omp_single (gimple_stmt_iterator *gsi_p, omp_context *ctx)
{
  tree block;
  gomp_single *single_stmt = as_a <gomp_single *> (gsi_stmt (*gsi_p));
  gbind *bind;
  gimple_seq bind_body, bind_body_tail = NULL, dlist;

  push_gimplify_context ();

  /* Replace the directive with a bind holding the lowered form.  */
  block = make_node (BLOCK);
  bind = gimple_build_bind (NULL, NULL, block);
  gsi_replace (gsi_p, bind, true);
  bind_body = NULL;
  dlist = NULL;
  /* Privatization code into BIND_BODY, destructors into DLIST.  */
  lower_rec_input_clauses (gimple_omp_single_clauses (single_stmt),
			   &bind_body, &dlist, ctx, NULL);
  lower_omp (gimple_omp_body_ptr (single_stmt), ctx);

  gimple_seq_add_stmt (&bind_body, single_stmt);

  /* CTX->record_type is only set when a copyprivate clause requires
     the copy-out/copy-in expansion.  */
  if (ctx->record_type)
    lower_omp_single_copy (single_stmt, &bind_body, ctx);
  else
    lower_omp_single_simple (single_stmt, &bind_body);

  gimple_omp_set_body (single_stmt, NULL);

  gimple_seq_add_seq (&bind_body, dlist);

  bind_body = maybe_catch_exception (bind_body);

  bool nowait = omp_find_clause (gimple_omp_single_clauses (single_stmt),
				 OMP_CLAUSE_NOWAIT) != NULL_TREE;
  gimple *g = gimple_build_omp_return (nowait);
  gimple_seq_add_stmt (&bind_body_tail, g);
  maybe_add_implicit_barrier_cancel (ctx, g, &bind_body_tail);
  if (ctx->record_type)
    {
      /* Clobber the copy-out struct right after the return so its
	 storage can be reused.  */
      gimple_stmt_iterator gsi = gsi_start (bind_body_tail);
      tree clobber = build_clobber (ctx->record_type);
      gsi_insert_after (&gsi, gimple_build_assign (ctx->sender_decl,
						   clobber), GSI_SAME_STMT);
    }
  gimple_seq_add_seq (&bind_body, bind_body_tail);
  gimple_bind_set_body (bind, bind_body);

  pop_gimplify_context (bind);

  gimple_bind_append_vars (bind, ctx->block_vars);
  BLOCK_VARS (block) = ctx->block_vars;
  if (BLOCK_VARS (block))
    TREE_USED (block) = 1;
}
8392
8393
8394 /* Expand code for an OpenMP master directive. */
8395
8396 static void
8397 lower_omp_master (gimple_stmt_iterator *gsi_p, omp_context *ctx)
8398 {
8399 tree block, lab = NULL, x, bfn_decl;
8400 gimple *stmt = gsi_stmt (*gsi_p);
8401 gbind *bind;
8402 location_t loc = gimple_location (stmt);
8403 gimple_seq tseq;
8404
8405 push_gimplify_context ();
8406
8407 block = make_node (BLOCK);
8408 bind = gimple_build_bind (NULL, NULL, block);
8409 gsi_replace (gsi_p, bind, true);
8410 gimple_bind_add_stmt (bind, stmt);
8411
8412 bfn_decl = builtin_decl_explicit (BUILT_IN_OMP_GET_THREAD_NUM);
8413 x = build_call_expr_loc (loc, bfn_decl, 0);
8414 x = build2 (EQ_EXPR, boolean_type_node, x, integer_zero_node);
8415 x = build3 (COND_EXPR, void_type_node, x, NULL, build_and_jump (&lab));
8416 tseq = NULL;
8417 gimplify_and_add (x, &tseq);
8418 gimple_bind_add_seq (bind, tseq);
8419
8420 lower_omp (gimple_omp_body_ptr (stmt), ctx);
8421 gimple_omp_set_body (stmt, maybe_catch_exception (gimple_omp_body (stmt)));
8422 gimple_bind_add_seq (bind, gimple_omp_body (stmt));
8423 gimple_omp_set_body (stmt, NULL);
8424
8425 gimple_bind_add_stmt (bind, gimple_build_label (lab));
8426
8427 gimple_bind_add_stmt (bind, gimple_build_omp_return (true));
8428
8429 pop_gimplify_context (bind);
8430
8431 gimple_bind_append_vars (bind, ctx->block_vars);
8432 BLOCK_VARS (block) = ctx->block_vars;
8433 }
8434
8435 /* Helper function for lower_omp_task_reductions. For a specific PASS
8436 find out the current clause it should be processed, or return false
8437 if all have been processed already. */
8438
8439 static inline bool
8440 omp_task_reduction_iterate (int pass, enum tree_code code,
8441 enum omp_clause_code ccode, tree *c, tree *decl,
8442 tree *type, tree *next)
8443 {
8444 for (; *c; *c = omp_find_clause (OMP_CLAUSE_CHAIN (*c), ccode))
8445 {
8446 if (ccode == OMP_CLAUSE_REDUCTION
8447 && code != OMP_TASKLOOP
8448 && !OMP_CLAUSE_REDUCTION_TASK (*c))
8449 continue;
8450 *decl = OMP_CLAUSE_DECL (*c);
8451 *type = TREE_TYPE (*decl);
8452 if (TREE_CODE (*decl) == MEM_REF)
8453 {
8454 if (pass != 1)
8455 continue;
8456 }
8457 else
8458 {
8459 if (omp_is_reference (*decl))
8460 *type = TREE_TYPE (*type);
8461 if (pass != (!TREE_CONSTANT (TYPE_SIZE_UNIT (*type))))
8462 continue;
8463 }
8464 *next = omp_find_clause (OMP_CLAUSE_CHAIN (*c), ccode);
8465 return true;
8466 }
8467 *decl = NULL_TREE;
8468 *type = NULL_TREE;
8469 *next = NULL_TREE;
8470 return false;
8471 }
8472
8473 /* Lower task_reduction and reduction clauses (the latter unless CODE is
8474 OMP_TASKGROUP only with task modifier). Register mapping of those in
8475 START sequence and reducing them and unregister them in the END sequence. */
8476
8477 static void
8478 lower_omp_task_reductions (omp_context *ctx, enum tree_code code, tree clauses,
8479 gimple_seq *start, gimple_seq *end)
8480 {
8481 enum omp_clause_code ccode
8482 = (code == OMP_TASKGROUP
8483 ? OMP_CLAUSE_TASK_REDUCTION : OMP_CLAUSE_REDUCTION);
8484 tree cancellable = NULL_TREE;
8485 clauses = omp_task_reductions_find_first (clauses, code, ccode);
8486 if (clauses == NULL_TREE)
8487 return;
8488 if (code == OMP_FOR || code == OMP_SECTIONS)
8489 {
8490 for (omp_context *outer = ctx->outer; outer; outer = outer->outer)
8491 if (gimple_code (outer->stmt) == GIMPLE_OMP_PARALLEL
8492 && outer->cancellable)
8493 {
8494 cancellable = error_mark_node;
8495 break;
8496 }
8497 else if (gimple_code (outer->stmt) != GIMPLE_OMP_TASKGROUP)
8498 break;
8499 }
8500 tree record_type = lang_hooks.types.make_type (RECORD_TYPE);
8501 tree *last = &TYPE_FIELDS (record_type);
8502 unsigned cnt = 0;
8503 if (cancellable)
8504 {
8505 tree field = build_decl (UNKNOWN_LOCATION, FIELD_DECL, NULL_TREE,
8506 ptr_type_node);
8507 tree ifield = build_decl (UNKNOWN_LOCATION, FIELD_DECL, NULL_TREE,
8508 integer_type_node);
8509 *last = field;
8510 DECL_CHAIN (field) = ifield;
8511 last = &DECL_CHAIN (ifield);
8512 DECL_CONTEXT (field) = record_type;
8513 if (TYPE_ALIGN (record_type) < DECL_ALIGN (field))
8514 SET_TYPE_ALIGN (record_type, DECL_ALIGN (field));
8515 DECL_CONTEXT (ifield) = record_type;
8516 if (TYPE_ALIGN (record_type) < DECL_ALIGN (ifield))
8517 SET_TYPE_ALIGN (record_type, DECL_ALIGN (ifield));
8518 }
8519 for (int pass = 0; pass < 2; pass++)
8520 {
8521 tree decl, type, next;
8522 for (tree c = clauses;
8523 omp_task_reduction_iterate (pass, code, ccode,
8524 &c, &decl, &type, &next); c = next)
8525 {
8526 ++cnt;
8527 tree new_type = type;
8528 if (ctx->outer)
8529 new_type = remap_type (type, &ctx->outer->cb);
8530 tree field
8531 = build_decl (OMP_CLAUSE_LOCATION (c), FIELD_DECL,
8532 DECL_P (decl) ? DECL_NAME (decl) : NULL_TREE,
8533 new_type);
8534 if (DECL_P (decl) && type == TREE_TYPE (decl))
8535 {
8536 SET_DECL_ALIGN (field, DECL_ALIGN (decl));
8537 DECL_USER_ALIGN (field) = DECL_USER_ALIGN (decl);
8538 TREE_THIS_VOLATILE (field) = TREE_THIS_VOLATILE (decl);
8539 }
8540 else
8541 SET_DECL_ALIGN (field, TYPE_ALIGN (type));
8542 DECL_CONTEXT (field) = record_type;
8543 if (TYPE_ALIGN (record_type) < DECL_ALIGN (field))
8544 SET_TYPE_ALIGN (record_type, DECL_ALIGN (field));
8545 *last = field;
8546 last = &DECL_CHAIN (field);
8547 tree bfield
8548 = build_decl (OMP_CLAUSE_LOCATION (c), FIELD_DECL, NULL_TREE,
8549 boolean_type_node);
8550 DECL_CONTEXT (bfield) = record_type;
8551 if (TYPE_ALIGN (record_type) < DECL_ALIGN (bfield))
8552 SET_TYPE_ALIGN (record_type, DECL_ALIGN (bfield));
8553 *last = bfield;
8554 last = &DECL_CHAIN (bfield);
8555 }
8556 }
8557 *last = NULL_TREE;
8558 layout_type (record_type);
8559
8560 /* Build up an array which registers with the runtime all the reductions
8561 and deregisters them at the end. Format documented in libgomp/task.c. */
8562 tree atype = build_array_type_nelts (pointer_sized_int_node, 7 + cnt * 3);
8563 tree avar = create_tmp_var_raw (atype);
8564 gimple_add_tmp_var (avar);
8565 TREE_ADDRESSABLE (avar) = 1;
8566 tree r = build4 (ARRAY_REF, pointer_sized_int_node, avar, size_zero_node,
8567 NULL_TREE, NULL_TREE);
8568 tree t = build_int_cst (pointer_sized_int_node, cnt);
8569 gimple_seq_add_stmt (start, gimple_build_assign (r, t));
8570 gimple_seq seq = NULL;
8571 tree sz = fold_convert (pointer_sized_int_node,
8572 TYPE_SIZE_UNIT (record_type));
8573 int cachesz = 64;
8574 sz = fold_build2 (PLUS_EXPR, pointer_sized_int_node, sz,
8575 build_int_cst (pointer_sized_int_node, cachesz - 1));
8576 sz = fold_build2 (BIT_AND_EXPR, pointer_sized_int_node, sz,
8577 build_int_cst (pointer_sized_int_node, ~(cachesz - 1)));
8578 ctx->task_reductions.create (1 + cnt);
8579 ctx->task_reduction_map = new hash_map<tree, unsigned>;
8580 ctx->task_reductions.quick_push (TREE_CODE (sz) == INTEGER_CST
8581 ? sz : NULL_TREE);
8582 sz = force_gimple_operand (sz, &seq, true, NULL_TREE);
8583 gimple_seq_add_seq (start, seq);
8584 r = build4 (ARRAY_REF, pointer_sized_int_node, avar, size_one_node,
8585 NULL_TREE, NULL_TREE);
8586 gimple_seq_add_stmt (start, gimple_build_assign (r, sz));
8587 r = build4 (ARRAY_REF, pointer_sized_int_node, avar, size_int (2),
8588 NULL_TREE, NULL_TREE);
8589 t = build_int_cst (pointer_sized_int_node,
8590 MAX (TYPE_ALIGN_UNIT (record_type), (unsigned) cachesz));
8591 gimple_seq_add_stmt (start, gimple_build_assign (r, t));
8592 r = build4 (ARRAY_REF, pointer_sized_int_node, avar, size_int (3),
8593 NULL_TREE, NULL_TREE);
8594 t = build_int_cst (pointer_sized_int_node, -1);
8595 gimple_seq_add_stmt (start, gimple_build_assign (r, t));
8596 r = build4 (ARRAY_REF, pointer_sized_int_node, avar, size_int (4),
8597 NULL_TREE, NULL_TREE);
8598 t = build_int_cst (pointer_sized_int_node, 0);
8599 gimple_seq_add_stmt (start, gimple_build_assign (r, t));
8600
8601 /* In end, build a loop that iterates from 0 to < omp_get_num_threads ()
8602 and for each task reduction checks a bool right after the private variable
8603 within that thread's chunk; if the bool is clear, it hasn't been
8604 initialized and thus isn't going to be reduced nor destructed, otherwise
8605 reduce and destruct it. */
8606 tree idx = create_tmp_var (size_type_node);
8607 gimple_seq_add_stmt (end, gimple_build_assign (idx, size_zero_node));
8608 tree num_thr_sz = create_tmp_var (size_type_node);
8609 tree lab1 = create_artificial_label (UNKNOWN_LOCATION);
8610 tree lab2 = create_artificial_label (UNKNOWN_LOCATION);
8611 tree lab3 = NULL_TREE;
8612 gimple *g;
8613 if (code == OMP_FOR || code == OMP_SECTIONS)
8614 {
8615 /* For worksharing constructs, only perform it in the master thread,
8616 with the exception of cancelled implicit barriers - then only handle
8617 the current thread. */
8618 tree lab4 = create_artificial_label (UNKNOWN_LOCATION);
8619 t = builtin_decl_explicit (BUILT_IN_OMP_GET_THREAD_NUM);
8620 tree thr_num = create_tmp_var (integer_type_node);
8621 g = gimple_build_call (t, 0);
8622 gimple_call_set_lhs (g, thr_num);
8623 gimple_seq_add_stmt (end, g);
8624 if (cancellable)
8625 {
8626 tree c;
8627 tree lab5 = create_artificial_label (UNKNOWN_LOCATION);
8628 tree lab6 = create_artificial_label (UNKNOWN_LOCATION);
8629 lab3 = create_artificial_label (UNKNOWN_LOCATION);
8630 if (code == OMP_FOR)
8631 c = gimple_omp_for_clauses (ctx->stmt);
8632 else /* if (code == OMP_SECTIONS) */
8633 c = gimple_omp_sections_clauses (ctx->stmt);
8634 c = OMP_CLAUSE_DECL (omp_find_clause (c, OMP_CLAUSE__REDUCTEMP_));
8635 cancellable = c;
8636 g = gimple_build_cond (NE_EXPR, c, build_zero_cst (TREE_TYPE (c)),
8637 lab5, lab6);
8638 gimple_seq_add_stmt (end, g);
8639 gimple_seq_add_stmt (end, gimple_build_label (lab5));
8640 g = gimple_build_assign (idx, NOP_EXPR, thr_num);
8641 gimple_seq_add_stmt (end, g);
8642 g = gimple_build_assign (num_thr_sz, PLUS_EXPR, idx,
8643 build_one_cst (TREE_TYPE (idx)));
8644 gimple_seq_add_stmt (end, g);
8645 gimple_seq_add_stmt (end, gimple_build_goto (lab3));
8646 gimple_seq_add_stmt (end, gimple_build_label (lab6));
8647 }
8648 g = gimple_build_cond (NE_EXPR, thr_num, integer_zero_node, lab2, lab4);
8649 gimple_seq_add_stmt (end, g);
8650 gimple_seq_add_stmt (end, gimple_build_label (lab4));
8651 }
8652 if (code != OMP_PARALLEL)
8653 {
8654 t = builtin_decl_explicit (BUILT_IN_OMP_GET_NUM_THREADS);
8655 tree num_thr = create_tmp_var (integer_type_node);
8656 g = gimple_build_call (t, 0);
8657 gimple_call_set_lhs (g, num_thr);
8658 gimple_seq_add_stmt (end, g);
8659 g = gimple_build_assign (num_thr_sz, NOP_EXPR, num_thr);
8660 gimple_seq_add_stmt (end, g);
8661 if (cancellable)
8662 gimple_seq_add_stmt (end, gimple_build_label (lab3));
8663 }
8664 else
8665 {
8666 tree c = omp_find_clause (gimple_omp_parallel_clauses (ctx->stmt),
8667 OMP_CLAUSE__REDUCTEMP_);
8668 t = fold_convert (pointer_sized_int_node, OMP_CLAUSE_DECL (c));
8669 t = fold_convert (size_type_node, t);
8670 gimplify_assign (num_thr_sz, t, end);
8671 }
8672 t = build4 (ARRAY_REF, pointer_sized_int_node, avar, size_int (2),
8673 NULL_TREE, NULL_TREE);
8674 tree data = create_tmp_var (pointer_sized_int_node);
8675 gimple_seq_add_stmt (end, gimple_build_assign (data, t));
8676 gimple_seq_add_stmt (end, gimple_build_label (lab1));
8677 tree ptr;
8678 if (TREE_CODE (TYPE_SIZE_UNIT (record_type)) == INTEGER_CST)
8679 ptr = create_tmp_var (build_pointer_type (record_type));
8680 else
8681 ptr = create_tmp_var (ptr_type_node);
8682 gimple_seq_add_stmt (end, gimple_build_assign (ptr, NOP_EXPR, data));
8683
8684 tree field = TYPE_FIELDS (record_type);
8685 cnt = 0;
8686 if (cancellable)
8687 field = DECL_CHAIN (DECL_CHAIN (field));
8688 for (int pass = 0; pass < 2; pass++)
8689 {
8690 tree decl, type, next;
8691 for (tree c = clauses;
8692 omp_task_reduction_iterate (pass, code, ccode,
8693 &c, &decl, &type, &next); c = next)
8694 {
8695 tree var = decl, ref;
8696 if (TREE_CODE (decl) == MEM_REF)
8697 {
8698 var = TREE_OPERAND (var, 0);
8699 if (TREE_CODE (var) == POINTER_PLUS_EXPR)
8700 var = TREE_OPERAND (var, 0);
8701 tree v = var;
8702 if (TREE_CODE (var) == ADDR_EXPR)
8703 var = TREE_OPERAND (var, 0);
8704 else if (TREE_CODE (var) == INDIRECT_REF)
8705 var = TREE_OPERAND (var, 0);
8706 tree orig_var = var;
8707 if (is_variable_sized (var))
8708 {
8709 gcc_assert (DECL_HAS_VALUE_EXPR_P (var));
8710 var = DECL_VALUE_EXPR (var);
8711 gcc_assert (TREE_CODE (var) == INDIRECT_REF);
8712 var = TREE_OPERAND (var, 0);
8713 gcc_assert (DECL_P (var));
8714 }
8715 t = ref = maybe_lookup_decl_in_outer_ctx (var, ctx);
8716 if (orig_var != var)
8717 gcc_assert (TREE_CODE (v) == ADDR_EXPR);
8718 else if (TREE_CODE (v) == ADDR_EXPR)
8719 t = build_fold_addr_expr (t);
8720 else if (TREE_CODE (v) == INDIRECT_REF)
8721 t = build_fold_indirect_ref (t);
8722 if (TREE_CODE (TREE_OPERAND (decl, 0)) == POINTER_PLUS_EXPR)
8723 {
8724 tree b = TREE_OPERAND (TREE_OPERAND (decl, 0), 1);
8725 b = maybe_lookup_decl_in_outer_ctx (b, ctx);
8726 t = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (t), t, b);
8727 }
8728 if (!integer_zerop (TREE_OPERAND (decl, 1)))
8729 t = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (t), t,
8730 fold_convert (size_type_node,
8731 TREE_OPERAND (decl, 1)));
8732 }
8733 else
8734 {
8735 t = ref = maybe_lookup_decl_in_outer_ctx (var, ctx);
8736 if (!omp_is_reference (decl))
8737 t = build_fold_addr_expr (t);
8738 }
8739 t = fold_convert (pointer_sized_int_node, t);
8740 seq = NULL;
8741 t = force_gimple_operand (t, &seq, true, NULL_TREE);
8742 gimple_seq_add_seq (start, seq);
8743 r = build4 (ARRAY_REF, pointer_sized_int_node, avar,
8744 size_int (7 + cnt * 3), NULL_TREE, NULL_TREE);
8745 gimple_seq_add_stmt (start, gimple_build_assign (r, t));
8746 t = unshare_expr (byte_position (field));
8747 t = fold_convert (pointer_sized_int_node, t);
8748 ctx->task_reduction_map->put (c, cnt);
8749 ctx->task_reductions.quick_push (TREE_CODE (t) == INTEGER_CST
8750 ? t : NULL_TREE);
8751 seq = NULL;
8752 t = force_gimple_operand (t, &seq, true, NULL_TREE);
8753 gimple_seq_add_seq (start, seq);
8754 r = build4 (ARRAY_REF, pointer_sized_int_node, avar,
8755 size_int (7 + cnt * 3 + 1), NULL_TREE, NULL_TREE);
8756 gimple_seq_add_stmt (start, gimple_build_assign (r, t));
8757
8758 tree bfield = DECL_CHAIN (field);
8759 tree cond;
8760 if (code == OMP_PARALLEL || code == OMP_FOR || code == OMP_SECTIONS)
8761 /* In parallel or worksharing all threads unconditionally
8762 initialize all their task reduction private variables. */
8763 cond = boolean_true_node;
8764 else if (TREE_TYPE (ptr) == ptr_type_node)
8765 {
8766 cond = build2 (POINTER_PLUS_EXPR, ptr_type_node, ptr,
8767 unshare_expr (byte_position (bfield)));
8768 seq = NULL;
8769 cond = force_gimple_operand (cond, &seq, true, NULL_TREE);
8770 gimple_seq_add_seq (end, seq);
8771 tree pbool = build_pointer_type (TREE_TYPE (bfield));
8772 cond = build2 (MEM_REF, TREE_TYPE (bfield), cond,
8773 build_int_cst (pbool, 0));
8774 }
8775 else
8776 cond = build3 (COMPONENT_REF, TREE_TYPE (bfield),
8777 build_simple_mem_ref (ptr), bfield, NULL_TREE);
8778 tree lab3 = create_artificial_label (UNKNOWN_LOCATION);
8779 tree lab4 = create_artificial_label (UNKNOWN_LOCATION);
8780 tree condv = create_tmp_var (boolean_type_node);
8781 gimple_seq_add_stmt (end, gimple_build_assign (condv, cond));
8782 g = gimple_build_cond (NE_EXPR, condv, boolean_false_node,
8783 lab3, lab4);
8784 gimple_seq_add_stmt (end, g);
8785 gimple_seq_add_stmt (end, gimple_build_label (lab3));
8786 if (cancellable && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) == NULL_TREE)
8787 {
8788 /* If this reduction doesn't need destruction and parallel
8789 has been cancelled, there is nothing to do for this
8790 reduction, so jump around the merge operation. */
8791 tree lab5 = create_artificial_label (UNKNOWN_LOCATION);
8792 g = gimple_build_cond (NE_EXPR, cancellable,
8793 build_zero_cst (TREE_TYPE (cancellable)),
8794 lab4, lab5);
8795 gimple_seq_add_stmt (end, g);
8796 gimple_seq_add_stmt (end, gimple_build_label (lab5));
8797 }
8798
8799 tree new_var;
8800 if (TREE_TYPE (ptr) == ptr_type_node)
8801 {
8802 new_var = build2 (POINTER_PLUS_EXPR, ptr_type_node, ptr,
8803 unshare_expr (byte_position (field)));
8804 seq = NULL;
8805 new_var = force_gimple_operand (new_var, &seq, true, NULL_TREE);
8806 gimple_seq_add_seq (end, seq);
8807 tree pbool = build_pointer_type (TREE_TYPE (field));
8808 new_var = build2 (MEM_REF, TREE_TYPE (field), new_var,
8809 build_int_cst (pbool, 0));
8810 }
8811 else
8812 new_var = build3 (COMPONENT_REF, TREE_TYPE (field),
8813 build_simple_mem_ref (ptr), field, NULL_TREE);
8814
8815 enum tree_code rcode = OMP_CLAUSE_REDUCTION_CODE (c);
8816 if (TREE_CODE (decl) != MEM_REF && omp_is_reference (decl))
8817 ref = build_simple_mem_ref (ref);
8818 /* reduction(-:var) sums up the partial results, so it acts
8819 identically to reduction(+:var). */
8820 if (rcode == MINUS_EXPR)
8821 rcode = PLUS_EXPR;
8822 if (TREE_CODE (decl) == MEM_REF)
8823 {
8824 tree type = TREE_TYPE (new_var);
8825 tree v = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
8826 tree i = create_tmp_var (TREE_TYPE (v));
8827 tree ptype = build_pointer_type (TREE_TYPE (type));
8828 if (DECL_P (v))
8829 {
8830 v = maybe_lookup_decl_in_outer_ctx (v, ctx);
8831 tree vv = create_tmp_var (TREE_TYPE (v));
8832 gimplify_assign (vv, v, start);
8833 v = vv;
8834 }
8835 ref = build4 (ARRAY_REF, pointer_sized_int_node, avar,
8836 size_int (7 + cnt * 3), NULL_TREE, NULL_TREE);
8837 new_var = build_fold_addr_expr (new_var);
8838 new_var = fold_convert (ptype, new_var);
8839 ref = fold_convert (ptype, ref);
8840 tree m = create_tmp_var (ptype);
8841 gimplify_assign (m, new_var, end);
8842 new_var = m;
8843 m = create_tmp_var (ptype);
8844 gimplify_assign (m, ref, end);
8845 ref = m;
8846 gimplify_assign (i, build_int_cst (TREE_TYPE (v), 0), end);
8847 tree body = create_artificial_label (UNKNOWN_LOCATION);
8848 tree endl = create_artificial_label (UNKNOWN_LOCATION);
8849 gimple_seq_add_stmt (end, gimple_build_label (body));
8850 tree priv = build_simple_mem_ref (new_var);
8851 tree out = build_simple_mem_ref (ref);
8852 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
8853 {
8854 tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
8855 tree decl_placeholder
8856 = OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c);
8857 tree lab6 = NULL_TREE;
8858 if (cancellable)
8859 {
8860 /* If this reduction needs destruction and parallel
8861 has been cancelled, jump around the merge operation
8862 to the destruction. */
8863 tree lab5 = create_artificial_label (UNKNOWN_LOCATION);
8864 lab6 = create_artificial_label (UNKNOWN_LOCATION);
8865 tree zero = build_zero_cst (TREE_TYPE (cancellable));
8866 g = gimple_build_cond (NE_EXPR, cancellable, zero,
8867 lab6, lab5);
8868 gimple_seq_add_stmt (end, g);
8869 gimple_seq_add_stmt (end, gimple_build_label (lab5));
8870 }
8871 SET_DECL_VALUE_EXPR (placeholder, out);
8872 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
8873 SET_DECL_VALUE_EXPR (decl_placeholder, priv);
8874 DECL_HAS_VALUE_EXPR_P (decl_placeholder) = 1;
8875 lower_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c), ctx);
8876 gimple_seq_add_seq (end,
8877 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c));
8878 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
8879 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_TASK_REDUCTION)
8880 {
8881 OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) = NULL;
8882 OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c) = NULL;
8883 }
8884 if (cancellable)
8885 gimple_seq_add_stmt (end, gimple_build_label (lab6));
8886 tree x = lang_hooks.decls.omp_clause_dtor (c, priv);
8887 if (x)
8888 {
8889 gimple_seq tseq = NULL;
8890 gimplify_stmt (&x, &tseq);
8891 gimple_seq_add_seq (end, tseq);
8892 }
8893 }
8894 else
8895 {
8896 tree x = build2 (rcode, TREE_TYPE (out), out, priv);
8897 out = unshare_expr (out);
8898 gimplify_assign (out, x, end);
8899 }
8900 gimple *g
8901 = gimple_build_assign (new_var, POINTER_PLUS_EXPR, new_var,
8902 TYPE_SIZE_UNIT (TREE_TYPE (type)));
8903 gimple_seq_add_stmt (end, g);
8904 g = gimple_build_assign (ref, POINTER_PLUS_EXPR, ref,
8905 TYPE_SIZE_UNIT (TREE_TYPE (type)));
8906 gimple_seq_add_stmt (end, g);
8907 g = gimple_build_assign (i, PLUS_EXPR, i,
8908 build_int_cst (TREE_TYPE (i), 1));
8909 gimple_seq_add_stmt (end, g);
8910 g = gimple_build_cond (LE_EXPR, i, v, body, endl);
8911 gimple_seq_add_stmt (end, g);
8912 gimple_seq_add_stmt (end, gimple_build_label (endl));
8913 }
8914 else if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
8915 {
8916 tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
8917 tree oldv = NULL_TREE;
8918 tree lab6 = NULL_TREE;
8919 if (cancellable)
8920 {
8921 /* If this reduction needs destruction and parallel
8922 has been cancelled, jump around the merge operation
8923 to the destruction. */
8924 tree lab5 = create_artificial_label (UNKNOWN_LOCATION);
8925 lab6 = create_artificial_label (UNKNOWN_LOCATION);
8926 tree zero = build_zero_cst (TREE_TYPE (cancellable));
8927 g = gimple_build_cond (NE_EXPR, cancellable, zero,
8928 lab6, lab5);
8929 gimple_seq_add_stmt (end, g);
8930 gimple_seq_add_stmt (end, gimple_build_label (lab5));
8931 }
8932 if (omp_is_reference (decl)
8933 && !useless_type_conversion_p (TREE_TYPE (placeholder),
8934 TREE_TYPE (ref)))
8935 ref = build_fold_addr_expr_loc (OMP_CLAUSE_LOCATION (c), ref);
8936 ref = build_fold_addr_expr_loc (OMP_CLAUSE_LOCATION (c), ref);
8937 tree refv = create_tmp_var (TREE_TYPE (ref));
8938 gimplify_assign (refv, ref, end);
8939 ref = build_simple_mem_ref_loc (OMP_CLAUSE_LOCATION (c), refv);
8940 SET_DECL_VALUE_EXPR (placeholder, ref);
8941 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
8942 tree d = maybe_lookup_decl (decl, ctx);
8943 gcc_assert (d);
8944 if (DECL_HAS_VALUE_EXPR_P (d))
8945 oldv = DECL_VALUE_EXPR (d);
8946 if (omp_is_reference (var))
8947 {
8948 tree v = fold_convert (TREE_TYPE (d),
8949 build_fold_addr_expr (new_var));
8950 SET_DECL_VALUE_EXPR (d, v);
8951 }
8952 else
8953 SET_DECL_VALUE_EXPR (d, new_var);
8954 DECL_HAS_VALUE_EXPR_P (d) = 1;
8955 lower_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c), ctx);
8956 if (oldv)
8957 SET_DECL_VALUE_EXPR (d, oldv);
8958 else
8959 {
8960 SET_DECL_VALUE_EXPR (d, NULL_TREE);
8961 DECL_HAS_VALUE_EXPR_P (d) = 0;
8962 }
8963 gimple_seq_add_seq (end, OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c));
8964 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
8965 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_TASK_REDUCTION)
8966 OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) = NULL;
8967 if (cancellable)
8968 gimple_seq_add_stmt (end, gimple_build_label (lab6));
8969 tree x = lang_hooks.decls.omp_clause_dtor (c, new_var);
8970 if (x)
8971 {
8972 gimple_seq tseq = NULL;
8973 gimplify_stmt (&x, &tseq);
8974 gimple_seq_add_seq (end, tseq);
8975 }
8976 }
8977 else
8978 {
8979 tree x = build2 (rcode, TREE_TYPE (ref), ref, new_var);
8980 ref = unshare_expr (ref);
8981 gimplify_assign (ref, x, end);
8982 }
8983 gimple_seq_add_stmt (end, gimple_build_label (lab4));
8984 ++cnt;
8985 field = DECL_CHAIN (bfield);
8986 }
8987 }
8988
8989 if (code == OMP_TASKGROUP)
8990 {
8991 t = builtin_decl_explicit (BUILT_IN_GOMP_TASKGROUP_REDUCTION_REGISTER);
8992 g = gimple_build_call (t, 1, build_fold_addr_expr (avar));
8993 gimple_seq_add_stmt (start, g);
8994 }
8995 else
8996 {
8997 tree c;
8998 if (code == OMP_FOR)
8999 c = gimple_omp_for_clauses (ctx->stmt);
9000 else if (code == OMP_SECTIONS)
9001 c = gimple_omp_sections_clauses (ctx->stmt);
9002 else
9003 c = gimple_omp_taskreg_clauses (ctx->stmt);
9004 c = omp_find_clause (c, OMP_CLAUSE__REDUCTEMP_);
9005 t = fold_convert (TREE_TYPE (OMP_CLAUSE_DECL (c)),
9006 build_fold_addr_expr (avar));
9007 gimplify_assign (OMP_CLAUSE_DECL (c), t, start);
9008 }
9009
9010 gimple_seq_add_stmt (end, gimple_build_assign (data, PLUS_EXPR, data, sz));
9011 gimple_seq_add_stmt (end, gimple_build_assign (idx, PLUS_EXPR, idx,
9012 size_one_node));
9013 g = gimple_build_cond (NE_EXPR, idx, num_thr_sz, lab1, lab2);
9014 gimple_seq_add_stmt (end, g);
9015 gimple_seq_add_stmt (end, gimple_build_label (lab2));
9016 if (code == OMP_FOR || code == OMP_SECTIONS)
9017 {
9018 enum built_in_function bfn
9019 = BUILT_IN_GOMP_WORKSHARE_TASK_REDUCTION_UNREGISTER;
9020 t = builtin_decl_explicit (bfn);
9021 tree c_bool_type = TREE_VALUE (TYPE_ARG_TYPES (TREE_TYPE (t)));
9022 tree arg;
9023 if (cancellable)
9024 {
9025 arg = create_tmp_var (c_bool_type);
9026 gimple_seq_add_stmt (end, gimple_build_assign (arg, NOP_EXPR,
9027 cancellable));
9028 }
9029 else
9030 arg = build_int_cst (c_bool_type, 0);
9031 g = gimple_build_call (t, 1, arg);
9032 }
9033 else
9034 {
9035 t = builtin_decl_explicit (BUILT_IN_GOMP_TASKGROUP_REDUCTION_UNREGISTER);
9036 g = gimple_build_call (t, 1, build_fold_addr_expr (avar));
9037 }
9038 gimple_seq_add_stmt (end, g);
9039 t = build_constructor (atype, NULL);
9040 TREE_THIS_VOLATILE (t) = 1;
9041 gimple_seq_add_stmt (end, gimple_build_assign (avar, t));
9042 }
9043
9044 /* Expand code for an OpenMP taskgroup directive. */
9045
9046 static void
9047 lower_omp_taskgroup (gimple_stmt_iterator *gsi_p, omp_context *ctx)
9048 {
9049 gimple *stmt = gsi_stmt (*gsi_p);
9050 gcall *x;
9051 gbind *bind;
9052 gimple_seq dseq = NULL;
9053 tree block = make_node (BLOCK);
9054
9055 bind = gimple_build_bind (NULL, NULL, block);
9056 gsi_replace (gsi_p, bind, true);
9057 gimple_bind_add_stmt (bind, stmt);
9058
9059 push_gimplify_context ();
9060
9061 x = gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_TASKGROUP_START),
9062 0);
9063 gimple_bind_add_stmt (bind, x);
9064
9065 lower_omp_task_reductions (ctx, OMP_TASKGROUP,
9066 gimple_omp_taskgroup_clauses (stmt),
9067 gimple_bind_body_ptr (bind), &dseq);
9068
9069 lower_omp (gimple_omp_body_ptr (stmt), ctx);
9070 gimple_bind_add_seq (bind, gimple_omp_body (stmt));
9071 gimple_omp_set_body (stmt, NULL);
9072
9073 gimple_bind_add_stmt (bind, gimple_build_omp_return (true));
9074 gimple_bind_add_seq (bind, dseq);
9075
9076 pop_gimplify_context (bind);
9077
9078 gimple_bind_append_vars (bind, ctx->block_vars);
9079 BLOCK_VARS (block) = ctx->block_vars;
9080 }
9081
9082
9083 /* Fold the OMP_ORDERED_CLAUSES for the OMP_ORDERED in STMT if possible. */
9084
static void
lower_omp_ordered_clauses (gimple_stmt_iterator *gsi_p, gomp_ordered *ord_stmt,
			   omp_context *ctx)
{
  /* Only meaningful when the ordered construct is directly nested in a
     GIMPLE_OMP_FOR with an ordered(n) clause.  */
  struct omp_for_data fd;
  if (!ctx->outer || gimple_code (ctx->outer->stmt) != GIMPLE_OMP_FOR)
    return;

  unsigned int len = gimple_omp_for_collapse (ctx->outer->stmt);
  struct omp_for_data_loop *loops = XALLOCAVEC (struct omp_for_data_loop, len);
  omp_extract_for_data (as_a <gomp_for *> (ctx->outer->stmt), &fd, loops);
  if (!fd.ordered)
    return;

  tree *list_p = gimple_omp_ordered_clauses_ptr (ord_stmt);
  tree c = gimple_omp_ordered_clauses (ord_stmt);
  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND
      && OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SINK)
    {
      /* Merge depend clauses from multiple adjacent
	 #pragma omp ordered depend(sink:...) constructs
	 into one #pragma omp ordered depend(sink:...), so that
	 we can optimize them together.  */
      gimple_stmt_iterator gsi = *gsi_p;
      gsi_next (&gsi);
      while (!gsi_end_p (gsi))
	{
	  gimple *stmt = gsi_stmt (gsi);
	  /* Skip over debug statements and nops between the constructs.  */
	  if (is_gimple_debug (stmt)
	      || gimple_code (stmt) == GIMPLE_NOP)
	    {
	      gsi_next (&gsi);
	      continue;
	    }
	  if (gimple_code (stmt) != GIMPLE_OMP_ORDERED)
	    break;
	  gomp_ordered *ord_stmt2 = as_a <gomp_ordered *> (stmt);
	  c = gimple_omp_ordered_clauses (ord_stmt2);
	  if (c == NULL_TREE
	      || OMP_CLAUSE_CODE (c) != OMP_CLAUSE_DEPEND
	      || OMP_CLAUSE_DEPEND_KIND (c) != OMP_CLAUSE_DEPEND_SINK)
	    break;
	  /* Append the follower's clause chain to ORD_STMT's list and
	     delete the now-redundant ordered statement.  */
	  while (*list_p)
	    list_p = &OMP_CLAUSE_CHAIN (*list_p);
	  *list_p = c;
	  gsi_remove (&gsi, true);
	}
    }

  /* Canonicalize sink dependence clauses into one folded clause if
     possible.

     The basic algorithm is to create a sink vector whose first
     element is the GCD of all the first elements, and whose remaining
     elements are the minimum of the subsequent columns.

     We ignore dependence vectors whose first element is zero because
     such dependencies are known to be executed by the same thread.

     We take into account the direction of the loop, so a minimum
     becomes a maximum if the loop is iterating forwards.  We also
     ignore sink clauses where the loop direction is unknown, or where
     the offsets are clearly invalid because they are not a multiple
     of the loop increment.

     For example:

	#pragma omp for ordered(2)
	for (i=0; i < N; ++i)
	  for (j=0; j < M; ++j)
	    {
	      #pragma omp ordered \
		depend(sink:i-8,j-2) \
		depend(sink:i,j-1) \	// Completely ignored because i+0.
		depend(sink:i-4,j-3) \
		depend(sink:i-6,j-4)
	      #pragma omp ordered depend(source)
	    }

     Folded clause is:

	depend(sink:-gcd(8,4,6),-min(2,3,4))
	  -or-
	depend(sink:-2,-2)
   */

  /* FIXME: Computing GCD's where the first element is zero is
     non-trivial in the presence of collapsed loops.  Do this later.  */
  if (fd.collapse > 1)
    return;

  /* folded_deps[0 .. len-1] holds the folded sink vector built so far;
     folded_deps[len .. 2*len-2] temporarily holds dimensions 1..len-1 of
     the clause currently being examined.  */
  wide_int *folded_deps = XALLOCAVEC (wide_int, 2 * len - 1);

  /* wide_int is not a POD so it must be default-constructed.  */
  for (unsigned i = 0; i != 2 * len - 1; ++i)
    new (static_cast<void*>(folded_deps + i)) wide_int ();

  /* The clause whose vector is (so far) lexically latest; reused as the
     folded result.  */
  tree folded_dep = NULL_TREE;
  /* TRUE if the first dimension's offset is negative.  */
  bool neg_offset_p = false;

  list_p = gimple_omp_ordered_clauses_ptr (ord_stmt);
  unsigned int i;
  while ((c = *list_p) != NULL)
    {
      bool remove = false;

      gcc_assert (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND);
      if (OMP_CLAUSE_DEPEND_KIND (c) != OMP_CLAUSE_DEPEND_SINK)
	goto next_ordered_clause;

      /* Walk the TREE_LIST sink vector: TREE_VALUE is the iteration
	 variable, TREE_PURPOSE the constant offset.  */
      tree vec;
      for (vec = OMP_CLAUSE_DECL (c), i = 0;
	   vec && TREE_CODE (vec) == TREE_LIST;
	   vec = TREE_CHAIN (vec), ++i)
	{
	  gcc_assert (i < len);

	  /* omp_extract_for_data has canonicalized the condition.  */
	  gcc_assert (fd.loops[i].cond_code == LT_EXPR
		      || fd.loops[i].cond_code == GT_EXPR);
	  bool forward = fd.loops[i].cond_code == LT_EXPR;
	  bool maybe_lexically_later = true;

	  /* While the committee makes up its mind, bail if we have any
	     non-constant steps.  */
	  if (TREE_CODE (fd.loops[i].step) != INTEGER_CST)
	    goto lower_omp_ordered_ret;

	  tree itype = TREE_TYPE (TREE_VALUE (vec));
	  if (POINTER_TYPE_P (itype))
	    itype = sizetype;
	  wide_int offset = wide_int::from (wi::to_wide (TREE_PURPOSE (vec)),
					    TYPE_PRECISION (itype),
					    TYPE_SIGN (itype));

	  /* Ignore invalid offsets that are not multiples of the step.  */
	  if (!wi::multiple_of_p (wi::abs (offset),
				  wi::abs (wi::to_wide (fd.loops[i].step)),
				  UNSIGNED))
	    {
	      warning_at (OMP_CLAUSE_LOCATION (c), 0,
			  "ignoring sink clause with offset that is not "
			  "a multiple of the loop step");
	      remove = true;
	      goto next_ordered_clause;
	    }

	  /* Calculate the first dimension.  The first dimension of
	     the folded dependency vector is the GCD of the first
	     elements, while ignoring any first elements whose offset
	     is 0.  */
	  if (i == 0)
	    {
	      /* Ignore dependence vectors whose first dimension is 0.  */
	      if (offset == 0)
		{
		  remove = true;
		  goto next_ordered_clause;
		}
	      else
		{
		  /* A valid first offset must point backwards relative to
		     the loop direction, otherwise the dependence would be
		     on a not-yet-executed iteration.  */
		  if (!TYPE_UNSIGNED (itype) && (forward ^ wi::neg_p (offset)))
		    {
		      error_at (OMP_CLAUSE_LOCATION (c),
				"first offset must be in opposite direction "
				"of loop iterations");
		      goto lower_omp_ordered_ret;
		    }
		  /* Normalize so folded_deps[0] is a positive magnitude;
		     NEG_OFFSET_P records the sign to restore later.  */
		  if (forward)
		    offset = -offset;
		  neg_offset_p = forward;
		  /* Initialize the first time around.  */
		  if (folded_dep == NULL_TREE)
		    {
		      folded_dep = c;
		      folded_deps[0] = offset;
		    }
		  else
		    folded_deps[0] = wi::gcd (folded_deps[0],
					      offset, UNSIGNED);
		}
	    }
	  /* Calculate minimum for the remaining dimensions.  */
	  else
	    {
	      folded_deps[len + i - 1] = offset;
	      if (folded_dep == c)
		folded_deps[i] = offset;
	      else if (maybe_lexically_later
		       && !wi::eq_p (folded_deps[i], offset))
		{
		  if (forward ^ wi::gts_p (folded_deps[i], offset))
		    {
		      /* This clause is lexically later than the current
			 champion: copy its already-seen dimensions over.  */
		      unsigned int j;
		      folded_dep = c;
		      for (j = 1; j <= i; j++)
			folded_deps[j] = folded_deps[len + j - 1];
		    }
		  else
		    maybe_lexically_later = false;
		}
	    }
	}
      gcc_assert (i == len);

      /* Every fully-processed sink clause is removed; the folded result
	 is reinserted at the head below.  */
      remove = true;

    next_ordered_clause:
      if (remove)
	*list_p = OMP_CLAUSE_CHAIN (c);
      else
	list_p = &OMP_CLAUSE_CHAIN (c);
    }

  if (folded_dep)
    {
      /* Restore the sign normalized away above.  */
      if (neg_offset_p)
	folded_deps[0] = -folded_deps[0];

      tree itype = TREE_TYPE (TREE_VALUE (OMP_CLAUSE_DECL (folded_dep)));
      if (POINTER_TYPE_P (itype))
	itype = sizetype;

      /* Rewrite FOLDED_DEP's first offset in place and prepend it to
	 whatever clauses survived (e.g. non-sink depend clauses).  */
      TREE_PURPOSE (OMP_CLAUSE_DECL (folded_dep))
	= wide_int_to_tree (itype, folded_deps[0]);
      OMP_CLAUSE_CHAIN (folded_dep) = gimple_omp_ordered_clauses (ord_stmt);
      *gimple_omp_ordered_clauses_ptr (ord_stmt) = folded_dep;
    }

 lower_omp_ordered_ret:

  /* Ordered without clauses is #pragma omp ordered threads, while we want
     a nop instead if we remove all clauses.  */
  if (gimple_omp_ordered_clauses (ord_stmt) == NULL_TREE)
    gsi_replace (gsi_p, gimple_build_nop (), true);
}
9322
9323
9324 /* Expand code for an OpenMP ordered directive. */
9325
static void
lower_omp_ordered (gimple_stmt_iterator *gsi_p, omp_context *ctx)
{
  tree block;
  gimple *stmt = gsi_stmt (*gsi_p), *g;
  gomp_ordered *ord_stmt = as_a <gomp_ordered *> (stmt);
  gcall *x;
  gbind *bind;
  /* simd variant uses internal functions resolved after vectorization;
     the plain variant calls into libgomp.  */
  bool simd = omp_find_clause (gimple_omp_ordered_clauses (ord_stmt),
			       OMP_CLAUSE_SIMD);
  /* FIXME: this should check presence of OMP_CLAUSE__SIMT_ on the enclosing
     loop.  */
  bool maybe_simt
    = simd && omp_maybe_offloaded_ctx (ctx) && omp_max_simt_vf () > 1;
  bool threads = omp_find_clause (gimple_omp_ordered_clauses (ord_stmt),
				  OMP_CLAUSE_THREADS);

  if (omp_find_clause (gimple_omp_ordered_clauses (ord_stmt),
		       OMP_CLAUSE_DEPEND))
    {
      /* FIXME: This needs to be moved to the expansion to verify various
	 conditions only testable on cfg with dominators computed, and also
	 all the depend clauses to be merged still might need to be available
	 for the runtime checks.  */
      if (0)
	lower_omp_ordered_clauses (gsi_p, ord_stmt, ctx);
      return;
    }

  push_gimplify_context ();

  /* Wrap the construct in a GIMPLE_BIND so start/end calls and any SIMT
     control flow can be added around the body.  */
  block = make_node (BLOCK);
  bind = gimple_build_bind (NULL, NULL, block);
  gsi_replace (gsi_p, bind, true);
  gimple_bind_add_stmt (bind, stmt);

  if (simd)
    {
      /* The THREADS flag is passed as the argument so later passes can
	 distinguish ordered threads from ordered simd.  */
      x = gimple_build_call_internal (IFN_GOMP_SIMD_ORDERED_START, 1,
				      build_int_cst (NULL_TREE, threads));
      cfun->has_simduid_loops = true;
    }
  else
    x = gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_ORDERED_START),
			   0);
  gimple_bind_add_stmt (bind, x);

  tree counter = NULL_TREE, test = NULL_TREE, body = NULL_TREE;
  if (maybe_simt)
    {
      /* For SIMT execution, run the body once per lane: loop from this
	 lane's number down to zero, executing the body only on the
	 iteration where GOMP_SIMT_ORDERED_PRED is zero.  */
      counter = create_tmp_var (integer_type_node);
      g = gimple_build_call_internal (IFN_GOMP_SIMT_LANE, 0);
      gimple_call_set_lhs (g, counter);
      gimple_bind_add_stmt (bind, g);

      body = create_artificial_label (UNKNOWN_LOCATION);
      test = create_artificial_label (UNKNOWN_LOCATION);
      gimple_bind_add_stmt (bind, gimple_build_label (body));

      tree simt_pred = create_tmp_var (integer_type_node);
      g = gimple_build_call_internal (IFN_GOMP_SIMT_ORDERED_PRED, 1, counter);
      gimple_call_set_lhs (g, simt_pred);
      gimple_bind_add_stmt (bind, g);

      tree t = create_artificial_label (UNKNOWN_LOCATION);
      g = gimple_build_cond (EQ_EXPR, simt_pred, integer_zero_node, t, test);
      gimple_bind_add_stmt (bind, g);

      gimple_bind_add_stmt (bind, gimple_build_label (t));
    }
  /* Lower the body and splice it into the bind, wrapped for EH cleanup.  */
  lower_omp (gimple_omp_body_ptr (stmt), ctx);
  gimple_omp_set_body (stmt, maybe_catch_exception (gimple_omp_body (stmt)));
  gimple_bind_add_seq (bind, gimple_omp_body (stmt));
  gimple_omp_set_body (stmt, NULL);

  if (maybe_simt)
    {
      /* Decrement the lane counter and loop back while any lane still has
	 a non-negative counter (GOMP_SIMT_VOTE_ANY across lanes).  */
      gimple_bind_add_stmt (bind, gimple_build_label (test));
      g = gimple_build_assign (counter, MINUS_EXPR, counter, integer_one_node);
      gimple_bind_add_stmt (bind, g);

      tree c = build2 (GE_EXPR, boolean_type_node, counter, integer_zero_node);
      tree nonneg = create_tmp_var (integer_type_node);
      gimple_seq tseq = NULL;
      gimplify_assign (nonneg, fold_convert (integer_type_node, c), &tseq);
      gimple_bind_add_seq (bind, tseq);

      g = gimple_build_call_internal (IFN_GOMP_SIMT_VOTE_ANY, 1, nonneg);
      gimple_call_set_lhs (g, nonneg);
      gimple_bind_add_stmt (bind, g);

      tree end = create_artificial_label (UNKNOWN_LOCATION);
      g = gimple_build_cond (NE_EXPR, nonneg, integer_zero_node, body, end);
      gimple_bind_add_stmt (bind, g);

      gimple_bind_add_stmt (bind, gimple_build_label (end));
    }
  if (simd)
    x = gimple_build_call_internal (IFN_GOMP_SIMD_ORDERED_END, 1,
				    build_int_cst (NULL_TREE, threads));
  else
    x = gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_ORDERED_END),
			   0);
  gimple_bind_add_stmt (bind, x);

  gimple_bind_add_stmt (bind, gimple_build_omp_return (true));

  pop_gimplify_context (bind);

  gimple_bind_append_vars (bind, ctx->block_vars);
  BLOCK_VARS (block) = gimple_bind_vars (bind);
}
9438
9439
9440 /* Expand code for an OpenMP scan directive and the structured block
9441 before the scan directive. */
9442
static void
lower_omp_scan (gimple_stmt_iterator *gsi_p, omp_context *ctx)
{
  gimple *stmt = gsi_stmt (*gsi_p);
  /* The GIMPLE_OMP_SCAN with clauses marks the inclusive/exclusive scan
     directive; the clause-less one marks the other half of the loop body.  */
  bool has_clauses
    = gimple_omp_scan_clauses (as_a <gomp_scan *> (stmt)) != NULL;
  tree lane = NULL_TREE;
  gimple_seq before = NULL;
  omp_context *octx = ctx->outer;
  gcc_assert (octx);
  if (octx->scan_exclusive && !has_clauses)
    {
      gimple_stmt_iterator gsi2 = *gsi_p;
      gsi_next (&gsi2);
      gimple *stmt2 = gsi_stmt (gsi2);
      /* For exclusive scan, swap GIMPLE_OMP_SCAN without clauses
	 with following GIMPLE_OMP_SCAN with clauses, so that input_phase,
	 the one with exclusive clause(s), comes first.  */
      if (stmt2
	  && gimple_code (stmt2) == GIMPLE_OMP_SCAN
	  && gimple_omp_scan_clauses (as_a <gomp_scan *> (stmt2)) != NULL)
	{
	  gsi_remove (gsi_p, false);
	  gsi_insert_after (gsi_p, stmt, GSI_SAME_STMT);
	  ctx = maybe_lookup_ctx (stmt2);
	  gcc_assert (ctx);
	  lower_omp_scan (gsi_p, ctx);
	  return;
	}
    }

  /* Whether this scan half computes the input phase (producing the values
     to be scanned) or the scan phase (consuming the scanned results).  */
  bool input_phase = has_clauses ^ octx->scan_inclusive;
  bool is_simd = (gimple_code (octx->stmt) == GIMPLE_OMP_FOR
		  && gimple_omp_for_kind (octx->stmt) == GF_OMP_FOR_KIND_SIMD);
  bool is_for = (gimple_code (octx->stmt) == GIMPLE_OMP_FOR
		 && gimple_omp_for_kind (octx->stmt) == GF_OMP_FOR_KIND_FOR
		 && !gimple_omp_for_combined_p (octx->stmt));
  bool is_for_simd = is_simd && gimple_omp_for_combined_into_p (octx->stmt);
  if (is_for_simd && octx->for_simd_scan_phase)
    is_simd = false;
  if (is_simd)
    /* For simd, ask for the current lane; the third GOMP_SIMD_LANE
       argument encodes which scan phase this is (1 input, 2 inclusive
       scan, 3 exclusive scan).  */
    if (tree c = omp_find_clause (gimple_omp_for_clauses (octx->stmt),
				  OMP_CLAUSE__SIMDUID_))
      {
	tree uid = OMP_CLAUSE__SIMDUID__DECL (c);
	lane = create_tmp_var (unsigned_type_node);
	tree t = build_int_cst (integer_type_node,
				input_phase ? 1
				: octx->scan_inclusive ? 2 : 3);
	gimple *g
	  = gimple_build_call_internal (IFN_GOMP_SIMD_LANE, 2, uid, t);
	gimple_call_set_lhs (g, lane);
	gimple_seq_add_stmt (&before, g);
      }

  if (is_simd || is_for)
    {
      /* Handle each reduction(inscan, ...) clause on the enclosing loop.
	 Roughly: VAL is the per-lane/private value, VAR2 the "other side"
	 of the scan (simd-array element or outer variable), VAR3 a
	 separate identity-element variable if one exists, VAR4 a
	 temporary used by exclusive scans — NOTE(review): roles inferred
	 from the uses below; confirm against omp-low internals docs.  */
      for (tree c = gimple_omp_for_clauses (octx->stmt);
	   c; c = OMP_CLAUSE_CHAIN (c))
	if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
	    && OMP_CLAUSE_REDUCTION_INSCAN (c))
	  {
	    location_t clause_loc = OMP_CLAUSE_LOCATION (c);
	    tree var = OMP_CLAUSE_DECL (c);
	    tree new_var = lookup_decl (var, octx);
	    tree val = new_var;
	    tree var2 = NULL_TREE;
	    tree var3 = NULL_TREE;
	    tree var4 = NULL_TREE;
	    tree lane0 = NULL_TREE;
	    tree new_vard = new_var;
	    if (omp_is_reference (var))
	      {
		/* For by-reference variables operate on the pointee.  */
		new_var = build_simple_mem_ref_loc (clause_loc, new_var);
		val = new_var;
	      }
	    if (DECL_HAS_VALUE_EXPR_P (new_vard))
	      {
		val = DECL_VALUE_EXPR (new_vard);
		if (new_vard != new_var)
		  {
		    gcc_assert (TREE_CODE (val) == ADDR_EXPR);
		    val = TREE_OPERAND (val, 0);
		  }
		if (TREE_CODE (val) == ARRAY_REF
		    && VAR_P (TREE_OPERAND (val, 0)))
		  {
		    tree v = TREE_OPERAND (val, 0);
		    if (lookup_attribute ("omp simd array",
					  DECL_ATTRIBUTES (v)))
		      {
			/* The variable lives in a per-lane simd array;
			   remember its original lane index in LANE0 and
			   reindex by the phase-specific LANE.  */
			val = unshare_expr (val);
			lane0 = TREE_OPERAND (val, 1);
			TREE_OPERAND (val, 1) = lane;
			var2 = lookup_decl (v, octx);
			if (octx->scan_exclusive)
			  var4 = lookup_decl (var2, octx);
			if (input_phase
			    && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
			  var3 = maybe_lookup_decl (var4 ? var4 : var2, octx);
			if (!input_phase)
			  {
			    var2 = build4 (ARRAY_REF, TREE_TYPE (val),
					   var2, lane, NULL_TREE, NULL_TREE);
			    TREE_THIS_NOTRAP (var2) = 1;
			    if (octx->scan_exclusive)
			      {
				var4 = build4 (ARRAY_REF, TREE_TYPE (val),
					       var4, lane, NULL_TREE,
					       NULL_TREE);
				TREE_THIS_NOTRAP (var4) = 1;
			      }
			  }
			else
			  var2 = val;
		      }
		  }
		gcc_assert (var2);
	      }
	    else
	      {
		var2 = build_outer_var_ref (var, octx);
		if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
		  {
		    var3 = maybe_lookup_decl (new_vard, octx);
		    if (var3 == new_vard || var3 == NULL_TREE)
		      var3 = NULL_TREE;
		    else if (is_simd && octx->scan_exclusive && !input_phase)
		      {
			var4 = maybe_lookup_decl (var3, octx);
			if (var4 == var3 || var4 == NULL_TREE)
			  {
			    if (TREE_ADDRESSABLE (TREE_TYPE (new_var)))
			      {
				var4 = var3;
				var3 = NULL_TREE;
			      }
			    else
			      var4 = NULL_TREE;
			  }
		      }
		  }
		if (is_simd
		    && octx->scan_exclusive
		    && !input_phase
		    && var4 == NULL_TREE)
		  var4 = create_tmp_var (TREE_TYPE (val));
	      }
	    if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
	      {
		/* User-defined reduction: splice in the user's init/merge
		   sequences with the placeholders temporarily bound via
		   DECL_VALUE_EXPR.  */
		tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
		if (input_phase)
		  {
		    if (var3)
		      {
			/* If we've added a separate identity element
			   variable, copy it over into val.  */
			tree x = lang_hooks.decls.omp_clause_assign_op (c, val,
									var3);
			gimplify_and_add (x, &before);
		      }
		    else if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c))
		      {
			/* Otherwise, assign to it the identity element.  */
			gimple_seq tseq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c);
			if (is_for)
			  tseq = copy_gimple_seq_and_replace_locals (tseq);
			tree ref = build_outer_var_ref (var, octx);
			tree x = (DECL_HAS_VALUE_EXPR_P (new_vard)
				  ? DECL_VALUE_EXPR (new_vard) : NULL_TREE);
			if (x)
			  {
			    if (new_vard != new_var)
			      val = build_fold_addr_expr_loc (clause_loc, val);
			    SET_DECL_VALUE_EXPR (new_vard, val);
			  }
			SET_DECL_VALUE_EXPR (placeholder, ref);
			DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
			lower_omp (&tseq, octx);
			/* Restore the original value exprs.  */
			if (x)
			  SET_DECL_VALUE_EXPR (new_vard, x);
			SET_DECL_VALUE_EXPR (placeholder, NULL_TREE);
			DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
			gimple_seq_add_seq (&before, tseq);
			if (is_simd)
			  OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL;
		      }
		  }
		else if (is_simd)
		  {
		    tree x;
		    if (octx->scan_exclusive)
		      {
			/* Exclusive scan: stash the pre-merge value so the
			   body sees the value before this iteration's
			   contribution.  */
			tree v4 = unshare_expr (var4);
			tree v2 = unshare_expr (var2);
			x = lang_hooks.decls.omp_clause_assign_op (c, v4, v2);
			gimplify_and_add (x, &before);
		      }
		    gimple_seq tseq = OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c);
		    x = (DECL_HAS_VALUE_EXPR_P (new_vard)
			 ? DECL_VALUE_EXPR (new_vard) : NULL_TREE);
		    tree vexpr = val;
		    if (x && new_vard != new_var)
		      vexpr = build_fold_addr_expr_loc (clause_loc, val);
		    if (x)
		      SET_DECL_VALUE_EXPR (new_vard, vexpr);
		    SET_DECL_VALUE_EXPR (placeholder, var2);
		    DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
		    lower_omp (&tseq, octx);
		    gimple_seq_add_seq (&before, tseq);
		    OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
		    if (x)
		      SET_DECL_VALUE_EXPR (new_vard, x);
		    SET_DECL_VALUE_EXPR (placeholder, NULL_TREE);
		    DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
		    if (octx->scan_inclusive)
		      {
			x = lang_hooks.decls.omp_clause_assign_op (c, val,
								   var2);
			gimplify_and_add (x, &before);
		      }
		    else if (lane0 == NULL_TREE)
		      {
			x = lang_hooks.decls.omp_clause_assign_op (c, val,
								   var4);
			gimplify_and_add (x, &before);
		      }
		  }
	      }
	    else
	      {
		if (input_phase)
		  {
		    /* input phase.  Set val to initializer before
		       the body.  */
		    tree x = omp_reduction_init (c, TREE_TYPE (new_var));
		    gimplify_assign (val, x, &before);
		  }
		else if (is_simd)
		  {
		    /* scan phase.  */
		    enum tree_code code = OMP_CLAUSE_REDUCTION_CODE (c);
		    /* reduction(-:var) sums the partial results, so it
		       behaves like reduction(+:var).  */
		    if (code == MINUS_EXPR)
		      code = PLUS_EXPR;

		    tree x = build2 (code, TREE_TYPE (var2),
				     unshare_expr (var2), unshare_expr (val));
		    if (octx->scan_inclusive)
		      {
			gimplify_assign (unshare_expr (var2), x, &before);
			gimplify_assign (val, var2, &before);
		      }
		    else
		      {
			gimplify_assign (unshare_expr (var4),
					 unshare_expr (var2), &before);
			gimplify_assign (var2, x, &before);
			if (lane0 == NULL_TREE)
			  gimplify_assign (val, var4, &before);
		      }
		  }
	      }
	    if (octx->scan_exclusive && !input_phase && lane0)
	      {
		/* Point the variable's value expr at the stashed pre-merge
		   element (indexed by the original lane).  */
		tree vexpr = unshare_expr (var4);
		TREE_OPERAND (vexpr, 1) = lane0;
		if (new_vard != new_var)
		  vexpr = build_fold_addr_expr_loc (clause_loc, vexpr);
		SET_DECL_VALUE_EXPR (new_vard, vexpr);
	      }
	  }
    }
  if (is_simd && !is_for_simd)
    {
      /* For plain simd, replace the scan statement with BEFORE followed by
	 the (unlowered-here) body spliced inline.  */
      gsi_insert_seq_after (gsi_p, gimple_omp_body (stmt), GSI_SAME_STMT);
      gsi_insert_seq_after (gsi_p, before, GSI_SAME_STMT);
      gsi_replace (gsi_p, gimple_build_nop (), true);
      return;
    }
  lower_omp (gimple_omp_body_ptr (stmt), octx);
  if (before)
    {
      /* Prepend the generated sequence to the lowered body.  */
      gimple_stmt_iterator gsi = gsi_start_1 (gimple_omp_body_ptr (stmt));
      gsi_insert_seq_before (&gsi, before, GSI_SAME_STMT);
    }
}
9729
9730
9731 /* Gimplify a GIMPLE_OMP_CRITICAL statement. This is a relatively simple
9732 substitution of a couple of function calls. But in the NAMED case,
9733 requires that languages coordinate a symbol name. It is therefore
9734 best put here in common code. */
9735
9736 static GTY(()) hash_map<tree, tree> *critical_name_mutexes;
9737
static void
lower_omp_critical (gimple_stmt_iterator *gsi_p, omp_context *ctx)
{
  /* Lower a GIMPLE_OMP_CRITICAL into a bind containing
     GOMP_critical_{name_,}start, the lowered body (EH-protected), and the
     matching end call.  Named criticals lock a lazily-created global
     mutex symbol shared across translation units.  */
  tree block;
  tree name, lock, unlock;
  gomp_critical *stmt = as_a <gomp_critical *> (gsi_stmt (*gsi_p));
  gbind *bind;
  location_t loc = gimple_location (stmt);
  gimple_seq tbody;

  name = gimple_omp_critical_name (stmt);
  if (name)
    {
      tree decl;

      if (!critical_name_mutexes)
	critical_name_mutexes = hash_map<tree, tree>::create_ggc (10);

      /* Reuse the mutex variable already created for this name, if any.  */
      tree *n = critical_name_mutexes->get (name);
      if (n == NULL)
	{
	  char *new_str;

	  decl = create_tmp_var_raw (ptr_type_node);

	  /* The mangled name is ABI: all TUs using the same critical name
	     must agree on it, hence TREE_PUBLIC + DECL_COMMON.  */
	  new_str = ACONCAT ((".gomp_critical_user_",
			      IDENTIFIER_POINTER (name), NULL));
	  DECL_NAME (decl) = get_identifier (new_str);
	  TREE_PUBLIC (decl) = 1;
	  TREE_STATIC (decl) = 1;
	  DECL_COMMON (decl) = 1;
	  DECL_ARTIFICIAL (decl) = 1;
	  DECL_IGNORED_P (decl) = 1;

	  varpool_node::finalize_decl (decl);

	  critical_name_mutexes->put (name, decl);
	}
      else
	decl = *n;

      /* If '#pragma omp critical' is inside offloaded region or
	 inside function marked as offloadable, the symbol must be
	 marked as offloadable too.  */
      omp_context *octx;
      if (cgraph_node::get (current_function_decl)->offloadable)
	varpool_node::get_create (decl)->offloadable = 1;
      else
	for (octx = ctx->outer; octx; octx = octx->outer)
	  if (is_gimple_omp_offloaded (octx->stmt))
	    {
	      varpool_node::get_create (decl)->offloadable = 1;
	      break;
	    }

      lock = builtin_decl_explicit (BUILT_IN_GOMP_CRITICAL_NAME_START);
      lock = build_call_expr_loc (loc, lock, 1,
				  build_fold_addr_expr_loc (loc, decl));

      unlock = builtin_decl_explicit (BUILT_IN_GOMP_CRITICAL_NAME_END);
      unlock = build_call_expr_loc (loc, unlock, 1,
				    build_fold_addr_expr_loc (loc, decl));
    }
  else
    {
      /* Unnamed critical: the runtime uses one implicit global mutex.  */
      lock = builtin_decl_explicit (BUILT_IN_GOMP_CRITICAL_START);
      lock = build_call_expr_loc (loc, lock, 0);

      unlock = builtin_decl_explicit (BUILT_IN_GOMP_CRITICAL_END);
      unlock = build_call_expr_loc (loc, unlock, 0);
    }

  push_gimplify_context ();

  block = make_node (BLOCK);
  bind = gimple_build_bind (NULL, NULL, block);
  gsi_replace (gsi_p, bind, true);
  gimple_bind_add_stmt (bind, stmt);

  /* Gimplify the lock call onto the front of the bind body.  */
  tbody = gimple_bind_body (bind);
  gimplify_and_add (lock, &tbody);
  gimple_bind_set_body (bind, tbody);

  /* Lower the body; wrap it so the lock is released on exceptions.  */
  lower_omp (gimple_omp_body_ptr (stmt), ctx);
  gimple_omp_set_body (stmt, maybe_catch_exception (gimple_omp_body (stmt)));
  gimple_bind_add_seq (bind, gimple_omp_body (stmt));
  gimple_omp_set_body (stmt, NULL);

  tbody = gimple_bind_body (bind);
  gimplify_and_add (unlock, &tbody);
  gimple_bind_set_body (bind, tbody);

  gimple_bind_add_stmt (bind, gimple_build_omp_return (true));

  pop_gimplify_context (bind);
  gimple_bind_append_vars (bind, ctx->block_vars);
  BLOCK_VARS (block) = gimple_bind_vars (bind);
}
9836
9837 /* A subroutine of lower_omp_for. Generate code to emit the predicate
9838 for a lastprivate clause. Given a loop control predicate of (V
9839 cond N2), we gate the clause on (!(V cond N2)). The lowered form
9840 is appended to *DLIST, iterator initialization is appended to
9841 *BODY_P. *CLIST is for lastprivate(conditional:) code that needs
9842 to be emitted in a critical section. */
9843
static void
lower_omp_for_lastprivate (struct omp_for_data *fd, gimple_seq *body_p,
			   gimple_seq *dlist, gimple_seq *clist,
			   struct omp_context *ctx)
{
  tree clauses, cond, vinit;
  enum tree_code cond_code;
  gimple_seq stmts;

  /* Negate the loop's continuation condition: the lastprivate copy-out
     must run only once the loop has finished, i.e. when !(V cond N2).  */
  cond_code = fd->loop.cond_code;
  cond_code = cond_code == LT_EXPR ? GE_EXPR : LE_EXPR;

  /* When possible, use a strict equality expression.  This can let VRP
     type optimizations deduce the value and remove a copy.  */
  if (tree_fits_shwi_p (fd->loop.step))
    {
      HOST_WIDE_INT step = tree_to_shwi (fd->loop.step);
      if (step == 1 || step == -1)
	cond_code = EQ_EXPR;
    }

  tree n2 = fd->loop.n2;
  if (fd->collapse > 1
      && TREE_CODE (n2) != INTEGER_CST
      && gimple_omp_for_combined_into_p (fd->for_stmt))
    {
      /* For a combined, collapsed loop with a non-constant bound, the
	 bound to compare against is communicated through a _looptemp_
	 clause on the enclosing taskreg (parallel/task) construct;
	 locate that context first.  */
      struct omp_context *taskreg_ctx = NULL;
      if (gimple_code (ctx->outer->stmt) == GIMPLE_OMP_FOR)
	{
	  gomp_for *gfor = as_a <gomp_for *> (ctx->outer->stmt);
	  if (gimple_omp_for_kind (gfor) == GF_OMP_FOR_KIND_FOR
	      || gimple_omp_for_kind (gfor) == GF_OMP_FOR_KIND_DISTRIBUTE)
	    {
	      if (gimple_omp_for_combined_into_p (gfor))
		{
		  /* The outer worksharing loop is itself combined into
		     a parallel region (e.g. distribute parallel for);
		     the _looptemp_ clauses live on that parallel.  */
		  gcc_assert (ctx->outer->outer
			      && is_parallel_ctx (ctx->outer->outer));
		  taskreg_ctx = ctx->outer->outer;
		}
	      else
		{
		  /* Otherwise the outer loop's own N2 is usable
		     directly.  */
		  struct omp_for_data outer_fd;
		  omp_extract_for_data (gfor, &outer_fd, NULL);
		  n2 = fold_convert (TREE_TYPE (n2), outer_fd.loop.n2);
		}
	    }
	  else if (gimple_omp_for_kind (gfor) == GF_OMP_FOR_KIND_TASKLOOP)
	    taskreg_ctx = ctx->outer->outer;
	}
      else if (is_taskreg_ctx (ctx->outer))
	taskreg_ctx = ctx->outer;
      if (taskreg_ctx)
	{
	  int i;
	  tree taskreg_clauses
	    = gimple_omp_taskreg_clauses (taskreg_ctx->stmt);
	  tree innerc = omp_find_clause (taskreg_clauses,
					 OMP_CLAUSE__LOOPTEMP_);
	  gcc_assert (innerc);
	  /* Skip over the _looptemp_ clauses belonging to the collapsed
	     iteration variables; certain signed non-rectangular loops
	     carry four additional temporaries to skip as well.  */
	  int count = fd->collapse;
	  if (fd->non_rect
	      && fd->last_nonrect == fd->first_nonrect + 1)
	    if (tree v = gimple_omp_for_index (fd->for_stmt, fd->last_nonrect))
	      if (!TYPE_UNSIGNED (TREE_TYPE (v)))
		count += 4;
	  for (i = 0; i < count; i++)
	    {
	      innerc = omp_find_clause (OMP_CLAUSE_CHAIN (innerc),
					OMP_CLAUSE__LOOPTEMP_);
	      gcc_assert (innerc);
	    }
	  /* The next _looptemp_, if present, holds the bound to use.  */
	  innerc = omp_find_clause (OMP_CLAUSE_CHAIN (innerc),
				    OMP_CLAUSE__LOOPTEMP_);
	  if (innerc)
	    n2 = fold_convert (TREE_TYPE (n2),
			       lookup_decl (OMP_CLAUSE_DECL (innerc),
					    taskreg_ctx));
	}
    }
  cond = build2 (cond_code, boolean_type_node, fd->loop.v, n2);

  clauses = gimple_omp_for_clauses (fd->for_stmt);
  stmts = NULL;
  lower_lastprivate_clauses (clauses, cond, body_p, &stmts, clist, ctx);
  if (!gimple_seq_empty_p (stmts))
    {
      /* Prepend the generated lastprivate code to *DLIST.  */
      gimple_seq_add_seq (&stmts, *dlist);
      *dlist = stmts;

      /* Optimize: v = 0; is usually cheaper than v = some_other_constant.  */
      vinit = fd->loop.n1;
      if (cond_code == EQ_EXPR
	  && tree_fits_shwi_p (fd->loop.n2)
	  && ! integer_zerop (fd->loop.n2))
	vinit = build_int_cst (TREE_TYPE (fd->loop.v), 0);
      else
	vinit = unshare_expr (vinit);

      /* Initialize the iterator variable, so that threads that don't execute
	 any iterations don't execute the lastprivate clauses by accident.  */
      gimplify_assign (fd->loop.v, vinit, body_p);
    }
}
9947
9948 /* Callback for walk_gimple_seq. Find #pragma omp scan statement. */
9949
9950 static tree
9951 omp_find_scan (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
9952 struct walk_stmt_info *wi)
9953 {
9954 gimple *stmt = gsi_stmt (*gsi_p);
9955
9956 *handled_ops_p = true;
9957 switch (gimple_code (stmt))
9958 {
9959 WALK_SUBSTMTS;
9960
9961 case GIMPLE_OMP_FOR:
9962 if (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_SIMD
9963 && gimple_omp_for_combined_into_p (stmt))
9964 *handled_ops_p = false;
9965 break;
9966
9967 case GIMPLE_OMP_SCAN:
9968 *(gimple_stmt_iterator *) (wi->info) = *gsi_p;
9969 return integer_zero_node;
9970 default:
9971 break;
9972 }
9973 return NULL;
9974 }
9975
9976 /* Helper function for lower_omp_for, add transformations for a worksharing
9977 loop with scan directives inside of it.
9978 For worksharing loop not combined with simd, transform:
9979 #pragma omp for reduction(inscan,+:r) private(i)
9980 for (i = 0; i < n; i = i + 1)
9981 {
9982 {
9983 update (r);
9984 }
9985 #pragma omp scan inclusive(r)
9986 {
9987 use (r);
9988 }
9989 }
9990
9991 into two worksharing loops + code to merge results:
9992
9993 num_threads = omp_get_num_threads ();
9994 thread_num = omp_get_thread_num ();
9995 if (thread_num == 0) goto <D.2099>; else goto <D.2100>;
9996 <D.2099>:
9997 var2 = r;
9998 goto <D.2101>;
9999 <D.2100>:
10000 // For UDRs this is UDR init, or if ctors are needed, copy from
10001 // var3 that has been constructed to contain the neutral element.
10002 var2 = 0;
10003 <D.2101>:
10004 ivar = 0;
10005 // The _scantemp_ clauses will arrange for rpriva to be initialized to
10006 // a shared array with num_threads elements and rprivb to a local array
10007 // number of elements equal to the number of (contiguous) iterations the
10008 // current thread will perform. controlb and controlp variables are
10009 // temporaries to handle deallocation of rprivb at the end of second
10010 // GOMP_FOR.
10011 #pragma omp for _scantemp_(rpriva) _scantemp_(rprivb) _scantemp_(controlb) \
10012 _scantemp_(controlp) reduction(inscan,+:r) private(i) nowait
10013 for (i = 0; i < n; i = i + 1)
10014 {
10015 {
10016 // For UDRs this is UDR init or copy from var3.
10017 r = 0;
10018 // This is the input phase from user code.
10019 update (r);
10020 }
10021 {
10022 // For UDRs this is UDR merge.
10023 var2 = var2 + r;
10024 // Rather than handing it over to the user, save to local thread's
10025 // array.
10026 rprivb[ivar] = var2;
10027 // For exclusive scan, the above two statements are swapped.
10028 ivar = ivar + 1;
10029 }
10030 }
10031 // And remember the final value from this thread's into the shared
10032 // rpriva array.
10033 rpriva[(sizetype) thread_num] = var2;
10034 // If more than one thread, compute using Work-Efficient prefix sum
10035 // the inclusive parallel scan of the rpriva array.
10036 if (num_threads > 1) goto <D.2102>; else goto <D.2103>;
10037 <D.2102>:
10038 GOMP_barrier ();
10039 down = 0;
10040 k = 1;
10041 num_threadsu = (unsigned int) num_threads;
   thread_nump1 = (unsigned int) thread_num + 1;
10043 <D.2108>:
10044 twok = k << 1;
10045 if (twok > num_threadsu) goto <D.2110>; else goto <D.2111>;
10046 <D.2110>:
10047 down = 4294967295;
10048 k = k >> 1;
10049 if (k == num_threadsu) goto <D.2112>; else goto <D.2111>;
10050 <D.2112>:
10051 k = k >> 1;
10052 <D.2111>:
10053 twok = k << 1;
10054 cplx = .MUL_OVERFLOW (thread_nump1, twok);
10055 mul = REALPART_EXPR <cplx>;
10056 ovf = IMAGPART_EXPR <cplx>;
10057 if (ovf == 0) goto <D.2116>; else goto <D.2117>;
10058 <D.2116>:
10059 andv = k & down;
10060 andvm1 = andv + 4294967295;
10061 l = mul + andvm1;
10062 if (l < num_threadsu) goto <D.2120>; else goto <D.2117>;
10063 <D.2120>:
10064 // For UDRs this is UDR merge, performed using var2 variable as temporary,
10065 // i.e. var2 = rpriva[l - k]; UDR merge (var2, rpriva[l]); rpriva[l] = var2;
10066 rpriva[l] = rpriva[l - k] + rpriva[l];
10067 <D.2117>:
10068 if (down == 0) goto <D.2121>; else goto <D.2122>;
10069 <D.2121>:
10070 k = k << 1;
10071 goto <D.2123>;
10072 <D.2122>:
10073 k = k >> 1;
10074 <D.2123>:
10075 GOMP_barrier ();
10076 if (k != 0) goto <D.2108>; else goto <D.2103>;
10077 <D.2103>:
10078 if (thread_num == 0) goto <D.2124>; else goto <D.2125>;
10079 <D.2124>:
10080 // For UDRs this is UDR init or copy from var3.
10081 var2 = 0;
10082 goto <D.2126>;
10083 <D.2125>:
10084 var2 = rpriva[thread_num - 1];
10085 <D.2126>:
10086 ivar = 0;
10087 #pragma omp for _scantemp_(controlb) _scantemp_(controlp) \
10088 reduction(inscan,+:r) private(i)
10089 for (i = 0; i < n; i = i + 1)
10090 {
10091 {
10092 // For UDRs, this is r = var2; UDR merge (r, rprivb[ivar]);
10093 r = var2 + rprivb[ivar];
10094 }
10095 {
10096 // This is the scan phase from user code.
10097 use (r);
10098 // Plus a bump of the iterator.
10099 ivar = ivar + 1;
10100 }
10101 } */
10102
10103 static void
10104 lower_omp_for_scan (gimple_seq *body_p, gimple_seq *dlist, gomp_for *stmt,
10105 struct omp_for_data *fd, omp_context *ctx)
10106 {
10107 bool is_for_simd = gimple_omp_for_combined_p (stmt);
10108 gcc_assert (ctx->scan_inclusive || ctx->scan_exclusive);
10109
10110 gimple_seq body = gimple_omp_body (stmt);
10111 gimple_stmt_iterator input1_gsi = gsi_none ();
10112 struct walk_stmt_info wi;
10113 memset (&wi, 0, sizeof (wi));
10114 wi.val_only = true;
10115 wi.info = (void *) &input1_gsi;
10116 walk_gimple_seq_mod (&body, omp_find_scan, NULL, &wi);
10117 gcc_assert (!gsi_end_p (input1_gsi));
10118
10119 gimple *input_stmt1 = gsi_stmt (input1_gsi);
10120 gimple_stmt_iterator gsi = input1_gsi;
10121 gsi_next (&gsi);
10122 gimple_stmt_iterator scan1_gsi = gsi;
10123 gimple *scan_stmt1 = gsi_stmt (gsi);
10124 gcc_assert (scan_stmt1 && gimple_code (scan_stmt1) == GIMPLE_OMP_SCAN);
10125
10126 gimple_seq input_body = gimple_omp_body (input_stmt1);
10127 gimple_seq scan_body = gimple_omp_body (scan_stmt1);
10128 gimple_omp_set_body (input_stmt1, NULL);
10129 gimple_omp_set_body (scan_stmt1, NULL);
10130 gimple_omp_set_body (stmt, NULL);
10131
10132 gomp_for *new_stmt = as_a <gomp_for *> (gimple_copy (stmt));
10133 gimple_seq new_body = copy_gimple_seq_and_replace_locals (body);
10134 gimple_omp_set_body (stmt, body);
10135 gimple_omp_set_body (input_stmt1, input_body);
10136
10137 gimple_stmt_iterator input2_gsi = gsi_none ();
10138 memset (&wi, 0, sizeof (wi));
10139 wi.val_only = true;
10140 wi.info = (void *) &input2_gsi;
10141 walk_gimple_seq_mod (&new_body, omp_find_scan, NULL, &wi);
10142 gcc_assert (!gsi_end_p (input2_gsi));
10143
10144 gimple *input_stmt2 = gsi_stmt (input2_gsi);
10145 gsi = input2_gsi;
10146 gsi_next (&gsi);
10147 gimple_stmt_iterator scan2_gsi = gsi;
10148 gimple *scan_stmt2 = gsi_stmt (gsi);
10149 gcc_assert (scan_stmt2 && gimple_code (scan_stmt2) == GIMPLE_OMP_SCAN);
10150 gimple_omp_set_body (scan_stmt2, scan_body);
10151
10152 gimple_stmt_iterator input3_gsi = gsi_none ();
10153 gimple_stmt_iterator scan3_gsi = gsi_none ();
10154 gimple_stmt_iterator input4_gsi = gsi_none ();
10155 gimple_stmt_iterator scan4_gsi = gsi_none ();
10156 gimple *input_stmt3 = NULL, *scan_stmt3 = NULL;
10157 gimple *input_stmt4 = NULL, *scan_stmt4 = NULL;
10158 omp_context *input_simd_ctx = NULL, *scan_simd_ctx = NULL;
10159 if (is_for_simd)
10160 {
10161 memset (&wi, 0, sizeof (wi));
10162 wi.val_only = true;
10163 wi.info = (void *) &input3_gsi;
10164 walk_gimple_seq_mod (&input_body, omp_find_scan, NULL, &wi);
10165 gcc_assert (!gsi_end_p (input3_gsi));
10166
10167 input_stmt3 = gsi_stmt (input3_gsi);
10168 gsi = input3_gsi;
10169 gsi_next (&gsi);
10170 scan3_gsi = gsi;
10171 scan_stmt3 = gsi_stmt (gsi);
10172 gcc_assert (scan_stmt3 && gimple_code (scan_stmt3) == GIMPLE_OMP_SCAN);
10173
10174 memset (&wi, 0, sizeof (wi));
10175 wi.val_only = true;
10176 wi.info = (void *) &input4_gsi;
10177 walk_gimple_seq_mod (&scan_body, omp_find_scan, NULL, &wi);
10178 gcc_assert (!gsi_end_p (input4_gsi));
10179
10180 input_stmt4 = gsi_stmt (input4_gsi);
10181 gsi = input4_gsi;
10182 gsi_next (&gsi);
10183 scan4_gsi = gsi;
10184 scan_stmt4 = gsi_stmt (gsi);
10185 gcc_assert (scan_stmt4 && gimple_code (scan_stmt4) == GIMPLE_OMP_SCAN);
10186
10187 input_simd_ctx = maybe_lookup_ctx (input_stmt3)->outer;
10188 scan_simd_ctx = maybe_lookup_ctx (input_stmt4)->outer;
10189 }
10190
10191 tree num_threads = create_tmp_var (integer_type_node);
10192 tree thread_num = create_tmp_var (integer_type_node);
10193 tree nthreads_decl = builtin_decl_explicit (BUILT_IN_OMP_GET_NUM_THREADS);
10194 tree threadnum_decl = builtin_decl_explicit (BUILT_IN_OMP_GET_THREAD_NUM);
10195 gimple *g = gimple_build_call (nthreads_decl, 0);
10196 gimple_call_set_lhs (g, num_threads);
10197 gimple_seq_add_stmt (body_p, g);
10198 g = gimple_build_call (threadnum_decl, 0);
10199 gimple_call_set_lhs (g, thread_num);
10200 gimple_seq_add_stmt (body_p, g);
10201
10202 tree ivar = create_tmp_var (sizetype);
10203 tree new_clauses1 = NULL_TREE, new_clauses2 = NULL_TREE;
10204 tree *cp1 = &new_clauses1, *cp2 = &new_clauses2;
10205 tree k = create_tmp_var (unsigned_type_node);
10206 tree l = create_tmp_var (unsigned_type_node);
10207
10208 gimple_seq clist = NULL, mdlist = NULL;
10209 gimple_seq thr01_list = NULL, thrn1_list = NULL;
10210 gimple_seq thr02_list = NULL, thrn2_list = NULL;
10211 gimple_seq scan1_list = NULL, input2_list = NULL;
10212 gimple_seq last_list = NULL, reduc_list = NULL;
10213 for (tree c = gimple_omp_for_clauses (stmt); c; c = OMP_CLAUSE_CHAIN (c))
10214 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
10215 && OMP_CLAUSE_REDUCTION_INSCAN (c))
10216 {
10217 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
10218 tree var = OMP_CLAUSE_DECL (c);
10219 tree new_var = lookup_decl (var, ctx);
10220 tree var3 = NULL_TREE;
10221 tree new_vard = new_var;
10222 if (omp_is_reference (var))
10223 new_var = build_simple_mem_ref_loc (clause_loc, new_var);
10224 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
10225 {
10226 var3 = maybe_lookup_decl (new_vard, ctx);
10227 if (var3 == new_vard)
10228 var3 = NULL_TREE;
10229 }
10230
10231 tree ptype = build_pointer_type (TREE_TYPE (new_var));
10232 tree rpriva = create_tmp_var (ptype);
10233 tree nc = build_omp_clause (clause_loc, OMP_CLAUSE__SCANTEMP_);
10234 OMP_CLAUSE_DECL (nc) = rpriva;
10235 *cp1 = nc;
10236 cp1 = &OMP_CLAUSE_CHAIN (nc);
10237
10238 tree rprivb = create_tmp_var (ptype);
10239 nc = build_omp_clause (clause_loc, OMP_CLAUSE__SCANTEMP_);
10240 OMP_CLAUSE_DECL (nc) = rprivb;
10241 OMP_CLAUSE__SCANTEMP__ALLOC (nc) = 1;
10242 *cp1 = nc;
10243 cp1 = &OMP_CLAUSE_CHAIN (nc);
10244
10245 tree var2 = create_tmp_var_raw (TREE_TYPE (new_var));
10246 if (new_vard != new_var)
10247 TREE_ADDRESSABLE (var2) = 1;
10248 gimple_add_tmp_var (var2);
10249
10250 tree x = fold_convert_loc (clause_loc, sizetype, thread_num);
10251 x = fold_build2_loc (clause_loc, MULT_EXPR, sizetype, x,
10252 TYPE_SIZE_UNIT (TREE_TYPE (ptype)));
10253 x = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (rpriva), rpriva, x);
10254 tree rpriva_ref = build_simple_mem_ref_loc (clause_loc, x);
10255
10256 x = fold_build2_loc (clause_loc, PLUS_EXPR, integer_type_node,
10257 thread_num, integer_minus_one_node);
10258 x = fold_convert_loc (clause_loc, sizetype, x);
10259 x = fold_build2_loc (clause_loc, MULT_EXPR, sizetype, x,
10260 TYPE_SIZE_UNIT (TREE_TYPE (ptype)));
10261 x = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (rpriva), rpriva, x);
10262 tree rprivam1_ref = build_simple_mem_ref_loc (clause_loc, x);
10263
10264 x = fold_convert_loc (clause_loc, sizetype, l);
10265 x = fold_build2_loc (clause_loc, MULT_EXPR, sizetype, x,
10266 TYPE_SIZE_UNIT (TREE_TYPE (ptype)));
10267 x = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (rpriva), rpriva, x);
10268 tree rprival_ref = build_simple_mem_ref_loc (clause_loc, x);
10269
10270 x = fold_build2_loc (clause_loc, MINUS_EXPR, unsigned_type_node, l, k);
10271 x = fold_convert_loc (clause_loc, sizetype, x);
10272 x = fold_build2_loc (clause_loc, MULT_EXPR, sizetype, x,
10273 TYPE_SIZE_UNIT (TREE_TYPE (ptype)));
10274 x = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (rpriva), rpriva, x);
10275 tree rprivalmk_ref = build_simple_mem_ref_loc (clause_loc, x);
10276
10277 x = fold_build2_loc (clause_loc, MULT_EXPR, sizetype, ivar,
10278 TYPE_SIZE_UNIT (TREE_TYPE (ptype)));
10279 x = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (rprivb), rprivb, x);
10280 tree rprivb_ref = build_simple_mem_ref_loc (clause_loc, x);
10281
10282 tree var4 = is_for_simd ? new_var : var2;
10283 tree var5 = NULL_TREE, var6 = NULL_TREE;
10284 if (is_for_simd)
10285 {
10286 var5 = lookup_decl (var, input_simd_ctx);
10287 var6 = lookup_decl (var, scan_simd_ctx);
10288 if (new_vard != new_var)
10289 {
10290 var5 = build_simple_mem_ref_loc (clause_loc, var5);
10291 var6 = build_simple_mem_ref_loc (clause_loc, var6);
10292 }
10293 }
10294 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
10295 {
10296 tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
10297 tree val = var2;
10298
10299 x = lang_hooks.decls.omp_clause_default_ctor
10300 (c, var2, build_outer_var_ref (var, ctx));
10301 if (x)
10302 gimplify_and_add (x, &clist);
10303
10304 x = build_outer_var_ref (var, ctx);
10305 x = lang_hooks.decls.omp_clause_assign_op (c, unshare_expr (var4),
10306 x);
10307 gimplify_and_add (x, &thr01_list);
10308
10309 tree y = (DECL_HAS_VALUE_EXPR_P (new_vard)
10310 ? DECL_VALUE_EXPR (new_vard) : NULL_TREE);
10311 if (var3)
10312 {
10313 x = unshare_expr (var4);
10314 x = lang_hooks.decls.omp_clause_assign_op (c, x, var3);
10315 gimplify_and_add (x, &thrn1_list);
10316 x = unshare_expr (var4);
10317 x = lang_hooks.decls.omp_clause_assign_op (c, x, var3);
10318 gimplify_and_add (x, &thr02_list);
10319 }
10320 else if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c))
10321 {
10322 /* Otherwise, assign to it the identity element. */
10323 gimple_seq tseq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c);
10324 tseq = copy_gimple_seq_and_replace_locals (tseq);
10325 if (!is_for_simd)
10326 {
10327 if (new_vard != new_var)
10328 val = build_fold_addr_expr_loc (clause_loc, val);
10329 SET_DECL_VALUE_EXPR (new_vard, val);
10330 DECL_HAS_VALUE_EXPR_P (new_vard) = 1;
10331 }
10332 SET_DECL_VALUE_EXPR (placeholder, error_mark_node);
10333 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
10334 lower_omp (&tseq, ctx);
10335 gimple_seq_add_seq (&thrn1_list, tseq);
10336 tseq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c);
10337 lower_omp (&tseq, ctx);
10338 gimple_seq_add_seq (&thr02_list, tseq);
10339 SET_DECL_VALUE_EXPR (placeholder, NULL_TREE);
10340 DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
10341 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL;
10342 if (y)
10343 SET_DECL_VALUE_EXPR (new_vard, y);
10344 else
10345 {
10346 DECL_HAS_VALUE_EXPR_P (new_vard) = 0;
10347 SET_DECL_VALUE_EXPR (new_vard, NULL_TREE);
10348 }
10349 }
10350
10351 x = unshare_expr (var4);
10352 x = lang_hooks.decls.omp_clause_assign_op (c, x, rprivam1_ref);
10353 gimplify_and_add (x, &thrn2_list);
10354
10355 if (is_for_simd)
10356 {
10357 x = unshare_expr (rprivb_ref);
10358 x = lang_hooks.decls.omp_clause_assign_op (c, x, var5);
10359 gimplify_and_add (x, &scan1_list);
10360 }
10361 else
10362 {
10363 if (ctx->scan_exclusive)
10364 {
10365 x = unshare_expr (rprivb_ref);
10366 x = lang_hooks.decls.omp_clause_assign_op (c, x, var2);
10367 gimplify_and_add (x, &scan1_list);
10368 }
10369
10370 gimple_seq tseq = OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c);
10371 tseq = copy_gimple_seq_and_replace_locals (tseq);
10372 SET_DECL_VALUE_EXPR (placeholder, var2);
10373 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
10374 lower_omp (&tseq, ctx);
10375 gimple_seq_add_seq (&scan1_list, tseq);
10376
10377 if (ctx->scan_inclusive)
10378 {
10379 x = unshare_expr (rprivb_ref);
10380 x = lang_hooks.decls.omp_clause_assign_op (c, x, var2);
10381 gimplify_and_add (x, &scan1_list);
10382 }
10383 }
10384
10385 x = unshare_expr (rpriva_ref);
10386 x = lang_hooks.decls.omp_clause_assign_op (c, x,
10387 unshare_expr (var4));
10388 gimplify_and_add (x, &mdlist);
10389
10390 x = unshare_expr (is_for_simd ? var6 : new_var);
10391 x = lang_hooks.decls.omp_clause_assign_op (c, x, var4);
10392 gimplify_and_add (x, &input2_list);
10393
10394 val = rprivb_ref;
10395 if (new_vard != new_var)
10396 val = build_fold_addr_expr_loc (clause_loc, val);
10397
10398 gimple_seq tseq = OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c);
10399 tseq = copy_gimple_seq_and_replace_locals (tseq);
10400 SET_DECL_VALUE_EXPR (new_vard, val);
10401 DECL_HAS_VALUE_EXPR_P (new_vard) = 1;
10402 if (is_for_simd)
10403 {
10404 SET_DECL_VALUE_EXPR (placeholder, var6);
10405 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
10406 }
10407 else
10408 DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
10409 lower_omp (&tseq, ctx);
10410 if (y)
10411 SET_DECL_VALUE_EXPR (new_vard, y);
10412 else
10413 {
10414 DECL_HAS_VALUE_EXPR_P (new_vard) = 0;
10415 SET_DECL_VALUE_EXPR (new_vard, NULL_TREE);
10416 }
10417 if (!is_for_simd)
10418 {
10419 SET_DECL_VALUE_EXPR (placeholder, new_var);
10420 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
10421 lower_omp (&tseq, ctx);
10422 }
10423 gimple_seq_add_seq (&input2_list, tseq);
10424
10425 x = build_outer_var_ref (var, ctx);
10426 x = lang_hooks.decls.omp_clause_assign_op (c, x, rpriva_ref);
10427 gimplify_and_add (x, &last_list);
10428
10429 x = lang_hooks.decls.omp_clause_assign_op (c, var2, rprivalmk_ref);
10430 gimplify_and_add (x, &reduc_list);
10431 tseq = OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c);
10432 tseq = copy_gimple_seq_and_replace_locals (tseq);
10433 val = rprival_ref;
10434 if (new_vard != new_var)
10435 val = build_fold_addr_expr_loc (clause_loc, val);
10436 SET_DECL_VALUE_EXPR (new_vard, val);
10437 DECL_HAS_VALUE_EXPR_P (new_vard) = 1;
10438 SET_DECL_VALUE_EXPR (placeholder, var2);
10439 lower_omp (&tseq, ctx);
10440 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
10441 SET_DECL_VALUE_EXPR (placeholder, NULL_TREE);
10442 DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
10443 if (y)
10444 SET_DECL_VALUE_EXPR (new_vard, y);
10445 else
10446 {
10447 DECL_HAS_VALUE_EXPR_P (new_vard) = 0;
10448 SET_DECL_VALUE_EXPR (new_vard, NULL_TREE);
10449 }
10450 gimple_seq_add_seq (&reduc_list, tseq);
10451 x = lang_hooks.decls.omp_clause_assign_op (c, rprival_ref, var2);
10452 gimplify_and_add (x, &reduc_list);
10453
10454 x = lang_hooks.decls.omp_clause_dtor (c, var2);
10455 if (x)
10456 gimplify_and_add (x, dlist);
10457 }
10458 else
10459 {
10460 x = build_outer_var_ref (var, ctx);
10461 gimplify_assign (unshare_expr (var4), x, &thr01_list);
10462
10463 x = omp_reduction_init (c, TREE_TYPE (new_var));
10464 gimplify_assign (unshare_expr (var4), unshare_expr (x),
10465 &thrn1_list);
10466 gimplify_assign (unshare_expr (var4), x, &thr02_list);
10467
10468 gimplify_assign (unshare_expr (var4), rprivam1_ref, &thrn2_list);
10469
10470 enum tree_code code = OMP_CLAUSE_REDUCTION_CODE (c);
10471 if (code == MINUS_EXPR)
10472 code = PLUS_EXPR;
10473
10474 if (is_for_simd)
10475 gimplify_assign (unshare_expr (rprivb_ref), var5, &scan1_list);
10476 else
10477 {
10478 if (ctx->scan_exclusive)
10479 gimplify_assign (unshare_expr (rprivb_ref), var2,
10480 &scan1_list);
10481 x = build2 (code, TREE_TYPE (new_var), var2, new_var);
10482 gimplify_assign (var2, x, &scan1_list);
10483 if (ctx->scan_inclusive)
10484 gimplify_assign (unshare_expr (rprivb_ref), var2,
10485 &scan1_list);
10486 }
10487
10488 gimplify_assign (unshare_expr (rpriva_ref), unshare_expr (var4),
10489 &mdlist);
10490
10491 x = build2 (code, TREE_TYPE (new_var), var4, rprivb_ref);
10492 gimplify_assign (is_for_simd ? var6 : new_var, x, &input2_list);
10493
10494 gimplify_assign (build_outer_var_ref (var, ctx), rpriva_ref,
10495 &last_list);
10496
10497 x = build2 (code, TREE_TYPE (new_var), rprivalmk_ref,
10498 unshare_expr (rprival_ref));
10499 gimplify_assign (rprival_ref, x, &reduc_list);
10500 }
10501 }
10502
10503 g = gimple_build_assign (ivar, PLUS_EXPR, ivar, size_one_node);
10504 gimple_seq_add_stmt (&scan1_list, g);
10505 g = gimple_build_assign (ivar, PLUS_EXPR, ivar, size_one_node);
10506 gimple_seq_add_stmt (gimple_omp_body_ptr (is_for_simd
10507 ? scan_stmt4 : scan_stmt2), g);
10508
10509 tree controlb = create_tmp_var (boolean_type_node);
10510 tree controlp = create_tmp_var (ptr_type_node);
10511 tree nc = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__SCANTEMP_);
10512 OMP_CLAUSE_DECL (nc) = controlb;
10513 OMP_CLAUSE__SCANTEMP__CONTROL (nc) = 1;
10514 *cp1 = nc;
10515 cp1 = &OMP_CLAUSE_CHAIN (nc);
10516 nc = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__SCANTEMP_);
10517 OMP_CLAUSE_DECL (nc) = controlp;
10518 OMP_CLAUSE__SCANTEMP__CONTROL (nc) = 1;
10519 *cp1 = nc;
10520 cp1 = &OMP_CLAUSE_CHAIN (nc);
10521 nc = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__SCANTEMP_);
10522 OMP_CLAUSE_DECL (nc) = controlb;
10523 OMP_CLAUSE__SCANTEMP__CONTROL (nc) = 1;
10524 *cp2 = nc;
10525 cp2 = &OMP_CLAUSE_CHAIN (nc);
10526 nc = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__SCANTEMP_);
10527 OMP_CLAUSE_DECL (nc) = controlp;
10528 OMP_CLAUSE__SCANTEMP__CONTROL (nc) = 1;
10529 *cp2 = nc;
10530 cp2 = &OMP_CLAUSE_CHAIN (nc);
10531
10532 *cp1 = gimple_omp_for_clauses (stmt);
10533 gimple_omp_for_set_clauses (stmt, new_clauses1);
10534 *cp2 = gimple_omp_for_clauses (new_stmt);
10535 gimple_omp_for_set_clauses (new_stmt, new_clauses2);
10536
10537 if (is_for_simd)
10538 {
10539 gimple_seq_add_seq (gimple_omp_body_ptr (scan_stmt3), scan1_list);
10540 gimple_seq_add_seq (gimple_omp_body_ptr (input_stmt4), input2_list);
10541
10542 gsi_insert_seq_after (&input3_gsi, gimple_omp_body (input_stmt3),
10543 GSI_SAME_STMT);
10544 gsi_remove (&input3_gsi, true);
10545 gsi_insert_seq_after (&scan3_gsi, gimple_omp_body (scan_stmt3),
10546 GSI_SAME_STMT);
10547 gsi_remove (&scan3_gsi, true);
10548 gsi_insert_seq_after (&input4_gsi, gimple_omp_body (input_stmt4),
10549 GSI_SAME_STMT);
10550 gsi_remove (&input4_gsi, true);
10551 gsi_insert_seq_after (&scan4_gsi, gimple_omp_body (scan_stmt4),
10552 GSI_SAME_STMT);
10553 gsi_remove (&scan4_gsi, true);
10554 }
10555 else
10556 {
10557 gimple_omp_set_body (scan_stmt1, scan1_list);
10558 gimple_omp_set_body (input_stmt2, input2_list);
10559 }
10560
10561 gsi_insert_seq_after (&input1_gsi, gimple_omp_body (input_stmt1),
10562 GSI_SAME_STMT);
10563 gsi_remove (&input1_gsi, true);
10564 gsi_insert_seq_after (&scan1_gsi, gimple_omp_body (scan_stmt1),
10565 GSI_SAME_STMT);
10566 gsi_remove (&scan1_gsi, true);
10567 gsi_insert_seq_after (&input2_gsi, gimple_omp_body (input_stmt2),
10568 GSI_SAME_STMT);
10569 gsi_remove (&input2_gsi, true);
10570 gsi_insert_seq_after (&scan2_gsi, gimple_omp_body (scan_stmt2),
10571 GSI_SAME_STMT);
10572 gsi_remove (&scan2_gsi, true);
10573
10574 gimple_seq_add_seq (body_p, clist);
10575
10576 tree lab1 = create_artificial_label (UNKNOWN_LOCATION);
10577 tree lab2 = create_artificial_label (UNKNOWN_LOCATION);
10578 tree lab3 = create_artificial_label (UNKNOWN_LOCATION);
10579 g = gimple_build_cond (EQ_EXPR, thread_num, integer_zero_node, lab1, lab2);
10580 gimple_seq_add_stmt (body_p, g);
10581 g = gimple_build_label (lab1);
10582 gimple_seq_add_stmt (body_p, g);
10583 gimple_seq_add_seq (body_p, thr01_list);
10584 g = gimple_build_goto (lab3);
10585 gimple_seq_add_stmt (body_p, g);
10586 g = gimple_build_label (lab2);
10587 gimple_seq_add_stmt (body_p, g);
10588 gimple_seq_add_seq (body_p, thrn1_list);
10589 g = gimple_build_label (lab3);
10590 gimple_seq_add_stmt (body_p, g);
10591
10592 g = gimple_build_assign (ivar, size_zero_node);
10593 gimple_seq_add_stmt (body_p, g);
10594
10595 gimple_seq_add_stmt (body_p, stmt);
10596 gimple_seq_add_seq (body_p, body);
10597 gimple_seq_add_stmt (body_p, gimple_build_omp_continue (fd->loop.v,
10598 fd->loop.v));
10599
10600 g = gimple_build_omp_return (true);
10601 gimple_seq_add_stmt (body_p, g);
10602 gimple_seq_add_seq (body_p, mdlist);
10603
10604 lab1 = create_artificial_label (UNKNOWN_LOCATION);
10605 lab2 = create_artificial_label (UNKNOWN_LOCATION);
10606 g = gimple_build_cond (GT_EXPR, num_threads, integer_one_node, lab1, lab2);
10607 gimple_seq_add_stmt (body_p, g);
10608 g = gimple_build_label (lab1);
10609 gimple_seq_add_stmt (body_p, g);
10610
10611 g = omp_build_barrier (NULL);
10612 gimple_seq_add_stmt (body_p, g);
10613
10614 tree down = create_tmp_var (unsigned_type_node);
10615 g = gimple_build_assign (down, build_zero_cst (unsigned_type_node));
10616 gimple_seq_add_stmt (body_p, g);
10617
10618 g = gimple_build_assign (k, build_one_cst (unsigned_type_node));
10619 gimple_seq_add_stmt (body_p, g);
10620
10621 tree num_threadsu = create_tmp_var (unsigned_type_node);
10622 g = gimple_build_assign (num_threadsu, NOP_EXPR, num_threads);
10623 gimple_seq_add_stmt (body_p, g);
10624
10625 tree thread_numu = create_tmp_var (unsigned_type_node);
10626 g = gimple_build_assign (thread_numu, NOP_EXPR, thread_num);
10627 gimple_seq_add_stmt (body_p, g);
10628
10629 tree thread_nump1 = create_tmp_var (unsigned_type_node);
10630 g = gimple_build_assign (thread_nump1, PLUS_EXPR, thread_numu,
10631 build_int_cst (unsigned_type_node, 1));
10632 gimple_seq_add_stmt (body_p, g);
10633
10634 lab3 = create_artificial_label (UNKNOWN_LOCATION);
10635 g = gimple_build_label (lab3);
10636 gimple_seq_add_stmt (body_p, g);
10637
10638 tree twok = create_tmp_var (unsigned_type_node);
10639 g = gimple_build_assign (twok, LSHIFT_EXPR, k, integer_one_node);
10640 gimple_seq_add_stmt (body_p, g);
10641
10642 tree lab4 = create_artificial_label (UNKNOWN_LOCATION);
10643 tree lab5 = create_artificial_label (UNKNOWN_LOCATION);
10644 tree lab6 = create_artificial_label (UNKNOWN_LOCATION);
10645 g = gimple_build_cond (GT_EXPR, twok, num_threadsu, lab4, lab5);
10646 gimple_seq_add_stmt (body_p, g);
10647 g = gimple_build_label (lab4);
10648 gimple_seq_add_stmt (body_p, g);
10649 g = gimple_build_assign (down, build_all_ones_cst (unsigned_type_node));
10650 gimple_seq_add_stmt (body_p, g);
10651 g = gimple_build_assign (k, RSHIFT_EXPR, k, integer_one_node);
10652 gimple_seq_add_stmt (body_p, g);
10653
10654 g = gimple_build_cond (EQ_EXPR, k, num_threadsu, lab6, lab5);
10655 gimple_seq_add_stmt (body_p, g);
10656 g = gimple_build_label (lab6);
10657 gimple_seq_add_stmt (body_p, g);
10658
10659 g = gimple_build_assign (k, RSHIFT_EXPR, k, integer_one_node);
10660 gimple_seq_add_stmt (body_p, g);
10661
10662 g = gimple_build_label (lab5);
10663 gimple_seq_add_stmt (body_p, g);
10664
10665 g = gimple_build_assign (twok, LSHIFT_EXPR, k, integer_one_node);
10666 gimple_seq_add_stmt (body_p, g);
10667
10668 tree cplx = create_tmp_var (build_complex_type (unsigned_type_node, false));
10669 g = gimple_build_call_internal (IFN_MUL_OVERFLOW, 2, thread_nump1, twok);
10670 gimple_call_set_lhs (g, cplx);
10671 gimple_seq_add_stmt (body_p, g);
10672 tree mul = create_tmp_var (unsigned_type_node);
10673 g = gimple_build_assign (mul, REALPART_EXPR,
10674 build1 (REALPART_EXPR, unsigned_type_node, cplx));
10675 gimple_seq_add_stmt (body_p, g);
10676 tree ovf = create_tmp_var (unsigned_type_node);
10677 g = gimple_build_assign (ovf, IMAGPART_EXPR,
10678 build1 (IMAGPART_EXPR, unsigned_type_node, cplx));
10679 gimple_seq_add_stmt (body_p, g);
10680
10681 tree lab7 = create_artificial_label (UNKNOWN_LOCATION);
10682 tree lab8 = create_artificial_label (UNKNOWN_LOCATION);
10683 g = gimple_build_cond (EQ_EXPR, ovf, build_zero_cst (unsigned_type_node),
10684 lab7, lab8);
10685 gimple_seq_add_stmt (body_p, g);
10686 g = gimple_build_label (lab7);
10687 gimple_seq_add_stmt (body_p, g);
10688
10689 tree andv = create_tmp_var (unsigned_type_node);
10690 g = gimple_build_assign (andv, BIT_AND_EXPR, k, down);
10691 gimple_seq_add_stmt (body_p, g);
10692 tree andvm1 = create_tmp_var (unsigned_type_node);
10693 g = gimple_build_assign (andvm1, PLUS_EXPR, andv,
10694 build_minus_one_cst (unsigned_type_node));
10695 gimple_seq_add_stmt (body_p, g);
10696
10697 g = gimple_build_assign (l, PLUS_EXPR, mul, andvm1);
10698 gimple_seq_add_stmt (body_p, g);
10699
10700 tree lab9 = create_artificial_label (UNKNOWN_LOCATION);
10701 g = gimple_build_cond (LT_EXPR, l, num_threadsu, lab9, lab8);
10702 gimple_seq_add_stmt (body_p, g);
10703 g = gimple_build_label (lab9);
10704 gimple_seq_add_stmt (body_p, g);
10705 gimple_seq_add_seq (body_p, reduc_list);
10706 g = gimple_build_label (lab8);
10707 gimple_seq_add_stmt (body_p, g);
10708
10709 tree lab10 = create_artificial_label (UNKNOWN_LOCATION);
10710 tree lab11 = create_artificial_label (UNKNOWN_LOCATION);
10711 tree lab12 = create_artificial_label (UNKNOWN_LOCATION);
10712 g = gimple_build_cond (EQ_EXPR, down, build_zero_cst (unsigned_type_node),
10713 lab10, lab11);
10714 gimple_seq_add_stmt (body_p, g);
10715 g = gimple_build_label (lab10);
10716 gimple_seq_add_stmt (body_p, g);
10717 g = gimple_build_assign (k, LSHIFT_EXPR, k, integer_one_node);
10718 gimple_seq_add_stmt (body_p, g);
10719 g = gimple_build_goto (lab12);
10720 gimple_seq_add_stmt (body_p, g);
10721 g = gimple_build_label (lab11);
10722 gimple_seq_add_stmt (body_p, g);
10723 g = gimple_build_assign (k, RSHIFT_EXPR, k, integer_one_node);
10724 gimple_seq_add_stmt (body_p, g);
10725 g = gimple_build_label (lab12);
10726 gimple_seq_add_stmt (body_p, g);
10727
10728 g = omp_build_barrier (NULL);
10729 gimple_seq_add_stmt (body_p, g);
10730
10731 g = gimple_build_cond (NE_EXPR, k, build_zero_cst (unsigned_type_node),
10732 lab3, lab2);
10733 gimple_seq_add_stmt (body_p, g);
10734
10735 g = gimple_build_label (lab2);
10736 gimple_seq_add_stmt (body_p, g);
10737
10738 lab1 = create_artificial_label (UNKNOWN_LOCATION);
10739 lab2 = create_artificial_label (UNKNOWN_LOCATION);
10740 lab3 = create_artificial_label (UNKNOWN_LOCATION);
10741 g = gimple_build_cond (EQ_EXPR, thread_num, integer_zero_node, lab1, lab2);
10742 gimple_seq_add_stmt (body_p, g);
10743 g = gimple_build_label (lab1);
10744 gimple_seq_add_stmt (body_p, g);
10745 gimple_seq_add_seq (body_p, thr02_list);
10746 g = gimple_build_goto (lab3);
10747 gimple_seq_add_stmt (body_p, g);
10748 g = gimple_build_label (lab2);
10749 gimple_seq_add_stmt (body_p, g);
10750 gimple_seq_add_seq (body_p, thrn2_list);
10751 g = gimple_build_label (lab3);
10752 gimple_seq_add_stmt (body_p, g);
10753
10754 g = gimple_build_assign (ivar, size_zero_node);
10755 gimple_seq_add_stmt (body_p, g);
10756 gimple_seq_add_stmt (body_p, new_stmt);
10757 gimple_seq_add_seq (body_p, new_body);
10758
10759 gimple_seq new_dlist = NULL;
10760 lab1 = create_artificial_label (UNKNOWN_LOCATION);
10761 lab2 = create_artificial_label (UNKNOWN_LOCATION);
10762 tree num_threadsm1 = create_tmp_var (integer_type_node);
10763 g = gimple_build_assign (num_threadsm1, PLUS_EXPR, num_threads,
10764 integer_minus_one_node);
10765 gimple_seq_add_stmt (&new_dlist, g);
10766 g = gimple_build_cond (EQ_EXPR, thread_num, num_threadsm1, lab1, lab2);
10767 gimple_seq_add_stmt (&new_dlist, g);
10768 g = gimple_build_label (lab1);
10769 gimple_seq_add_stmt (&new_dlist, g);
10770 gimple_seq_add_seq (&new_dlist, last_list);
10771 g = gimple_build_label (lab2);
10772 gimple_seq_add_stmt (&new_dlist, g);
10773 gimple_seq_add_seq (&new_dlist, *dlist);
10774 *dlist = new_dlist;
10775 }
10776
/* Lower code for an OMP loop directive.  Rewrites the GIMPLE_OMP_FOR at
   GSI_P in place into a GIMPLE_BIND that contains the data-sharing clause
   setup, the (still high-level) loop statement, the continue/return region
   markers and the clause teardown code.  CTX holds context information
   for the directive.  */

static void
lower_omp_for (gimple_stmt_iterator *gsi_p, omp_context *ctx)
{
  tree *rhs_p, block;
  struct omp_for_data fd, *fdp = NULL;
  gomp_for *stmt = as_a <gomp_for *> (gsi_stmt (*gsi_p));
  gbind *new_stmt;
  gimple_seq omp_for_body, body, dlist, tred_ilist = NULL, tred_dlist = NULL;
  gimple_seq cnt_list = NULL, clist = NULL;
  gimple_seq oacc_head = NULL, oacc_tail = NULL;
  size_t i;

  push_gimplify_context ();

  /* The pre-body computes the loop bounds; lower it first.  */
  lower_omp (gimple_omp_for_pre_body_ptr (stmt), ctx);

  block = make_node (BLOCK);
  new_stmt = gimple_build_bind (NULL, NULL, block);
  /* Replace at gsi right away, so that 'stmt' is no member
     of a sequence anymore as we're going to add to a different
     one below.  */
  gsi_replace (gsi_p, new_stmt, true);

  /* Move declaration of temporaries in the loop body before we make
     it go away.  */
  omp_for_body = gimple_omp_body (stmt);
  if (!gimple_seq_empty_p (omp_for_body)
      && gimple_code (gimple_seq_first_stmt (omp_for_body)) == GIMPLE_BIND)
    {
      gbind *inner_bind
	= as_a <gbind *> (gimple_seq_first_stmt (omp_for_body));
      tree vars = gimple_bind_vars (inner_bind);
      gimple_bind_append_vars (new_stmt, vars);
      /* bind_vars/BLOCK_VARS are being moved to new_stmt/block, don't
	 keep them on the inner_bind and it's block.  */
      gimple_bind_set_vars (inner_bind, NULL_TREE);
      if (gimple_bind_block (inner_bind))
	BLOCK_VARS (gimple_bind_block (inner_bind)) = NULL_TREE;
    }

  /* For a loop combined into an enclosing construct, communicate the
     computed bounds to the outer statement via _looptemp_ clauses.  */
  if (gimple_omp_for_combined_into_p (stmt))
    {
      omp_extract_for_data (stmt, &fd, NULL);
      fdp = &fd;

      /* We need two temporaries with fd.loop.v type (istart/iend)
	 and then (fd.collapse - 1) temporaries with the same
	 type for count2 ... countN-1 vars if not constant.  */
      size_t count = 2;
      tree type = fd.iter_type;
      if (fd.collapse > 1
	  && TREE_CODE (fd.loop.n2) != INTEGER_CST)
	count += fd.collapse - 1;
      size_t count2 = 0;
      tree type2 = NULL_TREE;
      bool taskreg_for
	= (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_FOR
	   || gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_TASKLOOP);
      tree outerc = NULL, *pc = gimple_omp_for_clauses_ptr (stmt);
      tree simtc = NULL;
      tree clauses = *pc;
      /* Triangular (non-rectangular) loop nests with two adjacent
	 non-rect loops and a signed IV need three extra temporaries
	 of the IV's type.  */
      if (fd.collapse > 1
	  && fd.non_rect
	  && fd.last_nonrect == fd.first_nonrect + 1
	  && TREE_CODE (fd.loop.n2) != INTEGER_CST)
	if (tree v = gimple_omp_for_index (stmt, fd.last_nonrect))
	  if (!TYPE_UNSIGNED (TREE_TYPE (v)))
	    {
	      v = gimple_omp_for_index (stmt, fd.first_nonrect);
	      type2 = TREE_TYPE (v);
	      count++;
	      count2 = 3;
	    }
      if (taskreg_for)
	outerc
	  = omp_find_clause (gimple_omp_taskreg_clauses (ctx->outer->stmt),
			     OMP_CLAUSE__LOOPTEMP_);
      if (ctx->simt_stmt)
	simtc = omp_find_clause (gimple_omp_for_clauses (ctx->simt_stmt),
				 OMP_CLAUSE__LOOPTEMP_);
      for (i = 0; i < count + count2; i++)
	{
	  tree temp;
	  if (taskreg_for)
	    {
	      /* Reuse the decls the enclosing parallel/taskloop already
		 created for its own _looptemp_ clauses.  */
	      gcc_assert (outerc);
	      temp = lookup_decl (OMP_CLAUSE_DECL (outerc), ctx->outer);
	      outerc = omp_find_clause (OMP_CLAUSE_CHAIN (outerc),
					OMP_CLAUSE__LOOPTEMP_);
	    }
	  else
	    {
	      /* If there are 2 adjacent SIMD stmts, one with _simt_
		 clause, another without, make sure they have the same
		 decls in _looptemp_ clauses, because the outer stmt
		 they are combined into will look up just one inner_stmt.  */
	      if (ctx->simt_stmt)
		temp = OMP_CLAUSE_DECL (simtc);
	      else
		temp = create_tmp_var (i >= count ? type2 : type);
	      insert_decl_map (&ctx->outer->cb, temp, temp);
	    }
	  /* Prepend a fresh _looptemp_ clause carrying TEMP.  */
	  *pc = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__LOOPTEMP_);
	  OMP_CLAUSE_DECL (*pc) = temp;
	  pc = &OMP_CLAUSE_CHAIN (*pc);
	  if (ctx->simt_stmt)
	    simtc = omp_find_clause (OMP_CLAUSE_CHAIN (simtc),
				     OMP_CLAUSE__LOOPTEMP_);
	}
      /* Re-attach the original clause list after the new _looptemp_s.  */
      *pc = clauses;
    }

  /* The pre-body and input clauses go before the lowered GIMPLE_OMP_FOR.  */
  dlist = NULL;
  body = NULL;
  tree rclauses
    = omp_task_reductions_find_first (gimple_omp_for_clauses (stmt), OMP_FOR,
				      OMP_CLAUSE_REDUCTION);
  tree rtmp = NULL_TREE;
  if (rclauses)
    {
      /* Task reductions: allocate a _reductemp_ pointer, lower the
	 reduction bookkeeping into tred_ilist/tred_dlist, and remember
	 RTMP so OMP_CLAUSE_DECL can be redirected to it at the end.  */
      tree type = build_pointer_type (pointer_sized_int_node);
      tree temp = create_tmp_var (type);
      tree c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__REDUCTEMP_);
      OMP_CLAUSE_DECL (c) = temp;
      OMP_CLAUSE_CHAIN (c) = gimple_omp_for_clauses (stmt);
      gimple_omp_for_set_clauses (stmt, c);
      lower_omp_task_reductions (ctx, OMP_FOR,
				 gimple_omp_for_clauses (stmt),
				 &tred_ilist, &tred_dlist);
      rclauses = c;
      rtmp = make_ssa_name (type);
      gimple_seq_add_stmt (&body, gimple_build_assign (rtmp, temp));
    }

  lower_lastprivate_conditional_clauses (gimple_omp_for_clauses_ptr (stmt),
					 ctx);

  lower_rec_input_clauses (gimple_omp_for_clauses (stmt), &body, &dlist, ctx,
			   fdp);
  /* With task reductions the pre-body goes into tred_ilist instead,
     so it is emitted after the reduction setup.  */
  gimple_seq_add_seq (rclauses ? &tred_ilist : &body,
		      gimple_omp_for_pre_body (stmt));

  lower_omp (gimple_omp_body_ptr (stmt), ctx);

  /* Lower the header expressions.  At this point, we can assume that
     the header is of the form:

	#pragma omp for (V = VAL1; V {<|>|<=|>=} VAL2; V = V [+-] VAL3)

     We just need to make sure that VAL1, VAL2 and VAL3 are lowered
     using the .omp_data_s mapping, if needed.  */
  for (i = 0; i < gimple_omp_for_collapse (stmt); i++)
    {
      rhs_p = gimple_omp_for_initial_ptr (stmt, i);
      /* A TREE_VEC here describes a non-rectangular bound; elements 1
	 and 2 are the outer-var factor and the addend.  */
      if (TREE_CODE (*rhs_p) == TREE_VEC)
	{
	  if (!is_gimple_min_invariant (TREE_VEC_ELT (*rhs_p, 1)))
	    TREE_VEC_ELT (*rhs_p, 1)
	      = get_formal_tmp_var (TREE_VEC_ELT (*rhs_p, 1), &cnt_list);
	  if (!is_gimple_min_invariant (TREE_VEC_ELT (*rhs_p, 2)))
	    TREE_VEC_ELT (*rhs_p, 2)
	      = get_formal_tmp_var (TREE_VEC_ELT (*rhs_p, 2), &cnt_list);
	}
      else if (!is_gimple_min_invariant (*rhs_p))
	*rhs_p = get_formal_tmp_var (*rhs_p, &cnt_list);
      else if (TREE_CODE (*rhs_p) == ADDR_EXPR)
	recompute_tree_invariant_for_addr_expr (*rhs_p);

      rhs_p = gimple_omp_for_final_ptr (stmt, i);
      if (TREE_CODE (*rhs_p) == TREE_VEC)
	{
	  if (!is_gimple_min_invariant (TREE_VEC_ELT (*rhs_p, 1)))
	    TREE_VEC_ELT (*rhs_p, 1)
	      = get_formal_tmp_var (TREE_VEC_ELT (*rhs_p, 1), &cnt_list);
	  if (!is_gimple_min_invariant (TREE_VEC_ELT (*rhs_p, 2)))
	    TREE_VEC_ELT (*rhs_p, 2)
	      = get_formal_tmp_var (TREE_VEC_ELT (*rhs_p, 2), &cnt_list);
	}
      else if (!is_gimple_min_invariant (*rhs_p))
	*rhs_p = get_formal_tmp_var (*rhs_p, &cnt_list);
      else if (TREE_CODE (*rhs_p) == ADDR_EXPR)
	recompute_tree_invariant_for_addr_expr (*rhs_p);

      rhs_p = &TREE_OPERAND (gimple_omp_for_incr (stmt, i), 1);
      if (!is_gimple_min_invariant (*rhs_p))
	*rhs_p = get_formal_tmp_var (*rhs_p, &cnt_list);
    }
  if (rclauses)
    gimple_seq_add_seq (&tred_ilist, cnt_list);
  else
    gimple_seq_add_seq (&body, cnt_list);

  /* Once lowered, extract the bounds and clauses.  */
  omp_extract_for_data (stmt, &fd, NULL);

  if (is_gimple_omp_oacc (ctx->stmt)
      && !ctx_in_oacc_kernels_region (ctx))
    lower_oacc_head_tail (gimple_location (stmt),
			  gimple_omp_for_clauses (stmt),
			  &oacc_head, &oacc_tail, ctx);

  /* Add OpenACC partitioning and reduction markers just before the loop.  */
  if (oacc_head)
    gimple_seq_add_seq (&body, oacc_head);

  lower_omp_for_lastprivate (&fd, &body, &dlist, &clist, ctx);

  /* Remap linear clause decls (and non-constant steps) into the current
     context for worksharing loops with copy-in.  */
  if (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_FOR)
    for (tree c = gimple_omp_for_clauses (stmt); c; c = OMP_CLAUSE_CHAIN (c))
      if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
	  && !OMP_CLAUSE_LINEAR_NO_COPYIN (c))
	{
	  OMP_CLAUSE_DECL (c) = lookup_decl (OMP_CLAUSE_DECL (c), ctx);
	  if (DECL_P (OMP_CLAUSE_LINEAR_STEP (c)))
	    OMP_CLAUSE_LINEAR_STEP (c)
	      = maybe_lookup_decl_in_outer_ctx (OMP_CLAUSE_LINEAR_STEP (c),
						ctx);
	}

  /* Worksharing loops with scan directives get special expansion;
     otherwise emit the loop statement followed by its body.  */
  if ((ctx->scan_inclusive || ctx->scan_exclusive)
      && gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_FOR)
    lower_omp_for_scan (&body, &dlist, stmt, &fd, ctx);
  else
    {
      gimple_seq_add_stmt (&body, stmt);
      gimple_seq_add_seq (&body, gimple_omp_body (stmt));
    }

  gimple_seq_add_stmt (&body, gimple_build_omp_continue (fd.loop.v,
							 fd.loop.v));

  /* After the loop, add exit clauses.  */
  lower_reduction_clauses (gimple_omp_for_clauses (stmt), &body, &clist, ctx);

  /* clist holds reduction merges that must run under the atomic lock.  */
  if (clist)
    {
      tree fndecl = builtin_decl_explicit (BUILT_IN_GOMP_ATOMIC_START);
      gcall *g = gimple_build_call (fndecl, 0);
      gimple_seq_add_stmt (&body, g);
      gimple_seq_add_seq (&body, clist);
      fndecl = builtin_decl_explicit (BUILT_IN_GOMP_ATOMIC_END);
      g = gimple_build_call (fndecl, 0);
      gimple_seq_add_stmt (&body, g);
    }

  if (ctx->cancellable)
    gimple_seq_add_stmt (&body, gimple_build_label (ctx->cancel_label));

  gimple_seq_add_seq (&body, dlist);

  /* Task-reduction setup must precede everything built so far.  */
  if (rclauses)
    {
      gimple_seq_add_seq (&tred_ilist, body);
      body = tred_ilist;
    }

  body = maybe_catch_exception (body);

  /* Region exit marker goes at the end of the loop body.  */
  gimple *g = gimple_build_omp_return (fd.have_nowait);
  gimple_seq_add_stmt (&body, g);

  gimple_seq_add_seq (&body, tred_dlist);

  maybe_add_implicit_barrier_cancel (ctx, g, &body);

  if (rclauses)
    OMP_CLAUSE_DECL (rclauses) = rtmp;

  /* Add OpenACC joining and reduction markers just after the loop.  */
  if (oacc_tail)
    gimple_seq_add_seq (&body, oacc_tail);

  pop_gimplify_context (new_stmt);

  gimple_bind_append_vars (new_stmt, ctx->block_vars);
  maybe_remove_omp_member_access_dummy_vars (new_stmt);
  BLOCK_VARS (block) = gimple_bind_vars (new_stmt);
  if (BLOCK_VARS (block))
    TREE_USED (block) = 1;

  gimple_bind_set_body (new_stmt, body);
  /* Detach the body and pre-body from STMT; they now live in NEW_STMT.  */
  gimple_omp_set_body (stmt, NULL);
  gimple_omp_for_set_pre_body (stmt, NULL);
}
11065
11066 /* Callback for walk_stmts. Check if the current statement only contains
11067 GIMPLE_OMP_FOR or GIMPLE_OMP_SECTIONS. */
11068
11069 static tree
11070 check_combined_parallel (gimple_stmt_iterator *gsi_p,
11071 bool *handled_ops_p,
11072 struct walk_stmt_info *wi)
11073 {
11074 int *info = (int *) wi->info;
11075 gimple *stmt = gsi_stmt (*gsi_p);
11076
11077 *handled_ops_p = true;
11078 switch (gimple_code (stmt))
11079 {
11080 WALK_SUBSTMTS;
11081
11082 case GIMPLE_DEBUG:
11083 break;
11084 case GIMPLE_OMP_FOR:
11085 case GIMPLE_OMP_SECTIONS:
11086 *info = *info == 0 ? 1 : -1;
11087 break;
11088 default:
11089 *info = -1;
11090 break;
11091 }
11092 return NULL;
11093 }
11094
/* Context used while building a task copy function
   (see create_task_copyfn).  */

struct omp_taskcopy_context
{
  /* This field must be at the beginning, as we do "inheritance": Some
     callback functions for tree-inline.c (e.g., omp_copy_decl)
     receive a copy_body_data pointer that is up-casted to an
     omp_context pointer.  */
  copy_body_data cb;
  /* The OMP context of the task whose copy function is being built.  */
  omp_context *ctx;
};
11104
11105 static tree
11106 task_copyfn_copy_decl (tree var, copy_body_data *cb)
11107 {
11108 struct omp_taskcopy_context *tcctx = (struct omp_taskcopy_context *) cb;
11109
11110 if (splay_tree_lookup (tcctx->ctx->sfield_map, (splay_tree_key) var))
11111 return create_tmp_var (TREE_TYPE (var));
11112
11113 return var;
11114 }
11115
11116 static tree
11117 task_copyfn_remap_type (struct omp_taskcopy_context *tcctx, tree orig_type)
11118 {
11119 tree name, new_fields = NULL, type, f;
11120
11121 type = lang_hooks.types.make_type (RECORD_TYPE);
11122 name = DECL_NAME (TYPE_NAME (orig_type));
11123 name = build_decl (gimple_location (tcctx->ctx->stmt),
11124 TYPE_DECL, name, type);
11125 TYPE_NAME (type) = name;
11126
11127 for (f = TYPE_FIELDS (orig_type); f ; f = TREE_CHAIN (f))
11128 {
11129 tree new_f = copy_node (f);
11130 DECL_CONTEXT (new_f) = type;
11131 TREE_TYPE (new_f) = remap_type (TREE_TYPE (f), &tcctx->cb);
11132 TREE_CHAIN (new_f) = new_fields;
11133 walk_tree (&DECL_SIZE (new_f), copy_tree_body_r, &tcctx->cb, NULL);
11134 walk_tree (&DECL_SIZE_UNIT (new_f), copy_tree_body_r, &tcctx->cb, NULL);
11135 walk_tree (&DECL_FIELD_OFFSET (new_f), copy_tree_body_r,
11136 &tcctx->cb, NULL);
11137 new_fields = new_f;
11138 tcctx->cb.decl_map->put (f, new_f);
11139 }
11140 TYPE_FIELDS (type) = nreverse (new_fields);
11141 layout_type (type);
11142 return type;
11143 }
11144
/* Create task copyfn.  Fills in the body of the task's copy function,
   which copies firstprivate data and shared-variable pointers from the
   sender record (*SARG) into the task's own data record (*ARG).  Runs in
   three passes over the task's clauses: temporaries used by
   variably-sized types, then non-VLA copies, then VLA firstprivates.  */

static void
create_task_copyfn (gomp_task *task_stmt, omp_context *ctx)
{
  struct function *child_cfun;
  tree child_fn, t, c, src, dst, f, sf, arg, sarg, decl;
  tree record_type, srecord_type, bind, list;
  bool record_needs_remap = false, srecord_needs_remap = false;
  splay_tree_node n;
  struct omp_taskcopy_context tcctx;
  location_t loc = gimple_location (task_stmt);
  size_t looptempno = 0;

  child_fn = gimple_omp_task_copy_fn (task_stmt);
  child_cfun = DECL_STRUCT_FUNCTION (child_fn);
  gcc_assert (child_cfun->cfg == NULL);
  DECL_SAVED_TREE (child_fn) = alloc_stmt_list ();

  /* Reset DECL_CONTEXT on function arguments.  */
  for (t = DECL_ARGUMENTS (child_fn); t; t = DECL_CHAIN (t))
    DECL_CONTEXT (t) = child_fn;

  /* Populate the function.  */
  push_gimplify_context ();
  push_cfun (child_cfun);

  bind = build3 (BIND_EXPR, void_type_node, NULL, NULL, NULL);
  TREE_SIDE_EFFECTS (bind) = 1;
  list = NULL;
  DECL_SAVED_TREE (child_fn) = bind;
  DECL_SOURCE_LOCATION (child_fn) = gimple_location (task_stmt);

  /* Remap src and dst argument types if needed.  Variably-modified
     field types refer to decls of the source function and must be
     re-expressed in terms of the copyfn's own temporaries.  */
  record_type = ctx->record_type;
  srecord_type = ctx->srecord_type;
  for (f = TYPE_FIELDS (record_type); f ; f = DECL_CHAIN (f))
    if (variably_modified_type_p (TREE_TYPE (f), ctx->cb.src_fn))
      {
	record_needs_remap = true;
	break;
      }
  for (f = TYPE_FIELDS (srecord_type); f ; f = DECL_CHAIN (f))
    if (variably_modified_type_p (TREE_TYPE (f), ctx->cb.src_fn))
      {
	srecord_needs_remap = true;
	break;
      }

  if (record_needs_remap || srecord_needs_remap)
    {
      /* Set up a copy_body_data for remapping the record types into
	 the child function.  */
      memset (&tcctx, '\0', sizeof (tcctx));
      tcctx.cb.src_fn = ctx->cb.src_fn;
      tcctx.cb.dst_fn = child_fn;
      tcctx.cb.src_node = cgraph_node::get (tcctx.cb.src_fn);
      gcc_checking_assert (tcctx.cb.src_node);
      tcctx.cb.dst_node = tcctx.cb.src_node;
      tcctx.cb.src_cfun = ctx->cb.src_cfun;
      tcctx.cb.copy_decl = task_copyfn_copy_decl;
      tcctx.cb.eh_lp_nr = 0;
      tcctx.cb.transform_call_graph_edges = CB_CGE_MOVE;
      tcctx.cb.decl_map = new hash_map<tree, tree>;
      tcctx.ctx = ctx;

      if (record_needs_remap)
	record_type = task_copyfn_remap_type (&tcctx, record_type);
      if (srecord_needs_remap)
	srecord_type = task_copyfn_remap_type (&tcctx, srecord_type);
    }
  else
    /* decl_map == NULL also serves as the "no remapping" flag below.  */
    tcctx.cb.decl_map = NULL;

  /* ARG is the destination data record, SARG the sender record.  */
  arg = DECL_ARGUMENTS (child_fn);
  TREE_TYPE (arg) = build_pointer_type (record_type);
  sarg = DECL_CHAIN (arg);
  TREE_TYPE (sarg) = build_pointer_type (srecord_type);

  /* First pass: initialize temporaries used in record_type and srecord_type
     sizes and field offsets.  */
  if (tcctx.cb.decl_map)
    for (c = gimple_omp_task_clauses (task_stmt); c; c = OMP_CLAUSE_CHAIN (c))
      if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
	{
	  tree *p;

	  decl = OMP_CLAUSE_DECL (c);
	  p = tcctx.cb.decl_map->get (decl);
	  if (p == NULL)
	    continue;
	  n = splay_tree_lookup (ctx->sfield_map, (splay_tree_key) decl);
	  sf = (tree) n->value;
	  sf = *tcctx.cb.decl_map->get (sf);
	  src = build_simple_mem_ref_loc (loc, sarg);
	  src = omp_build_component_ref (src, sf);
	  t = build2 (MODIFY_EXPR, TREE_TYPE (*p), *p, src);
	  append_to_statement_list (t, &list);
	}

  /* Second pass: copy shared var pointers and copy construct non-VLA
     firstprivate vars.  */
  for (c = gimple_omp_task_clauses (task_stmt); c; c = OMP_CLAUSE_CHAIN (c))
    switch (OMP_CLAUSE_CODE (c))
      {
	splay_tree_key key;
      case OMP_CLAUSE_SHARED:
	decl = OMP_CLAUSE_DECL (c);
	key = (splay_tree_key) decl;
	/* shared-firstprivate vars are keyed by &DECL_UID to distinguish
	   them from a plain shared mapping of the same decl.  */
	if (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
	  key = (splay_tree_key) &DECL_UID (decl);
	n = splay_tree_lookup (ctx->field_map, key);
	if (n == NULL)
	  break;
	f = (tree) n->value;
	if (tcctx.cb.decl_map)
	  f = *tcctx.cb.decl_map->get (f);
	n = splay_tree_lookup (ctx->sfield_map, key);
	sf = (tree) n->value;
	if (tcctx.cb.decl_map)
	  sf = *tcctx.cb.decl_map->get (sf);
	src = build_simple_mem_ref_loc (loc, sarg);
	src = omp_build_component_ref (src, sf);
	dst = build_simple_mem_ref_loc (loc, arg);
	dst = omp_build_component_ref (dst, f);
	t = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
	append_to_statement_list (t, &list);
	break;
      case OMP_CLAUSE_REDUCTION:
      case OMP_CLAUSE_IN_REDUCTION:
	decl = OMP_CLAUSE_DECL (c);
	/* Strip the MEM_REF/POINTER_PLUS_EXPR wrapping used for array
	   section reductions to reach the underlying base decl.  */
	if (TREE_CODE (decl) == MEM_REF)
	  {
	    decl = TREE_OPERAND (decl, 0);
	    if (TREE_CODE (decl) == POINTER_PLUS_EXPR)
	      decl = TREE_OPERAND (decl, 0);
	    if (TREE_CODE (decl) == INDIRECT_REF
		|| TREE_CODE (decl) == ADDR_EXPR)
	      decl = TREE_OPERAND (decl, 0);
	  }
	key = (splay_tree_key) decl;
	n = splay_tree_lookup (ctx->field_map, key);
	if (n == NULL)
	  break;
	f = (tree) n->value;
	if (tcctx.cb.decl_map)
	  f = *tcctx.cb.decl_map->get (f);
	n = splay_tree_lookup (ctx->sfield_map, key);
	sf = (tree) n->value;
	if (tcctx.cb.decl_map)
	  sf = *tcctx.cb.decl_map->get (sf);
	src = build_simple_mem_ref_loc (loc, sarg);
	src = omp_build_component_ref (src, sf);
	if (decl != OMP_CLAUSE_DECL (c)
	    && TREE_CODE (TREE_TYPE (decl)) == REFERENCE_TYPE
	    && TREE_CODE (TREE_TYPE (TREE_TYPE (decl))) == POINTER_TYPE)
	  src = build_simple_mem_ref_loc (loc, src);
	dst = build_simple_mem_ref_loc (loc, arg);
	dst = omp_build_component_ref (dst, f);
	t = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
	append_to_statement_list (t, &list);
	break;
      case OMP_CLAUSE__LOOPTEMP_:
	/* Fields for first two _looptemp_ clauses are initialized by
	   GOMP_taskloop*, the rest are handled like firstprivate.  */
	if (looptempno < 2)
	  {
	    looptempno++;
	    break;
	  }
	/* FALLTHRU */
      case OMP_CLAUSE__REDUCTEMP_:
      case OMP_CLAUSE_FIRSTPRIVATE:
	decl = OMP_CLAUSE_DECL (c);
	/* VLA firstprivates are deferred to the last pass below.  */
	if (is_variable_sized (decl))
	  break;
	n = splay_tree_lookup (ctx->field_map, (splay_tree_key) decl);
	if (n == NULL)
	  break;
	f = (tree) n->value;
	if (tcctx.cb.decl_map)
	  f = *tcctx.cb.decl_map->get (f);
	n = splay_tree_lookup (ctx->sfield_map, (splay_tree_key) decl);
	if (n != NULL)
	  {
	    sf = (tree) n->value;
	    if (tcctx.cb.decl_map)
	      sf = *tcctx.cb.decl_map->get (sf);
	    src = build_simple_mem_ref_loc (loc, sarg);
	    src = omp_build_component_ref (src, sf);
	    if (use_pointer_for_field (decl, NULL) || omp_is_reference (decl))
	      src = build_simple_mem_ref_loc (loc, src);
	  }
	else
	  /* No sender field: read the decl directly.  */
	  src = decl;
	dst = build_simple_mem_ref_loc (loc, arg);
	dst = omp_build_component_ref (dst, f);
	if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_FIRSTPRIVATE)
	  t = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
	else
	  {
	    /* If an allocate clause applies, allocate the storage with
	       GOMP_alloc first and copy-construct into that memory.  */
	    if (ctx->allocate_map)
	      if (tree *allocatorp = ctx->allocate_map->get (decl))
		{
		  tree allocator = *allocatorp;
		  if (TREE_CODE (allocator) != INTEGER_CST)
		    {
		      /* Non-constant allocator: fetch it from the
			 sender record.  */
		      n = splay_tree_lookup (ctx->sfield_map,
					     (splay_tree_key) allocator);
		      allocator = (tree) n->value;
		      if (tcctx.cb.decl_map)
			allocator = *tcctx.cb.decl_map->get (allocator);
		      tree a = build_simple_mem_ref_loc (loc, sarg);
		      allocator = omp_build_component_ref (a, allocator);
		    }
		  allocator = fold_convert (pointer_sized_int_node, allocator);
		  tree a = builtin_decl_explicit (BUILT_IN_GOMP_ALLOC);
		  tree align = build_int_cst (size_type_node,
					      DECL_ALIGN_UNIT (decl));
		  tree sz = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (dst)));
		  tree ptr = build_call_expr_loc (loc, a, 3, align, sz,
						  allocator);
		  ptr = fold_convert (TREE_TYPE (dst), ptr);
		  t = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, ptr);
		  append_to_statement_list (t, &list);
		  dst = build_simple_mem_ref_loc (loc, dst);
		}
	    /* Language-specific copy construction (e.g. C++ copy ctor).  */
	    t = lang_hooks.decls.omp_clause_copy_ctor (c, dst, src);
	  }
	append_to_statement_list (t, &list);
	break;
      case OMP_CLAUSE_PRIVATE:
	if (! OMP_CLAUSE_PRIVATE_OUTER_REF (c))
	  break;
	decl = OMP_CLAUSE_DECL (c);
	n = splay_tree_lookup (ctx->field_map, (splay_tree_key) decl);
	f = (tree) n->value;
	if (tcctx.cb.decl_map)
	  f = *tcctx.cb.decl_map->get (f);
	n = splay_tree_lookup (ctx->sfield_map, (splay_tree_key) decl);
	if (n != NULL)
	  {
	    sf = (tree) n->value;
	    if (tcctx.cb.decl_map)
	      sf = *tcctx.cb.decl_map->get (sf);
	    src = build_simple_mem_ref_loc (loc, sarg);
	    src = omp_build_component_ref (src, sf);
	    if (use_pointer_for_field (decl, NULL))
	      src = build_simple_mem_ref_loc (loc, src);
	  }
	else
	  src = decl;
	dst = build_simple_mem_ref_loc (loc, arg);
	dst = omp_build_component_ref (dst, f);
	t = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
	append_to_statement_list (t, &list);
	break;
      default:
	break;
      }

  /* Last pass: handle VLA firstprivates.  */
  if (tcctx.cb.decl_map)
    for (c = gimple_omp_task_clauses (task_stmt); c; c = OMP_CLAUSE_CHAIN (c))
      if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
	{
	  tree ind, ptr, df;

	  decl = OMP_CLAUSE_DECL (c);
	  if (!is_variable_sized (decl))
	    continue;
	  n = splay_tree_lookup (ctx->field_map, (splay_tree_key) decl);
	  if (n == NULL)
	    continue;
	  f = (tree) n->value;
	  f = *tcctx.cb.decl_map->get (f);
	  /* A VLA decl's value expr is *ptr_var; the pointer var is what
	     the field maps are keyed on.  */
	  gcc_assert (DECL_HAS_VALUE_EXPR_P (decl));
	  ind = DECL_VALUE_EXPR (decl);
	  gcc_assert (TREE_CODE (ind) == INDIRECT_REF);
	  gcc_assert (DECL_P (TREE_OPERAND (ind, 0)));
	  n = splay_tree_lookup (ctx->sfield_map,
				 (splay_tree_key) TREE_OPERAND (ind, 0));
	  sf = (tree) n->value;
	  sf = *tcctx.cb.decl_map->get (sf);
	  src = build_simple_mem_ref_loc (loc, sarg);
	  src = omp_build_component_ref (src, sf);
	  src = build_simple_mem_ref_loc (loc, src);
	  dst = build_simple_mem_ref_loc (loc, arg);
	  dst = omp_build_component_ref (dst, f);
	  t = lang_hooks.decls.omp_clause_copy_ctor (c, dst, src);
	  append_to_statement_list (t, &list);
	  /* Also store the address of the copied data into the task's
	     pointer field so the value expr resolves to it.  */
	  n = splay_tree_lookup (ctx->field_map,
				 (splay_tree_key) TREE_OPERAND (ind, 0));
	  df = (tree) n->value;
	  df = *tcctx.cb.decl_map->get (df);
	  ptr = build_simple_mem_ref_loc (loc, arg);
	  ptr = omp_build_component_ref (ptr, df);
	  t = build2 (MODIFY_EXPR, TREE_TYPE (ptr), ptr,
		      build_fold_addr_expr_loc (loc, dst));
	  append_to_statement_list (t, &list);
	}

  t = build1 (RETURN_EXPR, void_type_node, NULL);
  append_to_statement_list (t, &list);

  if (tcctx.cb.decl_map)
    delete tcctx.cb.decl_map;
  pop_gimplify_context (NULL);
  BIND_EXPR_BODY (bind) = list;
  pop_cfun ();
}
11454
/* Lower OMP_CLAUSE_DEPEND clauses found in *PCLAUSES into the runtime's
   array representation: count the dependences per kind, materialize an
   addressable array whose header encodes the counts followed by the
   dependence addresses, and replace the clauses with a single
   OMP_CLAUSE_DEPEND_LAST clause pointing at the array.  Initialization
   stmts are appended to *ISEQ, the final clobber to *OSEQ.  */

static void
lower_depend_clauses (tree *pclauses, gimple_seq *iseq, gimple_seq *oseq)
{
  tree c, clauses;
  gimple *g;
  /* cnt[0] = out/inout, cnt[1] = mutexinoutset, cnt[2] = in,
     cnt[3] = depobj.  idx is the size of the array header: 2 entries
     for the simple format, 5 for the extended one.  */
  size_t cnt[4] = { 0, 0, 0, 0 }, idx = 2, i;

  clauses = omp_find_clause (*pclauses, OMP_CLAUSE_DEPEND);
  gcc_assert (clauses);
  /* First walk: tally dependences by kind.  */
  for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
    if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND)
      switch (OMP_CLAUSE_DEPEND_KIND (c))
	{
	case OMP_CLAUSE_DEPEND_LAST:
	  /* Lowering already done at gimplification.  */
	  return;
	case OMP_CLAUSE_DEPEND_IN:
	  cnt[2]++;
	  break;
	case OMP_CLAUSE_DEPEND_OUT:
	case OMP_CLAUSE_DEPEND_INOUT:
	  cnt[0]++;
	  break;
	case OMP_CLAUSE_DEPEND_MUTEXINOUTSET:
	  cnt[1]++;
	  break;
	case OMP_CLAUSE_DEPEND_DEPOBJ:
	  cnt[3]++;
	  break;
	case OMP_CLAUSE_DEPEND_SOURCE:
	case OMP_CLAUSE_DEPEND_SINK:
	  /* FALLTHRU */
	default:
	  gcc_unreachable ();
	}
  /* mutexinoutset/depobj require the extended 5-entry header.  */
  if (cnt[1] || cnt[3])
    idx = 5;
  size_t total = cnt[0] + cnt[1] + cnt[2] + cnt[3];
  tree type = build_array_type_nelts (ptr_type_node, total + idx);
  tree array = create_tmp_var (type);
  TREE_ADDRESSABLE (array) = 1;
  tree r = build4 (ARRAY_REF, ptr_type_node, array, size_int (0), NULL_TREE,
		   NULL_TREE);
  if (idx == 5)
    {
      /* Extended header: entry 0 is 0 to flag the new format, entry 1
	 is the total count.  */
      g = gimple_build_assign (r, build_int_cst (ptr_type_node, 0));
      gimple_seq_add_stmt (iseq, g);
      r = build4 (ARRAY_REF, ptr_type_node, array, size_int (1), NULL_TREE,
		  NULL_TREE);
    }
  g = gimple_build_assign (r, build_int_cst (ptr_type_node, total));
  gimple_seq_add_stmt (iseq, g);
  /* Store per-kind counts: only out/inout for the simple format, the
     first three kinds for the extended format.  */
  for (i = 0; i < (idx == 5 ? 3 : 1); i++)
    {
      r = build4 (ARRAY_REF, ptr_type_node, array,
		  size_int (i + 1 + (idx == 5)), NULL_TREE, NULL_TREE);
      g = gimple_build_assign (r, build_int_cst (ptr_type_node, cnt[i]));
      gimple_seq_add_stmt (iseq, g);
    }
  /* Second walk: emit the dependence addresses, grouped by kind in the
     order out/inout, mutexinoutset, in, depobj.  */
  for (i = 0; i < 4; i++)
    {
      if (cnt[i] == 0)
	continue;
      for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
	if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_DEPEND)
	  continue;
	else
	  {
	    switch (OMP_CLAUSE_DEPEND_KIND (c))
	      {
	      case OMP_CLAUSE_DEPEND_IN:
		if (i != 2)
		  continue;
		break;
	      case OMP_CLAUSE_DEPEND_OUT:
	      case OMP_CLAUSE_DEPEND_INOUT:
		if (i != 0)
		  continue;
		break;
	      case OMP_CLAUSE_DEPEND_MUTEXINOUTSET:
		if (i != 1)
		  continue;
		break;
	      case OMP_CLAUSE_DEPEND_DEPOBJ:
		if (i != 3)
		  continue;
		break;
	      default:
		gcc_unreachable ();
	      }
	    tree t = OMP_CLAUSE_DECL (c);
	    t = fold_convert (ptr_type_node, t);
	    gimplify_expr (&t, iseq, NULL, is_gimple_val, fb_rvalue);
	    r = build4 (ARRAY_REF, ptr_type_node, array, size_int (idx++),
			NULL_TREE, NULL_TREE);
	    g = gimple_build_assign (r, t);
	    gimple_seq_add_stmt (iseq, g);
	  }
    }
  /* Replace the depend clauses with a single DEPEND_LAST clause whose
     decl is the address of the array.  */
  c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE_DEPEND);
  OMP_CLAUSE_DEPEND_KIND (c) = OMP_CLAUSE_DEPEND_LAST;
  OMP_CLAUSE_DECL (c) = build_fold_addr_expr (array);
  OMP_CLAUSE_CHAIN (c) = *pclauses;
  *pclauses = c;
  /* Clobber the array after the task region so its lifetime ends.  */
  tree clobber = build_clobber (type);
  g = gimple_build_assign (array, clobber);
  gimple_seq_add_stmt (oseq, g);
}
11563
11564 /* Lower the OpenMP parallel or task directive in the current statement
11565 in GSI_P. CTX holds context information for the directive. */
11566
11567 static void
11568 lower_omp_taskreg (gimple_stmt_iterator *gsi_p, omp_context *ctx)
11569 {
11570 tree clauses;
11571 tree child_fn, t;
11572 gimple *stmt = gsi_stmt (*gsi_p);
11573 gbind *par_bind, *bind, *dep_bind = NULL;
11574 gimple_seq par_body;
11575 location_t loc = gimple_location (stmt);
11576
11577 clauses = gimple_omp_taskreg_clauses (stmt);
/* A GIMPLE_OMP_TASK that is just a "taskwait" with depend clauses has no
   body to lower; otherwise the body is a single GIMPLE_BIND whose inner
   sequence is what we lower below.  */
11578 if (gimple_code (stmt) == GIMPLE_OMP_TASK
11579 && gimple_omp_task_taskwait_p (stmt))
11580 {
11581 par_bind = NULL;
11582 par_body = NULL;
11583 }
11584 else
11585 {
11586 par_bind
11587 = as_a <gbind *> (gimple_seq_first_stmt (gimple_omp_body (stmt)));
11588 par_body = gimple_bind_body (par_bind);
11589 }
11590 child_fn = ctx->cb.dst_fn;
/* For a parallel not already marked combined, walk its body with
   check_combined_parallel counting worksharing constructs into WS_NUM;
   exactly one means this is really a combined parallel-workshare.  */
11591 if (gimple_code (stmt) == GIMPLE_OMP_PARALLEL
11592 && !gimple_omp_parallel_combined_p (stmt))
11593 {
11594 struct walk_stmt_info wi;
11595 int ws_num = 0;
11596
11597 memset (&wi, 0, sizeof (wi));
11598 wi.info = &ws_num;
11599 wi.val_only = true;
11600 walk_gimple_seq (par_body, check_combined_parallel, NULL, &wi);
11601 if (ws_num == 1)
11602 gimple_omp_parallel_set_combined_p (stmt, true);
11603 }
/* Tasks with depend clauses get those clauses lowered into setup
   (dep_ilist) and teardown (dep_olist) sequences that will be placed
   around the construct inside a fresh bind, DEP_BIND.  */
11604 gimple_seq dep_ilist = NULL;
11605 gimple_seq dep_olist = NULL;
11606 if (gimple_code (stmt) == GIMPLE_OMP_TASK
11607 && omp_find_clause (clauses, OMP_CLAUSE_DEPEND))
11608 {
11609 push_gimplify_context ();
11610 dep_bind = gimple_build_bind (NULL, NULL, make_node (BLOCK));
11611 lower_depend_clauses (gimple_omp_task_clauses_ptr (stmt),
11612 &dep_ilist, &dep_olist);
11613 }
11614
/* A "taskwait depend" has nothing else to lower: wrap the stmt with the
   depend sequences (if any) and return early.  */
11615 if (gimple_code (stmt) == GIMPLE_OMP_TASK
11616 && gimple_omp_task_taskwait_p (stmt))
11617 {
11618 if (dep_bind)
11619 {
11620 gsi_replace (gsi_p, dep_bind, true);
11621 gimple_bind_add_seq (dep_bind, dep_ilist);
11622 gimple_bind_add_stmt (dep_bind, stmt);
11623 gimple_bind_add_seq (dep_bind, dep_olist);
11624 pop_gimplify_context (dep_bind);
11625 }
11626 return;
11627 }
11628
/* srecord_type is only set for tasks that need a separate copy
   function to initialize the task's data block.  */
11629 if (ctx->srecord_type)
11630 create_task_copyfn (as_a <gomp_task *> (stmt), ctx);
11631
/* Taskloop reductions and parallel _REDUCTEMP_ clauses need extra
   setup/teardown code (tskred_ilist/tskred_olist) around the construct;
   reuse or create DEP_BIND to host those sequences.  */
11632 gimple_seq tskred_ilist = NULL;
11633 gimple_seq tskred_olist = NULL;
11634 if ((is_task_ctx (ctx)
11635 && gimple_omp_task_taskloop_p (ctx->stmt)
11636 && omp_find_clause (gimple_omp_task_clauses (ctx->stmt),
11637 OMP_CLAUSE_REDUCTION))
11638 || (is_parallel_ctx (ctx)
11639 && omp_find_clause (gimple_omp_parallel_clauses (stmt),
11640 OMP_CLAUSE__REDUCTEMP_)))
11641 {
11642 if (dep_bind == NULL)
11643 {
11644 push_gimplify_context ();
11645 dep_bind = gimple_build_bind (NULL, NULL, make_node (BLOCK));
11646 }
11647 lower_omp_task_reductions (ctx, is_task_ctx (ctx) ? OMP_TASKLOOP
11648 : OMP_PARALLEL,
11649 gimple_omp_taskreg_clauses (ctx->stmt),
11650 &tskred_ilist, &tskred_olist);
11651 }
11652
11653 push_gimplify_context ();
11654
/* Lower the data-sharing clauses into receiver-side setup (par_ilist),
   copy-back (par_olist) and reduction epilogue (par_rlist) sequences,
   then recursively lower the body itself.  Tasks handle reductions
   elsewhere, hence the GIMPLE_OMP_TASK exclusion.  */
11655 gimple_seq par_olist = NULL;
11656 gimple_seq par_ilist = NULL;
11657 gimple_seq par_rlist = NULL;
11658 lower_rec_input_clauses (clauses, &par_ilist, &par_olist, ctx, NULL);
11659 lower_omp (&par_body, ctx);
11660 if (gimple_code (stmt) != GIMPLE_OMP_TASK)
11661 lower_reduction_clauses (clauses, &par_rlist, NULL, ctx);
11662
11663 /* Declare all the variables created by mapping and the variables
11664 declared in the scope of the parallel body. */
11665 record_vars_into (ctx->block_vars, child_fn)_;
11666 maybe_remove_omp_member_access_dummy_vars (par_bind);
11667 record_vars_into (gimple_bind_vars (par_bind), child_fn);
11668
/* .omp_data_o is the sender-side data block; its address becomes the
   data argument handed to the child function.  */
11669 if (ctx->record_type)
11670 {
11671 ctx->sender_decl
11672 = create_tmp_var (ctx->srecord_type ? ctx->srecord_type
11673 : ctx->record_type, ".omp_data_o");
11674 DECL_NAMELESS (ctx->sender_decl) = 1;
11675 TREE_ADDRESSABLE (ctx->sender_decl) = 1;
11676 gimple_omp_taskreg_set_data_arg (stmt, ctx->sender_decl);
11677 }
11678
/* ILIST fills the sender struct before the region; OLIST copies values
   back out afterwards.  */
11679 gimple_seq olist = NULL;
11680 gimple_seq ilist = NULL;
11681 lower_send_clauses (clauses, &ilist, &olist, ctx);
11682 lower_send_shared_vars (&ilist, &olist, ctx);
11683
/* The sender struct is dead once the region completes; emit a clobber
   for it at the end of OLIST.  */
11684 if (ctx->record_type)
11685 {
11686 tree clobber = build_clobber (TREE_TYPE (ctx->sender_decl));
11687 gimple_seq_add_stmt (&olist, gimple_build_assign (ctx->sender_decl,
11688 clobber));
11689 }
11690
11691 /* Once all the expansions are done, sequence all the different
11692 fragments inside gimple_omp_body. */
11693
11694 gimple_seq new_body = NULL;
11695
11696 if (ctx->record_type)
11697 {
11698 t = build_fold_addr_expr_loc (loc, ctx->sender_decl);
11699 /* fixup_child_record_type might have changed receiver_decl's type. */
11700 t = fold_convert_loc (loc, TREE_TYPE (ctx->receiver_decl), t);
11701 gimple_seq_add_stmt (&new_body,
11702 gimple_build_assign (ctx->receiver_decl, t));
11703 }
11704
/* Order matters: clause setup, user body, reductions, optional cancel
   label, clause copy-back, then the closing OMP_CONTINUE (tasks only)
   and OMP_RETURN markers.  */
11705 gimple_seq_add_seq (&new_body, par_ilist);
11706 gimple_seq_add_seq (&new_body, par_body);
11707 gimple_seq_add_seq (&new_body, par_rlist);
11708 if (ctx->cancellable)
11709 gimple_seq_add_stmt (&new_body, gimple_build_label (ctx->cancel_label));
11710 gimple_seq_add_seq (&new_body, par_olist);
11711 new_body = maybe_catch_exception (new_body);
11712 if (gimple_code (stmt) == GIMPLE_OMP_TASK)
11713 gimple_seq_add_stmt (&new_body,
11714 gimple_build_omp_continue (integer_zero_node,
11715 integer_zero_node));
11716 gimple_seq_add_stmt (&new_body, gimple_build_omp_return (false));
11717 gimple_omp_set_body (stmt, new_body);
11718
/* Build the outer bind holding the send sequences around the stmt.  If
   DEP_BIND exists it replaces the original stmt and BIND is nested
   inside it together with the depend/task-reduction sequences.  */
11719 if (dep_bind && gimple_bind_block (par_bind) == NULL_TREE)
11720 bind = gimple_build_bind (NULL, NULL, make_node (BLOCK));
11721 else
11722 bind = gimple_build_bind (NULL, NULL, gimple_bind_block (par_bind));
11723 gsi_replace (gsi_p, dep_bind ? dep_bind : bind, true);
11724 gimple_bind_add_seq (bind, ilist);
11725 gimple_bind_add_stmt (bind, stmt);
11726 gimple_bind_add_seq (bind, olist);
11727
11728 pop_gimplify_context (NULL);
11729
11730 if (dep_bind)
11731 {
11732 gimple_bind_add_seq (dep_bind, dep_ilist);
11733 gimple_bind_add_seq (dep_bind, tskred_ilist);
11734 gimple_bind_add_stmt (dep_bind, bind);
11735 gimple_bind_add_seq (dep_bind, tskred_olist);
11736 gimple_bind_add_seq (dep_bind, dep_olist);
11737 pop_gimplify_context (dep_bind);
11738 }
11739 }
11740
11741 /* Lower the GIMPLE_OMP_TARGET in the current statement
11742 in GSI_P. CTX holds context information for the directive. */
11743
11744 static void
11745 lower_omp_target (gimple_stmt_iterator *gsi_p, omp_context *ctx)
11746 {
11747 tree clauses;
11748 tree child_fn, t, c;
11749 gomp_target *stmt = as_a <gomp_target *> (gsi_stmt (*gsi_p));
11750 gbind *tgt_bind, *bind, *dep_bind = NULL;
11751 gimple_seq tgt_body, olist, ilist, fplist, new_body;
11752 location_t loc = gimple_location (stmt);
11753 bool offloaded, data_region;
11754 unsigned int map_cnt = 0;
11755
11756 offloaded = is_gimple_omp_offloaded (stmt);
11757 switch (gimple_omp_target_kind (stmt))
11758 {
11759 case GF_OMP_TARGET_KIND_REGION:
11760 case GF_OMP_TARGET_KIND_UPDATE:
11761 case GF_OMP_TARGET_KIND_ENTER_DATA:
11762 case GF_OMP_TARGET_KIND_EXIT_DATA:
11763 case GF_OMP_TARGET_KIND_OACC_PARALLEL:
11764 case GF_OMP_TARGET_KIND_OACC_KERNELS:
11765 case GF_OMP_TARGET_KIND_OACC_SERIAL:
11766 case GF_OMP_TARGET_KIND_OACC_UPDATE:
11767 case GF_OMP_TARGET_KIND_OACC_ENTER_EXIT_DATA:
11768 case GF_OMP_TARGET_KIND_OACC_DECLARE:
11769 case GF_OMP_TARGET_KIND_OACC_PARALLEL_KERNELS_PARALLELIZED:
11770 case GF_OMP_TARGET_KIND_OACC_PARALLEL_KERNELS_GANG_SINGLE:
11771 data_region = false;
11772 break;
11773 case GF_OMP_TARGET_KIND_DATA:
11774 case GF_OMP_TARGET_KIND_OACC_DATA:
11775 case GF_OMP_TARGET_KIND_OACC_HOST_DATA:
11776 case GF_OMP_TARGET_KIND_OACC_DATA_KERNELS:
11777 data_region = true;
11778 break;
11779 default:
11780 gcc_unreachable ();
11781 }
11782
11783 clauses = gimple_omp_target_clauses (stmt);
11784
11785 gimple_seq dep_ilist = NULL;
11786 gimple_seq dep_olist = NULL;
11787 if (omp_find_clause (clauses, OMP_CLAUSE_DEPEND))
11788 {
11789 push_gimplify_context ();
11790 dep_bind = gimple_build_bind (NULL, NULL, make_node (BLOCK));
11791 lower_depend_clauses (gimple_omp_target_clauses_ptr (stmt),
11792 &dep_ilist, &dep_olist);
11793 }
11794
11795 tgt_bind = NULL;
11796 tgt_body = NULL;
11797 if (offloaded)
11798 {
11799 tgt_bind = gimple_seq_first_stmt_as_a_bind (gimple_omp_body (stmt));
11800 tgt_body = gimple_bind_body (tgt_bind);
11801 }
11802 else if (data_region)
11803 tgt_body = gimple_omp_body (stmt);
11804 child_fn = ctx->cb.dst_fn;
11805
11806 push_gimplify_context ();
11807 fplist = NULL;
11808
11809 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
11810 switch (OMP_CLAUSE_CODE (c))
11811 {
11812 tree var, x;
11813
11814 default:
11815 break;
11816 case OMP_CLAUSE_MAP:
11817 #if CHECKING_P
11818 /* First check what we're prepared to handle in the following. */
11819 switch (OMP_CLAUSE_MAP_KIND (c))
11820 {
11821 case GOMP_MAP_ALLOC:
11822 case GOMP_MAP_TO:
11823 case GOMP_MAP_FROM:
11824 case GOMP_MAP_TOFROM:
11825 case GOMP_MAP_POINTER:
11826 case GOMP_MAP_TO_PSET:
11827 case GOMP_MAP_DELETE:
11828 case GOMP_MAP_RELEASE:
11829 case GOMP_MAP_ALWAYS_TO:
11830 case GOMP_MAP_ALWAYS_FROM:
11831 case GOMP_MAP_ALWAYS_TOFROM:
11832 case GOMP_MAP_FIRSTPRIVATE_POINTER:
11833 case GOMP_MAP_FIRSTPRIVATE_REFERENCE:
11834 case GOMP_MAP_STRUCT:
11835 case GOMP_MAP_ALWAYS_POINTER:
11836 case GOMP_MAP_ATTACH:
11837 case GOMP_MAP_DETACH:
11838 break;
11839 case GOMP_MAP_IF_PRESENT:
11840 case GOMP_MAP_FORCE_ALLOC:
11841 case GOMP_MAP_FORCE_TO:
11842 case GOMP_MAP_FORCE_FROM:
11843 case GOMP_MAP_FORCE_TOFROM:
11844 case GOMP_MAP_FORCE_PRESENT:
11845 case GOMP_MAP_FORCE_DEVICEPTR:
11846 case GOMP_MAP_DEVICE_RESIDENT:
11847 case GOMP_MAP_LINK:
11848 case GOMP_MAP_FORCE_DETACH:
11849 gcc_assert (is_gimple_omp_oacc (stmt));
11850 break;
11851 default:
11852 gcc_unreachable ();
11853 }
11854 #endif
11855 /* FALLTHRU */
11856 case OMP_CLAUSE_TO:
11857 case OMP_CLAUSE_FROM:
11858 oacc_firstprivate:
11859 var = OMP_CLAUSE_DECL (c);
11860 if (!DECL_P (var))
11861 {
11862 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_MAP
11863 || (!OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c)
11864 && (OMP_CLAUSE_MAP_KIND (c)
11865 != GOMP_MAP_FIRSTPRIVATE_POINTER)))
11866 map_cnt++;
11867 continue;
11868 }
11869
11870 if (DECL_SIZE (var)
11871 && TREE_CODE (DECL_SIZE (var)) != INTEGER_CST)
11872 {
11873 tree var2 = DECL_VALUE_EXPR (var);
11874 gcc_assert (TREE_CODE (var2) == INDIRECT_REF);
11875 var2 = TREE_OPERAND (var2, 0);
11876 gcc_assert (DECL_P (var2));
11877 var = var2;
11878 }
11879
11880 if (offloaded
11881 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
11882 && (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER
11883 || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_REFERENCE))
11884 {
11885 if (TREE_CODE (TREE_TYPE (var)) == ARRAY_TYPE)
11886 {
11887 if (is_global_var (maybe_lookup_decl_in_outer_ctx (var, ctx))
11888 && varpool_node::get_create (var)->offloadable)
11889 continue;
11890
11891 tree type = build_pointer_type (TREE_TYPE (var));
11892 tree new_var = lookup_decl (var, ctx);
11893 x = create_tmp_var_raw (type, get_name (new_var));
11894 gimple_add_tmp_var (x);
11895 x = build_simple_mem_ref (x);
11896 SET_DECL_VALUE_EXPR (new_var, x);
11897 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
11898 }
11899 continue;
11900 }
11901
11902 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
11903 && (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ATTACH
11904 || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_DETACH)
11905 && is_omp_target (stmt))
11906 {
11907 gcc_assert (maybe_lookup_field (c, ctx));
11908 map_cnt++;
11909 continue;
11910 }
11911
11912 if (!maybe_lookup_field (var, ctx))
11913 continue;
11914
11915 /* Don't remap compute constructs' reduction variables, because the
11916 intermediate result must be local to each gang. */
11917 if (offloaded && !(OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
11918 && OMP_CLAUSE_MAP_IN_REDUCTION (c)))
11919 {
11920 x = build_receiver_ref (var, true, ctx);
11921 tree new_var = lookup_decl (var, ctx);
11922
11923 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
11924 && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_POINTER
11925 && !OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c)
11926 && TREE_CODE (TREE_TYPE (var)) == ARRAY_TYPE)
11927 x = build_simple_mem_ref (x);
11928 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
11929 {
11930 gcc_assert (is_gimple_omp_oacc (ctx->stmt));
11931 if (omp_is_reference (new_var)
11932 && (TREE_CODE (TREE_TYPE (new_var)) != POINTER_TYPE
11933 || DECL_BY_REFERENCE (var)))
11934 {
11935 /* Create a local object to hold the instance
11936 value. */
11937 tree type = TREE_TYPE (TREE_TYPE (new_var));
11938 const char *id = IDENTIFIER_POINTER (DECL_NAME (new_var));
11939 tree inst = create_tmp_var (type, id);
11940 gimplify_assign (inst, fold_indirect_ref (x), &fplist);
11941 x = build_fold_addr_expr (inst);
11942 }
11943 gimplify_assign (new_var, x, &fplist);
11944 }
11945 else if (DECL_P (new_var))
11946 {
11947 SET_DECL_VALUE_EXPR (new_var, x);
11948 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
11949 }
11950 else
11951 gcc_unreachable ();
11952 }
11953 map_cnt++;
11954 break;
11955
11956 case OMP_CLAUSE_FIRSTPRIVATE:
11957 gcc_checking_assert (offloaded);
11958 if (is_gimple_omp_oacc (ctx->stmt))
11959 {
11960 /* No 'firstprivate' clauses on OpenACC 'kernels'. */
11961 gcc_checking_assert (!is_oacc_kernels (ctx));
11962 /* Likewise, on OpenACC 'kernels' decomposed parts. */
11963 gcc_checking_assert (!is_oacc_kernels_decomposed_part (ctx));
11964
11965 goto oacc_firstprivate;
11966 }
11967 map_cnt++;
11968 var = OMP_CLAUSE_DECL (c);
11969 if (!omp_is_reference (var)
11970 && !is_gimple_reg_type (TREE_TYPE (var)))
11971 {
11972 tree new_var = lookup_decl (var, ctx);
11973 if (is_variable_sized (var))
11974 {
11975 tree pvar = DECL_VALUE_EXPR (var);
11976 gcc_assert (TREE_CODE (pvar) == INDIRECT_REF);
11977 pvar = TREE_OPERAND (pvar, 0);
11978 gcc_assert (DECL_P (pvar));
11979 tree new_pvar = lookup_decl (pvar, ctx);
11980 x = build_fold_indirect_ref (new_pvar);
11981 TREE_THIS_NOTRAP (x) = 1;
11982 }
11983 else
11984 x = build_receiver_ref (var, true, ctx);
11985 SET_DECL_VALUE_EXPR (new_var, x);
11986 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
11987 }
11988 break;
11989
11990 case OMP_CLAUSE_PRIVATE:
11991 gcc_checking_assert (offloaded);
11992 if (is_gimple_omp_oacc (ctx->stmt))
11993 {
11994 /* No 'private' clauses on OpenACC 'kernels'. */
11995 gcc_checking_assert (!is_oacc_kernels (ctx));
11996 /* Likewise, on OpenACC 'kernels' decomposed parts. */
11997 gcc_checking_assert (!is_oacc_kernels_decomposed_part (ctx));
11998
11999 break;
12000 }
12001 var = OMP_CLAUSE_DECL (c);
12002 if (is_variable_sized (var))
12003 {
12004 tree new_var = lookup_decl (var, ctx);
12005 tree pvar = DECL_VALUE_EXPR (var);
12006 gcc_assert (TREE_CODE (pvar) == INDIRECT_REF);
12007 pvar = TREE_OPERAND (pvar, 0);
12008 gcc_assert (DECL_P (pvar));
12009 tree new_pvar = lookup_decl (pvar, ctx);
12010 x = build_fold_indirect_ref (new_pvar);
12011 TREE_THIS_NOTRAP (x) = 1;
12012 SET_DECL_VALUE_EXPR (new_var, x);
12013 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
12014 }
12015 break;
12016
12017 case OMP_CLAUSE_USE_DEVICE_PTR:
12018 case OMP_CLAUSE_USE_DEVICE_ADDR:
12019 case OMP_CLAUSE_IS_DEVICE_PTR:
12020 var = OMP_CLAUSE_DECL (c);
12021 map_cnt++;
12022 if (is_variable_sized (var))
12023 {
12024 tree new_var = lookup_decl (var, ctx);
12025 tree pvar = DECL_VALUE_EXPR (var);
12026 gcc_assert (TREE_CODE (pvar) == INDIRECT_REF);
12027 pvar = TREE_OPERAND (pvar, 0);
12028 gcc_assert (DECL_P (pvar));
12029 tree new_pvar = lookup_decl (pvar, ctx);
12030 x = build_fold_indirect_ref (new_pvar);
12031 TREE_THIS_NOTRAP (x) = 1;
12032 SET_DECL_VALUE_EXPR (new_var, x);
12033 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
12034 }
12035 else if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_USE_DEVICE_ADDR
12036 && !omp_is_reference (var)
12037 && !omp_is_allocatable_or_ptr (var)
12038 && !lang_hooks.decls.omp_array_data (var, true))
12039 || TREE_CODE (TREE_TYPE (var)) == ARRAY_TYPE)
12040 {
12041 tree new_var = lookup_decl (var, ctx);
12042 tree type = build_pointer_type (TREE_TYPE (var));
12043 x = create_tmp_var_raw (type, get_name (new_var));
12044 gimple_add_tmp_var (x);
12045 x = build_simple_mem_ref (x);
12046 SET_DECL_VALUE_EXPR (new_var, x);
12047 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
12048 }
12049 else
12050 {
12051 tree new_var = lookup_decl (var, ctx);
12052 x = create_tmp_var_raw (TREE_TYPE (new_var), get_name (new_var));
12053 gimple_add_tmp_var (x);
12054 SET_DECL_VALUE_EXPR (new_var, x);
12055 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
12056 }
12057 break;
12058 }
12059
12060 if (offloaded)
12061 {
12062 target_nesting_level++;
12063 lower_omp (&tgt_body, ctx);
12064 target_nesting_level--;
12065 }
12066 else if (data_region)
12067 lower_omp (&tgt_body, ctx);
12068
12069 if (offloaded)
12070 {
12071 /* Declare all the variables created by mapping and the variables
12072 declared in the scope of the target body. */
12073 record_vars_into (ctx->block_vars, child_fn);
12074 maybe_remove_omp_member_access_dummy_vars (tgt_bind);
12075 record_vars_into (gimple_bind_vars (tgt_bind), child_fn);
12076 }
12077
12078 olist = NULL;
12079 ilist = NULL;
12080 if (ctx->record_type)
12081 {
12082 ctx->sender_decl
12083 = create_tmp_var (ctx->record_type, ".omp_data_arr");
12084 DECL_NAMELESS (ctx->sender_decl) = 1;
12085 TREE_ADDRESSABLE (ctx->sender_decl) = 1;
12086 t = make_tree_vec (3);
12087 TREE_VEC_ELT (t, 0) = ctx->sender_decl;
12088 TREE_VEC_ELT (t, 1)
12089 = create_tmp_var (build_array_type_nelts (size_type_node, map_cnt),
12090 ".omp_data_sizes");
12091 DECL_NAMELESS (TREE_VEC_ELT (t, 1)) = 1;
12092 TREE_ADDRESSABLE (TREE_VEC_ELT (t, 1)) = 1;
12093 TREE_STATIC (TREE_VEC_ELT (t, 1)) = 1;
12094 tree tkind_type = short_unsigned_type_node;
12095 int talign_shift = 8;
12096 TREE_VEC_ELT (t, 2)
12097 = create_tmp_var (build_array_type_nelts (tkind_type, map_cnt),
12098 ".omp_data_kinds");
12099 DECL_NAMELESS (TREE_VEC_ELT (t, 2)) = 1;
12100 TREE_ADDRESSABLE (TREE_VEC_ELT (t, 2)) = 1;
12101 TREE_STATIC (TREE_VEC_ELT (t, 2)) = 1;
12102 gimple_omp_target_set_data_arg (stmt, t);
12103
12104 vec<constructor_elt, va_gc> *vsize;
12105 vec<constructor_elt, va_gc> *vkind;
12106 vec_alloc (vsize, map_cnt);
12107 vec_alloc (vkind, map_cnt);
12108 unsigned int map_idx = 0;
12109
12110 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
12111 switch (OMP_CLAUSE_CODE (c))
12112 {
12113 tree ovar, nc, s, purpose, var, x, type;
12114 unsigned int talign;
12115
12116 default:
12117 break;
12118
12119 case OMP_CLAUSE_MAP:
12120 case OMP_CLAUSE_TO:
12121 case OMP_CLAUSE_FROM:
12122 oacc_firstprivate_map:
12123 nc = c;
12124 ovar = OMP_CLAUSE_DECL (c);
12125 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
12126 && (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER
12127 || (OMP_CLAUSE_MAP_KIND (c)
12128 == GOMP_MAP_FIRSTPRIVATE_REFERENCE)))
12129 break;
12130 if (!DECL_P (ovar))
12131 {
12132 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
12133 && OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c))
12134 {
12135 gcc_checking_assert (OMP_CLAUSE_DECL (OMP_CLAUSE_CHAIN (c))
12136 == get_base_address (ovar));
12137 nc = OMP_CLAUSE_CHAIN (c);
12138 ovar = OMP_CLAUSE_DECL (nc);
12139 }
12140 else
12141 {
12142 tree x = build_sender_ref (ovar, ctx);
12143 tree v
12144 = build_fold_addr_expr_with_type (ovar, ptr_type_node);
12145 gimplify_assign (x, v, &ilist);
12146 nc = NULL_TREE;
12147 }
12148 }
12149 else
12150 {
12151 if (DECL_SIZE (ovar)
12152 && TREE_CODE (DECL_SIZE (ovar)) != INTEGER_CST)
12153 {
12154 tree ovar2 = DECL_VALUE_EXPR (ovar);
12155 gcc_assert (TREE_CODE (ovar2) == INDIRECT_REF);
12156 ovar2 = TREE_OPERAND (ovar2, 0);
12157 gcc_assert (DECL_P (ovar2));
12158 ovar = ovar2;
12159 }
12160 if (!maybe_lookup_field (ovar, ctx)
12161 && !(OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
12162 && (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ATTACH
12163 || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_DETACH)))
12164 continue;
12165 }
12166
12167 talign = TYPE_ALIGN_UNIT (TREE_TYPE (ovar));
12168 if (DECL_P (ovar) && DECL_ALIGN_UNIT (ovar) > talign)
12169 talign = DECL_ALIGN_UNIT (ovar);
12170
12171 if (nc
12172 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
12173 && (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ATTACH
12174 || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_DETACH)
12175 && is_omp_target (stmt))
12176 {
12177 var = lookup_decl_in_outer_ctx (ovar, ctx);
12178 x = build_sender_ref (c, ctx);
12179 gimplify_assign (x, build_fold_addr_expr (var), &ilist);
12180 }
12181 else if (nc)
12182 {
12183 var = lookup_decl_in_outer_ctx (ovar, ctx);
12184 x = build_sender_ref (ovar, ctx);
12185
12186 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
12187 && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_POINTER
12188 && !OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c)
12189 && TREE_CODE (TREE_TYPE (ovar)) == ARRAY_TYPE)
12190 {
12191 gcc_assert (offloaded);
12192 tree avar
12193 = create_tmp_var (TREE_TYPE (TREE_TYPE (x)));
12194 mark_addressable (avar);
12195 gimplify_assign (avar, build_fold_addr_expr (var), &ilist);
12196 talign = DECL_ALIGN_UNIT (avar);
12197 avar = build_fold_addr_expr (avar);
12198 gimplify_assign (x, avar, &ilist);
12199 }
12200 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
12201 {
12202 gcc_assert (is_gimple_omp_oacc (ctx->stmt));
12203 if (!omp_is_reference (var))
12204 {
12205 if (is_gimple_reg (var)
12206 && OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (c))
12207 TREE_NO_WARNING (var) = 1;
12208 var = build_fold_addr_expr (var);
12209 }
12210 else
12211 talign = TYPE_ALIGN_UNIT (TREE_TYPE (TREE_TYPE (ovar)));
12212 gimplify_assign (x, var, &ilist);
12213 }
12214 else if (is_gimple_reg (var))
12215 {
12216 gcc_assert (offloaded);
12217 tree avar = create_tmp_var (TREE_TYPE (var));
12218 mark_addressable (avar);
12219 enum gomp_map_kind map_kind = OMP_CLAUSE_MAP_KIND (c);
12220 if (GOMP_MAP_COPY_TO_P (map_kind)
12221 || map_kind == GOMP_MAP_POINTER
12222 || map_kind == GOMP_MAP_TO_PSET
12223 || map_kind == GOMP_MAP_FORCE_DEVICEPTR)
12224 {
12225 /* If we need to initialize a temporary
12226 with VAR because it is not addressable, and
12227 the variable hasn't been initialized yet, then
12228 we'll get a warning for the store to avar.
12229 Don't warn in that case, the mapping might
12230 be implicit. */
12231 TREE_NO_WARNING (var) = 1;
12232 gimplify_assign (avar, var, &ilist);
12233 }
12234 avar = build_fold_addr_expr (avar);
12235 gimplify_assign (x, avar, &ilist);
12236 if ((GOMP_MAP_COPY_FROM_P (map_kind)
12237 || map_kind == GOMP_MAP_FORCE_DEVICEPTR)
12238 && !TYPE_READONLY (TREE_TYPE (var)))
12239 {
12240 x = unshare_expr (x);
12241 x = build_simple_mem_ref (x);
12242 gimplify_assign (var, x, &olist);
12243 }
12244 }
12245 else
12246 {
12247 /* While MAP is handled explicitly by the FE,
12248 for 'target update', only the identified is passed. */
12249 if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FROM
12250 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_TO)
12251 && (omp_is_allocatable_or_ptr (var)
12252 && omp_check_optional_argument (var, false)))
12253 var = build_fold_indirect_ref (var);
12254 else if ((OMP_CLAUSE_CODE (c) != OMP_CLAUSE_FROM
12255 && OMP_CLAUSE_CODE (c) != OMP_CLAUSE_TO)
12256 || (!omp_is_allocatable_or_ptr (var)
12257 && !omp_check_optional_argument (var, false)))
12258 var = build_fold_addr_expr (var);
12259 gimplify_assign (x, var, &ilist);
12260 }
12261 }
12262 s = NULL_TREE;
12263 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
12264 {
12265 gcc_checking_assert (is_gimple_omp_oacc (ctx->stmt));
12266 s = TREE_TYPE (ovar);
12267 if (TREE_CODE (s) == REFERENCE_TYPE
12268 || omp_check_optional_argument (ovar, false))
12269 s = TREE_TYPE (s);
12270 s = TYPE_SIZE_UNIT (s);
12271 }
12272 else
12273 s = OMP_CLAUSE_SIZE (c);
12274 if (s == NULL_TREE)
12275 s = TYPE_SIZE_UNIT (TREE_TYPE (ovar));
12276 s = fold_convert (size_type_node, s);
12277 purpose = size_int (map_idx++);
12278 CONSTRUCTOR_APPEND_ELT (vsize, purpose, s);
12279 if (TREE_CODE (s) != INTEGER_CST)
12280 TREE_STATIC (TREE_VEC_ELT (t, 1)) = 0;
12281
12282 unsigned HOST_WIDE_INT tkind, tkind_zero;
12283 switch (OMP_CLAUSE_CODE (c))
12284 {
12285 case OMP_CLAUSE_MAP:
12286 tkind = OMP_CLAUSE_MAP_KIND (c);
12287 tkind_zero = tkind;
12288 if (OMP_CLAUSE_MAP_MAYBE_ZERO_LENGTH_ARRAY_SECTION (c))
12289 switch (tkind)
12290 {
12291 case GOMP_MAP_ALLOC:
12292 case GOMP_MAP_IF_PRESENT:
12293 case GOMP_MAP_TO:
12294 case GOMP_MAP_FROM:
12295 case GOMP_MAP_TOFROM:
12296 case GOMP_MAP_ALWAYS_TO:
12297 case GOMP_MAP_ALWAYS_FROM:
12298 case GOMP_MAP_ALWAYS_TOFROM:
12299 case GOMP_MAP_RELEASE:
12300 case GOMP_MAP_FORCE_TO:
12301 case GOMP_MAP_FORCE_FROM:
12302 case GOMP_MAP_FORCE_TOFROM:
12303 case GOMP_MAP_FORCE_PRESENT:
12304 tkind_zero = GOMP_MAP_ZERO_LEN_ARRAY_SECTION;
12305 break;
12306 case GOMP_MAP_DELETE:
12307 tkind_zero = GOMP_MAP_DELETE_ZERO_LEN_ARRAY_SECTION;
12308 default:
12309 break;
12310 }
12311 if (tkind_zero != tkind)
12312 {
12313 if (integer_zerop (s))
12314 tkind = tkind_zero;
12315 else if (integer_nonzerop (s))
12316 tkind_zero = tkind;
12317 }
12318 break;
12319 case OMP_CLAUSE_FIRSTPRIVATE:
12320 gcc_checking_assert (is_gimple_omp_oacc (ctx->stmt));
12321 tkind = GOMP_MAP_TO;
12322 tkind_zero = tkind;
12323 break;
12324 case OMP_CLAUSE_TO:
12325 tkind = GOMP_MAP_TO;
12326 tkind_zero = tkind;
12327 break;
12328 case OMP_CLAUSE_FROM:
12329 tkind = GOMP_MAP_FROM;
12330 tkind_zero = tkind;
12331 break;
12332 default:
12333 gcc_unreachable ();
12334 }
12335 gcc_checking_assert (tkind
12336 < (HOST_WIDE_INT_C (1U) << talign_shift));
12337 gcc_checking_assert (tkind_zero
12338 < (HOST_WIDE_INT_C (1U) << talign_shift));
12339 talign = ceil_log2 (talign);
12340 tkind |= talign << talign_shift;
12341 tkind_zero |= talign << talign_shift;
12342 gcc_checking_assert (tkind
12343 <= tree_to_uhwi (TYPE_MAX_VALUE (tkind_type)));
12344 gcc_checking_assert (tkind_zero
12345 <= tree_to_uhwi (TYPE_MAX_VALUE (tkind_type)));
12346 if (tkind == tkind_zero)
12347 x = build_int_cstu (tkind_type, tkind);
12348 else
12349 {
12350 TREE_STATIC (TREE_VEC_ELT (t, 2)) = 0;
12351 x = build3 (COND_EXPR, tkind_type,
12352 fold_build2 (EQ_EXPR, boolean_type_node,
12353 unshare_expr (s), size_zero_node),
12354 build_int_cstu (tkind_type, tkind_zero),
12355 build_int_cstu (tkind_type, tkind));
12356 }
12357 CONSTRUCTOR_APPEND_ELT (vkind, purpose, x);
12358 if (nc && nc != c)
12359 c = nc;
12360 break;
12361
12362 case OMP_CLAUSE_FIRSTPRIVATE:
12363 if (is_gimple_omp_oacc (ctx->stmt))
12364 goto oacc_firstprivate_map;
12365 ovar = OMP_CLAUSE_DECL (c);
12366 if (omp_is_reference (ovar))
12367 talign = TYPE_ALIGN_UNIT (TREE_TYPE (TREE_TYPE (ovar)));
12368 else
12369 talign = DECL_ALIGN_UNIT (ovar);
12370 var = lookup_decl_in_outer_ctx (ovar, ctx);
12371 x = build_sender_ref (ovar, ctx);
12372 tkind = GOMP_MAP_FIRSTPRIVATE;
12373 type = TREE_TYPE (ovar);
12374 if (omp_is_reference (ovar))
12375 type = TREE_TYPE (type);
12376 if ((INTEGRAL_TYPE_P (type)
12377 && TYPE_PRECISION (type) <= POINTER_SIZE)
12378 || TREE_CODE (type) == POINTER_TYPE)
12379 {
12380 tkind = GOMP_MAP_FIRSTPRIVATE_INT;
12381 tree t = var;
12382 if (omp_is_reference (var))
12383 t = build_simple_mem_ref (var);
12384 else if (OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (c))
12385 TREE_NO_WARNING (var) = 1;
12386 if (TREE_CODE (type) != POINTER_TYPE)
12387 t = fold_convert (pointer_sized_int_node, t);
12388 t = fold_convert (TREE_TYPE (x), t);
12389 gimplify_assign (x, t, &ilist);
12390 }
12391 else if (omp_is_reference (var))
12392 gimplify_assign (x, var, &ilist);
12393 else if (is_gimple_reg (var))
12394 {
12395 tree avar = create_tmp_var (TREE_TYPE (var));
12396 mark_addressable (avar);
12397 if (OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (c))
12398 TREE_NO_WARNING (var) = 1;
12399 gimplify_assign (avar, var, &ilist);
12400 avar = build_fold_addr_expr (avar);
12401 gimplify_assign (x, avar, &ilist);
12402 }
12403 else
12404 {
12405 var = build_fold_addr_expr (var);
12406 gimplify_assign (x, var, &ilist);
12407 }
12408 if (tkind == GOMP_MAP_FIRSTPRIVATE_INT)
12409 s = size_int (0);
12410 else if (omp_is_reference (ovar))
12411 s = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ovar)));
12412 else
12413 s = TYPE_SIZE_UNIT (TREE_TYPE (ovar));
12414 s = fold_convert (size_type_node, s);
12415 purpose = size_int (map_idx++);
12416 CONSTRUCTOR_APPEND_ELT (vsize, purpose, s);
12417 if (TREE_CODE (s) != INTEGER_CST)
12418 TREE_STATIC (TREE_VEC_ELT (t, 1)) = 0;
12419
12420 gcc_checking_assert (tkind
12421 < (HOST_WIDE_INT_C (1U) << talign_shift));
12422 talign = ceil_log2 (talign);
12423 tkind |= talign << talign_shift;
12424 gcc_checking_assert (tkind
12425 <= tree_to_uhwi (TYPE_MAX_VALUE (tkind_type)));
12426 CONSTRUCTOR_APPEND_ELT (vkind, purpose,
12427 build_int_cstu (tkind_type, tkind));
12428 break;
12429
12430 case OMP_CLAUSE_USE_DEVICE_PTR:
12431 case OMP_CLAUSE_USE_DEVICE_ADDR:
12432 case OMP_CLAUSE_IS_DEVICE_PTR:
12433 ovar = OMP_CLAUSE_DECL (c);
12434 var = lookup_decl_in_outer_ctx (ovar, ctx);
12435
12436 if (lang_hooks.decls.omp_array_data (ovar, true))
12437 {
12438 tkind = (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_IS_DEVICE_PTR
12439 ? GOMP_MAP_USE_DEVICE_PTR : GOMP_MAP_FIRSTPRIVATE_INT);
12440 x = build_sender_ref ((splay_tree_key) &DECL_NAME (ovar), ctx);
12441 }
12442 else if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_IS_DEVICE_PTR)
12443 {
12444 tkind = GOMP_MAP_USE_DEVICE_PTR;
12445 x = build_sender_ref ((splay_tree_key) &DECL_UID (ovar), ctx);
12446 }
12447 else
12448 {
12449 tkind = GOMP_MAP_FIRSTPRIVATE_INT;
12450 x = build_sender_ref (ovar, ctx);
12451 }
12452
12453 if (is_gimple_omp_oacc (ctx->stmt))
12454 {
12455 gcc_assert (tkind == GOMP_MAP_USE_DEVICE_PTR);
12456
12457 if (OMP_CLAUSE_USE_DEVICE_PTR_IF_PRESENT (c))
12458 tkind = GOMP_MAP_USE_DEVICE_PTR_IF_PRESENT;
12459 }
12460
12461 type = TREE_TYPE (ovar);
12462 if (lang_hooks.decls.omp_array_data (ovar, true))
12463 var = lang_hooks.decls.omp_array_data (ovar, false);
12464 else if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_USE_DEVICE_ADDR
12465 && !omp_is_reference (ovar)
12466 && !omp_is_allocatable_or_ptr (ovar))
12467 || TREE_CODE (type) == ARRAY_TYPE)
12468 var = build_fold_addr_expr (var);
12469 else
12470 {
12471 if (omp_is_reference (ovar)
12472 || omp_check_optional_argument (ovar, false)
12473 || omp_is_allocatable_or_ptr (ovar))
12474 {
12475 type = TREE_TYPE (type);
12476 if (TREE_CODE (type) != ARRAY_TYPE
12477 && ((OMP_CLAUSE_CODE (c) != OMP_CLAUSE_USE_DEVICE_ADDR
12478 && !omp_is_allocatable_or_ptr (ovar))
12479 || (omp_is_reference (ovar)
12480 && omp_is_allocatable_or_ptr (ovar))))
12481 var = build_simple_mem_ref (var);
12482 var = fold_convert (TREE_TYPE (x), var);
12483 }
12484 }
12485 tree present;
12486 present = omp_check_optional_argument (ovar, true);
12487 if (present)
12488 {
12489 tree null_label = create_artificial_label (UNKNOWN_LOCATION);
12490 tree notnull_label = create_artificial_label (UNKNOWN_LOCATION);
12491 tree opt_arg_label = create_artificial_label (UNKNOWN_LOCATION);
12492 tree new_x = unshare_expr (x);
12493 gimplify_expr (&present, &ilist, NULL, is_gimple_val,
12494 fb_rvalue);
12495 gcond *cond = gimple_build_cond_from_tree (present,
12496 notnull_label,
12497 null_label);
12498 gimple_seq_add_stmt (&ilist, cond);
12499 gimple_seq_add_stmt (&ilist, gimple_build_label (null_label));
12500 gimplify_assign (new_x, null_pointer_node, &ilist);
12501 gimple_seq_add_stmt (&ilist, gimple_build_goto (opt_arg_label));
12502 gimple_seq_add_stmt (&ilist,
12503 gimple_build_label (notnull_label));
12504 gimplify_assign (x, var, &ilist);
12505 gimple_seq_add_stmt (&ilist,
12506 gimple_build_label (opt_arg_label));
12507 }
12508 else
12509 gimplify_assign (x, var, &ilist);
12510 s = size_int (0);
12511 purpose = size_int (map_idx++);
12512 CONSTRUCTOR_APPEND_ELT (vsize, purpose, s);
12513 gcc_checking_assert (tkind
12514 < (HOST_WIDE_INT_C (1U) << talign_shift));
12515 gcc_checking_assert (tkind
12516 <= tree_to_uhwi (TYPE_MAX_VALUE (tkind_type)));
12517 CONSTRUCTOR_APPEND_ELT (vkind, purpose,
12518 build_int_cstu (tkind_type, tkind));
12519 break;
12520 }
12521
12522 gcc_assert (map_idx == map_cnt);
12523
12524 DECL_INITIAL (TREE_VEC_ELT (t, 1))
12525 = build_constructor (TREE_TYPE (TREE_VEC_ELT (t, 1)), vsize);
12526 DECL_INITIAL (TREE_VEC_ELT (t, 2))
12527 = build_constructor (TREE_TYPE (TREE_VEC_ELT (t, 2)), vkind);
12528 for (int i = 1; i <= 2; i++)
12529 if (!TREE_STATIC (TREE_VEC_ELT (t, i)))
12530 {
12531 gimple_seq initlist = NULL;
12532 force_gimple_operand (build1 (DECL_EXPR, void_type_node,
12533 TREE_VEC_ELT (t, i)),
12534 &initlist, true, NULL_TREE);
12535 gimple_seq_add_seq (&ilist, initlist);
12536
12537 tree clobber = build_clobber (TREE_TYPE (TREE_VEC_ELT (t, i)));
12538 gimple_seq_add_stmt (&olist,
12539 gimple_build_assign (TREE_VEC_ELT (t, i),
12540 clobber));
12541 }
12542
12543 tree clobber = build_clobber (ctx->record_type);
12544 gimple_seq_add_stmt (&olist, gimple_build_assign (ctx->sender_decl,
12545 clobber));
12546 }
12547
12548 /* Once all the expansions are done, sequence all the different
12549 fragments inside gimple_omp_body. */
12550
12551 new_body = NULL;
12552
12553 if (offloaded
12554 && ctx->record_type)
12555 {
12556 t = build_fold_addr_expr_loc (loc, ctx->sender_decl);
12557 /* fixup_child_record_type might have changed receiver_decl's type. */
12558 t = fold_convert_loc (loc, TREE_TYPE (ctx->receiver_decl), t);
12559 gimple_seq_add_stmt (&new_body,
12560 gimple_build_assign (ctx->receiver_decl, t));
12561 }
12562 gimple_seq_add_seq (&new_body, fplist);
12563
12564 if (offloaded || data_region)
12565 {
12566 tree prev = NULL_TREE;
12567 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
12568 switch (OMP_CLAUSE_CODE (c))
12569 {
12570 tree var, x;
12571 default:
12572 break;
12573 case OMP_CLAUSE_FIRSTPRIVATE:
12574 if (is_gimple_omp_oacc (ctx->stmt))
12575 break;
12576 var = OMP_CLAUSE_DECL (c);
12577 if (omp_is_reference (var)
12578 || is_gimple_reg_type (TREE_TYPE (var)))
12579 {
12580 tree new_var = lookup_decl (var, ctx);
12581 tree type;
12582 type = TREE_TYPE (var);
12583 if (omp_is_reference (var))
12584 type = TREE_TYPE (type);
12585 if ((INTEGRAL_TYPE_P (type)
12586 && TYPE_PRECISION (type) <= POINTER_SIZE)
12587 || TREE_CODE (type) == POINTER_TYPE)
12588 {
12589 x = build_receiver_ref (var, false, ctx);
12590 if (TREE_CODE (type) != POINTER_TYPE)
12591 x = fold_convert (pointer_sized_int_node, x);
12592 x = fold_convert (type, x);
12593 gimplify_expr (&x, &new_body, NULL, is_gimple_val,
12594 fb_rvalue);
12595 if (omp_is_reference (var))
12596 {
12597 tree v = create_tmp_var_raw (type, get_name (var));
12598 gimple_add_tmp_var (v);
12599 TREE_ADDRESSABLE (v) = 1;
12600 gimple_seq_add_stmt (&new_body,
12601 gimple_build_assign (v, x));
12602 x = build_fold_addr_expr (v);
12603 }
12604 gimple_seq_add_stmt (&new_body,
12605 gimple_build_assign (new_var, x));
12606 }
12607 else
12608 {
12609 x = build_receiver_ref (var, !omp_is_reference (var), ctx);
12610 gimplify_expr (&x, &new_body, NULL, is_gimple_val,
12611 fb_rvalue);
12612 gimple_seq_add_stmt (&new_body,
12613 gimple_build_assign (new_var, x));
12614 }
12615 }
12616 else if (is_variable_sized (var))
12617 {
12618 tree pvar = DECL_VALUE_EXPR (var);
12619 gcc_assert (TREE_CODE (pvar) == INDIRECT_REF);
12620 pvar = TREE_OPERAND (pvar, 0);
12621 gcc_assert (DECL_P (pvar));
12622 tree new_var = lookup_decl (pvar, ctx);
12623 x = build_receiver_ref (var, false, ctx);
12624 gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
12625 gimple_seq_add_stmt (&new_body,
12626 gimple_build_assign (new_var, x));
12627 }
12628 break;
12629 case OMP_CLAUSE_PRIVATE:
12630 if (is_gimple_omp_oacc (ctx->stmt))
12631 break;
12632 var = OMP_CLAUSE_DECL (c);
12633 if (omp_is_reference (var))
12634 {
12635 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
12636 tree new_var = lookup_decl (var, ctx);
12637 x = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (new_var)));
12638 if (TREE_CONSTANT (x))
12639 {
12640 x = create_tmp_var_raw (TREE_TYPE (TREE_TYPE (new_var)),
12641 get_name (var));
12642 gimple_add_tmp_var (x);
12643 TREE_ADDRESSABLE (x) = 1;
12644 x = build_fold_addr_expr_loc (clause_loc, x);
12645 }
12646 else
12647 break;
12648
12649 x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x);
12650 gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
12651 gimple_seq_add_stmt (&new_body,
12652 gimple_build_assign (new_var, x));
12653 }
12654 break;
12655 case OMP_CLAUSE_USE_DEVICE_PTR:
12656 case OMP_CLAUSE_USE_DEVICE_ADDR:
12657 case OMP_CLAUSE_IS_DEVICE_PTR:
12658 tree new_var;
12659 gimple_seq assign_body;
12660 bool is_array_data;
12661 bool do_optional_check;
12662 assign_body = NULL;
12663 do_optional_check = false;
12664 var = OMP_CLAUSE_DECL (c);
12665 is_array_data = lang_hooks.decls.omp_array_data (var, true) != NULL;
12666
12667 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_IS_DEVICE_PTR)
12668 x = build_sender_ref (is_array_data
12669 ? (splay_tree_key) &DECL_NAME (var)
12670 : (splay_tree_key) &DECL_UID (var), ctx);
12671 else
12672 x = build_receiver_ref (var, false, ctx);
12673
12674 if (is_array_data)
12675 {
12676 bool is_ref = omp_is_reference (var);
12677 do_optional_check = true;
12678 /* First, we copy the descriptor data from the host; then
12679 we update its data to point to the target address. */
12680 new_var = lookup_decl (var, ctx);
12681 new_var = DECL_VALUE_EXPR (new_var);
12682 tree v = new_var;
12683
12684 if (is_ref)
12685 {
12686 var = build_fold_indirect_ref (var);
12687 gimplify_expr (&var, &assign_body, NULL, is_gimple_val,
12688 fb_rvalue);
12689 v = create_tmp_var_raw (TREE_TYPE (var), get_name (var));
12690 gimple_add_tmp_var (v);
12691 TREE_ADDRESSABLE (v) = 1;
12692 gimple_seq_add_stmt (&assign_body,
12693 gimple_build_assign (v, var));
12694 tree rhs = build_fold_addr_expr (v);
12695 gimple_seq_add_stmt (&assign_body,
12696 gimple_build_assign (new_var, rhs));
12697 }
12698 else
12699 gimple_seq_add_stmt (&assign_body,
12700 gimple_build_assign (new_var, var));
12701
12702 tree v2 = lang_hooks.decls.omp_array_data (unshare_expr (v), false);
12703 gcc_assert (v2);
12704 gimplify_expr (&x, &assign_body, NULL, is_gimple_val, fb_rvalue);
12705 gimple_seq_add_stmt (&assign_body,
12706 gimple_build_assign (v2, x));
12707 }
12708 else if (is_variable_sized (var))
12709 {
12710 tree pvar = DECL_VALUE_EXPR (var);
12711 gcc_assert (TREE_CODE (pvar) == INDIRECT_REF);
12712 pvar = TREE_OPERAND (pvar, 0);
12713 gcc_assert (DECL_P (pvar));
12714 new_var = lookup_decl (pvar, ctx);
12715 gimplify_expr (&x, &assign_body, NULL, is_gimple_val, fb_rvalue);
12716 gimple_seq_add_stmt (&assign_body,
12717 gimple_build_assign (new_var, x));
12718 }
12719 else if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_USE_DEVICE_ADDR
12720 && !omp_is_reference (var)
12721 && !omp_is_allocatable_or_ptr (var))
12722 || TREE_CODE (TREE_TYPE (var)) == ARRAY_TYPE)
12723 {
12724 new_var = lookup_decl (var, ctx);
12725 new_var = DECL_VALUE_EXPR (new_var);
12726 gcc_assert (TREE_CODE (new_var) == MEM_REF);
12727 new_var = TREE_OPERAND (new_var, 0);
12728 gcc_assert (DECL_P (new_var));
12729 gimplify_expr (&x, &assign_body, NULL, is_gimple_val, fb_rvalue);
12730 gimple_seq_add_stmt (&assign_body,
12731 gimple_build_assign (new_var, x));
12732 }
12733 else
12734 {
12735 tree type = TREE_TYPE (var);
12736 new_var = lookup_decl (var, ctx);
12737 if (omp_is_reference (var))
12738 {
12739 type = TREE_TYPE (type);
12740 if (TREE_CODE (type) != ARRAY_TYPE
12741 && (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_USE_DEVICE_ADDR
12742 || (omp_is_reference (var)
12743 && omp_is_allocatable_or_ptr (var))))
12744 {
12745 tree v = create_tmp_var_raw (type, get_name (var));
12746 gimple_add_tmp_var (v);
12747 TREE_ADDRESSABLE (v) = 1;
12748 x = fold_convert (type, x);
12749 gimplify_expr (&x, &assign_body, NULL, is_gimple_val,
12750 fb_rvalue);
12751 gimple_seq_add_stmt (&assign_body,
12752 gimple_build_assign (v, x));
12753 x = build_fold_addr_expr (v);
12754 do_optional_check = true;
12755 }
12756 }
12757 new_var = DECL_VALUE_EXPR (new_var);
12758 x = fold_convert (TREE_TYPE (new_var), x);
12759 gimplify_expr (&x, &assign_body, NULL, is_gimple_val, fb_rvalue);
12760 gimple_seq_add_stmt (&assign_body,
12761 gimple_build_assign (new_var, x));
12762 }
12763 tree present;
12764 present = (do_optional_check
12765 ? omp_check_optional_argument (OMP_CLAUSE_DECL (c), true)
12766 : NULL_TREE);
12767 if (present)
12768 {
12769 tree null_label = create_artificial_label (UNKNOWN_LOCATION);
12770 tree notnull_label = create_artificial_label (UNKNOWN_LOCATION);
12771 tree opt_arg_label = create_artificial_label (UNKNOWN_LOCATION);
12772 glabel *null_glabel = gimple_build_label (null_label);
12773 glabel *notnull_glabel = gimple_build_label (notnull_label);
12774 ggoto *opt_arg_ggoto = gimple_build_goto (opt_arg_label);
12775 gimplify_expr (&x, &new_body, NULL, is_gimple_val,
12776 fb_rvalue);
12777 gimplify_expr (&present, &new_body, NULL, is_gimple_val,
12778 fb_rvalue);
12779 gcond *cond = gimple_build_cond_from_tree (present,
12780 notnull_label,
12781 null_label);
12782 gimple_seq_add_stmt (&new_body, cond);
12783 gimple_seq_add_stmt (&new_body, null_glabel);
12784 gimplify_assign (new_var, null_pointer_node, &new_body);
12785 gimple_seq_add_stmt (&new_body, opt_arg_ggoto);
12786 gimple_seq_add_stmt (&new_body, notnull_glabel);
12787 gimple_seq_add_seq (&new_body, assign_body);
12788 gimple_seq_add_stmt (&new_body,
12789 gimple_build_label (opt_arg_label));
12790 }
12791 else
12792 gimple_seq_add_seq (&new_body, assign_body);
12793 break;
12794 }
12795 /* Handle GOMP_MAP_FIRSTPRIVATE_{POINTER,REFERENCE} in second pass,
12796 so that firstprivate vars holding OMP_CLAUSE_SIZE if needed
12797 are already handled. Similarly OMP_CLAUSE_PRIVATE for VLAs
12798 or references to VLAs. */
12799 for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
12800 switch (OMP_CLAUSE_CODE (c))
12801 {
12802 tree var;
12803 default:
12804 break;
12805 case OMP_CLAUSE_MAP:
12806 if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER
12807 || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_REFERENCE)
12808 {
12809 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
12810 poly_int64 offset = 0;
12811 gcc_assert (prev);
12812 var = OMP_CLAUSE_DECL (c);
12813 if (DECL_P (var)
12814 && TREE_CODE (TREE_TYPE (var)) == ARRAY_TYPE
12815 && is_global_var (maybe_lookup_decl_in_outer_ctx (var,
12816 ctx))
12817 && varpool_node::get_create (var)->offloadable)
12818 break;
12819 if (TREE_CODE (var) == INDIRECT_REF
12820 && TREE_CODE (TREE_OPERAND (var, 0)) == COMPONENT_REF)
12821 var = TREE_OPERAND (var, 0);
12822 if (TREE_CODE (var) == COMPONENT_REF)
12823 {
12824 var = get_addr_base_and_unit_offset (var, &offset);
12825 gcc_assert (var != NULL_TREE && DECL_P (var));
12826 }
12827 else if (DECL_SIZE (var)
12828 && TREE_CODE (DECL_SIZE (var)) != INTEGER_CST)
12829 {
12830 tree var2 = DECL_VALUE_EXPR (var);
12831 gcc_assert (TREE_CODE (var2) == INDIRECT_REF);
12832 var2 = TREE_OPERAND (var2, 0);
12833 gcc_assert (DECL_P (var2));
12834 var = var2;
12835 }
12836 tree new_var = lookup_decl (var, ctx), x;
12837 tree type = TREE_TYPE (new_var);
12838 bool is_ref;
12839 if (TREE_CODE (OMP_CLAUSE_DECL (c)) == INDIRECT_REF
12840 && (TREE_CODE (TREE_OPERAND (OMP_CLAUSE_DECL (c), 0))
12841 == COMPONENT_REF))
12842 {
12843 type = TREE_TYPE (TREE_OPERAND (OMP_CLAUSE_DECL (c), 0));
12844 is_ref = true;
12845 new_var = build2 (MEM_REF, type,
12846 build_fold_addr_expr (new_var),
12847 build_int_cst (build_pointer_type (type),
12848 offset));
12849 }
12850 else if (TREE_CODE (OMP_CLAUSE_DECL (c)) == COMPONENT_REF)
12851 {
12852 type = TREE_TYPE (OMP_CLAUSE_DECL (c));
12853 is_ref = TREE_CODE (type) == REFERENCE_TYPE;
12854 new_var = build2 (MEM_REF, type,
12855 build_fold_addr_expr (new_var),
12856 build_int_cst (build_pointer_type (type),
12857 offset));
12858 }
12859 else
12860 is_ref = omp_is_reference (var);
12861 if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_REFERENCE)
12862 is_ref = false;
12863 bool ref_to_array = false;
12864 if (is_ref)
12865 {
12866 type = TREE_TYPE (type);
12867 if (TREE_CODE (type) == ARRAY_TYPE)
12868 {
12869 type = build_pointer_type (type);
12870 ref_to_array = true;
12871 }
12872 }
12873 else if (TREE_CODE (type) == ARRAY_TYPE)
12874 {
12875 tree decl2 = DECL_VALUE_EXPR (new_var);
12876 gcc_assert (TREE_CODE (decl2) == MEM_REF);
12877 decl2 = TREE_OPERAND (decl2, 0);
12878 gcc_assert (DECL_P (decl2));
12879 new_var = decl2;
12880 type = TREE_TYPE (new_var);
12881 }
12882 x = build_receiver_ref (OMP_CLAUSE_DECL (prev), false, ctx);
12883 x = fold_convert_loc (clause_loc, type, x);
12884 if (!integer_zerop (OMP_CLAUSE_SIZE (c)))
12885 {
12886 tree bias = OMP_CLAUSE_SIZE (c);
12887 if (DECL_P (bias))
12888 bias = lookup_decl (bias, ctx);
12889 bias = fold_convert_loc (clause_loc, sizetype, bias);
12890 bias = fold_build1_loc (clause_loc, NEGATE_EXPR, sizetype,
12891 bias);
12892 x = fold_build2_loc (clause_loc, POINTER_PLUS_EXPR,
12893 TREE_TYPE (x), x, bias);
12894 }
12895 if (ref_to_array)
12896 x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x);
12897 gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
12898 if (is_ref && !ref_to_array)
12899 {
12900 tree t = create_tmp_var_raw (type, get_name (var));
12901 gimple_add_tmp_var (t);
12902 TREE_ADDRESSABLE (t) = 1;
12903 gimple_seq_add_stmt (&new_body,
12904 gimple_build_assign (t, x));
12905 x = build_fold_addr_expr_loc (clause_loc, t);
12906 }
12907 gimple_seq_add_stmt (&new_body,
12908 gimple_build_assign (new_var, x));
12909 prev = NULL_TREE;
12910 }
12911 else if (OMP_CLAUSE_CHAIN (c)
12912 && OMP_CLAUSE_CODE (OMP_CLAUSE_CHAIN (c))
12913 == OMP_CLAUSE_MAP
12914 && (OMP_CLAUSE_MAP_KIND (OMP_CLAUSE_CHAIN (c))
12915 == GOMP_MAP_FIRSTPRIVATE_POINTER
12916 || (OMP_CLAUSE_MAP_KIND (OMP_CLAUSE_CHAIN (c))
12917 == GOMP_MAP_FIRSTPRIVATE_REFERENCE)))
12918 prev = c;
12919 break;
12920 case OMP_CLAUSE_PRIVATE:
12921 var = OMP_CLAUSE_DECL (c);
12922 if (is_variable_sized (var))
12923 {
12924 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
12925 tree new_var = lookup_decl (var, ctx);
12926 tree pvar = DECL_VALUE_EXPR (var);
12927 gcc_assert (TREE_CODE (pvar) == INDIRECT_REF);
12928 pvar = TREE_OPERAND (pvar, 0);
12929 gcc_assert (DECL_P (pvar));
12930 tree new_pvar = lookup_decl (pvar, ctx);
12931 tree atmp = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
12932 tree al = size_int (DECL_ALIGN (var));
12933 tree x = TYPE_SIZE_UNIT (TREE_TYPE (new_var));
12934 x = build_call_expr_loc (clause_loc, atmp, 2, x, al);
12935 x = fold_convert_loc (clause_loc, TREE_TYPE (new_pvar), x);
12936 gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
12937 gimple_seq_add_stmt (&new_body,
12938 gimple_build_assign (new_pvar, x));
12939 }
12940 else if (omp_is_reference (var) && !is_gimple_omp_oacc (ctx->stmt))
12941 {
12942 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
12943 tree new_var = lookup_decl (var, ctx);
12944 tree x = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (new_var)));
12945 if (TREE_CONSTANT (x))
12946 break;
12947 else
12948 {
12949 tree atmp
12950 = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
12951 tree rtype = TREE_TYPE (TREE_TYPE (new_var));
12952 tree al = size_int (TYPE_ALIGN (rtype));
12953 x = build_call_expr_loc (clause_loc, atmp, 2, x, al);
12954 }
12955
12956 x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x);
12957 gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
12958 gimple_seq_add_stmt (&new_body,
12959 gimple_build_assign (new_var, x));
12960 }
12961 break;
12962 }
12963
12964 gimple_seq fork_seq = NULL;
12965 gimple_seq join_seq = NULL;
12966
12967 if (offloaded && is_gimple_omp_oacc (ctx->stmt))
12968 {
12969 /* If there are reductions on the offloaded region itself, treat
12970 them as a dummy GANG loop. */
12971 tree level = build_int_cst (integer_type_node, GOMP_DIM_GANG);
12972
12973 lower_oacc_reductions (gimple_location (ctx->stmt), clauses, level,
12974 false, NULL, NULL, &fork_seq, &join_seq, ctx);
12975 }
12976
12977 gimple_seq_add_seq (&new_body, fork_seq);
12978 gimple_seq_add_seq (&new_body, tgt_body);
12979 gimple_seq_add_seq (&new_body, join_seq);
12980
12981 if (offloaded)
12982 {
12983 new_body = maybe_catch_exception (new_body);
12984 gimple_seq_add_stmt (&new_body, gimple_build_omp_return (false));
12985 }
12986 gimple_omp_set_body (stmt, new_body);
12987 }
12988
12989 bind = gimple_build_bind (NULL, NULL,
12990 tgt_bind ? gimple_bind_block (tgt_bind)
12991 : NULL_TREE);
12992 gsi_replace (gsi_p, dep_bind ? dep_bind : bind, true);
12993 gimple_bind_add_seq (bind, ilist);
12994 gimple_bind_add_stmt (bind, stmt);
12995 gimple_bind_add_seq (bind, olist);
12996
12997 pop_gimplify_context (NULL);
12998
12999 if (dep_bind)
13000 {
13001 gimple_bind_add_seq (dep_bind, dep_ilist);
13002 gimple_bind_add_stmt (dep_bind, bind);
13003 gimple_bind_add_seq (dep_bind, dep_olist);
13004 pop_gimplify_context (dep_bind);
13005 }
13006 }
13007
13008 /* Expand code for an OpenMP teams directive. */
13009
static void
lower_omp_teams (gimple_stmt_iterator *gsi_p, omp_context *ctx)
{
  gomp_teams *teams_stmt = as_a <gomp_teams *> (gsi_stmt (*gsi_p));
  push_gimplify_context ();

  /* Replace the teams statement at *GSI_P with a new GIMPLE_BIND; the
     teams statement itself is re-added into the bind body below.  */
  tree block = make_node (BLOCK);
  gbind *bind = gimple_build_bind (NULL, NULL, block);
  gsi_replace (gsi_p, bind, true);
  gimple_seq bind_body = NULL;
  gimple_seq dlist = NULL;
  gimple_seq olist = NULL;

  /* Evaluate the num_teams clause expression; 0 when the clause is
     absent (meaning implementation-defined).  */
  tree num_teams = omp_find_clause (gimple_omp_teams_clauses (teams_stmt),
				    OMP_CLAUSE_NUM_TEAMS);
  if (num_teams == NULL_TREE)
    num_teams = build_int_cst (unsigned_type_node, 0);
  else
    {
      num_teams = OMP_CLAUSE_NUM_TEAMS_EXPR (num_teams);
      num_teams = fold_convert (unsigned_type_node, num_teams);
      gimplify_expr (&num_teams, &bind_body, NULL, is_gimple_val, fb_rvalue);
    }
  /* Likewise for the thread_limit clause.  */
  tree thread_limit = omp_find_clause (gimple_omp_teams_clauses (teams_stmt),
				       OMP_CLAUSE_THREAD_LIMIT);
  if (thread_limit == NULL_TREE)
    thread_limit = build_int_cst (unsigned_type_node, 0);
  else
    {
      thread_limit = OMP_CLAUSE_THREAD_LIMIT_EXPR (thread_limit);
      thread_limit = fold_convert (unsigned_type_node, thread_limit);
      gimplify_expr (&thread_limit, &bind_body, NULL, is_gimple_val,
		     fb_rvalue);
    }

  /* Lower the data-sharing clauses and the body itself.  */
  lower_rec_input_clauses (gimple_omp_teams_clauses (teams_stmt),
			   &bind_body, &dlist, ctx, NULL);
  lower_omp (gimple_omp_body_ptr (teams_stmt), ctx);
  lower_reduction_clauses (gimple_omp_teams_clauses (teams_stmt), &olist,
			   NULL, ctx);
  gimple_seq_add_stmt (&bind_body, teams_stmt);

  /* Emit the GOMP_teams (num_teams, thread_limit) runtime call.  */
  location_t loc = gimple_location (teams_stmt);
  tree decl = builtin_decl_explicit (BUILT_IN_GOMP_TEAMS);
  gimple *call = gimple_build_call (decl, 2, num_teams, thread_limit);
  gimple_set_location (call, loc);
  gimple_seq_add_stmt (&bind_body, call);

  /* Sequence: body, reduction epilogue, destructor list, OMP_RETURN.  */
  gimple_seq_add_seq (&bind_body, gimple_omp_body (teams_stmt));
  gimple_omp_set_body (teams_stmt, NULL);
  gimple_seq_add_seq (&bind_body, olist);
  gimple_seq_add_seq (&bind_body, dlist);
  gimple_seq_add_stmt (&bind_body, gimple_build_omp_return (true));
  gimple_bind_set_body (bind, bind_body);

  pop_gimplify_context (bind);

  gimple_bind_append_vars (bind, ctx->block_vars);
  BLOCK_VARS (block) = ctx->block_vars;
  if (BLOCK_VARS (block))
    TREE_USED (block) = 1;
}
13072
13073 /* Callback for lower_omp_1. Return non-NULL if *tp needs to be
13074 regimplified. If DATA is non-NULL, lower_omp_1 is outside
13075 of OMP context, but with task_shared_vars set. */
13076
13077 static tree
13078 lower_omp_regimplify_p (tree *tp, int *walk_subtrees,
13079 void *data)
13080 {
13081 tree t = *tp;
13082
13083 /* Any variable with DECL_VALUE_EXPR needs to be regimplified. */
13084 if (VAR_P (t) && data == NULL && DECL_HAS_VALUE_EXPR_P (t))
13085 return t;
13086
13087 if (task_shared_vars
13088 && DECL_P (t)
13089 && bitmap_bit_p (task_shared_vars, DECL_UID (t)))
13090 return t;
13091
13092 /* If a global variable has been privatized, TREE_CONSTANT on
13093 ADDR_EXPR might be wrong. */
13094 if (data == NULL && TREE_CODE (t) == ADDR_EXPR)
13095 recompute_tree_invariant_for_addr_expr (t);
13096
13097 *walk_subtrees = !IS_TYPE_OR_DECL_P (t);
13098 return NULL_TREE;
13099 }
13100
13101 /* Data to be communicated between lower_omp_regimplify_operands and
13102 lower_omp_regimplify_operands_p. */
13103
struct lower_omp_regimplify_operands_data
{
  /* OMP context in which the statement being regimplified appears.  */
  omp_context *ctx;
  /* Flat vector of (saved DECL_VALUE_EXPR, var) pairs so the original
     DECL_VALUE_EXPRs can be restored after regimplification.  */
  vec<tree> *decls;
};
13109
13110 /* Helper function for lower_omp_regimplify_operands. Find
13111 omp_member_access_dummy_var vars and adjust temporarily their
13112 DECL_VALUE_EXPRs if needed. */
13113
static tree
lower_omp_regimplify_operands_p (tree *tp, int *walk_subtrees,
				 void *data)
{
  tree t = omp_member_access_dummy_var (*tp);
  if (t)
    {
      struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
      lower_omp_regimplify_operands_data *ldata
	= (lower_omp_regimplify_operands_data *) wi->info;
      tree o = maybe_lookup_decl (t, ldata->ctx);
      if (o != t)
	{
	  /* Save the original DECL_VALUE_EXPR together with the var so
	     lower_omp_regimplify_operands can restore it afterwards;
	     the pair is pushed as (value-expr, var).  */
	  ldata->decls->safe_push (DECL_VALUE_EXPR (*tp));
	  ldata->decls->safe_push (*tp);
	  /* Temporarily remap T to its lookup O inside the value expr.  */
	  tree v = unshare_and_remap (DECL_VALUE_EXPR (*tp), t, o);
	  SET_DECL_VALUE_EXPR (*tp, v);
	}
    }
  /* Don't walk into types or other decls.  */
  *walk_subtrees = !IS_TYPE_OR_DECL_P (*tp);
  return NULL_TREE;
}
13136
13137 /* Wrapper around gimple_regimplify_operands that adjusts DECL_VALUE_EXPRs
13138 of omp_member_access_dummy_var vars during regimplification. */
13139
static void
lower_omp_regimplify_operands (omp_context *ctx, gimple *stmt,
			       gimple_stmt_iterator *gsi_p)
{
  /* Filled by lower_omp_regimplify_operands_p with (value-expr, var)
     pairs whose DECL_VALUE_EXPRs were temporarily adjusted.  */
  auto_vec<tree, 10> decls;
  if (ctx)
    {
      struct walk_stmt_info wi;
      memset (&wi, '\0', sizeof (wi));
      struct lower_omp_regimplify_operands_data data;
      data.ctx = ctx;
      data.decls = &decls;
      wi.info = &data;
      walk_gimple_op (stmt, lower_omp_regimplify_operands_p, &wi);
    }
  gimple_regimplify_operands (stmt, gsi_p);
  /* Restore the original DECL_VALUE_EXPRs.  Pairs were pushed as
     (value-expr, var), so the var comes off the stack first.  */
  while (!decls.is_empty ())
    {
      tree t = decls.pop ();
      tree v = decls.pop ();
      SET_DECL_VALUE_EXPR (t, v);
    }
}
13163
/* Lower the single statement at *GSI_P inside OMP context CTX (NULL when
   outside any OMP construct).  Dispatches to the construct-specific
   lowering routines and regimplifies operands that refer to remapped or
   task-shared variables.  */

static void
lower_omp_1 (gimple_stmt_iterator *gsi_p, omp_context *ctx)
{
  gimple *stmt = gsi_stmt (*gsi_p);
  struct walk_stmt_info wi;
  gcall *call_stmt;

  if (gimple_has_location (stmt))
    input_location = gimple_location (stmt);

  /* WI is only consulted (passed to lower_omp_regimplify_p) when CTX is
     NULL but task_shared_vars is set.  */
  if (task_shared_vars)
    memset (&wi, '\0', sizeof (wi));

  /* If we have issued syntax errors, avoid doing any heavy lifting.
     Just replace the OMP directives with a NOP to avoid
     confusing RTL expansion.  */
  if (seen_error () && is_gimple_omp (stmt))
    {
      gsi_replace (gsi_p, gimple_build_nop (), true);
      return;
    }

  switch (gimple_code (stmt))
    {
    case GIMPLE_COND:
      {
	gcond *cond_stmt = as_a <gcond *> (stmt);
	if ((ctx || task_shared_vars)
	    && (walk_tree (gimple_cond_lhs_ptr (cond_stmt),
			   lower_omp_regimplify_p,
			   ctx ? NULL : &wi, NULL)
		|| walk_tree (gimple_cond_rhs_ptr (cond_stmt),
			      lower_omp_regimplify_p,
			      ctx ? NULL : &wi, NULL)))
	  lower_omp_regimplify_operands (ctx, cond_stmt, gsi_p);
      }
      break;
    case GIMPLE_CATCH:
      lower_omp (gimple_catch_handler_ptr (as_a <gcatch *> (stmt)), ctx);
      break;
    case GIMPLE_EH_FILTER:
      lower_omp (gimple_eh_filter_failure_ptr (stmt), ctx);
      break;
    case GIMPLE_TRY:
      lower_omp (gimple_try_eval_ptr (stmt), ctx);
      lower_omp (gimple_try_cleanup_ptr (stmt), ctx);
      break;
    case GIMPLE_TRANSACTION:
      lower_omp (gimple_transaction_body_ptr (as_a <gtransaction *> (stmt)),
		 ctx);
      break;
    case GIMPLE_BIND:
      lower_omp (gimple_bind_body_ptr (as_a <gbind *> (stmt)), ctx);
      maybe_remove_omp_member_access_dummy_vars (as_a <gbind *> (stmt));
      break;
    case GIMPLE_OMP_PARALLEL:
    case GIMPLE_OMP_TASK:
      ctx = maybe_lookup_ctx (stmt);
      gcc_assert (ctx);
      if (ctx->cancellable)
	ctx->cancel_label = create_artificial_label (UNKNOWN_LOCATION);
      lower_omp_taskreg (gsi_p, ctx);
      break;
    case GIMPLE_OMP_FOR:
      ctx = maybe_lookup_ctx (stmt);
      gcc_assert (ctx);
      if (ctx->cancellable)
	ctx->cancel_label = create_artificial_label (UNKNOWN_LOCATION);
      lower_omp_for (gsi_p, ctx);
      break;
    case GIMPLE_OMP_SECTIONS:
      ctx = maybe_lookup_ctx (stmt);
      gcc_assert (ctx);
      if (ctx->cancellable)
	ctx->cancel_label = create_artificial_label (UNKNOWN_LOCATION);
      lower_omp_sections (gsi_p, ctx);
      break;
    case GIMPLE_OMP_SINGLE:
      ctx = maybe_lookup_ctx (stmt);
      gcc_assert (ctx);
      lower_omp_single (gsi_p, ctx);
      break;
    case GIMPLE_OMP_MASTER:
      ctx = maybe_lookup_ctx (stmt);
      gcc_assert (ctx);
      lower_omp_master (gsi_p, ctx);
      break;
    case GIMPLE_OMP_TASKGROUP:
      ctx = maybe_lookup_ctx (stmt);
      gcc_assert (ctx);
      lower_omp_taskgroup (gsi_p, ctx);
      break;
    case GIMPLE_OMP_ORDERED:
      ctx = maybe_lookup_ctx (stmt);
      gcc_assert (ctx);
      lower_omp_ordered (gsi_p, ctx);
      break;
    case GIMPLE_OMP_SCAN:
      ctx = maybe_lookup_ctx (stmt);
      gcc_assert (ctx);
      lower_omp_scan (gsi_p, ctx);
      break;
    case GIMPLE_OMP_CRITICAL:
      ctx = maybe_lookup_ctx (stmt);
      gcc_assert (ctx);
      lower_omp_critical (gsi_p, ctx);
      break;
    case GIMPLE_OMP_ATOMIC_LOAD:
      if ((ctx || task_shared_vars)
	  && walk_tree (gimple_omp_atomic_load_rhs_ptr (
			  as_a <gomp_atomic_load *> (stmt)),
			lower_omp_regimplify_p, ctx ? NULL : &wi, NULL))
	lower_omp_regimplify_operands (ctx, stmt, gsi_p);
      break;
    case GIMPLE_OMP_TARGET:
      ctx = maybe_lookup_ctx (stmt);
      gcc_assert (ctx);
      lower_omp_target (gsi_p, ctx);
      break;
    case GIMPLE_OMP_TEAMS:
      ctx = maybe_lookup_ctx (stmt);
      gcc_assert (ctx);
      /* A host teams construct is lowered like parallel/task.  */
      if (gimple_omp_teams_host (as_a <gomp_teams *> (stmt)))
	lower_omp_taskreg (gsi_p, ctx);
      else
	lower_omp_teams (gsi_p, ctx);
      break;
    case GIMPLE_CALL:
      tree fndecl;
      call_stmt = as_a <gcall *> (stmt);
      fndecl = gimple_call_fndecl (call_stmt);
      /* Rewrite cancellation-related builtins so control can branch to
	 the enclosing construct's cancel label.  */
      if (fndecl
	  && fndecl_built_in_p (fndecl, BUILT_IN_NORMAL))
	switch (DECL_FUNCTION_CODE (fndecl))
	  {
	  case BUILT_IN_GOMP_BARRIER:
	    if (ctx == NULL)
	      break;
	    /* FALLTHRU */
	  case BUILT_IN_GOMP_CANCEL:
	  case BUILT_IN_GOMP_CANCELLATION_POINT:
	    omp_context *cctx;
	    cctx = ctx;
	    if (gimple_code (cctx->stmt) == GIMPLE_OMP_SECTION)
	      cctx = cctx->outer;
	    gcc_assert (gimple_call_lhs (call_stmt) == NULL_TREE);
	    if (!cctx->cancellable)
	      {
		/* No cancellation in scope: a cancellation point becomes
		   a no-op; barrier/cancel are left untouched.  */
		if (DECL_FUNCTION_CODE (fndecl)
		    == BUILT_IN_GOMP_CANCELLATION_POINT)
		  {
		    stmt = gimple_build_nop ();
		    gsi_replace (gsi_p, stmt, false);
		  }
		break;
	      }
	    if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_GOMP_BARRIER)
	      {
		fndecl = builtin_decl_explicit (BUILT_IN_GOMP_BARRIER_CANCEL);
		gimple_call_set_fndecl (call_stmt, fndecl);
		gimple_call_set_fntype (call_stmt, TREE_TYPE (fndecl));
	      }
	    /* Give the call an lhs and branch on it: nonzero means
	       cancellation was observed, jump to the cancel label.  */
	    tree lhs;
	    lhs = create_tmp_var (TREE_TYPE (TREE_TYPE (fndecl)));
	    gimple_call_set_lhs (call_stmt, lhs);
	    tree fallthru_label;
	    fallthru_label = create_artificial_label (UNKNOWN_LOCATION);
	    gimple *g;
	    g = gimple_build_label (fallthru_label);
	    gsi_insert_after (gsi_p, g, GSI_SAME_STMT);
	    g = gimple_build_cond (NE_EXPR, lhs,
				   fold_convert (TREE_TYPE (lhs),
						 boolean_false_node),
				   cctx->cancel_label, fallthru_label);
	    gsi_insert_after (gsi_p, g, GSI_SAME_STMT);
	    break;
	  default:
	    break;
	  }
      goto regimplify;

    case GIMPLE_ASSIGN:
      /* Update _condtemp_ iterator vars for lastprivate(conditional:)
	 when this assignment stores to a tracked decl.  */
      for (omp_context *up = ctx; up; up = up->outer)
	{
	  if (gimple_code (up->stmt) == GIMPLE_OMP_ORDERED
	      || gimple_code (up->stmt) == GIMPLE_OMP_CRITICAL
	      || gimple_code (up->stmt) == GIMPLE_OMP_TASKGROUP
	      || gimple_code (up->stmt) == GIMPLE_OMP_SECTION
	      || gimple_code (up->stmt) == GIMPLE_OMP_SCAN
	      || (gimple_code (up->stmt) == GIMPLE_OMP_TARGET
		  && (gimple_omp_target_kind (up->stmt)
		      == GF_OMP_TARGET_KIND_DATA)))
	    continue;
	  else if (!up->lastprivate_conditional_map)
	    break;
	  tree lhs = get_base_address (gimple_assign_lhs (stmt));
	  if (TREE_CODE (lhs) == MEM_REF
	      && DECL_P (TREE_OPERAND (lhs, 0))
	      && TREE_CODE (TREE_TYPE (TREE_OPERAND (lhs,
						     0))) == REFERENCE_TYPE)
	    lhs = TREE_OPERAND (lhs, 0);
	  if (DECL_P (lhs))
	    if (tree *v = up->lastprivate_conditional_map->get (lhs))
	      {
		tree clauses;
		if (up->combined_into_simd_safelen1)
		  {
		    up = up->outer;
		    if (gimple_code (up->stmt) == GIMPLE_OMP_SCAN)
		      up = up->outer;
		  }
		if (gimple_code (up->stmt) == GIMPLE_OMP_FOR)
		  clauses = gimple_omp_for_clauses (up->stmt);
		else
		  clauses = gimple_omp_sections_clauses (up->stmt);
		tree c = omp_find_clause (clauses, OMP_CLAUSE__CONDTEMP_);
		if (!OMP_CLAUSE__CONDTEMP__ITER (c))
		  c = omp_find_clause (OMP_CLAUSE_CHAIN (c),
				       OMP_CLAUSE__CONDTEMP_);
		gcc_assert (OMP_CLAUSE__CONDTEMP__ITER (c));
		gimple *g = gimple_build_assign (*v, OMP_CLAUSE_DECL (c));
		gsi_insert_after (gsi_p, g, GSI_SAME_STMT);
	      }
	}
      /* FALLTHRU */

    default:
    regimplify:
      if ((ctx || task_shared_vars)
	  && walk_gimple_op (stmt, lower_omp_regimplify_p,
			     ctx ? NULL : &wi))
	{
	  /* Just remove clobbers, this should happen only if we have
	     "privatized" local addressable variables in SIMD regions,
	     the clobber isn't needed in that case and gimplifying address
	     of the ARRAY_REF into a pointer and creating MEM_REF based
	     clobber would create worse code than we get with the clobber
	     dropped.  */
	  if (gimple_clobber_p (stmt))
	    {
	      gsi_replace (gsi_p, gimple_build_nop (), true);
	      break;
	    }
	  lower_omp_regimplify_operands (ctx, stmt, gsi_p);
	}
      break;
    }
}
13412
13413 static void
13414 lower_omp (gimple_seq *body, omp_context *ctx)
13415 {
13416 location_t saved_location = input_location;
13417 gimple_stmt_iterator gsi;
13418 for (gsi = gsi_start (*body); !gsi_end_p (gsi); gsi_next (&gsi))
13419 lower_omp_1 (&gsi, ctx);
13420 /* During gimplification, we haven't folded statments inside offloading
13421 or taskreg regions (gimplify.c:maybe_fold_stmt); do that now. */
13422 if (target_nesting_level || taskreg_nesting_level)
13423 for (gsi = gsi_start (*body); !gsi_end_p (gsi); gsi_next (&gsi))
13424 fold_stmt (&gsi);
13425 input_location = saved_location;
13426 }
13427
13428 /* Main entry point. */
13429
static unsigned int
execute_lower_omp (void)
{
  gimple_seq body;
  int i;
  omp_context *ctx;

  /* This pass always runs, to provide PROP_gimple_lomp.
     But often, there is nothing to do.  */
  if (flag_openacc == 0 && flag_openmp == 0
      && flag_openmp_simd == 0)
    return 0;

  all_contexts = splay_tree_new (splay_tree_compare_pointers, 0,
				 delete_omp_context);

  body = gimple_body (current_function_decl);

  /* First scan the function to build the omp_context tree, then finish
     the taskreg contexts that were deferred during scanning.  */
  scan_omp (&body, NULL);
  gcc_assert (taskreg_nesting_level == 0);
  FOR_EACH_VEC_ELT (taskreg_contexts, i, ctx)
    finish_taskreg_scan (ctx);
  taskreg_contexts.release ();

  /* Only lower when scanning actually found OMP constructs.  */
  if (all_contexts->root)
    {
      if (task_shared_vars)
	push_gimplify_context ();
      lower_omp (&body, NULL);
      if (task_shared_vars)
	pop_gimplify_context (NULL);
    }

  /* Release pass-global state.  */
  if (all_contexts)
    {
      splay_tree_delete (all_contexts);
      all_contexts = NULL;
    }
  BITMAP_FREE (task_shared_vars);
  BITMAP_FREE (global_nonaddressable_vars);

  /* If current function is a method, remove artificial dummy VAR_DECL created
     for non-static data member privatization, they aren't needed for
     debuginfo nor anything else, have been already replaced everywhere in the
     IL and cause problems with LTO.  */
  if (DECL_ARGUMENTS (current_function_decl)
      && DECL_ARTIFICIAL (DECL_ARGUMENTS (current_function_decl))
      && (TREE_CODE (TREE_TYPE (DECL_ARGUMENTS (current_function_decl)))
	  == POINTER_TYPE))
    remove_member_access_dummy_vars (DECL_INITIAL (current_function_decl));
  return 0;
}
13482
namespace {

/* Pass metadata for the "omplower" gimple pass; see pass_data in
   tree-pass.h for the meaning of each field.  */
const pass_data pass_data_lower_omp =
{
  GIMPLE_PASS, /* type */
  "omplower", /* name */
  OPTGROUP_OMP, /* optinfo_flags */
  TV_NONE, /* tv_id */
  PROP_gimple_any, /* properties_required */
  PROP_gimple_lomp | PROP_gimple_lomp_dev, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

/* Pass wrapper that simply forwards to execute_lower_omp.  */
class pass_lower_omp : public gimple_opt_pass
{
public:
  pass_lower_omp (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_lower_omp, ctxt)
  {}

  /* opt_pass methods: */
  virtual unsigned int execute (function *) { return execute_lower_omp (); }

}; // class pass_lower_omp

} // anon namespace
13511
/* Factory function for the omplower pass, called by the pass manager.  */

gimple_opt_pass *
make_pass_lower_omp (gcc::context *ctxt)
{
  return new pass_lower_omp (ctxt);
}
13517 \f
/* The following is a utility to diagnose structured block violations.  It is
   not part of the "omplower" pass, as that's invoked too late.  It
   should be invoked by the respective front ends after gimplification.  */

/* Map from each LABEL_DECL in the current function to the innermost OMP
   construct (a gimple stmt, or NULL for none) enclosing it.  Populated by
   diagnose_sb_1, consulted by diagnose_sb_2, and torn down per function in
   diagnose_omp_structured_block_errors.  */
static splay_tree all_labels;
13523
13524 /* Check for mismatched contexts and generate an error if needed. Return
13525 true if an error is detected. */
13526
13527 static bool
13528 diagnose_sb_0 (gimple_stmt_iterator *gsi_p,
13529 gimple *branch_ctx, gimple *label_ctx)
13530 {
13531 gcc_checking_assert (!branch_ctx || is_gimple_omp (branch_ctx));
13532 gcc_checking_assert (!label_ctx || is_gimple_omp (label_ctx));
13533
13534 if (label_ctx == branch_ctx)
13535 return false;
13536
13537 const char* kind = NULL;
13538
13539 if (flag_openacc)
13540 {
13541 if ((branch_ctx && is_gimple_omp_oacc (branch_ctx))
13542 || (label_ctx && is_gimple_omp_oacc (label_ctx)))
13543 {
13544 gcc_checking_assert (kind == NULL);
13545 kind = "OpenACC";
13546 }
13547 }
13548 if (kind == NULL)
13549 {
13550 gcc_checking_assert (flag_openmp || flag_openmp_simd);
13551 kind = "OpenMP";
13552 }
13553
13554 /* Previously we kept track of the label's entire context in diagnose_sb_[12]
13555 so we could traverse it and issue a correct "exit" or "enter" error
13556 message upon a structured block violation.
13557
13558 We built the context by building a list with tree_cons'ing, but there is
13559 no easy counterpart in gimple tuples. It seems like far too much work
13560 for issuing exit/enter error messages. If someone really misses the
13561 distinct error message... patches welcome. */
13562
13563 #if 0
13564 /* Try to avoid confusing the user by producing and error message
13565 with correct "exit" or "enter" verbiage. We prefer "exit"
13566 unless we can show that LABEL_CTX is nested within BRANCH_CTX. */
13567 if (branch_ctx == NULL)
13568 exit_p = false;
13569 else
13570 {
13571 while (label_ctx)
13572 {
13573 if (TREE_VALUE (label_ctx) == branch_ctx)
13574 {
13575 exit_p = false;
13576 break;
13577 }
13578 label_ctx = TREE_CHAIN (label_ctx);
13579 }
13580 }
13581
13582 if (exit_p)
13583 error ("invalid exit from %s structured block", kind);
13584 else
13585 error ("invalid entry to %s structured block", kind);
13586 #endif
13587
13588 /* If it's obvious we have an invalid entry, be specific about the error. */
13589 if (branch_ctx == NULL)
13590 error ("invalid entry to %s structured block", kind);
13591 else
13592 {
13593 /* Otherwise, be vague and lazy, but efficient. */
13594 error ("invalid branch to/from %s structured block", kind);
13595 }
13596
13597 gsi_replace (gsi_p, gimple_build_nop (), false);
13598 return true;
13599 }
13600
13601 /* Pass 1: Create a minimal tree of structured blocks, and record
13602 where each label is found. */
13603
13604 static tree
13605 diagnose_sb_1 (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
13606 struct walk_stmt_info *wi)
13607 {
13608 gimple *context = (gimple *) wi->info;
13609 gimple *inner_context;
13610 gimple *stmt = gsi_stmt (*gsi_p);
13611
13612 *handled_ops_p = true;
13613
13614 switch (gimple_code (stmt))
13615 {
13616 WALK_SUBSTMTS;
13617
13618 case GIMPLE_OMP_PARALLEL:
13619 case GIMPLE_OMP_TASK:
13620 case GIMPLE_OMP_SECTIONS:
13621 case GIMPLE_OMP_SINGLE:
13622 case GIMPLE_OMP_SECTION:
13623 case GIMPLE_OMP_MASTER:
13624 case GIMPLE_OMP_ORDERED:
13625 case GIMPLE_OMP_SCAN:
13626 case GIMPLE_OMP_CRITICAL:
13627 case GIMPLE_OMP_TARGET:
13628 case GIMPLE_OMP_TEAMS:
13629 case GIMPLE_OMP_TASKGROUP:
13630 /* The minimal context here is just the current OMP construct. */
13631 inner_context = stmt;
13632 wi->info = inner_context;
13633 walk_gimple_seq (gimple_omp_body (stmt), diagnose_sb_1, NULL, wi);
13634 wi->info = context;
13635 break;
13636
13637 case GIMPLE_OMP_FOR:
13638 inner_context = stmt;
13639 wi->info = inner_context;
13640 /* gimple_omp_for_{index,initial,final} are all DECLs; no need to
13641 walk them. */
13642 walk_gimple_seq (gimple_omp_for_pre_body (stmt),
13643 diagnose_sb_1, NULL, wi);
13644 walk_gimple_seq (gimple_omp_body (stmt), diagnose_sb_1, NULL, wi);
13645 wi->info = context;
13646 break;
13647
13648 case GIMPLE_LABEL:
13649 splay_tree_insert (all_labels,
13650 (splay_tree_key) gimple_label_label (
13651 as_a <glabel *> (stmt)),
13652 (splay_tree_value) context);
13653 break;
13654
13655 default:
13656 break;
13657 }
13658
13659 return NULL_TREE;
13660 }
13661
13662 /* Pass 2: Check each branch and see if its context differs from that of
13663 the destination label's context. */
13664
13665 static tree
13666 diagnose_sb_2 (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
13667 struct walk_stmt_info *wi)
13668 {
13669 gimple *context = (gimple *) wi->info;
13670 splay_tree_node n;
13671 gimple *stmt = gsi_stmt (*gsi_p);
13672
13673 *handled_ops_p = true;
13674
13675 switch (gimple_code (stmt))
13676 {
13677 WALK_SUBSTMTS;
13678
13679 case GIMPLE_OMP_PARALLEL:
13680 case GIMPLE_OMP_TASK:
13681 case GIMPLE_OMP_SECTIONS:
13682 case GIMPLE_OMP_SINGLE:
13683 case GIMPLE_OMP_SECTION:
13684 case GIMPLE_OMP_MASTER:
13685 case GIMPLE_OMP_ORDERED:
13686 case GIMPLE_OMP_SCAN:
13687 case GIMPLE_OMP_CRITICAL:
13688 case GIMPLE_OMP_TARGET:
13689 case GIMPLE_OMP_TEAMS:
13690 case GIMPLE_OMP_TASKGROUP:
13691 wi->info = stmt;
13692 walk_gimple_seq_mod (gimple_omp_body_ptr (stmt), diagnose_sb_2, NULL, wi);
13693 wi->info = context;
13694 break;
13695
13696 case GIMPLE_OMP_FOR:
13697 wi->info = stmt;
13698 /* gimple_omp_for_{index,initial,final} are all DECLs; no need to
13699 walk them. */
13700 walk_gimple_seq_mod (gimple_omp_for_pre_body_ptr (stmt),
13701 diagnose_sb_2, NULL, wi);
13702 walk_gimple_seq_mod (gimple_omp_body_ptr (stmt), diagnose_sb_2, NULL, wi);
13703 wi->info = context;
13704 break;
13705
13706 case GIMPLE_COND:
13707 {
13708 gcond *cond_stmt = as_a <gcond *> (stmt);
13709 tree lab = gimple_cond_true_label (cond_stmt);
13710 if (lab)
13711 {
13712 n = splay_tree_lookup (all_labels,
13713 (splay_tree_key) lab);
13714 diagnose_sb_0 (gsi_p, context,
13715 n ? (gimple *) n->value : NULL);
13716 }
13717 lab = gimple_cond_false_label (cond_stmt);
13718 if (lab)
13719 {
13720 n = splay_tree_lookup (all_labels,
13721 (splay_tree_key) lab);
13722 diagnose_sb_0 (gsi_p, context,
13723 n ? (gimple *) n->value : NULL);
13724 }
13725 }
13726 break;
13727
13728 case GIMPLE_GOTO:
13729 {
13730 tree lab = gimple_goto_dest (stmt);
13731 if (TREE_CODE (lab) != LABEL_DECL)
13732 break;
13733
13734 n = splay_tree_lookup (all_labels, (splay_tree_key) lab);
13735 diagnose_sb_0 (gsi_p, context, n ? (gimple *) n->value : NULL);
13736 }
13737 break;
13738
13739 case GIMPLE_SWITCH:
13740 {
13741 gswitch *switch_stmt = as_a <gswitch *> (stmt);
13742 unsigned int i;
13743 for (i = 0; i < gimple_switch_num_labels (switch_stmt); ++i)
13744 {
13745 tree lab = CASE_LABEL (gimple_switch_label (switch_stmt, i));
13746 n = splay_tree_lookup (all_labels, (splay_tree_key) lab);
13747 if (n && diagnose_sb_0 (gsi_p, context, (gimple *) n->value))
13748 break;
13749 }
13750 }
13751 break;
13752
13753 case GIMPLE_RETURN:
13754 diagnose_sb_0 (gsi_p, context, NULL);
13755 break;
13756
13757 default:
13758 break;
13759 }
13760
13761 return NULL_TREE;
13762 }
13763
13764 static unsigned int
13765 diagnose_omp_structured_block_errors (void)
13766 {
13767 struct walk_stmt_info wi;
13768 gimple_seq body = gimple_body (current_function_decl);
13769
13770 all_labels = splay_tree_new (splay_tree_compare_pointers, 0, 0);
13771
13772 memset (&wi, 0, sizeof (wi));
13773 walk_gimple_seq (body, diagnose_sb_1, NULL, &wi);
13774
13775 memset (&wi, 0, sizeof (wi));
13776 wi.want_locations = true;
13777 walk_gimple_seq_mod (&body, diagnose_sb_2, NULL, &wi);
13778
13779 gimple_set_body (current_function_decl, body);
13780
13781 splay_tree_delete (all_labels);
13782 all_labels = NULL;
13783
13784 return 0;
13785 }
13786
namespace {

/* Pass descriptor for the diagnostic-only pass; the leading '*' in the
   name marks it as not user-selectable via -fdump options.  */
const pass_data pass_data_diagnose_omp_blocks =
{
  GIMPLE_PASS, /* type */
  "*diagnose_omp_blocks", /* name */
  OPTGROUP_OMP, /* optinfo_flags */
  TV_NONE, /* tv_id */
  PROP_gimple_any, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

/* Pass wrapper around diagnose_omp_structured_block_errors; gated on any
   of the OpenACC/OpenMP/OpenMP-simd front-end flags being active.  */
class pass_diagnose_omp_blocks : public gimple_opt_pass
{
public:
  pass_diagnose_omp_blocks (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_diagnose_omp_blocks, ctxt)
  {}

  /* opt_pass methods: */
  virtual bool gate (function *)
  {
    return flag_openacc || flag_openmp || flag_openmp_simd;
  }
  virtual unsigned int execute (function *)
  {
    return diagnose_omp_structured_block_errors ();
  }

}; // class pass_diagnose_omp_blocks

} // anon namespace
13822
13823 gimple_opt_pass *
13824 make_pass_diagnose_omp_blocks (gcc::context *ctxt)
13825 {
13826 return new pass_diagnose_omp_blocks (ctxt);
13827 }
13828 \f
13829
13830 #include "gt-omp-low.h"