/* Lowering pass for OMP directives.  Converts OMP directives into explicit
   calls to the runtime library (libgomp), data marshalling to implement data
   sharing and copying clauses, offloading to accelerators, and more.

   Contributed by Diego Novillo <dnovillo@redhat.com>

   Copyright (C) 2005-2020 Free Software Foundation, Inc.

   This file is part of GCC.

   GCC is free software; you can redistribute it and/or modify it under
   the terms of the GNU General Public License as published by the Free
   Software Foundation; either version 3, or (at your option) any later
   version.

   GCC is distributed in the hope that it will be useful, but WITHOUT ANY
   WARRANTY; without even the implied warranty of MERCHANTABILITY or
   FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
   for more details.

   You should have received a copy of the GNU General Public License
   along with GCC; see the file COPYING3.  If not see
   <http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "target.h"
#include "tree.h"
#include "gimple.h"
#include "tree-pass.h"
#include "ssa.h"
#include "cgraph.h"
#include "pretty-print.h"
#include "diagnostic-core.h"
#include "fold-const.h"
#include "stor-layout.h"
#include "internal-fn.h"
#include "gimple-fold.h"
#include "gimplify.h"
#include "gimple-iterator.h"
#include "gimplify-me.h"
#include "gimple-walk.h"
#include "tree-iterator.h"
#include "tree-inline.h"
#include "langhooks.h"
#include "tree-dfa.h"
#include "tree-ssa.h"
#include "splay-tree.h"
#include "omp-general.h"
#include "omp-low.h"
#include "gimple-low.h"
#include "alloc-pool.h"
#include "symbol-summary.h"
#include "tree-nested.h"
#include "context.h"
#include "gomp-constants.h"
#include "gimple-pretty-print.h"
#include "stringpool.h"
#include "attribs.h"

/* Lowering of OMP parallel and workshare constructs proceeds in two
   phases.  The first phase scans the function looking for OMP statements
   and then for variables that must be replaced to satisfy data sharing
   clauses.  The second phase expands code for the constructs, as well as
   re-gimplifying things when variables have been replaced with complex
   expressions.

   Final code generation is done by pass_expand_omp.  The flowgraph is
   scanned for regions which are then moved to a new
   function, to be invoked by the thread library, or offloaded.  */

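/* As an illustrative sketch (hedged; exact GIMPLE varies by construct,
   clauses and GCC version), a construct such as

       #pragma omp parallel shared(x)
       x += 1;

   is eventually outlined into a child function (named e.g. main._omp_fn.0
   in dumps) that receives a pointer to a data-sharing record, roughly:

       struct .omp_data_s { int x; };   // or int *x, if passed by pointer
       __builtin_GOMP_parallel (main._omp_fn.0, &.omp_data_o, 0, 0);

   This file builds those records and remaps variable references; the
   actual outlining happens later in pass_expand_omp.  The names above
   follow GIMPLE dump conventions.  */
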
/* Context structure.  Used to store information about each parallel
   directive in the code.  */

struct omp_context
{
  /* This field must be at the beginning, as we do "inheritance": Some
     callback functions for tree-inline.c (e.g., omp_copy_decl)
     receive a copy_body_data pointer that is up-casted to an
     omp_context pointer.  */
  copy_body_data cb;

  /* The tree of contexts corresponding to the encountered constructs.  */
  struct omp_context *outer;
  gimple *stmt;

  /* Map variables to fields in a structure that allows communication
     between sending and receiving threads.  */
  splay_tree field_map;
  tree record_type;
  tree sender_decl;
  tree receiver_decl;

  /* These are used just by task contexts, if task firstprivate fn is
     needed.  srecord_type is used to communicate from the thread
     that encountered the task construct to the task firstprivate fn,
     while record_type is allocated by GOMP_task, initialized by the task
     firstprivate fn and passed to the task body fn.  */
  splay_tree sfield_map;
  tree srecord_type;

  /* A chain of variables to add to the top-level block surrounding the
     construct.  In the case of a parallel, this is in the child function.  */
  tree block_vars;

  /* Label to which GOMP_cancel{,lation_point} and explicit and implicit
     barriers should jump during the omplower pass.  */
  tree cancel_label;

  /* The sibling GIMPLE_OMP_FOR simd with _simt_ clause or NULL
     otherwise.  */
  gimple *simt_stmt;

  /* For task reductions registered in this context, a vector containing
     the length of the private copies block (if constant, otherwise NULL)
     and then offsets (if constant, otherwise NULL) for each entry.  */
  vec<tree> task_reductions;

  /* A hash map from the reduction clauses to the registered array
     elts.  */
  hash_map<tree, unsigned> *task_reduction_map;

  /* And a hash map from the lastprivate(conditional:) variables to their
     corresponding tracking loop iteration variables.  */
  hash_map<tree, tree> *lastprivate_conditional_map;

  /* And a hash map from the allocate variables to their corresponding
     allocators.  */
  hash_map<tree, tree> *allocate_map;

  /* A tree_list of the reduction clauses in this context.  This is
     only used for checking the consistency of OpenACC reduction
     clauses in scan_omp_for and is not guaranteed to contain a valid
     value outside of that function.  */
  tree local_reduction_clauses;

  /* A tree_list of the reduction clauses in outer contexts.  This is
     only used for checking the consistency of OpenACC reduction
     clauses in scan_omp_for and is not guaranteed to contain a valid
     value outside of that function.  */
  tree outer_reduction_clauses;

  /* Nesting depth of this context.  Used to beautify error messages re
     invalid gotos.  The outermost ctx is depth 1, with depth 0 being
     reserved for the main body of the function.  */
  int depth;

  /* True if this parallel directive is nested within another.  */
  bool is_nested;

  /* True if this construct can be cancelled.  */
  bool cancellable;

  /* True if lower_omp_1 should look up lastprivate conditional in parent
     context.  */
  bool combined_into_simd_safelen1;

  /* True if there is nested scan context with inclusive clause.  */
  bool scan_inclusive;

  /* True if there is nested scan context with exclusive clause.  */
  bool scan_exclusive;

  /* True in the second simd loop of for simd with inscan reductions.  */
  bool for_simd_scan_phase;

  /* True if there is order(concurrent) clause on the construct.  */
  bool order_concurrent;

  /* True if there is bind clause on the construct (i.e. a loop construct).  */
  bool loop_p;
};

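/* A hedged illustration of the task data flow described above (names as
   they appear in GIMPLE dumps; the GOMP_task argument list is abridged):

       #pragma omp task firstprivate(v)
       use (v);

   lowers so that the encountering thread fills a sender record of
   srecord_type (.omp_data_o) and calls

       __builtin_GOMP_task (body_fn, &.omp_data_o, copy_fn, ...);

   GOMP_task allocates a block of record_type; copy_fn (the task
   firstprivate fn) initializes it from the sender record, and body_fn
   then receives that block.  */
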
static splay_tree all_contexts;
static int taskreg_nesting_level;
static int target_nesting_level;
static bitmap task_shared_vars;
static bitmap global_nonaddressable_vars;
static vec<omp_context *> taskreg_contexts;

static void scan_omp (gimple_seq *, omp_context *);
static tree scan_omp_1_op (tree *, int *, void *);

#define WALK_SUBSTMTS \
  case GIMPLE_BIND: \
  case GIMPLE_TRY: \
  case GIMPLE_CATCH: \
  case GIMPLE_EH_FILTER: \
  case GIMPLE_TRANSACTION: \
    /* The sub-statements for these should be walked.  */ \
    *handled_ops_p = false; \
    break;

/* Return true if CTX corresponds to an OpenACC 'parallel' or 'serial'
   region.  */

static bool
is_oacc_parallel_or_serial (omp_context *ctx)
{
  enum gimple_code outer_type = gimple_code (ctx->stmt);
  return ((outer_type == GIMPLE_OMP_TARGET)
          && ((gimple_omp_target_kind (ctx->stmt)
               == GF_OMP_TARGET_KIND_OACC_PARALLEL)
              || (gimple_omp_target_kind (ctx->stmt)
                  == GF_OMP_TARGET_KIND_OACC_SERIAL)));
}

/* Return true if CTX corresponds to an OpenACC 'kernels' region.  */

static bool
is_oacc_kernels (omp_context *ctx)
{
  enum gimple_code outer_type = gimple_code (ctx->stmt);
  return ((outer_type == GIMPLE_OMP_TARGET)
          && (gimple_omp_target_kind (ctx->stmt)
              == GF_OMP_TARGET_KIND_OACC_KERNELS));
}

/* Return true if STMT corresponds to an OpenMP target region, or to a
   target data/enter data/exit data construct.  */
static bool
is_omp_target (gimple *stmt)
{
  if (gimple_code (stmt) == GIMPLE_OMP_TARGET)
    {
      int kind = gimple_omp_target_kind (stmt);
      return (kind == GF_OMP_TARGET_KIND_REGION
              || kind == GF_OMP_TARGET_KIND_DATA
              || kind == GF_OMP_TARGET_KIND_ENTER_DATA
              || kind == GF_OMP_TARGET_KIND_EXIT_DATA);
    }
  return false;
}

/* If DECL is the artificial dummy VAR_DECL created for non-static
   data member privatization, return the underlying "this" parameter,
   otherwise return NULL.  */

tree
omp_member_access_dummy_var (tree decl)
{
  if (!VAR_P (decl)
      || !DECL_ARTIFICIAL (decl)
      || !DECL_IGNORED_P (decl)
      || !DECL_HAS_VALUE_EXPR_P (decl)
      || !lang_hooks.decls.omp_disregard_value_expr (decl, false))
    return NULL_TREE;

  tree v = DECL_VALUE_EXPR (decl);
  if (TREE_CODE (v) != COMPONENT_REF)
    return NULL_TREE;

  while (1)
    switch (TREE_CODE (v))
      {
      case COMPONENT_REF:
      case MEM_REF:
      case INDIRECT_REF:
      CASE_CONVERT:
      case POINTER_PLUS_EXPR:
        v = TREE_OPERAND (v, 0);
        continue;
      case PARM_DECL:
        if (DECL_CONTEXT (v) == current_function_decl
            && DECL_ARTIFICIAL (v)
            && TREE_CODE (TREE_TYPE (v)) == POINTER_TYPE)
          return v;
        return NULL_TREE;
      default:
        return NULL_TREE;
      }
}

/* Helper for unshare_and_remap, called through walk_tree.  */

static tree
unshare_and_remap_1 (tree *tp, int *walk_subtrees, void *data)
{
  tree *pair = (tree *) data;
  if (*tp == pair[0])
    {
      *tp = unshare_expr (pair[1]);
      *walk_subtrees = 0;
    }
  else if (IS_TYPE_OR_DECL_P (*tp))
    *walk_subtrees = 0;
  return NULL_TREE;
}

/* Return unshare_expr (X) with all occurrences of FROM
   replaced with TO.  */

static tree
unshare_and_remap (tree x, tree from, tree to)
{
  tree pair[2] = { from, to };
  x = unshare_expr (x);
  walk_tree (&x, unshare_and_remap_1, pair, NULL);
  return x;
}

/* Convenience function for calling scan_omp_1_op on tree operands.  */

static inline tree
scan_omp_op (tree *tp, omp_context *ctx)
{
  struct walk_stmt_info wi;

  memset (&wi, 0, sizeof (wi));
  wi.info = ctx;
  wi.want_locations = true;

  return walk_tree (tp, scan_omp_1_op, &wi, NULL);
}

static void lower_omp (gimple_seq *, omp_context *);
static tree lookup_decl_in_outer_ctx (tree, omp_context *);
static tree maybe_lookup_decl_in_outer_ctx (tree, omp_context *);

/* Return true if CTX is for an omp parallel.  */

static inline bool
is_parallel_ctx (omp_context *ctx)
{
  return gimple_code (ctx->stmt) == GIMPLE_OMP_PARALLEL;
}


/* Return true if CTX is for an omp task.  */

static inline bool
is_task_ctx (omp_context *ctx)
{
  return gimple_code (ctx->stmt) == GIMPLE_OMP_TASK;
}


/* Return true if CTX is for an omp taskloop.  */

static inline bool
is_taskloop_ctx (omp_context *ctx)
{
  return gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
         && gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_TASKLOOP;
}


/* Return true if CTX is for a host omp teams.  */

static inline bool
is_host_teams_ctx (omp_context *ctx)
{
  return gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS
         && gimple_omp_teams_host (as_a <gomp_teams *> (ctx->stmt));
}

/* Return true if CTX is for an omp parallel or omp task or host omp teams
   (the last one is strictly not a task region in OpenMP speak, but we
   need to treat it similarly).  */

static inline bool
is_taskreg_ctx (omp_context *ctx)
{
  return is_parallel_ctx (ctx) || is_task_ctx (ctx) || is_host_teams_ctx (ctx);
}

/* Return true if EXPR is variable sized.  */

static inline bool
is_variable_sized (const_tree expr)
{
  return !TREE_CONSTANT (TYPE_SIZE_UNIT (TREE_TYPE (expr)));
}

/* Lookup variables.  The "maybe" forms allow the variable not to have
   been entered; otherwise we assert that it must have been.  */

static inline tree
lookup_decl (tree var, omp_context *ctx)
{
  tree *n = ctx->cb.decl_map->get (var);
  return *n;
}

static inline tree
maybe_lookup_decl (const_tree var, omp_context *ctx)
{
  tree *n = ctx->cb.decl_map->get (const_cast<tree> (var));
  return n ? *n : NULL_TREE;
}

static inline tree
lookup_field (tree var, omp_context *ctx)
{
  splay_tree_node n;
  n = splay_tree_lookup (ctx->field_map, (splay_tree_key) var);
  return (tree) n->value;
}

static inline tree
lookup_sfield (splay_tree_key key, omp_context *ctx)
{
  splay_tree_node n;
  n = splay_tree_lookup (ctx->sfield_map
                         ? ctx->sfield_map : ctx->field_map, key);
  return (tree) n->value;
}

static inline tree
lookup_sfield (tree var, omp_context *ctx)
{
  return lookup_sfield ((splay_tree_key) var, ctx);
}

static inline tree
maybe_lookup_field (splay_tree_key key, omp_context *ctx)
{
  splay_tree_node n;
  n = splay_tree_lookup (ctx->field_map, key);
  return n ? (tree) n->value : NULL_TREE;
}

static inline tree
maybe_lookup_field (tree var, omp_context *ctx)
{
  return maybe_lookup_field ((splay_tree_key) var, ctx);
}

/* Return true if DECL should be copied by pointer.  SHARED_CTX is
   the parallel context if DECL is to be shared.  */

static bool
use_pointer_for_field (tree decl, omp_context *shared_ctx)
{
  if (AGGREGATE_TYPE_P (TREE_TYPE (decl))
      || TYPE_ATOMIC (TREE_TYPE (decl)))
    return true;

  /* We can only use copy-in/copy-out semantics for shared variables
     when we know the value is not accessible from an outer scope.  */
  if (shared_ctx)
    {
      gcc_assert (!is_gimple_omp_oacc (shared_ctx->stmt));

      /* ??? Trivially accessible from anywhere.  But why would we even
         be passing an address in this case?  Should we simply assert
         this to be false, or should we have a cleanup pass that removes
         these from the list of mappings?  */
      if (is_global_var (maybe_lookup_decl_in_outer_ctx (decl, shared_ctx)))
        return true;

      /* For variables with DECL_HAS_VALUE_EXPR_P set, we cannot tell
         without analyzing the expression whether or not its location
         is accessible to anyone else.  In the case of nested parallel
         regions it certainly may be.  */
      if (TREE_CODE (decl) != RESULT_DECL && DECL_HAS_VALUE_EXPR_P (decl))
        return true;

      /* Do not use copy-in/copy-out for variables that have their
         address taken.  */
      if (is_global_var (decl))
        {
          /* For file scope vars, track whether we've seen them as
             non-addressable initially and in that case, keep the same
             answer for the duration of the pass, even when they are made
             addressable later on e.g. through reduction expansion.  Global
             variables which weren't addressable before the pass will not
             have their privatized copies address taken.  See PR91216.  */
          if (!TREE_ADDRESSABLE (decl))
            {
              if (!global_nonaddressable_vars)
                global_nonaddressable_vars = BITMAP_ALLOC (NULL);
              bitmap_set_bit (global_nonaddressable_vars, DECL_UID (decl));
            }
          else if (!global_nonaddressable_vars
                   || !bitmap_bit_p (global_nonaddressable_vars,
                                     DECL_UID (decl)))
            return true;
        }
      else if (TREE_ADDRESSABLE (decl))
        return true;

      /* lower_send_shared_vars only uses copy-in, but not copy-out
         for these.  */
      if (TREE_READONLY (decl)
          || ((TREE_CODE (decl) == RESULT_DECL
               || TREE_CODE (decl) == PARM_DECL)
              && DECL_BY_REFERENCE (decl)))
        return false;

      /* Disallow copy-in/out in nested parallel if
         decl is shared in outer parallel, otherwise
         each thread could store the shared variable
         in its own copy-in location, making the
         variable no longer really shared.  */
      if (shared_ctx->is_nested)
        {
          omp_context *up;

          for (up = shared_ctx->outer; up; up = up->outer)
            if ((is_taskreg_ctx (up)
                 || (gimple_code (up->stmt) == GIMPLE_OMP_TARGET
                     && is_gimple_omp_offloaded (up->stmt)))
                && maybe_lookup_decl (decl, up))
              break;

          if (up)
            {
              tree c;

              if (gimple_code (up->stmt) == GIMPLE_OMP_TARGET)
                {
                  for (c = gimple_omp_target_clauses (up->stmt);
                       c; c = OMP_CLAUSE_CHAIN (c))
                    if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
                        && OMP_CLAUSE_DECL (c) == decl)
                      break;
                }
              else
                for (c = gimple_omp_taskreg_clauses (up->stmt);
                     c; c = OMP_CLAUSE_CHAIN (c))
                  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED
                      && OMP_CLAUSE_DECL (c) == decl)
                    break;

              if (c)
                goto maybe_mark_addressable_and_ret;
            }
        }

      /* For tasks avoid using copy-in/out.  As tasks can be
         deferred or executed in a different thread, when GOMP_task
         returns, the task hasn't necessarily terminated.  */
      if (is_task_ctx (shared_ctx))
        {
          tree outer;
        maybe_mark_addressable_and_ret:
          outer = maybe_lookup_decl_in_outer_ctx (decl, shared_ctx);
          if (is_gimple_reg (outer) && !omp_member_access_dummy_var (outer))
            {
              /* Taking address of OUTER in lower_send_shared_vars
                 might need regimplification of everything that uses the
                 variable.  */
              if (!task_shared_vars)
                task_shared_vars = BITMAP_ALLOC (NULL);
              bitmap_set_bit (task_shared_vars, DECL_UID (outer));
              TREE_ADDRESSABLE (outer) = 1;
            }
          return true;
        }
    }

  return false;
}

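/* A hedged example of the decision above: given

       int a;
       int b[10];
       #pragma omp parallel shared(a, b)

   a non-addressable scalar like A can use copy-in/copy-out (a plain int
   field in the communication record), while the aggregate B, or any
   addressable variable, is passed by pointer so that all threads touch
   the original storage.  */
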
/* Construct a new automatic decl similar to VAR.  */

static tree
omp_copy_decl_2 (tree var, tree name, tree type, omp_context *ctx)
{
  tree copy = copy_var_decl (var, name, type);

  DECL_CONTEXT (copy) = current_function_decl;
  DECL_CHAIN (copy) = ctx->block_vars;
  /* If VAR is listed in task_shared_vars, it means it wasn't
     originally addressable and is addressable only because a task
     needed to take its address.  We don't need to take the address of
     privatized copies of that var.  */
  if (TREE_ADDRESSABLE (var)
      && ((task_shared_vars
           && bitmap_bit_p (task_shared_vars, DECL_UID (var)))
          || (global_nonaddressable_vars
              && bitmap_bit_p (global_nonaddressable_vars, DECL_UID (var)))))
    TREE_ADDRESSABLE (copy) = 0;
  ctx->block_vars = copy;

  return copy;
}

static tree
omp_copy_decl_1 (tree var, omp_context *ctx)
{
  return omp_copy_decl_2 (var, DECL_NAME (var), TREE_TYPE (var), ctx);
}

/* Build COMPONENT_REF and set TREE_THIS_VOLATILE and TREE_READONLY on it
   as appropriate.  */
static tree
omp_build_component_ref (tree obj, tree field)
{
  tree ret = build3 (COMPONENT_REF, TREE_TYPE (field), obj, field, NULL);
  if (TREE_THIS_VOLATILE (field))
    TREE_THIS_VOLATILE (ret) |= 1;
  if (TREE_READONLY (field))
    TREE_READONLY (ret) |= 1;
  return ret;
}

/* Build tree nodes to access the field for VAR on the receiver side.  */

static tree
build_receiver_ref (tree var, bool by_ref, omp_context *ctx)
{
  tree x, field = lookup_field (var, ctx);

  /* If the receiver record type was remapped in the child function,
     remap the field into the new record type.  */
  x = maybe_lookup_field (field, ctx);
  if (x != NULL)
    field = x;

  x = build_simple_mem_ref (ctx->receiver_decl);
  TREE_THIS_NOTRAP (x) = 1;
  x = omp_build_component_ref (x, field);
  if (by_ref)
    {
      x = build_simple_mem_ref (x);
      TREE_THIS_NOTRAP (x) = 1;
    }

  return x;
}

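/* In GIMPLE dumps the references built above appear roughly as
   .omp_data_i->var (and *.omp_data_i->var when BY_REF), where
   .omp_data_i is the child function's receiver_decl.  A hedged
   illustration only; the exact names are dump conventions.  */
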
/* Build tree nodes to access VAR in the scope outer to CTX.  In the case
   of a parallel, this is a component reference; for workshare constructs
   this is some variable.  */

static tree
build_outer_var_ref (tree var, omp_context *ctx,
                     enum omp_clause_code code = OMP_CLAUSE_ERROR)
{
  tree x;
  omp_context *outer = ctx->outer;
  while (outer && gimple_code (outer->stmt) == GIMPLE_OMP_TASKGROUP)
    outer = outer->outer;

  if (is_global_var (maybe_lookup_decl_in_outer_ctx (var, ctx)))
    x = var;
  else if (is_variable_sized (var))
    {
      x = TREE_OPERAND (DECL_VALUE_EXPR (var), 0);
      x = build_outer_var_ref (x, ctx, code);
      x = build_simple_mem_ref (x);
    }
  else if (is_taskreg_ctx (ctx))
    {
      bool by_ref = use_pointer_for_field (var, NULL);
      x = build_receiver_ref (var, by_ref, ctx);
    }
  else if ((gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
            && gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_SIMD)
           || ctx->loop_p
           || (code == OMP_CLAUSE_PRIVATE
               && (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
                   || gimple_code (ctx->stmt) == GIMPLE_OMP_SECTIONS
                   || gimple_code (ctx->stmt) == GIMPLE_OMP_SINGLE)))
    {
      /* #pragma omp simd isn't a worksharing construct, and can reference
         even private vars in its linear etc. clauses.
         Similarly for OMP_CLAUSE_PRIVATE with outer ref, that can refer
         to private vars in all worksharing constructs.  */
      x = NULL_TREE;
      if (outer && is_taskreg_ctx (outer))
        x = lookup_decl (var, outer);
      else if (outer)
        x = maybe_lookup_decl_in_outer_ctx (var, ctx);
      if (x == NULL_TREE)
        x = var;
    }
  else if (code == OMP_CLAUSE_LASTPRIVATE && is_taskloop_ctx (ctx))
    {
      gcc_assert (outer);
      splay_tree_node n
        = splay_tree_lookup (outer->field_map,
                             (splay_tree_key) &DECL_UID (var));
      if (n == NULL)
        {
          if (is_global_var (maybe_lookup_decl_in_outer_ctx (var, outer)))
            x = var;
          else
            x = lookup_decl (var, outer);
        }
      else
        {
          tree field = (tree) n->value;
          /* If the receiver record type was remapped in the child function,
             remap the field into the new record type.  */
          x = maybe_lookup_field (field, outer);
          if (x != NULL)
            field = x;

          x = build_simple_mem_ref (outer->receiver_decl);
          x = omp_build_component_ref (x, field);
          if (use_pointer_for_field (var, outer))
            x = build_simple_mem_ref (x);
        }
    }
  else if (outer)
    x = lookup_decl (var, outer);
  else if (omp_is_reference (var))
    /* This can happen with orphaned constructs.  If var is reference, it is
       possible it is shared and as such valid.  */
    x = var;
  else if (omp_member_access_dummy_var (var))
    x = var;
  else
    gcc_unreachable ();

  if (x == var)
    {
      tree t = omp_member_access_dummy_var (var);
      if (t)
        {
          x = DECL_VALUE_EXPR (var);
          tree o = maybe_lookup_decl_in_outer_ctx (t, ctx);
          if (o != t)
            x = unshare_and_remap (x, t, o);
          else
            x = unshare_expr (x);
        }
    }

  if (omp_is_reference (var))
    x = build_simple_mem_ref (x);

  return x;
}

/* Build tree nodes to access the field for VAR on the sender side.  */

static tree
build_sender_ref (splay_tree_key key, omp_context *ctx)
{
  tree field = lookup_sfield (key, ctx);
  return omp_build_component_ref (ctx->sender_decl, field);
}

static tree
build_sender_ref (tree var, omp_context *ctx)
{
  return build_sender_ref ((splay_tree_key) var, ctx);
}

/* Add a new field for VAR inside the structure CTX->SENDER_DECL.  MASK
   is a bitmask: bit 0 installs the field in FIELD_MAP/RECORD_TYPE and
   bit 1 in SFIELD_MAP/SRECORD_TYPE; bit 2 requests a pointer-to-pointer
   type for array sections; bits 3 and 4 key the splay tree lookup by
   &DECL_UID (VAR) or &DECL_NAME (VAR) instead of VAR itself.  */

static void
install_var_field (tree var, bool by_ref, int mask, omp_context *ctx)
{
  tree field, type, sfield = NULL_TREE;
  splay_tree_key key = (splay_tree_key) var;

  if ((mask & 16) != 0)
    {
      key = (splay_tree_key) &DECL_NAME (var);
      gcc_checking_assert (key != (splay_tree_key) var);
    }
  if ((mask & 8) != 0)
    {
      key = (splay_tree_key) &DECL_UID (var);
      gcc_checking_assert (key != (splay_tree_key) var);
    }
  gcc_assert ((mask & 1) == 0
              || !splay_tree_lookup (ctx->field_map, key));
  gcc_assert ((mask & 2) == 0 || !ctx->sfield_map
              || !splay_tree_lookup (ctx->sfield_map, key));
  gcc_assert ((mask & 3) == 3
              || !is_gimple_omp_oacc (ctx->stmt));

  type = TREE_TYPE (var);
  if ((mask & 16) != 0)
    type = lang_hooks.decls.omp_array_data (var, true);

  /* Prevent redeclaring the var in the split-off function with a restrict
     pointer type.  Note that we only clear type itself, restrict qualifiers in
     the pointed-to type will be ignored by points-to analysis.  */
  if (POINTER_TYPE_P (type)
      && TYPE_RESTRICT (type))
    type = build_qualified_type (type, TYPE_QUALS (type) & ~TYPE_QUAL_RESTRICT);

  if (mask & 4)
    {
      gcc_assert (TREE_CODE (type) == ARRAY_TYPE);
      type = build_pointer_type (build_pointer_type (type));
    }
  else if (by_ref)
    type = build_pointer_type (type);
  else if ((mask & 3) == 1 && omp_is_reference (var))
    type = TREE_TYPE (type);

  field = build_decl (DECL_SOURCE_LOCATION (var),
                      FIELD_DECL, DECL_NAME (var), type);

  /* Remember what variable this field was created for.  This does have a
     side effect of making dwarf2out ignore this member, so for helpful
     debugging we clear it later in delete_omp_context.  */
  DECL_ABSTRACT_ORIGIN (field) = var;
  if ((mask & 16) == 0 && type == TREE_TYPE (var))
    {
      SET_DECL_ALIGN (field, DECL_ALIGN (var));
      DECL_USER_ALIGN (field) = DECL_USER_ALIGN (var);
      TREE_THIS_VOLATILE (field) = TREE_THIS_VOLATILE (var);
    }
  else
    SET_DECL_ALIGN (field, TYPE_ALIGN (type));

  if ((mask & 3) == 3)
    {
      insert_field_into_struct (ctx->record_type, field);
      if (ctx->srecord_type)
        {
          sfield = build_decl (DECL_SOURCE_LOCATION (var),
                               FIELD_DECL, DECL_NAME (var), type);
          DECL_ABSTRACT_ORIGIN (sfield) = var;
          SET_DECL_ALIGN (sfield, DECL_ALIGN (field));
          DECL_USER_ALIGN (sfield) = DECL_USER_ALIGN (field);
          TREE_THIS_VOLATILE (sfield) = TREE_THIS_VOLATILE (field);
          insert_field_into_struct (ctx->srecord_type, sfield);
        }
    }
  else
    {
      if (ctx->srecord_type == NULL_TREE)
        {
          tree t;

          ctx->srecord_type = lang_hooks.types.make_type (RECORD_TYPE);
          ctx->sfield_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
          for (t = TYPE_FIELDS (ctx->record_type); t ; t = TREE_CHAIN (t))
            {
              sfield = build_decl (DECL_SOURCE_LOCATION (t),
                                   FIELD_DECL, DECL_NAME (t), TREE_TYPE (t));
              DECL_ABSTRACT_ORIGIN (sfield) = DECL_ABSTRACT_ORIGIN (t);
              insert_field_into_struct (ctx->srecord_type, sfield);
              splay_tree_insert (ctx->sfield_map,
                                 (splay_tree_key) DECL_ABSTRACT_ORIGIN (t),
                                 (splay_tree_value) sfield);
            }
        }
      sfield = field;
      insert_field_into_struct ((mask & 1) ? ctx->record_type
                                : ctx->srecord_type, field);
    }

  if (mask & 1)
    splay_tree_insert (ctx->field_map, key, (splay_tree_value) field);
  if ((mask & 2) && ctx->sfield_map)
    splay_tree_insert (ctx->sfield_map, key, (splay_tree_value) sfield);
}

static tree
install_var_local (tree var, omp_context *ctx)
{
  tree new_var = omp_copy_decl_1 (var, ctx);
  insert_decl_map (&ctx->cb, var, new_var);
  return new_var;
}

/* Adjust the replacement for DECL in CTX for the new context.  This means
   copying the DECL_VALUE_EXPR, and fixing up the type.  */

static void
fixup_remapped_decl (tree decl, omp_context *ctx, bool private_debug)
{
  tree new_decl, size;

  new_decl = lookup_decl (decl, ctx);

  TREE_TYPE (new_decl) = remap_type (TREE_TYPE (decl), &ctx->cb);

  if ((!TREE_CONSTANT (DECL_SIZE (new_decl)) || private_debug)
      && DECL_HAS_VALUE_EXPR_P (decl))
    {
      tree ve = DECL_VALUE_EXPR (decl);
      walk_tree (&ve, copy_tree_body_r, &ctx->cb, NULL);
      SET_DECL_VALUE_EXPR (new_decl, ve);
      DECL_HAS_VALUE_EXPR_P (new_decl) = 1;
    }

  if (!TREE_CONSTANT (DECL_SIZE (new_decl)))
    {
      size = remap_decl (DECL_SIZE (decl), &ctx->cb);
      if (size == error_mark_node)
        size = TYPE_SIZE (TREE_TYPE (new_decl));
      DECL_SIZE (new_decl) = size;

      size = remap_decl (DECL_SIZE_UNIT (decl), &ctx->cb);
      if (size == error_mark_node)
        size = TYPE_SIZE_UNIT (TREE_TYPE (new_decl));
      DECL_SIZE_UNIT (new_decl) = size;
    }
}

/* The callback for remap_decl.  Search all containing contexts for a
   mapping of the variable; this avoids having to duplicate the splay
   tree ahead of time.  We know a mapping doesn't already exist in the
   given context.  Create new mappings to implement default semantics.  */

static tree
omp_copy_decl (tree var, copy_body_data *cb)
{
  omp_context *ctx = (omp_context *) cb;
  tree new_var;

  if (TREE_CODE (var) == LABEL_DECL)
    {
      if (FORCED_LABEL (var) || DECL_NONLOCAL (var))
        return var;
      new_var = create_artificial_label (DECL_SOURCE_LOCATION (var));
      DECL_CONTEXT (new_var) = current_function_decl;
      insert_decl_map (&ctx->cb, var, new_var);
      return new_var;
    }

  while (!is_taskreg_ctx (ctx))
    {
      ctx = ctx->outer;
      if (ctx == NULL)
        return var;
      new_var = maybe_lookup_decl (var, ctx);
      if (new_var)
        return new_var;
    }

  if (is_global_var (var) || decl_function_context (var) != ctx->cb.src_fn)
    return var;

  return error_mark_node;
}

/* Create a new context, with OUTER_CTX being the surrounding context.  */

static omp_context *
new_omp_context (gimple *stmt, omp_context *outer_ctx)
{
  omp_context *ctx = XCNEW (omp_context);

  splay_tree_insert (all_contexts, (splay_tree_key) stmt,
                     (splay_tree_value) ctx);
  ctx->stmt = stmt;

  if (outer_ctx)
    {
      ctx->outer = outer_ctx;
      ctx->cb = outer_ctx->cb;
      ctx->cb.block = NULL;
      ctx->depth = outer_ctx->depth + 1;
    }
  else
    {
      ctx->cb.src_fn = current_function_decl;
      ctx->cb.dst_fn = current_function_decl;
      ctx->cb.src_node = cgraph_node::get (current_function_decl);
      gcc_checking_assert (ctx->cb.src_node);
      ctx->cb.dst_node = ctx->cb.src_node;
      ctx->cb.src_cfun = cfun;
      ctx->cb.copy_decl = omp_copy_decl;
      ctx->cb.eh_lp_nr = 0;
      ctx->cb.transform_call_graph_edges = CB_CGE_MOVE;
      ctx->cb.adjust_array_error_bounds = true;
      ctx->cb.dont_remap_vla_if_no_change = true;
      ctx->depth = 1;
    }

  ctx->cb.decl_map = new hash_map<tree, tree>;

  return ctx;
}

static gimple_seq maybe_catch_exception (gimple_seq);

/* Finalize task copyfn.  */

static void
finalize_task_copyfn (gomp_task *task_stmt)
{
  struct function *child_cfun;
  tree child_fn;
  gimple_seq seq = NULL, new_seq;
  gbind *bind;

  child_fn = gimple_omp_task_copy_fn (task_stmt);
  if (child_fn == NULL_TREE)
    return;

  child_cfun = DECL_STRUCT_FUNCTION (child_fn);
  DECL_STRUCT_FUNCTION (child_fn)->curr_properties = cfun->curr_properties;

  push_cfun (child_cfun);
  bind = gimplify_body (child_fn, false);
  gimple_seq_add_stmt (&seq, bind);
  new_seq = maybe_catch_exception (seq);
  if (new_seq != seq)
    {
      bind = gimple_build_bind (NULL, new_seq, NULL);
      seq = NULL;
      gimple_seq_add_stmt (&seq, bind);
    }
  gimple_set_body (child_fn, seq);
  pop_cfun ();

  /* Inform the callgraph about the new function.  */
  cgraph_node *node = cgraph_node::get_create (child_fn);
  node->parallelized_function = 1;
  cgraph_node::add_new_function (child_fn, false);
}

/* Destroy an omp_context data structure.  Called through the splay tree
   value delete callback.  */

static void
delete_omp_context (splay_tree_value value)
{
  omp_context *ctx = (omp_context *) value;

  delete ctx->cb.decl_map;

  if (ctx->field_map)
    splay_tree_delete (ctx->field_map);
  if (ctx->sfield_map)
    splay_tree_delete (ctx->sfield_map);

  /* We hijacked DECL_ABSTRACT_ORIGIN earlier.  We need to clear it before
     it produces corrupt debug information.  */
  if (ctx->record_type)
    {
      tree t;
      for (t = TYPE_FIELDS (ctx->record_type); t ; t = DECL_CHAIN (t))
        DECL_ABSTRACT_ORIGIN (t) = NULL;
    }
  if (ctx->srecord_type)
    {
      tree t;
      for (t = TYPE_FIELDS (ctx->srecord_type); t ; t = DECL_CHAIN (t))
        DECL_ABSTRACT_ORIGIN (t) = NULL;
    }

  if (is_task_ctx (ctx))
    finalize_task_copyfn (as_a <gomp_task *> (ctx->stmt));

  if (ctx->task_reduction_map)
    {
      ctx->task_reductions.release ();
      delete ctx->task_reduction_map;
    }

  delete ctx->lastprivate_conditional_map;
  delete ctx->allocate_map;

  XDELETE (ctx);
}

/* Fix up RECEIVER_DECL with a type that has been remapped to the child
   context.  */

static void
fixup_child_record_type (omp_context *ctx)
{
  tree f, type = ctx->record_type;

  if (!ctx->receiver_decl)
    return;
  /* ??? It isn't sufficient to just call remap_type here, because
     variably_modified_type_p doesn't work the way we expect for
     record types.  Testing each field for whether it needs remapping
     and creating a new record by hand works, however.  */
  for (f = TYPE_FIELDS (type); f ; f = DECL_CHAIN (f))
    if (variably_modified_type_p (TREE_TYPE (f), ctx->cb.src_fn))
      break;
  if (f)
    {
      tree name, new_fields = NULL;

      type = lang_hooks.types.make_type (RECORD_TYPE);
      name = DECL_NAME (TYPE_NAME (ctx->record_type));
      name = build_decl (DECL_SOURCE_LOCATION (ctx->receiver_decl),
                         TYPE_DECL, name, type);
      TYPE_NAME (type) = name;

      for (f = TYPE_FIELDS (ctx->record_type); f ; f = DECL_CHAIN (f))
        {
          tree new_f = copy_node (f);
          DECL_CONTEXT (new_f) = type;
          TREE_TYPE (new_f) = remap_type (TREE_TYPE (f), &ctx->cb);
          DECL_CHAIN (new_f) = new_fields;
          walk_tree (&DECL_SIZE (new_f), copy_tree_body_r, &ctx->cb, NULL);
          walk_tree (&DECL_SIZE_UNIT (new_f), copy_tree_body_r,
                     &ctx->cb, NULL);
          walk_tree (&DECL_FIELD_OFFSET (new_f), copy_tree_body_r,
                     &ctx->cb, NULL);
          new_fields = new_f;

          /* Arrange to be able to look up the receiver field
             given the sender field.  */
          splay_tree_insert (ctx->field_map, (splay_tree_key) f,
                             (splay_tree_value) new_f);
        }
      TYPE_FIELDS (type) = nreverse (new_fields);
      layout_type (type);
    }

  /* In a target region we never modify any of the pointers in *.omp_data_i,
     so attempt to help the optimizers.  */
  if (is_gimple_omp_offloaded (ctx->stmt))
    type = build_qualified_type (type, TYPE_QUAL_CONST);

  TREE_TYPE (ctx->receiver_decl)
    = build_qualified_type (build_reference_type (type), TYPE_QUAL_RESTRICT);
}

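/* Hedged illustration: after the fixup above, an offloaded child's
   receiver parameter is typed roughly as

       const struct .omp_data_s.N * restrict .omp_data_i;

   i.e. a restrict reference to the (const, for offloaded regions)
   remapped record.  Names follow GIMPLE dump conventions.  */
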
/* Instantiate decls as necessary in CTX to satisfy the data sharing
   specified by CLAUSES.  */

static void
scan_sharing_clauses (tree clauses, omp_context *ctx)
{
  tree c, decl;
  bool scan_array_reductions = false;

  for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
    if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_ALLOCATE
        && (OMP_CLAUSE_ALLOCATE_ALLOCATOR (c) == NULL_TREE
            /* omp_default_mem_alloc is 1.  */
            || !integer_onep (OMP_CLAUSE_ALLOCATE_ALLOCATOR (c))))
      {
        if (is_task_ctx (ctx))
          continue; /* For now.  */
        if (ctx->allocate_map == NULL)
          ctx->allocate_map = new hash_map<tree, tree>;
        ctx->allocate_map->put (OMP_CLAUSE_DECL (c),
                                OMP_CLAUSE_ALLOCATE_ALLOCATOR (c)
                                ? OMP_CLAUSE_ALLOCATE_ALLOCATOR (c)
                                : integer_zero_node);
      }

  for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
    {
      bool by_ref;

      switch (OMP_CLAUSE_CODE (c))
        {
        case OMP_CLAUSE_PRIVATE:
          decl = OMP_CLAUSE_DECL (c);
          if (OMP_CLAUSE_PRIVATE_OUTER_REF (c))
            goto do_private;
          else if (!is_variable_sized (decl))
            install_var_local (decl, ctx);
          break;

        case OMP_CLAUSE_SHARED:
          decl = OMP_CLAUSE_DECL (c);
          if (ctx->allocate_map && ctx->allocate_map->get (decl))
            ctx->allocate_map->remove (decl);
          /* Ignore shared directives in teams construct inside of
             target construct.  */
          if (gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS
              && !is_host_teams_ctx (ctx))
            {
              /* Global variables don't need to be copied,
                 the receiver side will use them directly.  */
              tree odecl = maybe_lookup_decl_in_outer_ctx (decl, ctx);
              if (is_global_var (odecl))
                break;
              insert_decl_map (&ctx->cb, decl, odecl);
              break;
            }
          gcc_assert (is_taskreg_ctx (ctx));
          gcc_assert (!COMPLETE_TYPE_P (TREE_TYPE (decl))
                      || !is_variable_sized (decl));
          /* Global variables don't need to be copied,
             the receiver side will use them directly.  */
          if (is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx)))
            break;
          if (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
            {
              use_pointer_for_field (decl, ctx);
              break;
            }
          by_ref = use_pointer_for_field (decl, NULL);
          if ((! TREE_READONLY (decl) && !OMP_CLAUSE_SHARED_READONLY (c))
              || TREE_ADDRESSABLE (decl)
              || by_ref
              || omp_is_reference (decl))
            {
              by_ref = use_pointer_for_field (decl, ctx);
              install_var_field (decl, by_ref, 3, ctx);
              install_var_local (decl, ctx);
              break;
            }
          /* We don't need to copy const scalar vars back.  */
          OMP_CLAUSE_SET_CODE (c, OMP_CLAUSE_FIRSTPRIVATE);
          goto do_private;

        case OMP_CLAUSE_REDUCTION:
          if (is_oacc_parallel_or_serial (ctx) || is_oacc_kernels (ctx))
            ctx->local_reduction_clauses
              = tree_cons (NULL, c, ctx->local_reduction_clauses);
          /* FALLTHRU */

        case OMP_CLAUSE_IN_REDUCTION:
          decl = OMP_CLAUSE_DECL (c);
          if (TREE_CODE (decl) == MEM_REF)
            {
              tree t = TREE_OPERAND (decl, 0);
              if (TREE_CODE (t) == POINTER_PLUS_EXPR)
                t = TREE_OPERAND (t, 0);
              if (TREE_CODE (t) == INDIRECT_REF
                  || TREE_CODE (t) == ADDR_EXPR)
                t = TREE_OPERAND (t, 0);
              install_var_local (t, ctx);
              if (is_taskreg_ctx (ctx)
                  && (!is_global_var (maybe_lookup_decl_in_outer_ctx (t, ctx))
                      || (is_task_ctx (ctx)
                          && (TREE_CODE (TREE_TYPE (t)) == POINTER_TYPE
                              || (TREE_CODE (TREE_TYPE (t)) == REFERENCE_TYPE
                                  && (TREE_CODE (TREE_TYPE (TREE_TYPE (t)))
                                      == POINTER_TYPE)))))
                  && !is_variable_sized (t)
                  && (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION
                      || (!OMP_CLAUSE_REDUCTION_TASK (c)
                          && !is_task_ctx (ctx))))
                {
                  by_ref = use_pointer_for_field (t, NULL);
                  if (is_task_ctx (ctx)
                      && TREE_CODE (TREE_TYPE (t)) == REFERENCE_TYPE
                      && TREE_CODE (TREE_TYPE (TREE_TYPE (t))) == POINTER_TYPE)
                    {
                      install_var_field (t, false, 1, ctx);
                      install_var_field (t, by_ref, 2, ctx);
                    }
                  else
                    install_var_field (t, by_ref, 3, ctx);
                }
              break;
            }
          if (is_task_ctx (ctx)
              || (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
                  && OMP_CLAUSE_REDUCTION_TASK (c)
                  && is_parallel_ctx (ctx)))
            {
              /* Global variables don't need to be copied,
                 the receiver side will use them directly.  */
              if (!is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx)))
                {
                  by_ref = use_pointer_for_field (decl, ctx);
                  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_IN_REDUCTION)
                    install_var_field (decl, by_ref, 3, ctx);
                }
              install_var_local (decl, ctx);
              break;
            }
          if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
              && OMP_CLAUSE_REDUCTION_TASK (c))
            {
              install_var_local (decl, ctx);
              break;
            }
          goto do_private;

        case OMP_CLAUSE_LASTPRIVATE:
          /* Let the corresponding firstprivate clause create
             the variable.  */
          if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
            break;
          /* FALLTHRU */

        case OMP_CLAUSE_FIRSTPRIVATE:
        case OMP_CLAUSE_LINEAR:
          decl = OMP_CLAUSE_DECL (c);
        do_private:
          if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE
               || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_IS_DEVICE_PTR)
              && is_gimple_omp_offloaded (ctx->stmt))
            {
              if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
                install_var_field (decl, !omp_is_reference (decl), 3, ctx);
              else if (TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
                install_var_field (decl, true, 3, ctx);
              else
                install_var_field (decl, false, 3, ctx);
            }
          if (is_variable_sized (decl))
            {
              if (is_task_ctx (ctx))
                install_var_field (decl, false, 1, ctx);
              break;
            }
          else if (is_taskreg_ctx (ctx))
            {
              bool global
                = is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx));
              by_ref = use_pointer_for_field (decl, NULL);

              if (is_task_ctx (ctx)
                  && (global || by_ref || omp_is_reference (decl)))
                {
                  install_var_field (decl, false, 1, ctx);
                  if (!global)
                    install_var_field (decl, by_ref, 2, ctx);
                }
              else if (!global)
                install_var_field (decl, by_ref, 3, ctx);
            }
          install_var_local (decl, ctx);
          break;

        case OMP_CLAUSE_USE_DEVICE_PTR:
        case OMP_CLAUSE_USE_DEVICE_ADDR:
          decl = OMP_CLAUSE_DECL (c);

          /* Fortran array descriptors.  */
          if (lang_hooks.decls.omp_array_data (decl, true))
            install_var_field (decl, false, 19, ctx);
          else if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_USE_DEVICE_ADDR
                    && !omp_is_reference (decl)
                    && !omp_is_allocatable_or_ptr (decl))
                   || TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
            install_var_field (decl, true, 11, ctx);
          else
            install_var_field (decl, false, 11, ctx);
          if (DECL_SIZE (decl)
              && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
            {
              tree decl2 = DECL_VALUE_EXPR (decl);
              gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
              decl2 = TREE_OPERAND (decl2, 0);
              gcc_assert (DECL_P (decl2));
              install_var_local (decl2, ctx);
            }
          install_var_local (decl, ctx);
          break;

        case OMP_CLAUSE_IS_DEVICE_PTR:
          decl = OMP_CLAUSE_DECL (c);
          goto do_private;

        case OMP_CLAUSE__LOOPTEMP_:
        case OMP_CLAUSE__REDUCTEMP_:
          gcc_assert (is_taskreg_ctx (ctx));
          decl = OMP_CLAUSE_DECL (c);
          install_var_field (decl, false, 3, ctx);
          install_var_local (decl, ctx);
          break;

        case OMP_CLAUSE_COPYPRIVATE:
        case OMP_CLAUSE_COPYIN:
          decl = OMP_CLAUSE_DECL (c);
          by_ref = use_pointer_for_field (decl, NULL);
          install_var_field (decl, by_ref, 3, ctx);
          break;

        case OMP_CLAUSE_FINAL:
        case OMP_CLAUSE_IF:
        case OMP_CLAUSE_NUM_THREADS:
        case OMP_CLAUSE_NUM_TEAMS:
        case OMP_CLAUSE_THREAD_LIMIT:
        case OMP_CLAUSE_DEVICE:
        case OMP_CLAUSE_SCHEDULE:
        case OMP_CLAUSE_DIST_SCHEDULE:
        case OMP_CLAUSE_DEPEND:
        case OMP_CLAUSE_PRIORITY:
        case OMP_CLAUSE_GRAINSIZE:
        case OMP_CLAUSE_NUM_TASKS:
        case OMP_CLAUSE_NUM_GANGS:
        case OMP_CLAUSE_NUM_WORKERS:
        case OMP_CLAUSE_VECTOR_LENGTH:
          if (ctx->outer)
            scan_omp_op (&OMP_CLAUSE_OPERAND (c, 0), ctx->outer);
          break;

        case OMP_CLAUSE_TO:
        case OMP_CLAUSE_FROM:
        case OMP_CLAUSE_MAP:
          if (ctx->outer)
            scan_omp_op (&OMP_CLAUSE_SIZE (c), ctx->outer);
          decl = OMP_CLAUSE_DECL (c);
          /* Global variables with "omp declare target" attribute
             don't need to be copied, the receiver side will use them
             directly.  However, global variables with "omp declare target
             link" attribute need to be copied, as do maps with the ALWAYS
             modifier.  */
          if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
              && DECL_P (decl)
              && ((OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_FIRSTPRIVATE_POINTER
                   && (OMP_CLAUSE_MAP_KIND (c)
                       != GOMP_MAP_FIRSTPRIVATE_REFERENCE)
                   && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_ATTACH
                   && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_DETACH)
                  || TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
              && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_ALWAYS_TO
              && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_ALWAYS_FROM
              && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_ALWAYS_TOFROM
              && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_TO_PSET
              && is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx))
              && varpool_node::get_create (decl)->offloadable
              && !lookup_attribute ("omp declare target link",
                                    DECL_ATTRIBUTES (decl)))
            break;
          if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
              && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_POINTER)
            {
              /* Ignore GOMP_MAP_POINTER kind for arrays in regions that are
                 not offloaded; there is nothing to map for those.  */
              if (!is_gimple_omp_offloaded (ctx->stmt)
                  && !POINTER_TYPE_P (TREE_TYPE (decl))
                  && !OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c))
                break;
            }
          if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
              && DECL_P (decl)
              && (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ATTACH
                  || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_DETACH)
              && is_omp_target (ctx->stmt))
            {
              /* If this is an offloaded region, an attach operation should
                 only exist when the pointer variable is mapped in a prior
                 clause.  */
              if (is_gimple_omp_offloaded (ctx->stmt))
                gcc_assert
                  (maybe_lookup_decl (decl, ctx)
                   || (is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx))
                       && lookup_attribute ("omp declare target",
                                            DECL_ATTRIBUTES (decl))));

              /* By itself, attach/detach is generated as part of pointer
                 variable mapping and should not create new variables in the
                 offloaded region, however sender refs for it must be created
                 for its address to be passed to the runtime.  */
              tree field
                = build_decl (OMP_CLAUSE_LOCATION (c),
                              FIELD_DECL, NULL_TREE, ptr_type_node);
              SET_DECL_ALIGN (field, TYPE_ALIGN (ptr_type_node));
              insert_field_into_struct (ctx->record_type, field);
              /* To not clash with a map of the pointer variable itself,
                 attach/detach maps have their field looked up by the *clause*
                 tree expression, not the decl.  */
              gcc_assert (!splay_tree_lookup (ctx->field_map,
                                              (splay_tree_key) c));
              splay_tree_insert (ctx->field_map, (splay_tree_key) c,
                                 (splay_tree_value) field);
              break;
            }
          if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
              && (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER
                  || (OMP_CLAUSE_MAP_KIND (c)
                      == GOMP_MAP_FIRSTPRIVATE_REFERENCE)))
            {
              if (TREE_CODE (decl) == COMPONENT_REF
                  || (TREE_CODE (decl) == INDIRECT_REF
                      && TREE_CODE (TREE_OPERAND (decl, 0)) == COMPONENT_REF
                      && (TREE_CODE (TREE_TYPE (TREE_OPERAND (decl, 0)))
                          == REFERENCE_TYPE)))
                break;
              if (DECL_SIZE (decl)
                  && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
                {
                  tree decl2 = DECL_VALUE_EXPR (decl);
                  gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
                  decl2 = TREE_OPERAND (decl2, 0);
                  gcc_assert (DECL_P (decl2));
                  install_var_local (decl2, ctx);
                }
              install_var_local (decl, ctx);
              break;
            }
          if (DECL_P (decl))
            {
              if (DECL_SIZE (decl)
                  && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
                {
                  tree decl2 = DECL_VALUE_EXPR (decl);
                  gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
                  decl2 = TREE_OPERAND (decl2, 0);
                  gcc_assert (DECL_P (decl2));
                  install_var_field (decl2, true, 3, ctx);
                  install_var_local (decl2, ctx);
                  install_var_local (decl, ctx);
                }
              else
                {
                  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
                      && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_POINTER
                      && !OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c)
                      && TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
                    install_var_field (decl, true, 7, ctx);
                  else
                    install_var_field (decl, true, 3, ctx);
                  if (is_gimple_omp_offloaded (ctx->stmt)
                      && !OMP_CLAUSE_MAP_IN_REDUCTION (c))
                    install_var_local (decl, ctx);
                }
            }
          else
            {
              tree base = get_base_address (decl);
              tree nc = OMP_CLAUSE_CHAIN (c);
              if (DECL_P (base)
                  && nc != NULL_TREE
                  && OMP_CLAUSE_CODE (nc) == OMP_CLAUSE_MAP
                  && OMP_CLAUSE_DECL (nc) == base
                  && OMP_CLAUSE_MAP_KIND (nc) == GOMP_MAP_POINTER
                  && integer_zerop (OMP_CLAUSE_SIZE (nc)))
                {
                  OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c) = 1;
                  OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (nc) = 1;
                }
              else
                {
                  if (ctx->outer)
                    {
                      scan_omp_op (&OMP_CLAUSE_DECL (c), ctx->outer);
                      decl = OMP_CLAUSE_DECL (c);
                    }
                  gcc_assert (!splay_tree_lookup (ctx->field_map,
                                                  (splay_tree_key) decl));
                  tree field
                    = build_decl (OMP_CLAUSE_LOCATION (c),
                                  FIELD_DECL, NULL_TREE, ptr_type_node);
                  SET_DECL_ALIGN (field, TYPE_ALIGN (ptr_type_node));
                  insert_field_into_struct (ctx->record_type, field);
                  splay_tree_insert (ctx->field_map, (splay_tree_key) decl,
                                     (splay_tree_value) field);
                }
            }
          break;

        case OMP_CLAUSE_ORDER:
          ctx->order_concurrent = true;
          break;

        case OMP_CLAUSE_BIND:
          ctx->loop_p = true;
          break;

        case OMP_CLAUSE_NOWAIT:
        case OMP_CLAUSE_ORDERED:
        case OMP_CLAUSE_COLLAPSE:
        case OMP_CLAUSE_UNTIED:
        case OMP_CLAUSE_MERGEABLE:
        case OMP_CLAUSE_PROC_BIND:
        case OMP_CLAUSE_SAFELEN:
        case OMP_CLAUSE_SIMDLEN:
        case OMP_CLAUSE_THREADS:
        case OMP_CLAUSE_SIMD:
        case OMP_CLAUSE_NOGROUP:
        case OMP_CLAUSE_DEFAULTMAP:
        case OMP_CLAUSE_ASYNC:
        case OMP_CLAUSE_WAIT:
        case OMP_CLAUSE_GANG:
        case OMP_CLAUSE_WORKER:
        case OMP_CLAUSE_VECTOR:
        case OMP_CLAUSE_INDEPENDENT:
        case OMP_CLAUSE_AUTO:
        case OMP_CLAUSE_SEQ:
        case OMP_CLAUSE_TILE:
        case OMP_CLAUSE__SIMT_:
        case OMP_CLAUSE_DEFAULT:
        case OMP_CLAUSE_NONTEMPORAL:
        case OMP_CLAUSE_IF_PRESENT:
        case OMP_CLAUSE_FINALIZE:
        case OMP_CLAUSE_TASK_REDUCTION:
        case OMP_CLAUSE_ALLOCATE:
          break;

        case OMP_CLAUSE_ALIGNED:
          decl = OMP_CLAUSE_DECL (c);
          if (is_global_var (decl)
              && TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
            install_var_local (decl, ctx);
          break;

        case OMP_CLAUSE__CONDTEMP_:
          decl = OMP_CLAUSE_DECL (c);
          if (is_parallel_ctx (ctx))
            {
              install_var_field (decl, false, 3, ctx);
              install_var_local (decl, ctx);
            }
          else if (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
                   && gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_SIMD
                   && !OMP_CLAUSE__CONDTEMP__ITER (c))
            install_var_local (decl, ctx);
          break;

        case OMP_CLAUSE__CACHE_:
        default:
          gcc_unreachable ();
        }
    }

  for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
    {
      switch (OMP_CLAUSE_CODE (c))
        {
        case OMP_CLAUSE_LASTPRIVATE:
          /* Let the corresponding firstprivate clause create
             the variable.  */
          if (OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c))
            scan_array_reductions = true;
          if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
            break;
          /* FALLTHRU */

        case OMP_CLAUSE_FIRSTPRIVATE:
        case OMP_CLAUSE_PRIVATE:
        case OMP_CLAUSE_LINEAR:
        case OMP_CLAUSE_IS_DEVICE_PTR:
          decl = OMP_CLAUSE_DECL (c);
          if (is_variable_sized (decl))
            {
              if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE
                   || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_IS_DEVICE_PTR)
                  && is_gimple_omp_offloaded (ctx->stmt))
                {
                  tree decl2 = DECL_VALUE_EXPR (decl);
                  gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
                  decl2 = TREE_OPERAND (decl2, 0);
                  gcc_assert (DECL_P (decl2));
                  install_var_local (decl2, ctx);
                  fixup_remapped_decl (decl2, ctx, false);
                }
              install_var_local (decl, ctx);
            }
          fixup_remapped_decl (decl, ctx,
                               OMP_CLAUSE_CODE (c) == OMP_CLAUSE_PRIVATE
                               && OMP_CLAUSE_PRIVATE_DEBUG (c));
          if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
              && OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c))
            scan_array_reductions = true;
          break;

        case OMP_CLAUSE_REDUCTION:
        case OMP_CLAUSE_IN_REDUCTION:
          decl = OMP_CLAUSE_DECL (c);
          if (TREE_CODE (decl) != MEM_REF)
            {
              if (is_variable_sized (decl))
                install_var_local (decl, ctx);
              fixup_remapped_decl (decl, ctx, false);
            }
          if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
            scan_array_reductions = true;
          break;

        case OMP_CLAUSE_TASK_REDUCTION:
          if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
            scan_array_reductions = true;
          break;

        case OMP_CLAUSE_SHARED:
          /* Ignore shared directives in teams construct inside of
             target construct.  */
          if (gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS
              && !is_host_teams_ctx (ctx))
            break;
          decl = OMP_CLAUSE_DECL (c);
          if (is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx)))
            break;
          if (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
            {
              if (is_global_var (maybe_lookup_decl_in_outer_ctx (decl,
                                                                 ctx->outer)))
                break;
              bool by_ref = use_pointer_for_field (decl, ctx);
              install_var_field (decl, by_ref, 11, ctx);
              break;
            }
          fixup_remapped_decl (decl, ctx, false);
          break;

        case OMP_CLAUSE_MAP:
          if (!is_gimple_omp_offloaded (ctx->stmt))
            break;
          decl = OMP_CLAUSE_DECL (c);
          if (DECL_P (decl)
              && ((OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_FIRSTPRIVATE_POINTER
                   && (OMP_CLAUSE_MAP_KIND (c)
                       != GOMP_MAP_FIRSTPRIVATE_REFERENCE))
                  || TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
              && is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx))
              && varpool_node::get_create (decl)->offloadable)
            break;
          if ((OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ATTACH
               || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_DETACH)
              && is_omp_target (ctx->stmt)
              && !is_gimple_omp_offloaded (ctx->stmt))
            break;
          if (DECL_P (decl))
            {
              if ((OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_POINTER
                   || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER)
                  && TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE
                  && !COMPLETE_TYPE_P (TREE_TYPE (decl)))
                {
                  tree new_decl = lookup_decl (decl, ctx);
                  TREE_TYPE (new_decl)
                    = remap_type (TREE_TYPE (decl), &ctx->cb);
                }
              else if (DECL_SIZE (decl)
                       && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
                {
                  tree decl2 = DECL_VALUE_EXPR (decl);
                  gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
                  decl2 = TREE_OPERAND (decl2, 0);
                  gcc_assert (DECL_P (decl2));
                  fixup_remapped_decl (decl2, ctx, false);
                  fixup_remapped_decl (decl, ctx, true);
                }
              else
                fixup_remapped_decl (decl, ctx, false);
            }
          break;

        case OMP_CLAUSE_COPYPRIVATE:
        case OMP_CLAUSE_COPYIN:
        case OMP_CLAUSE_DEFAULT:
        case OMP_CLAUSE_IF:
        case OMP_CLAUSE_NUM_THREADS:
        case OMP_CLAUSE_NUM_TEAMS:
        case OMP_CLAUSE_THREAD_LIMIT:
        case OMP_CLAUSE_DEVICE:
        case OMP_CLAUSE_SCHEDULE:
        case OMP_CLAUSE_DIST_SCHEDULE:
        case OMP_CLAUSE_NOWAIT:
        case OMP_CLAUSE_ORDERED:
        case OMP_CLAUSE_COLLAPSE:
        case OMP_CLAUSE_UNTIED:
        case OMP_CLAUSE_FINAL:
        case OMP_CLAUSE_MERGEABLE:
        case OMP_CLAUSE_PROC_BIND:
        case OMP_CLAUSE_SAFELEN:
        case OMP_CLAUSE_SIMDLEN:
        case OMP_CLAUSE_ALIGNED:
        case OMP_CLAUSE_DEPEND:
        case OMP_CLAUSE_ALLOCATE:
        case OMP_CLAUSE__LOOPTEMP_:
        case OMP_CLAUSE__REDUCTEMP_:
        case OMP_CLAUSE_TO:
        case OMP_CLAUSE_FROM:
        case OMP_CLAUSE_PRIORITY:
        case OMP_CLAUSE_GRAINSIZE:
        case OMP_CLAUSE_NUM_TASKS:
        case OMP_CLAUSE_THREADS:
        case OMP_CLAUSE_SIMD:
        case OMP_CLAUSE_NOGROUP:
        case OMP_CLAUSE_DEFAULTMAP:
        case OMP_CLAUSE_ORDER:
        case OMP_CLAUSE_BIND:
        case OMP_CLAUSE_USE_DEVICE_PTR:
        case OMP_CLAUSE_USE_DEVICE_ADDR:
        case OMP_CLAUSE_NONTEMPORAL:
        case OMP_CLAUSE_ASYNC:
        case OMP_CLAUSE_WAIT:
        case OMP_CLAUSE_NUM_GANGS:
        case OMP_CLAUSE_NUM_WORKERS:
        case OMP_CLAUSE_VECTOR_LENGTH:
        case OMP_CLAUSE_GANG:
        case OMP_CLAUSE_WORKER:
        case OMP_CLAUSE_VECTOR:
        case OMP_CLAUSE_INDEPENDENT:
        case OMP_CLAUSE_AUTO:
        case OMP_CLAUSE_SEQ:
        case OMP_CLAUSE_TILE:
        case OMP_CLAUSE__SIMT_:
        case OMP_CLAUSE_IF_PRESENT:
        case OMP_CLAUSE_FINALIZE:
        case OMP_CLAUSE__CONDTEMP_:
          break;

        case OMP_CLAUSE__CACHE_:
        default:
          gcc_unreachable ();
        }
    }

  gcc_checking_assert (!scan_array_reductions
                       || !is_gimple_omp_oacc (ctx->stmt));
  if (scan_array_reductions)
    {
      for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
        if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
             || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_IN_REDUCTION
             || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_TASK_REDUCTION)
            && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
          {
            scan_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c), ctx);
            scan_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c), ctx);
          }
        else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
                 && OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c))
          scan_omp (&OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c), ctx);
        else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
                 && OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c))
          scan_omp (&OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c), ctx);
    }
}

1799 /* Create a new name for the omp child function. Returns an identifier. */
1800
1801 static tree
1802 create_omp_child_function_name (bool task_copy)
1803 {
1804 return clone_function_name_numbered (current_function_decl,
1805 task_copy ? "_omp_cpyfn" : "_omp_fn");
1806 }
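
/* As an illustration (a sketch, not normative): for a function foo, the
   outlined bodies typically end up named foo._omp_fn.0, foo._omp_fn.1,
   ... and the task copy functions foo._omp_cpyfn.N; the numbering and
   the separator character come from clone_function_name_numbered and
   can vary by target.  */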
1807
1808 /* Return true if CTX may belong to offloaded code: either if the current
1809 function is offloaded, or if any enclosing context corresponds to a target
region. */
1810
1811 static bool
1812 omp_maybe_offloaded_ctx (omp_context *ctx)
1813 {
1814 if (cgraph_node::get (current_function_decl)->offloadable)
1815 return true;
1816 for (; ctx; ctx = ctx->outer)
1817 if (is_gimple_omp_offloaded (ctx->stmt))
1818 return true;
1819 return false;
1820 }
1821
1822 /* Build a decl for the omp child function. It will not contain a body
1823 yet, just the bare decl. */
1824
1825 static void
1826 create_omp_child_function (omp_context *ctx, bool task_copy)
1827 {
1828 tree decl, type, name, t;
1829
1830 name = create_omp_child_function_name (task_copy);
1831 if (task_copy)
1832 type = build_function_type_list (void_type_node, ptr_type_node,
1833 ptr_type_node, NULL_TREE);
1834 else
1835 type = build_function_type_list (void_type_node, ptr_type_node, NULL_TREE);
1836
1837 decl = build_decl (gimple_location (ctx->stmt), FUNCTION_DECL, name, type);
1838
1839 gcc_checking_assert (!is_gimple_omp_oacc (ctx->stmt)
1840 || !task_copy);
1841 if (!task_copy)
1842 ctx->cb.dst_fn = decl;
1843 else
1844 gimple_omp_task_set_copy_fn (ctx->stmt, decl);
1845
1846 TREE_STATIC (decl) = 1;
1847 TREE_USED (decl) = 1;
1848 DECL_ARTIFICIAL (decl) = 1;
1849 DECL_IGNORED_P (decl) = 0;
1850 TREE_PUBLIC (decl) = 0;
1851 DECL_UNINLINABLE (decl) = 1;
1852 DECL_EXTERNAL (decl) = 0;
1853 DECL_CONTEXT (decl) = NULL_TREE;
1854 DECL_INITIAL (decl) = make_node (BLOCK);
1855 BLOCK_SUPERCONTEXT (DECL_INITIAL (decl)) = decl;
1856 DECL_ATTRIBUTES (decl) = DECL_ATTRIBUTES (current_function_decl);
1857 /* Remove omp declare simd attribute from the new attributes. */
1858 if (tree a = lookup_attribute ("omp declare simd", DECL_ATTRIBUTES (decl)))
1859 {
1860 while (tree a2 = lookup_attribute ("omp declare simd", TREE_CHAIN (a)))
1861 a = a2;
1862 a = TREE_CHAIN (a);
1863 for (tree *p = &DECL_ATTRIBUTES (decl); *p != a;)
1864 if (is_attribute_p ("omp declare simd", get_attribute_name (*p)))
1865 *p = TREE_CHAIN (*p);
1866 else
1867 {
1868 tree chain = TREE_CHAIN (*p);
1869 *p = copy_node (*p);
1870 p = &TREE_CHAIN (*p);
1871 *p = chain;
1872 }
1873 }
1874 DECL_FUNCTION_SPECIFIC_OPTIMIZATION (decl)
1875 = DECL_FUNCTION_SPECIFIC_OPTIMIZATION (current_function_decl);
1876 DECL_FUNCTION_SPECIFIC_TARGET (decl)
1877 = DECL_FUNCTION_SPECIFIC_TARGET (current_function_decl);
1878 DECL_FUNCTION_VERSIONED (decl)
1879 = DECL_FUNCTION_VERSIONED (current_function_decl);
1880
1881 if (omp_maybe_offloaded_ctx (ctx))
1882 {
1883 cgraph_node::get_create (decl)->offloadable = 1;
1884 if (ENABLE_OFFLOADING)
1885 g->have_offload = true;
1886 }
1887
1888 if (cgraph_node::get_create (decl)->offloadable
1889 && !lookup_attribute ("omp declare target",
1890 DECL_ATTRIBUTES (current_function_decl)))
1891 {
1892 const char *target_attr = (is_gimple_omp_offloaded (ctx->stmt)
1893 ? "omp target entrypoint"
1894 : "omp declare target");
1895 DECL_ATTRIBUTES (decl)
1896 = tree_cons (get_identifier (target_attr),
1897 NULL_TREE, DECL_ATTRIBUTES (decl));
1898 }
1899
1900 t = build_decl (DECL_SOURCE_LOCATION (decl),
1901 RESULT_DECL, NULL_TREE, void_type_node);
1902 DECL_ARTIFICIAL (t) = 1;
1903 DECL_IGNORED_P (t) = 1;
1904 DECL_CONTEXT (t) = decl;
1905 DECL_RESULT (decl) = t;
1906
1907 tree data_name = get_identifier (".omp_data_i");
1908 t = build_decl (DECL_SOURCE_LOCATION (decl), PARM_DECL, data_name,
1909 ptr_type_node);
1910 DECL_ARTIFICIAL (t) = 1;
1911 DECL_NAMELESS (t) = 1;
1912 DECL_ARG_TYPE (t) = ptr_type_node;
1913 DECL_CONTEXT (t) = current_function_decl;
1914 TREE_USED (t) = 1;
1915 TREE_READONLY (t) = 1;
1916 DECL_ARGUMENTS (decl) = t;
1917 if (!task_copy)
1918 ctx->receiver_decl = t;
1919 else
1920 {
1921 t = build_decl (DECL_SOURCE_LOCATION (decl),
1922 PARM_DECL, get_identifier (".omp_data_o"),
1923 ptr_type_node);
1924 DECL_ARTIFICIAL (t) = 1;
1925 DECL_NAMELESS (t) = 1;
1926 DECL_ARG_TYPE (t) = ptr_type_node;
1927 DECL_CONTEXT (t) = current_function_decl;
1928 TREE_USED (t) = 1;
1929 TREE_ADDRESSABLE (t) = 1;
1930 DECL_CHAIN (t) = DECL_ARGUMENTS (decl);
1931 DECL_ARGUMENTS (decl) = t;
1932 }
1933
1934 /* Allocate memory for the function structure. The call to
1935 allocate_struct_function clobbers CFUN, so we need to restore
1936 it afterward. */
1937 push_struct_function (decl);
1938 cfun->function_end_locus = gimple_location (ctx->stmt);
1939 init_tree_ssa (cfun);
1940 pop_cfun ();
1941 }
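
/* The decl built above corresponds roughly to (a sketch, for the
   non-task-copy case):

     static void foo._omp_fn.0 (void *.omp_data_i);

   i.e. a static, uninlinable function that receives the data-sharing
   record through .omp_data_i.  The task copy variant additionally
   takes the destination block as its first argument,
   void *.omp_data_o.  */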
1942
1943 /* Callback for walk_gimple_seq. Check whether a combined parallel
1944 contains an OMP_FOR for which gimple_omp_for_combined_into_p is true. */
1945
1946 tree
1947 omp_find_combined_for (gimple_stmt_iterator *gsi_p,
1948 bool *handled_ops_p,
1949 struct walk_stmt_info *wi)
1950 {
1951 gimple *stmt = gsi_stmt (*gsi_p);
1952
1953 *handled_ops_p = true;
1954 switch (gimple_code (stmt))
1955 {
1956 WALK_SUBSTMTS;
1957
1958 case GIMPLE_OMP_FOR:
1959 if (gimple_omp_for_combined_into_p (stmt)
1960 && gimple_omp_for_kind (stmt)
1961 == *(const enum gf_mask *) (wi->info))
1962 {
1963 wi->info = stmt;
1964 return integer_zero_node;
1965 }
1966 break;
1967 default:
1968 break;
1969 }
1970 return NULL;
1971 }
1972
1973 /* Add _LOOPTEMP_/_REDUCTEMP_ clauses on OpenMP parallel or task. */
1974
1975 static void
1976 add_taskreg_looptemp_clauses (enum gf_mask msk, gimple *stmt,
1977 omp_context *outer_ctx)
1978 {
1979 struct walk_stmt_info wi;
1980
1981 memset (&wi, 0, sizeof (wi));
1982 wi.val_only = true;
1983 wi.info = (void *) &msk;
1984 walk_gimple_seq (gimple_omp_body (stmt), omp_find_combined_for, NULL, &wi);
1985 if (wi.info != (void *) &msk)
1986 {
1987 gomp_for *for_stmt = as_a <gomp_for *> ((gimple *) wi.info);
1988 struct omp_for_data fd;
1989 omp_extract_for_data (for_stmt, &fd, NULL);
1990 /* We need two temporaries of fd.iter_type (istart/iend)
1991 and then (fd.collapse - 1) temporaries with the same
1992 type for count2 ... countN-1 vars if not constant. */
1993 size_t count = 2, i;
1994 tree type = fd.iter_type;
1995 if (fd.collapse > 1
1996 && TREE_CODE (fd.loop.n2) != INTEGER_CST)
1997 {
1998 count += fd.collapse - 1;
1999 /* If there are lastprivate clauses on the inner
2000 GIMPLE_OMP_FOR, add one more temporary for the total number
2001 of iterations (product of count1 ... countN-1). */
2002 if (omp_find_clause (gimple_omp_for_clauses (for_stmt),
2003 OMP_CLAUSE_LASTPRIVATE)
2004 || (msk == GF_OMP_FOR_KIND_FOR
2005 && omp_find_clause (gimple_omp_parallel_clauses (stmt),
2006 OMP_CLAUSE_LASTPRIVATE)))
2007 {
2008 tree temp = create_tmp_var (type);
2009 tree c = build_omp_clause (UNKNOWN_LOCATION,
2010 OMP_CLAUSE__LOOPTEMP_);
2011 insert_decl_map (&outer_ctx->cb, temp, temp);
2012 OMP_CLAUSE_DECL (c) = temp;
2013 OMP_CLAUSE_CHAIN (c) = gimple_omp_taskreg_clauses (stmt);
2014 gimple_omp_taskreg_set_clauses (stmt, c);
2015 }
2016 if (fd.non_rect
2017 && fd.last_nonrect == fd.first_nonrect + 1)
2018 if (tree v = gimple_omp_for_index (for_stmt, fd.last_nonrect))
2019 if (!TYPE_UNSIGNED (TREE_TYPE (v)))
2020 {
2021 v = gimple_omp_for_index (for_stmt, fd.first_nonrect);
2022 tree type2 = TREE_TYPE (v);
2023 count++;
2024 for (i = 0; i < 3; i++)
2025 {
2026 tree temp = create_tmp_var (type2);
2027 tree c = build_omp_clause (UNKNOWN_LOCATION,
2028 OMP_CLAUSE__LOOPTEMP_);
2029 insert_decl_map (&outer_ctx->cb, temp, temp);
2030 OMP_CLAUSE_DECL (c) = temp;
2031 OMP_CLAUSE_CHAIN (c) = gimple_omp_taskreg_clauses (stmt);
2032 gimple_omp_taskreg_set_clauses (stmt, c);
2033 }
2034 }
2035 }
2036 for (i = 0; i < count; i++)
2037 {
2038 tree temp = create_tmp_var (type);
2039 tree c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__LOOPTEMP_);
2040 insert_decl_map (&outer_ctx->cb, temp, temp);
2041 OMP_CLAUSE_DECL (c) = temp;
2042 OMP_CLAUSE_CHAIN (c) = gimple_omp_taskreg_clauses (stmt);
2043 gimple_omp_taskreg_set_clauses (stmt, c);
2044 }
2045 }
2046 if (msk == GF_OMP_FOR_KIND_TASKLOOP
2047 && omp_find_clause (gimple_omp_task_clauses (stmt),
2048 OMP_CLAUSE_REDUCTION))
2049 {
2050 tree type = build_pointer_type (pointer_sized_int_node);
2051 tree temp = create_tmp_var (type);
2052 tree c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__REDUCTEMP_);
2053 insert_decl_map (&outer_ctx->cb, temp, temp);
2054 OMP_CLAUSE_DECL (c) = temp;
2055 OMP_CLAUSE_CHAIN (c) = gimple_omp_task_clauses (stmt);
2056 gimple_omp_task_set_clauses (stmt, c);
2057 }
2058 }
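
/* For example (a sketch): given a combined construct such as

     #pragma omp parallel for lastprivate (x) collapse (2)
     for (i = 0; i < a; i++)
       for (j = 0; j < b; j++)
         ...

   with non-constant bounds, this adds _looptemp_ clauses for istart0
   and iend0, one per additional collapsed loop, and one more for the
   total iteration count that the lastprivate handling needs.  */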
2059
2060 /* Scan an OpenMP parallel directive. */
2061
2062 static void
2063 scan_omp_parallel (gimple_stmt_iterator *gsi, omp_context *outer_ctx)
2064 {
2065 omp_context *ctx;
2066 tree name;
2067 gomp_parallel *stmt = as_a <gomp_parallel *> (gsi_stmt (*gsi));
2068
2069 /* Ignore parallel directives with empty bodies, unless there
2070 are copyin clauses. */
2071 if (optimize > 0
2072 && empty_body_p (gimple_omp_body (stmt))
2073 && omp_find_clause (gimple_omp_parallel_clauses (stmt),
2074 OMP_CLAUSE_COPYIN) == NULL)
2075 {
2076 gsi_replace (gsi, gimple_build_nop (), false);
2077 return;
2078 }
2079
2080 if (gimple_omp_parallel_combined_p (stmt))
2081 add_taskreg_looptemp_clauses (GF_OMP_FOR_KIND_FOR, stmt, outer_ctx);
2082 for (tree c = omp_find_clause (gimple_omp_parallel_clauses (stmt),
2083 OMP_CLAUSE_REDUCTION);
2084 c; c = omp_find_clause (OMP_CLAUSE_CHAIN (c), OMP_CLAUSE_REDUCTION))
2085 if (OMP_CLAUSE_REDUCTION_TASK (c))
2086 {
2087 tree type = build_pointer_type (pointer_sized_int_node);
2088 tree temp = create_tmp_var (type);
2089 tree c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__REDUCTEMP_);
2090 if (outer_ctx)
2091 insert_decl_map (&outer_ctx->cb, temp, temp);
2092 OMP_CLAUSE_DECL (c) = temp;
2093 OMP_CLAUSE_CHAIN (c) = gimple_omp_parallel_clauses (stmt);
2094 gimple_omp_parallel_set_clauses (stmt, c);
2095 break;
2096 }
2097 else if (OMP_CLAUSE_CHAIN (c) == NULL_TREE)
2098 break;
2099
2100 ctx = new_omp_context (stmt, outer_ctx);
2101 taskreg_contexts.safe_push (ctx);
2102 if (taskreg_nesting_level > 1)
2103 ctx->is_nested = true;
2104 ctx->field_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
2105 ctx->record_type = lang_hooks.types.make_type (RECORD_TYPE);
2106 name = create_tmp_var_name (".omp_data_s");
2107 name = build_decl (gimple_location (stmt),
2108 TYPE_DECL, name, ctx->record_type);
2109 DECL_ARTIFICIAL (name) = 1;
2110 DECL_NAMELESS (name) = 1;
2111 TYPE_NAME (ctx->record_type) = name;
2112 TYPE_ARTIFICIAL (ctx->record_type) = 1;
2113 create_omp_child_function (ctx, false);
2114 gimple_omp_parallel_set_child_fn (stmt, ctx->cb.dst_fn);
2115
2116 scan_sharing_clauses (gimple_omp_parallel_clauses (stmt), ctx);
2117 scan_omp (gimple_omp_body_ptr (stmt), ctx);
2118
2119 if (TYPE_FIELDS (ctx->record_type) == NULL)
2120 ctx->record_type = ctx->receiver_decl = NULL;
2121 }
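
/* For example (a sketch):

     #pragma omp parallel shared (a) firstprivate (b)

   gets a context whose .omp_data_s record type receives one field per
   variable that scan_sharing_clauses decides must be communicated to
   the freshly created child function; if no fields turn out to be
   needed, the record type and receiver decl are dropped again.  */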
2122
2123 /* Scan an OpenMP task directive. */
2124
2125 static void
2126 scan_omp_task (gimple_stmt_iterator *gsi, omp_context *outer_ctx)
2127 {
2128 omp_context *ctx;
2129 tree name, t;
2130 gomp_task *stmt = as_a <gomp_task *> (gsi_stmt (*gsi));
2131
2132 /* Ignore task directives with empty bodies, unless they have a depend
2133 clause. */
2134 if (optimize > 0
2135 && gimple_omp_body (stmt)
2136 && empty_body_p (gimple_omp_body (stmt))
2137 && !omp_find_clause (gimple_omp_task_clauses (stmt), OMP_CLAUSE_DEPEND))
2138 {
2139 gsi_replace (gsi, gimple_build_nop (), false);
2140 return;
2141 }
2142
2143 if (gimple_omp_task_taskloop_p (stmt))
2144 add_taskreg_looptemp_clauses (GF_OMP_FOR_KIND_TASKLOOP, stmt, outer_ctx);
2145
2146 ctx = new_omp_context (stmt, outer_ctx);
2147
2148 if (gimple_omp_task_taskwait_p (stmt))
2149 {
2150 scan_sharing_clauses (gimple_omp_task_clauses (stmt), ctx);
2151 return;
2152 }
2153
2154 taskreg_contexts.safe_push (ctx);
2155 if (taskreg_nesting_level > 1)
2156 ctx->is_nested = true;
2157 ctx->field_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
2158 ctx->record_type = lang_hooks.types.make_type (RECORD_TYPE);
2159 name = create_tmp_var_name (".omp_data_s");
2160 name = build_decl (gimple_location (stmt),
2161 TYPE_DECL, name, ctx->record_type);
2162 DECL_ARTIFICIAL (name) = 1;
2163 DECL_NAMELESS (name) = 1;
2164 TYPE_NAME (ctx->record_type) = name;
2165 TYPE_ARTIFICIAL (ctx->record_type) = 1;
2166 create_omp_child_function (ctx, false);
2167 gimple_omp_task_set_child_fn (stmt, ctx->cb.dst_fn);
2168
2169 scan_sharing_clauses (gimple_omp_task_clauses (stmt), ctx);
2170
2171 if (ctx->srecord_type)
2172 {
2173 name = create_tmp_var_name (".omp_data_a");
2174 name = build_decl (gimple_location (stmt),
2175 TYPE_DECL, name, ctx->srecord_type);
2176 DECL_ARTIFICIAL (name) = 1;
2177 DECL_NAMELESS (name) = 1;
2178 TYPE_NAME (ctx->srecord_type) = name;
2179 TYPE_ARTIFICIAL (ctx->srecord_type) = 1;
2180 create_omp_child_function (ctx, true);
2181 }
2182
2183 scan_omp (gimple_omp_body_ptr (stmt), ctx);
2184
2185 if (TYPE_FIELDS (ctx->record_type) == NULL)
2186 {
2187 ctx->record_type = ctx->receiver_decl = NULL;
2188 t = build_int_cst (long_integer_type_node, 0);
2189 gimple_omp_task_set_arg_size (stmt, t);
2190 t = build_int_cst (long_integer_type_node, 1);
2191 gimple_omp_task_set_arg_align (stmt, t);
2192 }
2193 }
2194
2195 /* Helper function for finish_taskreg_scan, called through walk_tree.
2196 If maybe_lookup_decl_in_outer_ctx returns a different tree for some
2197 variable, replace it in the expression. */
2198
2199 static tree
2200 finish_taskreg_remap (tree *tp, int *walk_subtrees, void *data)
2201 {
2202 if (VAR_P (*tp))
2203 {
2204 omp_context *ctx = (omp_context *) data;
2205 tree t = maybe_lookup_decl_in_outer_ctx (*tp, ctx);
2206 if (t != *tp)
2207 {
2208 if (DECL_HAS_VALUE_EXPR_P (t))
2209 t = unshare_expr (DECL_VALUE_EXPR (t));
2210 *tp = t;
2211 }
2212 *walk_subtrees = 0;
2213 }
2214 else if (IS_TYPE_OR_DECL_P (*tp))
2215 *walk_subtrees = 0;
2216 return NULL_TREE;
2217 }
2218
2219 /* If any decls have been made addressable during scan_omp,
2220 adjust their fields if needed, and lay out the record types
2221 of the parallel/task constructs. */
2222
2223 static void
2224 finish_taskreg_scan (omp_context *ctx)
2225 {
2226 if (ctx->record_type == NULL_TREE)
2227 return;
2228
2229 /* If any task_shared_vars were needed, verify for all
2230 OMP_CLAUSE_SHARED clauses on GIMPLE_OMP_{PARALLEL,TASK,TEAMS}
2231 statements whether use_pointer_for_field has changed
2232 because of that; if it has, update the field types now. */
2233 if (task_shared_vars)
2234 {
2235 tree c;
2236
2237 for (c = gimple_omp_taskreg_clauses (ctx->stmt);
2238 c; c = OMP_CLAUSE_CHAIN (c))
2239 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED
2240 && !OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
2241 {
2242 tree decl = OMP_CLAUSE_DECL (c);
2243
2244 /* Global variables don't need to be copied,
2245 the receiver side will use them directly. */
2246 if (is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx)))
2247 continue;
2248 if (!bitmap_bit_p (task_shared_vars, DECL_UID (decl))
2249 || !use_pointer_for_field (decl, ctx))
2250 continue;
2251 tree field = lookup_field (decl, ctx);
2252 if (TREE_CODE (TREE_TYPE (field)) == POINTER_TYPE
2253 && TREE_TYPE (TREE_TYPE (field)) == TREE_TYPE (decl))
2254 continue;
2255 TREE_TYPE (field) = build_pointer_type (TREE_TYPE (decl));
2256 TREE_THIS_VOLATILE (field) = 0;
2257 DECL_USER_ALIGN (field) = 0;
2258 SET_DECL_ALIGN (field, TYPE_ALIGN (TREE_TYPE (field)));
2259 if (TYPE_ALIGN (ctx->record_type) < DECL_ALIGN (field))
2260 SET_TYPE_ALIGN (ctx->record_type, DECL_ALIGN (field));
2261 if (ctx->srecord_type)
2262 {
2263 tree sfield = lookup_sfield (decl, ctx);
2264 TREE_TYPE (sfield) = TREE_TYPE (field);
2265 TREE_THIS_VOLATILE (sfield) = 0;
2266 DECL_USER_ALIGN (sfield) = 0;
2267 SET_DECL_ALIGN (sfield, DECL_ALIGN (field));
2268 if (TYPE_ALIGN (ctx->srecord_type) < DECL_ALIGN (sfield))
2269 SET_TYPE_ALIGN (ctx->srecord_type, DECL_ALIGN (sfield));
2270 }
2271 }
2272 }
2273
2274 if (gimple_code (ctx->stmt) == GIMPLE_OMP_PARALLEL)
2275 {
2276 tree clauses = gimple_omp_parallel_clauses (ctx->stmt);
2277 tree c = omp_find_clause (clauses, OMP_CLAUSE__REDUCTEMP_);
2278 if (c)
2279 {
2280 /* Move the _reductemp_ clause first. GOMP_parallel_reductions
2281 expects to find it at the start of data. */
2282 tree f = lookup_field (OMP_CLAUSE_DECL (c), ctx);
2283 tree *p = &TYPE_FIELDS (ctx->record_type);
2284 while (*p)
2285 if (*p == f)
2286 {
2287 *p = DECL_CHAIN (*p);
2288 break;
2289 }
2290 else
2291 p = &DECL_CHAIN (*p);
2292 DECL_CHAIN (f) = TYPE_FIELDS (ctx->record_type);
2293 TYPE_FIELDS (ctx->record_type) = f;
2294 }
2295 layout_type (ctx->record_type);
2296 fixup_child_record_type (ctx);
2297 }
2298 else if (gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS)
2299 {
2300 layout_type (ctx->record_type);
2301 fixup_child_record_type (ctx);
2302 }
2303 else
2304 {
2305 location_t loc = gimple_location (ctx->stmt);
2306 tree *p, vla_fields = NULL_TREE, *q = &vla_fields;
2307 /* Move VLA fields to the end. */
2308 p = &TYPE_FIELDS (ctx->record_type);
2309 while (*p)
2310 if (!TYPE_SIZE_UNIT (TREE_TYPE (*p))
2311 || ! TREE_CONSTANT (TYPE_SIZE_UNIT (TREE_TYPE (*p))))
2312 {
2313 *q = *p;
2314 *p = TREE_CHAIN (*p);
2315 TREE_CHAIN (*q) = NULL_TREE;
2316 q = &TREE_CHAIN (*q);
2317 }
2318 else
2319 p = &DECL_CHAIN (*p);
2320 *p = vla_fields;
2321 if (gimple_omp_task_taskloop_p (ctx->stmt))
2322 {
2323 /* Move the fields corresponding to the first and second _looptemp_
2324 clauses first. These are filled by GOMP_taskloop
2325 and thus need to be in specific positions. */
2326 tree clauses = gimple_omp_task_clauses (ctx->stmt);
2327 tree c1 = omp_find_clause (clauses, OMP_CLAUSE__LOOPTEMP_);
2328 tree c2 = omp_find_clause (OMP_CLAUSE_CHAIN (c1),
2329 OMP_CLAUSE__LOOPTEMP_);
2330 tree c3 = omp_find_clause (clauses, OMP_CLAUSE__REDUCTEMP_);
2331 tree f1 = lookup_field (OMP_CLAUSE_DECL (c1), ctx);
2332 tree f2 = lookup_field (OMP_CLAUSE_DECL (c2), ctx);
2333 tree f3 = c3 ? lookup_field (OMP_CLAUSE_DECL (c3), ctx) : NULL_TREE;
2334 p = &TYPE_FIELDS (ctx->record_type);
2335 while (*p)
2336 if (*p == f1 || *p == f2 || *p == f3)
2337 *p = DECL_CHAIN (*p);
2338 else
2339 p = &DECL_CHAIN (*p);
2340 DECL_CHAIN (f1) = f2;
2341 if (c3)
2342 {
2343 DECL_CHAIN (f2) = f3;
2344 DECL_CHAIN (f3) = TYPE_FIELDS (ctx->record_type);
2345 }
2346 else
2347 DECL_CHAIN (f2) = TYPE_FIELDS (ctx->record_type);
2348 TYPE_FIELDS (ctx->record_type) = f1;
2349 if (ctx->srecord_type)
2350 {
2351 f1 = lookup_sfield (OMP_CLAUSE_DECL (c1), ctx);
2352 f2 = lookup_sfield (OMP_CLAUSE_DECL (c2), ctx);
2353 if (c3)
2354 f3 = lookup_sfield (OMP_CLAUSE_DECL (c3), ctx);
2355 p = &TYPE_FIELDS (ctx->srecord_type);
2356 while (*p)
2357 if (*p == f1 || *p == f2 || *p == f3)
2358 *p = DECL_CHAIN (*p);
2359 else
2360 p = &DECL_CHAIN (*p);
2361 DECL_CHAIN (f1) = f2;
2362 DECL_CHAIN (f2) = TYPE_FIELDS (ctx->srecord_type);
2363 if (c3)
2364 {
2365 DECL_CHAIN (f2) = f3;
2366 DECL_CHAIN (f3) = TYPE_FIELDS (ctx->srecord_type);
2367 }
2368 else
2369 DECL_CHAIN (f2) = TYPE_FIELDS (ctx->srecord_type);
2370 TYPE_FIELDS (ctx->srecord_type) = f1;
2371 }
2372 }
2373 layout_type (ctx->record_type);
2374 fixup_child_record_type (ctx);
2375 if (ctx->srecord_type)
2376 layout_type (ctx->srecord_type);
2377 tree t = fold_convert_loc (loc, long_integer_type_node,
2378 TYPE_SIZE_UNIT (ctx->record_type));
2379 if (TREE_CODE (t) != INTEGER_CST)
2380 {
2381 t = unshare_expr (t);
2382 walk_tree (&t, finish_taskreg_remap, ctx, NULL);
2383 }
2384 gimple_omp_task_set_arg_size (ctx->stmt, t);
2385 t = build_int_cst (long_integer_type_node,
2386 TYPE_ALIGN_UNIT (ctx->record_type));
2387 gimple_omp_task_set_arg_align (ctx->stmt, t);
2388 }
2389 }
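
/* E.g. for a taskloop the resulting layout is, schematically
   (a sketch):

     .omp_data_s = { <first _looptemp_>, <second _looptemp_>,
                     [<_reductemp_>,] <other fields>,
                     <VLA-sized fields last> };

   so that GOMP_taskloop can fill the leading start/end temporaries at
   known offsets.  */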
2390
2391 /* Find the enclosing offload context. */
2392
2393 static omp_context *
2394 enclosing_target_ctx (omp_context *ctx)
2395 {
2396 for (; ctx; ctx = ctx->outer)
2397 if (gimple_code (ctx->stmt) == GIMPLE_OMP_TARGET)
2398 break;
2399
2400 return ctx;
2401 }
2402
2403 /* Return true if CTX is part of an OpenACC kernels region. */
2404
2405 static bool
2406 ctx_in_oacc_kernels_region (omp_context *ctx)
2407 {
2408 for (; ctx != NULL; ctx = ctx->outer)
2409 {
2410 gimple *stmt = ctx->stmt;
2411 if (gimple_code (stmt) == GIMPLE_OMP_TARGET
2412 && gimple_omp_target_kind (stmt) == GF_OMP_TARGET_KIND_OACC_KERNELS)
2413 return true;
2414 }
2415
2416 return false;
2417 }
2418
2419 /* Check the parallelism clauses inside a kernels region.
2420 Until kernels handling moves to use the same loop indirection
2421 scheme as parallel, we need to do this checking early. */
2422
2423 static unsigned
2424 check_oacc_kernel_gwv (gomp_for *stmt, omp_context *ctx)
2425 {
2426 bool checking = true;
2427 unsigned outer_mask = 0;
2428 unsigned this_mask = 0;
2429 bool has_seq = false, has_auto = false;
2430
2431 if (ctx->outer)
2432 outer_mask = check_oacc_kernel_gwv (NULL, ctx->outer);
2433 if (!stmt)
2434 {
2435 checking = false;
2436 if (gimple_code (ctx->stmt) != GIMPLE_OMP_FOR)
2437 return outer_mask;
2438 stmt = as_a <gomp_for *> (ctx->stmt);
2439 }
2440
2441 for (tree c = gimple_omp_for_clauses (stmt); c; c = OMP_CLAUSE_CHAIN (c))
2442 {
2443 switch (OMP_CLAUSE_CODE (c))
2444 {
2445 case OMP_CLAUSE_GANG:
2446 this_mask |= GOMP_DIM_MASK (GOMP_DIM_GANG);
2447 break;
2448 case OMP_CLAUSE_WORKER:
2449 this_mask |= GOMP_DIM_MASK (GOMP_DIM_WORKER);
2450 break;
2451 case OMP_CLAUSE_VECTOR:
2452 this_mask |= GOMP_DIM_MASK (GOMP_DIM_VECTOR);
2453 break;
2454 case OMP_CLAUSE_SEQ:
2455 has_seq = true;
2456 break;
2457 case OMP_CLAUSE_AUTO:
2458 has_auto = true;
2459 break;
2460 default:
2461 break;
2462 }
2463 }
2464
2465 if (checking)
2466 {
2467 if (has_seq && (this_mask || has_auto))
2468 error_at (gimple_location (stmt), "%<seq%> overrides other"
2469 " OpenACC loop specifiers");
2470 else if (has_auto && this_mask)
2471 error_at (gimple_location (stmt), "%<auto%> conflicts with other"
2472 " OpenACC loop specifiers");
2473
2474 if (this_mask & outer_mask)
2475 error_at (gimple_location (stmt), "inner loop uses same"
2476 " OpenACC parallelism as containing loop");
2477 }
2478
2479 return outer_mask | this_mask;
2480 }
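
/* For instance (a sketch): inside a kernels region,

     #pragma acc loop gang
     for (...)
       #pragma acc loop gang vector
       for (...)

   the inner loop is diagnosed with "inner loop uses same OpenACC
   parallelism as containing loop", and combining seq with gang,
   worker, vector or auto on one loop is likewise rejected.  */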
2481
2482 /* Scan a GIMPLE_OMP_FOR. */
2483
2484 static omp_context *
2485 scan_omp_for (gomp_for *stmt, omp_context *outer_ctx)
2486 {
2487 omp_context *ctx;
2488 size_t i;
2489 tree clauses = gimple_omp_for_clauses (stmt);
2490
2491 ctx = new_omp_context (stmt, outer_ctx);
2492
2493 if (is_gimple_omp_oacc (stmt))
2494 {
2495 omp_context *tgt = enclosing_target_ctx (outer_ctx);
2496
2497 if (!tgt || is_oacc_parallel_or_serial (tgt))
2498 for (tree c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
2499 {
2500 tree c_op0;
2501 switch (OMP_CLAUSE_CODE (c))
2502 {
2503 case OMP_CLAUSE_GANG:
2504 c_op0 = OMP_CLAUSE_GANG_EXPR (c);
2505 break;
2506
2507 case OMP_CLAUSE_WORKER:
2508 c_op0 = OMP_CLAUSE_WORKER_EXPR (c);
2509 break;
2510
2511 case OMP_CLAUSE_VECTOR:
2512 c_op0 = OMP_CLAUSE_VECTOR_EXPR (c);
2513 break;
2514
2515 default:
2516 continue;
2517 }
2518
2519 if (c_op0)
2520 {
2521 error_at (OMP_CLAUSE_LOCATION (c),
2522 "argument not permitted on %qs clause",
2523 omp_clause_code_name[OMP_CLAUSE_CODE (c)]);
2524 if (tgt)
2525 inform (gimple_location (tgt->stmt),
2526 "enclosing parent compute construct");
2527 else if (oacc_get_fn_attrib (current_function_decl))
2528 inform (DECL_SOURCE_LOCATION (current_function_decl),
2529 "enclosing routine");
2530 else
2531 gcc_unreachable ();
2532 }
2533 }
2534
2535 if (tgt && is_oacc_kernels (tgt))
2536 check_oacc_kernel_gwv (stmt, ctx);
2537
2538 /* Collect all variables named in reductions on this loop. Ensure
2539 that, if this loop has a reduction on some variable v, and there is
2540 a reduction on v somewhere in an outer context, then there is a
2541 reduction on v on all intervening loops as well. */
2542 tree local_reduction_clauses = NULL;
2543 for (tree c = gimple_omp_for_clauses (stmt); c; c = OMP_CLAUSE_CHAIN (c))
2544 {
2545 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION)
2546 local_reduction_clauses
2547 = tree_cons (NULL, c, local_reduction_clauses);
2548 }
2549 if (ctx->outer_reduction_clauses == NULL && ctx->outer != NULL)
2550 ctx->outer_reduction_clauses
2551 = chainon (unshare_expr (ctx->outer->local_reduction_clauses),
2552 ctx->outer->outer_reduction_clauses);
2553 tree outer_reduction_clauses = ctx->outer_reduction_clauses;
2554 tree local_iter = local_reduction_clauses;
2555 for (; local_iter; local_iter = TREE_CHAIN (local_iter))
2556 {
2557 tree local_clause = TREE_VALUE (local_iter);
2558 tree local_var = OMP_CLAUSE_DECL (local_clause);
2559 tree_code local_op = OMP_CLAUSE_REDUCTION_CODE (local_clause);
2560 bool have_outer_reduction = false;
2561 tree ctx_iter = outer_reduction_clauses;
2562 for (; ctx_iter; ctx_iter = TREE_CHAIN (ctx_iter))
2563 {
2564 tree outer_clause = TREE_VALUE (ctx_iter);
2565 tree outer_var = OMP_CLAUSE_DECL (outer_clause);
2566 tree_code outer_op = OMP_CLAUSE_REDUCTION_CODE (outer_clause);
2567 if (outer_var == local_var && outer_op != local_op)
2568 {
2569 warning_at (OMP_CLAUSE_LOCATION (local_clause), 0,
2570 "conflicting reduction operations for %qE",
2571 local_var);
2572 inform (OMP_CLAUSE_LOCATION (outer_clause),
2573 "location of the previous reduction for %qE",
2574 outer_var);
2575 }
2576 if (outer_var == local_var)
2577 {
2578 have_outer_reduction = true;
2579 break;
2580 }
2581 }
2582 if (have_outer_reduction)
2583 {
2584 /* There is a reduction on outer_var both on this loop and on
2585 some enclosing loop. Walk up the context tree until such a
2586 loop with a reduction on outer_var is found, and complain
2587 about all intervening loops that do not have such a
2588 reduction. */
2589 struct omp_context *curr_loop = ctx->outer;
2590 bool found = false;
2591 while (curr_loop != NULL)
2592 {
2593 tree curr_iter = curr_loop->local_reduction_clauses;
2594 for (; curr_iter; curr_iter = TREE_CHAIN (curr_iter))
2595 {
2596 tree curr_clause = TREE_VALUE (curr_iter);
2597 tree curr_var = OMP_CLAUSE_DECL (curr_clause);
2598 if (curr_var == local_var)
2599 {
2600 found = true;
2601 break;
2602 }
2603 }
2604 if (!found)
2605 warning_at (gimple_location (curr_loop->stmt), 0,
2606 "nested loop in reduction needs "
2607 "reduction clause for %qE",
2608 local_var);
2609 else
2610 break;
2611 curr_loop = curr_loop->outer;
2612 }
2613 }
2614 }
2615 ctx->local_reduction_clauses = local_reduction_clauses;
2616 ctx->outer_reduction_clauses
2617 = chainon (unshare_expr (ctx->local_reduction_clauses),
2618 ctx->outer_reduction_clauses);
2619
2620 if (tgt && is_oacc_kernels (tgt))
2621 {
2622 /* Strip out reductions, as they are not handled yet. */
2623 tree *prev_ptr = &clauses;
2624
2625 while (tree probe = *prev_ptr)
2626 {
2627 tree *next_ptr = &OMP_CLAUSE_CHAIN (probe);
2628
2629 if (OMP_CLAUSE_CODE (probe) == OMP_CLAUSE_REDUCTION)
2630 *prev_ptr = *next_ptr;
2631 else
2632 prev_ptr = next_ptr;
2633 }
2634
2635 gimple_omp_for_set_clauses (stmt, clauses);
2636 }
2637 }
2638
2639 scan_sharing_clauses (clauses, ctx);
2640
2641 scan_omp (gimple_omp_for_pre_body_ptr (stmt), ctx);
2642 for (i = 0; i < gimple_omp_for_collapse (stmt); i++)
2643 {
2644 scan_omp_op (gimple_omp_for_index_ptr (stmt, i), ctx);
2645 scan_omp_op (gimple_omp_for_initial_ptr (stmt, i), ctx);
2646 scan_omp_op (gimple_omp_for_final_ptr (stmt, i), ctx);
2647 scan_omp_op (gimple_omp_for_incr_ptr (stmt, i), ctx);
2648 }
2649 scan_omp (gimple_omp_body_ptr (stmt), ctx);
2650 return ctx;
2651 }
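
/* The reduction-consistency checking above warns, for example
   (a sketch), about the middle loop in

     #pragma acc loop reduction (+:v)
     for (...)
       #pragma acc loop
       for (...)
         #pragma acc loop reduction (+:v)
         for (...)

   ("nested loop in reduction needs reduction clause for v"), and about
   operator mismatches such as an inner reduction (*:v) underneath an
   outer reduction (+:v).  */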
2652
2653 /* Duplicate #pragma omp simd, one for SIMT, another one for SIMD. */
2654
2655 static void
2656 scan_omp_simd (gimple_stmt_iterator *gsi, gomp_for *stmt,
2657 omp_context *outer_ctx)
2658 {
2659 gbind *bind = gimple_build_bind (NULL, NULL, NULL);
2660 gsi_replace (gsi, bind, false);
2661 gimple_seq seq = NULL;
2662 gimple *g = gimple_build_call_internal (IFN_GOMP_USE_SIMT, 0);
2663 tree cond = create_tmp_var_raw (integer_type_node);
2664 DECL_CONTEXT (cond) = current_function_decl;
2665 DECL_SEEN_IN_BIND_EXPR_P (cond) = 1;
2666 gimple_bind_set_vars (bind, cond);
2667 gimple_call_set_lhs (g, cond);
2668 gimple_seq_add_stmt (&seq, g);
2669 tree lab1 = create_artificial_label (UNKNOWN_LOCATION);
2670 tree lab2 = create_artificial_label (UNKNOWN_LOCATION);
2671 tree lab3 = create_artificial_label (UNKNOWN_LOCATION);
2672 g = gimple_build_cond (NE_EXPR, cond, integer_zero_node, lab1, lab2);
2673 gimple_seq_add_stmt (&seq, g);
2674 g = gimple_build_label (lab1);
2675 gimple_seq_add_stmt (&seq, g);
2676 gimple_seq new_seq = copy_gimple_seq_and_replace_locals (stmt);
2677 gomp_for *new_stmt = as_a <gomp_for *> (new_seq);
2678 tree clause = build_omp_clause (gimple_location (stmt), OMP_CLAUSE__SIMT_);
2679 OMP_CLAUSE_CHAIN (clause) = gimple_omp_for_clauses (new_stmt);
2680 gimple_omp_for_set_clauses (new_stmt, clause);
2681 gimple_seq_add_stmt (&seq, new_stmt);
2682 g = gimple_build_goto (lab3);
2683 gimple_seq_add_stmt (&seq, g);
2684 g = gimple_build_label (lab2);
2685 gimple_seq_add_stmt (&seq, g);
2686 gimple_seq_add_stmt (&seq, stmt);
2687 g = gimple_build_label (lab3);
2688 gimple_seq_add_stmt (&seq, g);
2689 gimple_bind_set_body (bind, seq);
2690 update_stmt (bind);
2691 scan_omp_for (new_stmt, outer_ctx);
2692 scan_omp_for (stmt, outer_ctx)->simt_stmt = new_stmt;
2693 }
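
/* The bind built above contains, schematically (a sketch):

     cond = .GOMP_USE_SIMT ();
     if (cond != 0) goto lab1; else goto lab2;
     lab1: <copy of the loop, with an added _simt_ clause>
           goto lab3;
     lab2: <the original simd loop>
     lab3: ;

   Both versions are then scanned, and the SIMT copy is recorded as the
   simt_stmt of the original loop's context.  */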
2694
2695 static tree omp_find_scan (gimple_stmt_iterator *, bool *,
2696 struct walk_stmt_info *);
2697 static omp_context *maybe_lookup_ctx (gimple *);
2698
2699 /* Duplicate #pragma omp simd, one for the scan input phase loop and one
2700 for the scan phase loop. */
2701
2702 static void
2703 scan_omp_simd_scan (gimple_stmt_iterator *gsi, gomp_for *stmt,
2704 omp_context *outer_ctx)
2705 {
2706 /* The only change between inclusive and exclusive scan will be
2707 within the first simd loop, so just use inclusive in the
2708 worksharing loop. */
2709 outer_ctx->scan_inclusive = true;
2710 tree c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE_INCLUSIVE);
2711 OMP_CLAUSE_DECL (c) = integer_zero_node;
2712
2713 gomp_scan *input_stmt = gimple_build_omp_scan (NULL, NULL_TREE);
2714 gomp_scan *scan_stmt = gimple_build_omp_scan (NULL, c);
2715 gsi_replace (gsi, input_stmt, false);
2716 gimple_seq input_body = NULL;
2717 gimple_seq_add_stmt (&input_body, stmt);
2718 gsi_insert_after (gsi, scan_stmt, GSI_NEW_STMT);
2719
2720 gimple_stmt_iterator input1_gsi = gsi_none ();
2721 struct walk_stmt_info wi;
2722 memset (&wi, 0, sizeof (wi));
2723 wi.val_only = true;
2724 wi.info = (void *) &input1_gsi;
2725 walk_gimple_seq_mod (gimple_omp_body_ptr (stmt), omp_find_scan, NULL, &wi);
2726 gcc_assert (!gsi_end_p (input1_gsi));
2727
2728 gimple *input_stmt1 = gsi_stmt (input1_gsi);
2729 gsi_next (&input1_gsi);
2730 gimple *scan_stmt1 = gsi_stmt (input1_gsi);
2731 gcc_assert (scan_stmt1 && gimple_code (scan_stmt1) == GIMPLE_OMP_SCAN);
2732 c = gimple_omp_scan_clauses (as_a <gomp_scan *> (scan_stmt1));
2733 if (c && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_EXCLUSIVE)
2734 std::swap (input_stmt1, scan_stmt1);
2735
2736 gimple_seq input_body1 = gimple_omp_body (input_stmt1);
2737 gimple_omp_set_body (input_stmt1, NULL);
2738
2739 gimple_seq scan_body = copy_gimple_seq_and_replace_locals (stmt);
2740 gomp_for *new_stmt = as_a <gomp_for *> (scan_body);
2741
2742 gimple_omp_set_body (input_stmt1, input_body1);
2743 gimple_omp_set_body (scan_stmt1, NULL);
2744
2745 gimple_stmt_iterator input2_gsi = gsi_none ();
2746 memset (&wi, 0, sizeof (wi));
2747 wi.val_only = true;
2748 wi.info = (void *) &input2_gsi;
2749 walk_gimple_seq_mod (gimple_omp_body_ptr (new_stmt), omp_find_scan,
2750 NULL, &wi);
2751 gcc_assert (!gsi_end_p (input2_gsi));
2752
2753 gimple *input_stmt2 = gsi_stmt (input2_gsi);
2754 gsi_next (&input2_gsi);
2755 gimple *scan_stmt2 = gsi_stmt (input2_gsi);
2756 gcc_assert (scan_stmt2 && gimple_code (scan_stmt2) == GIMPLE_OMP_SCAN);
2757 if (c && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_EXCLUSIVE)
2758 std::swap (input_stmt2, scan_stmt2);
2759
2760 gimple_omp_set_body (input_stmt2, NULL);
2761
2762 gimple_omp_set_body (input_stmt, input_body);
2763 gimple_omp_set_body (scan_stmt, scan_body);
2764
2765 omp_context *ctx = new_omp_context (input_stmt, outer_ctx);
2766 scan_omp (gimple_omp_body_ptr (input_stmt), ctx);
2767
2768 ctx = new_omp_context (scan_stmt, outer_ctx);
2769 scan_omp (gimple_omp_body_ptr (scan_stmt), ctx);
2770
2771 maybe_lookup_ctx (new_stmt)->for_simd_scan_phase = true;
2772 }
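
/* Schematically the result is (a sketch):

     GIMPLE_OMP_SCAN                 <- input phase, original loop
     GIMPLE_OMP_SCAN inclusive (0)   <- scan phase, copied loop

   each wrapped loop gets its own context, and the copy's context is
   marked for_simd_scan_phase.  */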
2773
2774 /* Scan an OpenMP sections directive. */
2775
2776 static void
2777 scan_omp_sections (gomp_sections *stmt, omp_context *outer_ctx)
2778 {
2779 omp_context *ctx;
2780
2781 ctx = new_omp_context (stmt, outer_ctx);
2782 scan_sharing_clauses (gimple_omp_sections_clauses (stmt), ctx);
2783 scan_omp (gimple_omp_body_ptr (stmt), ctx);
2784 }
2785
2786 /* Scan an OpenMP single directive. */
2787
2788 static void
2789 scan_omp_single (gomp_single *stmt, omp_context *outer_ctx)
2790 {
2791 omp_context *ctx;
2792 tree name;
2793
2794 ctx = new_omp_context (stmt, outer_ctx);
2795 ctx->field_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
2796 ctx->record_type = lang_hooks.types.make_type (RECORD_TYPE);
2797 name = create_tmp_var_name (".omp_copy_s");
2798 name = build_decl (gimple_location (stmt),
2799 TYPE_DECL, name, ctx->record_type);
2800 TYPE_NAME (ctx->record_type) = name;
2801
2802 scan_sharing_clauses (gimple_omp_single_clauses (stmt), ctx);
2803 scan_omp (gimple_omp_body_ptr (stmt), ctx);
2804
2805 if (TYPE_FIELDS (ctx->record_type) == NULL)
2806 ctx->record_type = NULL;
2807 else
2808 layout_type (ctx->record_type);
2809 }
2810
2811 /* Scan a GIMPLE_OMP_TARGET. */
2812
2813 static void
2814 scan_omp_target (gomp_target *stmt, omp_context *outer_ctx)
2815 {
2816 omp_context *ctx;
2817 tree name;
2818 bool offloaded = is_gimple_omp_offloaded (stmt);
2819 tree clauses = gimple_omp_target_clauses (stmt);
2820
2821 ctx = new_omp_context (stmt, outer_ctx);
2822 ctx->field_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
2823 ctx->record_type = lang_hooks.types.make_type (RECORD_TYPE);
2824 name = create_tmp_var_name (".omp_data_t");
2825 name = build_decl (gimple_location (stmt),
2826 TYPE_DECL, name, ctx->record_type);
2827 DECL_ARTIFICIAL (name) = 1;
2828 DECL_NAMELESS (name) = 1;
2829 TYPE_NAME (ctx->record_type) = name;
2830 TYPE_ARTIFICIAL (ctx->record_type) = 1;
2831
2832 if (offloaded)
2833 {
2834 create_omp_child_function (ctx, false);
2835 gimple_omp_target_set_child_fn (stmt, ctx->cb.dst_fn);
2836 }
2837
2838 scan_sharing_clauses (clauses, ctx);
2839 scan_omp (gimple_omp_body_ptr (stmt), ctx);
2840
2841 if (TYPE_FIELDS (ctx->record_type) == NULL)
2842 ctx->record_type = ctx->receiver_decl = NULL;
2843 else
2844 {
2845 TYPE_FIELDS (ctx->record_type)
2846 = nreverse (TYPE_FIELDS (ctx->record_type));
2847 if (flag_checking)
2848 {
2849 unsigned int align = DECL_ALIGN (TYPE_FIELDS (ctx->record_type));
2850 for (tree field = TYPE_FIELDS (ctx->record_type);
2851 field;
2852 field = DECL_CHAIN (field))
2853 gcc_assert (DECL_ALIGN (field) == align);
2854 }
2855 layout_type (ctx->record_type);
2856 if (offloaded)
2857 fixup_child_record_type (ctx);
2858 }
2859 }
2860
2861 /* Scan an OpenMP teams directive. */
2862
2863 static void
2864 scan_omp_teams (gomp_teams *stmt, omp_context *outer_ctx)
2865 {
2866 omp_context *ctx = new_omp_context (stmt, outer_ctx);
2867
2868 if (!gimple_omp_teams_host (stmt))
2869 {
2870 scan_sharing_clauses (gimple_omp_teams_clauses (stmt), ctx);
2871 scan_omp (gimple_omp_body_ptr (stmt), ctx);
2872 return;
2873 }
2874 taskreg_contexts.safe_push (ctx);
2875 gcc_assert (taskreg_nesting_level == 1);
2876 ctx->field_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
2877 ctx->record_type = lang_hooks.types.make_type (RECORD_TYPE);
2878 tree name = create_tmp_var_name (".omp_data_s");
2879 name = build_decl (gimple_location (stmt),
2880 TYPE_DECL, name, ctx->record_type);
2881 DECL_ARTIFICIAL (name) = 1;
2882 DECL_NAMELESS (name) = 1;
2883 TYPE_NAME (ctx->record_type) = name;
2884 TYPE_ARTIFICIAL (ctx->record_type) = 1;
2885 create_omp_child_function (ctx, false);
2886 gimple_omp_teams_set_child_fn (stmt, ctx->cb.dst_fn);
2887
2888 scan_sharing_clauses (gimple_omp_teams_clauses (stmt), ctx);
2889 scan_omp (gimple_omp_body_ptr (stmt), ctx);
2890
2891 if (TYPE_FIELDS (ctx->record_type) == NULL)
2892 ctx->record_type = ctx->receiver_decl = NULL;
2893 }
2894
2895 /* Check nesting restrictions. */
2896 static bool
2897 check_omp_nesting_restrictions (gimple *stmt, omp_context *ctx)
2898 {
2899 tree c;
2900
2901 /* No nesting of non-OpenACC STMT (that is, an OpenMP one, or a GOMP builtin)
2902 inside an OpenACC CTX. */
2903 if (!(is_gimple_omp (stmt)
2904 && is_gimple_omp_oacc (stmt))
2905 /* Except for atomic codes that we share with OpenMP. */
2906 && !(gimple_code (stmt) == GIMPLE_OMP_ATOMIC_LOAD
2907 || gimple_code (stmt) == GIMPLE_OMP_ATOMIC_STORE))
2908 {
2909 if (oacc_get_fn_attrib (cfun->decl) != NULL)
2910 {
2911 error_at (gimple_location (stmt),
2912 "non-OpenACC construct inside of OpenACC routine");
2913 return false;
2914 }
2915 else
2916 for (omp_context *octx = ctx; octx != NULL; octx = octx->outer)
2917 if (is_gimple_omp (octx->stmt)
2918 && is_gimple_omp_oacc (octx->stmt))
2919 {
2920 error_at (gimple_location (stmt),
2921 "non-OpenACC construct inside of OpenACC region");
2922 return false;
2923 }
2924 }
2925
2926 if (ctx != NULL)
2927 {
2928 if (gimple_code (ctx->stmt) == GIMPLE_OMP_SCAN
2929 && ctx->outer
2930 && gimple_code (ctx->outer->stmt) == GIMPLE_OMP_FOR)
2931 ctx = ctx->outer;
2932 if (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
2933 && gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_SIMD
2934 && !ctx->loop_p)
2935 {
2936 c = NULL_TREE;
2937 if (ctx->order_concurrent
2938 && (gimple_code (stmt) == GIMPLE_OMP_ORDERED
2939 || gimple_code (stmt) == GIMPLE_OMP_ATOMIC_LOAD
2940 || gimple_code (stmt) == GIMPLE_OMP_ATOMIC_STORE))
2941 {
2942 error_at (gimple_location (stmt),
2943 "OpenMP constructs other than %<parallel%>, %<loop%>"
2944 " or %<simd%> may not be nested inside a region with"
2945 " the %<order(concurrent)%> clause");
2946 return false;
2947 }
2948 if (gimple_code (stmt) == GIMPLE_OMP_ORDERED)
2949 {
2950 c = gimple_omp_ordered_clauses (as_a <gomp_ordered *> (stmt));
2951 if (omp_find_clause (c, OMP_CLAUSE_SIMD))
2952 {
2953 if (omp_find_clause (c, OMP_CLAUSE_THREADS)
2954 && (ctx->outer == NULL
2955 || !gimple_omp_for_combined_into_p (ctx->stmt)
2956 || gimple_code (ctx->outer->stmt) != GIMPLE_OMP_FOR
2957 || (gimple_omp_for_kind (ctx->outer->stmt)
2958 != GF_OMP_FOR_KIND_FOR)
2959 || !gimple_omp_for_combined_p (ctx->outer->stmt)))
2960 {
2961 error_at (gimple_location (stmt),
2962 "%<ordered simd threads%> must be closely "
2963 "nested inside of %<%s simd%> region",
2964 lang_GNU_Fortran () ? "do" : "for");
2965 return false;
2966 }
2967 return true;
2968 }
2969 }
2970 else if (gimple_code (stmt) == GIMPLE_OMP_ATOMIC_LOAD
2971 || gimple_code (stmt) == GIMPLE_OMP_ATOMIC_STORE
2972 || gimple_code (stmt) == GIMPLE_OMP_SCAN)
2973 return true;
2974 else if (gimple_code (stmt) == GIMPLE_OMP_FOR
2975 && gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_SIMD)
2976 return true;
2977 error_at (gimple_location (stmt),
2978 "OpenMP constructs other than "
2979 "%<ordered simd%>, %<simd%>, %<loop%> or %<atomic%> may "
2980 "not be nested inside %<simd%> region");
2981 return false;
2982 }
2983 else if (gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS)
2984 {
2985 if ((gimple_code (stmt) != GIMPLE_OMP_FOR
2986 || (gimple_omp_for_kind (stmt) != GF_OMP_FOR_KIND_DISTRIBUTE
2987 && omp_find_clause (gimple_omp_for_clauses (stmt),
2988 OMP_CLAUSE_BIND) == NULL_TREE))
2989 && gimple_code (stmt) != GIMPLE_OMP_PARALLEL)
2990 {
2991 error_at (gimple_location (stmt),
2992 "only %<distribute%>, %<parallel%> or %<loop%> "
2993 "regions are allowed to be strictly nested inside "
2994 "%<teams%> region");
2995 return false;
2996 }
2997 }
2998 else if (ctx->order_concurrent
2999 && gimple_code (stmt) != GIMPLE_OMP_PARALLEL
3000 && (gimple_code (stmt) != GIMPLE_OMP_FOR
3001 || gimple_omp_for_kind (stmt) != GF_OMP_FOR_KIND_SIMD)
3002 && gimple_code (stmt) != GIMPLE_OMP_SCAN)
3003 {
3004 if (ctx->loop_p)
3005 error_at (gimple_location (stmt),
3006 "OpenMP constructs other than %<parallel%>, %<loop%> or "
3007 "%<simd%> may not be nested inside a %<loop%> region");
3008 else
3009 error_at (gimple_location (stmt),
3010 "OpenMP constructs other than %<parallel%>, %<loop%> or "
3011 "%<simd%> may not be nested inside a region with "
3012 "the %<order(concurrent)%> clause");
3013 return false;
3014 }
3015 }
3016 switch (gimple_code (stmt))
3017 {
3018 case GIMPLE_OMP_FOR:
3019 if (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_SIMD)
3020 return true;
3021 if (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_DISTRIBUTE)
3022 {
3023 if (ctx != NULL && gimple_code (ctx->stmt) != GIMPLE_OMP_TEAMS)
3024 {
3025 error_at (gimple_location (stmt),
3026 "%<distribute%> region must be strictly nested "
3027 "inside %<teams%> construct");
3028 return false;
3029 }
3030 return true;
3031 }
3032 /* We split a taskloop into a task with a nested taskloop in it. */
3033 if (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_TASKLOOP)
3034 return true;
3035 /* For now allow this; hopefully that will change and loop bind(parallel)
3036 will not be allowed in lots of contexts. */
3037 if (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_FOR
3038 && omp_find_clause (gimple_omp_for_clauses (stmt), OMP_CLAUSE_BIND))
3039 return true;
3040 if (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_OACC_LOOP)
3041 {
3042 bool ok = false;
3043
3044 if (ctx)
3045 switch (gimple_code (ctx->stmt))
3046 {
3047 case GIMPLE_OMP_FOR:
3048 ok = (gimple_omp_for_kind (ctx->stmt)
3049 == GF_OMP_FOR_KIND_OACC_LOOP);
3050 break;
3051
3052 case GIMPLE_OMP_TARGET:
3053 switch (gimple_omp_target_kind (ctx->stmt))
3054 {
3055 case GF_OMP_TARGET_KIND_OACC_PARALLEL:
3056 case GF_OMP_TARGET_KIND_OACC_KERNELS:
3057 case GF_OMP_TARGET_KIND_OACC_SERIAL:
3058 ok = true;
3059 break;
3060
3061 default:
3062 break;
3063 }
3064
3065 default:
3066 break;
3067 }
3068 else if (oacc_get_fn_attrib (current_function_decl))
3069 ok = true;
3070 if (!ok)
3071 {
3072 error_at (gimple_location (stmt),
3073 "OpenACC loop directive must be associated with"
3074 " an OpenACC compute region");
3075 return false;
3076 }
3077 }
3078 /* FALLTHRU */
3079 case GIMPLE_CALL:
3080 if (is_gimple_call (stmt)
3081 && (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
3082 == BUILT_IN_GOMP_CANCEL
3083 || DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
3084 == BUILT_IN_GOMP_CANCELLATION_POINT))
3085 {
3086 const char *bad = NULL;
3087 const char *kind = NULL;
3088 const char *construct
3089 = (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
3090 == BUILT_IN_GOMP_CANCEL)
3091 ? "cancel"
3092 : "cancellation point";
3093 if (ctx == NULL)
3094 {
3095 error_at (gimple_location (stmt), "orphaned %qs construct",
3096 construct);
3097 return false;
3098 }
3099 switch (tree_fits_shwi_p (gimple_call_arg (stmt, 0))
3100 ? tree_to_shwi (gimple_call_arg (stmt, 0))
3101 : 0)
3102 {
3103 case 1:
3104 if (gimple_code (ctx->stmt) != GIMPLE_OMP_PARALLEL)
3105 bad = "parallel";
3106 else if (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
3107 == BUILT_IN_GOMP_CANCEL
3108 && !integer_zerop (gimple_call_arg (stmt, 1)))
3109 ctx->cancellable = true;
3110 kind = "parallel";
3111 break;
3112 case 2:
3113 if (gimple_code (ctx->stmt) != GIMPLE_OMP_FOR
3114 || gimple_omp_for_kind (ctx->stmt) != GF_OMP_FOR_KIND_FOR)
3115 bad = "for";
3116 else if (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
3117 == BUILT_IN_GOMP_CANCEL
3118 && !integer_zerop (gimple_call_arg (stmt, 1)))
3119 {
3120 ctx->cancellable = true;
3121 if (omp_find_clause (gimple_omp_for_clauses (ctx->stmt),
3122 OMP_CLAUSE_NOWAIT))
3123 warning_at (gimple_location (stmt), 0,
3124 "%<cancel for%> inside "
3125 "%<nowait%> for construct");
3126 if (omp_find_clause (gimple_omp_for_clauses (ctx->stmt),
3127 OMP_CLAUSE_ORDERED))
3128 warning_at (gimple_location (stmt), 0,
3129 "%<cancel for%> inside "
3130 "%<ordered%> for construct");
3131 }
3132 kind = "for";
3133 break;
3134 case 4:
3135 if (gimple_code (ctx->stmt) != GIMPLE_OMP_SECTIONS
3136 && gimple_code (ctx->stmt) != GIMPLE_OMP_SECTION)
3137 bad = "sections";
3138 else if (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
3139 == BUILT_IN_GOMP_CANCEL
3140 && !integer_zerop (gimple_call_arg (stmt, 1)))
3141 {
3142 if (gimple_code (ctx->stmt) == GIMPLE_OMP_SECTIONS)
3143 {
3144 ctx->cancellable = true;
3145 if (omp_find_clause (gimple_omp_sections_clauses
3146 (ctx->stmt),
3147 OMP_CLAUSE_NOWAIT))
3148 warning_at (gimple_location (stmt), 0,
3149 "%<cancel sections%> inside "
3150 "%<nowait%> sections construct");
3151 }
3152 else
3153 {
3154 gcc_assert (ctx->outer
3155 && gimple_code (ctx->outer->stmt)
3156 == GIMPLE_OMP_SECTIONS);
3157 ctx->outer->cancellable = true;
3158 if (omp_find_clause (gimple_omp_sections_clauses
3159 (ctx->outer->stmt),
3160 OMP_CLAUSE_NOWAIT))
3161 warning_at (gimple_location (stmt), 0,
3162 "%<cancel sections%> inside "
3163 "%<nowait%> sections construct");
3164 }
3165 }
3166 kind = "sections";
3167 break;
3168 case 8:
3169 if (!is_task_ctx (ctx)
3170 && (!is_taskloop_ctx (ctx)
3171 || ctx->outer == NULL
3172 || !is_task_ctx (ctx->outer)))
3173 bad = "task";
3174 else
3175 {
3176 for (omp_context *octx = ctx->outer;
3177 octx; octx = octx->outer)
3178 {
3179 switch (gimple_code (octx->stmt))
3180 {
3181 case GIMPLE_OMP_TASKGROUP:
3182 break;
3183 case GIMPLE_OMP_TARGET:
3184 if (gimple_omp_target_kind (octx->stmt)
3185 != GF_OMP_TARGET_KIND_REGION)
3186 continue;
3187 /* FALLTHRU */
3188 case GIMPLE_OMP_PARALLEL:
3189 case GIMPLE_OMP_TEAMS:
3190 error_at (gimple_location (stmt),
3191 "%<%s taskgroup%> construct not closely "
3192 "nested inside of %<taskgroup%> region",
3193 construct);
3194 return false;
3195 case GIMPLE_OMP_TASK:
3196 if (gimple_omp_task_taskloop_p (octx->stmt)
3197 && octx->outer
3198 && is_taskloop_ctx (octx->outer))
3199 {
3200 tree clauses
3201 = gimple_omp_for_clauses (octx->outer->stmt);
3202 if (!omp_find_clause (clauses, OMP_CLAUSE_NOGROUP))
3203 break;
3204 }
3205 continue;
3206 default:
3207 continue;
3208 }
3209 break;
3210 }
3211 ctx->cancellable = true;
3212 }
3213 kind = "taskgroup";
3214 break;
3215 default:
3216 error_at (gimple_location (stmt), "invalid arguments");
3217 return false;
3218 }
3219 if (bad)
3220 {
3221 error_at (gimple_location (stmt),
3222 "%<%s %s%> construct not closely nested inside of %qs",
3223 construct, kind, bad);
3224 return false;
3225 }
3226 }
3227 /* FALLTHRU */
3228 case GIMPLE_OMP_SECTIONS:
3229 case GIMPLE_OMP_SINGLE:
3230 for (; ctx != NULL; ctx = ctx->outer)
3231 switch (gimple_code (ctx->stmt))
3232 {
3233 case GIMPLE_OMP_FOR:
3234 if (gimple_omp_for_kind (ctx->stmt) != GF_OMP_FOR_KIND_FOR
3235 && gimple_omp_for_kind (ctx->stmt) != GF_OMP_FOR_KIND_TASKLOOP)
3236 break;
3237 /* FALLTHRU */
3238 case GIMPLE_OMP_SECTIONS:
3239 case GIMPLE_OMP_SINGLE:
3240 case GIMPLE_OMP_ORDERED:
3241 case GIMPLE_OMP_MASTER:
3242 case GIMPLE_OMP_TASK:
3243 case GIMPLE_OMP_CRITICAL:
3244 if (is_gimple_call (stmt))
3245 {
3246 if (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
3247 != BUILT_IN_GOMP_BARRIER)
3248 return true;
3249 error_at (gimple_location (stmt),
3250 "barrier region may not be closely nested inside "
3251 "of work-sharing, %<loop%>, %<critical%>, "
3252 "%<ordered%>, %<master%>, explicit %<task%> or "
3253 "%<taskloop%> region");
3254 return false;
3255 }
3256 error_at (gimple_location (stmt),
3257 "work-sharing region may not be closely nested inside "
3258 "of work-sharing, %<loop%>, %<critical%>, %<ordered%>, "
3259 "%<master%>, explicit %<task%> or %<taskloop%> region");
3260 return false;
3261 case GIMPLE_OMP_PARALLEL:
3262 case GIMPLE_OMP_TEAMS:
3263 return true;
3264 case GIMPLE_OMP_TARGET:
3265 if (gimple_omp_target_kind (ctx->stmt)
3266 == GF_OMP_TARGET_KIND_REGION)
3267 return true;
3268 break;
3269 default:
3270 break;
3271 }
3272 break;
3273 case GIMPLE_OMP_MASTER:
3274 for (; ctx != NULL; ctx = ctx->outer)
3275 switch (gimple_code (ctx->stmt))
3276 {
3277 case GIMPLE_OMP_FOR:
3278 if (gimple_omp_for_kind (ctx->stmt) != GF_OMP_FOR_KIND_FOR
3279 && gimple_omp_for_kind (ctx->stmt) != GF_OMP_FOR_KIND_TASKLOOP)
3280 break;
3281 /* FALLTHRU */
3282 case GIMPLE_OMP_SECTIONS:
3283 case GIMPLE_OMP_SINGLE:
3284 case GIMPLE_OMP_TASK:
3285 error_at (gimple_location (stmt),
3286 "%<master%> region may not be closely nested inside "
3287 "of work-sharing, %<loop%>, explicit %<task%> or "
3288 "%<taskloop%> region");
3289 return false;
3290 case GIMPLE_OMP_PARALLEL:
3291 case GIMPLE_OMP_TEAMS:
3292 return true;
3293 case GIMPLE_OMP_TARGET:
3294 if (gimple_omp_target_kind (ctx->stmt)
3295 == GF_OMP_TARGET_KIND_REGION)
3296 return true;
3297 break;
3298 default:
3299 break;
3300 }
3301 break;
3302 case GIMPLE_OMP_TASK:
3303 for (c = gimple_omp_task_clauses (stmt); c; c = OMP_CLAUSE_CHAIN (c))
3304 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND
3305 && (OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SOURCE
3306 || OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SINK))
3307 {
3308 enum omp_clause_depend_kind kind = OMP_CLAUSE_DEPEND_KIND (c);
3309 error_at (OMP_CLAUSE_LOCATION (c),
3310 "%<depend(%s)%> is only allowed in %<omp ordered%>",
3311 kind == OMP_CLAUSE_DEPEND_SOURCE ? "source" : "sink");
3312 return false;
3313 }
3314 break;
3315 case GIMPLE_OMP_ORDERED:
3316 for (c = gimple_omp_ordered_clauses (as_a <gomp_ordered *> (stmt));
3317 c; c = OMP_CLAUSE_CHAIN (c))
3318 {
3319 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_DEPEND)
3320 {
3321 gcc_assert (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_THREADS
3322 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SIMD);
3323 continue;
3324 }
3325 enum omp_clause_depend_kind kind = OMP_CLAUSE_DEPEND_KIND (c);
3326 if (kind == OMP_CLAUSE_DEPEND_SOURCE
3327 || kind == OMP_CLAUSE_DEPEND_SINK)
3328 {
3329 tree oclause;
3330 /* Look for containing ordered(N) loop. */
3331 if (ctx == NULL
3332 || gimple_code (ctx->stmt) != GIMPLE_OMP_FOR
3333 || (oclause
3334 = omp_find_clause (gimple_omp_for_clauses (ctx->stmt),
3335 OMP_CLAUSE_ORDERED)) == NULL_TREE)
3336 {
3337 error_at (OMP_CLAUSE_LOCATION (c),
3338 "%<ordered%> construct with %<depend%> clause "
3339 "must be closely nested inside an %<ordered%> "
3340 "loop");
3341 return false;
3342 }
3343 else if (OMP_CLAUSE_ORDERED_EXPR (oclause) == NULL_TREE)
3344 {
3345 error_at (OMP_CLAUSE_LOCATION (c),
3346 "%<ordered%> construct with %<depend%> clause "
3347 "must be closely nested inside a loop with "
3348 "%<ordered%> clause with a parameter");
3349 return false;
3350 }
3351 }
3352 else
3353 {
3354 error_at (OMP_CLAUSE_LOCATION (c),
3355 "invalid depend kind in omp %<ordered%> %<depend%>");
3356 return false;
3357 }
3358 }
3359 c = gimple_omp_ordered_clauses (as_a <gomp_ordered *> (stmt));
3360 if (omp_find_clause (c, OMP_CLAUSE_SIMD))
3361 {
3362 /* An ordered simd must be closely nested inside of a simd region,
3363 and a simd region must not encounter constructs other than
3364 ordered simd; therefore an ordered simd may be either orphaned,
3365 or ctx->stmt must be a simd. The latter case has already been
3366 handled earlier. */
3367 if (ctx != NULL)
3368 {
3369 error_at (gimple_location (stmt),
3370 "%<ordered%> %<simd%> must be closely nested inside "
3371 "%<simd%> region");
3372 return false;
3373 }
3374 }
3375 for (; ctx != NULL; ctx = ctx->outer)
3376 switch (gimple_code (ctx->stmt))
3377 {
3378 case GIMPLE_OMP_CRITICAL:
3379 case GIMPLE_OMP_TASK:
3380 case GIMPLE_OMP_ORDERED:
3381 ordered_in_taskloop:
3382 error_at (gimple_location (stmt),
3383 "%<ordered%> region may not be closely nested inside "
3384 "of %<critical%>, %<ordered%>, explicit %<task%> or "
3385 "%<taskloop%> region");
3386 return false;
3387 case GIMPLE_OMP_FOR:
3388 if (gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_TASKLOOP)
3389 goto ordered_in_taskloop;
3390 tree o;
3391 o = omp_find_clause (gimple_omp_for_clauses (ctx->stmt),
3392 OMP_CLAUSE_ORDERED);
3393 if (o == NULL)
3394 {
3395 error_at (gimple_location (stmt),
3396 "%<ordered%> region must be closely nested inside "
3397 "a loop region with an %<ordered%> clause");
3398 return false;
3399 }
3400 if (OMP_CLAUSE_ORDERED_EXPR (o) != NULL_TREE
3401 && omp_find_clause (c, OMP_CLAUSE_DEPEND) == NULL_TREE)
3402 {
3403 error_at (gimple_location (stmt),
3404 "%<ordered%> region without %<depend%> clause may "
3405 "not be closely nested inside a loop region with "
3406 "an %<ordered%> clause with a parameter");
3407 return false;
3408 }
3409 return true;
3410 case GIMPLE_OMP_TARGET:
3411 if (gimple_omp_target_kind (ctx->stmt)
3412 != GF_OMP_TARGET_KIND_REGION)
3413 break;
3414 /* FALLTHRU */
3415 case GIMPLE_OMP_PARALLEL:
3416 case GIMPLE_OMP_TEAMS:
3417 error_at (gimple_location (stmt),
3418 "%<ordered%> region must be closely nested inside "
3419 "a loop region with an %<ordered%> clause");
3420 return false;
3421 default:
3422 break;
3423 }
3424 break;
3425 case GIMPLE_OMP_CRITICAL:
3426 {
3427 tree this_stmt_name
3428 = gimple_omp_critical_name (as_a <gomp_critical *> (stmt));
3429 for (; ctx != NULL; ctx = ctx->outer)
3430 if (gomp_critical *other_crit
3431 = dyn_cast <gomp_critical *> (ctx->stmt))
3432 if (this_stmt_name == gimple_omp_critical_name (other_crit))
3433 {
3434 error_at (gimple_location (stmt),
3435 "%<critical%> region may not be nested inside "
3436 "a %<critical%> region with the same name");
3437 return false;
3438 }
3439 }
3440 break;
3441 case GIMPLE_OMP_TEAMS:
3442 if (ctx == NULL)
3443 break;
3444 else if (gimple_code (ctx->stmt) != GIMPLE_OMP_TARGET
3445 || (gimple_omp_target_kind (ctx->stmt)
3446 != GF_OMP_TARGET_KIND_REGION))
3447 {
3448 /* A teams construct can appear either strictly nested inside of a
3449 target construct with no intervening stmts, or can be encountered
3450 only by an initial task (so it must not appear inside any OpenMP
3451 construct). */
3452 error_at (gimple_location (stmt),
3453 "%<teams%> construct must be closely nested inside of "
3454 "%<target%> construct or not nested in any OpenMP "
3455 "construct");
3456 return false;
3457 }
3458 break;
3459 case GIMPLE_OMP_TARGET:
3460 for (c = gimple_omp_target_clauses (stmt); c; c = OMP_CLAUSE_CHAIN (c))
3461 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND
3462 && (OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SOURCE
3463 || OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SINK))
3464 {
3465 enum omp_clause_depend_kind kind = OMP_CLAUSE_DEPEND_KIND (c);
3466 error_at (OMP_CLAUSE_LOCATION (c),
3467 "%<depend(%s)%> is only allowed in %<omp ordered%>",
3468 kind == OMP_CLAUSE_DEPEND_SOURCE ? "source" : "sink");
3469 return false;
3470 }
3471 if (is_gimple_omp_offloaded (stmt)
3472 && oacc_get_fn_attrib (cfun->decl) != NULL)
3473 {
3474 error_at (gimple_location (stmt),
3475 "OpenACC region inside of OpenACC routine, nested "
3476 "parallelism not supported yet");
3477 return false;
3478 }
3479 for (; ctx != NULL; ctx = ctx->outer)
3480 {
3481 if (gimple_code (ctx->stmt) != GIMPLE_OMP_TARGET)
3482 {
3483 if (is_gimple_omp (stmt)
3484 && is_gimple_omp_oacc (stmt)
3485 && is_gimple_omp (ctx->stmt))
3486 {
3487 error_at (gimple_location (stmt),
3488 "OpenACC construct inside of non-OpenACC region");
3489 return false;
3490 }
3491 continue;
3492 }
3493
3494 const char *stmt_name, *ctx_stmt_name;
3495 switch (gimple_omp_target_kind (stmt))
3496 {
3497 case GF_OMP_TARGET_KIND_REGION: stmt_name = "target"; break;
3498 case GF_OMP_TARGET_KIND_DATA: stmt_name = "target data"; break;
3499 case GF_OMP_TARGET_KIND_UPDATE: stmt_name = "target update"; break;
3500 case GF_OMP_TARGET_KIND_ENTER_DATA:
3501 stmt_name = "target enter data"; break;
3502 case GF_OMP_TARGET_KIND_EXIT_DATA:
3503 stmt_name = "target exit data"; break;
3504 case GF_OMP_TARGET_KIND_OACC_PARALLEL: stmt_name = "parallel"; break;
3505 case GF_OMP_TARGET_KIND_OACC_KERNELS: stmt_name = "kernels"; break;
3506 case GF_OMP_TARGET_KIND_OACC_SERIAL: stmt_name = "serial"; break;
3507 case GF_OMP_TARGET_KIND_OACC_DATA: stmt_name = "data"; break;
3508 case GF_OMP_TARGET_KIND_OACC_UPDATE: stmt_name = "update"; break;
3509 case GF_OMP_TARGET_KIND_OACC_ENTER_EXIT_DATA:
3510 stmt_name = "enter/exit data"; break;
3511 case GF_OMP_TARGET_KIND_OACC_DECLARE: stmt_name = "declare"; break;
3512 case GF_OMP_TARGET_KIND_OACC_HOST_DATA: stmt_name = "host_data";
3513 break;
3514 default: gcc_unreachable ();
3515 }
3516 switch (gimple_omp_target_kind (ctx->stmt))
3517 {
3518 case GF_OMP_TARGET_KIND_REGION: ctx_stmt_name = "target"; break;
3519 case GF_OMP_TARGET_KIND_DATA: ctx_stmt_name = "target data"; break;
3520 case GF_OMP_TARGET_KIND_OACC_PARALLEL:
3521 ctx_stmt_name = "parallel"; break;
3522 case GF_OMP_TARGET_KIND_OACC_KERNELS:
3523 ctx_stmt_name = "kernels"; break;
3524 case GF_OMP_TARGET_KIND_OACC_SERIAL:
3525 ctx_stmt_name = "serial"; break;
3526 case GF_OMP_TARGET_KIND_OACC_DATA: ctx_stmt_name = "data"; break;
3527 case GF_OMP_TARGET_KIND_OACC_HOST_DATA:
3528 ctx_stmt_name = "host_data"; break;
3529 default: gcc_unreachable ();
3530 }
3531
3532 /* OpenACC/OpenMP mismatch? */
3533 if (is_gimple_omp_oacc (stmt)
3534 != is_gimple_omp_oacc (ctx->stmt))
3535 {
3536 error_at (gimple_location (stmt),
3537 "%s %qs construct inside of %s %qs region",
3538 (is_gimple_omp_oacc (stmt)
3539 ? "OpenACC" : "OpenMP"), stmt_name,
3540 (is_gimple_omp_oacc (ctx->stmt)
3541 ? "OpenACC" : "OpenMP"), ctx_stmt_name);
3542 return false;
3543 }
3544 if (is_gimple_omp_offloaded (ctx->stmt))
3545 {
3546 /* No GIMPLE_OMP_TARGET inside offloaded OpenACC CTX. */
3547 if (is_gimple_omp_oacc (ctx->stmt))
3548 {
3549 error_at (gimple_location (stmt),
3550 "%qs construct inside of %qs region",
3551 stmt_name, ctx_stmt_name);
3552 return false;
3553 }
3554 else
3555 {
3556 warning_at (gimple_location (stmt), 0,
3557 "%qs construct inside of %qs region",
3558 stmt_name, ctx_stmt_name);
3559 }
3560 }
3561 }
3562 break;
3563 default:
3564 break;
3565 }
3566 return true;
3567 }
3568
3569
3570 /* Helper function for scan_omp.
3571
3572 Callback for walk_tree, used by walk_gimple_stmt on operands, to
3573 scan for OMP directives in TP. */
3574
3575 static tree
3576 scan_omp_1_op (tree *tp, int *walk_subtrees, void *data)
3577 {
3578 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
3579 omp_context *ctx = (omp_context *) wi->info;
3580 tree t = *tp;
3581
3582 switch (TREE_CODE (t))
3583 {
3584 case VAR_DECL:
3585 case PARM_DECL:
3586 case LABEL_DECL:
3587 case RESULT_DECL:
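/* Rewrite uses of privatized decls to the copies recorded in the
   context's copy_body_data during scanning (editorial note). */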
3588 if (ctx)
3589 {
3590 tree repl = remap_decl (t, &ctx->cb);
3591 gcc_checking_assert (TREE_CODE (repl) != ERROR_MARK);
3592 *tp = repl;
3593 }
3594 break;
3595
3596 default:
3597 if (ctx && TYPE_P (t))
3598 *tp = remap_type (t, &ctx->cb);
3599 else if (!DECL_P (t))
3600 {
3601 *walk_subtrees = 1;
3602 if (ctx)
3603 {
3604 tree tem = remap_type (TREE_TYPE (t), &ctx->cb);
3605 if (tem != TREE_TYPE (t))
3606 {
3607 if (TREE_CODE (t) == INTEGER_CST)
3608 *tp = wide_int_to_tree (tem, wi::to_wide (t));
3609 else
3610 TREE_TYPE (t) = tem;
3611 }
3612 }
3613 }
3614 break;
3615 }
3616
3617 return NULL_TREE;
3618 }
3619
3620 /* Return true if FNDECL is a setjmp or a longjmp. */
3621
3622 static bool
3623 setjmp_or_longjmp_p (const_tree fndecl)
3624 {
3625 if (fndecl_built_in_p (fndecl, BUILT_IN_SETJMP)
3626 || fndecl_built_in_p (fndecl, BUILT_IN_LONGJMP))
3627 return true;
3628
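/* Beyond the builtins, also recognize plain public, file-scope
   declarations named "setjmp"/"longjmp", e.g. a user-written
   'extern int setjmp (jmp_buf);' (editorial note on the checks
   below). */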
3629 tree declname = DECL_NAME (fndecl);
3630 if (!declname
3631 || (DECL_CONTEXT (fndecl) != NULL_TREE
3632 && TREE_CODE (DECL_CONTEXT (fndecl)) != TRANSLATION_UNIT_DECL)
3633 || !TREE_PUBLIC (fndecl))
3634 return false;
3635
3636 const char *name = IDENTIFIER_POINTER (declname);
3637 return !strcmp (name, "setjmp") || !strcmp (name, "longjmp");
3638 }
3639
3640 /* Return true if FNDECL is an omp_* runtime API call. */
3641
3642 static bool
3643 omp_runtime_api_call (const_tree fndecl)
3644 {
3645 tree declname = DECL_NAME (fndecl);
3646 if (!declname
3647 || (DECL_CONTEXT (fndecl) != NULL_TREE
3648 && TREE_CODE (DECL_CONTEXT (fndecl)) != TRANSLATION_UNIT_DECL)
3649 || !TREE_PUBLIC (fndecl))
3650 return false;
3651
3652 const char *name = IDENTIFIER_POINTER (declname);
3653 if (strncmp (name, "omp_", 4) != 0)
3654 return false;
3655
3656 static const char *omp_runtime_apis[] =
3657 {
3658 /* This array has 3 sections. First omp_* calls that don't
3659 have any suffixes. */
3660 "target_alloc",
3661 "target_associate_ptr",
3662 "target_disassociate_ptr",
3663 "target_free",
3664 "target_is_present",
3665 "target_memcpy",
3666 "target_memcpy_rect",
3667 NULL,
3668 /* Now omp_* calls that are available as omp_* and omp_*_. */
3669 "capture_affinity",
3670 "destroy_lock",
3671 "destroy_nest_lock",
3672 "display_affinity",
3673 "get_active_level",
3674 "get_affinity_format",
3675 "get_cancellation",
3676 "get_default_device",
3677 "get_dynamic",
3678 "get_initial_device",
3679 "get_level",
3680 "get_max_active_levels",
3681 "get_max_task_priority",
3682 "get_max_threads",
3683 "get_nested",
3684 "get_num_devices",
3685 "get_num_places",
3686 "get_num_procs",
3687 "get_num_teams",
3688 "get_num_threads",
3689 "get_partition_num_places",
3690 "get_place_num",
3691 "get_proc_bind",
3692 "get_team_num",
3693 "get_thread_limit",
3694 "get_thread_num",
3695 "get_wtick",
3696 "get_wtime",
3697 "in_final",
3698 "in_parallel",
3699 "init_lock",
3700 "init_nest_lock",
3701 "is_initial_device",
3702 "pause_resource",
3703 "pause_resource_all",
3704 "set_affinity_format",
3705 "set_lock",
3706 "set_nest_lock",
3707 "test_lock",
3708 "test_nest_lock",
3709 "unset_lock",
3710 "unset_nest_lock",
3711 NULL,
3712 /* And finally calls available as omp_*, omp_*_ and omp_*_8_. */
3713 "get_ancestor_thread_num",
3714 "get_partition_place_nums",
3715 "get_place_num_procs",
3716 "get_place_proc_ids",
3717 "get_schedule",
3718 "get_team_size",
3719 "set_default_device",
3720 "set_dynamic",
3721 "set_max_active_levels",
3722 "set_nested",
3723 "set_num_threads",
3724 "set_schedule"
3725 };
3726
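/* MODE tracks which section of the table we are in and therefore
   which suffixes are accepted: in mode 0 only the exact name matches
   (e.g. "omp_target_alloc"); in mode 1 the Fortran "_" variant also
   matches (e.g. "omp_set_lock" and "omp_set_lock_"); in mode 2 the
   "_8_" integer(8) variant matches as well (e.g. "omp_get_team_size",
   "omp_get_team_size_" and "omp_get_team_size_8_"). (Editorial
   summary of the loop below.) */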
3727 int mode = 0;
3728 for (unsigned i = 0; i < ARRAY_SIZE (omp_runtime_apis); i++)
3729 {
3730 if (omp_runtime_apis[i] == NULL)
3731 {
3732 mode++;
3733 continue;
3734 }
3735 size_t len = strlen (omp_runtime_apis[i]);
3736 if (strncmp (name + 4, omp_runtime_apis[i], len) == 0
3737 && (name[4 + len] == '\0'
3738 || (mode > 0
3739 && name[4 + len] == '_'
3740 && (name[4 + len + 1] == '\0'
3741 || (mode > 1
3742 && strcmp (name + 4 + len + 1, "8_") == 0)))))
3743 return true;
3744 }
3745 return false;
3746 }
3747
3748 /* Helper function for scan_omp.
3749
3750 Callback for walk_gimple_stmt used to scan for OMP directives in
3751 the current statement in GSI. */
3752
3753 static tree
3754 scan_omp_1_stmt (gimple_stmt_iterator *gsi, bool *handled_ops_p,
3755 struct walk_stmt_info *wi)
3756 {
3757 gimple *stmt = gsi_stmt (*gsi);
3758 omp_context *ctx = (omp_context *) wi->info;
3759
3760 if (gimple_has_location (stmt))
3761 input_location = gimple_location (stmt);
3762
3763 /* Check the nesting restrictions. */
3764 bool remove = false;
3765 if (is_gimple_omp (stmt))
3766 remove = !check_omp_nesting_restrictions (stmt, ctx);
3767 else if (is_gimple_call (stmt))
3768 {
3769 tree fndecl = gimple_call_fndecl (stmt);
3770 if (fndecl)
3771 {
3772 if (ctx
3773 && gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
3774 && gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_SIMD
3775 && setjmp_or_longjmp_p (fndecl)
3776 && !ctx->loop_p)
3777 {
3778 remove = true;
3779 error_at (gimple_location (stmt),
3780 "setjmp/longjmp inside %<simd%> construct");
3781 }
3782 else if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
3783 switch (DECL_FUNCTION_CODE (fndecl))
3784 {
3785 case BUILT_IN_GOMP_BARRIER:
3786 case BUILT_IN_GOMP_CANCEL:
3787 case BUILT_IN_GOMP_CANCELLATION_POINT:
3788 case BUILT_IN_GOMP_TASKYIELD:
3789 case BUILT_IN_GOMP_TASKWAIT:
3790 case BUILT_IN_GOMP_TASKGROUP_START:
3791 case BUILT_IN_GOMP_TASKGROUP_END:
3792 remove = !check_omp_nesting_restrictions (stmt, ctx);
3793 break;
3794 default:
3795 break;
3796 }
3797 else if (ctx)
3798 {
3799 omp_context *octx = ctx;
3800 if (gimple_code (ctx->stmt) == GIMPLE_OMP_SCAN && ctx->outer)
3801 octx = ctx->outer;
3802 if (octx->order_concurrent && omp_runtime_api_call (fndecl))
3803 {
3804 remove = true;
3805 error_at (gimple_location (stmt),
3806 "OpenMP runtime API call %qD in a region with "
3807 "%<order(concurrent)%> clause", fndecl);
3808 }
3809 }
3810 }
3811 }
3812 if (remove)
3813 {
3814 stmt = gimple_build_nop ();
3815 gsi_replace (gsi, stmt, false);
3816 }
3817
3818 *handled_ops_p = true;
3819
3820 switch (gimple_code (stmt))
3821 {
3822 case GIMPLE_OMP_PARALLEL:
3823 taskreg_nesting_level++;
3824 scan_omp_parallel (gsi, ctx);
3825 taskreg_nesting_level--;
3826 break;
3827
3828 case GIMPLE_OMP_TASK:
3829 taskreg_nesting_level++;
3830 scan_omp_task (gsi, ctx);
3831 taskreg_nesting_level--;
3832 break;
3833
3834 case GIMPLE_OMP_FOR:
3835 if ((gimple_omp_for_kind (as_a <gomp_for *> (stmt))
3836 == GF_OMP_FOR_KIND_SIMD)
3837 && gimple_omp_for_combined_into_p (stmt)
3838 && gimple_code (ctx->stmt) != GIMPLE_OMP_SCAN)
3839 {
3840 tree clauses = gimple_omp_for_clauses (as_a <gomp_for *> (stmt));
3841 tree c = omp_find_clause (clauses, OMP_CLAUSE_REDUCTION);
3842 if (c && OMP_CLAUSE_REDUCTION_INSCAN (c) && !seen_error ())
3843 {
3844 scan_omp_simd_scan (gsi, as_a <gomp_for *> (stmt), ctx);
3845 break;
3846 }
3847 }
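/* If this simd loop might be offloaded to a SIMT device, scan_omp_simd
   clones it into SIMT and non-SIMT variants; the actual choice between
   them is made later, at expansion time (editorial note). */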
3848 if ((gimple_omp_for_kind (as_a <gomp_for *> (stmt))
3849 == GF_OMP_FOR_KIND_SIMD)
3850 && omp_maybe_offloaded_ctx (ctx)
3851 && omp_max_simt_vf ()
3852 && gimple_omp_for_collapse (stmt) == 1)
3853 scan_omp_simd (gsi, as_a <gomp_for *> (stmt), ctx);
3854 else
3855 scan_omp_for (as_a <gomp_for *> (stmt), ctx);
3856 break;
3857
3858 case GIMPLE_OMP_SECTIONS:
3859 scan_omp_sections (as_a <gomp_sections *> (stmt), ctx);
3860 break;
3861
3862 case GIMPLE_OMP_SINGLE:
3863 scan_omp_single (as_a <gomp_single *> (stmt), ctx);
3864 break;
3865
3866 case GIMPLE_OMP_SCAN:
3867 if (tree clauses = gimple_omp_scan_clauses (as_a <gomp_scan *> (stmt)))
3868 {
3869 if (OMP_CLAUSE_CODE (clauses) == OMP_CLAUSE_INCLUSIVE)
3870 ctx->scan_inclusive = true;
3871 else if (OMP_CLAUSE_CODE (clauses) == OMP_CLAUSE_EXCLUSIVE)
3872 ctx->scan_exclusive = true;
3873 }
3874 /* FALLTHRU */
3875 case GIMPLE_OMP_SECTION:
3876 case GIMPLE_OMP_MASTER:
3877 case GIMPLE_OMP_ORDERED:
3878 case GIMPLE_OMP_CRITICAL:
3879 ctx = new_omp_context (stmt, ctx);
3880 scan_omp (gimple_omp_body_ptr (stmt), ctx);
3881 break;
3882
3883 case GIMPLE_OMP_TASKGROUP:
3884 ctx = new_omp_context (stmt, ctx);
3885 scan_sharing_clauses (gimple_omp_taskgroup_clauses (stmt), ctx);
3886 scan_omp (gimple_omp_body_ptr (stmt), ctx);
3887 break;
3888
3889 case GIMPLE_OMP_TARGET:
3890 if (is_gimple_omp_offloaded (stmt))
3891 {
3892 taskreg_nesting_level++;
3893 scan_omp_target (as_a <gomp_target *> (stmt), ctx);
3894 taskreg_nesting_level--;
3895 }
3896 else
3897 scan_omp_target (as_a <gomp_target *> (stmt), ctx);
3898 break;
3899
3900 case GIMPLE_OMP_TEAMS:
3901 if (gimple_omp_teams_host (as_a <gomp_teams *> (stmt)))
3902 {
3903 taskreg_nesting_level++;
3904 scan_omp_teams (as_a <gomp_teams *> (stmt), ctx);
3905 taskreg_nesting_level--;
3906 }
3907 else
3908 scan_omp_teams (as_a <gomp_teams *> (stmt), ctx);
3909 break;
3910
3911 case GIMPLE_BIND:
3912 {
3913 tree var;
3914
3915 *handled_ops_p = false;
3916 if (ctx)
3917 for (var = gimple_bind_vars (as_a <gbind *> (stmt));
3918 var;
3919 var = DECL_CHAIN (var))
3920 insert_decl_map (&ctx->cb, var, var);
3921 }
3922 break;
3923 default:
3924 *handled_ops_p = false;
3925 break;
3926 }
3927
3928 return NULL_TREE;
3929 }
3930
3931
3932 /* Scan all the statements in the sequence BODY_P. CTX contains
3933 context information about the OMP directives and clauses found
3934 during the scan. */
3935
3936 static void
3937 scan_omp (gimple_seq *body_p, omp_context *ctx)
3938 {
3939 location_t saved_location;
3940 struct walk_stmt_info wi;
3941
3942 memset (&wi, 0, sizeof (wi));
3943 wi.info = ctx;
3944 wi.want_locations = true;
3945
3946 saved_location = input_location;
3947 walk_gimple_seq_mod (body_p, scan_omp_1_stmt, scan_omp_1_op, &wi);
3948 input_location = saved_location;
3949 }
3950 \f
3951 /* Re-gimplification and code generation routines. */
3952
3953 /* Remove omp_member_access_dummy_var variables from gimple_bind_vars
3954 of BIND if in a method. */
3955
3956 static void
3957 maybe_remove_omp_member_access_dummy_vars (gbind *bind)
3958 {
3959 if (DECL_ARGUMENTS (current_function_decl)
3960 && DECL_ARTIFICIAL (DECL_ARGUMENTS (current_function_decl))
3961 && (TREE_CODE (TREE_TYPE (DECL_ARGUMENTS (current_function_decl)))
3962 == POINTER_TYPE))
3963 {
3964 tree vars = gimple_bind_vars (bind);
3965 for (tree *pvar = &vars; *pvar; )
3966 if (omp_member_access_dummy_var (*pvar))
3967 *pvar = DECL_CHAIN (*pvar);
3968 else
3969 pvar = &DECL_CHAIN (*pvar);
3970 gimple_bind_set_vars (bind, vars);
3971 }
3972 }
3973
3974 /* Remove omp_member_access_dummy_var variables from BLOCK_VARS of
3975 block and its subblocks. */
3976
3977 static void
3978 remove_member_access_dummy_vars (tree block)
3979 {
3980 for (tree *pvar = &BLOCK_VARS (block); *pvar; )
3981 if (omp_member_access_dummy_var (*pvar))
3982 *pvar = DECL_CHAIN (*pvar);
3983 else
3984 pvar = &DECL_CHAIN (*pvar);
3985
3986 for (block = BLOCK_SUBBLOCKS (block); block; block = BLOCK_CHAIN (block))
3987 remove_member_access_dummy_vars (block);
3988 }
3989
3990 /* If a context was created for STMT when it was scanned, return it. */
3991
3992 static omp_context *
3993 maybe_lookup_ctx (gimple *stmt)
3994 {
3995 splay_tree_node n;
3996 n = splay_tree_lookup (all_contexts, (splay_tree_key) stmt);
3997 return n ? (omp_context *) n->value : NULL;
3998 }
3999
4000
4001 /* Find the mapping for DECL in CTX or the immediately enclosing
4002 context that has a mapping for DECL.
4003
4004 If CTX is a nested parallel directive, we may have to use the decl
4005 mappings created in CTX's parent context. Suppose that we have the
4006 following parallel nesting (variable UIDs shown for clarity):
4007
4008 iD.1562 = 0;
4009 #omp parallel shared(iD.1562) -> outer parallel
4010 iD.1562 = iD.1562 + 1;
4011
4012 #omp parallel shared (iD.1562) -> inner parallel
4013 iD.1562 = iD.1562 - 1;
4014
4015 Each parallel structure will create a distinct .omp_data_s structure
4016 for copying iD.1562 in/out of the directive:
4017
4018 outer parallel .omp_data_s.1.i -> iD.1562
4019 inner parallel .omp_data_s.2.i -> iD.1562
4020
4021 A shared variable mapping will produce a copy-out operation before
4022 the parallel directive and a copy-in operation after it. So, in
4023 this case we would have:
4024
4025 iD.1562 = 0;
4026 .omp_data_o.1.i = iD.1562;
4027 #omp parallel shared(iD.1562) -> outer parallel
4028 .omp_data_i.1 = &.omp_data_o.1
4029 .omp_data_i.1->i = .omp_data_i.1->i + 1;
4030
4031 .omp_data_o.2.i = iD.1562; -> **
4032 #omp parallel shared(iD.1562) -> inner parallel
4033 .omp_data_i.2 = &.omp_data_o.2
4034 .omp_data_i.2->i = .omp_data_i.2->i - 1;
4035
4036
4037 ** This is a problem. The symbol iD.1562 cannot be referenced
4038 inside the body of the outer parallel region. But since we are
4039 emitting this copy operation while expanding the inner parallel
4040 directive, we need to access the CTX structure of the outer
4041 parallel directive to get the correct mapping:
4042
4043 .omp_data_o.2.i = .omp_data_i.1->i
4044
4045 Since there may be other workshare or parallel directives enclosing
4046 the parallel directive, it may be necessary to walk up the context
4047 parent chain. This is not a problem in general because nested
4048 parallelism happens only rarely. */
4049
4050 static tree
4051 lookup_decl_in_outer_ctx (tree decl, omp_context *ctx)
4052 {
4053 tree t;
4054 omp_context *up;
4055
4056 for (up = ctx->outer, t = NULL; up && t == NULL; up = up->outer)
4057 t = maybe_lookup_decl (decl, up);
4058
4059 gcc_assert (!ctx->is_nested || t || is_global_var (decl));
4060
4061 return t ? t : decl;
4062 }
4063
4064
4065 /* Similar to lookup_decl_in_outer_ctx, but return DECL if not found
4066 in outer contexts. */
4067
4068 static tree
4069 maybe_lookup_decl_in_outer_ctx (tree decl, omp_context *ctx)
4070 {
4071 tree t = NULL;
4072 omp_context *up;
4073
4074 for (up = ctx->outer; up && t == NULL; up = up->outer)
4075 t = maybe_lookup_decl (decl, up);
4076
4077 return t ? t : decl;
4078 }
4079
4080
4081 /* Construct the initialization value for reduction operation OP of type TYPE. */
4082
4083 tree
4084 omp_reduction_init_op (location_t loc, enum tree_code op, tree type)
4085 {
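/* Each case below yields the neutral starting value of OP in TYPE:
   0 for +, -, |, ^, ||, != (.neqv.); 1 for *, &&, == (.eqv.);
   all-ones for &; the minimum of TYPE for max and the maximum of
   TYPE for min. reduction(-:...) starts at 0 like + because the
   partial results are summed. (Editorial summary.) */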
4086 switch (op)
4087 {
4088 case PLUS_EXPR:
4089 case MINUS_EXPR:
4090 case BIT_IOR_EXPR:
4091 case BIT_XOR_EXPR:
4092 case TRUTH_OR_EXPR:
4093 case TRUTH_ORIF_EXPR:
4094 case TRUTH_XOR_EXPR:
4095 case NE_EXPR:
4096 return build_zero_cst (type);
4097
4098 case MULT_EXPR:
4099 case TRUTH_AND_EXPR:
4100 case TRUTH_ANDIF_EXPR:
4101 case EQ_EXPR:
4102 return fold_convert_loc (loc, type, integer_one_node);
4103
4104 case BIT_AND_EXPR:
4105 return fold_convert_loc (loc, type, integer_minus_one_node);
4106
4107 case MAX_EXPR:
4108 if (SCALAR_FLOAT_TYPE_P (type))
4109 {
4110 REAL_VALUE_TYPE max, min;
4111 if (HONOR_INFINITIES (type))
4112 {
4113 real_inf (&max);
4114 real_arithmetic (&min, NEGATE_EXPR, &max, NULL);
4115 }
4116 else
4117 real_maxval (&min, 1, TYPE_MODE (type));
4118 return build_real (type, min);
4119 }
4120 else if (POINTER_TYPE_P (type))
4121 {
4122 wide_int min
4123 = wi::min_value (TYPE_PRECISION (type), TYPE_SIGN (type));
4124 return wide_int_to_tree (type, min);
4125 }
4126 else
4127 {
4128 gcc_assert (INTEGRAL_TYPE_P (type));
4129 return TYPE_MIN_VALUE (type);
4130 }
4131
4132 case MIN_EXPR:
4133 if (SCALAR_FLOAT_TYPE_P (type))
4134 {
4135 REAL_VALUE_TYPE max;
4136 if (HONOR_INFINITIES (type))
4137 real_inf (&max);
4138 else
4139 real_maxval (&max, 0, TYPE_MODE (type));
4140 return build_real (type, max);
4141 }
4142 else if (POINTER_TYPE_P (type))
4143 {
4144 wide_int max
4145 = wi::max_value (TYPE_PRECISION (type), TYPE_SIGN (type));
4146 return wide_int_to_tree (type, max);
4147 }
4148 else
4149 {
4150 gcc_assert (INTEGRAL_TYPE_P (type));
4151 return TYPE_MAX_VALUE (type);
4152 }
4153
4154 default:
4155 gcc_unreachable ();
4156 }
4157 }
4158
4159 /* Construct the initialization value for reduction CLAUSE. */
4160
4161 tree
4162 omp_reduction_init (tree clause, tree type)
4163 {
4164 return omp_reduction_init_op (OMP_CLAUSE_LOCATION (clause),
4165 OMP_CLAUSE_REDUCTION_CODE (clause), type);
4166 }
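/* For example (editorial): omp_reduction_init for reduction(+:x) with
   double X yields 0.0, while reduction(max:x) with int X yields
   INT_MIN. */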
4167
4168 /* Return alignment to be assumed for var in CLAUSE, which should be
4169 OMP_CLAUSE_ALIGNED. */
4170
4171 static tree
4172 omp_clause_aligned_alignment (tree clause)
4173 {
4174 if (OMP_CLAUSE_ALIGNED_ALIGNMENT (clause))
4175 return OMP_CLAUSE_ALIGNED_ALIGNMENT (clause);
4176
4177 /* Otherwise return implementation defined alignment. */
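/* Editorial note: the value computed below is the largest unit
   alignment among the vector types the target prefers for scalar
   integer and floating modes, so the assumption is safe for any
   autovectorized access. */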
4178 unsigned int al = 1;
4179 opt_scalar_mode mode_iter;
4180 auto_vector_modes modes;
4181 targetm.vectorize.autovectorize_vector_modes (&modes, true);
4182 static enum mode_class classes[]
4183 = { MODE_INT, MODE_VECTOR_INT, MODE_FLOAT, MODE_VECTOR_FLOAT };
4184 for (int i = 0; i < 4; i += 2)
4185 /* The for loop above dictates that we only walk through scalar classes. */
4186 FOR_EACH_MODE_IN_CLASS (mode_iter, classes[i])
4187 {
4188 scalar_mode mode = mode_iter.require ();
4189 machine_mode vmode = targetm.vectorize.preferred_simd_mode (mode);
4190 if (GET_MODE_CLASS (vmode) != classes[i + 1])
4191 continue;
4192 machine_mode alt_vmode;
4193 for (unsigned int j = 0; j < modes.length (); ++j)
4194 if (related_vector_mode (modes[j], mode).exists (&alt_vmode)
4195 && known_ge (GET_MODE_SIZE (alt_vmode), GET_MODE_SIZE (vmode)))
4196 vmode = alt_vmode;
4197
4198 tree type = lang_hooks.types.type_for_mode (mode, 1);
4199 if (type == NULL_TREE || TYPE_MODE (type) != mode)
4200 continue;
4201 type = build_vector_type_for_mode (type, vmode);
4202 if (TYPE_MODE (type) != vmode)
4203 continue;
4204 if (TYPE_ALIGN_UNIT (type) > al)
4205 al = TYPE_ALIGN_UNIT (type);
4206 }
4207 return build_int_cst (integer_type_node, al);
4208 }
4209
4210
4211 /* This structure is part of the interface between lower_rec_simd_input_clauses
4212 and lower_rec_input_clauses. */
4213
4214 class omplow_simd_context {
4215 public:
4216 omplow_simd_context () { memset (this, 0, sizeof (*this)); }
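/* Editorial summary of the fields: IDX and LANE subscript the "omp
   simd array" copies of privatized variables; LASTLANE is the lane
   whose value survives for lastprivate/inscan; SIMT_EARGS collects
   address arguments for SIMT privatization and SIMT_DLIST the
   matching clobber/destructor sequence; MAX_VF is the maximum
   vectorization factor considered; IS_SIMT selects SIMT layout. */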
4217 tree idx;
4218 tree lane;
4219 tree lastlane;
4220 vec<tree, va_heap> simt_eargs;
4221 gimple_seq simt_dlist;
4222 poly_uint64_pod max_vf;
4223 bool is_simt;
4224 };
4225
4226 /* Helper function of lower_rec_input_clauses, used for #pragma omp simd
4227 privatization. */
4228
4229 static bool
4230 lower_rec_simd_input_clauses (tree new_var, omp_context *ctx,
4231 omplow_simd_context *sctx, tree &ivar,
4232 tree &lvar, tree *rvar = NULL,
4233 tree *rvar2 = NULL)
4234 {
4235 if (known_eq (sctx->max_vf, 0U))
4236 {
4237 sctx->max_vf = sctx->is_simt ? omp_max_simt_vf () : omp_max_vf ();
4238 if (maybe_gt (sctx->max_vf, 1U))
4239 {
4240 tree c = omp_find_clause (gimple_omp_for_clauses (ctx->stmt),
4241 OMP_CLAUSE_SAFELEN);
4242 if (c)
4243 {
4244 poly_uint64 safe_len;
4245 if (!poly_int_tree_p (OMP_CLAUSE_SAFELEN_EXPR (c), &safe_len)
4246 || maybe_lt (safe_len, 1U))
4247 sctx->max_vf = 1;
4248 else
4249 sctx->max_vf = lower_bound (sctx->max_vf, safe_len);
4250 }
4251 }
4252 if (maybe_gt (sctx->max_vf, 1U))
4253 {
4254 sctx->idx = create_tmp_var (unsigned_type_node);
4255 sctx->lane = create_tmp_var (unsigned_type_node);
4256 }
4257 }
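/* A maximum VF of 1 means per-lane privatization cannot help; tell
   the caller to fall back to ordinary scalar privatization
   (editorial note). */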
4258 if (known_eq (sctx->max_vf, 1U))
4259 return false;
4260
4261 if (sctx->is_simt)
4262 {
4263 if (is_gimple_reg (new_var))
4264 {
4265 ivar = lvar = new_var;
4266 return true;
4267 }
4268 tree type = TREE_TYPE (new_var), ptype = build_pointer_type (type);
4269 ivar = lvar = create_tmp_var (type);
4270 TREE_ADDRESSABLE (ivar) = 1;
4271 DECL_ATTRIBUTES (ivar) = tree_cons (get_identifier ("omp simt private"),
4272 NULL, DECL_ATTRIBUTES (ivar));
4273 sctx->simt_eargs.safe_push (build1 (ADDR_EXPR, ptype, ivar));
4274 tree clobber = build_clobber (type);
4275 gimple *g = gimple_build_assign (ivar, clobber);
4276 gimple_seq_add_stmt (&sctx->simt_dlist, g);
4277 }
4278 else
4279 {
4280 tree atype = build_array_type_nelts (TREE_TYPE (new_var), sctx->max_vf);
4281 tree avar = create_tmp_var_raw (atype);
4282 if (TREE_ADDRESSABLE (new_var))
4283 TREE_ADDRESSABLE (avar) = 1;
4284 DECL_ATTRIBUTES (avar)
4285 = tree_cons (get_identifier ("omp simd array"), NULL,
4286 DECL_ATTRIBUTES (avar));
4287 gimple_add_tmp_var (avar);
4288 tree iavar = avar;
4289 if (rvar && !ctx->for_simd_scan_phase)
4290 {
4291 /* For inscan reductions, create another array temporary,
4292 which will hold the reduced value. */
4293 iavar = create_tmp_var_raw (atype);
4294 if (TREE_ADDRESSABLE (new_var))
4295 TREE_ADDRESSABLE (iavar) = 1;
4296 DECL_ATTRIBUTES (iavar)
4297 = tree_cons (get_identifier ("omp simd array"), NULL,
4298 tree_cons (get_identifier ("omp simd inscan"), NULL,
4299 DECL_ATTRIBUTES (iavar)));
4300 gimple_add_tmp_var (iavar);
4301 ctx->cb.decl_map->put (avar, iavar);
4302 if (sctx->lastlane == NULL_TREE)
4303 sctx->lastlane = create_tmp_var (unsigned_type_node);
4304 *rvar = build4 (ARRAY_REF, TREE_TYPE (new_var), iavar,
4305 sctx->lastlane, NULL_TREE, NULL_TREE);
4306 TREE_THIS_NOTRAP (*rvar) = 1;
4307
4308 if (ctx->scan_exclusive)
4309 {
4310 /* And for exclusive scan yet another one, which will
4311 hold the value during the scan phase. */
4312 tree savar = create_tmp_var_raw (atype);
4313 if (TREE_ADDRESSABLE (new_var))
4314 TREE_ADDRESSABLE (savar) = 1;
4315 DECL_ATTRIBUTES (savar)
4316 = tree_cons (get_identifier ("omp simd array"), NULL,
4317 tree_cons (get_identifier ("omp simd inscan "
4318 "exclusive"), NULL,
4319 DECL_ATTRIBUTES (savar)));
4320 gimple_add_tmp_var (savar);
4321 ctx->cb.decl_map->put (iavar, savar);
4322 *rvar2 = build4 (ARRAY_REF, TREE_TYPE (new_var), savar,
4323 sctx->idx, NULL_TREE, NULL_TREE);
4324 TREE_THIS_NOTRAP (*rvar2) = 1;
4325 }
4326 }
4327 ivar = build4 (ARRAY_REF, TREE_TYPE (new_var), iavar, sctx->idx,
4328 NULL_TREE, NULL_TREE);
4329 lvar = build4 (ARRAY_REF, TREE_TYPE (new_var), avar, sctx->lane,
4330 NULL_TREE, NULL_TREE);
4331 TREE_THIS_NOTRAP (ivar) = 1;
4332 TREE_THIS_NOTRAP (lvar) = 1;
4333 }
4334 if (DECL_P (new_var))
4335 {
4336 SET_DECL_VALUE_EXPR (new_var, lvar);
4337 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
4338 }
4339 return true;
4340 }
4341
4342 /* Helper function of lower_rec_input_clauses. For a reference used
4343 in a simd reduction, add an underlying variable that it will reference. */
4344
4345 static void
4346 handle_simd_reference (location_t loc, tree new_vard, gimple_seq *ilist)
4347 {
4348 tree z = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (new_vard)));
4349 if (TREE_CONSTANT (z))
4350 {
4351 z = create_tmp_var_raw (TREE_TYPE (TREE_TYPE (new_vard)),
4352 get_name (new_vard));
4353 gimple_add_tmp_var (z);
4354 TREE_ADDRESSABLE (z) = 1;
4355 z = build_fold_addr_expr_loc (loc, z);
4356 gimplify_assign (new_vard, z, ilist);
4357 }
4358 }
4359
4360 /* Helper function for lower_rec_input_clauses. Emit code into the
4361 ILIST sequence to compute (type) (tskred_temp[idx]). */
4362
4363 static tree
4364 task_reduction_read (gimple_seq *ilist, tree tskred_temp, tree type,
4365 unsigned idx)
4366 {
4367 unsigned HOST_WIDE_INT sz
4368 = tree_to_uhwi (TYPE_SIZE_UNIT (pointer_sized_int_node));
4369 tree r = build2 (MEM_REF, pointer_sized_int_node,
4370 tskred_temp, build_int_cst (TREE_TYPE (tskred_temp),
4371 idx * sz));
4372 tree v = create_tmp_var (pointer_sized_int_node);
4373 gimple *g = gimple_build_assign (v, r);
4374 gimple_seq_add_stmt (ilist, g);
4375 if (!useless_type_conversion_p (type, pointer_sized_int_node))
4376 {
4377 v = create_tmp_var (type);
4378 g = gimple_build_assign (v, NOP_EXPR, gimple_assign_lhs (g));
4379 gimple_seq_add_stmt (ilist, g);
4380 }
4381 return v;
4382 }
4383
4384 /* Lower early initialization of privatized variable NEW_VAR
4385 if it needs an allocator (i.e. has an allocate clause). */
4386
4387 static bool
4388 lower_private_allocate (tree var, tree new_var, tree &allocator,
4389 tree &allocate_ptr, gimple_seq *ilist,
4390 omp_context *ctx, bool is_ref, tree size)
4391 {
4392 if (allocator)
4393 return false;
4394 gcc_assert (allocate_ptr == NULL_TREE);
4395 if (ctx->allocate_map && DECL_P (new_var))
4396 if (tree *allocatorp = ctx->allocate_map->get (var))
4397 allocator = *allocatorp;
4398 if (allocator == NULL_TREE)
4399 return false;
4400 if (!is_ref && omp_is_reference (var))
4401 return false;
4402
4403 if (TREE_CODE (allocator) != INTEGER_CST)
4404 allocator = build_outer_var_ref (allocator, ctx);
4405 allocator = fold_convert (pointer_sized_int_node, allocator);
4406 if (TREE_CODE (allocator) != INTEGER_CST)
4407 {
4408 tree var = create_tmp_var (TREE_TYPE (allocator));
4409 gimplify_assign (var, allocator, ilist);
4410 allocator = var;
4411 }
4412
4413 tree ptr_type, align, sz;
4414 if (is_ref)
4415 {
4416 ptr_type = build_pointer_type (TREE_TYPE (TREE_TYPE (new_var)));
4417 align = build_int_cst (size_type_node,
4418 TYPE_ALIGN_UNIT (TREE_TYPE (ptr_type)));
4419 sz = size;
4420 }
4421 else
4422 {
4423 ptr_type = build_pointer_type (TREE_TYPE (new_var));
4424 align = build_int_cst (size_type_node, DECL_ALIGN_UNIT (new_var));
4425 sz = fold_convert (size_type_node, DECL_SIZE_UNIT (new_var));
4426 }
4427 if (TREE_CODE (sz) != INTEGER_CST)
4428 {
4429 tree szvar = create_tmp_var (size_type_node);
4430 gimplify_assign (szvar, sz, ilist);
4431 sz = szvar;
4432 }
4433 allocate_ptr = create_tmp_var (ptr_type);
4434 tree a = builtin_decl_explicit (BUILT_IN_GOMP_ALLOC);
4435 gimple *g = gimple_build_call (a, 3, align, sz, allocator);
4436 gimple_call_set_lhs (g, allocate_ptr);
4437 gimple_seq_add_stmt (ilist, g);
4438 if (!is_ref)
4439 {
4440 SET_DECL_VALUE_EXPR (new_var, build_simple_mem_ref (allocate_ptr));
4441 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
4442 }
4443 return true;
4444 }
4445
4446 /* Generate code to implement the input clauses, FIRSTPRIVATE and COPYIN,
4447 from the receiver (aka child) side and initializers for REFERENCE_TYPE
4448 private variables. Initialization statements go in ILIST, while calls
4449 to destructors go in DLIST. */
4450
4451 static void
4452 lower_rec_input_clauses (tree clauses, gimple_seq *ilist, gimple_seq *dlist,
4453 omp_context *ctx, struct omp_for_data *fd)
4454 {
4455 tree c, copyin_seq, x, ptr;
4456 bool copyin_by_ref = false;
4457 bool lastprivate_firstprivate = false;
4458 bool reduction_omp_orig_ref = false;
4459 int pass;
4460 bool is_simd = (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
4461 && gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_SIMD);
4462 omplow_simd_context sctx = omplow_simd_context ();
4463 tree simt_lane = NULL_TREE, simtrec = NULL_TREE;
4464 tree ivar = NULL_TREE, lvar = NULL_TREE, uid = NULL_TREE;
4465 gimple_seq llist[4] = { };
4466 tree nonconst_simd_if = NULL_TREE;
4467
4468 copyin_seq = NULL;
4469 sctx.is_simt = is_simd && omp_find_clause (clauses, OMP_CLAUSE__SIMT_);
4470
4471 /* Set max_vf=1 (which will later enforce safelen=1) in simd loops
4472 with data sharing clauses referencing variable sized vars. That
4473 is unnecessarily hard to support and very unlikely to result in
4474 vectorized code anyway. */
4475 if (is_simd)
4476 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
4477 switch (OMP_CLAUSE_CODE (c))
4478 {
4479 case OMP_CLAUSE_LINEAR:
4480 if (OMP_CLAUSE_LINEAR_ARRAY (c))
4481 sctx.max_vf = 1;
4482 /* FALLTHRU */
4483 case OMP_CLAUSE_PRIVATE:
4484 case OMP_CLAUSE_FIRSTPRIVATE:
4485 case OMP_CLAUSE_LASTPRIVATE:
4486 if (is_variable_sized (OMP_CLAUSE_DECL (c)))
4487 sctx.max_vf = 1;
4488 else if (omp_is_reference (OMP_CLAUSE_DECL (c)))
4489 {
4490 tree rtype = TREE_TYPE (TREE_TYPE (OMP_CLAUSE_DECL (c)));
4491 if (!TREE_CONSTANT (TYPE_SIZE_UNIT (rtype)))
4492 sctx.max_vf = 1;
4493 }
4494 break;
4495 case OMP_CLAUSE_REDUCTION:
4496 case OMP_CLAUSE_IN_REDUCTION:
4497 if (TREE_CODE (OMP_CLAUSE_DECL (c)) == MEM_REF
4498 || is_variable_sized (OMP_CLAUSE_DECL (c)))
4499 sctx.max_vf = 1;
4500 else if (omp_is_reference (OMP_CLAUSE_DECL (c)))
4501 {
4502 tree rtype = TREE_TYPE (TREE_TYPE (OMP_CLAUSE_DECL (c)));
4503 if (!TREE_CONSTANT (TYPE_SIZE_UNIT (rtype)))
4504 sctx.max_vf = 1;
4505 }
4506 break;
4507 case OMP_CLAUSE_IF:
4508 if (integer_zerop (OMP_CLAUSE_IF_EXPR (c)))
4509 sctx.max_vf = 1;
4510 else if (TREE_CODE (OMP_CLAUSE_IF_EXPR (c)) != INTEGER_CST)
4511 nonconst_simd_if = OMP_CLAUSE_IF_EXPR (c);
4512 break;
4513 case OMP_CLAUSE_SIMDLEN:
4514 if (integer_onep (OMP_CLAUSE_SIMDLEN_EXPR (c)))
4515 sctx.max_vf = 1;
4516 break;
4517 case OMP_CLAUSE__CONDTEMP_:
4518 /* FIXME: lastprivate(conditional:) not handled for SIMT yet. */
4519 if (sctx.is_simt)
4520 sctx.max_vf = 1;
4521 break;
4522 default:
4523 continue;
4524 }
4525
4526 /* Add a placeholder for simduid. */
4527 if (sctx.is_simt && maybe_ne (sctx.max_vf, 1U))
4528 sctx.simt_eargs.safe_push (NULL_TREE);
4529
4530 unsigned task_reduction_cnt = 0;
4531 unsigned task_reduction_cntorig = 0;
4532 unsigned task_reduction_cnt_full = 0;
4533 unsigned task_reduction_cntorig_full = 0;
4534 unsigned task_reduction_other_cnt = 0;
4535 tree tskred_atype = NULL_TREE, tskred_avar = NULL_TREE;
4536 tree tskred_base = NULL_TREE, tskred_temp = NULL_TREE;
4537 /* Do all the fixed sized types in the first pass, and the variable sized
4538 types in the second pass. This makes sure that the scalar arguments to
4539 the variable sized types are processed before we use them in the
4540 variable sized operations. For task reductions we use 4 passes: in the
4541 first two we ignore them, in the third we gather arguments for the
4542 GOMP_task_reduction_remap call, and in the last pass we actually handle
4543 the task reductions. */
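/* Editorial pass summary:
     pass 0: fixed sized types (task reductions skipped)
     pass 1: variable sized types (task reductions skipped)
     pass 2: gather addresses for the GOMP_task_reduction_remap call
     pass 3: handle the task reductions themselves  */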
4544 for (pass = 0; pass < ((task_reduction_cnt || task_reduction_other_cnt)
4545 ? 4 : 2); ++pass)
4546 {
4547 if (pass == 2 && task_reduction_cnt)
4548 {
4549 tskred_atype
4550 = build_array_type_nelts (ptr_type_node, task_reduction_cnt
4551 + task_reduction_cntorig);
4552 tskred_avar = create_tmp_var_raw (tskred_atype);
4553 gimple_add_tmp_var (tskred_avar);
4554 TREE_ADDRESSABLE (tskred_avar) = 1;
4555 task_reduction_cnt_full = task_reduction_cnt;
4556 task_reduction_cntorig_full = task_reduction_cntorig;
4557 }
4558 else if (pass == 3 && task_reduction_cnt)
4559 {
4560 x = builtin_decl_explicit (BUILT_IN_GOMP_TASK_REDUCTION_REMAP);
4561 gimple *g
4562 = gimple_build_call (x, 3, size_int (task_reduction_cnt),
4563 size_int (task_reduction_cntorig),
4564 build_fold_addr_expr (tskred_avar));
4565 gimple_seq_add_stmt (ilist, g);
4566 }
4567 if (pass == 3 && task_reduction_other_cnt)
4568 {
4569 /* For reduction clauses, build
4570 tskred_base = (void *) tskred_temp[2]
4571 + omp_get_thread_num () * tskred_temp[1]
4572 or, if tskred_temp[1] is known to be constant, use that constant
4573 directly. This is the start of the private reduction copy block
4574 for the current thread. */
4575 tree v = create_tmp_var (integer_type_node);
4576 x = builtin_decl_explicit (BUILT_IN_OMP_GET_THREAD_NUM);
4577 gimple *g = gimple_build_call (x, 0);
4578 gimple_call_set_lhs (g, v);
4579 gimple_seq_add_stmt (ilist, g);
4580 c = omp_find_clause (clauses, OMP_CLAUSE__REDUCTEMP_);
4581 tskred_temp = OMP_CLAUSE_DECL (c);
4582 if (is_taskreg_ctx (ctx))
4583 tskred_temp = lookup_decl (tskred_temp, ctx);
4584 tree v2 = create_tmp_var (sizetype);
4585 g = gimple_build_assign (v2, NOP_EXPR, v);
4586 gimple_seq_add_stmt (ilist, g);
4587 if (ctx->task_reductions[0])
4588 v = fold_convert (sizetype, ctx->task_reductions[0]);
4589 else
4590 v = task_reduction_read (ilist, tskred_temp, sizetype, 1);
4591 tree v3 = create_tmp_var (sizetype);
4592 g = gimple_build_assign (v3, MULT_EXPR, v2, v);
4593 gimple_seq_add_stmt (ilist, g);
4594 v = task_reduction_read (ilist, tskred_temp, ptr_type_node, 2);
4595 tskred_base = create_tmp_var (ptr_type_node);
4596 g = gimple_build_assign (tskred_base, POINTER_PLUS_EXPR, v, v3);
4597 gimple_seq_add_stmt (ilist, g);
4598 }
4599 task_reduction_cnt = 0;
4600 task_reduction_cntorig = 0;
4601 task_reduction_other_cnt = 0;
4602 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
4603 {
4604 enum omp_clause_code c_kind = OMP_CLAUSE_CODE (c);
4605 tree var, new_var;
4606 bool by_ref;
4607 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
4608 bool task_reduction_p = false;
4609 bool task_reduction_needs_orig_p = false;
4610 tree cond = NULL_TREE;
4611 tree allocator, allocate_ptr;
4612
4613 switch (c_kind)
4614 {
4615 case OMP_CLAUSE_PRIVATE:
4616 if (OMP_CLAUSE_PRIVATE_DEBUG (c))
4617 continue;
4618 break;
4619 case OMP_CLAUSE_SHARED:
4620 /* Ignore shared directives in teams construct inside
4621 of target construct. */
4622 if (gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS
4623 && !is_host_teams_ctx (ctx))
4624 continue;
4625 if (maybe_lookup_decl (OMP_CLAUSE_DECL (c), ctx) == NULL)
4626 {
4627 gcc_assert (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c)
4628 || is_global_var (OMP_CLAUSE_DECL (c)));
4629 continue;
4630 }
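/* FALLTHRU */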
4631 case OMP_CLAUSE_FIRSTPRIVATE:
4632 case OMP_CLAUSE_COPYIN:
4633 break;
4634 case OMP_CLAUSE_LINEAR:
4635 if (!OMP_CLAUSE_LINEAR_NO_COPYIN (c)
4636 && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c))
4637 lastprivate_firstprivate = true;
4638 break;
4639 case OMP_CLAUSE_REDUCTION:
4640 case OMP_CLAUSE_IN_REDUCTION:
4641 if (is_task_ctx (ctx) || OMP_CLAUSE_REDUCTION_TASK (c))
4642 {
4643 task_reduction_p = true;
4644 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION)
4645 {
4646 task_reduction_other_cnt++;
4647 if (pass == 2)
4648 continue;
4649 }
4650 else
4651 task_reduction_cnt++;
4652 if (OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c))
4653 {
4654 var = OMP_CLAUSE_DECL (c);
4655 /* If var is a global variable that isn't privatized
4656 in outer contexts, we don't need to look up the
4657 original address, it is always the address of the
4658 global variable itself. */
4659 if (!DECL_P (var)
4660 || omp_is_reference (var)
4661 || !is_global_var
4662 (maybe_lookup_decl_in_outer_ctx (var, ctx)))
4663 {
4664 task_reduction_needs_orig_p = true;
4665 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION)
4666 task_reduction_cntorig++;
4667 }
4668 }
4669 }
4670 else if (OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c))
4671 reduction_omp_orig_ref = true;
4672 break;
4673 case OMP_CLAUSE__REDUCTEMP_:
4674 if (!is_taskreg_ctx (ctx))
4675 continue;
4676 /* FALLTHRU */
4677 case OMP_CLAUSE__LOOPTEMP_:
4678 /* Handle _looptemp_/_reductemp_ clauses only on
4679 parallel/task. */
4680 if (fd)
4681 continue;
4682 break;
4683 case OMP_CLAUSE_LASTPRIVATE:
4684 if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
4685 {
4686 lastprivate_firstprivate = true;
4687 if (pass != 0 || is_taskloop_ctx (ctx))
4688 continue;
4689 }
4690 /* Even without corresponding firstprivate, if
4691 decl is Fortran allocatable, it needs outer var
4692 reference. */
4693 else if (pass == 0
4694 && lang_hooks.decls.omp_private_outer_ref
4695 (OMP_CLAUSE_DECL (c)))
4696 lastprivate_firstprivate = true;
4697 break;
4698 case OMP_CLAUSE_ALIGNED:
4699 if (pass != 1)
4700 continue;
4701 var = OMP_CLAUSE_DECL (c);
4702 if (TREE_CODE (TREE_TYPE (var)) == POINTER_TYPE
4703 && !is_global_var (var))
4704 {
4705 new_var = maybe_lookup_decl (var, ctx);
4706 if (new_var == NULL_TREE)
4707 new_var = maybe_lookup_decl_in_outer_ctx (var, ctx);
4708 x = builtin_decl_explicit (BUILT_IN_ASSUME_ALIGNED);
4709 tree alarg = omp_clause_aligned_alignment (c);
4710 alarg = fold_convert_loc (clause_loc, size_type_node, alarg);
4711 x = build_call_expr_loc (clause_loc, x, 2, new_var, alarg);
4712 x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x);
4713 x = build2 (MODIFY_EXPR, TREE_TYPE (new_var), new_var, x);
4714 gimplify_and_add (x, ilist);
4715 }
4716 else if (TREE_CODE (TREE_TYPE (var)) == ARRAY_TYPE
4717 && is_global_var (var))
4718 {
4719 tree ptype = build_pointer_type (TREE_TYPE (var)), t, t2;
4720 new_var = lookup_decl (var, ctx);
4721 t = maybe_lookup_decl_in_outer_ctx (var, ctx);
4722 t = build_fold_addr_expr_loc (clause_loc, t);
4723 t2 = builtin_decl_explicit (BUILT_IN_ASSUME_ALIGNED);
4724 tree alarg = omp_clause_aligned_alignment (c);
4725 alarg = fold_convert_loc (clause_loc, size_type_node, alarg);
4726 t = build_call_expr_loc (clause_loc, t2, 2, t, alarg);
4727 t = fold_convert_loc (clause_loc, ptype, t);
4728 x = create_tmp_var (ptype);
4729 t = build2 (MODIFY_EXPR, ptype, x, t);
4730 gimplify_and_add (t, ilist);
4731 t = build_simple_mem_ref_loc (clause_loc, x);
4732 SET_DECL_VALUE_EXPR (new_var, t);
4733 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
4734 }
4735 continue;
4736 case OMP_CLAUSE__CONDTEMP_:
4737 if (is_parallel_ctx (ctx)
4738 || (is_simd && !OMP_CLAUSE__CONDTEMP__ITER (c)))
4739 break;
4740 continue;
4741 default:
4742 continue;
4743 }
4744
4745 if (task_reduction_p != (pass >= 2))
4746 continue;
4747
4748 allocator = NULL_TREE;
4749 allocate_ptr = NULL_TREE;
4750 new_var = var = OMP_CLAUSE_DECL (c);
4751 if ((c_kind == OMP_CLAUSE_REDUCTION
4752 || c_kind == OMP_CLAUSE_IN_REDUCTION)
4753 && TREE_CODE (var) == MEM_REF)
4754 {
4755 var = TREE_OPERAND (var, 0);
4756 if (TREE_CODE (var) == POINTER_PLUS_EXPR)
4757 var = TREE_OPERAND (var, 0);
4758 if (TREE_CODE (var) == INDIRECT_REF
4759 || TREE_CODE (var) == ADDR_EXPR)
4760 var = TREE_OPERAND (var, 0);
4761 if (is_variable_sized (var))
4762 {
4763 gcc_assert (DECL_HAS_VALUE_EXPR_P (var));
4764 var = DECL_VALUE_EXPR (var);
4765 gcc_assert (TREE_CODE (var) == INDIRECT_REF);
4766 var = TREE_OPERAND (var, 0);
4767 gcc_assert (DECL_P (var));
4768 }
4769 new_var = var;
4770 }
4771 if (c_kind != OMP_CLAUSE_COPYIN)
4772 new_var = lookup_decl (var, ctx);
4773
4774 if (c_kind == OMP_CLAUSE_SHARED || c_kind == OMP_CLAUSE_COPYIN)
4775 {
4776 if (pass != 0)
4777 continue;
4778 }
4779 /* C/C++ array section reductions. */
4780 else if ((c_kind == OMP_CLAUSE_REDUCTION
4781 || c_kind == OMP_CLAUSE_IN_REDUCTION)
4782 && var != OMP_CLAUSE_DECL (c))
4783 {
4784 if (pass == 0)
4785 continue;
4786
4787 tree bias = TREE_OPERAND (OMP_CLAUSE_DECL (c), 1);
4788 tree orig_var = TREE_OPERAND (OMP_CLAUSE_DECL (c), 0);
4789
4790 if (TREE_CODE (orig_var) == POINTER_PLUS_EXPR)
4791 {
4792 tree b = TREE_OPERAND (orig_var, 1);
4793 b = maybe_lookup_decl (b, ctx);
4794 if (b == NULL)
4795 {
4796 b = TREE_OPERAND (orig_var, 1);
4797 b = maybe_lookup_decl_in_outer_ctx (b, ctx);
4798 }
4799 if (integer_zerop (bias))
4800 bias = b;
4801 else
4802 {
4803 bias = fold_convert_loc (clause_loc,
4804 TREE_TYPE (b), bias);
4805 bias = fold_build2_loc (clause_loc, PLUS_EXPR,
4806 TREE_TYPE (b), b, bias);
4807 }
4808 orig_var = TREE_OPERAND (orig_var, 0);
4809 }
4810 if (pass == 2)
4811 {
4812 tree out = maybe_lookup_decl_in_outer_ctx (var, ctx);
4813 if (is_global_var (out)
4814 && TREE_CODE (TREE_TYPE (out)) != POINTER_TYPE
4815 && (TREE_CODE (TREE_TYPE (out)) != REFERENCE_TYPE
4816 || (TREE_CODE (TREE_TYPE (TREE_TYPE (out)))
4817 != POINTER_TYPE)))
4818 x = var;
4819 else
4820 {
4821 bool by_ref = use_pointer_for_field (var, NULL);
4822 x = build_receiver_ref (var, by_ref, ctx);
4823 if (TREE_CODE (TREE_TYPE (var)) == REFERENCE_TYPE
4824 && (TREE_CODE (TREE_TYPE (TREE_TYPE (var)))
4825 == POINTER_TYPE))
4826 x = build_fold_addr_expr (x);
4827 }
4828 if (TREE_CODE (orig_var) == INDIRECT_REF)
4829 x = build_simple_mem_ref (x);
4830 else if (TREE_CODE (orig_var) == ADDR_EXPR)
4831 {
4832 if (var == TREE_OPERAND (orig_var, 0))
4833 x = build_fold_addr_expr (x);
4834 }
4835 bias = fold_convert (sizetype, bias);
4836 x = fold_convert (ptr_type_node, x);
4837 x = fold_build2_loc (clause_loc, POINTER_PLUS_EXPR,
4838 TREE_TYPE (x), x, bias);
4839 unsigned cnt = task_reduction_cnt - 1;
4840 if (!task_reduction_needs_orig_p)
4841 cnt += (task_reduction_cntorig_full
4842 - task_reduction_cntorig);
4843 else
4844 cnt = task_reduction_cntorig - 1;
4845 tree r = build4 (ARRAY_REF, ptr_type_node, tskred_avar,
4846 size_int (cnt), NULL_TREE, NULL_TREE);
4847 gimplify_assign (r, x, ilist);
4848 continue;
4849 }
4850
4851 if (TREE_CODE (orig_var) == INDIRECT_REF
4852 || TREE_CODE (orig_var) == ADDR_EXPR)
4853 orig_var = TREE_OPERAND (orig_var, 0);
4854 tree d = OMP_CLAUSE_DECL (c);
4855 tree type = TREE_TYPE (d);
4856 gcc_assert (TREE_CODE (type) == ARRAY_TYPE);
4857 tree v = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
4858 const char *name = get_name (orig_var);
4859 if (pass == 3)
4860 {
4861 tree xv = create_tmp_var (ptr_type_node);
4862 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION)
4863 {
4864 unsigned cnt = task_reduction_cnt - 1;
4865 if (!task_reduction_needs_orig_p)
4866 cnt += (task_reduction_cntorig_full
4867 - task_reduction_cntorig);
4868 else
4869 cnt = task_reduction_cntorig - 1;
4870 x = build4 (ARRAY_REF, ptr_type_node, tskred_avar,
4871 size_int (cnt), NULL_TREE, NULL_TREE);
4872
4873 gimple *g = gimple_build_assign (xv, x);
4874 gimple_seq_add_stmt (ilist, g);
4875 }
4876 else
4877 {
4878 unsigned int idx = *ctx->task_reduction_map->get (c);
4879 tree off;
4880 if (ctx->task_reductions[1 + idx])
4881 off = fold_convert (sizetype,
4882 ctx->task_reductions[1 + idx]);
4883 else
4884 off = task_reduction_read (ilist, tskred_temp, sizetype,
4885 7 + 3 * idx + 1);
4886 gimple *g = gimple_build_assign (xv, POINTER_PLUS_EXPR,
4887 tskred_base, off);
4888 gimple_seq_add_stmt (ilist, g);
4889 }
4890 x = fold_convert (build_pointer_type (boolean_type_node),
4891 xv);
4892 if (TREE_CONSTANT (v))
4893 x = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (x), x,
4894 TYPE_SIZE_UNIT (type));
4895 else
4896 {
4897 tree t = maybe_lookup_decl (v, ctx);
4898 if (t)
4899 v = t;
4900 else
4901 v = maybe_lookup_decl_in_outer_ctx (v, ctx);
4902 gimplify_expr (&v, ilist, NULL, is_gimple_val,
4903 fb_rvalue);
4904 t = fold_build2_loc (clause_loc, PLUS_EXPR,
4905 TREE_TYPE (v), v,
4906 build_int_cst (TREE_TYPE (v), 1));
4907 t = fold_build2_loc (clause_loc, MULT_EXPR,
4908 TREE_TYPE (v), t,
4909 TYPE_SIZE_UNIT (TREE_TYPE (type)));
4910 x = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (x), x, t);
4911 }
4912 cond = create_tmp_var (TREE_TYPE (x));
4913 gimplify_assign (cond, x, ilist);
4914 x = xv;
4915 }
4916 else if (TREE_CONSTANT (v))
4917 {
4918 x = create_tmp_var_raw (type, name);
4919 gimple_add_tmp_var (x);
4920 TREE_ADDRESSABLE (x) = 1;
4921 x = build_fold_addr_expr_loc (clause_loc, x);
4922 }
4923 else
4924 {
4925 tree atmp
4926 = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
4927 tree t = maybe_lookup_decl (v, ctx);
4928 if (t)
4929 v = t;
4930 else
4931 v = maybe_lookup_decl_in_outer_ctx (v, ctx);
4932 gimplify_expr (&v, ilist, NULL, is_gimple_val, fb_rvalue);
4933 t = fold_build2_loc (clause_loc, PLUS_EXPR,
4934 TREE_TYPE (v), v,
4935 build_int_cst (TREE_TYPE (v), 1));
4936 t = fold_build2_loc (clause_loc, MULT_EXPR,
4937 TREE_TYPE (v), t,
4938 TYPE_SIZE_UNIT (TREE_TYPE (type)));
4939 tree al = size_int (TYPE_ALIGN (TREE_TYPE (type)));
4940 x = build_call_expr_loc (clause_loc, atmp, 2, t, al);
4941 }
4942
4943 tree ptype = build_pointer_type (TREE_TYPE (type));
4944 x = fold_convert_loc (clause_loc, ptype, x);
4945 tree y = create_tmp_var (ptype, name);
4946 gimplify_assign (y, x, ilist);
4947 x = y;
4948 tree yb = y;
4949
4950 if (!integer_zerop (bias))
4951 {
4952 bias = fold_convert_loc (clause_loc, pointer_sized_int_node,
4953 bias);
4954 yb = fold_convert_loc (clause_loc, pointer_sized_int_node,
4955 x);
4956 yb = fold_build2_loc (clause_loc, MINUS_EXPR,
4957 pointer_sized_int_node, yb, bias);
4958 x = fold_convert_loc (clause_loc, TREE_TYPE (x), yb);
4959 yb = create_tmp_var (ptype, name);
4960 gimplify_assign (yb, x, ilist);
4961 x = yb;
4962 }
4963
4964 d = TREE_OPERAND (d, 0);
4965 if (TREE_CODE (d) == POINTER_PLUS_EXPR)
4966 d = TREE_OPERAND (d, 0);
4967 if (TREE_CODE (d) == ADDR_EXPR)
4968 {
4969 if (orig_var != var)
4970 {
4971 gcc_assert (is_variable_sized (orig_var));
4972 x = fold_convert_loc (clause_loc, TREE_TYPE (new_var),
4973 x);
4974 gimplify_assign (new_var, x, ilist);
4975 tree new_orig_var = lookup_decl (orig_var, ctx);
4976 tree t = build_fold_indirect_ref (new_var);
4977 DECL_IGNORED_P (new_var) = 0;
4978 TREE_THIS_NOTRAP (t) = 1;
4979 SET_DECL_VALUE_EXPR (new_orig_var, t);
4980 DECL_HAS_VALUE_EXPR_P (new_orig_var) = 1;
4981 }
4982 else
4983 {
4984 x = build2 (MEM_REF, TREE_TYPE (new_var), x,
4985 build_int_cst (ptype, 0));
4986 SET_DECL_VALUE_EXPR (new_var, x);
4987 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
4988 }
4989 }
4990 else
4991 {
4992 gcc_assert (orig_var == var);
4993 if (TREE_CODE (d) == INDIRECT_REF)
4994 {
4995 x = create_tmp_var (ptype, name);
4996 TREE_ADDRESSABLE (x) = 1;
4997 gimplify_assign (x, yb, ilist);
4998 x = build_fold_addr_expr_loc (clause_loc, x);
4999 }
5000 x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x);
5001 gimplify_assign (new_var, x, ilist);
5002 }
5003 /* GOMP_taskgroup_reduction_register memsets the whole
5004 array to zero. If the initializer is zero, we don't
5005 need to initialize it again, just mark it as ever
5006 used unconditionally, i.e. cond = true. */
5007 if (cond
5008 && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) == NULL_TREE
5009 && initializer_zerop (omp_reduction_init (c,
5010 TREE_TYPE (type))))
5011 {
5012 gimple *g = gimple_build_assign (build_simple_mem_ref (cond),
5013 boolean_true_node);
5014 gimple_seq_add_stmt (ilist, g);
5015 continue;
5016 }
5017 tree end = create_artificial_label (UNKNOWN_LOCATION);
5018 if (cond)
5019 {
5020 gimple *g;
5021 if (!is_parallel_ctx (ctx))
5022 {
5023 tree condv = create_tmp_var (boolean_type_node);
5024 g = gimple_build_assign (condv,
5025 build_simple_mem_ref (cond));
5026 gimple_seq_add_stmt (ilist, g);
5027 tree lab1 = create_artificial_label (UNKNOWN_LOCATION);
5028 g = gimple_build_cond (NE_EXPR, condv,
5029 boolean_false_node, end, lab1);
5030 gimple_seq_add_stmt (ilist, g);
5031 gimple_seq_add_stmt (ilist, gimple_build_label (lab1));
5032 }
5033 g = gimple_build_assign (build_simple_mem_ref (cond),
5034 boolean_true_node);
5035 gimple_seq_add_stmt (ilist, g);
5036 }
5037
5038 tree y1 = create_tmp_var (ptype);
5039 gimplify_assign (y1, y, ilist);
5040 tree i2 = NULL_TREE, y2 = NULL_TREE;
5041 tree body2 = NULL_TREE, end2 = NULL_TREE;
5042 tree y3 = NULL_TREE, y4 = NULL_TREE;
5043 if (task_reduction_needs_orig_p)
5044 {
5045 y3 = create_tmp_var (ptype);
5046 tree ref;
5047 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION)
5048 ref = build4 (ARRAY_REF, ptr_type_node, tskred_avar,
5049 size_int (task_reduction_cnt_full
5050 + task_reduction_cntorig - 1),
5051 NULL_TREE, NULL_TREE);
5052 else
5053 {
5054 unsigned int idx = *ctx->task_reduction_map->get (c);
5055 ref = task_reduction_read (ilist, tskred_temp, ptype,
5056 7 + 3 * idx);
5057 }
5058 gimplify_assign (y3, ref, ilist);
5059 }
5060 else if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) || is_simd)
5061 {
5062 if (pass != 3)
5063 {
5064 y2 = create_tmp_var (ptype);
5065 gimplify_assign (y2, y, ilist);
5066 }
5067 if (is_simd || OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c))
5068 {
5069 tree ref = build_outer_var_ref (var, ctx);
5070 /* For references, build_outer_var_ref already performs this. */
5071 if (TREE_CODE (d) == INDIRECT_REF)
5072 gcc_assert (omp_is_reference (var));
5073 else if (TREE_CODE (d) == ADDR_EXPR)
5074 ref = build_fold_addr_expr (ref);
5075 else if (omp_is_reference (var))
5076 ref = build_fold_addr_expr (ref);
5077 ref = fold_convert_loc (clause_loc, ptype, ref);
5078 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c)
5079 && OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c))
5080 {
5081 y3 = create_tmp_var (ptype);
5082 gimplify_assign (y3, unshare_expr (ref), ilist);
5083 }
5084 if (is_simd)
5085 {
5086 y4 = create_tmp_var (ptype);
5087 gimplify_assign (y4, ref, dlist);
5088 }
5089 }
5090 }
5091 tree i = create_tmp_var (TREE_TYPE (v));
5092 gimplify_assign (i, build_int_cst (TREE_TYPE (v), 0), ilist);
5093 tree body = create_artificial_label (UNKNOWN_LOCATION);
5094 gimple_seq_add_stmt (ilist, gimple_build_label (body));
5095 if (y2)
5096 {
5097 i2 = create_tmp_var (TREE_TYPE (v));
5098 gimplify_assign (i2, build_int_cst (TREE_TYPE (v), 0), dlist);
5099 body2 = create_artificial_label (UNKNOWN_LOCATION);
5100 end2 = create_artificial_label (UNKNOWN_LOCATION);
5101 gimple_seq_add_stmt (dlist, gimple_build_label (body2));
5102 }
5103 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
5104 {
5105 tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
5106 tree decl_placeholder
5107 = OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c);
5108 SET_DECL_VALUE_EXPR (decl_placeholder,
5109 build_simple_mem_ref (y1));
5110 DECL_HAS_VALUE_EXPR_P (decl_placeholder) = 1;
5111 SET_DECL_VALUE_EXPR (placeholder,
5112 y3 ? build_simple_mem_ref (y3)
5113 : error_mark_node);
5114 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
5115 x = lang_hooks.decls.omp_clause_default_ctor
5116 (c, build_simple_mem_ref (y1),
5117 y3 ? build_simple_mem_ref (y3) : NULL_TREE);
5118 if (x)
5119 gimplify_and_add (x, ilist);
5120 if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c))
5121 {
5122 gimple_seq tseq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c);
5123 lower_omp (&tseq, ctx);
5124 gimple_seq_add_seq (ilist, tseq);
5125 }
5126 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL;
5127 if (is_simd)
5128 {
5129 SET_DECL_VALUE_EXPR (decl_placeholder,
5130 build_simple_mem_ref (y2));
5131 SET_DECL_VALUE_EXPR (placeholder,
5132 build_simple_mem_ref (y4));
5133 gimple_seq tseq = OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c);
5134 lower_omp (&tseq, ctx);
5135 gimple_seq_add_seq (dlist, tseq);
5136 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
5137 }
5138 DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
5139 DECL_HAS_VALUE_EXPR_P (decl_placeholder) = 0;
5140 if (y2)
5141 {
5142 x = lang_hooks.decls.omp_clause_dtor
5143 (c, build_simple_mem_ref (y2));
5144 if (x)
5145 gimplify_and_add (x, dlist);
5146 }
5147 }
5148 else
5149 {
5150 x = omp_reduction_init (c, TREE_TYPE (type));
5151 enum tree_code code = OMP_CLAUSE_REDUCTION_CODE (c);
5152
5153 /* reduction(-:var) sums up the partial results, so it
5154 acts identically to reduction(+:var). */
5155 if (code == MINUS_EXPR)
5156 code = PLUS_EXPR;
5157
5158 gimplify_assign (build_simple_mem_ref (y1), x, ilist);
5159 if (is_simd)
5160 {
5161 x = build2 (code, TREE_TYPE (type),
5162 build_simple_mem_ref (y4),
5163 build_simple_mem_ref (y2));
5164 gimplify_assign (build_simple_mem_ref (y4), x, dlist);
5165 }
5166 }
5167 gimple *g
5168 = gimple_build_assign (y1, POINTER_PLUS_EXPR, y1,
5169 TYPE_SIZE_UNIT (TREE_TYPE (type)));
5170 gimple_seq_add_stmt (ilist, g);
5171 if (y3)
5172 {
5173 g = gimple_build_assign (y3, POINTER_PLUS_EXPR, y3,
5174 TYPE_SIZE_UNIT (TREE_TYPE (type)));
5175 gimple_seq_add_stmt (ilist, g);
5176 }
5177 g = gimple_build_assign (i, PLUS_EXPR, i,
5178 build_int_cst (TREE_TYPE (i), 1));
5179 gimple_seq_add_stmt (ilist, g);
5180 g = gimple_build_cond (LE_EXPR, i, v, body, end);
5181 gimple_seq_add_stmt (ilist, g);
5182 gimple_seq_add_stmt (ilist, gimple_build_label (end));
5183 if (y2)
5184 {
5185 g = gimple_build_assign (y2, POINTER_PLUS_EXPR, y2,
5186 TYPE_SIZE_UNIT (TREE_TYPE (type)));
5187 gimple_seq_add_stmt (dlist, g);
5188 if (y4)
5189 {
5190 g = gimple_build_assign
5191 (y4, POINTER_PLUS_EXPR, y4,
5192 TYPE_SIZE_UNIT (TREE_TYPE (type)));
5193 gimple_seq_add_stmt (dlist, g);
5194 }
5195 g = gimple_build_assign (i2, PLUS_EXPR, i2,
5196 build_int_cst (TREE_TYPE (i2), 1));
5197 gimple_seq_add_stmt (dlist, g);
5198 g = gimple_build_cond (LE_EXPR, i2, v, body2, end2);
5199 gimple_seq_add_stmt (dlist, g);
5200 gimple_seq_add_stmt (dlist, gimple_build_label (end2));
5201 }
5202 continue;
5203 }
5204 else if (pass == 2)
5205 {
5206 if (is_global_var (maybe_lookup_decl_in_outer_ctx (var, ctx)))
5207 x = var;
5208 else
5209 {
5210 bool by_ref = use_pointer_for_field (var, ctx);
5211 x = build_receiver_ref (var, by_ref, ctx);
5212 }
5213 if (!omp_is_reference (var))
5214 x = build_fold_addr_expr (x);
5215 x = fold_convert (ptr_type_node, x);
5216 unsigned cnt = task_reduction_cnt - 1;
5217 if (!task_reduction_needs_orig_p)
5218 cnt += task_reduction_cntorig_full - task_reduction_cntorig;
5219 else
5220 cnt = task_reduction_cntorig - 1;
5221 tree r = build4 (ARRAY_REF, ptr_type_node, tskred_avar,
5222 size_int (cnt), NULL_TREE, NULL_TREE);
5223 gimplify_assign (r, x, ilist);
5224 continue;
5225 }
5226 else if (pass == 3)
5227 {
5228 tree type = TREE_TYPE (new_var);
5229 if (!omp_is_reference (var))
5230 type = build_pointer_type (type);
5231 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION)
5232 {
5233 unsigned cnt = task_reduction_cnt - 1;
5234 if (!task_reduction_needs_orig_p)
5235 cnt += (task_reduction_cntorig_full
5236 - task_reduction_cntorig);
5237 else
5238 cnt = task_reduction_cntorig - 1;
5239 x = build4 (ARRAY_REF, ptr_type_node, tskred_avar,
5240 size_int (cnt), NULL_TREE, NULL_TREE);
5241 }
5242 else
5243 {
5244 unsigned int idx = *ctx->task_reduction_map->get (c);
5245 tree off;
5246 if (ctx->task_reductions[1 + idx])
5247 off = fold_convert (sizetype,
5248 ctx->task_reductions[1 + idx]);
5249 else
5250 off = task_reduction_read (ilist, tskred_temp, sizetype,
5251 7 + 3 * idx + 1);
5252 x = fold_build2 (POINTER_PLUS_EXPR, ptr_type_node,
5253 tskred_base, off);
5254 }
5255 x = fold_convert (type, x);
5256 tree t;
5257 if (omp_is_reference (var))
5258 {
5259 gimplify_assign (new_var, x, ilist);
5260 t = new_var;
5261 new_var = build_simple_mem_ref (new_var);
5262 }
5263 else
5264 {
5265 t = create_tmp_var (type);
5266 gimplify_assign (t, x, ilist);
5267 SET_DECL_VALUE_EXPR (new_var, build_simple_mem_ref (t));
5268 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
5269 }
5270 t = fold_convert (build_pointer_type (boolean_type_node), t);
5271 t = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (t), t,
5272 TYPE_SIZE_UNIT (TREE_TYPE (type)));
5273 cond = create_tmp_var (TREE_TYPE (t));
5274 gimplify_assign (cond, t, ilist);
5275 }
5276 else if (is_variable_sized (var))
5277 {
5278 /* For variable sized types, we need to allocate the
5279 actual storage here. Call alloca and store the
5280 result in the pointer decl that we created elsewhere. */
5281 if (pass == 0)
5282 continue;
5283
5284 if (c_kind != OMP_CLAUSE_FIRSTPRIVATE || !is_task_ctx (ctx))
5285 {
5286 tree tmp;
5287
5288 ptr = DECL_VALUE_EXPR (new_var);
5289 gcc_assert (TREE_CODE (ptr) == INDIRECT_REF);
5290 ptr = TREE_OPERAND (ptr, 0);
5291 gcc_assert (DECL_P (ptr));
5292 x = TYPE_SIZE_UNIT (TREE_TYPE (new_var));
5293
5294 if (lower_private_allocate (var, new_var, allocator,
5295 allocate_ptr, ilist, ctx,
5296 false, x))
5297 tmp = allocate_ptr;
5298 else
5299 {
5300 /* void *tmp = __builtin_alloca_with_align (x, DECL_ALIGN (var)); */
5301 tree atmp
5302 = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
5303 gcall *stmt
5304 = gimple_build_call (atmp, 2, x,
5305 size_int (DECL_ALIGN (var)));
5306 cfun->calls_alloca = 1;
5307 tmp = create_tmp_var_raw (ptr_type_node);
5308 gimple_add_tmp_var (tmp);
5309 gimple_call_set_lhs (stmt, tmp);
5310
5311 gimple_seq_add_stmt (ilist, stmt);
5312 }
5313
5314 x = fold_convert_loc (clause_loc, TREE_TYPE (ptr), tmp);
5315 gimplify_assign (ptr, x, ilist);
5316 }
5317 }
5318 else if (omp_is_reference (var)
5319 && (c_kind != OMP_CLAUSE_FIRSTPRIVATE
5320 || !OMP_CLAUSE_FIRSTPRIVATE_NO_REFERENCE (c)))
5321 {
5322 /* For references that are being privatized for Fortran,
5323 allocate new backing storage for the new pointer
5324 variable. This allows us to avoid changing all the
5325 code that expects a pointer to something that expects
5326 a direct variable. */
5327 if (pass == 0)
5328 continue;
5329
5330 x = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (new_var)));
5331 if (c_kind == OMP_CLAUSE_FIRSTPRIVATE && is_task_ctx (ctx))
5332 {
5333 x = build_receiver_ref (var, false, ctx);
5334 x = build_fold_addr_expr_loc (clause_loc, x);
5335 }
5336 else if (lower_private_allocate (var, new_var, allocator,
5337 allocate_ptr,
5338 ilist, ctx, true, x))
5339 x = allocate_ptr;
5340 else if (TREE_CONSTANT (x))
5341 {
5342 /* For a reduction in a SIMD loop, defer adding the
5343 initialization of the reference, because if we decide
5344 to use a SIMD array for it, the initialization could cause
5345 an expansion ICE. Ditto for other privatization clauses. */
5346 if (is_simd)
5347 x = NULL_TREE;
5348 else
5349 {
5350 x = create_tmp_var_raw (TREE_TYPE (TREE_TYPE (new_var)),
5351 get_name (var));
5352 gimple_add_tmp_var (x);
5353 TREE_ADDRESSABLE (x) = 1;
5354 x = build_fold_addr_expr_loc (clause_loc, x);
5355 }
5356 }
5357 else
5358 {
5359 tree atmp
5360 = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
5361 tree rtype = TREE_TYPE (TREE_TYPE (new_var));
5362 tree al = size_int (TYPE_ALIGN (rtype));
5363 x = build_call_expr_loc (clause_loc, atmp, 2, x, al);
5364 }
5365
5366 if (x)
5367 {
5368 x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x);
5369 gimplify_assign (new_var, x, ilist);
5370 }
5371
5372 new_var = build_simple_mem_ref_loc (clause_loc, new_var);
5373 }
5374 else if ((c_kind == OMP_CLAUSE_REDUCTION
5375 || c_kind == OMP_CLAUSE_IN_REDUCTION)
5376 && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
5377 {
5378 if (pass == 0)
5379 continue;
5380 }
5381 else if (pass != 0)
5382 continue;
5383
5384 switch (OMP_CLAUSE_CODE (c))
5385 {
5386 case OMP_CLAUSE_SHARED:
5387 /* Ignore shared directives in teams construct inside
5388 target construct. */
5389 if (gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS
5390 && !is_host_teams_ctx (ctx))
5391 continue;
5392 /* Shared global vars are just accessed directly. */
5393 if (is_global_var (new_var))
5394 break;
5395 /* For taskloop firstprivate/lastprivate, represented
5396 as firstprivate and shared clause on the task, new_var
5397 is the firstprivate var. */
5398 if (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
5399 break;
5400 /* Set up the DECL_VALUE_EXPR for shared variables now. This
5401 needs to be delayed until after fixup_child_record_type so
5402 that we get the correct type during the dereference. */
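/* Illustrative sketch (field and variable names invented): in the
   outlined child function a shared variable V is then accessed as

     .omp_data_i->v

   through the DECL_VALUE_EXPR set up here, rather than through a
   private copy.  */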
5403 by_ref = use_pointer_for_field (var, ctx);
5404 x = build_receiver_ref (var, by_ref, ctx);
5405 SET_DECL_VALUE_EXPR (new_var, x);
5406 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
5407
5408 /* ??? If VAR is not passed by reference, and the variable
5409 hasn't been initialized yet, then we'll get a warning for
5410 the store into the omp_data_s structure. Ideally, we'd be
5411 able to notice this and not store anything at all, but
5412 we're generating code too early. Suppress the warning. */
5413 if (!by_ref)
5414 TREE_NO_WARNING (var) = 1;
5415 break;
5416
5417 case OMP_CLAUSE__CONDTEMP_:
5418 if (is_parallel_ctx (ctx))
5419 {
5420 x = build_receiver_ref (var, false, ctx);
5421 SET_DECL_VALUE_EXPR (new_var, x);
5422 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
5423 }
5424 else if (is_simd && !OMP_CLAUSE__CONDTEMP__ITER (c))
5425 {
5426 x = build_zero_cst (TREE_TYPE (var));
5427 goto do_private;
5428 }
5429 break;
5430
5431 case OMP_CLAUSE_LASTPRIVATE:
5432 if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
5433 break;
5434 /* FALLTHRU */
5435
5436 case OMP_CLAUSE_PRIVATE:
5437 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_PRIVATE)
5438 x = build_outer_var_ref (var, ctx);
5439 else if (OMP_CLAUSE_PRIVATE_OUTER_REF (c))
5440 {
5441 if (is_task_ctx (ctx))
5442 x = build_receiver_ref (var, false, ctx);
5443 else
5444 x = build_outer_var_ref (var, ctx, OMP_CLAUSE_PRIVATE);
5445 }
5446 else
5447 x = NULL;
5448 do_private:
5449 tree nx;
5450 bool copy_ctor;
5451 copy_ctor = false;
5452 lower_private_allocate (var, new_var, allocator, allocate_ptr,
5453 ilist, ctx, false, NULL_TREE);
5454 nx = unshare_expr (new_var);
5455 if (is_simd
5456 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
5457 && OMP_CLAUSE_LASTPRIVATE_LOOP_IV (c))
5458 copy_ctor = true;
5459 if (copy_ctor)
5460 nx = lang_hooks.decls.omp_clause_copy_ctor (c, nx, x);
5461 else
5462 nx = lang_hooks.decls.omp_clause_default_ctor (c, nx, x);
5463 if (is_simd)
5464 {
5465 tree y = lang_hooks.decls.omp_clause_dtor (c, new_var);
5466 if ((TREE_ADDRESSABLE (new_var) || nx || y
5467 || (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
5468 && (gimple_omp_for_collapse (ctx->stmt) != 1
5469 || (gimple_omp_for_index (ctx->stmt, 0)
5470 != new_var)))
5471 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE__CONDTEMP_
5472 || omp_is_reference (var))
5473 && lower_rec_simd_input_clauses (new_var, ctx, &sctx,
5474 ivar, lvar))
5475 {
5476 if (omp_is_reference (var))
5477 {
5478 gcc_assert (TREE_CODE (new_var) == MEM_REF);
5479 tree new_vard = TREE_OPERAND (new_var, 0);
5480 gcc_assert (DECL_P (new_vard));
5481 SET_DECL_VALUE_EXPR (new_vard,
5482 build_fold_addr_expr (lvar));
5483 DECL_HAS_VALUE_EXPR_P (new_vard) = 1;
5484 }
5485
5486 if (nx)
5487 {
5488 tree iv = unshare_expr (ivar);
5489 if (copy_ctor)
5490 x = lang_hooks.decls.omp_clause_copy_ctor (c, iv,
5491 x);
5492 else
5493 x = lang_hooks.decls.omp_clause_default_ctor (c,
5494 iv,
5495 x);
5496 }
5497 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE__CONDTEMP_)
5498 {
5499 x = build2 (MODIFY_EXPR, TREE_TYPE (ivar),
5500 unshare_expr (ivar), x);
5501 nx = x;
5502 }
5503 if (nx && x)
5504 gimplify_and_add (x, &llist[0]);
5505 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
5506 && OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c))
5507 {
5508 tree v = new_var;
5509 if (!DECL_P (v))
5510 {
5511 gcc_assert (TREE_CODE (v) == MEM_REF);
5512 v = TREE_OPERAND (v, 0);
5513 gcc_assert (DECL_P (v));
5514 }
5515 v = *ctx->lastprivate_conditional_map->get (v);
5516 tree t = create_tmp_var (TREE_TYPE (v));
5517 tree z = build_zero_cst (TREE_TYPE (v));
5518 tree orig_v
5519 = build_outer_var_ref (var, ctx,
5520 OMP_CLAUSE_LASTPRIVATE);
5521 gimple_seq_add_stmt (dlist,
5522 gimple_build_assign (t, z));
5523 gcc_assert (DECL_HAS_VALUE_EXPR_P (v));
5524 tree civar = DECL_VALUE_EXPR (v);
5525 gcc_assert (TREE_CODE (civar) == ARRAY_REF);
5526 civar = unshare_expr (civar);
5527 TREE_OPERAND (civar, 1) = sctx.idx;
5528 x = build2 (MODIFY_EXPR, TREE_TYPE (t), t,
5529 unshare_expr (civar));
5530 x = build2 (COMPOUND_EXPR, TREE_TYPE (orig_v), x,
5531 build2 (MODIFY_EXPR, TREE_TYPE (orig_v),
5532 orig_v, unshare_expr (ivar)));
5533 tree cond = build2 (LT_EXPR, boolean_type_node, t,
5534 civar);
5535 x = build3 (COND_EXPR, void_type_node, cond, x,
5536 void_node);
5537 gimple_seq tseq = NULL;
5538 gimplify_and_add (x, &tseq);
5539 if (ctx->outer)
5540 lower_omp (&tseq, ctx->outer);
5541 gimple_seq_add_seq (&llist[1], tseq);
5542 }
5543 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
5544 && ctx->for_simd_scan_phase)
5545 {
5546 x = unshare_expr (ivar);
5547 tree orig_v
5548 = build_outer_var_ref (var, ctx,
5549 OMP_CLAUSE_LASTPRIVATE);
5550 x = lang_hooks.decls.omp_clause_assign_op (c, x,
5551 orig_v);
5552 gimplify_and_add (x, &llist[0]);
5553 }
5554 if (y)
5555 {
5556 y = lang_hooks.decls.omp_clause_dtor (c, ivar);
5557 if (y)
5558 gimplify_and_add (y, &llist[1]);
5559 }
5560 break;
5561 }
5562 if (omp_is_reference (var))
5563 {
5564 gcc_assert (TREE_CODE (new_var) == MEM_REF);
5565 tree new_vard = TREE_OPERAND (new_var, 0);
5566 gcc_assert (DECL_P (new_vard));
5567 tree type = TREE_TYPE (TREE_TYPE (new_vard));
5568 x = TYPE_SIZE_UNIT (type);
5569 if (TREE_CONSTANT (x))
5570 {
5571 x = create_tmp_var_raw (type, get_name (var));
5572 gimple_add_tmp_var (x);
5573 TREE_ADDRESSABLE (x) = 1;
5574 x = build_fold_addr_expr_loc (clause_loc, x);
5575 x = fold_convert_loc (clause_loc,
5576 TREE_TYPE (new_vard), x);
5577 gimplify_assign (new_vard, x, ilist);
5578 }
5579 }
5580 }
5581 if (nx)
5582 gimplify_and_add (nx, ilist);
5583 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
5584 && is_simd
5585 && ctx->for_simd_scan_phase)
5586 {
5587 tree orig_v = build_outer_var_ref (var, ctx,
5588 OMP_CLAUSE_LASTPRIVATE);
5589 x = lang_hooks.decls.omp_clause_assign_op (c, new_var,
5590 orig_v);
5591 gimplify_and_add (x, ilist);
5592 }
5593 /* FALLTHRU */
5594
5595 do_dtor:
5596 x = lang_hooks.decls.omp_clause_dtor (c, new_var);
5597 if (x)
5598 gimplify_and_add (x, dlist);
5599 if (allocator)
5600 {
5601 tree f = builtin_decl_explicit (BUILT_IN_GOMP_FREE);
5602 gimple *g
5603 = gimple_build_call (f, 2, allocate_ptr, allocator);
5604 gimple_seq_add_stmt (dlist, g);
5605 }
5606 break;
5607
5608 case OMP_CLAUSE_LINEAR:
5609 if (!OMP_CLAUSE_LINEAR_NO_COPYIN (c))
5610 goto do_firstprivate;
5611 if (OMP_CLAUSE_LINEAR_NO_COPYOUT (c))
5612 x = NULL;
5613 else
5614 x = build_outer_var_ref (var, ctx);
5615 goto do_private;
5616
5617 case OMP_CLAUSE_FIRSTPRIVATE:
5618 if (is_task_ctx (ctx))
5619 {
5620 if ((omp_is_reference (var)
5621 && !OMP_CLAUSE_FIRSTPRIVATE_NO_REFERENCE (c))
5622 || is_variable_sized (var))
5623 goto do_dtor;
5624 else if (is_global_var (maybe_lookup_decl_in_outer_ctx (var,
5625 ctx))
5626 || use_pointer_for_field (var, NULL))
5627 {
5628 x = build_receiver_ref (var, false, ctx);
5629 SET_DECL_VALUE_EXPR (new_var, x);
5630 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
5631 goto do_dtor;
5632 }
5633 }
5634 if (OMP_CLAUSE_FIRSTPRIVATE_NO_REFERENCE (c)
5635 && omp_is_reference (var))
5636 {
5637 x = build_outer_var_ref (var, ctx);
5638 gcc_assert (TREE_CODE (x) == MEM_REF
5639 && integer_zerop (TREE_OPERAND (x, 1)));
5640 x = TREE_OPERAND (x, 0);
5641 x = lang_hooks.decls.omp_clause_copy_ctor
5642 (c, unshare_expr (new_var), x);
5643 gimplify_and_add (x, ilist);
5644 goto do_dtor;
5645 }
5646 do_firstprivate:
5647 lower_private_allocate (var, new_var, allocator, allocate_ptr,
5648 ilist, ctx, false, NULL_TREE);
5649 x = build_outer_var_ref (var, ctx);
5650 if (is_simd)
5651 {
5652 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
5653 && gimple_omp_for_combined_into_p (ctx->stmt))
5654 {
5655 tree t = OMP_CLAUSE_LINEAR_STEP (c);
5656 tree stept = TREE_TYPE (t);
5657 tree ct = omp_find_clause (clauses,
5658 OMP_CLAUSE__LOOPTEMP_);
5659 gcc_assert (ct);
5660 tree l = OMP_CLAUSE_DECL (ct);
5661 tree n1 = fd->loop.n1;
5662 tree step = fd->loop.step;
5663 tree itype = TREE_TYPE (l);
5664 if (POINTER_TYPE_P (itype))
5665 itype = signed_type_for (itype);
5666 l = fold_build2 (MINUS_EXPR, itype, l, n1);
5667 if (TYPE_UNSIGNED (itype)
5668 && fd->loop.cond_code == GT_EXPR)
5669 l = fold_build2 (TRUNC_DIV_EXPR, itype,
5670 fold_build1 (NEGATE_EXPR, itype, l),
5671 fold_build1 (NEGATE_EXPR,
5672 itype, step));
5673 else
5674 l = fold_build2 (TRUNC_DIV_EXPR, itype, l, step);
5675 t = fold_build2 (MULT_EXPR, stept,
5676 fold_convert (stept, l), t);
5677
5678 if (OMP_CLAUSE_LINEAR_ARRAY (c))
5679 {
5680 if (omp_is_reference (var))
5681 {
5682 gcc_assert (TREE_CODE (new_var) == MEM_REF);
5683 tree new_vard = TREE_OPERAND (new_var, 0);
5684 gcc_assert (DECL_P (new_vard));
5685 tree type = TREE_TYPE (TREE_TYPE (new_vard));
5686 nx = TYPE_SIZE_UNIT (type);
5687 if (TREE_CONSTANT (nx))
5688 {
5689 nx = create_tmp_var_raw (type,
5690 get_name (var));
5691 gimple_add_tmp_var (nx);
5692 TREE_ADDRESSABLE (nx) = 1;
5693 nx = build_fold_addr_expr_loc (clause_loc,
5694 nx);
5695 nx = fold_convert_loc (clause_loc,
5696 TREE_TYPE (new_vard),
5697 nx);
5698 gimplify_assign (new_vard, nx, ilist);
5699 }
5700 }
5701
5702 x = lang_hooks.decls.omp_clause_linear_ctor
5703 (c, new_var, x, t);
5704 gimplify_and_add (x, ilist);
5705 goto do_dtor;
5706 }
5707
5708 if (POINTER_TYPE_P (TREE_TYPE (x)))
5709 x = fold_build2 (POINTER_PLUS_EXPR,
5710 TREE_TYPE (x), x, t);
5711 else
5712 x = fold_build2 (PLUS_EXPR, TREE_TYPE (x), x, t);
5713 }
5714
5715 if ((OMP_CLAUSE_CODE (c) != OMP_CLAUSE_LINEAR
5716 || TREE_ADDRESSABLE (new_var)
5717 || omp_is_reference (var))
5718 && lower_rec_simd_input_clauses (new_var, ctx, &sctx,
5719 ivar, lvar))
5720 {
5721 if (omp_is_reference (var))
5722 {
5723 gcc_assert (TREE_CODE (new_var) == MEM_REF);
5724 tree new_vard = TREE_OPERAND (new_var, 0);
5725 gcc_assert (DECL_P (new_vard));
5726 SET_DECL_VALUE_EXPR (new_vard,
5727 build_fold_addr_expr (lvar));
5728 DECL_HAS_VALUE_EXPR_P (new_vard) = 1;
5729 }
5730 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR)
5731 {
5732 tree iv = create_tmp_var (TREE_TYPE (new_var));
5733 x = lang_hooks.decls.omp_clause_copy_ctor (c, iv, x);
5734 gimplify_and_add (x, ilist);
5735 gimple_stmt_iterator gsi
5736 = gsi_start_1 (gimple_omp_body_ptr (ctx->stmt));
5737 gassign *g
5738 = gimple_build_assign (unshare_expr (lvar), iv);
5739 gsi_insert_before_without_update (&gsi, g,
5740 GSI_SAME_STMT);
5741 tree t = OMP_CLAUSE_LINEAR_STEP (c);
5742 enum tree_code code = PLUS_EXPR;
5743 if (POINTER_TYPE_P (TREE_TYPE (new_var)))
5744 code = POINTER_PLUS_EXPR;
5745 g = gimple_build_assign (iv, code, iv, t);
5746 gsi_insert_before_without_update (&gsi, g,
5747 GSI_SAME_STMT);
5748 break;
5749 }
5750 x = lang_hooks.decls.omp_clause_copy_ctor
5751 (c, unshare_expr (ivar), x);
5752 gimplify_and_add (x, &llist[0]);
5753 x = lang_hooks.decls.omp_clause_dtor (c, ivar);
5754 if (x)
5755 gimplify_and_add (x, &llist[1]);
5756 break;
5757 }
5758 if (omp_is_reference (var))
5759 {
5760 gcc_assert (TREE_CODE (new_var) == MEM_REF);
5761 tree new_vard = TREE_OPERAND (new_var, 0);
5762 gcc_assert (DECL_P (new_vard));
5763 tree type = TREE_TYPE (TREE_TYPE (new_vard));
5764 nx = TYPE_SIZE_UNIT (type);
5765 if (TREE_CONSTANT (nx))
5766 {
5767 nx = create_tmp_var_raw (type, get_name (var));
5768 gimple_add_tmp_var (nx);
5769 TREE_ADDRESSABLE (nx) = 1;
5770 nx = build_fold_addr_expr_loc (clause_loc, nx);
5771 nx = fold_convert_loc (clause_loc,
5772 TREE_TYPE (new_vard), nx);
5773 gimplify_assign (new_vard, nx, ilist);
5774 }
5775 }
5776 }
5777 x = lang_hooks.decls.omp_clause_copy_ctor
5778 (c, unshare_expr (new_var), x);
5779 gimplify_and_add (x, ilist);
5780 goto do_dtor;
5781
5782 case OMP_CLAUSE__LOOPTEMP_:
5783 case OMP_CLAUSE__REDUCTEMP_:
5784 gcc_assert (is_taskreg_ctx (ctx));
5785 x = build_outer_var_ref (var, ctx);
5786 x = build2 (MODIFY_EXPR, TREE_TYPE (new_var), new_var, x);
5787 gimplify_and_add (x, ilist);
5788 break;
5789
5790 case OMP_CLAUSE_COPYIN:
5791 by_ref = use_pointer_for_field (var, NULL);
5792 x = build_receiver_ref (var, by_ref, ctx);
5793 x = lang_hooks.decls.omp_clause_assign_op (c, new_var, x);
5794 append_to_statement_list (x, &copyin_seq);
5795 copyin_by_ref |= by_ref;
5796 break;
5797
5798 case OMP_CLAUSE_REDUCTION:
5799 case OMP_CLAUSE_IN_REDUCTION:
5800 /* OpenACC reductions are initialized using the
5801 GOACC_REDUCTION internal function. */
5802 if (is_gimple_omp_oacc (ctx->stmt))
5803 break;
5804 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
5805 {
5806 tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
5807 gimple *tseq;
5808 tree ptype = TREE_TYPE (placeholder);
5809 if (cond)
5810 {
5811 x = error_mark_node;
5812 if (OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c)
5813 && !task_reduction_needs_orig_p)
5814 x = var;
5815 else if (OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c))
5816 {
5817 tree pptype = build_pointer_type (ptype);
5818 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION)
5819 x = build4 (ARRAY_REF, ptr_type_node, tskred_avar,
5820 size_int (task_reduction_cnt_full
5821 + task_reduction_cntorig - 1),
5822 NULL_TREE, NULL_TREE);
5823 else
5824 {
5825 unsigned int idx
5826 = *ctx->task_reduction_map->get (c);
5827 x = task_reduction_read (ilist, tskred_temp,
5828 pptype, 7 + 3 * idx);
5829 }
5830 x = fold_convert (pptype, x);
5831 x = build_simple_mem_ref (x);
5832 }
5833 }
5834 else
5835 {
5836 lower_private_allocate (var, new_var, allocator,
5837 allocate_ptr, ilist, ctx, false,
5838 NULL_TREE);
5839 x = build_outer_var_ref (var, ctx);
5840
5841 if (omp_is_reference (var)
5842 && !useless_type_conversion_p (ptype, TREE_TYPE (x)))
5843 x = build_fold_addr_expr_loc (clause_loc, x);
5844 }
5845 SET_DECL_VALUE_EXPR (placeholder, x);
5846 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
5847 tree new_vard = new_var;
5848 if (omp_is_reference (var))
5849 {
5850 gcc_assert (TREE_CODE (new_var) == MEM_REF);
5851 new_vard = TREE_OPERAND (new_var, 0);
5852 gcc_assert (DECL_P (new_vard));
5853 }
5854 tree rvar = NULL_TREE, *rvarp = NULL, rvar2 = NULL_TREE;
5855 if (is_simd
5856 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
5857 && OMP_CLAUSE_REDUCTION_INSCAN (c))
5858 rvarp = &rvar;
5859 if (is_simd
5860 && lower_rec_simd_input_clauses (new_var, ctx, &sctx,
5861 ivar, lvar, rvarp,
5862 &rvar2))
5863 {
5864 if (new_vard == new_var)
5865 {
5866 gcc_assert (DECL_VALUE_EXPR (new_var) == lvar);
5867 SET_DECL_VALUE_EXPR (new_var, ivar);
5868 }
5869 else
5870 {
5871 SET_DECL_VALUE_EXPR (new_vard,
5872 build_fold_addr_expr (ivar));
5873 DECL_HAS_VALUE_EXPR_P (new_vard) = 1;
5874 }
5875 x = lang_hooks.decls.omp_clause_default_ctor
5876 (c, unshare_expr (ivar),
5877 build_outer_var_ref (var, ctx));
5878 if (rvarp && ctx->for_simd_scan_phase)
5879 {
5880 if (x)
5881 gimplify_and_add (x, &llist[0]);
5882 x = lang_hooks.decls.omp_clause_dtor (c, ivar);
5883 if (x)
5884 gimplify_and_add (x, &llist[1]);
5885 break;
5886 }
5887 else if (rvarp)
5888 {
5889 if (x)
5890 {
5891 gimplify_and_add (x, &llist[0]);
5892
5893 tree ivar2 = unshare_expr (lvar);
5894 TREE_OPERAND (ivar2, 1) = sctx.idx;
5895 x = lang_hooks.decls.omp_clause_default_ctor
5896 (c, ivar2, build_outer_var_ref (var, ctx));
5897 gimplify_and_add (x, &llist[0]);
5898
5899 if (rvar2)
5900 {
5901 x = lang_hooks.decls.omp_clause_default_ctor
5902 (c, unshare_expr (rvar2),
5903 build_outer_var_ref (var, ctx));
5904 gimplify_and_add (x, &llist[0]);
5905 }
5906
5907 /* For types that need construction, add another
5908 private var which will be default constructed
5909 and optionally initialized with
5910 OMP_CLAUSE_REDUCTION_GIMPLE_INIT, since in the
5911 loop we want to assign this value instead of
5912 constructing and destructing it in each
5913 iteration. */
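/* A sketch of the intent, assuming a C++ class type T with a
   nontrivial constructor:

     T nv;               // default constructed once, before the loop
     <GIMPLE_INIT> (nv); // optional initializer, run once
     for (...)
       ivar = nv;        // plain assignment each iteration

   rather than running T's ctor and dtor on every iteration.  */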
5914 tree nv = create_tmp_var_raw (TREE_TYPE (ivar));
5915 gimple_add_tmp_var (nv);
5916 ctx->cb.decl_map->put (TREE_OPERAND (rvar2
5917 ? rvar2
5918 : ivar, 0),
5919 nv);
5920 x = lang_hooks.decls.omp_clause_default_ctor
5921 (c, nv, build_outer_var_ref (var, ctx));
5922 gimplify_and_add (x, ilist);
5923
5924 if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c))
5925 {
5926 tseq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c);
5927 x = DECL_VALUE_EXPR (new_vard);
5928 tree vexpr = nv;
5929 if (new_vard != new_var)
5930 vexpr = build_fold_addr_expr (nv);
5931 SET_DECL_VALUE_EXPR (new_vard, vexpr);
5932 lower_omp (&tseq, ctx);
5933 SET_DECL_VALUE_EXPR (new_vard, x);
5934 gimple_seq_add_seq (ilist, tseq);
5935 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL;
5936 }
5937
5938 x = lang_hooks.decls.omp_clause_dtor (c, nv);
5939 if (x)
5940 gimplify_and_add (x, dlist);
5941 }
5942
5943 tree ref = build_outer_var_ref (var, ctx);
5944 x = unshare_expr (ivar);
5945 x = lang_hooks.decls.omp_clause_assign_op (c, x,
5946 ref);
5947 gimplify_and_add (x, &llist[0]);
5948
5949 ref = build_outer_var_ref (var, ctx);
5950 x = lang_hooks.decls.omp_clause_assign_op (c, ref,
5951 rvar);
5952 gimplify_and_add (x, &llist[3]);
5953
5954 DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
5955 if (new_vard == new_var)
5956 SET_DECL_VALUE_EXPR (new_var, lvar);
5957 else
5958 SET_DECL_VALUE_EXPR (new_vard,
5959 build_fold_addr_expr (lvar));
5960
5961 x = lang_hooks.decls.omp_clause_dtor (c, ivar);
5962 if (x)
5963 gimplify_and_add (x, &llist[1]);
5964
5965 tree ivar2 = unshare_expr (lvar);
5966 TREE_OPERAND (ivar2, 1) = sctx.idx;
5967 x = lang_hooks.decls.omp_clause_dtor (c, ivar2);
5968 if (x)
5969 gimplify_and_add (x, &llist[1]);
5970
5971 if (rvar2)
5972 {
5973 x = lang_hooks.decls.omp_clause_dtor (c, rvar2);
5974 if (x)
5975 gimplify_and_add (x, &llist[1]);
5976 }
5977 break;
5978 }
5979 if (x)
5980 gimplify_and_add (x, &llist[0]);
5981 if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c))
5982 {
5983 tseq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c);
5984 lower_omp (&tseq, ctx);
5985 gimple_seq_add_seq (&llist[0], tseq);
5986 }
5987 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL;
5988 tseq = OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c);
5989 lower_omp (&tseq, ctx);
5990 gimple_seq_add_seq (&llist[1], tseq);
5991 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
5992 DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
5993 if (new_vard == new_var)
5994 SET_DECL_VALUE_EXPR (new_var, lvar);
5995 else
5996 SET_DECL_VALUE_EXPR (new_vard,
5997 build_fold_addr_expr (lvar));
5998 x = lang_hooks.decls.omp_clause_dtor (c, ivar);
5999 if (x)
6000 gimplify_and_add (x, &llist[1]);
6001 break;
6002 }
6003 /* If this is a reference to a constant-size reduction var
6004 with a placeholder, we haven't emitted the initializer
6005 for it because doing so is undesirable if SIMD arrays are
6006 used. But if they aren't used, we need to emit the
6007 deferred initialization now. */
6008 else if (omp_is_reference (var) && is_simd)
6009 handle_simd_reference (clause_loc, new_vard, ilist);
6010
6011 tree lab2 = NULL_TREE;
6012 if (cond)
6013 {
6014 gimple *g;
6015 if (!is_parallel_ctx (ctx))
6016 {
6017 tree condv = create_tmp_var (boolean_type_node);
6018 tree m = build_simple_mem_ref (cond);
6019 g = gimple_build_assign (condv, m);
6020 gimple_seq_add_stmt (ilist, g);
6021 tree lab1
6022 = create_artificial_label (UNKNOWN_LOCATION);
6023 lab2 = create_artificial_label (UNKNOWN_LOCATION);
6024 g = gimple_build_cond (NE_EXPR, condv,
6025 boolean_false_node,
6026 lab2, lab1);
6027 gimple_seq_add_stmt (ilist, g);
6028 gimple_seq_add_stmt (ilist,
6029 gimple_build_label (lab1));
6030 }
6031 g = gimple_build_assign (build_simple_mem_ref (cond),
6032 boolean_true_node);
6033 gimple_seq_add_stmt (ilist, g);
6034 }
6035 x = lang_hooks.decls.omp_clause_default_ctor
6036 (c, unshare_expr (new_var),
6037 cond ? NULL_TREE
6038 : build_outer_var_ref (var, ctx));
6039 if (x)
6040 gimplify_and_add (x, ilist);
6041
6042 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
6043 && OMP_CLAUSE_REDUCTION_INSCAN (c))
6044 {
6045 if (ctx->for_simd_scan_phase)
6046 goto do_dtor;
6047 if (x || (!is_simd
6048 && OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c)))
6049 {
6050 tree nv = create_tmp_var_raw (TREE_TYPE (new_var));
6051 gimple_add_tmp_var (nv);
6052 ctx->cb.decl_map->put (new_vard, nv);
6053 x = lang_hooks.decls.omp_clause_default_ctor
6054 (c, nv, build_outer_var_ref (var, ctx));
6055 if (x)
6056 gimplify_and_add (x, ilist);
6057 if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c))
6058 {
6059 tseq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c);
6060 tree vexpr = nv;
6061 if (new_vard != new_var)
6062 vexpr = build_fold_addr_expr (nv);
6063 SET_DECL_VALUE_EXPR (new_vard, vexpr);
6064 DECL_HAS_VALUE_EXPR_P (new_vard) = 1;
6065 lower_omp (&tseq, ctx);
6066 SET_DECL_VALUE_EXPR (new_vard, NULL_TREE);
6067 DECL_HAS_VALUE_EXPR_P (new_vard) = 0;
6068 gimple_seq_add_seq (ilist, tseq);
6069 }
6070 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL;
6071 if (is_simd && ctx->scan_exclusive)
6072 {
6073 tree nv2
6074 = create_tmp_var_raw (TREE_TYPE (new_var));
6075 gimple_add_tmp_var (nv2);
6076 ctx->cb.decl_map->put (nv, nv2);
6077 x = lang_hooks.decls.omp_clause_default_ctor
6078 (c, nv2, build_outer_var_ref (var, ctx));
6079 gimplify_and_add (x, ilist);
6080 x = lang_hooks.decls.omp_clause_dtor (c, nv2);
6081 if (x)
6082 gimplify_and_add (x, dlist);
6083 }
6084 x = lang_hooks.decls.omp_clause_dtor (c, nv);
6085 if (x)
6086 gimplify_and_add (x, dlist);
6087 }
6088 else if (is_simd
6089 && ctx->scan_exclusive
6090 && TREE_ADDRESSABLE (TREE_TYPE (new_var)))
6091 {
6092 tree nv2 = create_tmp_var_raw (TREE_TYPE (new_var));
6093 gimple_add_tmp_var (nv2);
6094 ctx->cb.decl_map->put (new_vard, nv2);
6095 x = lang_hooks.decls.omp_clause_dtor (c, nv2);
6096 if (x)
6097 gimplify_and_add (x, dlist);
6098 }
6099 DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
6100 goto do_dtor;
6101 }
6102
6103 if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c))
6104 {
6105 tseq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c);
6106 lower_omp (&tseq, ctx);
6107 gimple_seq_add_seq (ilist, tseq);
6108 }
6109 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL;
6110 if (is_simd)
6111 {
6112 tseq = OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c);
6113 lower_omp (&tseq, ctx);
6114 gimple_seq_add_seq (dlist, tseq);
6115 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
6116 }
6117 DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
6118 if (cond)
6119 {
6120 if (lab2)
6121 gimple_seq_add_stmt (ilist, gimple_build_label (lab2));
6122 break;
6123 }
6124 goto do_dtor;
6125 }
6126 else
6127 {
6128 x = omp_reduction_init (c, TREE_TYPE (new_var));
6129 gcc_assert (TREE_CODE (TREE_TYPE (new_var)) != ARRAY_TYPE);
6130 enum tree_code code = OMP_CLAUSE_REDUCTION_CODE (c);
6131
6132 if (cond)
6133 {
6134 gimple *g;
6135 tree lab2 = NULL_TREE;
6136 /* GOMP_taskgroup_reduction_register memsets the whole
6137 array to zero. If the initializer is zero, we don't
6138 need to initialize it again, just mark it as ever
6139 used unconditionally, i.e. cond = true. */
6140 if (initializer_zerop (x))
6141 {
6142 g = gimple_build_assign (build_simple_mem_ref (cond),
6143 boolean_true_node);
6144 gimple_seq_add_stmt (ilist, g);
6145 break;
6146 }
6147
6148 /* Otherwise, emit
6149 if (!cond) { cond = true; new_var = x; } */
6150 if (!is_parallel_ctx (ctx))
6151 {
6152 tree condv = create_tmp_var (boolean_type_node);
6153 tree m = build_simple_mem_ref (cond);
6154 g = gimple_build_assign (condv, m);
6155 gimple_seq_add_stmt (ilist, g);
6156 tree lab1
6157 = create_artificial_label (UNKNOWN_LOCATION);
6158 lab2 = create_artificial_label (UNKNOWN_LOCATION);
6159 g = gimple_build_cond (NE_EXPR, condv,
6160 boolean_false_node,
6161 lab2, lab1);
6162 gimple_seq_add_stmt (ilist, g);
6163 gimple_seq_add_stmt (ilist,
6164 gimple_build_label (lab1));
6165 }
6166 g = gimple_build_assign (build_simple_mem_ref (cond),
6167 boolean_true_node);
6168 gimple_seq_add_stmt (ilist, g);
6169 gimplify_assign (new_var, x, ilist);
6170 if (lab2)
6171 gimple_seq_add_stmt (ilist, gimple_build_label (lab2));
6172 break;
6173 }
6174
6175 /* reduction(-:var) sums up the partial results, so it
6176 acts identically to reduction(+:var). */
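/* E.g. with two threads holding private partials p0 and p1, both
   reduction(-:var) and reduction(+:var) combine as
   var = var + p0 + p1; the subtractions already happened while
   each partial was formed.  */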
6177 if (code == MINUS_EXPR)
6178 code = PLUS_EXPR;
6179
6180 tree new_vard = new_var;
6181 if (is_simd && omp_is_reference (var))
6182 {
6183 gcc_assert (TREE_CODE (new_var) == MEM_REF);
6184 new_vard = TREE_OPERAND (new_var, 0);
6185 gcc_assert (DECL_P (new_vard));
6186 }
6187 tree rvar = NULL_TREE, *rvarp = NULL, rvar2 = NULL_TREE;
6188 if (is_simd
6189 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
6190 && OMP_CLAUSE_REDUCTION_INSCAN (c))
6191 rvarp = &rvar;
6192 if (is_simd
6193 && lower_rec_simd_input_clauses (new_var, ctx, &sctx,
6194 ivar, lvar, rvarp,
6195 &rvar2))
6196 {
6197 if (new_vard != new_var)
6198 {
6199 SET_DECL_VALUE_EXPR (new_vard,
6200 build_fold_addr_expr (lvar));
6201 DECL_HAS_VALUE_EXPR_P (new_vard) = 1;
6202 }
6203
6204 tree ref = build_outer_var_ref (var, ctx);
6205
6206 if (rvarp)
6207 {
6208 if (ctx->for_simd_scan_phase)
6209 break;
6210 gimplify_assign (ivar, ref, &llist[0]);
6211 ref = build_outer_var_ref (var, ctx);
6212 gimplify_assign (ref, rvar, &llist[3]);
6213 break;
6214 }
6215
6216 gimplify_assign (unshare_expr (ivar), x, &llist[0]);
6217
6218 if (sctx.is_simt)
6219 {
6220 if (!simt_lane)
6221 simt_lane = create_tmp_var (unsigned_type_node);
6222 x = build_call_expr_internal_loc
6223 (UNKNOWN_LOCATION, IFN_GOMP_SIMT_XCHG_BFLY,
6224 TREE_TYPE (ivar), 2, ivar, simt_lane);
6225 x = build2 (code, TREE_TYPE (ivar), ivar, x);
6226 gimplify_assign (ivar, x, &llist[2]);
6227 }
6228 x = build2 (code, TREE_TYPE (ref), ref, ivar);
6229 ref = build_outer_var_ref (var, ctx);
6230 gimplify_assign (ref, x, &llist[1]);
6231
6232 }
6233 else
6234 {
6235 lower_private_allocate (var, new_var, allocator,
6236 allocate_ptr, ilist, ctx,
6237 false, NULL_TREE);
6238 if (omp_is_reference (var) && is_simd)
6239 handle_simd_reference (clause_loc, new_vard, ilist);
6240 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
6241 && OMP_CLAUSE_REDUCTION_INSCAN (c))
6242 break;
6243 gimplify_assign (new_var, x, ilist);
6244 if (is_simd)
6245 {
6246 tree ref = build_outer_var_ref (var, ctx);
6247
6248 x = build2 (code, TREE_TYPE (ref), ref, new_var);
6249 ref = build_outer_var_ref (var, ctx);
6250 gimplify_assign (ref, x, dlist);
6251 }
6252 if (allocator)
6253 goto do_dtor;
6254 }
6255 }
6256 break;
6257
6258 default:
6259 gcc_unreachable ();
6260 }
6261 }
6262 }
6263 if (tskred_avar)
6264 {
6265 tree clobber = build_clobber (TREE_TYPE (tskred_avar));
6266 gimple_seq_add_stmt (ilist, gimple_build_assign (tskred_avar, clobber));
6267 }
6268
6269 if (known_eq (sctx.max_vf, 1U))
6270 {
6271 sctx.is_simt = false;
6272 if (ctx->lastprivate_conditional_map)
6273 {
6274 if (gimple_omp_for_combined_into_p (ctx->stmt))
6275 {
6276 /* Signal to lower_omp_1 that it should use parent context. */
6277 ctx->combined_into_simd_safelen1 = true;
6278 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
6279 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
6280 && OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c))
6281 {
6282 tree o = lookup_decl (OMP_CLAUSE_DECL (c), ctx);
6283 omp_context *outer = ctx->outer;
6284 if (gimple_code (outer->stmt) == GIMPLE_OMP_SCAN)
6285 outer = outer->outer;
6286 tree *v = ctx->lastprivate_conditional_map->get (o);
6287 tree po = lookup_decl (OMP_CLAUSE_DECL (c), outer);
6288 tree *pv = outer->lastprivate_conditional_map->get (po);
6289 *v = *pv;
6290 }
6291 }
6292 else
6293 {
6294 /* When not vectorized, treat lastprivate(conditional:) like
6295 normal lastprivate, as there will be just one simd lane
6296 writing the privatized variable. */
6297 delete ctx->lastprivate_conditional_map;
6298 ctx->lastprivate_conditional_map = NULL;
6299 }
6300 }
6301 }
6302
6303 if (nonconst_simd_if)
6304 {
6305 if (sctx.lane == NULL_TREE)
6306 {
6307 sctx.idx = create_tmp_var (unsigned_type_node);
6308 sctx.lane = create_tmp_var (unsigned_type_node);
6309 }
6310 /* FIXME: For now. */
6311 sctx.is_simt = false;
6312 }
6313
6314 if (sctx.lane || sctx.is_simt)
6315 {
6316 uid = create_tmp_var (ptr_type_node, "simduid");
6317 /* Don't warn about simduid being uninitialized: it always is, since
6318 we use it only for its DECL_UID, never for its value. */
6319 TREE_NO_WARNING (uid) = 1;
6320 c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__SIMDUID_);
6321 OMP_CLAUSE__SIMDUID__DECL (c) = uid;
6322 OMP_CLAUSE_CHAIN (c) = gimple_omp_for_clauses (ctx->stmt);
6323 gimple_omp_for_set_clauses (ctx->stmt, c);
6324 }
6325 /* Emit calls denoting privatized variables and initializing a pointer to
6326 the structure that holds private variables as fields after the ompdevlow pass. */
6327 if (sctx.is_simt)
6328 {
6329 sctx.simt_eargs[0] = uid;
6330 gimple *g
6331 = gimple_build_call_internal_vec (IFN_GOMP_SIMT_ENTER, sctx.simt_eargs);
6332 gimple_call_set_lhs (g, uid);
6333 gimple_seq_add_stmt (ilist, g);
6334 sctx.simt_eargs.release ();
6335
6336 simtrec = create_tmp_var (ptr_type_node, ".omp_simt");
6337 g = gimple_build_call_internal (IFN_GOMP_SIMT_ENTER_ALLOC, 1, uid);
6338 gimple_call_set_lhs (g, simtrec);
6339 gimple_seq_add_stmt (ilist, g);
6340 }
6341 if (sctx.lane)
6342 {
6343 gimple *g = gimple_build_call_internal (IFN_GOMP_SIMD_LANE,
6344 2 + (nonconst_simd_if != NULL),
6345 uid, integer_zero_node,
6346 nonconst_simd_if);
6347 gimple_call_set_lhs (g, sctx.lane);
6348 gimple_stmt_iterator gsi = gsi_start_1 (gimple_omp_body_ptr (ctx->stmt));
6349 gsi_insert_before_without_update (&gsi, g, GSI_SAME_STMT);
6350 g = gimple_build_assign (sctx.lane, INTEGER_CST,
6351 build_int_cst (unsigned_type_node, 0));
6352 gimple_seq_add_stmt (ilist, g);
6353 if (sctx.lastlane)
6354 {
6355 g = gimple_build_call_internal (IFN_GOMP_SIMD_LAST_LANE,
6356 2, uid, sctx.lane);
6357 gimple_call_set_lhs (g, sctx.lastlane);
6358 gimple_seq_add_stmt (dlist, g);
6359 gimple_seq_add_seq (dlist, llist[3]);
6360 }
6361 /* Emit reductions across SIMT lanes in log_2(simt_vf) steps. */
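/* Conceptually (a sketch, not the exact GIMPLE emitted below):

     for (simt_lane = 1; simt_lane < simt_vf; simt_lane <<= 1)
       x = x OP .GOMP_SIMT_XCHG_BFLY (x, simt_lane);

   combining all lanes' partial results in log2(simt_vf) butterfly
   exchange steps.  */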
6362 if (llist[2])
6363 {
6364 tree simt_vf = create_tmp_var (unsigned_type_node);
6365 g = gimple_build_call_internal (IFN_GOMP_SIMT_VF, 0);
6366 gimple_call_set_lhs (g, simt_vf);
6367 gimple_seq_add_stmt (dlist, g);
6368
6369 tree t = build_int_cst (unsigned_type_node, 1);
6370 g = gimple_build_assign (simt_lane, INTEGER_CST, t);
6371 gimple_seq_add_stmt (dlist, g);
6372
6373 t = build_int_cst (unsigned_type_node, 0);
6374 g = gimple_build_assign (sctx.idx, INTEGER_CST, t);
6375 gimple_seq_add_stmt (dlist, g);
6376
6377 tree body = create_artificial_label (UNKNOWN_LOCATION);
6378 tree header = create_artificial_label (UNKNOWN_LOCATION);
6379 tree end = create_artificial_label (UNKNOWN_LOCATION);
6380 gimple_seq_add_stmt (dlist, gimple_build_goto (header));
6381 gimple_seq_add_stmt (dlist, gimple_build_label (body));
6382
6383 gimple_seq_add_seq (dlist, llist[2]);
6384
6385 g = gimple_build_assign (simt_lane, LSHIFT_EXPR, simt_lane, integer_one_node);
6386 gimple_seq_add_stmt (dlist, g);
6387
6388 gimple_seq_add_stmt (dlist, gimple_build_label (header));
6389 g = gimple_build_cond (LT_EXPR, simt_lane, simt_vf, body, end);
6390 gimple_seq_add_stmt (dlist, g);
6391
6392 gimple_seq_add_stmt (dlist, gimple_build_label (end));
6393 }
6394 for (int i = 0; i < 2; i++)
6395 if (llist[i])
6396 {
6397 tree vf = create_tmp_var (unsigned_type_node);
6398 g = gimple_build_call_internal (IFN_GOMP_SIMD_VF, 1, uid);
6399 gimple_call_set_lhs (g, vf);
6400 gimple_seq *seq = i == 0 ? ilist : dlist;
6401 gimple_seq_add_stmt (seq, g);
6402 tree t = build_int_cst (unsigned_type_node, 0);
6403 g = gimple_build_assign (sctx.idx, INTEGER_CST, t);
6404 gimple_seq_add_stmt (seq, g);
6405 tree body = create_artificial_label (UNKNOWN_LOCATION);
6406 tree header = create_artificial_label (UNKNOWN_LOCATION);
6407 tree end = create_artificial_label (UNKNOWN_LOCATION);
6408 gimple_seq_add_stmt (seq, gimple_build_goto (header));
6409 gimple_seq_add_stmt (seq, gimple_build_label (body));
6410 gimple_seq_add_seq (seq, llist[i]);
6411 t = build_int_cst (unsigned_type_node, 1);
6412 g = gimple_build_assign (sctx.idx, PLUS_EXPR, sctx.idx, t);
6413 gimple_seq_add_stmt (seq, g);
6414 gimple_seq_add_stmt (seq, gimple_build_label (header));
6415 g = gimple_build_cond (LT_EXPR, sctx.idx, vf, body, end);
6416 gimple_seq_add_stmt (seq, g);
6417 gimple_seq_add_stmt (seq, gimple_build_label (end));
6418 }
6419 }
6420 if (sctx.is_simt)
6421 {
6422 gimple_seq_add_seq (dlist, sctx.simt_dlist);
6423 gimple *g
6424 = gimple_build_call_internal (IFN_GOMP_SIMT_EXIT, 1, simtrec);
6425 gimple_seq_add_stmt (dlist, g);
6426 }
6427
6428 /* The copyin sequence is not to be executed by the main thread, since
6429 that would result in self-copies. Perhaps not observable for
6430 scalars, but it certainly is for C++ operator=. */
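/* I.e. the code built just below is, roughly:

     if (omp_get_thread_num () != 0)
       { <copyin assignments> }

   so the master thread (thread 0 of the team) skips copying onto
   itself.  */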
6431 if (copyin_seq)
6432 {
6433 x = build_call_expr (builtin_decl_explicit (BUILT_IN_OMP_GET_THREAD_NUM),
6434 0);
6435 x = build2 (NE_EXPR, boolean_type_node, x,
6436 build_int_cst (TREE_TYPE (x), 0));
6437 x = build3 (COND_EXPR, void_type_node, x, copyin_seq, NULL);
6438 gimplify_and_add (x, ilist);
6439 }
6440
6441 /* If any copyin variable is passed by reference, we must ensure the
6442 master thread doesn't modify it before it is copied over in all
6443 threads. Similarly for variables in both firstprivate and
6444 lastprivate clauses we need to ensure the lastprivate copying
6445 happens after firstprivate copying in all threads. And similarly
6446 for UDRs if the initializer expression refers to omp_orig. */
6447 if (copyin_by_ref || lastprivate_firstprivate
6448 || (reduction_omp_orig_ref
6449 && !ctx->scan_inclusive
6450 && !ctx->scan_exclusive))
6451 {
6452 /* Don't add any barrier for #pragma omp simd or
6453 #pragma omp distribute. */
6454 if (!is_task_ctx (ctx)
6455 && (gimple_code (ctx->stmt) != GIMPLE_OMP_FOR
6456 || gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_FOR))
6457 gimple_seq_add_stmt (ilist, omp_build_barrier (NULL_TREE));
6458 }
6459
6460 /* If max_vf is non-zero, then we can use only a vectorization factor
6461 up to the max_vf we chose. So stick it into the safelen clause. */
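/* For example, if sctx.max_vf ended up as 16 and the loop either
   had no safelen clause or one greater than 16, a safelen(16)
   clause is prepended here (16 being just an assumed value for
   illustration).  */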
6462 if (maybe_ne (sctx.max_vf, 0U))
6463 {
6464 tree c = omp_find_clause (gimple_omp_for_clauses (ctx->stmt),
6465 OMP_CLAUSE_SAFELEN);
6466 poly_uint64 safe_len;
6467 if (c == NULL_TREE
6468 || (poly_int_tree_p (OMP_CLAUSE_SAFELEN_EXPR (c), &safe_len)
6469 && maybe_gt (safe_len, sctx.max_vf)))
6470 {
6471 c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE_SAFELEN);
6472 OMP_CLAUSE_SAFELEN_EXPR (c) = build_int_cst (integer_type_node,
6473 sctx.max_vf);
6474 OMP_CLAUSE_CHAIN (c) = gimple_omp_for_clauses (ctx->stmt);
6475 gimple_omp_for_set_clauses (ctx->stmt, c);
6476 }
6477 }
6478 }
6479
6480 /* Create temporary variables for lastprivate(conditional:) implementation
6481 in context CTX with CLAUSES. */
6482
6483 static void
6484 lower_lastprivate_conditional_clauses (tree *clauses, omp_context *ctx)
6485 {
6486 tree iter_type = NULL_TREE;
6487 tree cond_ptr = NULL_TREE;
6488 tree iter_var = NULL_TREE;
6489 bool is_simd = (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
6490 && gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_SIMD);
6491 tree next = *clauses;
6492 for (tree c = *clauses; c; c = OMP_CLAUSE_CHAIN (c))
6493 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
6494 && OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c))
6495 {
6496 if (is_simd)
6497 {
6498 tree cc = omp_find_clause (next, OMP_CLAUSE__CONDTEMP_);
6499 gcc_assert (cc);
6500 if (iter_type == NULL_TREE)
6501 {
6502 iter_type = TREE_TYPE (OMP_CLAUSE_DECL (cc));
6503 iter_var = create_tmp_var_raw (iter_type);
6504 DECL_CONTEXT (iter_var) = current_function_decl;
6505 DECL_SEEN_IN_BIND_EXPR_P (iter_var) = 1;
6506 DECL_CHAIN (iter_var) = ctx->block_vars;
6507 ctx->block_vars = iter_var;
6508 tree c3
6509 = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__CONDTEMP_);
6510 OMP_CLAUSE__CONDTEMP__ITER (c3) = 1;
6511 OMP_CLAUSE_DECL (c3) = iter_var;
6512 OMP_CLAUSE_CHAIN (c3) = *clauses;
6513 *clauses = c3;
6514 ctx->lastprivate_conditional_map = new hash_map<tree, tree>;
6515 }
6516 next = OMP_CLAUSE_CHAIN (cc);
6517 tree o = lookup_decl (OMP_CLAUSE_DECL (c), ctx);
6518 tree v = lookup_decl (OMP_CLAUSE_DECL (cc), ctx);
6519 ctx->lastprivate_conditional_map->put (o, v);
6520 continue;
6521 }
6522 if (iter_type == NULL)
6523 {
6524 if (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR)
6525 {
6526 struct omp_for_data fd;
6527 omp_extract_for_data (as_a <gomp_for *> (ctx->stmt), &fd,
6528 NULL);
6529 iter_type = unsigned_type_for (fd.iter_type);
6530 }
6531 else if (gimple_code (ctx->stmt) == GIMPLE_OMP_SECTIONS)
6532 iter_type = unsigned_type_node;
6533 tree c2 = omp_find_clause (*clauses, OMP_CLAUSE__CONDTEMP_);
6534 if (c2)
6535 {
6536 cond_ptr
6537 = lookup_decl_in_outer_ctx (OMP_CLAUSE_DECL (c2), ctx);
6538 OMP_CLAUSE_DECL (c2) = cond_ptr;
6539 }
6540 else
6541 {
6542 cond_ptr = create_tmp_var_raw (build_pointer_type (iter_type));
6543 DECL_CONTEXT (cond_ptr) = current_function_decl;
6544 DECL_SEEN_IN_BIND_EXPR_P (cond_ptr) = 1;
6545 DECL_CHAIN (cond_ptr) = ctx->block_vars;
6546 ctx->block_vars = cond_ptr;
6547 c2 = build_omp_clause (UNKNOWN_LOCATION,
6548 OMP_CLAUSE__CONDTEMP_);
6549 OMP_CLAUSE_DECL (c2) = cond_ptr;
6550 OMP_CLAUSE_CHAIN (c2) = *clauses;
6551 *clauses = c2;
6552 }
6553 iter_var = create_tmp_var_raw (iter_type);
6554 DECL_CONTEXT (iter_var) = current_function_decl;
6555 DECL_SEEN_IN_BIND_EXPR_P (iter_var) = 1;
6556 DECL_CHAIN (iter_var) = ctx->block_vars;
6557 ctx->block_vars = iter_var;
6558 tree c3
6559 = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__CONDTEMP_);
6560 OMP_CLAUSE__CONDTEMP__ITER (c3) = 1;
6561 OMP_CLAUSE_DECL (c3) = iter_var;
6562 OMP_CLAUSE_CHAIN (c3) = OMP_CLAUSE_CHAIN (c2);
6563 OMP_CLAUSE_CHAIN (c2) = c3;
6564 ctx->lastprivate_conditional_map = new hash_map<tree, tree>;
6565 }
6566 tree v = create_tmp_var_raw (iter_type);
6567 DECL_CONTEXT (v) = current_function_decl;
6568 DECL_SEEN_IN_BIND_EXPR_P (v) = 1;
6569 DECL_CHAIN (v) = ctx->block_vars;
6570 ctx->block_vars = v;
6571 tree o = lookup_decl (OMP_CLAUSE_DECL (c), ctx);
6572 ctx->lastprivate_conditional_map->put (o, v);
6573 }
6574 }
6575
6576
6577 /* Generate code to implement the LASTPRIVATE clauses. This is used for
6578 both parallel and workshare constructs. PREDICATE may be NULL if it's
6579 always true. BODY_P is the sequence into which any early
6580 initialization is inserted if needed, STMT_LIST is where the
6581 non-conditional lastprivate handling goes, and CSTMT_LIST is a
6582 sequence that needs to be run in a critical section. */
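/* A simplified sketch of the generated shape:

     if (PREDICATE)   // e.g. "this thread ran the last iteration"
       {
         orig_var = priv_var;   // non-conditional copies, STMT_LIST
       }

   with lastprivate(conditional:) copy-outs routed into CSTMT_LIST
   instead, so they run inside a critical section.  */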
6583
6584 static void
6585 lower_lastprivate_clauses (tree clauses, tree predicate, gimple_seq *body_p,
6586 gimple_seq *stmt_list, gimple_seq *cstmt_list,
6587 omp_context *ctx)
6588 {
6589 tree x, c, label = NULL, orig_clauses = clauses;
6590 bool par_clauses = false;
6591 tree simduid = NULL, lastlane = NULL, simtcond = NULL, simtlast = NULL;
6592 unsigned HOST_WIDE_INT conditional_off = 0;
6593 gimple_seq post_stmt_list = NULL;
6594
6595 /* Early exit if there are no lastprivate or linear clauses. */
6596 for (; clauses ; clauses = OMP_CLAUSE_CHAIN (clauses))
6597 if (OMP_CLAUSE_CODE (clauses) == OMP_CLAUSE_LASTPRIVATE
6598 || (OMP_CLAUSE_CODE (clauses) == OMP_CLAUSE_LINEAR
6599 && !OMP_CLAUSE_LINEAR_NO_COPYOUT (clauses)))
6600 break;
6601 if (clauses == NULL)
6602 {
6603 /* If this was a workshare clause, see if it had been combined
6604 with its parallel. In that case, look for the clauses on the
6605 parallel statement itself. */
6606 if (is_parallel_ctx (ctx))
6607 return;
6608
6609 ctx = ctx->outer;
6610 if (ctx == NULL || !is_parallel_ctx (ctx))
6611 return;
6612
6613 clauses = omp_find_clause (gimple_omp_parallel_clauses (ctx->stmt),
6614 OMP_CLAUSE_LASTPRIVATE);
6615 if (clauses == NULL)
6616 return;
6617 par_clauses = true;
6618 }
6619
6620 bool maybe_simt = false;
6621 if (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
6622 && gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_SIMD)
6623 {
6624 maybe_simt = omp_find_clause (orig_clauses, OMP_CLAUSE__SIMT_);
6625 simduid = omp_find_clause (orig_clauses, OMP_CLAUSE__SIMDUID_);
6626 if (simduid)
6627 simduid = OMP_CLAUSE__SIMDUID__DECL (simduid);
6628 }
6629
6630 if (predicate)
6631 {
6632 gcond *stmt;
6633 tree label_true, arm1, arm2;
6634 enum tree_code pred_code = TREE_CODE (predicate);
6635
6636 label = create_artificial_label (UNKNOWN_LOCATION);
6637 label_true = create_artificial_label (UNKNOWN_LOCATION);
6638 if (TREE_CODE_CLASS (pred_code) == tcc_comparison)
6639 {
6640 arm1 = TREE_OPERAND (predicate, 0);
6641 arm2 = TREE_OPERAND (predicate, 1);
6642 gimplify_expr (&arm1, stmt_list, NULL, is_gimple_val, fb_rvalue);
6643 gimplify_expr (&arm2, stmt_list, NULL, is_gimple_val, fb_rvalue);
6644 }
6645 else
6646 {
6647 arm1 = predicate;
6648 gimplify_expr (&arm1, stmt_list, NULL, is_gimple_val, fb_rvalue);
6649 arm2 = boolean_false_node;
6650 pred_code = NE_EXPR;
6651 }
6652 if (maybe_simt)
6653 {
6654 c = build2 (pred_code, boolean_type_node, arm1, arm2);
6655 c = fold_convert (integer_type_node, c);
6656 simtcond = create_tmp_var (integer_type_node);
6657 gimplify_assign (simtcond, c, stmt_list);
6658 gcall *g = gimple_build_call_internal (IFN_GOMP_SIMT_VOTE_ANY,
6659 1, simtcond);
6660 c = create_tmp_var (integer_type_node);
6661 gimple_call_set_lhs (g, c);
6662 gimple_seq_add_stmt (stmt_list, g);
6663 stmt = gimple_build_cond (NE_EXPR, c, integer_zero_node,
6664 label_true, label);
6665 }
6666 else
6667 stmt = gimple_build_cond (pred_code, arm1, arm2, label_true, label);
6668 gimple_seq_add_stmt (stmt_list, stmt);
6669 gimple_seq_add_stmt (stmt_list, gimple_build_label (label_true));
6670 }
6671
6672 tree cond_ptr = NULL_TREE;
6673 for (c = clauses; c ;)
6674 {
6675 tree var, new_var;
6676 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
6677 gimple_seq *this_stmt_list = stmt_list;
6678 tree lab2 = NULL_TREE;
6679
6680 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
6681 && OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c)
6682 && ctx->lastprivate_conditional_map
6683 && !ctx->combined_into_simd_safelen1)
6684 {
6685 gcc_assert (body_p);
6686 if (simduid)
6687 goto next;
6688 if (cond_ptr == NULL_TREE)
6689 {
6690 cond_ptr = omp_find_clause (orig_clauses, OMP_CLAUSE__CONDTEMP_);
6691 cond_ptr = OMP_CLAUSE_DECL (cond_ptr);
6692 }
6693 tree type = TREE_TYPE (TREE_TYPE (cond_ptr));
6694 tree o = lookup_decl (OMP_CLAUSE_DECL (c), ctx);
6695 tree v = *ctx->lastprivate_conditional_map->get (o);
6696 gimplify_assign (v, build_zero_cst (type), body_p);
6697 this_stmt_list = cstmt_list;
6698 tree mem;
6699 if (POINTER_TYPE_P (TREE_TYPE (cond_ptr)))
6700 {
6701 mem = build2 (MEM_REF, type, cond_ptr,
6702 build_int_cst (TREE_TYPE (cond_ptr),
6703 conditional_off));
6704 conditional_off += tree_to_uhwi (TYPE_SIZE_UNIT (type));
6705 }
6706 else
6707 mem = build4 (ARRAY_REF, type, cond_ptr,
6708 size_int (conditional_off++), NULL_TREE, NULL_TREE);
6709 tree mem2 = copy_node (mem);
6710 gimple_seq seq = NULL;
6711 mem = force_gimple_operand (mem, &seq, true, NULL_TREE);
6712 gimple_seq_add_seq (this_stmt_list, seq);
6713 tree lab1 = create_artificial_label (UNKNOWN_LOCATION);
6714 lab2 = create_artificial_label (UNKNOWN_LOCATION);
6715 gimple *g = gimple_build_cond (GT_EXPR, v, mem, lab1, lab2);
6716 gimple_seq_add_stmt (this_stmt_list, g);
6717 gimple_seq_add_stmt (this_stmt_list, gimple_build_label (lab1));
6718 gimplify_assign (mem2, v, this_stmt_list);
6719 }
6720 else if (predicate
6721 && ctx->combined_into_simd_safelen1
6722 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
6723 && OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c)
6724 && ctx->lastprivate_conditional_map)
6725 this_stmt_list = &post_stmt_list;
6726
6727 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
6728 || (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
6729 && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c)))
6730 {
6731 var = OMP_CLAUSE_DECL (c);
6732 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
6733 && OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c)
6734 && is_taskloop_ctx (ctx))
6735 {
6736 gcc_checking_assert (ctx->outer && is_task_ctx (ctx->outer));
6737 new_var = lookup_decl (var, ctx->outer);
6738 }
6739 else
6740 {
6741 new_var = lookup_decl (var, ctx);
6742 /* Avoid uninitialized warnings for lastprivate and
6743 for linear iterators. */
6744 if (predicate
6745 && (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
6746 || OMP_CLAUSE_LINEAR_NO_COPYIN (c)))
6747 TREE_NO_WARNING (new_var) = 1;
6748 }
6749
6750 if (!maybe_simt && simduid && DECL_HAS_VALUE_EXPR_P (new_var))
6751 {
6752 tree val = DECL_VALUE_EXPR (new_var);
6753 if (TREE_CODE (val) == ARRAY_REF
6754 && VAR_P (TREE_OPERAND (val, 0))
6755 && lookup_attribute ("omp simd array",
6756 DECL_ATTRIBUTES (TREE_OPERAND (val,
6757 0))))
6758 {
6759 if (lastlane == NULL)
6760 {
6761 lastlane = create_tmp_var (unsigned_type_node);
6762 gcall *g
6763 = gimple_build_call_internal (IFN_GOMP_SIMD_LAST_LANE,
6764 2, simduid,
6765 TREE_OPERAND (val, 1));
6766 gimple_call_set_lhs (g, lastlane);
6767 gimple_seq_add_stmt (this_stmt_list, g);
6768 }
6769 new_var = build4 (ARRAY_REF, TREE_TYPE (val),
6770 TREE_OPERAND (val, 0), lastlane,
6771 NULL_TREE, NULL_TREE);
6772 TREE_THIS_NOTRAP (new_var) = 1;
6773 }
6774 }
6775 else if (maybe_simt)
6776 {
6777 tree val = (DECL_HAS_VALUE_EXPR_P (new_var)
6778 ? DECL_VALUE_EXPR (new_var)
6779 : new_var);
6780 if (simtlast == NULL)
6781 {
6782 simtlast = create_tmp_var (unsigned_type_node);
6783 gcall *g = gimple_build_call_internal
6784 (IFN_GOMP_SIMT_LAST_LANE, 1, simtcond);
6785 gimple_call_set_lhs (g, simtlast);
6786 gimple_seq_add_stmt (this_stmt_list, g);
6787 }
6788 x = build_call_expr_internal_loc
6789 (UNKNOWN_LOCATION, IFN_GOMP_SIMT_XCHG_IDX,
6790 TREE_TYPE (val), 2, val, simtlast);
6791 new_var = unshare_expr (new_var);
6792 gimplify_assign (new_var, x, this_stmt_list);
6793 new_var = unshare_expr (new_var);
6794 }
6795
6796 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
6797 && OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c))
6798 {
6799 lower_omp (&OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c), ctx);
6800 gimple_seq_add_seq (this_stmt_list,
6801 OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c));
6802 OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c) = NULL;
6803 }
6804 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
6805 && OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c))
6806 {
6807 lower_omp (&OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c), ctx);
6808 gimple_seq_add_seq (this_stmt_list,
6809 OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c));
6810 OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c) = NULL;
6811 }
6812
6813 x = NULL_TREE;
6814 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
6815 && OMP_CLAUSE_LASTPRIVATE_LOOP_IV (c)
6816 && is_taskloop_ctx (ctx))
6817 {
6818 tree ovar = maybe_lookup_decl_in_outer_ctx (var,
6819 ctx->outer->outer);
6820 if (is_global_var (ovar))
6821 x = ovar;
6822 }
6823 if (!x)
6824 x = build_outer_var_ref (var, ctx, OMP_CLAUSE_LASTPRIVATE);
6825 if (omp_is_reference (var))
6826 new_var = build_simple_mem_ref_loc (clause_loc, new_var);
6827 x = lang_hooks.decls.omp_clause_assign_op (c, x, new_var);
6828 gimplify_and_add (x, this_stmt_list);
6829
6830 if (lab2)
6831 gimple_seq_add_stmt (this_stmt_list, gimple_build_label (lab2));
6832 }
6833
6834 next:
6835 c = OMP_CLAUSE_CHAIN (c);
6836 if (c == NULL && !par_clauses)
6837 {
6838 /* If this was a workshare clause, see if it had been combined
6839 with its parallel. In that case, continue looking for the
6840 clauses also on the parallel statement itself. */
6841 if (is_parallel_ctx (ctx))
6842 break;
6843
6844 ctx = ctx->outer;
6845 if (ctx == NULL || !is_parallel_ctx (ctx))
6846 break;
6847
6848 c = omp_find_clause (gimple_omp_parallel_clauses (ctx->stmt),
6849 OMP_CLAUSE_LASTPRIVATE);
6850 par_clauses = true;
6851 }
6852 }
6853
6854 if (label)
6855 gimple_seq_add_stmt (stmt_list, gimple_build_label (label));
6856 gimple_seq_add_seq (stmt_list, post_stmt_list);
6857 }
6858
6859 /* Lower the OpenACC reductions of CLAUSES for compute axis LEVEL
6860 (which might be a placeholder). INNER is true if this is an inner
6861 axis of a multi-axis loop. FORK and JOIN are (optional) fork and
6862 join markers. Generate the before-loop forking sequence in
6863 FORK_SEQ and the after-loop joining sequence in JOIN_SEQ. The
6864 general form of these sequences is
6865
6866 GOACC_REDUCTION_SETUP
6867 GOACC_FORK
6868 GOACC_REDUCTION_INIT
6869 ...
6870 GOACC_REDUCTION_FINI
6871 GOACC_JOIN
6872 GOACC_REDUCTION_TEARDOWN. */
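/* For instance, assuming a source loop like

     #pragma acc loop reduction (+:sum)

   SUM is threaded through the four IFN_GOACC_REDUCTION calls above
   (SETUP, INIT, FINI, TEARDOWN), with GOACC_FORK and GOACC_JOIN
   bracketing the partitioned execution; the offload target later
   expands these calls into device-specific code.  */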
6873
6874 static void
6875 lower_oacc_reductions (location_t loc, tree clauses, tree level, bool inner,
6876 gcall *fork, gcall *join, gimple_seq *fork_seq,
6877 gimple_seq *join_seq, omp_context *ctx)
6878 {
6879 gimple_seq before_fork = NULL;
6880 gimple_seq after_fork = NULL;
6881 gimple_seq before_join = NULL;
6882 gimple_seq after_join = NULL;
6883 tree init_code = NULL_TREE, fini_code = NULL_TREE,
6884 setup_code = NULL_TREE, teardown_code = NULL_TREE;
6885 unsigned offset = 0;
6886
6887 for (tree c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
6888 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION)
6889 {
6890 tree orig = OMP_CLAUSE_DECL (c);
6891 tree var = maybe_lookup_decl (orig, ctx);
6892 tree ref_to_res = NULL_TREE;
6893 tree incoming, outgoing, v1, v2, v3;
6894 bool is_private = false;
6895
6896 enum tree_code rcode = OMP_CLAUSE_REDUCTION_CODE (c);
6897 if (rcode == MINUS_EXPR)
6898 rcode = PLUS_EXPR;
6899 else if (rcode == TRUTH_ANDIF_EXPR)
6900 rcode = BIT_AND_EXPR;
6901 else if (rcode == TRUTH_ORIF_EXPR)
6902 rcode = BIT_IOR_EXPR;
6903 tree op = build_int_cst (unsigned_type_node, rcode);
6904
6905 if (!var)
6906 var = orig;
6907
6908 incoming = outgoing = var;
6909
6910 if (!inner)
6911 {
6912 /* See if an outer construct also reduces this variable. */
6913 omp_context *outer = ctx;
6914
6915 while (omp_context *probe = outer->outer)
6916 {
6917 enum gimple_code type = gimple_code (probe->stmt);
6918 tree cls;
6919
6920 switch (type)
6921 {
6922 case GIMPLE_OMP_FOR:
6923 cls = gimple_omp_for_clauses (probe->stmt);
6924 break;
6925
6926 case GIMPLE_OMP_TARGET:
6927 if ((gimple_omp_target_kind (probe->stmt)
6928 != GF_OMP_TARGET_KIND_OACC_PARALLEL)
6929 && (gimple_omp_target_kind (probe->stmt)
6930 != GF_OMP_TARGET_KIND_OACC_SERIAL))
6931 goto do_lookup;
6932
6933 cls = gimple_omp_target_clauses (probe->stmt);
6934 break;
6935
6936 default:
6937 goto do_lookup;
6938 }
6939
6940 outer = probe;
6941 for (; cls; cls = OMP_CLAUSE_CHAIN (cls))
6942 if (OMP_CLAUSE_CODE (cls) == OMP_CLAUSE_REDUCTION
6943 && orig == OMP_CLAUSE_DECL (cls))
6944 {
6945 incoming = outgoing = lookup_decl (orig, probe);
6946 goto has_outer_reduction;
6947 }
6948 else if ((OMP_CLAUSE_CODE (cls) == OMP_CLAUSE_FIRSTPRIVATE
6949 || OMP_CLAUSE_CODE (cls) == OMP_CLAUSE_PRIVATE)
6950 && orig == OMP_CLAUSE_DECL (cls))
6951 {
6952 is_private = true;
6953 goto do_lookup;
6954 }
6955 }
6956
6957 do_lookup:
6958 /* This is the outermost construct with this reduction,
6959 see if there's a mapping for it. */
6960 if (gimple_code (outer->stmt) == GIMPLE_OMP_TARGET
6961 && maybe_lookup_field (orig, outer) && !is_private)
6962 {
6963 ref_to_res = build_receiver_ref (orig, false, outer);
6964 if (omp_is_reference (orig))
6965 ref_to_res = build_simple_mem_ref (ref_to_res);
6966
6967 tree type = TREE_TYPE (var);
6968 if (POINTER_TYPE_P (type))
6969 type = TREE_TYPE (type);
6970
6971 outgoing = var;
6972 incoming = omp_reduction_init_op (loc, rcode, type);
6973 }
6974 else
6975 {
6976 /* Try to look at enclosing contexts for reduction var,
6977 use original if no mapping found. */
6978 tree t = NULL_TREE;
6979 omp_context *c = ctx->outer;
6980 while (c && !t)
6981 {
6982 t = maybe_lookup_decl (orig, c);
6983 c = c->outer;
6984 }
6985 incoming = outgoing = (t ? t : orig);
6986 }
6987
6988 has_outer_reduction:;
6989 }
6990
6991 if (!ref_to_res)
6992 ref_to_res = integer_zero_node;
6993
6994 if (omp_is_reference (orig))
6995 {
6996 tree type = TREE_TYPE (var);
6997 const char *id = IDENTIFIER_POINTER (DECL_NAME (var));
6998
6999 if (!inner)
7000 {
7001 tree x = create_tmp_var (TREE_TYPE (type), id);
7002 gimplify_assign (var, build_fold_addr_expr (x), fork_seq);
7003 }
7004
7005 v1 = create_tmp_var (type, id);
7006 v2 = create_tmp_var (type, id);
7007 v3 = create_tmp_var (type, id);
7008
7009 gimplify_assign (v1, var, fork_seq);
7010 gimplify_assign (v2, var, fork_seq);
7011 gimplify_assign (v3, var, fork_seq);
7012
7013 var = build_simple_mem_ref (var);
7014 v1 = build_simple_mem_ref (v1);
7015 v2 = build_simple_mem_ref (v2);
7016 v3 = build_simple_mem_ref (v3);
7017 outgoing = build_simple_mem_ref (outgoing);
7018
7019 if (!TREE_CONSTANT (incoming))
7020 incoming = build_simple_mem_ref (incoming);
7021 }
7022 else
7023 v1 = v2 = v3 = var;
7024
7025 /* Determine the position in the reduction buffer, which may be
7026 used by the target. The parser has ensured that this is not a
7027 variable-sized type. */
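/* The computation below is the usual align-up idiom: e.g. with
   offset == 6 and align == 4, (6 + 4 - 1) & ~(4 - 1) == 8.  */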
7028 fixed_size_mode mode
7029 = as_a <fixed_size_mode> (TYPE_MODE (TREE_TYPE (var)));
7030 unsigned align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
7031 offset = (offset + align - 1) & ~(align - 1);
7032 tree off = build_int_cst (sizetype, offset);
7033 offset += GET_MODE_SIZE (mode);
7034
7035 if (!init_code)
7036 {
7037 init_code = build_int_cst (integer_type_node,
7038 IFN_GOACC_REDUCTION_INIT);
7039 fini_code = build_int_cst (integer_type_node,
7040 IFN_GOACC_REDUCTION_FINI);
7041 setup_code = build_int_cst (integer_type_node,
7042 IFN_GOACC_REDUCTION_SETUP);
7043 teardown_code = build_int_cst (integer_type_node,
7044 IFN_GOACC_REDUCTION_TEARDOWN);
7045 }
7046
7047 tree setup_call
7048 = build_call_expr_internal_loc (loc, IFN_GOACC_REDUCTION,
7049 TREE_TYPE (var), 6, setup_code,
7050 unshare_expr (ref_to_res),
7051 incoming, level, op, off);
7052 tree init_call
7053 = build_call_expr_internal_loc (loc, IFN_GOACC_REDUCTION,
7054 TREE_TYPE (var), 6, init_code,
7055 unshare_expr (ref_to_res),
7056 v1, level, op, off);
7057 tree fini_call
7058 = build_call_expr_internal_loc (loc, IFN_GOACC_REDUCTION,
7059 TREE_TYPE (var), 6, fini_code,
7060 unshare_expr (ref_to_res),
7061 v2, level, op, off);
7062 tree teardown_call
7063 = build_call_expr_internal_loc (loc, IFN_GOACC_REDUCTION,
7064 TREE_TYPE (var), 6, teardown_code,
7065 ref_to_res, v3, level, op, off);
7066
7067 gimplify_assign (v1, setup_call, &before_fork);
7068 gimplify_assign (v2, init_call, &after_fork);
7069 gimplify_assign (v3, fini_call, &before_join);
7070 gimplify_assign (outgoing, teardown_call, &after_join);
7071 }
7072
7073 /* Now stitch things together. */
7074 gimple_seq_add_seq (fork_seq, before_fork);
7075 if (fork)
7076 gimple_seq_add_stmt (fork_seq, fork);
7077 gimple_seq_add_seq (fork_seq, after_fork);
7078
7079 gimple_seq_add_seq (join_seq, before_join);
7080 if (join)
7081 gimple_seq_add_stmt (join_seq, join);
7082 gimple_seq_add_seq (join_seq, after_join);
7083 }
7084
7085 /* Generate code to implement the REDUCTION clauses, append it
7086 to STMT_SEQP. CLIST if non-NULL is a pointer to a sequence
7087 that should be emitted also inside of the critical section,
7088 in that case clear *CLIST afterwards, otherwise leave it as is
7089 and let the caller emit it itself. */
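/* An illustrative sketch (simplified): for a single scalar clause
   such as

     #pragma omp parallel for reduction (+:sum)

   the merge of the private copy into the shared SUM is emitted as an
   OMP_ATOMIC update, whereas multiple clauses, array sections and UDRs
   are merged between GOMP_atomic_start ()/GOMP_atomic_end () calls. */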
7090
7091 static void
7092 lower_reduction_clauses (tree clauses, gimple_seq *stmt_seqp,
7093 gimple_seq *clist, omp_context *ctx)
7094 {
7095 gimple_seq sub_seq = NULL;
7096 gimple *stmt;
7097 tree x, c;
7098 int count = 0;
7099
7100 /* OpenACC loop reductions are handled elsewhere. */
7101 if (is_gimple_omp_oacc (ctx->stmt))
7102 return;
7103
7104 /* SIMD reductions are handled in lower_rec_input_clauses. */
7105 if (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
7106 && gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_SIMD)
7107 return;
7108
7109 /* inscan reductions are handled elsewhere. */
7110 if (ctx->scan_inclusive || ctx->scan_exclusive)
7111 return;
7112
7113 /* First see if there is exactly one reduction clause. Use an OMP_ATOMIC
7114 update in that case; otherwise use a lock. */
7115 for (c = clauses; c && count < 2; c = OMP_CLAUSE_CHAIN (c))
7116 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
7117 && !OMP_CLAUSE_REDUCTION_TASK (c))
7118 {
7119 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c)
7120 || TREE_CODE (OMP_CLAUSE_DECL (c)) == MEM_REF)
7121 {
7122 /* Never use OMP_ATOMIC for array reductions or UDRs. */
7123 count = -1;
7124 break;
7125 }
7126 count++;
7127 }
7128
7129 if (count == 0)
7130 return;
7131
7132 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
7133 {
7134 tree var, ref, new_var, orig_var;
7135 enum tree_code code;
7136 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
7137
7138 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION
7139 || OMP_CLAUSE_REDUCTION_TASK (c))
7140 continue;
7141
7142 enum omp_clause_code ccode = OMP_CLAUSE_REDUCTION;
7143 orig_var = var = OMP_CLAUSE_DECL (c);
7144 if (TREE_CODE (var) == MEM_REF)
7145 {
7146 var = TREE_OPERAND (var, 0);
7147 if (TREE_CODE (var) == POINTER_PLUS_EXPR)
7148 var = TREE_OPERAND (var, 0);
7149 if (TREE_CODE (var) == ADDR_EXPR)
7150 var = TREE_OPERAND (var, 0);
7151 else
7152 {
7153 /* If this is a pointer- or reference-based array
7154 section, the var could be private in the outer
7155 context, e.g. on an orphaned loop construct. Pretend this
7156 is the private variable's outer reference. */
7157 ccode = OMP_CLAUSE_PRIVATE;
7158 if (TREE_CODE (var) == INDIRECT_REF)
7159 var = TREE_OPERAND (var, 0);
7160 }
7161 orig_var = var;
7162 if (is_variable_sized (var))
7163 {
7164 gcc_assert (DECL_HAS_VALUE_EXPR_P (var));
7165 var = DECL_VALUE_EXPR (var);
7166 gcc_assert (TREE_CODE (var) == INDIRECT_REF);
7167 var = TREE_OPERAND (var, 0);
7168 gcc_assert (DECL_P (var));
7169 }
7170 }
7171 new_var = lookup_decl (var, ctx);
7172 if (var == OMP_CLAUSE_DECL (c) && omp_is_reference (var))
7173 new_var = build_simple_mem_ref_loc (clause_loc, new_var);
7174 ref = build_outer_var_ref (var, ctx, ccode);
7175 code = OMP_CLAUSE_REDUCTION_CODE (c);
7176
7177 /* reduction(-:var) sums up the partial results, so it acts
7178 identically to reduction(+:var). */
7179 if (code == MINUS_EXPR)
7180 code = PLUS_EXPR;
7181
7182 if (count == 1)
7183 {
7184 tree addr = build_fold_addr_expr_loc (clause_loc, ref);
7185
7186 addr = save_expr (addr);
7187 ref = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (addr)), addr);
7188 x = fold_build2_loc (clause_loc, code, TREE_TYPE (ref), ref, new_var);
7189 x = build2 (OMP_ATOMIC, void_type_node, addr, x);
7190 OMP_ATOMIC_MEMORY_ORDER (x) = OMP_MEMORY_ORDER_RELAXED;
7191 gimplify_and_add (x, stmt_seqp);
7192 return;
7193 }
7194 else if (TREE_CODE (OMP_CLAUSE_DECL (c)) == MEM_REF)
7195 {
7196 tree d = OMP_CLAUSE_DECL (c);
7197 tree type = TREE_TYPE (d);
7198 tree v = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
7199 tree i = create_tmp_var (TREE_TYPE (v));
7200 tree ptype = build_pointer_type (TREE_TYPE (type));
7201 tree bias = TREE_OPERAND (d, 1);
7202 d = TREE_OPERAND (d, 0);
7203 if (TREE_CODE (d) == POINTER_PLUS_EXPR)
7204 {
7205 tree b = TREE_OPERAND (d, 1);
7206 b = maybe_lookup_decl (b, ctx);
7207 if (b == NULL)
7208 {
7209 b = TREE_OPERAND (d, 1);
7210 b = maybe_lookup_decl_in_outer_ctx (b, ctx);
7211 }
7212 if (integer_zerop (bias))
7213 bias = b;
7214 else
7215 {
7216 bias = fold_convert_loc (clause_loc, TREE_TYPE (b), bias);
7217 bias = fold_build2_loc (clause_loc, PLUS_EXPR,
7218 TREE_TYPE (b), b, bias);
7219 }
7220 d = TREE_OPERAND (d, 0);
7221 }
7222 /* For REF, build_outer_var_ref has already performed this
7223 dereference, so only NEW_VAR needs one. */
7224 if (TREE_CODE (d) == INDIRECT_REF)
7225 {
7226 new_var = build_simple_mem_ref_loc (clause_loc, new_var);
7227 gcc_assert (omp_is_reference (var) && var == orig_var);
7228 }
7229 else if (TREE_CODE (d) == ADDR_EXPR)
7230 {
7231 if (orig_var == var)
7232 {
7233 new_var = build_fold_addr_expr (new_var);
7234 ref = build_fold_addr_expr (ref);
7235 }
7236 }
7237 else
7238 {
7239 gcc_assert (orig_var == var);
7240 if (omp_is_reference (var))
7241 ref = build_fold_addr_expr (ref);
7242 }
7243 if (DECL_P (v))
7244 {
7245 tree t = maybe_lookup_decl (v, ctx);
7246 if (t)
7247 v = t;
7248 else
7249 v = maybe_lookup_decl_in_outer_ctx (v, ctx);
7250 gimplify_expr (&v, stmt_seqp, NULL, is_gimple_val, fb_rvalue);
7251 }
7252 if (!integer_zerop (bias))
7253 {
7254 bias = fold_convert_loc (clause_loc, sizetype, bias);
7255 new_var = fold_build2_loc (clause_loc, POINTER_PLUS_EXPR,
7256 TREE_TYPE (new_var), new_var,
7257 unshare_expr (bias));
7258 ref = fold_build2_loc (clause_loc, POINTER_PLUS_EXPR,
7259 TREE_TYPE (ref), ref, bias);
7260 }
7261 new_var = fold_convert_loc (clause_loc, ptype, new_var);
7262 ref = fold_convert_loc (clause_loc, ptype, ref);
7263 tree m = create_tmp_var (ptype);
7264 gimplify_assign (m, new_var, stmt_seqp);
7265 new_var = m;
7266 m = create_tmp_var (ptype);
7267 gimplify_assign (m, ref, stmt_seqp);
7268 ref = m;
7269 gimplify_assign (i, build_int_cst (TREE_TYPE (v), 0), stmt_seqp);
7270 tree body = create_artificial_label (UNKNOWN_LOCATION);
7271 tree end = create_artificial_label (UNKNOWN_LOCATION);
7272 gimple_seq_add_stmt (&sub_seq, gimple_build_label (body));
7273 tree priv = build_simple_mem_ref_loc (clause_loc, new_var);
7274 tree out = build_simple_mem_ref_loc (clause_loc, ref);
7275 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
7276 {
7277 tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
7278 tree decl_placeholder
7279 = OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c);
7280 SET_DECL_VALUE_EXPR (placeholder, out);
7281 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
7282 SET_DECL_VALUE_EXPR (decl_placeholder, priv);
7283 DECL_HAS_VALUE_EXPR_P (decl_placeholder) = 1;
7284 lower_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c), ctx);
7285 gimple_seq_add_seq (&sub_seq,
7286 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c));
7287 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
7288 OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) = NULL;
7289 OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c) = NULL;
7290 }
7291 else
7292 {
7293 x = build2 (code, TREE_TYPE (out), out, priv);
7294 out = unshare_expr (out);
7295 gimplify_assign (out, x, &sub_seq);
7296 }
7297 gimple *g = gimple_build_assign (new_var, POINTER_PLUS_EXPR, new_var,
7298 TYPE_SIZE_UNIT (TREE_TYPE (type)));
7299 gimple_seq_add_stmt (&sub_seq, g);
7300 g = gimple_build_assign (ref, POINTER_PLUS_EXPR, ref,
7301 TYPE_SIZE_UNIT (TREE_TYPE (type)));
7302 gimple_seq_add_stmt (&sub_seq, g);
7303 g = gimple_build_assign (i, PLUS_EXPR, i,
7304 build_int_cst (TREE_TYPE (i), 1));
7305 gimple_seq_add_stmt (&sub_seq, g);
7306 g = gimple_build_cond (LE_EXPR, i, v, body, end);
7307 gimple_seq_add_stmt (&sub_seq, g);
7308 gimple_seq_add_stmt (&sub_seq, gimple_build_label (end));
7309 }
7310 else if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
7311 {
7312 tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
7313
7314 if (omp_is_reference (var)
7315 && !useless_type_conversion_p (TREE_TYPE (placeholder),
7316 TREE_TYPE (ref)))
7317 ref = build_fold_addr_expr_loc (clause_loc, ref);
7318 SET_DECL_VALUE_EXPR (placeholder, ref);
7319 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
7320 lower_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c), ctx);
7321 gimple_seq_add_seq (&sub_seq, OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c));
7322 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
7323 OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) = NULL;
7324 }
7325 else
7326 {
7327 x = build2 (code, TREE_TYPE (ref), ref, new_var);
7328 ref = build_outer_var_ref (var, ctx);
7329 gimplify_assign (ref, x, &sub_seq);
7330 }
7331 }
7332
7333 stmt = gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_ATOMIC_START),
7334 0);
7335 gimple_seq_add_stmt (stmt_seqp, stmt);
7336
7337 gimple_seq_add_seq (stmt_seqp, sub_seq);
7338
7339 if (clist)
7340 {
7341 gimple_seq_add_seq (stmt_seqp, *clist);
7342 *clist = NULL;
7343 }
7344
7345 stmt = gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_ATOMIC_END),
7346 0);
7347 gimple_seq_add_stmt (stmt_seqp, stmt);
7348 }
7349
7350
7351 /* Generate code to implement the COPYPRIVATE clauses. */
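/* An illustrative sketch (simplified): for

     #pragma omp single copyprivate (x)

   the executing thread stores X (or &X, when passed by reference) into
   the sender record, and every other thread copies it back out of the
   broadcast record via the language hook's assignment operation. */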
7352
7353 static void
7354 lower_copyprivate_clauses (tree clauses, gimple_seq *slist, gimple_seq *rlist,
7355 omp_context *ctx)
7356 {
7357 tree c;
7358
7359 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
7360 {
7361 tree var, new_var, ref, x;
7362 bool by_ref;
7363 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
7364
7365 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_COPYPRIVATE)
7366 continue;
7367
7368 var = OMP_CLAUSE_DECL (c);
7369 by_ref = use_pointer_for_field (var, NULL);
7370
7371 ref = build_sender_ref (var, ctx);
7372 x = new_var = lookup_decl_in_outer_ctx (var, ctx);
7373 if (by_ref)
7374 {
7375 x = build_fold_addr_expr_loc (clause_loc, new_var);
7376 x = fold_convert_loc (clause_loc, TREE_TYPE (ref), x);
7377 }
7378 gimplify_assign (ref, x, slist);
7379
7380 ref = build_receiver_ref (var, false, ctx);
7381 if (by_ref)
7382 {
7383 ref = fold_convert_loc (clause_loc,
7384 build_pointer_type (TREE_TYPE (new_var)),
7385 ref);
7386 ref = build_fold_indirect_ref_loc (clause_loc, ref);
7387 }
7388 if (omp_is_reference (var))
7389 {
7390 ref = fold_convert_loc (clause_loc, TREE_TYPE (new_var), ref);
7391 ref = build_simple_mem_ref_loc (clause_loc, ref);
7392 new_var = build_simple_mem_ref_loc (clause_loc, new_var);
7393 }
7394 x = lang_hooks.decls.omp_clause_assign_op (c, new_var, ref);
7395 gimplify_and_add (x, rlist);
7396 }
7397 }
7398
7399
7400 /* Generate code to implement the clauses, FIRSTPRIVATE, COPYIN, LASTPRIVATE,
7401 and REDUCTION from the sender (aka parent) side. */
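/* A hedged sketch (the .omp_data_o field names are illustrative): for
   "#pragma omp task firstprivate (a)" the sender side emits
   ".omp_data_o.a = a" (or "= &a" when passed by reference) into ILIST,
   and a scalar lastprivate additionally emits "a = .omp_data_o.a" into
   OLIST after the region. */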
7402
7403 static void
7404 lower_send_clauses (tree clauses, gimple_seq *ilist, gimple_seq *olist,
7405 omp_context *ctx)
7406 {
7407 tree c, t;
7408 int ignored_looptemp = 0;
7409 bool is_taskloop = false;
7410
7411 /* For taskloop, ignore the first two _looptemp_ clauses; those are
7412 initialized by GOMP_taskloop. */
7413 if (is_task_ctx (ctx) && gimple_omp_task_taskloop_p (ctx->stmt))
7414 {
7415 ignored_looptemp = 2;
7416 is_taskloop = true;
7417 }
7418
7419 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
7420 {
7421 tree val, ref, x, var;
7422 bool by_ref, do_in = false, do_out = false;
7423 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
7424
7425 switch (OMP_CLAUSE_CODE (c))
7426 {
7427 case OMP_CLAUSE_PRIVATE:
7428 if (OMP_CLAUSE_PRIVATE_OUTER_REF (c))
7429 break;
7430 continue;
7431 case OMP_CLAUSE_FIRSTPRIVATE:
7432 case OMP_CLAUSE_COPYIN:
7433 case OMP_CLAUSE_LASTPRIVATE:
7434 case OMP_CLAUSE_IN_REDUCTION:
7435 case OMP_CLAUSE__REDUCTEMP_:
7436 break;
7437 case OMP_CLAUSE_REDUCTION:
7438 if (is_task_ctx (ctx) || OMP_CLAUSE_REDUCTION_TASK (c))
7439 continue;
7440 break;
7441 case OMP_CLAUSE_SHARED:
7442 if (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
7443 break;
7444 continue;
7445 case OMP_CLAUSE__LOOPTEMP_:
7446 if (ignored_looptemp)
7447 {
7448 ignored_looptemp--;
7449 continue;
7450 }
7451 break;
7452 default:
7453 continue;
7454 }
7455
7456 val = OMP_CLAUSE_DECL (c);
7457 if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
7458 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_IN_REDUCTION)
7459 && TREE_CODE (val) == MEM_REF)
7460 {
7461 val = TREE_OPERAND (val, 0);
7462 if (TREE_CODE (val) == POINTER_PLUS_EXPR)
7463 val = TREE_OPERAND (val, 0);
7464 if (TREE_CODE (val) == INDIRECT_REF
7465 || TREE_CODE (val) == ADDR_EXPR)
7466 val = TREE_OPERAND (val, 0);
7467 if (is_variable_sized (val))
7468 continue;
7469 }
7470
7471 /* For OMP_CLAUSE_SHARED_FIRSTPRIVATE, look beyond the
7472 outer taskloop region. */
7473 omp_context *ctx_for_o = ctx;
7474 if (is_taskloop
7475 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED
7476 && OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
7477 ctx_for_o = ctx->outer;
7478
7479 var = lookup_decl_in_outer_ctx (val, ctx_for_o);
7480
7481 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_COPYIN
7482 && is_global_var (var)
7483 && (val == OMP_CLAUSE_DECL (c)
7484 || !is_task_ctx (ctx)
7485 || (TREE_CODE (TREE_TYPE (val)) != POINTER_TYPE
7486 && (TREE_CODE (TREE_TYPE (val)) != REFERENCE_TYPE
7487 || (TREE_CODE (TREE_TYPE (TREE_TYPE (val)))
7488 != POINTER_TYPE)))))
7489 continue;
7490
7491 t = omp_member_access_dummy_var (var);
7492 if (t)
7493 {
7494 var = DECL_VALUE_EXPR (var);
7495 tree o = maybe_lookup_decl_in_outer_ctx (t, ctx_for_o);
7496 if (o != t)
7497 var = unshare_and_remap (var, t, o);
7498 else
7499 var = unshare_expr (var);
7500 }
7501
7502 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED)
7503 {
7504 /* Handle taskloop firstprivate/lastprivate, where the
7505 lastprivate on GIMPLE_OMP_TASK is represented as
7506 OMP_CLAUSE_SHARED_FIRSTPRIVATE. */
7507 tree f = lookup_sfield ((splay_tree_key) &DECL_UID (val), ctx);
7508 x = omp_build_component_ref (ctx->sender_decl, f);
7509 if (use_pointer_for_field (val, ctx))
7510 var = build_fold_addr_expr (var);
7511 gimplify_assign (x, var, ilist);
7512 DECL_ABSTRACT_ORIGIN (f) = NULL;
7513 continue;
7514 }
7515
7516 if (((OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION
7517 && OMP_CLAUSE_CODE (c) != OMP_CLAUSE_IN_REDUCTION)
7518 || val == OMP_CLAUSE_DECL (c))
7519 && is_variable_sized (val))
7520 continue;
7521 by_ref = use_pointer_for_field (val, NULL);
7522
7523 switch (OMP_CLAUSE_CODE (c))
7524 {
7525 case OMP_CLAUSE_FIRSTPRIVATE:
7526 if (OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (c)
7527 && !by_ref
7528 && is_task_ctx (ctx))
7529 TREE_NO_WARNING (var) = 1;
7530 do_in = true;
7531 break;
7532
7533 case OMP_CLAUSE_PRIVATE:
7534 case OMP_CLAUSE_COPYIN:
7535 case OMP_CLAUSE__LOOPTEMP_:
7536 case OMP_CLAUSE__REDUCTEMP_:
7537 do_in = true;
7538 break;
7539
7540 case OMP_CLAUSE_LASTPRIVATE:
7541 if (by_ref || omp_is_reference (val))
7542 {
7543 if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
7544 continue;
7545 do_in = true;
7546 }
7547 else
7548 {
7549 do_out = true;
7550 if (lang_hooks.decls.omp_private_outer_ref (val))
7551 do_in = true;
7552 }
7553 break;
7554
7555 case OMP_CLAUSE_REDUCTION:
7556 case OMP_CLAUSE_IN_REDUCTION:
7557 do_in = true;
7558 if (val == OMP_CLAUSE_DECL (c))
7559 {
7560 if (is_task_ctx (ctx))
7561 by_ref = use_pointer_for_field (val, ctx);
7562 else
7563 do_out = !(by_ref || omp_is_reference (val));
7564 }
7565 else
7566 by_ref = TREE_CODE (TREE_TYPE (val)) == ARRAY_TYPE;
7567 break;
7568
7569 default:
7570 gcc_unreachable ();
7571 }
7572
7573 if (do_in)
7574 {
7575 ref = build_sender_ref (val, ctx);
7576 x = by_ref ? build_fold_addr_expr_loc (clause_loc, var) : var;
7577 gimplify_assign (ref, x, ilist);
7578 if (is_task_ctx (ctx))
7579 DECL_ABSTRACT_ORIGIN (TREE_OPERAND (ref, 1)) = NULL;
7580 }
7581
7582 if (do_out)
7583 {
7584 ref = build_sender_ref (val, ctx);
7585 gimplify_assign (var, ref, olist);
7586 }
7587 }
7588 }
7589
7590 /* Generate code to implement SHARED from the sender (aka parent)
7591 side. This is trickier, since GIMPLE_OMP_PARALLEL_CLAUSES doesn't
7592 list things that got automatically shared. */
7593
7594 static void
7595 lower_send_shared_vars (gimple_seq *ilist, gimple_seq *olist, omp_context *ctx)
7596 {
7597 tree var, ovar, nvar, t, f, x, record_type;
7598
7599 if (ctx->record_type == NULL)
7600 return;
7601
7602 record_type = ctx->srecord_type ? ctx->srecord_type : ctx->record_type;
7603 for (f = TYPE_FIELDS (record_type); f ; f = DECL_CHAIN (f))
7604 {
7605 ovar = DECL_ABSTRACT_ORIGIN (f);
7606 if (!ovar || TREE_CODE (ovar) == FIELD_DECL)
7607 continue;
7608
7609 nvar = maybe_lookup_decl (ovar, ctx);
7610 if (!nvar
7611 || !DECL_HAS_VALUE_EXPR_P (nvar)
7612 || (ctx->allocate_map
7613 && ctx->allocate_map->get (ovar)))
7614 continue;
7615
7616 /* If CTX is a nested parallel directive, find the immediately
7617 enclosing parallel or workshare construct that contains a
7618 mapping for OVAR. */
7619 var = lookup_decl_in_outer_ctx (ovar, ctx);
7620
7621 t = omp_member_access_dummy_var (var);
7622 if (t)
7623 {
7624 var = DECL_VALUE_EXPR (var);
7625 tree o = maybe_lookup_decl_in_outer_ctx (t, ctx);
7626 if (o != t)
7627 var = unshare_and_remap (var, t, o);
7628 else
7629 var = unshare_expr (var);
7630 }
7631
7632 if (use_pointer_for_field (ovar, ctx))
7633 {
7634 x = build_sender_ref (ovar, ctx);
7635 if (TREE_CODE (TREE_TYPE (f)) == ARRAY_TYPE
7636 && TREE_TYPE (f) == TREE_TYPE (ovar))
7637 {
7638 gcc_assert (is_parallel_ctx (ctx)
7639 && DECL_ARTIFICIAL (ovar));
7640 /* _condtemp_ clause. */
7641 var = build_constructor (TREE_TYPE (x), NULL);
7642 }
7643 else
7644 var = build_fold_addr_expr (var);
7645 gimplify_assign (x, var, ilist);
7646 }
7647 else
7648 {
7649 x = build_sender_ref (ovar, ctx);
7650 gimplify_assign (x, var, ilist);
7651
7652 if (!TREE_READONLY (var)
7653 /* We don't need to receive a new reference to a result
7654 or parm decl. In fact we must not store to it, as we would
7655 invalidate any pending RSO and generate wrong gimple
7656 during inlining. */
7657 && !((TREE_CODE (var) == RESULT_DECL
7658 || TREE_CODE (var) == PARM_DECL)
7659 && DECL_BY_REFERENCE (var)))
7660 {
7661 x = build_sender_ref (ovar, ctx);
7662 gimplify_assign (var, x, olist);
7663 }
7664 }
7665 }
7666 }
7667
7668 /* Emit an OpenACC head marker call, encapsulating the partitioning and
7669 other information that must be processed by the target compiler.
7670 Return the maximum number of dimensions the associated loop might
7671 be partitioned over. */
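/* For example (illustrative): "#pragma acc loop gang vector" sets
   OLF_DIM_GANG and OLF_DIM_VECTOR in TAG and yields LEVELS == 2, and
   those values become arguments of the IFN_UNIQUE head marker call
   built below. */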
7672
7673 static unsigned
7674 lower_oacc_head_mark (location_t loc, tree ddvar, tree clauses,
7675 gimple_seq *seq, omp_context *ctx)
7676 {
7677 unsigned levels = 0;
7678 unsigned tag = 0;
7679 tree gang_static = NULL_TREE;
7680 auto_vec<tree, 5> args;
7681
7682 args.quick_push (build_int_cst
7683 (integer_type_node, IFN_UNIQUE_OACC_HEAD_MARK));
7684 args.quick_push (ddvar);
7685 for (tree c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
7686 {
7687 switch (OMP_CLAUSE_CODE (c))
7688 {
7689 case OMP_CLAUSE_GANG:
7690 tag |= OLF_DIM_GANG;
7691 gang_static = OMP_CLAUSE_GANG_STATIC_EXPR (c);
7692 /* static:* is represented by -1, and we can ignore it, as
7693 scheduling is always static. */
7694 if (gang_static && integer_minus_onep (gang_static))
7695 gang_static = NULL_TREE;
7696 levels++;
7697 break;
7698
7699 case OMP_CLAUSE_WORKER:
7700 tag |= OLF_DIM_WORKER;
7701 levels++;
7702 break;
7703
7704 case OMP_CLAUSE_VECTOR:
7705 tag |= OLF_DIM_VECTOR;
7706 levels++;
7707 break;
7708
7709 case OMP_CLAUSE_SEQ:
7710 tag |= OLF_SEQ;
7711 break;
7712
7713 case OMP_CLAUSE_AUTO:
7714 tag |= OLF_AUTO;
7715 break;
7716
7717 case OMP_CLAUSE_INDEPENDENT:
7718 tag |= OLF_INDEPENDENT;
7719 break;
7720
7721 case OMP_CLAUSE_TILE:
7722 tag |= OLF_TILE;
7723 break;
7724
7725 default:
7726 continue;
7727 }
7728 }
7729
7730 if (gang_static)
7731 {
7732 if (DECL_P (gang_static))
7733 gang_static = build_outer_var_ref (gang_static, ctx);
7734 tag |= OLF_GANG_STATIC;
7735 }
7736
7737 /* In a parallel region, loops are implicitly INDEPENDENT. */
7738 omp_context *tgt = enclosing_target_ctx (ctx);
7739 if (!tgt || is_oacc_parallel_or_serial (tgt))
7740 tag |= OLF_INDEPENDENT;
7741
7742 if (tag & OLF_TILE)
7743 /* Tiling could use all 3 levels. */
7744 levels = 3;
7745 else
7746 {
7747 /* A loop lacking SEQ, GANG, WORKER and/or VECTOR could be AUTO.
7748 Ensure at least one level, or two for possible auto
7749 partitioning. */
7750 bool maybe_auto = !(tag & (((GOMP_DIM_MASK (GOMP_DIM_MAX) - 1)
7751 << OLF_DIM_BASE) | OLF_SEQ));
7752
7753 if (levels < 1u + maybe_auto)
7754 levels = 1u + maybe_auto;
7755 }
7756
7757 args.quick_push (build_int_cst (integer_type_node, levels));
7758 args.quick_push (build_int_cst (integer_type_node, tag));
7759 if (gang_static)
7760 args.quick_push (gang_static);
7761
7762 gcall *call = gimple_build_call_internal_vec (IFN_UNIQUE, args);
7763 gimple_set_location (call, loc);
7764 gimple_set_lhs (call, ddvar);
7765 gimple_seq_add_stmt (seq, call);
7766
7767 return levels;
7768 }
7769
7770 /* Emit an OpenACC loop head or tail marker to SEQ. LEVEL is the
7771 partitioning level of the enclosed region. */
7772
7773 static void
7774 lower_oacc_loop_marker (location_t loc, tree ddvar, bool head,
7775 tree tofollow, gimple_seq *seq)
7776 {
7777 int marker_kind = (head ? IFN_UNIQUE_OACC_HEAD_MARK
7778 : IFN_UNIQUE_OACC_TAIL_MARK);
7779 tree marker = build_int_cst (integer_type_node, marker_kind);
7780 int nargs = 2 + (tofollow != NULL_TREE);
7781 gcall *call = gimple_build_call_internal (IFN_UNIQUE, nargs,
7782 marker, ddvar, tofollow);
7783 gimple_set_location (call, loc);
7784 gimple_set_lhs (call, ddvar);
7785 gimple_seq_add_stmt (seq, call);
7786 }
7787
7788 /* Generate the before and after OpenACC loop sequences. CLAUSES are
7789 the loop clauses, from which we extract reductions. Initialize
7790 HEAD and TAIL. */
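/* Illustratively (an added note): each iteration appends its fork
   sequence to HEAD and prepends its join sequence to TAIL, so with two
   partitioning levels the result nests as

     HEAD: head-mark, fork (outer), fork (inner)
     ...loop body...
     TAIL: join (inner), join (outer), tail-mark

   with the reductions for each level spliced around its fork/join. */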
7791
7792 static void
7793 lower_oacc_head_tail (location_t loc, tree clauses,
7794 gimple_seq *head, gimple_seq *tail, omp_context *ctx)
7795 {
7796 bool inner = false;
7797 tree ddvar = create_tmp_var (integer_type_node, ".data_dep");
7798 gimple_seq_add_stmt (head, gimple_build_assign (ddvar, integer_zero_node));
7799
7800 unsigned count = lower_oacc_head_mark (loc, ddvar, clauses, head, ctx);
7801 tree fork_kind = build_int_cst (unsigned_type_node, IFN_UNIQUE_OACC_FORK);
7802 tree join_kind = build_int_cst (unsigned_type_node, IFN_UNIQUE_OACC_JOIN);
7803
7804 gcc_assert (count);
7805 for (unsigned done = 1; count; count--, done++)
7806 {
7807 gimple_seq fork_seq = NULL;
7808 gimple_seq join_seq = NULL;
7809
7810 tree place = build_int_cst (integer_type_node, -1);
7811 gcall *fork = gimple_build_call_internal (IFN_UNIQUE, 3,
7812 fork_kind, ddvar, place);
7813 gimple_set_location (fork, loc);
7814 gimple_set_lhs (fork, ddvar);
7815
7816 gcall *join = gimple_build_call_internal (IFN_UNIQUE, 3,
7817 join_kind, ddvar, place);
7818 gimple_set_location (join, loc);
7819 gimple_set_lhs (join, ddvar);
7820
7821 /* Mark the beginning of this level sequence. */
7822 if (inner)
7823 lower_oacc_loop_marker (loc, ddvar, true,
7824 build_int_cst (integer_type_node, count),
7825 &fork_seq);
7826 lower_oacc_loop_marker (loc, ddvar, false,
7827 build_int_cst (integer_type_node, done),
7828 &join_seq);
7829
7830 lower_oacc_reductions (loc, clauses, place, inner,
7831 fork, join, &fork_seq, &join_seq, ctx);
7832
7833 /* Append this level to head. */
7834 gimple_seq_add_seq (head, fork_seq);
7835 /* Prepend it to tail. */
7836 gimple_seq_add_seq (&join_seq, *tail);
7837 *tail = join_seq;
7838
7839 inner = true;
7840 }
7841
7842 /* Mark the end of the sequence. */
7843 lower_oacc_loop_marker (loc, ddvar, true, NULL_TREE, head);
7844 lower_oacc_loop_marker (loc, ddvar, false, NULL_TREE, tail);
7845 }
7846
7847 /* If exceptions are enabled, wrap the statements in BODY in a MUST_NOT_THROW
7848 catch handler and return it. This prevents programs from violating the
7849 structured block semantics with throws. */
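/* Conceptually (illustrative), BODY becomes

     try { BODY } catch (...) { MUST_NOT_THROW; }

   where the handler invokes the language's eh_protect_cleanup_actions
   (e.g. std::terminate for C++) when available, or __builtin_trap
   otherwise. */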
7850
7851 static gimple_seq
7852 maybe_catch_exception (gimple_seq body)
7853 {
7854 gimple *g;
7855 tree decl;
7856
7857 if (!flag_exceptions)
7858 return body;
7859
7860 if (lang_hooks.eh_protect_cleanup_actions != NULL)
7861 decl = lang_hooks.eh_protect_cleanup_actions ();
7862 else
7863 decl = builtin_decl_explicit (BUILT_IN_TRAP);
7864
7865 g = gimple_build_eh_must_not_throw (decl);
7866 g = gimple_build_try (body, gimple_seq_alloc_with_stmt (g),
7867 GIMPLE_TRY_CATCH);
7868
7869 return gimple_seq_alloc_with_stmt (g);
7870 }
7871
7872 \f
7873 /* Routines to lower OMP directives into OMP-GIMPLE. */
7874
7875 /* If CTX is a worksharing context inside a cancellable parallel
7876 region and it isn't nowait, add an lhs to its GIMPLE_OMP_RETURN
7877 and a conditional branch to the parallel's cancel_label to handle
7878 cancellation in the implicit barrier. */
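/* A simplified sketch of what is appended to BODY (the label name is
   illustrative):

     lhs = <cancellation status from the implicit barrier>;
     if (lhs != false) goto <parallel's cancel_label>; else goto fallthru;
     fallthru: */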
7879
7880 static void
7881 maybe_add_implicit_barrier_cancel (omp_context *ctx, gimple *omp_return,
7882 gimple_seq *body)
7883 {
7884 gcc_assert (gimple_code (omp_return) == GIMPLE_OMP_RETURN);
7885 if (gimple_omp_return_nowait_p (omp_return))
7886 return;
7887 for (omp_context *outer = ctx->outer; outer; outer = outer->outer)
7888 if (gimple_code (outer->stmt) == GIMPLE_OMP_PARALLEL
7889 && outer->cancellable)
7890 {
7891 tree fndecl = builtin_decl_explicit (BUILT_IN_GOMP_CANCEL);
7892 tree c_bool_type = TREE_TYPE (TREE_TYPE (fndecl));
7893 tree lhs = create_tmp_var (c_bool_type);
7894 gimple_omp_return_set_lhs (omp_return, lhs);
7895 tree fallthru_label = create_artificial_label (UNKNOWN_LOCATION);
7896 gimple *g = gimple_build_cond (NE_EXPR, lhs,
7897 fold_convert (c_bool_type,
7898 boolean_false_node),
7899 outer->cancel_label, fallthru_label);
7900 gimple_seq_add_stmt (body, g);
7901 gimple_seq_add_stmt (body, gimple_build_label (fallthru_label));
7902 }
7903 else if (gimple_code (outer->stmt) != GIMPLE_OMP_TASKGROUP)
7904 return;
7905 }
7906
7907 /* Find the first task_reduction or reduction clause or return NULL
7908 if there are none. */
7909
7910 static inline tree
7911 omp_task_reductions_find_first (tree clauses, enum tree_code code,
7912 enum omp_clause_code ccode)
7913 {
7914 while (1)
7915 {
7916 clauses = omp_find_clause (clauses, ccode);
7917 if (clauses == NULL_TREE)
7918 return NULL_TREE;
7919 if (ccode != OMP_CLAUSE_REDUCTION
7920 || code == OMP_TASKLOOP
7921 || OMP_CLAUSE_REDUCTION_TASK (clauses))
7922 return clauses;
7923 clauses = OMP_CLAUSE_CHAIN (clauses);
7924 }
7925 }
7926
7927 static void lower_omp_task_reductions (omp_context *, enum tree_code, tree,
7928 gimple_seq *, gimple_seq *);
7929
7930 /* Lower the OpenMP sections directive in the current statement in GSI_P.
7931 CTX is the enclosing OMP context for the current statement. */
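/* The replacement bind body is assembled below roughly as (illustrative):

     <ilist: privatization and reduction setup>
     GIMPLE_OMP_SECTIONS <clauses, control var>
     GIMPLE_OMP_SECTIONS_SWITCH
     <bind containing the lowered section bodies>
     GIMPLE_OMP_CONTINUE <control, control>
     <olist: reduction merges>  <dlist: destructors>
     GIMPLE_OMP_RETURN [nowait] */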
7932
7933 static void
7934 lower_omp_sections (gimple_stmt_iterator *gsi_p, omp_context *ctx)
7935 {
7936 tree block, control;
7937 gimple_stmt_iterator tgsi;
7938 gomp_sections *stmt;
7939 gimple *t;
7940 gbind *new_stmt, *bind;
7941 gimple_seq ilist, dlist, olist, tred_dlist = NULL, clist = NULL, new_body;
7942
7943 stmt = as_a <gomp_sections *> (gsi_stmt (*gsi_p));
7944
7945 push_gimplify_context ();
7946
7947 dlist = NULL;
7948 ilist = NULL;
7949
7950 tree rclauses
7951 = omp_task_reductions_find_first (gimple_omp_sections_clauses (stmt),
7952 OMP_SECTIONS, OMP_CLAUSE_REDUCTION);
7953 tree rtmp = NULL_TREE;
7954 if (rclauses)
7955 {
7956 tree type = build_pointer_type (pointer_sized_int_node);
7957 tree temp = create_tmp_var (type);
7958 tree c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__REDUCTEMP_);
7959 OMP_CLAUSE_DECL (c) = temp;
7960 OMP_CLAUSE_CHAIN (c) = gimple_omp_sections_clauses (stmt);
7961 gimple_omp_sections_set_clauses (stmt, c);
7962 lower_omp_task_reductions (ctx, OMP_SECTIONS,
7963 gimple_omp_sections_clauses (stmt),
7964 &ilist, &tred_dlist);
7965 rclauses = c;
7966 rtmp = make_ssa_name (type);
7967 gimple_seq_add_stmt (&ilist, gimple_build_assign (rtmp, temp));
7968 }
7969
7970 tree *clauses_ptr = gimple_omp_sections_clauses_ptr (stmt);
7971 lower_lastprivate_conditional_clauses (clauses_ptr, ctx);
7972
7973 lower_rec_input_clauses (gimple_omp_sections_clauses (stmt),
7974 &ilist, &dlist, ctx, NULL);
7975
7976 control = create_tmp_var (unsigned_type_node, ".section");
7977 gimple_omp_sections_set_control (stmt, control);
7978
7979 new_body = gimple_omp_body (stmt);
7980 gimple_omp_set_body (stmt, NULL);
7981 tgsi = gsi_start (new_body);
7982 for (; !gsi_end_p (tgsi); gsi_next (&tgsi))
7983 {
7984 omp_context *sctx;
7985 gimple *sec_start;
7986
7987 sec_start = gsi_stmt (tgsi);
7988 sctx = maybe_lookup_ctx (sec_start);
7989 gcc_assert (sctx);
7990
7991 lower_omp (gimple_omp_body_ptr (sec_start), sctx);
7992 gsi_insert_seq_after (&tgsi, gimple_omp_body (sec_start),
7993 GSI_CONTINUE_LINKING);
7994 gimple_omp_set_body (sec_start, NULL);
7995
7996 if (gsi_one_before_end_p (tgsi))
7997 {
7998 gimple_seq l = NULL;
7999 lower_lastprivate_clauses (gimple_omp_sections_clauses (stmt), NULL,
8000 &ilist, &l, &clist, ctx);
8001 gsi_insert_seq_after (&tgsi, l, GSI_CONTINUE_LINKING);
8002 gimple_omp_section_set_last (sec_start);
8003 }
8004
8005 gsi_insert_after (&tgsi, gimple_build_omp_return (false),
8006 GSI_CONTINUE_LINKING);
8007 }
8008
8009 block = make_node (BLOCK);
8010 bind = gimple_build_bind (NULL, new_body, block);
8011
8012 olist = NULL;
8013 lower_reduction_clauses (gimple_omp_sections_clauses (stmt), &olist,
8014 &clist, ctx);
8015 if (clist)
8016 {
8017 tree fndecl = builtin_decl_explicit (BUILT_IN_GOMP_ATOMIC_START);
8018 gcall *g = gimple_build_call (fndecl, 0);
8019 gimple_seq_add_stmt (&olist, g);
8020 gimple_seq_add_seq (&olist, clist);
8021 fndecl = builtin_decl_explicit (BUILT_IN_GOMP_ATOMIC_END);
8022 g = gimple_build_call (fndecl, 0);
8023 gimple_seq_add_stmt (&olist, g);
8024 }
8025
8026 block = make_node (BLOCK);
8027 new_stmt = gimple_build_bind (NULL, NULL, block);
8028 gsi_replace (gsi_p, new_stmt, true);
8029
8030 pop_gimplify_context (new_stmt);
8031 gimple_bind_append_vars (new_stmt, ctx->block_vars);
8032 BLOCK_VARS (block) = gimple_bind_vars (bind);
8033 if (BLOCK_VARS (block))
8034 TREE_USED (block) = 1;
8035
8036 new_body = NULL;
8037 gimple_seq_add_seq (&new_body, ilist);
8038 gimple_seq_add_stmt (&new_body, stmt);
8039 gimple_seq_add_stmt (&new_body, gimple_build_omp_sections_switch ());
8040 gimple_seq_add_stmt (&new_body, bind);
8041
8042 t = gimple_build_omp_continue (control, control);
8043 gimple_seq_add_stmt (&new_body, t);
8044
8045 gimple_seq_add_seq (&new_body, olist);
8046 if (ctx->cancellable)
8047 gimple_seq_add_stmt (&new_body, gimple_build_label (ctx->cancel_label));
8048 gimple_seq_add_seq (&new_body, dlist);
8049
8050 new_body = maybe_catch_exception (new_body);
8051
8052 bool nowait = omp_find_clause (gimple_omp_sections_clauses (stmt),
8053 OMP_CLAUSE_NOWAIT) != NULL_TREE;
8054 t = gimple_build_omp_return (nowait);
8055 gimple_seq_add_stmt (&new_body, t);
8056 gimple_seq_add_seq (&new_body, tred_dlist);
8057 maybe_add_implicit_barrier_cancel (ctx, t, &new_body);
8058
8059 if (rclauses)
8060 OMP_CLAUSE_DECL (rclauses) = rtmp;
8061
8062 gimple_bind_set_body (new_stmt, new_body);
8063 }
8064
8065
8066 /* A subroutine of lower_omp_single. Expand the simple form of
8067 a GIMPLE_OMP_SINGLE, without a copyprivate clause:
8068
8069 if (GOMP_single_start ())
8070 BODY;
8071 [ GOMP_barrier (); ] -> unless 'nowait' is present.
8072
8073 FIXME. It may be better to delay expanding the logic of this until
8074 pass_expand_omp. The expanded logic may make the job more difficult
8075 for a synchronization analysis pass. */
8076
8077 static void
8078 lower_omp_single_simple (gomp_single *single_stmt, gimple_seq *pre_p)
8079 {
8080 location_t loc = gimple_location (single_stmt);
8081 tree tlabel = create_artificial_label (loc);
8082 tree flabel = create_artificial_label (loc);
8083 gimple *call, *cond;
8084 tree lhs, decl;
8085
8086 decl = builtin_decl_explicit (BUILT_IN_GOMP_SINGLE_START);
8087 lhs = create_tmp_var (TREE_TYPE (TREE_TYPE (decl)));
8088 call = gimple_build_call (decl, 0);
8089 gimple_call_set_lhs (call, lhs);
8090 gimple_seq_add_stmt (pre_p, call);
8091
8092 cond = gimple_build_cond (EQ_EXPR, lhs,
8093 fold_convert_loc (loc, TREE_TYPE (lhs),
8094 boolean_true_node),
8095 tlabel, flabel);
8096 gimple_seq_add_stmt (pre_p, cond);
8097 gimple_seq_add_stmt (pre_p, gimple_build_label (tlabel));
8098 gimple_seq_add_seq (pre_p, gimple_omp_body (single_stmt));
8099 gimple_seq_add_stmt (pre_p, gimple_build_label (flabel));
8100 }
8101
8102
8103 /* A subroutine of lower_omp_single. Expand the form of a
8104 GIMPLE_OMP_SINGLE that has a copyprivate clause:
8105
8106 #pragma omp single copyprivate (a, b, c)
8107
8108 Create a new structure to hold copies of 'a', 'b' and 'c' and emit:
8109
8110 {
8111 if ((copyout_p = GOMP_single_copy_start ()) == NULL)
8112 {
8113 BODY;
8114 copyout.a = a;
8115 copyout.b = b;
8116 copyout.c = c;
8117 GOMP_single_copy_end (&copyout);
8118 }
8119 else
8120 {
8121 a = copyout_p->a;
8122 b = copyout_p->b;
8123 c = copyout_p->c;
8124 }
8125 GOMP_barrier ();
8126 }
8127
8128 FIXME. It may be better to delay expanding the logic of this until
8129 pass_expand_omp. The expanded logic may make the job more difficult
8130 for a synchronization analysis pass. */
8131
8132 static void
8133 lower_omp_single_copy (gomp_single *single_stmt, gimple_seq *pre_p,
8134 omp_context *ctx)
8135 {
8136 tree ptr_type, t, l0, l1, l2, bfn_decl;
8137 gimple_seq copyin_seq;
8138 location_t loc = gimple_location (single_stmt);
8139
8140 ctx->sender_decl = create_tmp_var (ctx->record_type, ".omp_copy_o");
8141
8142 ptr_type = build_pointer_type (ctx->record_type);
8143 ctx->receiver_decl = create_tmp_var (ptr_type, ".omp_copy_i");
8144
8145 l0 = create_artificial_label (loc);
8146 l1 = create_artificial_label (loc);
8147 l2 = create_artificial_label (loc);
8148
8149 bfn_decl = builtin_decl_explicit (BUILT_IN_GOMP_SINGLE_COPY_START);
8150 t = build_call_expr_loc (loc, bfn_decl, 0);
8151 t = fold_convert_loc (loc, ptr_type, t);
8152 gimplify_assign (ctx->receiver_decl, t, pre_p);
8153
8154 t = build2 (EQ_EXPR, boolean_type_node, ctx->receiver_decl,
8155 build_int_cst (ptr_type, 0));
8156 t = build3 (COND_EXPR, void_type_node, t,
8157 build_and_jump (&l0), build_and_jump (&l1));
8158 gimplify_and_add (t, pre_p);
8159
8160 gimple_seq_add_stmt (pre_p, gimple_build_label (l0));
8161
8162 gimple_seq_add_seq (pre_p, gimple_omp_body (single_stmt));
8163
8164 copyin_seq = NULL;
8165 lower_copyprivate_clauses (gimple_omp_single_clauses (single_stmt), pre_p,
8166 &copyin_seq, ctx);
8167
8168 t = build_fold_addr_expr_loc (loc, ctx->sender_decl);
8169 bfn_decl = builtin_decl_explicit (BUILT_IN_GOMP_SINGLE_COPY_END);
8170 t = build_call_expr_loc (loc, bfn_decl, 1, t);
8171 gimplify_and_add (t, pre_p);
8172
8173 t = build_and_jump (&l2);
8174 gimplify_and_add (t, pre_p);
8175
8176 gimple_seq_add_stmt (pre_p, gimple_build_label (l1));
8177
8178 gimple_seq_add_seq (pre_p, copyin_seq);
8179
8180 gimple_seq_add_stmt (pre_p, gimple_build_label (l2));
8181 }
8182
8183
8184 /* Expand code for an OpenMP single directive. */
8185
8186 static void
8187 lower_omp_single (gimple_stmt_iterator *gsi_p, omp_context *ctx)
8188 {
8189 tree block;
8190 gomp_single *single_stmt = as_a <gomp_single *> (gsi_stmt (*gsi_p));
8191 gbind *bind;
8192 gimple_seq bind_body, bind_body_tail = NULL, dlist;
8193
8194 push_gimplify_context ();
8195
8196 block = make_node (BLOCK);
8197 bind = gimple_build_bind (NULL, NULL, block);
8198 gsi_replace (gsi_p, bind, true);
8199 bind_body = NULL;
8200 dlist = NULL;
8201 lower_rec_input_clauses (gimple_omp_single_clauses (single_stmt),
8202 &bind_body, &dlist, ctx, NULL);
8203 lower_omp (gimple_omp_body_ptr (single_stmt), ctx);
8204
8205 gimple_seq_add_stmt (&bind_body, single_stmt);
8206
8207 if (ctx->record_type)
8208 lower_omp_single_copy (single_stmt, &bind_body, ctx);
8209 else
8210 lower_omp_single_simple (single_stmt, &bind_body);
8211
8212 gimple_omp_set_body (single_stmt, NULL);
8213
8214 gimple_seq_add_seq (&bind_body, dlist);
8215
8216 bind_body = maybe_catch_exception (bind_body);
8217
8218 bool nowait = omp_find_clause (gimple_omp_single_clauses (single_stmt),
8219 OMP_CLAUSE_NOWAIT) != NULL_TREE;
8220 gimple *g = gimple_build_omp_return (nowait);
8221 gimple_seq_add_stmt (&bind_body_tail, g);
8222 maybe_add_implicit_barrier_cancel (ctx, g, &bind_body_tail);
8223 if (ctx->record_type)
8224 {
8225 gimple_stmt_iterator gsi = gsi_start (bind_body_tail);
8226 tree clobber = build_clobber (ctx->record_type);
8227 gsi_insert_after (&gsi, gimple_build_assign (ctx->sender_decl,
8228 clobber), GSI_SAME_STMT);
8229 }
8230 gimple_seq_add_seq (&bind_body, bind_body_tail);
8231 gimple_bind_set_body (bind, bind_body);
8232
8233 pop_gimplify_context (bind);
8234
8235 gimple_bind_append_vars (bind, ctx->block_vars);
8236 BLOCK_VARS (block) = ctx->block_vars;
8237 if (BLOCK_VARS (block))
8238 TREE_USED (block) = 1;
8239 }
8240
8241
8242 /* Expand code for an OpenMP master directive. */
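/* A simplified sketch of the expansion (LAB is illustrative):

     if (omp_get_thread_num () != 0) goto LAB;
     <body>
     LAB:
     GIMPLE_OMP_RETURN (nowait) */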
8243
8244 static void
8245 lower_omp_master (gimple_stmt_iterator *gsi_p, omp_context *ctx)
8246 {
8247 tree block, lab = NULL, x, bfn_decl;
8248 gimple *stmt = gsi_stmt (*gsi_p);
8249 gbind *bind;
8250 location_t loc = gimple_location (stmt);
8251 gimple_seq tseq;
8252
8253 push_gimplify_context ();
8254
8255 block = make_node (BLOCK);
8256 bind = gimple_build_bind (NULL, NULL, block);
8257 gsi_replace (gsi_p, bind, true);
8258 gimple_bind_add_stmt (bind, stmt);
8259
8260 bfn_decl = builtin_decl_explicit (BUILT_IN_OMP_GET_THREAD_NUM);
8261 x = build_call_expr_loc (loc, bfn_decl, 0);
8262 x = build2 (EQ_EXPR, boolean_type_node, x, integer_zero_node);
8263 x = build3 (COND_EXPR, void_type_node, x, NULL, build_and_jump (&lab));
8264 tseq = NULL;
8265 gimplify_and_add (x, &tseq);
8266 gimple_bind_add_seq (bind, tseq);
8267
8268 lower_omp (gimple_omp_body_ptr (stmt), ctx);
8269 gimple_omp_set_body (stmt, maybe_catch_exception (gimple_omp_body (stmt)));
8270 gimple_bind_add_seq (bind, gimple_omp_body (stmt));
8271 gimple_omp_set_body (stmt, NULL);
8272
8273 gimple_bind_add_stmt (bind, gimple_build_label (lab));
8274
8275 gimple_bind_add_stmt (bind, gimple_build_omp_return (true));
8276
8277 pop_gimplify_context (bind);
8278
8279 gimple_bind_append_vars (bind, ctx->block_vars);
8280 BLOCK_VARS (block) = ctx->block_vars;
8281 }
8282
8283 /* Helper function for lower_omp_task_reductions. For a specific PASS,
8284 find the next clause that should be processed, or return false
8285 if all have been processed already. */
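/* As an added clarification: pass 0 accepts only clauses whose
   privatized type has a compile-time constant size, while pass 1 picks
   up MEM_REF (array section) and variable-sized clauses, so the caller
   lays out all fixed-size record fields before the variable-sized
   ones. */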
8286
8287 static inline bool
8288 omp_task_reduction_iterate (int pass, enum tree_code code,
8289 enum omp_clause_code ccode, tree *c, tree *decl,
8290 tree *type, tree *next)
8291 {
8292 for (; *c; *c = omp_find_clause (OMP_CLAUSE_CHAIN (*c), ccode))
8293 {
8294 if (ccode == OMP_CLAUSE_REDUCTION
8295 && code != OMP_TASKLOOP
8296 && !OMP_CLAUSE_REDUCTION_TASK (*c))
8297 continue;
8298 *decl = OMP_CLAUSE_DECL (*c);
8299 *type = TREE_TYPE (*decl);
8300 if (TREE_CODE (*decl) == MEM_REF)
8301 {
8302 if (pass != 1)
8303 continue;
8304 }
8305 else
8306 {
8307 if (omp_is_reference (*decl))
8308 *type = TREE_TYPE (*type);
8309 if (pass != (!TREE_CONSTANT (TYPE_SIZE_UNIT (*type))))
8310 continue;
8311 }
8312 *next = omp_find_clause (OMP_CLAUSE_CHAIN (*c), ccode);
8313 return true;
8314 }
8315 *decl = NULL_TREE;
8316 *type = NULL_TREE;
8317 *next = NULL_TREE;
8318 return false;
8319 }
8320
8321 /* Lower task_reduction and reduction clauses (the latter, unless CODE is
8322 OMP_TASKGROUP, only with the task modifier). Register the mapping of those
8323 in the START sequence; reduce and unregister them in the END sequence. */
8324
8325 static void
8326 lower_omp_task_reductions (omp_context *ctx, enum tree_code code, tree clauses,
8327 gimple_seq *start, gimple_seq *end)
8328 {
8329 enum omp_clause_code ccode
8330 = (code == OMP_TASKGROUP
8331 ? OMP_CLAUSE_TASK_REDUCTION : OMP_CLAUSE_REDUCTION);
8332 tree cancellable = NULL_TREE;
8333 clauses = omp_task_reductions_find_first (clauses, code, ccode);
8334 if (clauses == NULL_TREE)
8335 return;
8336 if (code == OMP_FOR || code == OMP_SECTIONS)
8337 {
8338 for (omp_context *outer = ctx->outer; outer; outer = outer->outer)
8339 if (gimple_code (outer->stmt) == GIMPLE_OMP_PARALLEL
8340 && outer->cancellable)
8341 {
8342 cancellable = error_mark_node;
8343 break;
8344 }
8345 else if (gimple_code (outer->stmt) != GIMPLE_OMP_TASKGROUP)
8346 break;
8347 }
8348 tree record_type = lang_hooks.types.make_type (RECORD_TYPE);
8349 tree *last = &TYPE_FIELDS (record_type);
8350 unsigned cnt = 0;
8351 if (cancellable)
8352 {
8353 tree field = build_decl (UNKNOWN_LOCATION, FIELD_DECL, NULL_TREE,
8354 ptr_type_node);
8355 tree ifield = build_decl (UNKNOWN_LOCATION, FIELD_DECL, NULL_TREE,
8356 integer_type_node);
8357 *last = field;
8358 DECL_CHAIN (field) = ifield;
8359 last = &DECL_CHAIN (ifield);
8360 DECL_CONTEXT (field) = record_type;
8361 if (TYPE_ALIGN (record_type) < DECL_ALIGN (field))
8362 SET_TYPE_ALIGN (record_type, DECL_ALIGN (field));
8363 DECL_CONTEXT (ifield) = record_type;
8364 if (TYPE_ALIGN (record_type) < DECL_ALIGN (ifield))
8365 SET_TYPE_ALIGN (record_type, DECL_ALIGN (ifield));
8366 }
8367 for (int pass = 0; pass < 2; pass++)
8368 {
8369 tree decl, type, next;
8370 for (tree c = clauses;
8371 omp_task_reduction_iterate (pass, code, ccode,
8372 &c, &decl, &type, &next); c = next)
8373 {
8374 ++cnt;
8375 tree new_type = type;
8376 if (ctx->outer)
8377 new_type = remap_type (type, &ctx->outer->cb);
8378 tree field
8379 = build_decl (OMP_CLAUSE_LOCATION (c), FIELD_DECL,
8380 DECL_P (decl) ? DECL_NAME (decl) : NULL_TREE,
8381 new_type);
8382 if (DECL_P (decl) && type == TREE_TYPE (decl))
8383 {
8384 SET_DECL_ALIGN (field, DECL_ALIGN (decl));
8385 DECL_USER_ALIGN (field) = DECL_USER_ALIGN (decl);
8386 TREE_THIS_VOLATILE (field) = TREE_THIS_VOLATILE (decl);
8387 }
8388 else
8389 SET_DECL_ALIGN (field, TYPE_ALIGN (type));
8390 DECL_CONTEXT (field) = record_type;
8391 if (TYPE_ALIGN (record_type) < DECL_ALIGN (field))
8392 SET_TYPE_ALIGN (record_type, DECL_ALIGN (field));
8393 *last = field;
8394 last = &DECL_CHAIN (field);
8395 tree bfield
8396 = build_decl (OMP_CLAUSE_LOCATION (c), FIELD_DECL, NULL_TREE,
8397 boolean_type_node);
8398 DECL_CONTEXT (bfield) = record_type;
8399 if (TYPE_ALIGN (record_type) < DECL_ALIGN (bfield))
8400 SET_TYPE_ALIGN (record_type, DECL_ALIGN (bfield));
8401 *last = bfield;
8402 last = &DECL_CHAIN (bfield);
8403 }
8404 }
8405 *last = NULL_TREE;
8406 layout_type (record_type);
8407
8408 /* Build up an array which registers with the runtime all the reductions
8409 and deregisters them at the end. Format documented in libgomp/task.c. */
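/* As can be read off the assignments below (the authoritative format
   lives in libgomp/task.c): element [0] holds the reduction count,
   [1] the per-thread chunk size rounded up to a whole 64-byte cache
   line, [2] the required alignment, [3] and [4] are seeded with -1
   and 0 for the runtime, and each reduction I stores its address and
   field offset starting at index 7 + 3*I; the remaining slots are
   filled in by the runtime. */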
8410 tree atype = build_array_type_nelts (pointer_sized_int_node, 7 + cnt * 3);
8411 tree avar = create_tmp_var_raw (atype);
8412 gimple_add_tmp_var (avar);
8413 TREE_ADDRESSABLE (avar) = 1;
8414 tree r = build4 (ARRAY_REF, pointer_sized_int_node, avar, size_zero_node,
8415 NULL_TREE, NULL_TREE);
8416 tree t = build_int_cst (pointer_sized_int_node, cnt);
8417 gimple_seq_add_stmt (start, gimple_build_assign (r, t));
8418 gimple_seq seq = NULL;
8419 tree sz = fold_convert (pointer_sized_int_node,
8420 TYPE_SIZE_UNIT (record_type));
8421 int cachesz = 64;
8422 sz = fold_build2 (PLUS_EXPR, pointer_sized_int_node, sz,
8423 build_int_cst (pointer_sized_int_node, cachesz - 1));
8424 sz = fold_build2 (BIT_AND_EXPR, pointer_sized_int_node, sz,
8425 build_int_cst (pointer_sized_int_node, ~(cachesz - 1)));
8426 ctx->task_reductions.create (1 + cnt);
8427 ctx->task_reduction_map = new hash_map<tree, unsigned>;
8428 ctx->task_reductions.quick_push (TREE_CODE (sz) == INTEGER_CST
8429 ? sz : NULL_TREE);
8430 sz = force_gimple_operand (sz, &seq, true, NULL_TREE);
8431 gimple_seq_add_seq (start, seq);
8432 r = build4 (ARRAY_REF, pointer_sized_int_node, avar, size_one_node,
8433 NULL_TREE, NULL_TREE);
8434 gimple_seq_add_stmt (start, gimple_build_assign (r, sz));
8435 r = build4 (ARRAY_REF, pointer_sized_int_node, avar, size_int (2),
8436 NULL_TREE, NULL_TREE);
8437 t = build_int_cst (pointer_sized_int_node,
8438 MAX (TYPE_ALIGN_UNIT (record_type), (unsigned) cachesz));
8439 gimple_seq_add_stmt (start, gimple_build_assign (r, t));
8440 r = build4 (ARRAY_REF, pointer_sized_int_node, avar, size_int (3),
8441 NULL_TREE, NULL_TREE);
8442 t = build_int_cst (pointer_sized_int_node, -1);
8443 gimple_seq_add_stmt (start, gimple_build_assign (r, t));
8444 r = build4 (ARRAY_REF, pointer_sized_int_node, avar, size_int (4),
8445 NULL_TREE, NULL_TREE);
8446 t = build_int_cst (pointer_sized_int_node, 0);
8447 gimple_seq_add_stmt (start, gimple_build_assign (r, t));
8448
8449 /* In END, build a loop that iterates from 0 to omp_get_num_threads () - 1
8450 and, for each task reduction, checks a bool right after the private
8451 variable within that thread's chunk; if the bool is clear, it hasn't been
8452 initialized and thus isn't going to be reduced or destructed; otherwise
8453 reduce and destruct it. */
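/* A rough sketch of the loop built here (labels illustrative):

     idx = 0;
   LAB1:
     ptr = (struct record *) data;
     if (ptr->initialized_flag) { <merge into the original>; <destruct>; }
     data += sz; idx += 1;
     if (idx != num_thr_sz) goto LAB1;
   LAB2: */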
8454 tree idx = create_tmp_var (size_type_node);
8455 gimple_seq_add_stmt (end, gimple_build_assign (idx, size_zero_node));
8456 tree num_thr_sz = create_tmp_var (size_type_node);
8457 tree lab1 = create_artificial_label (UNKNOWN_LOCATION);
8458 tree lab2 = create_artificial_label (UNKNOWN_LOCATION);
8459 tree lab3 = NULL_TREE;
8460 gimple *g;
8461 if (code == OMP_FOR || code == OMP_SECTIONS)
8462 {
8463 /* For worksharing constructs, perform this only in the master thread,
8464 with the exception of cancelled implicit barriers, where only the
8465 current thread is handled. */
8466 tree lab4 = create_artificial_label (UNKNOWN_LOCATION);
8467 t = builtin_decl_explicit (BUILT_IN_OMP_GET_THREAD_NUM);
8468 tree thr_num = create_tmp_var (integer_type_node);
8469 g = gimple_build_call (t, 0);
8470 gimple_call_set_lhs (g, thr_num);
8471 gimple_seq_add_stmt (end, g);
8472 if (cancellable)
8473 {
8474 tree c;
8475 tree lab5 = create_artificial_label (UNKNOWN_LOCATION);
8476 tree lab6 = create_artificial_label (UNKNOWN_LOCATION);
8477 lab3 = create_artificial_label (UNKNOWN_LOCATION);
8478 if (code == OMP_FOR)
8479 c = gimple_omp_for_clauses (ctx->stmt);
8480 else /* if (code == OMP_SECTIONS) */
8481 c = gimple_omp_sections_clauses (ctx->stmt);
8482 c = OMP_CLAUSE_DECL (omp_find_clause (c, OMP_CLAUSE__REDUCTEMP_));
8483 cancellable = c;
8484 g = gimple_build_cond (NE_EXPR, c, build_zero_cst (TREE_TYPE (c)),
8485 lab5, lab6);
8486 gimple_seq_add_stmt (end, g);
8487 gimple_seq_add_stmt (end, gimple_build_label (lab5));
8488 g = gimple_build_assign (idx, NOP_EXPR, thr_num);
8489 gimple_seq_add_stmt (end, g);
8490 g = gimple_build_assign (num_thr_sz, PLUS_EXPR, idx,
8491 build_one_cst (TREE_TYPE (idx)));
8492 gimple_seq_add_stmt (end, g);
8493 gimple_seq_add_stmt (end, gimple_build_goto (lab3));
8494 gimple_seq_add_stmt (end, gimple_build_label (lab6));
8495 }
8496 g = gimple_build_cond (NE_EXPR, thr_num, integer_zero_node, lab2, lab4);
8497 gimple_seq_add_stmt (end, g);
8498 gimple_seq_add_stmt (end, gimple_build_label (lab4));
8499 }
8500 if (code != OMP_PARALLEL)
8501 {
8502 t = builtin_decl_explicit (BUILT_IN_OMP_GET_NUM_THREADS);
8503 tree num_thr = create_tmp_var (integer_type_node);
8504 g = gimple_build_call (t, 0);
8505 gimple_call_set_lhs (g, num_thr);
8506 gimple_seq_add_stmt (end, g);
8507 g = gimple_build_assign (num_thr_sz, NOP_EXPR, num_thr);
8508 gimple_seq_add_stmt (end, g);
8509 if (cancellable)
8510 gimple_seq_add_stmt (end, gimple_build_label (lab3));
8511 }
8512 else
8513 {
8514 tree c = omp_find_clause (gimple_omp_parallel_clauses (ctx->stmt),
8515 OMP_CLAUSE__REDUCTEMP_);
8516 t = fold_convert (pointer_sized_int_node, OMP_CLAUSE_DECL (c));
8517 t = fold_convert (size_type_node, t);
8518 gimplify_assign (num_thr_sz, t, end);
8519 }
8520 t = build4 (ARRAY_REF, pointer_sized_int_node, avar, size_int (2),
8521 NULL_TREE, NULL_TREE);
8522 tree data = create_tmp_var (pointer_sized_int_node);
8523 gimple_seq_add_stmt (end, gimple_build_assign (data, t));
8524 gimple_seq_add_stmt (end, gimple_build_label (lab1));
8525 tree ptr;
8526 if (TREE_CODE (TYPE_SIZE_UNIT (record_type)) == INTEGER_CST)
8527 ptr = create_tmp_var (build_pointer_type (record_type));
8528 else
8529 ptr = create_tmp_var (ptr_type_node);
8530 gimple_seq_add_stmt (end, gimple_build_assign (ptr, NOP_EXPR, data));
8531
8532 tree field = TYPE_FIELDS (record_type);
8533 cnt = 0;
8534 if (cancellable)
8535 field = DECL_CHAIN (DECL_CHAIN (field));
8536 for (int pass = 0; pass < 2; pass++)
8537 {
8538 tree decl, type, next;
8539 for (tree c = clauses;
8540 omp_task_reduction_iterate (pass, code, ccode,
8541 &c, &decl, &type, &next); c = next)
8542 {
8543 tree var = decl, ref;
8544 if (TREE_CODE (decl) == MEM_REF)
8545 {
8546 var = TREE_OPERAND (var, 0);
8547 if (TREE_CODE (var) == POINTER_PLUS_EXPR)
8548 var = TREE_OPERAND (var, 0);
8549 tree v = var;
8550 if (TREE_CODE (var) == ADDR_EXPR)
8551 var = TREE_OPERAND (var, 0);
8552 else if (TREE_CODE (var) == INDIRECT_REF)
8553 var = TREE_OPERAND (var, 0);
8554 tree orig_var = var;
8555 if (is_variable_sized (var))
8556 {
8557 gcc_assert (DECL_HAS_VALUE_EXPR_P (var));
8558 var = DECL_VALUE_EXPR (var);
8559 gcc_assert (TREE_CODE (var) == INDIRECT_REF);
8560 var = TREE_OPERAND (var, 0);
8561 gcc_assert (DECL_P (var));
8562 }
8563 t = ref = maybe_lookup_decl_in_outer_ctx (var, ctx);
8564 if (orig_var != var)
8565 gcc_assert (TREE_CODE (v) == ADDR_EXPR);
8566 else if (TREE_CODE (v) == ADDR_EXPR)
8567 t = build_fold_addr_expr (t);
8568 else if (TREE_CODE (v) == INDIRECT_REF)
8569 t = build_fold_indirect_ref (t);
8570 if (TREE_CODE (TREE_OPERAND (decl, 0)) == POINTER_PLUS_EXPR)
8571 {
8572 tree b = TREE_OPERAND (TREE_OPERAND (decl, 0), 1);
8573 b = maybe_lookup_decl_in_outer_ctx (b, ctx);
8574 t = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (t), t, b);
8575 }
8576 if (!integer_zerop (TREE_OPERAND (decl, 1)))
8577 t = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (t), t,
8578 fold_convert (size_type_node,
8579 TREE_OPERAND (decl, 1)));
8580 }
8581 else
8582 {
8583 t = ref = maybe_lookup_decl_in_outer_ctx (var, ctx);
8584 if (!omp_is_reference (decl))
8585 t = build_fold_addr_expr (t);
8586 }
8587 t = fold_convert (pointer_sized_int_node, t);
8588 seq = NULL;
8589 t = force_gimple_operand (t, &seq, true, NULL_TREE);
8590 gimple_seq_add_seq (start, seq);
8591 r = build4 (ARRAY_REF, pointer_sized_int_node, avar,
8592 size_int (7 + cnt * 3), NULL_TREE, NULL_TREE);
8593 gimple_seq_add_stmt (start, gimple_build_assign (r, t));
8594 t = unshare_expr (byte_position (field));
8595 t = fold_convert (pointer_sized_int_node, t);
8596 ctx->task_reduction_map->put (c, cnt);
8597 ctx->task_reductions.quick_push (TREE_CODE (t) == INTEGER_CST
8598 ? t : NULL_TREE);
8599 seq = NULL;
8600 t = force_gimple_operand (t, &seq, true, NULL_TREE);
8601 gimple_seq_add_seq (start, seq);
8602 r = build4 (ARRAY_REF, pointer_sized_int_node, avar,
8603 size_int (7 + cnt * 3 + 1), NULL_TREE, NULL_TREE);
8604 gimple_seq_add_stmt (start, gimple_build_assign (r, t));
8605
8606 tree bfield = DECL_CHAIN (field);
8607 tree cond;
8608 if (code == OMP_PARALLEL || code == OMP_FOR || code == OMP_SECTIONS)
8609 /* In parallel or worksharing all threads unconditionally
8610 initialize all their task reduction private variables. */
8611 cond = boolean_true_node;
8612 else if (TREE_TYPE (ptr) == ptr_type_node)
8613 {
8614 cond = build2 (POINTER_PLUS_EXPR, ptr_type_node, ptr,
8615 unshare_expr (byte_position (bfield)));
8616 seq = NULL;
8617 cond = force_gimple_operand (cond, &seq, true, NULL_TREE);
8618 gimple_seq_add_seq (end, seq);
8619 tree pbool = build_pointer_type (TREE_TYPE (bfield));
8620 cond = build2 (MEM_REF, TREE_TYPE (bfield), cond,
8621 build_int_cst (pbool, 0));
8622 }
8623 else
8624 cond = build3 (COMPONENT_REF, TREE_TYPE (bfield),
8625 build_simple_mem_ref (ptr), bfield, NULL_TREE);
8626 tree lab3 = create_artificial_label (UNKNOWN_LOCATION);
8627 tree lab4 = create_artificial_label (UNKNOWN_LOCATION);
8628 tree condv = create_tmp_var (boolean_type_node);
8629 gimple_seq_add_stmt (end, gimple_build_assign (condv, cond));
8630 g = gimple_build_cond (NE_EXPR, condv, boolean_false_node,
8631 lab3, lab4);
8632 gimple_seq_add_stmt (end, g);
8633 gimple_seq_add_stmt (end, gimple_build_label (lab3));
8634 if (cancellable && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) == NULL_TREE)
8635 {
8636 /* If this reduction doesn't need destruction and the parallel
8637 has been cancelled, there is nothing to do for this
8638 reduction, so jump around the merge operation. */
8639 tree lab5 = create_artificial_label (UNKNOWN_LOCATION);
8640 g = gimple_build_cond (NE_EXPR, cancellable,
8641 build_zero_cst (TREE_TYPE (cancellable)),
8642 lab4, lab5);
8643 gimple_seq_add_stmt (end, g);
8644 gimple_seq_add_stmt (end, gimple_build_label (lab5));
8645 }
8646
8647 tree new_var;
8648 if (TREE_TYPE (ptr) == ptr_type_node)
8649 {
8650 new_var = build2 (POINTER_PLUS_EXPR, ptr_type_node, ptr,
8651 unshare_expr (byte_position (field)));
8652 seq = NULL;
8653 new_var = force_gimple_operand (new_var, &seq, true, NULL_TREE);
8654 gimple_seq_add_seq (end, seq);
8655 tree pbool = build_pointer_type (TREE_TYPE (field));
8656 new_var = build2 (MEM_REF, TREE_TYPE (field), new_var,
8657 build_int_cst (pbool, 0));
8658 }
8659 else
8660 new_var = build3 (COMPONENT_REF, TREE_TYPE (field),
8661 build_simple_mem_ref (ptr), field, NULL_TREE);
8662
8663 enum tree_code rcode = OMP_CLAUSE_REDUCTION_CODE (c);
8664 if (TREE_CODE (decl) != MEM_REF && omp_is_reference (decl))
8665 ref = build_simple_mem_ref (ref);
8666 /* reduction(-:var) sums up the partial results, so it acts
8667 identically to reduction(+:var). */
8668 if (rcode == MINUS_EXPR)
8669 rcode = PLUS_EXPR;
8670 if (TREE_CODE (decl) == MEM_REF)
8671 {
8672 tree type = TREE_TYPE (new_var);
8673 tree v = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
8674 tree i = create_tmp_var (TREE_TYPE (v));
8675 tree ptype = build_pointer_type (TREE_TYPE (type));
8676 if (DECL_P (v))
8677 {
8678 v = maybe_lookup_decl_in_outer_ctx (v, ctx);
8679 tree vv = create_tmp_var (TREE_TYPE (v));
8680 gimplify_assign (vv, v, start);
8681 v = vv;
8682 }
8683 ref = build4 (ARRAY_REF, pointer_sized_int_node, avar,
8684 size_int (7 + cnt * 3), NULL_TREE, NULL_TREE);
8685 new_var = build_fold_addr_expr (new_var);
8686 new_var = fold_convert (ptype, new_var);
8687 ref = fold_convert (ptype, ref);
8688 tree m = create_tmp_var (ptype);
8689 gimplify_assign (m, new_var, end);
8690 new_var = m;
8691 m = create_tmp_var (ptype);
8692 gimplify_assign (m, ref, end);
8693 ref = m;
8694 gimplify_assign (i, build_int_cst (TREE_TYPE (v), 0), end);
8695 tree body = create_artificial_label (UNKNOWN_LOCATION);
8696 tree endl = create_artificial_label (UNKNOWN_LOCATION);
8697 gimple_seq_add_stmt (end, gimple_build_label (body));
8698 tree priv = build_simple_mem_ref (new_var);
8699 tree out = build_simple_mem_ref (ref);
8700 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
8701 {
8702 tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
8703 tree decl_placeholder
8704 = OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c);
8705 tree lab6 = NULL_TREE;
8706 if (cancellable)
8707 {
8708 /* If this reduction needs destruction and the parallel
8709 has been cancelled, jump around the merge operation
8710 to the destruction. */
8711 tree lab5 = create_artificial_label (UNKNOWN_LOCATION);
8712 lab6 = create_artificial_label (UNKNOWN_LOCATION);
8713 tree zero = build_zero_cst (TREE_TYPE (cancellable));
8714 g = gimple_build_cond (NE_EXPR, cancellable, zero,
8715 lab6, lab5);
8716 gimple_seq_add_stmt (end, g);
8717 gimple_seq_add_stmt (end, gimple_build_label (lab5));
8718 }
8719 SET_DECL_VALUE_EXPR (placeholder, out);
8720 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
8721 SET_DECL_VALUE_EXPR (decl_placeholder, priv);
8722 DECL_HAS_VALUE_EXPR_P (decl_placeholder) = 1;
8723 lower_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c), ctx);
8724 gimple_seq_add_seq (end,
8725 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c));
8726 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
8727 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_TASK_REDUCTION)
8728 {
8729 OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) = NULL;
8730 OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c) = NULL;
8731 }
8732 if (cancellable)
8733 gimple_seq_add_stmt (end, gimple_build_label (lab6));
8734 tree x = lang_hooks.decls.omp_clause_dtor (c, priv);
8735 if (x)
8736 {
8737 gimple_seq tseq = NULL;
8738 gimplify_stmt (&x, &tseq);
8739 gimple_seq_add_seq (end, tseq);
8740 }
8741 }
8742 else
8743 {
8744 tree x = build2 (rcode, TREE_TYPE (out), out, priv);
8745 out = unshare_expr (out);
8746 gimplify_assign (out, x, end);
8747 }
8748 gimple *g
8749 = gimple_build_assign (new_var, POINTER_PLUS_EXPR, new_var,
8750 TYPE_SIZE_UNIT (TREE_TYPE (type)));
8751 gimple_seq_add_stmt (end, g);
8752 g = gimple_build_assign (ref, POINTER_PLUS_EXPR, ref,
8753 TYPE_SIZE_UNIT (TREE_TYPE (type)));
8754 gimple_seq_add_stmt (end, g);
8755 g = gimple_build_assign (i, PLUS_EXPR, i,
8756 build_int_cst (TREE_TYPE (i), 1));
8757 gimple_seq_add_stmt (end, g);
8758 g = gimple_build_cond (LE_EXPR, i, v, body, endl);
8759 gimple_seq_add_stmt (end, g);
8760 gimple_seq_add_stmt (end, gimple_build_label (endl));
8761 }
8762 else if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
8763 {
8764 tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
8765 tree oldv = NULL_TREE;
8766 tree lab6 = NULL_TREE;
8767 if (cancellable)
8768 {
8769 /* If this reduction needs destruction and the parallel
8770 has been cancelled, jump around the merge operation
8771 to the destruction. */
8772 tree lab5 = create_artificial_label (UNKNOWN_LOCATION);
8773 lab6 = create_artificial_label (UNKNOWN_LOCATION);
8774 tree zero = build_zero_cst (TREE_TYPE (cancellable));
8775 g = gimple_build_cond (NE_EXPR, cancellable, zero,
8776 lab6, lab5);
8777 gimple_seq_add_stmt (end, g);
8778 gimple_seq_add_stmt (end, gimple_build_label (lab5));
8779 }
8780 if (omp_is_reference (decl)
8781 && !useless_type_conversion_p (TREE_TYPE (placeholder),
8782 TREE_TYPE (ref)))
8783 ref = build_fold_addr_expr_loc (OMP_CLAUSE_LOCATION (c), ref);
8784 ref = build_fold_addr_expr_loc (OMP_CLAUSE_LOCATION (c), ref);
8785 tree refv = create_tmp_var (TREE_TYPE (ref));
8786 gimplify_assign (refv, ref, end);
8787 ref = build_simple_mem_ref_loc (OMP_CLAUSE_LOCATION (c), refv);
8788 SET_DECL_VALUE_EXPR (placeholder, ref);
8789 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
8790 tree d = maybe_lookup_decl (decl, ctx);
8791 gcc_assert (d);
8792 if (DECL_HAS_VALUE_EXPR_P (d))
8793 oldv = DECL_VALUE_EXPR (d);
8794 if (omp_is_reference (var))
8795 {
8796 tree v = fold_convert (TREE_TYPE (d),
8797 build_fold_addr_expr (new_var));
8798 SET_DECL_VALUE_EXPR (d, v);
8799 }
8800 else
8801 SET_DECL_VALUE_EXPR (d, new_var);
8802 DECL_HAS_VALUE_EXPR_P (d) = 1;
8803 lower_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c), ctx);
8804 if (oldv)
8805 SET_DECL_VALUE_EXPR (d, oldv);
8806 else
8807 {
8808 SET_DECL_VALUE_EXPR (d, NULL_TREE);
8809 DECL_HAS_VALUE_EXPR_P (d) = 0;
8810 }
8811 gimple_seq_add_seq (end, OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c));
8812 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
8813 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_TASK_REDUCTION)
8814 OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) = NULL;
8815 if (cancellable)
8816 gimple_seq_add_stmt (end, gimple_build_label (lab6));
8817 tree x = lang_hooks.decls.omp_clause_dtor (c, new_var);
8818 if (x)
8819 {
8820 gimple_seq tseq = NULL;
8821 gimplify_stmt (&x, &tseq);
8822 gimple_seq_add_seq (end, tseq);
8823 }
8824 }
8825 else
8826 {
8827 tree x = build2 (rcode, TREE_TYPE (ref), ref, new_var);
8828 ref = unshare_expr (ref);
8829 gimplify_assign (ref, x, end);
8830 }
8831 gimple_seq_add_stmt (end, gimple_build_label (lab4));
8832 ++cnt;
8833 field = DECL_CHAIN (bfield);
8834 }
8835 }
8836
8837 if (code == OMP_TASKGROUP)
8838 {
8839 t = builtin_decl_explicit (BUILT_IN_GOMP_TASKGROUP_REDUCTION_REGISTER);
8840 g = gimple_build_call (t, 1, build_fold_addr_expr (avar));
8841 gimple_seq_add_stmt (start, g);
8842 }
8843 else
8844 {
8845 tree c;
8846 if (code == OMP_FOR)
8847 c = gimple_omp_for_clauses (ctx->stmt);
8848 else if (code == OMP_SECTIONS)
8849 c = gimple_omp_sections_clauses (ctx->stmt);
8850 else
8851 c = gimple_omp_taskreg_clauses (ctx->stmt);
8852 c = omp_find_clause (c, OMP_CLAUSE__REDUCTEMP_);
8853 t = fold_convert (TREE_TYPE (OMP_CLAUSE_DECL (c)),
8854 build_fold_addr_expr (avar));
8855 gimplify_assign (OMP_CLAUSE_DECL (c), t, start);
8856 }
8857
8858 gimple_seq_add_stmt (end, gimple_build_assign (data, PLUS_EXPR, data, sz));
8859 gimple_seq_add_stmt (end, gimple_build_assign (idx, PLUS_EXPR, idx,
8860 size_one_node));
8861 g = gimple_build_cond (NE_EXPR, idx, num_thr_sz, lab1, lab2);
8862 gimple_seq_add_stmt (end, g);
8863 gimple_seq_add_stmt (end, gimple_build_label (lab2));
8864 if (code == OMP_FOR || code == OMP_SECTIONS)
8865 {
8866 enum built_in_function bfn
8867 = BUILT_IN_GOMP_WORKSHARE_TASK_REDUCTION_UNREGISTER;
8868 t = builtin_decl_explicit (bfn);
8869 tree c_bool_type = TREE_VALUE (TYPE_ARG_TYPES (TREE_TYPE (t)));
8870 tree arg;
8871 if (cancellable)
8872 {
8873 arg = create_tmp_var (c_bool_type);
8874 gimple_seq_add_stmt (end, gimple_build_assign (arg, NOP_EXPR,
8875 cancellable));
8876 }
8877 else
8878 arg = build_int_cst (c_bool_type, 0);
8879 g = gimple_build_call (t, 1, arg);
8880 }
8881 else
8882 {
8883 t = builtin_decl_explicit (BUILT_IN_GOMP_TASKGROUP_REDUCTION_UNREGISTER);
8884 g = gimple_build_call (t, 1, build_fold_addr_expr (avar));
8885 }
8886 gimple_seq_add_stmt (end, g);
8887 t = build_constructor (atype, NULL);
8888 TREE_THIS_VOLATILE (t) = 1;
8889 gimple_seq_add_stmt (end, gimple_build_assign (avar, t));
8890 }
8891
8892 /* Expand code for an OpenMP taskgroup directive. */
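/* With a task_reduction clause the lowered shape is roughly

     GOMP_taskgroup_start ();
     GOMP_taskgroup_reduction_register (&avar);
     ... taskgroup body ...
     GIMPLE_OMP_RETURN
     ... teardown sequence from lower_omp_task_reductions (DSEQ) ...

   a sketch based on the calls built below; without task_reduction
   clauses the register/teardown parts are simply not emitted.  */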
8893
8894 static void
8895 lower_omp_taskgroup (gimple_stmt_iterator *gsi_p, omp_context *ctx)
8896 {
8897 gimple *stmt = gsi_stmt (*gsi_p);
8898 gcall *x;
8899 gbind *bind;
8900 gimple_seq dseq = NULL;
8901 tree block = make_node (BLOCK);
8902
8903 bind = gimple_build_bind (NULL, NULL, block);
8904 gsi_replace (gsi_p, bind, true);
8905 gimple_bind_add_stmt (bind, stmt);
8906
8907 push_gimplify_context ();
8908
8909 x = gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_TASKGROUP_START),
8910 0);
8911 gimple_bind_add_stmt (bind, x);
8912
8913 lower_omp_task_reductions (ctx, OMP_TASKGROUP,
8914 gimple_omp_taskgroup_clauses (stmt),
8915 gimple_bind_body_ptr (bind), &dseq);
8916
8917 lower_omp (gimple_omp_body_ptr (stmt), ctx);
8918 gimple_bind_add_seq (bind, gimple_omp_body (stmt));
8919 gimple_omp_set_body (stmt, NULL);
8920
8921 gimple_bind_add_stmt (bind, gimple_build_omp_return (true));
8922 gimple_bind_add_seq (bind, dseq);
8923
8924 pop_gimplify_context (bind);
8925
8926 gimple_bind_append_vars (bind, ctx->block_vars);
8927 BLOCK_VARS (block) = ctx->block_vars;
8928 }
8929
8930
8931 /* Fold the OMP_ORDERED_CLAUSES for the OMP_ORDERED in STMT if possible. */
8932
8933 static void
8934 lower_omp_ordered_clauses (gimple_stmt_iterator *gsi_p, gomp_ordered *ord_stmt,
8935 omp_context *ctx)
8936 {
8937 struct omp_for_data fd;
8938 if (!ctx->outer || gimple_code (ctx->outer->stmt) != GIMPLE_OMP_FOR)
8939 return;
8940
8941 unsigned int len = gimple_omp_for_collapse (ctx->outer->stmt);
8942 struct omp_for_data_loop *loops = XALLOCAVEC (struct omp_for_data_loop, len);
8943 omp_extract_for_data (as_a <gomp_for *> (ctx->outer->stmt), &fd, loops);
8944 if (!fd.ordered)
8945 return;
8946
8947 tree *list_p = gimple_omp_ordered_clauses_ptr (ord_stmt);
8948 tree c = gimple_omp_ordered_clauses (ord_stmt);
8949 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND
8950 && OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SINK)
8951 {
8952 /* Merge depend clauses from multiple adjacent
8953 #pragma omp ordered depend(sink:...) constructs
8954 into one #pragma omp ordered depend(sink:...), so that
8955 we can optimize them together. */
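/* E.g. two adjacent stand-alone constructs

     #pragma omp ordered depend (sink: i-1)
     #pragma omp ordered depend (sink: i-2)

   are combined by chaining the clauses of the second statement
   onto the first and removing the second statement; a sketch,
   assuming the usual doacross loop context.  */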
8956 gimple_stmt_iterator gsi = *gsi_p;
8957 gsi_next (&gsi);
8958 while (!gsi_end_p (gsi))
8959 {
8960 gimple *stmt = gsi_stmt (gsi);
8961 if (is_gimple_debug (stmt)
8962 || gimple_code (stmt) == GIMPLE_NOP)
8963 {
8964 gsi_next (&gsi);
8965 continue;
8966 }
8967 if (gimple_code (stmt) != GIMPLE_OMP_ORDERED)
8968 break;
8969 gomp_ordered *ord_stmt2 = as_a <gomp_ordered *> (stmt);
8970 c = gimple_omp_ordered_clauses (ord_stmt2);
8971 if (c == NULL_TREE
8972 || OMP_CLAUSE_CODE (c) != OMP_CLAUSE_DEPEND
8973 || OMP_CLAUSE_DEPEND_KIND (c) != OMP_CLAUSE_DEPEND_SINK)
8974 break;
8975 while (*list_p)
8976 list_p = &OMP_CLAUSE_CHAIN (*list_p);
8977 *list_p = c;
8978 gsi_remove (&gsi, true);
8979 }
8980 }
8981
8982 /* Canonicalize sink dependence clauses into one folded clause if
8983 possible.
8984
8985 The basic algorithm is to create a sink vector whose first
8986 element is the GCD of all the first elements, and whose remaining
8987 elements are the minimum of the subsequent columns.
8988
8989 We ignore dependence vectors whose first element is zero because
8990 such dependencies are known to be executed by the same thread.
8991
8992 We take into account the direction of the loop, so a minimum
8993 becomes a maximum if the loop is iterating forwards. We also
8994 ignore sink clauses where the loop direction is unknown, or where
8995 the offsets are clearly invalid because they are not a multiple
8996 of the loop increment.
8997
8998 For example:
8999
9000 #pragma omp for ordered(2)
9001 for (i=0; i < N; ++i)
9002 for (j=0; j < M; ++j)
9003 {
9004 #pragma omp ordered \
9005 depend(sink:i-8,j-2) \
9006 depend(sink:i,j-1) \ // Completely ignored because i+0.
9007 depend(sink:i-4,j-3) \
9008 depend(sink:i-6,j-4)
9009 #pragma omp ordered depend(source)
9010 }
9011
9012 The folded clause is:
9013
9014 depend(sink:-gcd(8,4,6),-min(2,3,4))
9015 -or-
9016 depend(sink:-2,-2)
9017 */
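/* In the example above, gcd (8, 4, 6) == 2 and min (2, 3, 4) == 2,
   which is how the folded vector depend(sink:-2,-2) is obtained.  */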
9018
9019 /* FIXME: Computing GCDs where the first element is zero is
9020 non-trivial in the presence of collapsed loops. Do this later. */
9021 if (fd.collapse > 1)
9022 return;
9023
9024 wide_int *folded_deps = XALLOCAVEC (wide_int, 2 * len - 1);
9025
9026 /* wide_int is not a POD so it must be default-constructed. */
9027 for (unsigned i = 0; i != 2 * len - 1; ++i)
9028 new (static_cast<void*>(folded_deps + i)) wide_int ();
9029
9030 tree folded_dep = NULL_TREE;
9031 /* TRUE if the first dimension's offset is negative. */
9032 bool neg_offset_p = false;
9033
9034 list_p = gimple_omp_ordered_clauses_ptr (ord_stmt);
9035 unsigned int i;
9036 while ((c = *list_p) != NULL)
9037 {
9038 bool remove = false;
9039
9040 gcc_assert (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND);
9041 if (OMP_CLAUSE_DEPEND_KIND (c) != OMP_CLAUSE_DEPEND_SINK)
9042 goto next_ordered_clause;
9043
9044 tree vec;
9045 for (vec = OMP_CLAUSE_DECL (c), i = 0;
9046 vec && TREE_CODE (vec) == TREE_LIST;
9047 vec = TREE_CHAIN (vec), ++i)
9048 {
9049 gcc_assert (i < len);
9050
9051 /* omp_extract_for_data has canonicalized the condition. */
9052 gcc_assert (fd.loops[i].cond_code == LT_EXPR
9053 || fd.loops[i].cond_code == GT_EXPR);
9054 bool forward = fd.loops[i].cond_code == LT_EXPR;
9055 bool maybe_lexically_later = true;
9056
9057 /* While the committee makes up its mind, bail if we have any
9058 non-constant steps. */
9059 if (TREE_CODE (fd.loops[i].step) != INTEGER_CST)
9060 goto lower_omp_ordered_ret;
9061
9062 tree itype = TREE_TYPE (TREE_VALUE (vec));
9063 if (POINTER_TYPE_P (itype))
9064 itype = sizetype;
9065 wide_int offset = wide_int::from (wi::to_wide (TREE_PURPOSE (vec)),
9066 TYPE_PRECISION (itype),
9067 TYPE_SIGN (itype));
9068
9069 /* Ignore invalid offsets that are not multiples of the step. */
9070 if (!wi::multiple_of_p (wi::abs (offset),
9071 wi::abs (wi::to_wide (fd.loops[i].step)),
9072 UNSIGNED))
9073 {
9074 warning_at (OMP_CLAUSE_LOCATION (c), 0,
9075 "ignoring sink clause with offset that is not "
9076 "a multiple of the loop step");
9077 remove = true;
9078 goto next_ordered_clause;
9079 }
9080
9081 /* Calculate the first dimension. The first dimension of
9082 the folded dependency vector is the GCD of the first
9083 elements, while ignoring any first elements whose offset
9084 is 0. */
9085 if (i == 0)
9086 {
9087 /* Ignore dependence vectors whose first dimension is 0. */
9088 if (offset == 0)
9089 {
9090 remove = true;
9091 goto next_ordered_clause;
9092 }
9093 else
9094 {
9095 if (!TYPE_UNSIGNED (itype) && (forward ^ wi::neg_p (offset)))
9096 {
9097 error_at (OMP_CLAUSE_LOCATION (c),
9098 "first offset must be in opposite direction "
9099 "of loop iterations");
9100 goto lower_omp_ordered_ret;
9101 }
9102 if (forward)
9103 offset = -offset;
9104 neg_offset_p = forward;
9105 /* Initialize the first time around. */
9106 if (folded_dep == NULL_TREE)
9107 {
9108 folded_dep = c;
9109 folded_deps[0] = offset;
9110 }
9111 else
9112 folded_deps[0] = wi::gcd (folded_deps[0],
9113 offset, UNSIGNED);
9114 }
9115 }
9116 /* Calculate minimum for the remaining dimensions. */
9117 else
9118 {
9119 folded_deps[len + i - 1] = offset;
9120 if (folded_dep == c)
9121 folded_deps[i] = offset;
9122 else if (maybe_lexically_later
9123 && !wi::eq_p (folded_deps[i], offset))
9124 {
9125 if (forward ^ wi::gts_p (folded_deps[i], offset))
9126 {
9127 unsigned int j;
9128 folded_dep = c;
9129 for (j = 1; j <= i; j++)
9130 folded_deps[j] = folded_deps[len + j - 1];
9131 }
9132 else
9133 maybe_lexically_later = false;
9134 }
9135 }
9136 }
9137 gcc_assert (i == len);
9138
9139 remove = true;
9140
9141 next_ordered_clause:
9142 if (remove)
9143 *list_p = OMP_CLAUSE_CHAIN (c);
9144 else
9145 list_p = &OMP_CLAUSE_CHAIN (c);
9146 }
9147
9148 if (folded_dep)
9149 {
9150 if (neg_offset_p)
9151 folded_deps[0] = -folded_deps[0];
9152
9153 tree itype = TREE_TYPE (TREE_VALUE (OMP_CLAUSE_DECL (folded_dep)));
9154 if (POINTER_TYPE_P (itype))
9155 itype = sizetype;
9156
9157 TREE_PURPOSE (OMP_CLAUSE_DECL (folded_dep))
9158 = wide_int_to_tree (itype, folded_deps[0]);
9159 OMP_CLAUSE_CHAIN (folded_dep) = gimple_omp_ordered_clauses (ord_stmt);
9160 *gimple_omp_ordered_clauses_ptr (ord_stmt) = folded_dep;
9161 }
9162
9163 lower_omp_ordered_ret:
9164
9165 /* Ordered without clauses is equivalent to #pragma omp ordered threads,
9166 while we want a nop instead if we removed all the clauses. */
9167 if (gimple_omp_ordered_clauses (ord_stmt) == NULL_TREE)
9168 gsi_replace (gsi_p, gimple_build_nop (), true);
9169 }
9170
9171
9172 /* Expand code for an OpenMP ordered directive. */
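/* E.g. (a sketch) inside a simd loop

     #pragma omp ordered simd
     stmt;

   is rewritten into the internal-call pair

     .GOMP_SIMD_ORDERED_START (threads-flag);
     stmt;
     .GOMP_SIMD_ORDERED_END (threads-flag);

   while the non-simd form calls GOMP_ordered_start and
   GOMP_ordered_end around the body instead.  */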
9173
9174 static void
9175 lower_omp_ordered (gimple_stmt_iterator *gsi_p, omp_context *ctx)
9176 {
9177 tree block;
9178 gimple *stmt = gsi_stmt (*gsi_p), *g;
9179 gomp_ordered *ord_stmt = as_a <gomp_ordered *> (stmt);
9180 gcall *x;
9181 gbind *bind;
9182 bool simd = omp_find_clause (gimple_omp_ordered_clauses (ord_stmt),
9183 OMP_CLAUSE_SIMD);
9184 /* FIXME: This should check the presence of OMP_CLAUSE__SIMT_ on the
9185 enclosing loop. */
9186 bool maybe_simt
9187 = simd && omp_maybe_offloaded_ctx (ctx) && omp_max_simt_vf () > 1;
9188 bool threads = omp_find_clause (gimple_omp_ordered_clauses (ord_stmt),
9189 OMP_CLAUSE_THREADS);
9190
9191 if (omp_find_clause (gimple_omp_ordered_clauses (ord_stmt),
9192 OMP_CLAUSE_DEPEND))
9193 {
9194 /* FIXME: This needs to be moved to the expansion pass, to verify various
9195 conditions only testable on a cfg with dominators computed; also, all
9196 the depend clauses to be merged might still need to be available
9197 for the runtime checks. */
9198 if (0)
9199 lower_omp_ordered_clauses (gsi_p, ord_stmt, ctx);
9200 return;
9201 }
9202
9203 push_gimplify_context ();
9204
9205 block = make_node (BLOCK);
9206 bind = gimple_build_bind (NULL, NULL, block);
9207 gsi_replace (gsi_p, bind, true);
9208 gimple_bind_add_stmt (bind, stmt);
9209
9210 if (simd)
9211 {
9212 x = gimple_build_call_internal (IFN_GOMP_SIMD_ORDERED_START, 1,
9213 build_int_cst (NULL_TREE, threads));
9214 cfun->has_simduid_loops = true;
9215 }
9216 else
9217 x = gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_ORDERED_START),
9218 0);
9219 gimple_bind_add_stmt (bind, x);
9220
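/* Under SIMT (see maybe_simt above) the body must additionally be
   serialized across the SIMT lanes: fetch this lane's number, then
   loop, each iteration letting through only the lane for which
   .GOMP_SIMT_ORDERED_PRED yields zero, until .GOMP_SIMT_VOTE_ANY
   signals that every lane has had its turn; a descriptive note for
   the code built below.  */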
9221 tree counter = NULL_TREE, test = NULL_TREE, body = NULL_TREE;
9222 if (maybe_simt)
9223 {
9224 counter = create_tmp_var (integer_type_node);
9225 g = gimple_build_call_internal (IFN_GOMP_SIMT_LANE, 0);
9226 gimple_call_set_lhs (g, counter);
9227 gimple_bind_add_stmt (bind, g);
9228
9229 body = create_artificial_label (UNKNOWN_LOCATION);
9230 test = create_artificial_label (UNKNOWN_LOCATION);
9231 gimple_bind_add_stmt (bind, gimple_build_label (body));
9232
9233 tree simt_pred = create_tmp_var (integer_type_node);
9234 g = gimple_build_call_internal (IFN_GOMP_SIMT_ORDERED_PRED, 1, counter);
9235 gimple_call_set_lhs (g, simt_pred);
9236 gimple_bind_add_stmt (bind, g);
9237
9238 tree t = create_artificial_label (UNKNOWN_LOCATION);
9239 g = gimple_build_cond (EQ_EXPR, simt_pred, integer_zero_node, t, test);
9240 gimple_bind_add_stmt (bind, g);
9241
9242 gimple_bind_add_stmt (bind, gimple_build_label (t));
9243 }
9244 lower_omp (gimple_omp_body_ptr (stmt), ctx);
9245 gimple_omp_set_body (stmt, maybe_catch_exception (gimple_omp_body (stmt)));
9246 gimple_bind_add_seq (bind, gimple_omp_body (stmt));
9247 gimple_omp_set_body (stmt, NULL);
9248
9249 if (maybe_simt)
9250 {
9251 gimple_bind_add_stmt (bind, gimple_build_label (test));
9252 g = gimple_build_assign (counter, MINUS_EXPR, counter, integer_one_node);
9253 gimple_bind_add_stmt (bind, g);
9254
9255 tree c = build2 (GE_EXPR, boolean_type_node, counter, integer_zero_node);
9256 tree nonneg = create_tmp_var (integer_type_node);
9257 gimple_seq tseq = NULL;
9258 gimplify_assign (nonneg, fold_convert (integer_type_node, c), &tseq);
9259 gimple_bind_add_seq (bind, tseq);
9260
9261 g = gimple_build_call_internal (IFN_GOMP_SIMT_VOTE_ANY, 1, nonneg);
9262 gimple_call_set_lhs (g, nonneg);
9263 gimple_bind_add_stmt (bind, g);
9264
9265 tree end = create_artificial_label (UNKNOWN_LOCATION);
9266 g = gimple_build_cond (NE_EXPR, nonneg, integer_zero_node, body, end);
9267 gimple_bind_add_stmt (bind, g);
9268
9269 gimple_bind_add_stmt (bind, gimple_build_label (end));
9270 }
9271 if (simd)
9272 x = gimple_build_call_internal (IFN_GOMP_SIMD_ORDERED_END, 1,
9273 build_int_cst (NULL_TREE, threads));
9274 else
9275 x = gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_ORDERED_END),
9276 0);
9277 gimple_bind_add_stmt (bind, x);
9278
9279 gimple_bind_add_stmt (bind, gimple_build_omp_return (true));
9280
9281 pop_gimplify_context (bind);
9282
9283 gimple_bind_append_vars (bind, ctx->block_vars);
9284 BLOCK_VARS (block) = gimple_bind_vars (bind);
9285 }
9286
9287
9288 /* Expand code for an OpenMP scan directive and the structured block
9289 before the scan directive. */
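/* A typical input is (a sketch)

     #pragma omp simd reduction (inscan, +:r)
     for (i = 0; i < n; i++)
       {
         r += a[i];
         #pragma omp scan inclusive (r)
         b[i] = r;
       }

   where the statements before the scan directive form the input
   phase and the ones after it the scan phase; with exclusive (r)
   the roles of the two halves are swapped.  */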
9290
9291 static void
9292 lower_omp_scan (gimple_stmt_iterator *gsi_p, omp_context *ctx)
9293 {
9294 gimple *stmt = gsi_stmt (*gsi_p);
9295 bool has_clauses
9296 = gimple_omp_scan_clauses (as_a <gomp_scan *> (stmt)) != NULL;
9297 tree lane = NULL_TREE;
9298 gimple_seq before = NULL;
9299 omp_context *octx = ctx->outer;
9300 gcc_assert (octx);
9301 if (octx->scan_exclusive && !has_clauses)
9302 {
9303 gimple_stmt_iterator gsi2 = *gsi_p;
9304 gsi_next (&gsi2);
9305 gimple *stmt2 = gsi_stmt (gsi2);
9306 /* For exclusive scan, swap the GIMPLE_OMP_SCAN without clauses
9307 with the following GIMPLE_OMP_SCAN with clauses, so that the input
9308 phase, the one with the exclusive clause(s), comes first. */
9309 if (stmt2
9310 && gimple_code (stmt2) == GIMPLE_OMP_SCAN
9311 && gimple_omp_scan_clauses (as_a <gomp_scan *> (stmt2)) != NULL)
9312 {
9313 gsi_remove (gsi_p, false);
9314 gsi_insert_after (gsi_p, stmt, GSI_SAME_STMT);
9315 ctx = maybe_lookup_ctx (stmt2);
9316 gcc_assert (ctx);
9317 lower_omp_scan (gsi_p, ctx);
9318 return;
9319 }
9320 }
9321
9322 bool input_phase = has_clauses ^ octx->scan_inclusive;
9323 bool is_simd = (gimple_code (octx->stmt) == GIMPLE_OMP_FOR
9324 && gimple_omp_for_kind (octx->stmt) == GF_OMP_FOR_KIND_SIMD);
9325 bool is_for = (gimple_code (octx->stmt) == GIMPLE_OMP_FOR
9326 && gimple_omp_for_kind (octx->stmt) == GF_OMP_FOR_KIND_FOR
9327 && !gimple_omp_for_combined_p (octx->stmt));
9328 bool is_for_simd = is_simd && gimple_omp_for_combined_into_p (octx->stmt);
9329 if (is_for_simd && octx->for_simd_scan_phase)
9330 is_simd = false;
9331 if (is_simd)
9332 if (tree c = omp_find_clause (gimple_omp_for_clauses (octx->stmt),
9333 OMP_CLAUSE__SIMDUID_))
9334 {
9335 tree uid = OMP_CLAUSE__SIMDUID__DECL (c);
9336 lane = create_tmp_var (unsigned_type_node);
9337 tree t = build_int_cst (integer_type_node,
9338 input_phase ? 1
9339 : octx->scan_inclusive ? 2 : 3);
9340 gimple *g
9341 = gimple_build_call_internal (IFN_GOMP_SIMD_LANE, 2, uid, t);
9342 gimple_call_set_lhs (g, lane);
9343 gimple_seq_add_stmt (&before, g);
9344 }
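/* The constant argument built above encodes the scan phase for the
   later simd lowering: 1 for the input phase, 2 for the scan phase
   of an inclusive scan, 3 for the scan phase of an exclusive scan
   (a restatement of the conditional chain above).  */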
9345
9346 if (is_simd || is_for)
9347 {
9348 for (tree c = gimple_omp_for_clauses (octx->stmt);
9349 c; c = OMP_CLAUSE_CHAIN (c))
9350 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
9351 && OMP_CLAUSE_REDUCTION_INSCAN (c))
9352 {
9353 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
9354 tree var = OMP_CLAUSE_DECL (c);
9355 tree new_var = lookup_decl (var, octx);
9356 tree val = new_var;
9357 tree var2 = NULL_TREE;
9358 tree var3 = NULL_TREE;
9359 tree var4 = NULL_TREE;
9360 tree lane0 = NULL_TREE;
9361 tree new_vard = new_var;
9362 if (omp_is_reference (var))
9363 {
9364 new_var = build_simple_mem_ref_loc (clause_loc, new_var);
9365 val = new_var;
9366 }
9367 if (DECL_HAS_VALUE_EXPR_P (new_vard))
9368 {
9369 val = DECL_VALUE_EXPR (new_vard);
9370 if (new_vard != new_var)
9371 {
9372 gcc_assert (TREE_CODE (val) == ADDR_EXPR);
9373 val = TREE_OPERAND (val, 0);
9374 }
9375 if (TREE_CODE (val) == ARRAY_REF
9376 && VAR_P (TREE_OPERAND (val, 0)))
9377 {
9378 tree v = TREE_OPERAND (val, 0);
9379 if (lookup_attribute ("omp simd array",
9380 DECL_ATTRIBUTES (v)))
9381 {
9382 val = unshare_expr (val);
9383 lane0 = TREE_OPERAND (val, 1);
9384 TREE_OPERAND (val, 1) = lane;
9385 var2 = lookup_decl (v, octx);
9386 if (octx->scan_exclusive)
9387 var4 = lookup_decl (var2, octx);
9388 if (input_phase
9389 && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
9390 var3 = maybe_lookup_decl (var4 ? var4 : var2, octx);
9391 if (!input_phase)
9392 {
9393 var2 = build4 (ARRAY_REF, TREE_TYPE (val),
9394 var2, lane, NULL_TREE, NULL_TREE);
9395 TREE_THIS_NOTRAP (var2) = 1;
9396 if (octx->scan_exclusive)
9397 {
9398 var4 = build4 (ARRAY_REF, TREE_TYPE (val),
9399 var4, lane, NULL_TREE,
9400 NULL_TREE);
9401 TREE_THIS_NOTRAP (var4) = 1;
9402 }
9403 }
9404 else
9405 var2 = val;
9406 }
9407 }
9408 gcc_assert (var2);
9409 }
9410 else
9411 {
9412 var2 = build_outer_var_ref (var, octx);
9413 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
9414 {
9415 var3 = maybe_lookup_decl (new_vard, octx);
9416 if (var3 == new_vard || var3 == NULL_TREE)
9417 var3 = NULL_TREE;
9418 else if (is_simd && octx->scan_exclusive && !input_phase)
9419 {
9420 var4 = maybe_lookup_decl (var3, octx);
9421 if (var4 == var3 || var4 == NULL_TREE)
9422 {
9423 if (TREE_ADDRESSABLE (TREE_TYPE (new_var)))
9424 {
9425 var4 = var3;
9426 var3 = NULL_TREE;
9427 }
9428 else
9429 var4 = NULL_TREE;
9430 }
9431 }
9432 }
9433 if (is_simd
9434 && octx->scan_exclusive
9435 && !input_phase
9436 && var4 == NULL_TREE)
9437 var4 = create_tmp_var (TREE_TYPE (val));
9438 }
9439 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
9440 {
9441 tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
9442 if (input_phase)
9443 {
9444 if (var3)
9445 {
9446 /* If we've added a separate identity element
9447 variable, copy it over into val. */
9448 tree x = lang_hooks.decls.omp_clause_assign_op (c, val,
9449 var3);
9450 gimplify_and_add (x, &before);
9451 }
9452 else if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c))
9453 {
9454 /* Otherwise, assign to it the identity element. */
9455 gimple_seq tseq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c);
9456 if (is_for)
9457 tseq = copy_gimple_seq_and_replace_locals (tseq);
9458 tree ref = build_outer_var_ref (var, octx);
9459 tree x = (DECL_HAS_VALUE_EXPR_P (new_vard)
9460 ? DECL_VALUE_EXPR (new_vard) : NULL_TREE);
9461 if (x)
9462 {
9463 if (new_vard != new_var)
9464 val = build_fold_addr_expr_loc (clause_loc, val);
9465 SET_DECL_VALUE_EXPR (new_vard, val);
9466 }
9467 SET_DECL_VALUE_EXPR (placeholder, ref);
9468 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
9469 lower_omp (&tseq, octx);
9470 if (x)
9471 SET_DECL_VALUE_EXPR (new_vard, x);
9472 SET_DECL_VALUE_EXPR (placeholder, NULL_TREE);
9473 DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
9474 gimple_seq_add_seq (&before, tseq);
9475 if (is_simd)
9476 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL;
9477 }
9478 }
9479 else if (is_simd)
9480 {
9481 tree x;
9482 if (octx->scan_exclusive)
9483 {
9484 tree v4 = unshare_expr (var4);
9485 tree v2 = unshare_expr (var2);
9486 x = lang_hooks.decls.omp_clause_assign_op (c, v4, v2);
9487 gimplify_and_add (x, &before);
9488 }
9489 gimple_seq tseq = OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c);
9490 x = (DECL_HAS_VALUE_EXPR_P (new_vard)
9491 ? DECL_VALUE_EXPR (new_vard) : NULL_TREE);
9492 tree vexpr = val;
9493 if (x && new_vard != new_var)
9494 vexpr = build_fold_addr_expr_loc (clause_loc, val);
9495 if (x)
9496 SET_DECL_VALUE_EXPR (new_vard, vexpr);
9497 SET_DECL_VALUE_EXPR (placeholder, var2);
9498 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
9499 lower_omp (&tseq, octx);
9500 gimple_seq_add_seq (&before, tseq);
9501 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
9502 if (x)
9503 SET_DECL_VALUE_EXPR (new_vard, x);
9504 SET_DECL_VALUE_EXPR (placeholder, NULL_TREE);
9505 DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
9506 if (octx->scan_inclusive)
9507 {
9508 x = lang_hooks.decls.omp_clause_assign_op (c, val,
9509 var2);
9510 gimplify_and_add (x, &before);
9511 }
9512 else if (lane0 == NULL_TREE)
9513 {
9514 x = lang_hooks.decls.omp_clause_assign_op (c, val,
9515 var4);
9516 gimplify_and_add (x, &before);
9517 }
9518 }
9519 }
9520 else
9521 {
9522 if (input_phase)
9523 {
9524 /* Input phase. Set val to the initializer before
9525 the body. */
9526 tree x = omp_reduction_init (c, TREE_TYPE (new_var));
9527 gimplify_assign (val, x, &before);
9528 }
9529 else if (is_simd)
9530 {
9531 /* Scan phase. */
9532 enum tree_code code = OMP_CLAUSE_REDUCTION_CODE (c);
9533 if (code == MINUS_EXPR)
9534 code = PLUS_EXPR;
9535
9536 tree x = build2 (code, TREE_TYPE (var2),
9537 unshare_expr (var2), unshare_expr (val));
9538 if (octx->scan_inclusive)
9539 {
9540 gimplify_assign (unshare_expr (var2), x, &before);
9541 gimplify_assign (val, var2, &before);
9542 }
9543 else
9544 {
9545 gimplify_assign (unshare_expr (var4),
9546 unshare_expr (var2), &before);
9547 gimplify_assign (var2, x, &before);
9548 if (lane0 == NULL_TREE)
9549 gimplify_assign (val, var4, &before);
9550 }
9551 }
9552 }
9553 if (octx->scan_exclusive && !input_phase && lane0)
9554 {
9555 tree vexpr = unshare_expr (var4);
9556 TREE_OPERAND (vexpr, 1) = lane0;
9557 if (new_vard != new_var)
9558 vexpr = build_fold_addr_expr_loc (clause_loc, vexpr);
9559 SET_DECL_VALUE_EXPR (new_vard, vexpr);
9560 }
9561 }
9562 }
9563 if (is_simd && !is_for_simd)
9564 {
9565 gsi_insert_seq_after (gsi_p, gimple_omp_body (stmt), GSI_SAME_STMT);
9566 gsi_insert_seq_after (gsi_p, before, GSI_SAME_STMT);
9567 gsi_replace (gsi_p, gimple_build_nop (), true);
9568 return;
9569 }
9570 lower_omp (gimple_omp_body_ptr (stmt), octx);
9571 if (before)
9572 {
9573 gimple_stmt_iterator gsi = gsi_start_1 (gimple_omp_body_ptr (stmt));
9574 gsi_insert_seq_before (&gsi, before, GSI_SAME_STMT);
9575 }
9576 }
9577
9578
9579 /* Gimplify a GIMPLE_OMP_CRITICAL statement. This is a relatively simple
9580 substitution of a couple of function calls. But the NAMED case
9581 requires that languages coordinate a symbol name. It is therefore
9582 best put here in common code. */
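/* For instance, assuming a user-written name "foo",

     #pragma omp critical (foo)
     stmt;

   lowers to roughly

     GOMP_critical_name_start (&.gomp_critical_user_foo);
     stmt;
     GOMP_critical_name_end (&.gomp_critical_user_foo);

   where .gomp_critical_user_foo is the common symbol created below,
   while the unnamed form uses GOMP_critical_start/GOMP_critical_end
   with no argument.  A sketch, not an exact dump.  */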
9583
9584 static GTY(()) hash_map<tree, tree> *critical_name_mutexes;
9585
9586 static void
9587 lower_omp_critical (gimple_stmt_iterator *gsi_p, omp_context *ctx)
9588 {
9589 tree block;
9590 tree name, lock, unlock;
9591 gomp_critical *stmt = as_a <gomp_critical *> (gsi_stmt (*gsi_p));
9592 gbind *bind;
9593 location_t loc = gimple_location (stmt);
9594 gimple_seq tbody;
9595
9596 name = gimple_omp_critical_name (stmt);
9597 if (name)
9598 {
9599 tree decl;
9600
9601 if (!critical_name_mutexes)
9602 critical_name_mutexes = hash_map<tree, tree>::create_ggc (10);
9603
9604 tree *n = critical_name_mutexes->get (name);
9605 if (n == NULL)
9606 {
9607 char *new_str;
9608
9609 decl = create_tmp_var_raw (ptr_type_node);
9610
9611 new_str = ACONCAT ((".gomp_critical_user_",
9612 IDENTIFIER_POINTER (name), NULL));
9613 DECL_NAME (decl) = get_identifier (new_str);
9614 TREE_PUBLIC (decl) = 1;
9615 TREE_STATIC (decl) = 1;
9616 DECL_COMMON (decl) = 1;
9617 DECL_ARTIFICIAL (decl) = 1;
9618 DECL_IGNORED_P (decl) = 1;
9619
9620 varpool_node::finalize_decl (decl);
9621
9622 critical_name_mutexes->put (name, decl);
9623 }
9624 else
9625 decl = *n;
9626
9627 /* If '#pragma omp critical' is inside offloaded region or
9628 inside function marked as offloadable, the symbol must be
9629 marked as offloadable too. */
9630 omp_context *octx;
9631 if (cgraph_node::get (current_function_decl)->offloadable)
9632 varpool_node::get_create (decl)->offloadable = 1;
9633 else
9634 for (octx = ctx->outer; octx; octx = octx->outer)
9635 if (is_gimple_omp_offloaded (octx->stmt))
9636 {
9637 varpool_node::get_create (decl)->offloadable = 1;
9638 break;
9639 }
9640
9641 lock = builtin_decl_explicit (BUILT_IN_GOMP_CRITICAL_NAME_START);
9642 lock = build_call_expr_loc (loc, lock, 1,
9643 build_fold_addr_expr_loc (loc, decl));
9644
9645 unlock = builtin_decl_explicit (BUILT_IN_GOMP_CRITICAL_NAME_END);
9646 unlock = build_call_expr_loc (loc, unlock, 1,
9647 build_fold_addr_expr_loc (loc, decl));
9648 }
9649 else
9650 {
9651 lock = builtin_decl_explicit (BUILT_IN_GOMP_CRITICAL_START);
9652 lock = build_call_expr_loc (loc, lock, 0);
9653
9654 unlock = builtin_decl_explicit (BUILT_IN_GOMP_CRITICAL_END);
9655 unlock = build_call_expr_loc (loc, unlock, 0);
9656 }
9657
9658 push_gimplify_context ();
9659
9660 block = make_node (BLOCK);
9661 bind = gimple_build_bind (NULL, NULL, block);
9662 gsi_replace (gsi_p, bind, true);
9663 gimple_bind_add_stmt (bind, stmt);
9664
9665 tbody = gimple_bind_body (bind);
9666 gimplify_and_add (lock, &tbody);
9667 gimple_bind_set_body (bind, tbody);
9668
9669 lower_omp (gimple_omp_body_ptr (stmt), ctx);
9670 gimple_omp_set_body (stmt, maybe_catch_exception (gimple_omp_body (stmt)));
9671 gimple_bind_add_seq (bind, gimple_omp_body (stmt));
9672 gimple_omp_set_body (stmt, NULL);
9673
9674 tbody = gimple_bind_body (bind);
9675 gimplify_and_add (unlock, &tbody);
9676 gimple_bind_set_body (bind, tbody);
9677
9678 gimple_bind_add_stmt (bind, gimple_build_omp_return (true));
9679
9680 pop_gimplify_context (bind);
9681 gimple_bind_append_vars (bind, ctx->block_vars);
9682 BLOCK_VARS (block) = gimple_bind_vars (bind);
9683 }
9684
9685 /* A subroutine of lower_omp_for. Generate code to emit the predicate
9686 for a lastprivate clause. Given a loop control predicate of (V
9687 cond N2), we gate the clause on (!(V cond N2)). The lowered form
9688 is appended to *DLIST, iterator initialization is appended to
9689 *BODY_P. *CLIST is for lastprivate(conditional:) code that needs
9690 to be emitted in a critical section. */
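/* E.g. (a sketch) for

     #pragma omp for lastprivate (x)
     for (i = 0; i < n; i++)
       ...

   the emitted guard is roughly

     if (i == n)   // EQ_EXPR thanks to the unit-step optimization below
       x = x_private;

   and i is also pre-initialized so that threads executing no
   iterations do not run the lastprivate code by accident.  */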
9691
9692 static void
9693 lower_omp_for_lastprivate (struct omp_for_data *fd, gimple_seq *body_p,
9694 gimple_seq *dlist, gimple_seq *clist,
9695 struct omp_context *ctx)
9696 {
9697 tree clauses, cond, vinit;
9698 enum tree_code cond_code;
9699 gimple_seq stmts;
9700
9701 cond_code = fd->loop.cond_code;
9702 cond_code = cond_code == LT_EXPR ? GE_EXPR : LE_EXPR;
9703
9704 /* When possible, use a strict equality expression. This can let
9705 VRP-type optimizations deduce the value and remove a copy. */
9706 if (tree_fits_shwi_p (fd->loop.step))
9707 {
9708 HOST_WIDE_INT step = tree_to_shwi (fd->loop.step);
9709 if (step == 1 || step == -1)
9710 cond_code = EQ_EXPR;
9711 }
9712
9713 tree n2 = fd->loop.n2;
9714 if (fd->collapse > 1
9715 && TREE_CODE (n2) != INTEGER_CST
9716 && gimple_omp_for_combined_into_p (fd->for_stmt))
9717 {
9718 struct omp_context *taskreg_ctx = NULL;
9719 if (gimple_code (ctx->outer->stmt) == GIMPLE_OMP_FOR)
9720 {
9721 gomp_for *gfor = as_a <gomp_for *> (ctx->outer->stmt);
9722 if (gimple_omp_for_kind (gfor) == GF_OMP_FOR_KIND_FOR
9723 || gimple_omp_for_kind (gfor) == GF_OMP_FOR_KIND_DISTRIBUTE)
9724 {
9725 if (gimple_omp_for_combined_into_p (gfor))
9726 {
9727 gcc_assert (ctx->outer->outer
9728 && is_parallel_ctx (ctx->outer->outer));
9729 taskreg_ctx = ctx->outer->outer;
9730 }
9731 else
9732 {
9733 struct omp_for_data outer_fd;
9734 omp_extract_for_data (gfor, &outer_fd, NULL);
9735 n2 = fold_convert (TREE_TYPE (n2), outer_fd.loop.n2);
9736 }
9737 }
9738 else if (gimple_omp_for_kind (gfor) == GF_OMP_FOR_KIND_TASKLOOP)
9739 taskreg_ctx = ctx->outer->outer;
9740 }
9741 else if (is_taskreg_ctx (ctx->outer))
9742 taskreg_ctx = ctx->outer;
9743 if (taskreg_ctx)
9744 {
9745 int i;
9746 tree taskreg_clauses
9747 = gimple_omp_taskreg_clauses (taskreg_ctx->stmt);
9748 tree innerc = omp_find_clause (taskreg_clauses,
9749 OMP_CLAUSE__LOOPTEMP_);
9750 gcc_assert (innerc);
9751 int count = fd->collapse;
9752 if (fd->non_rect
9753 && fd->last_nonrect == fd->first_nonrect + 1)
9754 if (tree v = gimple_omp_for_index (fd->for_stmt, fd->last_nonrect))
9755 if (!TYPE_UNSIGNED (TREE_TYPE (v)))
9756 count += 4;
9757 for (i = 0; i < count; i++)
9758 {
9759 innerc = omp_find_clause (OMP_CLAUSE_CHAIN (innerc),
9760 OMP_CLAUSE__LOOPTEMP_);
9761 gcc_assert (innerc);
9762 }
9763 innerc = omp_find_clause (OMP_CLAUSE_CHAIN (innerc),
9764 OMP_CLAUSE__LOOPTEMP_);
9765 if (innerc)
9766 n2 = fold_convert (TREE_TYPE (n2),
9767 lookup_decl (OMP_CLAUSE_DECL (innerc),
9768 taskreg_ctx));
9769 }
9770 }
9771 cond = build2 (cond_code, boolean_type_node, fd->loop.v, n2);
9772
9773 clauses = gimple_omp_for_clauses (fd->for_stmt);
9774 stmts = NULL;
9775 lower_lastprivate_clauses (clauses, cond, body_p, &stmts, clist, ctx);
9776 if (!gimple_seq_empty_p (stmts))
9777 {
9778 gimple_seq_add_seq (&stmts, *dlist);
9779 *dlist = stmts;
9780
9781 /* Optimize: v = 0; is usually cheaper than v = some_other_constant. */
9782 vinit = fd->loop.n1;
9783 if (cond_code == EQ_EXPR
9784 && tree_fits_shwi_p (fd->loop.n2)
9785 && ! integer_zerop (fd->loop.n2))
9786 vinit = build_int_cst (TREE_TYPE (fd->loop.v), 0);
9787 else
9788 vinit = unshare_expr (vinit);
9789
9790 /* Initialize the iterator variable, so that threads that don't execute
9791 any iterations don't execute the lastprivate clauses by accident. */
9792 gimplify_assign (fd->loop.v, vinit, body_p);
9793 }
9794 }
9795
9796 /* Callback for walk_gimple_seq. Find #pragma omp scan statement. */
9797
9798 static tree
9799 omp_find_scan (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
9800 struct walk_stmt_info *wi)
9801 {
9802 gimple *stmt = gsi_stmt (*gsi_p);
9803
9804 *handled_ops_p = true;
9805 switch (gimple_code (stmt))
9806 {
9807 WALK_SUBSTMTS;
9808
9809 case GIMPLE_OMP_FOR:
9810 if (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_SIMD
9811 && gimple_omp_for_combined_into_p (stmt))
9812 *handled_ops_p = false;
9813 break;
9814
9815 case GIMPLE_OMP_SCAN:
9816 *(gimple_stmt_iterator *) (wi->info) = *gsi_p;
9817 return integer_zero_node;
9818 default:
9819 break;
9820 }
9821 return NULL;
9822 }
9823
9824 /* Helper function for lower_omp_for; add transformations for a worksharing
9825 loop with scan directives inside of it.
9826 For a worksharing loop not combined with simd, transform:
9827 #pragma omp for reduction(inscan,+:r) private(i)
9828 for (i = 0; i < n; i = i + 1)
9829 {
9830 {
9831 update (r);
9832 }
9833 #pragma omp scan inclusive(r)
9834 {
9835 use (r);
9836 }
9837 }
9838
9839 into two worksharing loops + code to merge results:
9840
9841 num_threads = omp_get_num_threads ();
9842 thread_num = omp_get_thread_num ();
9843 if (thread_num == 0) goto <D.2099>; else goto <D.2100>;
9844 <D.2099>:
9845 var2 = r;
9846 goto <D.2101>;
9847 <D.2100>:
9848 // For UDRs this is UDR init, or if ctors are needed, copy from
9849 // var3 that has been constructed to contain the neutral element.
9850 var2 = 0;
9851 <D.2101>:
9852 ivar = 0;
9853 // The _scantemp_ clauses will arrange for rpriva to be initialized to
9854 // a shared array with num_threads elements and rprivb to a local array
9855 // with a number of elements equal to the number of (contiguous) iterations
9856 // the current thread will perform. controlb and controlp are temporaries
9857 // used to handle deallocation of rprivb at the end of the second
9858 // GOMP_FOR.
9859 #pragma omp for _scantemp_(rpriva) _scantemp_(rprivb) _scantemp_(controlb) \
9860 _scantemp_(controlp) reduction(inscan,+:r) private(i) nowait
9861 for (i = 0; i < n; i = i + 1)
9862 {
9863 {
9864 // For UDRs this is UDR init or copy from var3.
9865 r = 0;
9866 // This is the input phase from user code.
9867 update (r);
9868 }
9869 {
9870 // For UDRs this is UDR merge.
9871 var2 = var2 + r;
9872 // Rather than handing it over to the user, save it to the local
9873 // thread's array.
9874 rprivb[ivar] = var2;
9875 // For exclusive scan, the above two statements are swapped.
9876 ivar = ivar + 1;
9877 }
9878 }
9879 // And remember this thread's final value in the shared
9880 // rpriva array.
9881 rpriva[(sizetype) thread_num] = var2;
9882 // If there is more than one thread, compute the inclusive parallel
9883 // scan of the rpriva array using a work-efficient prefix sum.
9884 if (num_threads > 1) goto <D.2102>; else goto <D.2103>;
9885 <D.2102>:
9886 GOMP_barrier ();
9887 down = 0;
9888 k = 1;
9889 num_threadsu = (unsigned int) num_threads;
9890 thread_nump1 = (unsigned int) thread_num + 1;
9891 <D.2108>:
9892 twok = k << 1;
9893 if (twok > num_threadsu) goto <D.2110>; else goto <D.2111>;
9894 <D.2110>:
9895 down = 4294967295;
9896 k = k >> 1;
9897 if (k == num_threadsu) goto <D.2112>; else goto <D.2111>;
9898 <D.2112>:
9899 k = k >> 1;
9900 <D.2111>:
9901 twok = k << 1;
9902 cplx = .MUL_OVERFLOW (thread_nump1, twok);
9903 mul = REALPART_EXPR <cplx>;
9904 ovf = IMAGPART_EXPR <cplx>;
9905 if (ovf == 0) goto <D.2116>; else goto <D.2117>;
9906 <D.2116>:
9907 andv = k & down;
9908 andvm1 = andv + 4294967295;
9909 l = mul + andvm1;
9910 if (l < num_threadsu) goto <D.2120>; else goto <D.2117>;
9911 <D.2120>:
9912 // For UDRs this is UDR merge, performed using var2 variable as temporary,
9913 // i.e. var2 = rpriva[l - k]; UDR merge (var2, rpriva[l]); rpriva[l] = var2;
9914 rpriva[l] = rpriva[l - k] + rpriva[l];
9915 <D.2117>:
9916 if (down == 0) goto <D.2121>; else goto <D.2122>;
9917 <D.2121>:
9918 k = k << 1;
9919 goto <D.2123>;
9920 <D.2122>:
9921 k = k >> 1;
9922 <D.2123>:
9923 GOMP_barrier ();
9924 if (k != 0) goto <D.2108>; else goto <D.2103>;
9925 <D.2103>:
9926 if (thread_num == 0) goto <D.2124>; else goto <D.2125>;
9927 <D.2124>:
9928 // For UDRs this is UDR init or copy from var3.
9929 var2 = 0;
9930 goto <D.2126>;
9931 <D.2125>:
9932 var2 = rpriva[thread_num - 1];
9933 <D.2126>:
9934 ivar = 0;
9935 #pragma omp for _scantemp_(controlb) _scantemp_(controlp) \
9936 reduction(inscan,+:r) private(i)
9937 for (i = 0; i < n; i = i + 1)
9938 {
9939 {
9940 // For UDRs, this is r = var2; UDR merge (r, rprivb[ivar]);
9941 r = var2 + rprivb[ivar];
9942 }
9943 {
9944 // This is the scan phase from user code.
9945 use (r);
9946 // Plus a bump of the iterator.
9947 ivar = ivar + 1;
9948 }
9949 } */
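/* E.g. (a sketch) with four threads and per-thread partial sums
   rpriva = { s0, s1, s2, s3 }, the barrier-separated passes above
   leave the inclusive scan { s0, s0+s1, s0+s1+s2, s0+s1+s2+s3 }
   in rpriva; thread T then seeds its second loop with
   var2 = rpriva[T-1], thread 0 starting from the identity.  */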
9950
9951 static void
9952 lower_omp_for_scan (gimple_seq *body_p, gimple_seq *dlist, gomp_for *stmt,
9953 struct omp_for_data *fd, omp_context *ctx)
9954 {
9955 bool is_for_simd = gimple_omp_for_combined_p (stmt);
9956 gcc_assert (ctx->scan_inclusive || ctx->scan_exclusive);
9957
9958 gimple_seq body = gimple_omp_body (stmt);
9959 gimple_stmt_iterator input1_gsi = gsi_none ();
9960 struct walk_stmt_info wi;
9961 memset (&wi, 0, sizeof (wi));
9962 wi.val_only = true;
9963 wi.info = (void *) &input1_gsi;
9964 walk_gimple_seq_mod (&body, omp_find_scan, NULL, &wi);
9965 gcc_assert (!gsi_end_p (input1_gsi));
9966
9967 gimple *input_stmt1 = gsi_stmt (input1_gsi);
9968 gimple_stmt_iterator gsi = input1_gsi;
9969 gsi_next (&gsi);
9970 gimple_stmt_iterator scan1_gsi = gsi;
9971 gimple *scan_stmt1 = gsi_stmt (gsi);
9972 gcc_assert (scan_stmt1 && gimple_code (scan_stmt1) == GIMPLE_OMP_SCAN);
9973
9974 gimple_seq input_body = gimple_omp_body (input_stmt1);
9975 gimple_seq scan_body = gimple_omp_body (scan_stmt1);
9976 gimple_omp_set_body (input_stmt1, NULL);
9977 gimple_omp_set_body (scan_stmt1, NULL);
9978 gimple_omp_set_body (stmt, NULL);
9979
9980 gomp_for *new_stmt = as_a <gomp_for *> (gimple_copy (stmt));
9981 gimple_seq new_body = copy_gimple_seq_and_replace_locals (body);
9982 gimple_omp_set_body (stmt, body);
9983 gimple_omp_set_body (input_stmt1, input_body);
9984
9985 gimple_stmt_iterator input2_gsi = gsi_none ();
9986 memset (&wi, 0, sizeof (wi));
9987 wi.val_only = true;
9988 wi.info = (void *) &input2_gsi;
9989 walk_gimple_seq_mod (&new_body, omp_find_scan, NULL, &wi);
9990 gcc_assert (!gsi_end_p (input2_gsi));
9991
9992 gimple *input_stmt2 = gsi_stmt (input2_gsi);
9993 gsi = input2_gsi;
9994 gsi_next (&gsi);
9995 gimple_stmt_iterator scan2_gsi = gsi;
9996 gimple *scan_stmt2 = gsi_stmt (gsi);
9997 gcc_assert (scan_stmt2 && gimple_code (scan_stmt2) == GIMPLE_OMP_SCAN);
9998 gimple_omp_set_body (scan_stmt2, scan_body);
9999
10000 gimple_stmt_iterator input3_gsi = gsi_none ();
10001 gimple_stmt_iterator scan3_gsi = gsi_none ();
10002 gimple_stmt_iterator input4_gsi = gsi_none ();
10003 gimple_stmt_iterator scan4_gsi = gsi_none ();
10004 gimple *input_stmt3 = NULL, *scan_stmt3 = NULL;
10005 gimple *input_stmt4 = NULL, *scan_stmt4 = NULL;
10006 omp_context *input_simd_ctx = NULL, *scan_simd_ctx = NULL;
10007 if (is_for_simd)
10008 {
10009 memset (&wi, 0, sizeof (wi));
10010 wi.val_only = true;
10011 wi.info = (void *) &input3_gsi;
10012 walk_gimple_seq_mod (&input_body, omp_find_scan, NULL, &wi);
10013 gcc_assert (!gsi_end_p (input3_gsi));
10014
10015 input_stmt3 = gsi_stmt (input3_gsi);
10016 gsi = input3_gsi;
10017 gsi_next (&gsi);
10018 scan3_gsi = gsi;
10019 scan_stmt3 = gsi_stmt (gsi);
10020 gcc_assert (scan_stmt3 && gimple_code (scan_stmt3) == GIMPLE_OMP_SCAN);
10021
10022 memset (&wi, 0, sizeof (wi));
10023 wi.val_only = true;
10024 wi.info = (void *) &input4_gsi;
10025 walk_gimple_seq_mod (&scan_body, omp_find_scan, NULL, &wi);
10026 gcc_assert (!gsi_end_p (input4_gsi));
10027
10028 input_stmt4 = gsi_stmt (input4_gsi);
10029 gsi = input4_gsi;
10030 gsi_next (&gsi);
10031 scan4_gsi = gsi;
10032 scan_stmt4 = gsi_stmt (gsi);
10033 gcc_assert (scan_stmt4 && gimple_code (scan_stmt4) == GIMPLE_OMP_SCAN);
10034
10035 input_simd_ctx = maybe_lookup_ctx (input_stmt3)->outer;
10036 scan_simd_ctx = maybe_lookup_ctx (input_stmt4)->outer;
10037 }
10038
10039 tree num_threads = create_tmp_var (integer_type_node);
10040 tree thread_num = create_tmp_var (integer_type_node);
10041 tree nthreads_decl = builtin_decl_explicit (BUILT_IN_OMP_GET_NUM_THREADS);
10042 tree threadnum_decl = builtin_decl_explicit (BUILT_IN_OMP_GET_THREAD_NUM);
10043 gimple *g = gimple_build_call (nthreads_decl, 0);
10044 gimple_call_set_lhs (g, num_threads);
10045 gimple_seq_add_stmt (body_p, g);
10046 g = gimple_build_call (threadnum_decl, 0);
10047 gimple_call_set_lhs (g, thread_num);
10048 gimple_seq_add_stmt (body_p, g);
10049
10050 tree ivar = create_tmp_var (sizetype);
10051 tree new_clauses1 = NULL_TREE, new_clauses2 = NULL_TREE;
10052 tree *cp1 = &new_clauses1, *cp2 = &new_clauses2;
10053 tree k = create_tmp_var (unsigned_type_node);
10054 tree l = create_tmp_var (unsigned_type_node);
10055
10056 gimple_seq clist = NULL, mdlist = NULL;
10057 gimple_seq thr01_list = NULL, thrn1_list = NULL;
10058 gimple_seq thr02_list = NULL, thrn2_list = NULL;
10059 gimple_seq scan1_list = NULL, input2_list = NULL;
10060 gimple_seq last_list = NULL, reduc_list = NULL;
10061 for (tree c = gimple_omp_for_clauses (stmt); c; c = OMP_CLAUSE_CHAIN (c))
10062 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
10063 && OMP_CLAUSE_REDUCTION_INSCAN (c))
10064 {
10065 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
10066 tree var = OMP_CLAUSE_DECL (c);
10067 tree new_var = lookup_decl (var, ctx);
10068 tree var3 = NULL_TREE;
10069 tree new_vard = new_var;
10070 if (omp_is_reference (var))
10071 new_var = build_simple_mem_ref_loc (clause_loc, new_var);
10072 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
10073 {
10074 var3 = maybe_lookup_decl (new_vard, ctx);
10075 if (var3 == new_vard)
10076 var3 = NULL_TREE;
10077 }
10078
10079 tree ptype = build_pointer_type (TREE_TYPE (new_var));
10080 tree rpriva = create_tmp_var (ptype);
10081 tree nc = build_omp_clause (clause_loc, OMP_CLAUSE__SCANTEMP_);
10082 OMP_CLAUSE_DECL (nc) = rpriva;
10083 *cp1 = nc;
10084 cp1 = &OMP_CLAUSE_CHAIN (nc);
10085
10086 tree rprivb = create_tmp_var (ptype);
10087 nc = build_omp_clause (clause_loc, OMP_CLAUSE__SCANTEMP_);
10088 OMP_CLAUSE_DECL (nc) = rprivb;
10089 OMP_CLAUSE__SCANTEMP__ALLOC (nc) = 1;
10090 *cp1 = nc;
10091 cp1 = &OMP_CLAUSE_CHAIN (nc);
10092
10093 tree var2 = create_tmp_var_raw (TREE_TYPE (new_var));
10094 if (new_vard != new_var)
10095 TREE_ADDRESSABLE (var2) = 1;
10096 gimple_add_tmp_var (var2);
10097
10098 tree x = fold_convert_loc (clause_loc, sizetype, thread_num);
10099 x = fold_build2_loc (clause_loc, MULT_EXPR, sizetype, x,
10100 TYPE_SIZE_UNIT (TREE_TYPE (ptype)));
10101 x = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (rpriva), rpriva, x);
10102 tree rpriva_ref = build_simple_mem_ref_loc (clause_loc, x);
10103
10104 x = fold_build2_loc (clause_loc, PLUS_EXPR, integer_type_node,
10105 thread_num, integer_minus_one_node);
10106 x = fold_convert_loc (clause_loc, sizetype, x);
10107 x = fold_build2_loc (clause_loc, MULT_EXPR, sizetype, x,
10108 TYPE_SIZE_UNIT (TREE_TYPE (ptype)));
10109 x = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (rpriva), rpriva, x);
10110 tree rprivam1_ref = build_simple_mem_ref_loc (clause_loc, x);
10111
10112 x = fold_convert_loc (clause_loc, sizetype, l);
10113 x = fold_build2_loc (clause_loc, MULT_EXPR, sizetype, x,
10114 TYPE_SIZE_UNIT (TREE_TYPE (ptype)));
10115 x = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (rpriva), rpriva, x);
10116 tree rprival_ref = build_simple_mem_ref_loc (clause_loc, x);
10117
10118 x = fold_build2_loc (clause_loc, MINUS_EXPR, unsigned_type_node, l, k);
10119 x = fold_convert_loc (clause_loc, sizetype, x);
10120 x = fold_build2_loc (clause_loc, MULT_EXPR, sizetype, x,
10121 TYPE_SIZE_UNIT (TREE_TYPE (ptype)));
10122 x = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (rpriva), rpriva, x);
10123 tree rprivalmk_ref = build_simple_mem_ref_loc (clause_loc, x);
10124
10125 x = fold_build2_loc (clause_loc, MULT_EXPR, sizetype, ivar,
10126 TYPE_SIZE_UNIT (TREE_TYPE (ptype)));
10127 x = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (rprivb), rprivb, x);
10128 tree rprivb_ref = build_simple_mem_ref_loc (clause_loc, x);
10129
10130 tree var4 = is_for_simd ? new_var : var2;
10131 tree var5 = NULL_TREE, var6 = NULL_TREE;
10132 if (is_for_simd)
10133 {
10134 var5 = lookup_decl (var, input_simd_ctx);
10135 var6 = lookup_decl (var, scan_simd_ctx);
10136 if (new_vard != new_var)
10137 {
10138 var5 = build_simple_mem_ref_loc (clause_loc, var5);
10139 var6 = build_simple_mem_ref_loc (clause_loc, var6);
10140 }
10141 }
10142 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
10143 {
10144 tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
10145 tree val = var2;
10146
10147 x = lang_hooks.decls.omp_clause_default_ctor
10148 (c, var2, build_outer_var_ref (var, ctx));
10149 if (x)
10150 gimplify_and_add (x, &clist);
10151
10152 x = build_outer_var_ref (var, ctx);
10153 x = lang_hooks.decls.omp_clause_assign_op (c, unshare_expr (var4),
10154 x);
10155 gimplify_and_add (x, &thr01_list);
10156
10157 tree y = (DECL_HAS_VALUE_EXPR_P (new_vard)
10158 ? DECL_VALUE_EXPR (new_vard) : NULL_TREE);
10159 if (var3)
10160 {
10161 x = unshare_expr (var4);
10162 x = lang_hooks.decls.omp_clause_assign_op (c, x, var3);
10163 gimplify_and_add (x, &thrn1_list);
10164 x = unshare_expr (var4);
10165 x = lang_hooks.decls.omp_clause_assign_op (c, x, var3);
10166 gimplify_and_add (x, &thr02_list);
10167 }
10168 else if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c))
10169 {
10170 /* Otherwise, assign to it the identity element. */
10171 gimple_seq tseq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c);
10172 tseq = copy_gimple_seq_and_replace_locals (tseq);
10173 if (!is_for_simd)
10174 {
10175 if (new_vard != new_var)
10176 val = build_fold_addr_expr_loc (clause_loc, val);
10177 SET_DECL_VALUE_EXPR (new_vard, val);
10178 DECL_HAS_VALUE_EXPR_P (new_vard) = 1;
10179 }
10180 SET_DECL_VALUE_EXPR (placeholder, error_mark_node);
10181 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
10182 lower_omp (&tseq, ctx);
10183 gimple_seq_add_seq (&thrn1_list, tseq);
10184 tseq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c);
10185 lower_omp (&tseq, ctx);
10186 gimple_seq_add_seq (&thr02_list, tseq);
10187 SET_DECL_VALUE_EXPR (placeholder, NULL_TREE);
10188 DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
10189 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL;
10190 if (y)
10191 SET_DECL_VALUE_EXPR (new_vard, y);
10192 else
10193 {
10194 DECL_HAS_VALUE_EXPR_P (new_vard) = 0;
10195 SET_DECL_VALUE_EXPR (new_vard, NULL_TREE);
10196 }
10197 }
10198
10199 x = unshare_expr (var4);
10200 x = lang_hooks.decls.omp_clause_assign_op (c, x, rprivam1_ref);
10201 gimplify_and_add (x, &thrn2_list);
10202
10203 if (is_for_simd)
10204 {
10205 x = unshare_expr (rprivb_ref);
10206 x = lang_hooks.decls.omp_clause_assign_op (c, x, var5);
10207 gimplify_and_add (x, &scan1_list);
10208 }
10209 else
10210 {
10211 if (ctx->scan_exclusive)
10212 {
10213 x = unshare_expr (rprivb_ref);
10214 x = lang_hooks.decls.omp_clause_assign_op (c, x, var2);
10215 gimplify_and_add (x, &scan1_list);
10216 }
10217
10218 gimple_seq tseq = OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c);
10219 tseq = copy_gimple_seq_and_replace_locals (tseq);
10220 SET_DECL_VALUE_EXPR (placeholder, var2);
10221 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
10222 lower_omp (&tseq, ctx);
10223 gimple_seq_add_seq (&scan1_list, tseq);
10224
10225 if (ctx->scan_inclusive)
10226 {
10227 x = unshare_expr (rprivb_ref);
10228 x = lang_hooks.decls.omp_clause_assign_op (c, x, var2);
10229 gimplify_and_add (x, &scan1_list);
10230 }
10231 }
10232
10233 x = unshare_expr (rpriva_ref);
10234 x = lang_hooks.decls.omp_clause_assign_op (c, x,
10235 unshare_expr (var4));
10236 gimplify_and_add (x, &mdlist);
10237
10238 x = unshare_expr (is_for_simd ? var6 : new_var);
10239 x = lang_hooks.decls.omp_clause_assign_op (c, x, var4);
10240 gimplify_and_add (x, &input2_list);
10241
10242 val = rprivb_ref;
10243 if (new_vard != new_var)
10244 val = build_fold_addr_expr_loc (clause_loc, val);
10245
10246 gimple_seq tseq = OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c);
10247 tseq = copy_gimple_seq_and_replace_locals (tseq);
10248 SET_DECL_VALUE_EXPR (new_vard, val);
10249 DECL_HAS_VALUE_EXPR_P (new_vard) = 1;
10250 if (is_for_simd)
10251 {
10252 SET_DECL_VALUE_EXPR (placeholder, var6);
10253 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
10254 }
10255 else
10256 DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
10257 lower_omp (&tseq, ctx);
10258 if (y)
10259 SET_DECL_VALUE_EXPR (new_vard, y);
10260 else
10261 {
10262 DECL_HAS_VALUE_EXPR_P (new_vard) = 0;
10263 SET_DECL_VALUE_EXPR (new_vard, NULL_TREE);
10264 }
10265 if (!is_for_simd)
10266 {
10267 SET_DECL_VALUE_EXPR (placeholder, new_var);
10268 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
10269 lower_omp (&tseq, ctx);
10270 }
10271 gimple_seq_add_seq (&input2_list, tseq);
10272
10273 x = build_outer_var_ref (var, ctx);
10274 x = lang_hooks.decls.omp_clause_assign_op (c, x, rpriva_ref);
10275 gimplify_and_add (x, &last_list);
10276
10277 x = lang_hooks.decls.omp_clause_assign_op (c, var2, rprivalmk_ref);
10278 gimplify_and_add (x, &reduc_list);
10279 tseq = OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c);
10280 tseq = copy_gimple_seq_and_replace_locals (tseq);
10281 val = rprival_ref;
10282 if (new_vard != new_var)
10283 val = build_fold_addr_expr_loc (clause_loc, val);
10284 SET_DECL_VALUE_EXPR (new_vard, val);
10285 DECL_HAS_VALUE_EXPR_P (new_vard) = 1;
10286 SET_DECL_VALUE_EXPR (placeholder, var2);
10287 lower_omp (&tseq, ctx);
10288 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
10289 SET_DECL_VALUE_EXPR (placeholder, NULL_TREE);
10290 DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
10291 if (y)
10292 SET_DECL_VALUE_EXPR (new_vard, y);
10293 else
10294 {
10295 DECL_HAS_VALUE_EXPR_P (new_vard) = 0;
10296 SET_DECL_VALUE_EXPR (new_vard, NULL_TREE);
10297 }
10298 gimple_seq_add_seq (&reduc_list, tseq);
10299 x = lang_hooks.decls.omp_clause_assign_op (c, rprival_ref, var2);
10300 gimplify_and_add (x, &reduc_list);
10301
10302 x = lang_hooks.decls.omp_clause_dtor (c, var2);
10303 if (x)
10304 gimplify_and_add (x, dlist);
10305 }
10306 else
10307 {
10308 x = build_outer_var_ref (var, ctx);
10309 gimplify_assign (unshare_expr (var4), x, &thr01_list);
10310
10311 x = omp_reduction_init (c, TREE_TYPE (new_var));
10312 gimplify_assign (unshare_expr (var4), unshare_expr (x),
10313 &thrn1_list);
10314 gimplify_assign (unshare_expr (var4), x, &thr02_list);
10315
10316 gimplify_assign (unshare_expr (var4), rprivam1_ref, &thrn2_list);
10317
10318 enum tree_code code = OMP_CLAUSE_REDUCTION_CODE (c);
10319 if (code == MINUS_EXPR)
10320 code = PLUS_EXPR;
10321
10322 if (is_for_simd)
10323 gimplify_assign (unshare_expr (rprivb_ref), var5, &scan1_list);
10324 else
10325 {
10326 if (ctx->scan_exclusive)
10327 gimplify_assign (unshare_expr (rprivb_ref), var2,
10328 &scan1_list);
10329 x = build2 (code, TREE_TYPE (new_var), var2, new_var);
10330 gimplify_assign (var2, x, &scan1_list);
10331 if (ctx->scan_inclusive)
10332 gimplify_assign (unshare_expr (rprivb_ref), var2,
10333 &scan1_list);
10334 }
10335
10336 gimplify_assign (unshare_expr (rpriva_ref), unshare_expr (var4),
10337 &mdlist);
10338
10339 x = build2 (code, TREE_TYPE (new_var), var4, rprivb_ref);
10340 gimplify_assign (is_for_simd ? var6 : new_var, x, &input2_list);
10341
10342 gimplify_assign (build_outer_var_ref (var, ctx), rpriva_ref,
10343 &last_list);
10344
10345 x = build2 (code, TREE_TYPE (new_var), rprivalmk_ref,
10346 unshare_expr (rprival_ref));
10347 gimplify_assign (rprival_ref, x, &reduc_list);
10348 }
10349 }
10350
10351 g = gimple_build_assign (ivar, PLUS_EXPR, ivar, size_one_node);
10352 gimple_seq_add_stmt (&scan1_list, g);
10353 g = gimple_build_assign (ivar, PLUS_EXPR, ivar, size_one_node);
10354 gimple_seq_add_stmt (gimple_omp_body_ptr (is_for_simd
10355 ? scan_stmt4 : scan_stmt2), g);
10356
10357 tree controlb = create_tmp_var (boolean_type_node);
10358 tree controlp = create_tmp_var (ptr_type_node);
10359 tree nc = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__SCANTEMP_);
10360 OMP_CLAUSE_DECL (nc) = controlb;
10361 OMP_CLAUSE__SCANTEMP__CONTROL (nc) = 1;
10362 *cp1 = nc;
10363 cp1 = &OMP_CLAUSE_CHAIN (nc);
10364 nc = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__SCANTEMP_);
10365 OMP_CLAUSE_DECL (nc) = controlp;
10366 OMP_CLAUSE__SCANTEMP__CONTROL (nc) = 1;
10367 *cp1 = nc;
10368 cp1 = &OMP_CLAUSE_CHAIN (nc);
10369 nc = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__SCANTEMP_);
10370 OMP_CLAUSE_DECL (nc) = controlb;
10371 OMP_CLAUSE__SCANTEMP__CONTROL (nc) = 1;
10372 *cp2 = nc;
10373 cp2 = &OMP_CLAUSE_CHAIN (nc);
10374 nc = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__SCANTEMP_);
10375 OMP_CLAUSE_DECL (nc) = controlp;
10376 OMP_CLAUSE__SCANTEMP__CONTROL (nc) = 1;
10377 *cp2 = nc;
10378 cp2 = &OMP_CLAUSE_CHAIN (nc);
10379
10380 *cp1 = gimple_omp_for_clauses (stmt);
10381 gimple_omp_for_set_clauses (stmt, new_clauses1);
10382 *cp2 = gimple_omp_for_clauses (new_stmt);
10383 gimple_omp_for_set_clauses (new_stmt, new_clauses2);
10384
10385 if (is_for_simd)
10386 {
10387 gimple_seq_add_seq (gimple_omp_body_ptr (scan_stmt3), scan1_list);
10388 gimple_seq_add_seq (gimple_omp_body_ptr (input_stmt4), input2_list);
10389
10390 gsi_insert_seq_after (&input3_gsi, gimple_omp_body (input_stmt3),
10391 GSI_SAME_STMT);
10392 gsi_remove (&input3_gsi, true);
10393 gsi_insert_seq_after (&scan3_gsi, gimple_omp_body (scan_stmt3),
10394 GSI_SAME_STMT);
10395 gsi_remove (&scan3_gsi, true);
10396 gsi_insert_seq_after (&input4_gsi, gimple_omp_body (input_stmt4),
10397 GSI_SAME_STMT);
10398 gsi_remove (&input4_gsi, true);
10399 gsi_insert_seq_after (&scan4_gsi, gimple_omp_body (scan_stmt4),
10400 GSI_SAME_STMT);
10401 gsi_remove (&scan4_gsi, true);
10402 }
10403 else
10404 {
10405 gimple_omp_set_body (scan_stmt1, scan1_list);
10406 gimple_omp_set_body (input_stmt2, input2_list);
10407 }
10408
10409 gsi_insert_seq_after (&input1_gsi, gimple_omp_body (input_stmt1),
10410 GSI_SAME_STMT);
10411 gsi_remove (&input1_gsi, true);
10412 gsi_insert_seq_after (&scan1_gsi, gimple_omp_body (scan_stmt1),
10413 GSI_SAME_STMT);
10414 gsi_remove (&scan1_gsi, true);
10415 gsi_insert_seq_after (&input2_gsi, gimple_omp_body (input_stmt2),
10416 GSI_SAME_STMT);
10417 gsi_remove (&input2_gsi, true);
10418 gsi_insert_seq_after (&scan2_gsi, gimple_omp_body (scan_stmt2),
10419 GSI_SAME_STMT);
10420 gsi_remove (&scan2_gsi, true);
10421
10422 gimple_seq_add_seq (body_p, clist);
10423
10424 tree lab1 = create_artificial_label (UNKNOWN_LOCATION);
10425 tree lab2 = create_artificial_label (UNKNOWN_LOCATION);
10426 tree lab3 = create_artificial_label (UNKNOWN_LOCATION);
10427 g = gimple_build_cond (EQ_EXPR, thread_num, integer_zero_node, lab1, lab2);
10428 gimple_seq_add_stmt (body_p, g);
10429 g = gimple_build_label (lab1);
10430 gimple_seq_add_stmt (body_p, g);
10431 gimple_seq_add_seq (body_p, thr01_list);
10432 g = gimple_build_goto (lab3);
10433 gimple_seq_add_stmt (body_p, g);
10434 g = gimple_build_label (lab2);
10435 gimple_seq_add_stmt (body_p, g);
10436 gimple_seq_add_seq (body_p, thrn1_list);
10437 g = gimple_build_label (lab3);
10438 gimple_seq_add_stmt (body_p, g);
10439
10440 g = gimple_build_assign (ivar, size_zero_node);
10441 gimple_seq_add_stmt (body_p, g);
10442
10443 gimple_seq_add_stmt (body_p, stmt);
10444 gimple_seq_add_seq (body_p, body);
10445 gimple_seq_add_stmt (body_p, gimple_build_omp_continue (fd->loop.v,
10446 fd->loop.v));
10447
10448 g = gimple_build_omp_return (true);
10449 gimple_seq_add_stmt (body_p, g);
10450 gimple_seq_add_seq (body_p, mdlist);
10451
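/* What follows builds the cross-thread combining phase of the scan.
   Informally (a sketch reconstructed from the statements emitted below,
   with REDUC_LIST combining thread L's partial result into this
   thread's):

	if (num_threads > 1)
	  {
	    #pragma omp barrier
	    down = 0U; k = 1U;
	    num_threadsu = (unsigned) num_threads;
	    thread_numu = (unsigned) thread_num;
	    thread_nump1 = thread_numu + 1U;
	  lab3:
	    twok = k << 1;
	    if (twok > num_threadsu)
	      {
		down = ~0U;
		k = k >> 1;
		if (k == num_threadsu)
		  k = k >> 1;
	      }
	    twok = k << 1;
	    cplx = .MUL_OVERFLOW (thread_nump1, twok);
	    mul = REALPART_EXPR <cplx>; ovf = IMAGPART_EXPR <cplx>;
	    if (ovf == 0)
	      {
		l = mul + ((k & down) - 1);
		if (l < num_threadsu)
		  REDUC_LIST;
	      }
	    if (down == 0) k = k << 1; else k = k >> 1;
	    #pragma omp barrier
	    if (k != 0) goto lab3;
	  }  */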
10452 lab1 = create_artificial_label (UNKNOWN_LOCATION);
10453 lab2 = create_artificial_label (UNKNOWN_LOCATION);
10454 g = gimple_build_cond (GT_EXPR, num_threads, integer_one_node, lab1, lab2);
10455 gimple_seq_add_stmt (body_p, g);
10456 g = gimple_build_label (lab1);
10457 gimple_seq_add_stmt (body_p, g);
10458
10459 g = omp_build_barrier (NULL);
10460 gimple_seq_add_stmt (body_p, g);
10461
10462 tree down = create_tmp_var (unsigned_type_node);
10463 g = gimple_build_assign (down, build_zero_cst (unsigned_type_node));
10464 gimple_seq_add_stmt (body_p, g);
10465
10466 g = gimple_build_assign (k, build_one_cst (unsigned_type_node));
10467 gimple_seq_add_stmt (body_p, g);
10468
10469 tree num_threadsu = create_tmp_var (unsigned_type_node);
10470 g = gimple_build_assign (num_threadsu, NOP_EXPR, num_threads);
10471 gimple_seq_add_stmt (body_p, g);
10472
10473 tree thread_numu = create_tmp_var (unsigned_type_node);
10474 g = gimple_build_assign (thread_numu, NOP_EXPR, thread_num);
10475 gimple_seq_add_stmt (body_p, g);
10476
10477 tree thread_nump1 = create_tmp_var (unsigned_type_node);
10478 g = gimple_build_assign (thread_nump1, PLUS_EXPR, thread_numu,
10479 build_int_cst (unsigned_type_node, 1));
10480 gimple_seq_add_stmt (body_p, g);
10481
10482 lab3 = create_artificial_label (UNKNOWN_LOCATION);
10483 g = gimple_build_label (lab3);
10484 gimple_seq_add_stmt (body_p, g);
10485
10486 tree twok = create_tmp_var (unsigned_type_node);
10487 g = gimple_build_assign (twok, LSHIFT_EXPR, k, integer_one_node);
10488 gimple_seq_add_stmt (body_p, g);
10489
10490 tree lab4 = create_artificial_label (UNKNOWN_LOCATION);
10491 tree lab5 = create_artificial_label (UNKNOWN_LOCATION);
10492 tree lab6 = create_artificial_label (UNKNOWN_LOCATION);
10493 g = gimple_build_cond (GT_EXPR, twok, num_threadsu, lab4, lab5);
10494 gimple_seq_add_stmt (body_p, g);
10495 g = gimple_build_label (lab4);
10496 gimple_seq_add_stmt (body_p, g);
10497 g = gimple_build_assign (down, build_all_ones_cst (unsigned_type_node));
10498 gimple_seq_add_stmt (body_p, g);
10499 g = gimple_build_assign (k, RSHIFT_EXPR, k, integer_one_node);
10500 gimple_seq_add_stmt (body_p, g);
10501
10502 g = gimple_build_cond (EQ_EXPR, k, num_threadsu, lab6, lab5);
10503 gimple_seq_add_stmt (body_p, g);
10504 g = gimple_build_label (lab6);
10505 gimple_seq_add_stmt (body_p, g);
10506
10507 g = gimple_build_assign (k, RSHIFT_EXPR, k, integer_one_node);
10508 gimple_seq_add_stmt (body_p, g);
10509
10510 g = gimple_build_label (lab5);
10511 gimple_seq_add_stmt (body_p, g);
10512
10513 g = gimple_build_assign (twok, LSHIFT_EXPR, k, integer_one_node);
10514 gimple_seq_add_stmt (body_p, g);
10515
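/* (thread_num + 1) * twok is computed with an overflow check via the
   .MUL_OVERFLOW internal function; the IMAGPART of the complex result
   is the overflow flag, and an overflowing product makes the code below
   skip this combining step.  */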
10516 tree cplx = create_tmp_var (build_complex_type (unsigned_type_node, false));
10517 g = gimple_build_call_internal (IFN_MUL_OVERFLOW, 2, thread_nump1, twok);
10518 gimple_call_set_lhs (g, cplx);
10519 gimple_seq_add_stmt (body_p, g);
10520 tree mul = create_tmp_var (unsigned_type_node);
10521 g = gimple_build_assign (mul, REALPART_EXPR,
10522 build1 (REALPART_EXPR, unsigned_type_node, cplx));
10523 gimple_seq_add_stmt (body_p, g);
10524 tree ovf = create_tmp_var (unsigned_type_node);
10525 g = gimple_build_assign (ovf, IMAGPART_EXPR,
10526 build1 (IMAGPART_EXPR, unsigned_type_node, cplx));
10527 gimple_seq_add_stmt (body_p, g);
10528
10529 tree lab7 = create_artificial_label (UNKNOWN_LOCATION);
10530 tree lab8 = create_artificial_label (UNKNOWN_LOCATION);
10531 g = gimple_build_cond (EQ_EXPR, ovf, build_zero_cst (unsigned_type_node),
10532 lab7, lab8);
10533 gimple_seq_add_stmt (body_p, g);
10534 g = gimple_build_label (lab7);
10535 gimple_seq_add_stmt (body_p, g);
10536
10537 tree andv = create_tmp_var (unsigned_type_node);
10538 g = gimple_build_assign (andv, BIT_AND_EXPR, k, down);
10539 gimple_seq_add_stmt (body_p, g);
10540 tree andvm1 = create_tmp_var (unsigned_type_node);
10541 g = gimple_build_assign (andvm1, PLUS_EXPR, andv,
10542 build_minus_one_cst (unsigned_type_node));
10543 gimple_seq_add_stmt (body_p, g);
10544
10545 g = gimple_build_assign (l, PLUS_EXPR, mul, andvm1);
10546 gimple_seq_add_stmt (body_p, g);
10547
10548 tree lab9 = create_artificial_label (UNKNOWN_LOCATION);
10549 g = gimple_build_cond (LT_EXPR, l, num_threadsu, lab9, lab8);
10550 gimple_seq_add_stmt (body_p, g);
10551 g = gimple_build_label (lab9);
10552 gimple_seq_add_stmt (body_p, g);
10553 gimple_seq_add_seq (body_p, reduc_list);
10554 g = gimple_build_label (lab8);
10555 gimple_seq_add_stmt (body_p, g);
10556
10557 tree lab10 = create_artificial_label (UNKNOWN_LOCATION);
10558 tree lab11 = create_artificial_label (UNKNOWN_LOCATION);
10559 tree lab12 = create_artificial_label (UNKNOWN_LOCATION);
10560 g = gimple_build_cond (EQ_EXPR, down, build_zero_cst (unsigned_type_node),
10561 lab10, lab11);
10562 gimple_seq_add_stmt (body_p, g);
10563 g = gimple_build_label (lab10);
10564 gimple_seq_add_stmt (body_p, g);
10565 g = gimple_build_assign (k, LSHIFT_EXPR, k, integer_one_node);
10566 gimple_seq_add_stmt (body_p, g);
10567 g = gimple_build_goto (lab12);
10568 gimple_seq_add_stmt (body_p, g);
10569 g = gimple_build_label (lab11);
10570 gimple_seq_add_stmt (body_p, g);
10571 g = gimple_build_assign (k, RSHIFT_EXPR, k, integer_one_node);
10572 gimple_seq_add_stmt (body_p, g);
10573 g = gimple_build_label (lab12);
10574 gimple_seq_add_stmt (body_p, g);
10575
10576 g = omp_build_barrier (NULL);
10577 gimple_seq_add_stmt (body_p, g);
10578
10579 g = gimple_build_cond (NE_EXPR, k, build_zero_cst (unsigned_type_node),
10580 lab3, lab2);
10581 gimple_seq_add_stmt (body_p, g);
10582
10583 g = gimple_build_label (lab2);
10584 gimple_seq_add_stmt (body_p, g);
10585
10586 lab1 = create_artificial_label (UNKNOWN_LOCATION);
10587 lab2 = create_artificial_label (UNKNOWN_LOCATION);
10588 lab3 = create_artificial_label (UNKNOWN_LOCATION);
10589 g = gimple_build_cond (EQ_EXPR, thread_num, integer_zero_node, lab1, lab2);
10590 gimple_seq_add_stmt (body_p, g);
10591 g = gimple_build_label (lab1);
10592 gimple_seq_add_stmt (body_p, g);
10593 gimple_seq_add_seq (body_p, thr02_list);
10594 g = gimple_build_goto (lab3);
10595 gimple_seq_add_stmt (body_p, g);
10596 g = gimple_build_label (lab2);
10597 gimple_seq_add_stmt (body_p, g);
10598 gimple_seq_add_seq (body_p, thrn2_list);
10599 g = gimple_build_label (lab3);
10600 gimple_seq_add_stmt (body_p, g);
10601
10602 g = gimple_build_assign (ivar, size_zero_node);
10603 gimple_seq_add_stmt (body_p, g);
10604 gimple_seq_add_stmt (body_p, new_stmt);
10605 gimple_seq_add_seq (body_p, new_body);
10606
10607 gimple_seq new_dlist = NULL;
10608 lab1 = create_artificial_label (UNKNOWN_LOCATION);
10609 lab2 = create_artificial_label (UNKNOWN_LOCATION);
10610 tree num_threadsm1 = create_tmp_var (integer_type_node);
10611 g = gimple_build_assign (num_threadsm1, PLUS_EXPR, num_threads,
10612 integer_minus_one_node);
10613 gimple_seq_add_stmt (&new_dlist, g);
10614 g = gimple_build_cond (EQ_EXPR, thread_num, num_threadsm1, lab1, lab2);
10615 gimple_seq_add_stmt (&new_dlist, g);
10616 g = gimple_build_label (lab1);
10617 gimple_seq_add_stmt (&new_dlist, g);
10618 gimple_seq_add_seq (&new_dlist, last_list);
10619 g = gimple_build_label (lab2);
10620 gimple_seq_add_stmt (&new_dlist, g);
10621 gimple_seq_add_seq (&new_dlist, *dlist);
10622 *dlist = new_dlist;
10623 }
10624
10625 /* Lower code for an OMP loop directive. */
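/* A rough sketch of the result in the common, non-scan case:

	<bind>
	  data-sharing setup (from lower_rec_input_clauses)
	  pre-body, lowered header temporaries
	  GIMPLE_OMP_FOR
	    body
	  GIMPLE_OMP_CONTINUE (v, v)
	  reduction/lastprivate finalization (clist, dlist)
	  GIMPLE_OMP_RETURN

   Scan handling, task reductions and OpenACC head/tail markers add
   further pieces around this skeleton.  */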
10626
10627 static void
10628 lower_omp_for (gimple_stmt_iterator *gsi_p, omp_context *ctx)
10629 {
10630 tree *rhs_p, block;
10631 struct omp_for_data fd, *fdp = NULL;
10632 gomp_for *stmt = as_a <gomp_for *> (gsi_stmt (*gsi_p));
10633 gbind *new_stmt;
10634 gimple_seq omp_for_body, body, dlist, tred_ilist = NULL, tred_dlist = NULL;
10635 gimple_seq cnt_list = NULL, clist = NULL;
10636 gimple_seq oacc_head = NULL, oacc_tail = NULL;
10637 size_t i;
10638
10639 push_gimplify_context ();
10640
10641 lower_omp (gimple_omp_for_pre_body_ptr (stmt), ctx);
10642
10643 block = make_node (BLOCK);
10644 new_stmt = gimple_build_bind (NULL, NULL, block);
10645 /* Replace at gsi right away, so that 'stmt' is no longer a member
10646 of a sequence, as we're going to add it to a different
10647 one below. */
10648 gsi_replace (gsi_p, new_stmt, true);
10649
10650 /* Move the declarations of temporaries in the loop body up to the
10651 new bind before we make the body go away. */
10652 omp_for_body = gimple_omp_body (stmt);
10653 if (!gimple_seq_empty_p (omp_for_body)
10654 && gimple_code (gimple_seq_first_stmt (omp_for_body)) == GIMPLE_BIND)
10655 {
10656 gbind *inner_bind
10657 = as_a <gbind *> (gimple_seq_first_stmt (omp_for_body));
10658 tree vars = gimple_bind_vars (inner_bind);
10659 gimple_bind_append_vars (new_stmt, vars);
10660 /* bind_vars/BLOCK_VARS are being moved to new_stmt/block, don't
10661 keep them on the inner_bind and its block. */
10662 gimple_bind_set_vars (inner_bind, NULL_TREE);
10663 if (gimple_bind_block (inner_bind))
10664 BLOCK_VARS (gimple_bind_block (inner_bind)) = NULL_TREE;
10665 }
10666
10667 if (gimple_omp_for_combined_into_p (stmt))
10668 {
10669 omp_extract_for_data (stmt, &fd, NULL);
10670 fdp = &fd;
10671
10672 /* We need two temporaries with fd.iter_type (istart/iend)
10673 and then (fd.collapse - 1) temporaries with the same
10674 type for count2 ... countN-1 vars if not constant. */
10675 size_t count = 2;
10676 tree type = fd.iter_type;
10677 if (fd.collapse > 1
10678 && TREE_CODE (fd.loop.n2) != INTEGER_CST)
10679 count += fd.collapse - 1;
10680 size_t count2 = 0;
10681 tree type2 = NULL_TREE;
10682 bool taskreg_for
10683 = (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_FOR
10684 || gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_TASKLOOP);
10685 tree outerc = NULL, *pc = gimple_omp_for_clauses_ptr (stmt);
10686 tree simtc = NULL;
10687 tree clauses = *pc;
10688 if (fd.collapse > 1
10689 && fd.non_rect
10690 && fd.last_nonrect == fd.first_nonrect + 1
10691 && TREE_CODE (fd.loop.n2) != INTEGER_CST)
10692 if (tree v = gimple_omp_for_index (stmt, fd.last_nonrect))
10693 if (!TYPE_UNSIGNED (TREE_TYPE (v)))
10694 {
10695 v = gimple_omp_for_index (stmt, fd.first_nonrect);
10696 type2 = TREE_TYPE (v);
10697 count++;
10698 count2 = 3;
10699 }
10700 if (taskreg_for)
10701 outerc
10702 = omp_find_clause (gimple_omp_taskreg_clauses (ctx->outer->stmt),
10703 OMP_CLAUSE__LOOPTEMP_);
10704 if (ctx->simt_stmt)
10705 simtc = omp_find_clause (gimple_omp_for_clauses (ctx->simt_stmt),
10706 OMP_CLAUSE__LOOPTEMP_);
10707 for (i = 0; i < count + count2; i++)
10708 {
10709 tree temp;
10710 if (taskreg_for)
10711 {
10712 gcc_assert (outerc);
10713 temp = lookup_decl (OMP_CLAUSE_DECL (outerc), ctx->outer);
10714 outerc = omp_find_clause (OMP_CLAUSE_CHAIN (outerc),
10715 OMP_CLAUSE__LOOPTEMP_);
10716 }
10717 else
10718 {
10719 /* If there are 2 adjacent SIMD stmts, one with _simt_
10720 clause, another without, make sure they have the same
10721 decls in _looptemp_ clauses, because the outer stmt
10722 they are combined into will look up just one inner_stmt. */
10723 if (ctx->simt_stmt)
10724 temp = OMP_CLAUSE_DECL (simtc);
10725 else
10726 temp = create_tmp_var (i >= count ? type2 : type);
10727 insert_decl_map (&ctx->outer->cb, temp, temp);
10728 }
10729 *pc = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__LOOPTEMP_);
10730 OMP_CLAUSE_DECL (*pc) = temp;
10731 pc = &OMP_CLAUSE_CHAIN (*pc);
10732 if (ctx->simt_stmt)
10733 simtc = omp_find_clause (OMP_CLAUSE_CHAIN (simtc),
10734 OMP_CLAUSE__LOOPTEMP_);
10735 }
10736 *pc = clauses;
10737 }
10738
10739 /* The pre-body and input clauses go before the lowered GIMPLE_OMP_FOR. */
10740 dlist = NULL;
10741 body = NULL;
10742 tree rclauses
10743 = omp_task_reductions_find_first (gimple_omp_for_clauses (stmt), OMP_FOR,
10744 OMP_CLAUSE_REDUCTION);
10745 tree rtmp = NULL_TREE;
10746 if (rclauses)
10747 {
10748 tree type = build_pointer_type (pointer_sized_int_node);
10749 tree temp = create_tmp_var (type);
10750 tree c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__REDUCTEMP_);
10751 OMP_CLAUSE_DECL (c) = temp;
10752 OMP_CLAUSE_CHAIN (c) = gimple_omp_for_clauses (stmt);
10753 gimple_omp_for_set_clauses (stmt, c);
10754 lower_omp_task_reductions (ctx, OMP_FOR,
10755 gimple_omp_for_clauses (stmt),
10756 &tred_ilist, &tred_dlist);
10757 rclauses = c;
10758 rtmp = make_ssa_name (type);
10759 gimple_seq_add_stmt (&body, gimple_build_assign (rtmp, temp));
10760 }
10761
10762 lower_lastprivate_conditional_clauses (gimple_omp_for_clauses_ptr (stmt),
10763 ctx);
10764
10765 lower_rec_input_clauses (gimple_omp_for_clauses (stmt), &body, &dlist, ctx,
10766 fdp);
10767 gimple_seq_add_seq (rclauses ? &tred_ilist : &body,
10768 gimple_omp_for_pre_body (stmt));
10769
10770 lower_omp (gimple_omp_body_ptr (stmt), ctx);
10771
10772 /* Lower the header expressions. At this point, we can assume that
10773 the header is of the form:
10774
10775 #pragma omp for (V = VAL1; V {<|>|<=|>=} VAL2; V = V [+-] VAL3)
10776
10777 We just need to make sure that VAL1, VAL2 and VAL3 are lowered
10778 using the .omp_data_s mapping, if needed. */
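/* For example (a sketch; D.1234 stands for a compiler temporary):

	#pragma omp for (i = 0; i < a + b; i++)

   has the non-invariant bound pulled out as

	D.1234 = a + b;
	#pragma omp for (i = 0; i < D.1234; i++)

   with the computation appended to CNT_LIST (routed to TRED_ILIST when
   task reductions are present, see below).  */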
10779 for (i = 0; i < gimple_omp_for_collapse (stmt); i++)
10780 {
10781 rhs_p = gimple_omp_for_initial_ptr (stmt, i);
10782 if (TREE_CODE (*rhs_p) == TREE_VEC)
10783 {
10784 if (!is_gimple_min_invariant (TREE_VEC_ELT (*rhs_p, 1)))
10785 TREE_VEC_ELT (*rhs_p, 1)
10786 = get_formal_tmp_var (TREE_VEC_ELT (*rhs_p, 1), &cnt_list);
10787 if (!is_gimple_min_invariant (TREE_VEC_ELT (*rhs_p, 2)))
10788 TREE_VEC_ELT (*rhs_p, 2)
10789 = get_formal_tmp_var (TREE_VEC_ELT (*rhs_p, 2), &cnt_list);
10790 }
10791 else if (!is_gimple_min_invariant (*rhs_p))
10792 *rhs_p = get_formal_tmp_var (*rhs_p, &cnt_list);
10793 else if (TREE_CODE (*rhs_p) == ADDR_EXPR)
10794 recompute_tree_invariant_for_addr_expr (*rhs_p);
10795
10796 rhs_p = gimple_omp_for_final_ptr (stmt, i);
10797 if (TREE_CODE (*rhs_p) == TREE_VEC)
10798 {
10799 if (!is_gimple_min_invariant (TREE_VEC_ELT (*rhs_p, 1)))
10800 TREE_VEC_ELT (*rhs_p, 1)
10801 = get_formal_tmp_var (TREE_VEC_ELT (*rhs_p, 1), &cnt_list);
10802 if (!is_gimple_min_invariant (TREE_VEC_ELT (*rhs_p, 2)))
10803 TREE_VEC_ELT (*rhs_p, 2)
10804 = get_formal_tmp_var (TREE_VEC_ELT (*rhs_p, 2), &cnt_list);
10805 }
10806 else if (!is_gimple_min_invariant (*rhs_p))
10807 *rhs_p = get_formal_tmp_var (*rhs_p, &cnt_list);
10808 else if (TREE_CODE (*rhs_p) == ADDR_EXPR)
10809 recompute_tree_invariant_for_addr_expr (*rhs_p);
10810
10811 rhs_p = &TREE_OPERAND (gimple_omp_for_incr (stmt, i), 1);
10812 if (!is_gimple_min_invariant (*rhs_p))
10813 *rhs_p = get_formal_tmp_var (*rhs_p, &cnt_list);
10814 }
10815 if (rclauses)
10816 gimple_seq_add_seq (&tred_ilist, cnt_list);
10817 else
10818 gimple_seq_add_seq (&body, cnt_list);
10819
10820 /* Once lowered, extract the bounds and clauses. */
10821 omp_extract_for_data (stmt, &fd, NULL);
10822
10823 if (is_gimple_omp_oacc (ctx->stmt)
10824 && !ctx_in_oacc_kernels_region (ctx))
10825 lower_oacc_head_tail (gimple_location (stmt),
10826 gimple_omp_for_clauses (stmt),
10827 &oacc_head, &oacc_tail, ctx);
10828
10829 /* Add OpenACC partitioning and reduction markers just before the loop. */
10830 if (oacc_head)
10831 gimple_seq_add_seq (&body, oacc_head);
10832
10833 lower_omp_for_lastprivate (&fd, &body, &dlist, &clist, ctx);
10834
10835 if (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_FOR)
10836 for (tree c = gimple_omp_for_clauses (stmt); c; c = OMP_CLAUSE_CHAIN (c))
10837 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
10838 && !OMP_CLAUSE_LINEAR_NO_COPYIN (c))
10839 {
10840 OMP_CLAUSE_DECL (c) = lookup_decl (OMP_CLAUSE_DECL (c), ctx);
10841 if (DECL_P (OMP_CLAUSE_LINEAR_STEP (c)))
10842 OMP_CLAUSE_LINEAR_STEP (c)
10843 = maybe_lookup_decl_in_outer_ctx (OMP_CLAUSE_LINEAR_STEP (c),
10844 ctx);
10845 }
10846
10847 if ((ctx->scan_inclusive || ctx->scan_exclusive)
10848 && gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_FOR)
10849 lower_omp_for_scan (&body, &dlist, stmt, &fd, ctx);
10850 else
10851 {
10852 gimple_seq_add_stmt (&body, stmt);
10853 gimple_seq_add_seq (&body, gimple_omp_body (stmt));
10854 }
10855
10856 gimple_seq_add_stmt (&body, gimple_build_omp_continue (fd.loop.v,
10857 fd.loop.v));
10858
10859 /* After the loop, add exit clauses. */
10860 lower_reduction_clauses (gimple_omp_for_clauses (stmt), &body, &clist, ctx);
10861
10862 if (clist)
10863 {
10864 tree fndecl = builtin_decl_explicit (BUILT_IN_GOMP_ATOMIC_START);
10865 gcall *g = gimple_build_call (fndecl, 0);
10866 gimple_seq_add_stmt (&body, g);
10867 gimple_seq_add_seq (&body, clist);
10868 fndecl = builtin_decl_explicit (BUILT_IN_GOMP_ATOMIC_END);
10869 g = gimple_build_call (fndecl, 0);
10870 gimple_seq_add_stmt (&body, g);
10871 }
10872
10873 if (ctx->cancellable)
10874 gimple_seq_add_stmt (&body, gimple_build_label (ctx->cancel_label));
10875
10876 gimple_seq_add_seq (&body, dlist);
10877
10878 if (rclauses)
10879 {
10880 gimple_seq_add_seq (&tred_ilist, body);
10881 body = tred_ilist;
10882 }
10883
10884 body = maybe_catch_exception (body);
10885
10886 /* Region exit marker goes at the end of the loop body. */
10887 gimple *g = gimple_build_omp_return (fd.have_nowait);
10888 gimple_seq_add_stmt (&body, g);
10889
10890 gimple_seq_add_seq (&body, tred_dlist);
10891
10892 maybe_add_implicit_barrier_cancel (ctx, g, &body);
10893
10894 if (rclauses)
10895 OMP_CLAUSE_DECL (rclauses) = rtmp;
10896
10897 /* Add OpenACC joining and reduction markers just after the loop. */
10898 if (oacc_tail)
10899 gimple_seq_add_seq (&body, oacc_tail);
10900
10901 pop_gimplify_context (new_stmt);
10902
10903 gimple_bind_append_vars (new_stmt, ctx->block_vars);
10904 maybe_remove_omp_member_access_dummy_vars (new_stmt);
10905 BLOCK_VARS (block) = gimple_bind_vars (new_stmt);
10906 if (BLOCK_VARS (block))
10907 TREE_USED (block) = 1;
10908
10909 gimple_bind_set_body (new_stmt, body);
10910 gimple_omp_set_body (stmt, NULL);
10911 gimple_omp_for_set_pre_body (stmt, NULL);
10912 }
10913
10914 /* Callback for walk_stmts. Sets *WI->INFO to 1 if the walked statements
10915 consist of exactly one GIMPLE_OMP_FOR or GIMPLE_OMP_SECTIONS, -1 otherwise. */
10916
10917 static tree
10918 check_combined_parallel (gimple_stmt_iterator *gsi_p,
10919 bool *handled_ops_p,
10920 struct walk_stmt_info *wi)
10921 {
10922 int *info = (int *) wi->info;
10923 gimple *stmt = gsi_stmt (*gsi_p);
10924
10925 *handled_ops_p = true;
10926 switch (gimple_code (stmt))
10927 {
10928 WALK_SUBSTMTS;
10929
10930 case GIMPLE_DEBUG:
10931 break;
10932 case GIMPLE_OMP_FOR:
10933 case GIMPLE_OMP_SECTIONS:
10934 *info = *info == 0 ? 1 : -1;
10935 break;
10936 default:
10937 *info = -1;
10938 break;
10939 }
10940 return NULL;
10941 }
10942
10943 struct omp_taskcopy_context
10944 {
10945 /* This field must be at the beginning, as we do "inheritance": Some
10946 callback functions for tree-inline.c (e.g., omp_copy_decl)
10947 receive a copy_body_data pointer that is up-casted to an
10948 omp_context pointer. */
10949 copy_body_data cb;
10950 omp_context *ctx;
10951 };
10952
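/* copy_body_data callback used while building the task copyfn: a decl
   that has a field in the task's shared record (sfield_map) is replaced
   by a fresh temporary; anything else is returned unchanged.  */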
10953 static tree
10954 task_copyfn_copy_decl (tree var, copy_body_data *cb)
10955 {
10956 struct omp_taskcopy_context *tcctx = (struct omp_taskcopy_context *) cb;
10957
10958 if (splay_tree_lookup (tcctx->ctx->sfield_map, (splay_tree_key) var))
10959 return create_tmp_var (TREE_TYPE (var));
10960
10961 return var;
10962 }
10963
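/* Build a copy of ORIG_TYPE with the field types and the size/offset
   trees of its fields remapped through TCCTX->cb; used when the record
   contains variably modified types referring to decls of the source
   function.  */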
10964 static tree
10965 task_copyfn_remap_type (struct omp_taskcopy_context *tcctx, tree orig_type)
10966 {
10967 tree name, new_fields = NULL, type, f;
10968
10969 type = lang_hooks.types.make_type (RECORD_TYPE);
10970 name = DECL_NAME (TYPE_NAME (orig_type));
10971 name = build_decl (gimple_location (tcctx->ctx->stmt),
10972 TYPE_DECL, name, type);
10973 TYPE_NAME (type) = name;
10974
10975 for (f = TYPE_FIELDS (orig_type); f ; f = TREE_CHAIN (f))
10976 {
10977 tree new_f = copy_node (f);
10978 DECL_CONTEXT (new_f) = type;
10979 TREE_TYPE (new_f) = remap_type (TREE_TYPE (f), &tcctx->cb);
10980 TREE_CHAIN (new_f) = new_fields;
10981 walk_tree (&DECL_SIZE (new_f), copy_tree_body_r, &tcctx->cb, NULL);
10982 walk_tree (&DECL_SIZE_UNIT (new_f), copy_tree_body_r, &tcctx->cb, NULL);
10983 walk_tree (&DECL_FIELD_OFFSET (new_f), copy_tree_body_r,
10984 &tcctx->cb, NULL);
10985 new_fields = new_f;
10986 tcctx->cb.decl_map->put (f, new_f);
10987 }
10988 TYPE_FIELDS (type) = nreverse (new_fields);
10989 layout_type (type);
10990 return type;
10991 }
10992
10993 /* Create the task copyfn, which copies firstprivate/shared data from the structure filled in by the encountering thread (*SARG) into the task's own data block (*ARG). */
10994
10995 static void
10996 create_task_copyfn (gomp_task *task_stmt, omp_context *ctx)
10997 {
10998 struct function *child_cfun;
10999 tree child_fn, t, c, src, dst, f, sf, arg, sarg, decl;
11000 tree record_type, srecord_type, bind, list;
11001 bool record_needs_remap = false, srecord_needs_remap = false;
11002 splay_tree_node n;
11003 struct omp_taskcopy_context tcctx;
11004 location_t loc = gimple_location (task_stmt);
11005 size_t looptempno = 0;
11006
11007 child_fn = gimple_omp_task_copy_fn (task_stmt);
11008 child_cfun = DECL_STRUCT_FUNCTION (child_fn);
11009 gcc_assert (child_cfun->cfg == NULL);
11010 DECL_SAVED_TREE (child_fn) = alloc_stmt_list ();
11011
11012 /* Reset DECL_CONTEXT on function arguments. */
11013 for (t = DECL_ARGUMENTS (child_fn); t; t = DECL_CHAIN (t))
11014 DECL_CONTEXT (t) = child_fn;
11015
11016 /* Populate the function. */
11017 push_gimplify_context ();
11018 push_cfun (child_cfun);
11019
11020 bind = build3 (BIND_EXPR, void_type_node, NULL, NULL, NULL);
11021 TREE_SIDE_EFFECTS (bind) = 1;
11022 list = NULL;
11023 DECL_SAVED_TREE (child_fn) = bind;
11024 DECL_SOURCE_LOCATION (child_fn) = gimple_location (task_stmt);
11025
11026 /* Remap src and dst argument types if needed. */
11027 record_type = ctx->record_type;
11028 srecord_type = ctx->srecord_type;
11029 for (f = TYPE_FIELDS (record_type); f ; f = DECL_CHAIN (f))
11030 if (variably_modified_type_p (TREE_TYPE (f), ctx->cb.src_fn))
11031 {
11032 record_needs_remap = true;
11033 break;
11034 }
11035 for (f = TYPE_FIELDS (srecord_type); f ; f = DECL_CHAIN (f))
11036 if (variably_modified_type_p (TREE_TYPE (f), ctx->cb.src_fn))
11037 {
11038 srecord_needs_remap = true;
11039 break;
11040 }
11041
11042 if (record_needs_remap || srecord_needs_remap)
11043 {
11044 memset (&tcctx, '\0', sizeof (tcctx));
11045 tcctx.cb.src_fn = ctx->cb.src_fn;
11046 tcctx.cb.dst_fn = child_fn;
11047 tcctx.cb.src_node = cgraph_node::get (tcctx.cb.src_fn);
11048 gcc_checking_assert (tcctx.cb.src_node);
11049 tcctx.cb.dst_node = tcctx.cb.src_node;
11050 tcctx.cb.src_cfun = ctx->cb.src_cfun;
11051 tcctx.cb.copy_decl = task_copyfn_copy_decl;
11052 tcctx.cb.eh_lp_nr = 0;
11053 tcctx.cb.transform_call_graph_edges = CB_CGE_MOVE;
11054 tcctx.cb.decl_map = new hash_map<tree, tree>;
11055 tcctx.ctx = ctx;
11056
11057 if (record_needs_remap)
11058 record_type = task_copyfn_remap_type (&tcctx, record_type);
11059 if (srecord_needs_remap)
11060 srecord_type = task_copyfn_remap_type (&tcctx, srecord_type);
11061 }
11062 else
11063 tcctx.cb.decl_map = NULL;
11064
11065 arg = DECL_ARGUMENTS (child_fn);
11066 TREE_TYPE (arg) = build_pointer_type (record_type);
11067 sarg = DECL_CHAIN (arg);
11068 TREE_TYPE (sarg) = build_pointer_type (srecord_type);
11069
11070 /* First pass: initialize temporaries used in record_type and srecord_type
11071 sizes and field offsets. */
11072 if (tcctx.cb.decl_map)
11073 for (c = gimple_omp_task_clauses (task_stmt); c; c = OMP_CLAUSE_CHAIN (c))
11074 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
11075 {
11076 tree *p;
11077
11078 decl = OMP_CLAUSE_DECL (c);
11079 p = tcctx.cb.decl_map->get (decl);
11080 if (p == NULL)
11081 continue;
11082 n = splay_tree_lookup (ctx->sfield_map, (splay_tree_key) decl);
11083 sf = (tree) n->value;
11084 sf = *tcctx.cb.decl_map->get (sf);
11085 src = build_simple_mem_ref_loc (loc, sarg);
11086 src = omp_build_component_ref (src, sf);
11087 t = build2 (MODIFY_EXPR, TREE_TYPE (*p), *p, src);
11088 append_to_statement_list (t, &list);
11089 }
11090
11091 /* Second pass: copy shared var pointers and copy-construct non-VLA
11092 firstprivate vars. */
11093 for (c = gimple_omp_task_clauses (task_stmt); c; c = OMP_CLAUSE_CHAIN (c))
11094 switch (OMP_CLAUSE_CODE (c))
11095 {
11096 splay_tree_key key;
11097 case OMP_CLAUSE_SHARED:
11098 decl = OMP_CLAUSE_DECL (c);
11099 key = (splay_tree_key) decl;
11100 if (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
11101 key = (splay_tree_key) &DECL_UID (decl);
11102 n = splay_tree_lookup (ctx->field_map, key);
11103 if (n == NULL)
11104 break;
11105 f = (tree) n->value;
11106 if (tcctx.cb.decl_map)
11107 f = *tcctx.cb.decl_map->get (f);
11108 n = splay_tree_lookup (ctx->sfield_map, key);
11109 sf = (tree) n->value;
11110 if (tcctx.cb.decl_map)
11111 sf = *tcctx.cb.decl_map->get (sf);
11112 src = build_simple_mem_ref_loc (loc, sarg);
11113 src = omp_build_component_ref (src, sf);
11114 dst = build_simple_mem_ref_loc (loc, arg);
11115 dst = omp_build_component_ref (dst, f);
11116 t = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
11117 append_to_statement_list (t, &list);
11118 break;
11119 case OMP_CLAUSE_REDUCTION:
11120 case OMP_CLAUSE_IN_REDUCTION:
11121 decl = OMP_CLAUSE_DECL (c);
11122 if (TREE_CODE (decl) == MEM_REF)
11123 {
11124 decl = TREE_OPERAND (decl, 0);
11125 if (TREE_CODE (decl) == POINTER_PLUS_EXPR)
11126 decl = TREE_OPERAND (decl, 0);
11127 if (TREE_CODE (decl) == INDIRECT_REF
11128 || TREE_CODE (decl) == ADDR_EXPR)
11129 decl = TREE_OPERAND (decl, 0);
11130 }
11131 key = (splay_tree_key) decl;
11132 n = splay_tree_lookup (ctx->field_map, key);
11133 if (n == NULL)
11134 break;
11135 f = (tree) n->value;
11136 if (tcctx.cb.decl_map)
11137 f = *tcctx.cb.decl_map->get (f);
11138 n = splay_tree_lookup (ctx->sfield_map, key);
11139 sf = (tree) n->value;
11140 if (tcctx.cb.decl_map)
11141 sf = *tcctx.cb.decl_map->get (sf);
11142 src = build_simple_mem_ref_loc (loc, sarg);
11143 src = omp_build_component_ref (src, sf);
11144 if (decl != OMP_CLAUSE_DECL (c)
11145 && TREE_CODE (TREE_TYPE (decl)) == REFERENCE_TYPE
11146 && TREE_CODE (TREE_TYPE (TREE_TYPE (decl))) == POINTER_TYPE)
11147 src = build_simple_mem_ref_loc (loc, src);
11148 dst = build_simple_mem_ref_loc (loc, arg);
11149 dst = omp_build_component_ref (dst, f);
11150 t = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
11151 append_to_statement_list (t, &list);
11152 break;
11153 case OMP_CLAUSE__LOOPTEMP_:
11154 /* Fields for the first two _looptemp_ clauses are initialized by
11155 GOMP_taskloop*; the rest are handled like firstprivate. */
11156 if (looptempno < 2)
11157 {
11158 looptempno++;
11159 break;
11160 }
11161 /* FALLTHRU */
11162 case OMP_CLAUSE__REDUCTEMP_:
11163 case OMP_CLAUSE_FIRSTPRIVATE:
11164 decl = OMP_CLAUSE_DECL (c);
11165 if (is_variable_sized (decl))
11166 break;
11167 n = splay_tree_lookup (ctx->field_map, (splay_tree_key) decl);
11168 if (n == NULL)
11169 break;
11170 f = (tree) n->value;
11171 if (tcctx.cb.decl_map)
11172 f = *tcctx.cb.decl_map->get (f);
11173 n = splay_tree_lookup (ctx->sfield_map, (splay_tree_key) decl);
11174 if (n != NULL)
11175 {
11176 sf = (tree) n->value;
11177 if (tcctx.cb.decl_map)
11178 sf = *tcctx.cb.decl_map->get (sf);
11179 src = build_simple_mem_ref_loc (loc, sarg);
11180 src = omp_build_component_ref (src, sf);
11181 if (use_pointer_for_field (decl, NULL) || omp_is_reference (decl))
11182 src = build_simple_mem_ref_loc (loc, src);
11183 }
11184 else
11185 src = decl;
11186 dst = build_simple_mem_ref_loc (loc, arg);
11187 dst = omp_build_component_ref (dst, f);
11188 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_FIRSTPRIVATE)
11189 t = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
11190 else
11191 t = lang_hooks.decls.omp_clause_copy_ctor (c, dst, src);
11192 append_to_statement_list (t, &list);
11193 break;
11194 case OMP_CLAUSE_PRIVATE:
11195 if (! OMP_CLAUSE_PRIVATE_OUTER_REF (c))
11196 break;
11197 decl = OMP_CLAUSE_DECL (c);
11198 n = splay_tree_lookup (ctx->field_map, (splay_tree_key) decl);
11199 f = (tree) n->value;
11200 if (tcctx.cb.decl_map)
11201 f = *tcctx.cb.decl_map->get (f);
11202 n = splay_tree_lookup (ctx->sfield_map, (splay_tree_key) decl);
11203 if (n != NULL)
11204 {
11205 sf = (tree) n->value;
11206 if (tcctx.cb.decl_map)
11207 sf = *tcctx.cb.decl_map->get (sf);
11208 src = build_simple_mem_ref_loc (loc, sarg);
11209 src = omp_build_component_ref (src, sf);
11210 if (use_pointer_for_field (decl, NULL))
11211 src = build_simple_mem_ref_loc (loc, src);
11212 }
11213 else
11214 src = decl;
11215 dst = build_simple_mem_ref_loc (loc, arg);
11216 dst = omp_build_component_ref (dst, f);
11217 t = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
11218 append_to_statement_list (t, &list);
11219 break;
11220 default:
11221 break;
11222 }
11223
11224 /* Last pass: handle VLA firstprivates. */
11225 if (tcctx.cb.decl_map)
11226 for (c = gimple_omp_task_clauses (task_stmt); c; c = OMP_CLAUSE_CHAIN (c))
11227 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
11228 {
11229 tree ind, ptr, df;
11230
11231 decl = OMP_CLAUSE_DECL (c);
11232 if (!is_variable_sized (decl))
11233 continue;
11234 n = splay_tree_lookup (ctx->field_map, (splay_tree_key) decl);
11235 if (n == NULL)
11236 continue;
11237 f = (tree) n->value;
11238 f = *tcctx.cb.decl_map->get (f);
11239 gcc_assert (DECL_HAS_VALUE_EXPR_P (decl));
11240 ind = DECL_VALUE_EXPR (decl);
11241 gcc_assert (TREE_CODE (ind) == INDIRECT_REF);
11242 gcc_assert (DECL_P (TREE_OPERAND (ind, 0)));
11243 n = splay_tree_lookup (ctx->sfield_map,
11244 (splay_tree_key) TREE_OPERAND (ind, 0));
11245 sf = (tree) n->value;
11246 sf = *tcctx.cb.decl_map->get (sf);
11247 src = build_simple_mem_ref_loc (loc, sarg);
11248 src = omp_build_component_ref (src, sf);
11249 src = build_simple_mem_ref_loc (loc, src);
11250 dst = build_simple_mem_ref_loc (loc, arg);
11251 dst = omp_build_component_ref (dst, f);
11252 t = lang_hooks.decls.omp_clause_copy_ctor (c, dst, src);
11253 append_to_statement_list (t, &list);
11254 n = splay_tree_lookup (ctx->field_map,
11255 (splay_tree_key) TREE_OPERAND (ind, 0));
11256 df = (tree) n->value;
11257 df = *tcctx.cb.decl_map->get (df);
11258 ptr = build_simple_mem_ref_loc (loc, arg);
11259 ptr = omp_build_component_ref (ptr, df);
11260 t = build2 (MODIFY_EXPR, TREE_TYPE (ptr), ptr,
11261 build_fold_addr_expr_loc (loc, dst));
11262 append_to_statement_list (t, &list);
11263 }
11264
11265 t = build1 (RETURN_EXPR, void_type_node, NULL);
11266 append_to_statement_list (t, &list);
11267
11268 if (tcctx.cb.decl_map)
11269 delete tcctx.cb.decl_map;
11270 pop_gimplify_context (NULL);
11271 BIND_EXPR_BODY (bind) = list;
11272 pop_cfun ();
11273 }
11274
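/* Lower OMP_CLAUSE_DEPEND clauses into the flat array handed to the
   runtime.  A sketch of the layout built below (as reconstructed from
   this function, with the ADDR entries grouped in the order out/inout,
   mutexinoutset, in, depobj):

     only in/out/inout deps:   [ total, n_out_inout, ADDR... ]
     otherwise (idx == 5):     [ 0, total, n_out_inout,
				 n_mutexinoutset, n_in, ADDR... ]

   A single OMP_CLAUSE_DEPEND_LAST clause pointing at the array is then
   prepended to the clause chain.  */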
11275 static void
11276 lower_depend_clauses (tree *pclauses, gimple_seq *iseq, gimple_seq *oseq)
11277 {
11278 tree c, clauses;
11279 gimple *g;
11280 size_t cnt[4] = { 0, 0, 0, 0 }, idx = 2, i;
11281
11282 clauses = omp_find_clause (*pclauses, OMP_CLAUSE_DEPEND);
11283 gcc_assert (clauses);
11284 for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
11285 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND)
11286 switch (OMP_CLAUSE_DEPEND_KIND (c))
11287 {
11288 case OMP_CLAUSE_DEPEND_LAST:
11289 /* Lowering already done at gimplification. */
11290 return;
11291 case OMP_CLAUSE_DEPEND_IN:
11292 cnt[2]++;
11293 break;
11294 case OMP_CLAUSE_DEPEND_OUT:
11295 case OMP_CLAUSE_DEPEND_INOUT:
11296 cnt[0]++;
11297 break;
11298 case OMP_CLAUSE_DEPEND_MUTEXINOUTSET:
11299 cnt[1]++;
11300 break;
11301 case OMP_CLAUSE_DEPEND_DEPOBJ:
11302 cnt[3]++;
11303 break;
11304 case OMP_CLAUSE_DEPEND_SOURCE:
11305 case OMP_CLAUSE_DEPEND_SINK:
11306 /* FALLTHRU */
11307 default:
11308 gcc_unreachable ();
11309 }
11310 if (cnt[1] || cnt[3])
11311 idx = 5;
11312 size_t total = cnt[0] + cnt[1] + cnt[2] + cnt[3];
11313 tree type = build_array_type_nelts (ptr_type_node, total + idx);
11314 tree array = create_tmp_var (type);
11315 TREE_ADDRESSABLE (array) = 1;
11316 tree r = build4 (ARRAY_REF, ptr_type_node, array, size_int (0), NULL_TREE,
11317 NULL_TREE);
11318 if (idx == 5)
11319 {
11320 g = gimple_build_assign (r, build_int_cst (ptr_type_node, 0));
11321 gimple_seq_add_stmt (iseq, g);
11322 r = build4 (ARRAY_REF, ptr_type_node, array, size_int (1), NULL_TREE,
11323 NULL_TREE);
11324 }
11325 g = gimple_build_assign (r, build_int_cst (ptr_type_node, total));
11326 gimple_seq_add_stmt (iseq, g);
11327 for (i = 0; i < (idx == 5 ? 3 : 1); i++)
11328 {
11329 r = build4 (ARRAY_REF, ptr_type_node, array,
11330 size_int (i + 1 + (idx == 5)), NULL_TREE, NULL_TREE);
11331 g = gimple_build_assign (r, build_int_cst (ptr_type_node, cnt[i]));
11332 gimple_seq_add_stmt (iseq, g);
11333 }
11334 for (i = 0; i < 4; i++)
11335 {
11336 if (cnt[i] == 0)
11337 continue;
11338 for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
11339 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_DEPEND)
11340 continue;
11341 else
11342 {
11343 switch (OMP_CLAUSE_DEPEND_KIND (c))
11344 {
11345 case OMP_CLAUSE_DEPEND_IN:
11346 if (i != 2)
11347 continue;
11348 break;
11349 case OMP_CLAUSE_DEPEND_OUT:
11350 case OMP_CLAUSE_DEPEND_INOUT:
11351 if (i != 0)
11352 continue;
11353 break;
11354 case OMP_CLAUSE_DEPEND_MUTEXINOUTSET:
11355 if (i != 1)
11356 continue;
11357 break;
11358 case OMP_CLAUSE_DEPEND_DEPOBJ:
11359 if (i != 3)
11360 continue;
11361 break;
11362 default:
11363 gcc_unreachable ();
11364 }
11365 tree t = OMP_CLAUSE_DECL (c);
11366 t = fold_convert (ptr_type_node, t);
11367 gimplify_expr (&t, iseq, NULL, is_gimple_val, fb_rvalue);
11368 r = build4 (ARRAY_REF, ptr_type_node, array, size_int (idx++),
11369 NULL_TREE, NULL_TREE);
11370 g = gimple_build_assign (r, t);
11371 gimple_seq_add_stmt (iseq, g);
11372 }
11373 }
11374 c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE_DEPEND);
11375 OMP_CLAUSE_DEPEND_KIND (c) = OMP_CLAUSE_DEPEND_LAST;
11376 OMP_CLAUSE_DECL (c) = build_fold_addr_expr (array);
11377 OMP_CLAUSE_CHAIN (c) = *pclauses;
11378 *pclauses = c;
11379 tree clobber = build_clobber (type);
11380 g = gimple_build_assign (array, clobber);
11381 gimple_seq_add_stmt (oseq, g);
11382 }
11383
11384 /* Lower the OpenMP parallel or task directive in the current statement
11385 in GSI_P. CTX holds context information for the directive. */
11386
11387 static void
11388 lower_omp_taskreg (gimple_stmt_iterator *gsi_p, omp_context *ctx)
11389 {
11390 tree clauses;
11391 tree child_fn, t;
11392 gimple *stmt = gsi_stmt (*gsi_p);
11393 gbind *par_bind, *bind, *dep_bind = NULL;
11394 gimple_seq par_body;
11395 location_t loc = gimple_location (stmt);
11396
11397 clauses = gimple_omp_taskreg_clauses (stmt);
11398 if (gimple_code (stmt) == GIMPLE_OMP_TASK
11399 && gimple_omp_task_taskwait_p (stmt))
11400 {
11401 par_bind = NULL;
11402 par_body = NULL;
11403 }
11404 else
11405 {
11406 par_bind
11407 = as_a <gbind *> (gimple_seq_first_stmt (gimple_omp_body (stmt)));
11408 par_body = gimple_bind_body (par_bind);
11409 }
11410 child_fn = ctx->cb.dst_fn;
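/* If the body of a non-combined parallel consists of exactly one
   worksharing construct and nothing else, mark it combined here,
   presumably so that expansion can use the combined
   parallel + workshare runtime entry points.  */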
11411 if (gimple_code (stmt) == GIMPLE_OMP_PARALLEL
11412 && !gimple_omp_parallel_combined_p (stmt))
11413 {
11414 struct walk_stmt_info wi;
11415 int ws_num = 0;
11416
11417 memset (&wi, 0, sizeof (wi));
11418 wi.info = &ws_num;
11419 wi.val_only = true;
11420 walk_gimple_seq (par_body, check_combined_parallel, NULL, &wi);
11421 if (ws_num == 1)
11422 gimple_omp_parallel_set_combined_p (stmt, true);
11423 }
11424 gimple_seq dep_ilist = NULL;
11425 gimple_seq dep_olist = NULL;
11426 if (gimple_code (stmt) == GIMPLE_OMP_TASK
11427 && omp_find_clause (clauses, OMP_CLAUSE_DEPEND))
11428 {
11429 push_gimplify_context ();
11430 dep_bind = gimple_build_bind (NULL, NULL, make_node (BLOCK));
11431 lower_depend_clauses (gimple_omp_task_clauses_ptr (stmt),
11432 &dep_ilist, &dep_olist);
11433 }
11434
11435 if (gimple_code (stmt) == GIMPLE_OMP_TASK
11436 && gimple_omp_task_taskwait_p (stmt))
11437 {
11438 if (dep_bind)
11439 {
11440 gsi_replace (gsi_p, dep_bind, true);
11441 gimple_bind_add_seq (dep_bind, dep_ilist);
11442 gimple_bind_add_stmt (dep_bind, stmt);
11443 gimple_bind_add_seq (dep_bind, dep_olist);
11444 pop_gimplify_context (dep_bind);
11445 }
11446 return;
11447 }
11448
11449 if (ctx->srecord_type)
11450 create_task_copyfn (as_a <gomp_task *> (stmt), ctx);
11451
11452 gimple_seq tskred_ilist = NULL;
11453 gimple_seq tskred_olist = NULL;
11454 if ((is_task_ctx (ctx)
11455 && gimple_omp_task_taskloop_p (ctx->stmt)
11456 && omp_find_clause (gimple_omp_task_clauses (ctx->stmt),
11457 OMP_CLAUSE_REDUCTION))
11458 || (is_parallel_ctx (ctx)
11459 && omp_find_clause (gimple_omp_parallel_clauses (stmt),
11460 OMP_CLAUSE__REDUCTEMP_)))
11461 {
11462 if (dep_bind == NULL)
11463 {
11464 push_gimplify_context ();
11465 dep_bind = gimple_build_bind (NULL, NULL, make_node (BLOCK));
11466 }
11467 lower_omp_task_reductions (ctx, is_task_ctx (ctx) ? OMP_TASKLOOP
11468 : OMP_PARALLEL,
11469 gimple_omp_taskreg_clauses (ctx->stmt),
11470 &tskred_ilist, &tskred_olist);
11471 }
11472
11473 push_gimplify_context ();
11474
11475 gimple_seq par_olist = NULL;
11476 gimple_seq par_ilist = NULL;
11477 gimple_seq par_rlist = NULL;
11478 lower_rec_input_clauses (clauses, &par_ilist, &par_olist, ctx, NULL);
11479 lower_omp (&par_body, ctx);
11480 if (gimple_code (stmt) != GIMPLE_OMP_TASK)
11481 lower_reduction_clauses (clauses, &par_rlist, NULL, ctx);
11482
11483 /* Declare all the variables created by mapping and the variables
11484 declared in the scope of the parallel body. */
11485 record_vars_into (ctx->block_vars, child_fn);
11486 maybe_remove_omp_member_access_dummy_vars (par_bind);
11487 record_vars_into (gimple_bind_vars (par_bind), child_fn);
11488
11489 if (ctx->record_type)
11490 {
11491 ctx->sender_decl
11492 = create_tmp_var (ctx->srecord_type ? ctx->srecord_type
11493 : ctx->record_type, ".omp_data_o");
11494 DECL_NAMELESS (ctx->sender_decl) = 1;
11495 TREE_ADDRESSABLE (ctx->sender_decl) = 1;
11496 gimple_omp_taskreg_set_data_arg (stmt, ctx->sender_decl);
11497 }
11498
11499 gimple_seq olist = NULL;
11500 gimple_seq ilist = NULL;
11501 lower_send_clauses (clauses, &ilist, &olist, ctx);
11502 lower_send_shared_vars (&ilist, &olist, ctx);
11503
11504 if (ctx->record_type)
11505 {
11506 tree clobber = build_clobber (TREE_TYPE (ctx->sender_decl));
11507 gimple_seq_add_stmt (&olist, gimple_build_assign (ctx->sender_decl,
11508 clobber));
11509 }
11510
11511 /* Once all the expansions are done, sequence all the different
11512 fragments inside gimple_omp_body. */
11513
11514 gimple_seq new_body = NULL;
11515
11516 if (ctx->record_type)
11517 {
11518 t = build_fold_addr_expr_loc (loc, ctx->sender_decl);
11519 /* fixup_child_record_type might have changed receiver_decl's type. */
11520 t = fold_convert_loc (loc, TREE_TYPE (ctx->receiver_decl), t);
11521 gimple_seq_add_stmt (&new_body,
11522 gimple_build_assign (ctx->receiver_decl, t));
11523 }
11524
11525 gimple_seq_add_seq (&new_body, par_ilist);
11526 gimple_seq_add_seq (&new_body, par_body);
11527 gimple_seq_add_seq (&new_body, par_rlist);
11528 if (ctx->cancellable)
11529 gimple_seq_add_stmt (&new_body, gimple_build_label (ctx->cancel_label));
11530 gimple_seq_add_seq (&new_body, par_olist);
11531 new_body = maybe_catch_exception (new_body);
11532 if (gimple_code (stmt) == GIMPLE_OMP_TASK)
11533 gimple_seq_add_stmt (&new_body,
11534 gimple_build_omp_continue (integer_zero_node,
11535 integer_zero_node));
11536 gimple_seq_add_stmt (&new_body, gimple_build_omp_return (false));
11537 gimple_omp_set_body (stmt, new_body);
11538
11539 if (dep_bind && gimple_bind_block (par_bind) == NULL_TREE)
11540 bind = gimple_build_bind (NULL, NULL, make_node (BLOCK));
11541 else
11542 bind = gimple_build_bind (NULL, NULL, gimple_bind_block (par_bind));
11543 gsi_replace (gsi_p, dep_bind ? dep_bind : bind, true);
11544 gimple_bind_add_seq (bind, ilist);
11545 gimple_bind_add_stmt (bind, stmt);
11546 gimple_bind_add_seq (bind, olist);
11547
11548 pop_gimplify_context (NULL);
11549
11550 if (dep_bind)
11551 {
11552 gimple_bind_add_seq (dep_bind, dep_ilist);
11553 gimple_bind_add_seq (dep_bind, tskred_ilist);
11554 gimple_bind_add_stmt (dep_bind, bind);
11555 gimple_bind_add_seq (dep_bind, tskred_olist);
11556 gimple_bind_add_seq (dep_bind, dep_olist);
11557 pop_gimplify_context (dep_bind);
11558 }
11559 }
11560
11561 /* Lower the GIMPLE_OMP_TARGET in the current statement
11562 in GSI_P. CTX holds context information for the directive. */
11563
11564 static void
11565 lower_omp_target (gimple_stmt_iterator *gsi_p, omp_context *ctx)
11566 {
11567 tree clauses;
11568 tree child_fn, t, c;
11569 gomp_target *stmt = as_a <gomp_target *> (gsi_stmt (*gsi_p));
11570 gbind *tgt_bind, *bind, *dep_bind = NULL;
11571 gimple_seq tgt_body, olist, ilist, fplist, new_body;
11572 location_t loc = gimple_location (stmt);
11573 bool offloaded, data_region;
11574 unsigned int map_cnt = 0;
11575
11576 offloaded = is_gimple_omp_offloaded (stmt);
11577 switch (gimple_omp_target_kind (stmt))
11578 {
11579 case GF_OMP_TARGET_KIND_REGION:
11580 case GF_OMP_TARGET_KIND_UPDATE:
11581 case GF_OMP_TARGET_KIND_ENTER_DATA:
11582 case GF_OMP_TARGET_KIND_EXIT_DATA:
11583 case GF_OMP_TARGET_KIND_OACC_PARALLEL:
11584 case GF_OMP_TARGET_KIND_OACC_KERNELS:
11585 case GF_OMP_TARGET_KIND_OACC_SERIAL:
11586 case GF_OMP_TARGET_KIND_OACC_UPDATE:
11587 case GF_OMP_TARGET_KIND_OACC_ENTER_EXIT_DATA:
11588 case GF_OMP_TARGET_KIND_OACC_DECLARE:
11589 data_region = false;
11590 break;
11591 case GF_OMP_TARGET_KIND_DATA:
11592 case GF_OMP_TARGET_KIND_OACC_DATA:
11593 case GF_OMP_TARGET_KIND_OACC_HOST_DATA:
11594 data_region = true;
11595 break;
11596 default:
11597 gcc_unreachable ();
11598 }
11599
11600 clauses = gimple_omp_target_clauses (stmt);
11601
11602 gimple_seq dep_ilist = NULL;
11603 gimple_seq dep_olist = NULL;
11604 if (omp_find_clause (clauses, OMP_CLAUSE_DEPEND))
11605 {
11606 push_gimplify_context ();
11607 dep_bind = gimple_build_bind (NULL, NULL, make_node (BLOCK));
11608 lower_depend_clauses (gimple_omp_target_clauses_ptr (stmt),
11609 &dep_ilist, &dep_olist);
11610 }
11611
11612 tgt_bind = NULL;
11613 tgt_body = NULL;
11614 if (offloaded)
11615 {
11616 tgt_bind = gimple_seq_first_stmt_as_a_bind (gimple_omp_body (stmt));
11617 tgt_body = gimple_bind_body (tgt_bind);
11618 }
11619 else if (data_region)
11620 tgt_body = gimple_omp_body (stmt);
11621 child_fn = ctx->cb.dst_fn;
11622
11623 push_gimplify_context ();
11624 fplist = NULL;
11625
11626 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
11627 switch (OMP_CLAUSE_CODE (c))
11628 {
11629 tree var, x;
11630
11631 default:
11632 break;
11633 case OMP_CLAUSE_MAP:
11634 #if CHECKING_P
11635 /* First check what we're prepared to handle in the following. */
11636 switch (OMP_CLAUSE_MAP_KIND (c))
11637 {
11638 case GOMP_MAP_ALLOC:
11639 case GOMP_MAP_TO:
11640 case GOMP_MAP_FROM:
11641 case GOMP_MAP_TOFROM:
11642 case GOMP_MAP_POINTER:
11643 case GOMP_MAP_TO_PSET:
11644 case GOMP_MAP_DELETE:
11645 case GOMP_MAP_RELEASE:
11646 case GOMP_MAP_ALWAYS_TO:
11647 case GOMP_MAP_ALWAYS_FROM:
11648 case GOMP_MAP_ALWAYS_TOFROM:
11649 case GOMP_MAP_FIRSTPRIVATE_POINTER:
11650 case GOMP_MAP_FIRSTPRIVATE_REFERENCE:
11651 case GOMP_MAP_STRUCT:
11652 case GOMP_MAP_ALWAYS_POINTER:
11653 case GOMP_MAP_ATTACH:
11654 case GOMP_MAP_DETACH:
11655 break;
11656 case GOMP_MAP_IF_PRESENT:
11657 case GOMP_MAP_FORCE_ALLOC:
11658 case GOMP_MAP_FORCE_TO:
11659 case GOMP_MAP_FORCE_FROM:
11660 case GOMP_MAP_FORCE_TOFROM:
11661 case GOMP_MAP_FORCE_PRESENT:
11662 case GOMP_MAP_FORCE_DEVICEPTR:
11663 case GOMP_MAP_DEVICE_RESIDENT:
11664 case GOMP_MAP_LINK:
11665 case GOMP_MAP_FORCE_DETACH:
11666 gcc_assert (is_gimple_omp_oacc (stmt));
11667 break;
11668 default:
11669 gcc_unreachable ();
11670 }
11671 #endif
11672 /* FALLTHRU */
11673 case OMP_CLAUSE_TO:
11674 case OMP_CLAUSE_FROM:
11675 oacc_firstprivate:
11676 var = OMP_CLAUSE_DECL (c);
11677 if (!DECL_P (var))
11678 {
11679 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_MAP
11680 || (!OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c)
11681 && (OMP_CLAUSE_MAP_KIND (c)
11682 != GOMP_MAP_FIRSTPRIVATE_POINTER)))
11683 map_cnt++;
11684 continue;
11685 }
11686
11687 if (DECL_SIZE (var)
11688 && TREE_CODE (DECL_SIZE (var)) != INTEGER_CST)
11689 {
11690 tree var2 = DECL_VALUE_EXPR (var);
11691 gcc_assert (TREE_CODE (var2) == INDIRECT_REF);
11692 var2 = TREE_OPERAND (var2, 0);
11693 gcc_assert (DECL_P (var2));
11694 var = var2;
11695 }
11696
11697 if (offloaded
11698 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
11699 && (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER
11700 || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_REFERENCE))
11701 {
11702 if (TREE_CODE (TREE_TYPE (var)) == ARRAY_TYPE)
11703 {
11704 if (is_global_var (maybe_lookup_decl_in_outer_ctx (var, ctx))
11705 && varpool_node::get_create (var)->offloadable)
11706 continue;
11707
11708 tree type = build_pointer_type (TREE_TYPE (var));
11709 tree new_var = lookup_decl (var, ctx);
11710 x = create_tmp_var_raw (type, get_name (new_var));
11711 gimple_add_tmp_var (x);
11712 x = build_simple_mem_ref (x);
11713 SET_DECL_VALUE_EXPR (new_var, x);
11714 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
11715 }
11716 continue;
11717 }
11718
11719 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
11720 && (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ATTACH
11721 || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_DETACH)
11722 && is_omp_target (stmt))
11723 {
11724 gcc_assert (maybe_lookup_field (c, ctx));
11725 map_cnt++;
11726 continue;
11727 }
11728
11729 if (!maybe_lookup_field (var, ctx))
11730 continue;
11731
11732 /* Don't remap compute constructs' reduction variables, because the
11733 intermediate result must be local to each gang. */
11734 if (offloaded && !(OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
11735 && OMP_CLAUSE_MAP_IN_REDUCTION (c)))
11736 {
11737 x = build_receiver_ref (var, true, ctx);
11738 tree new_var = lookup_decl (var, ctx);
11739
11740 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
11741 && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_POINTER
11742 && !OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c)
11743 && TREE_CODE (TREE_TYPE (var)) == ARRAY_TYPE)
11744 x = build_simple_mem_ref (x);
11745 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
11746 {
11747 gcc_assert (is_gimple_omp_oacc (ctx->stmt));
11748 if (omp_is_reference (new_var)
11749 && (TREE_CODE (TREE_TYPE (new_var)) != POINTER_TYPE
11750 || DECL_BY_REFERENCE (var)))
11751 {
11752 /* Create a local object to hold the instance
11753 value. */
11754 tree type = TREE_TYPE (TREE_TYPE (new_var));
11755 const char *id = IDENTIFIER_POINTER (DECL_NAME (new_var));
11756 tree inst = create_tmp_var (type, id);
11757 gimplify_assign (inst, fold_indirect_ref (x), &fplist);
11758 x = build_fold_addr_expr (inst);
11759 }
11760 gimplify_assign (new_var, x, &fplist);
11761 }
11762 else if (DECL_P (new_var))
11763 {
11764 SET_DECL_VALUE_EXPR (new_var, x);
11765 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
11766 }
11767 else
11768 gcc_unreachable ();
11769 }
11770 map_cnt++;
11771 break;
11772
11773 case OMP_CLAUSE_FIRSTPRIVATE:
11774 if (is_oacc_parallel_or_serial (ctx))
11775 goto oacc_firstprivate;
11776 map_cnt++;
11777 var = OMP_CLAUSE_DECL (c);
11778 if (!omp_is_reference (var)
11779 && !is_gimple_reg_type (TREE_TYPE (var)))
11780 {
11781 tree new_var = lookup_decl (var, ctx);
11782 if (is_variable_sized (var))
11783 {
11784 tree pvar = DECL_VALUE_EXPR (var);
11785 gcc_assert (TREE_CODE (pvar) == INDIRECT_REF);
11786 pvar = TREE_OPERAND (pvar, 0);
11787 gcc_assert (DECL_P (pvar));
11788 tree new_pvar = lookup_decl (pvar, ctx);
11789 x = build_fold_indirect_ref (new_pvar);
11790 TREE_THIS_NOTRAP (x) = 1;
11791 }
11792 else
11793 x = build_receiver_ref (var, true, ctx);
11794 SET_DECL_VALUE_EXPR (new_var, x);
11795 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
11796 }
11797 break;
11798
11799 case OMP_CLAUSE_PRIVATE:
11800 if (is_gimple_omp_oacc (ctx->stmt))
11801 break;
11802 var = OMP_CLAUSE_DECL (c);
11803 if (is_variable_sized (var))
11804 {
11805 tree new_var = lookup_decl (var, ctx);
11806 tree pvar = DECL_VALUE_EXPR (var);
11807 gcc_assert (TREE_CODE (pvar) == INDIRECT_REF);
11808 pvar = TREE_OPERAND (pvar, 0);
11809 gcc_assert (DECL_P (pvar));
11810 tree new_pvar = lookup_decl (pvar, ctx);
11811 x = build_fold_indirect_ref (new_pvar);
11812 TREE_THIS_NOTRAP (x) = 1;
11813 SET_DECL_VALUE_EXPR (new_var, x);
11814 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
11815 }
11816 break;
11817
11818 case OMP_CLAUSE_USE_DEVICE_PTR:
11819 case OMP_CLAUSE_USE_DEVICE_ADDR:
11820 case OMP_CLAUSE_IS_DEVICE_PTR:
11821 var = OMP_CLAUSE_DECL (c);
11822 map_cnt++;
11823 if (is_variable_sized (var))
11824 {
11825 tree new_var = lookup_decl (var, ctx);
11826 tree pvar = DECL_VALUE_EXPR (var);
11827 gcc_assert (TREE_CODE (pvar) == INDIRECT_REF);
11828 pvar = TREE_OPERAND (pvar, 0);
11829 gcc_assert (DECL_P (pvar));
11830 tree new_pvar = lookup_decl (pvar, ctx);
11831 x = build_fold_indirect_ref (new_pvar);
11832 TREE_THIS_NOTRAP (x) = 1;
11833 SET_DECL_VALUE_EXPR (new_var, x);
11834 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
11835 }
11836 else if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_USE_DEVICE_ADDR
11837 && !omp_is_reference (var)
11838 && !omp_is_allocatable_or_ptr (var)
11839 && !lang_hooks.decls.omp_array_data (var, true))
11840 || TREE_CODE (TREE_TYPE (var)) == ARRAY_TYPE)
11841 {
11842 tree new_var = lookup_decl (var, ctx);
11843 tree type = build_pointer_type (TREE_TYPE (var));
11844 x = create_tmp_var_raw (type, get_name (new_var));
11845 gimple_add_tmp_var (x);
11846 x = build_simple_mem_ref (x);
11847 SET_DECL_VALUE_EXPR (new_var, x);
11848 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
11849 }
11850 else
11851 {
11852 tree new_var = lookup_decl (var, ctx);
11853 x = create_tmp_var_raw (TREE_TYPE (new_var), get_name (new_var));
11854 gimple_add_tmp_var (x);
11855 SET_DECL_VALUE_EXPR (new_var, x);
11856 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
11857 }
11858 break;
11859 }
11860
11861 if (offloaded)
11862 {
11863 target_nesting_level++;
11864 lower_omp (&tgt_body, ctx);
11865 target_nesting_level--;
11866 }
11867 else if (data_region)
11868 lower_omp (&tgt_body, ctx);
11869
11870 if (offloaded)
11871 {
11872 /* Declare all the variables created by mapping and the variables
11873 declared in the scope of the target body. */
11874 record_vars_into (ctx->block_vars, child_fn);
11875 maybe_remove_omp_member_access_dummy_vars (tgt_bind);
11876 record_vars_into (gimple_bind_vars (tgt_bind), child_fn);
11877 }
11878
11879 olist = NULL;
11880 ilist = NULL;
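/* For an offload region with mapped data, the data movement is
   described by three parallel arrays built below: .omp_data_arr (host
   addresses), .omp_data_sizes and .omp_data_kinds; each kinds element
   packs the map kind in its low TALIGN_SHIFT (8) bits and ceil_log2 of
   the alignment above them.  */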
11881 if (ctx->record_type)
11882 {
11883 ctx->sender_decl
11884 = create_tmp_var (ctx->record_type, ".omp_data_arr");
11885 DECL_NAMELESS (ctx->sender_decl) = 1;
11886 TREE_ADDRESSABLE (ctx->sender_decl) = 1;
11887 t = make_tree_vec (3);
11888 TREE_VEC_ELT (t, 0) = ctx->sender_decl;
11889 TREE_VEC_ELT (t, 1)
11890 = create_tmp_var (build_array_type_nelts (size_type_node, map_cnt),
11891 ".omp_data_sizes");
11892 DECL_NAMELESS (TREE_VEC_ELT (t, 1)) = 1;
11893 TREE_ADDRESSABLE (TREE_VEC_ELT (t, 1)) = 1;
11894 TREE_STATIC (TREE_VEC_ELT (t, 1)) = 1;
11895 tree tkind_type = short_unsigned_type_node;
11896 int talign_shift = 8;
11897 TREE_VEC_ELT (t, 2)
11898 = create_tmp_var (build_array_type_nelts (tkind_type, map_cnt),
11899 ".omp_data_kinds");
11900 DECL_NAMELESS (TREE_VEC_ELT (t, 2)) = 1;
11901 TREE_ADDRESSABLE (TREE_VEC_ELT (t, 2)) = 1;
11902 TREE_STATIC (TREE_VEC_ELT (t, 2)) = 1;
11903 gimple_omp_target_set_data_arg (stmt, t);
11904
11905 vec<constructor_elt, va_gc> *vsize;
11906 vec<constructor_elt, va_gc> *vkind;
11907 vec_alloc (vsize, map_cnt);
11908 vec_alloc (vkind, map_cnt);
11909 unsigned int map_idx = 0;
11910
11911 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
11912 switch (OMP_CLAUSE_CODE (c))
11913 {
11914 tree ovar, nc, s, purpose, var, x, type;
11915 unsigned int talign;
11916
11917 default:
11918 break;
11919
11920 case OMP_CLAUSE_MAP:
11921 case OMP_CLAUSE_TO:
11922 case OMP_CLAUSE_FROM:
11923 oacc_firstprivate_map:
11924 nc = c;
11925 ovar = OMP_CLAUSE_DECL (c);
11926 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
11927 && (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER
11928 || (OMP_CLAUSE_MAP_KIND (c)
11929 == GOMP_MAP_FIRSTPRIVATE_REFERENCE)))
11930 break;
11931 if (!DECL_P (ovar))
11932 {
11933 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
11934 && OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c))
11935 {
11936 gcc_checking_assert (OMP_CLAUSE_DECL (OMP_CLAUSE_CHAIN (c))
11937 == get_base_address (ovar));
11938 nc = OMP_CLAUSE_CHAIN (c);
11939 ovar = OMP_CLAUSE_DECL (nc);
11940 }
11941 else
11942 {
11943 tree x = build_sender_ref (ovar, ctx);
11944 tree v
11945 = build_fold_addr_expr_with_type (ovar, ptr_type_node);
11946 gimplify_assign (x, v, &ilist);
11947 nc = NULL_TREE;
11948 }
11949 }
11950 else
11951 {
11952 if (DECL_SIZE (ovar)
11953 && TREE_CODE (DECL_SIZE (ovar)) != INTEGER_CST)
11954 {
11955 tree ovar2 = DECL_VALUE_EXPR (ovar);
11956 gcc_assert (TREE_CODE (ovar2) == INDIRECT_REF);
11957 ovar2 = TREE_OPERAND (ovar2, 0);
11958 gcc_assert (DECL_P (ovar2));
11959 ovar = ovar2;
11960 }
11961 if (!maybe_lookup_field (ovar, ctx)
11962 && !(OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
11963 && (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ATTACH
11964 || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_DETACH)))
11965 continue;
11966 }
11967
11968 talign = TYPE_ALIGN_UNIT (TREE_TYPE (ovar));
11969 if (DECL_P (ovar) && DECL_ALIGN_UNIT (ovar) > talign)
11970 talign = DECL_ALIGN_UNIT (ovar);
11971
11972 if (nc
11973 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
11974 && (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ATTACH
11975 || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_DETACH)
11976 && is_omp_target (stmt))
11977 {
11978 var = lookup_decl_in_outer_ctx (ovar, ctx);
11979 x = build_sender_ref (c, ctx);
11980 gimplify_assign (x, build_fold_addr_expr (var), &ilist);
11981 }
11982 else if (nc)
11983 {
11984 var = lookup_decl_in_outer_ctx (ovar, ctx);
11985 x = build_sender_ref (ovar, ctx);
11986
11987 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
11988 && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_POINTER
11989 && !OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c)
11990 && TREE_CODE (TREE_TYPE (ovar)) == ARRAY_TYPE)
11991 {
11992 gcc_assert (offloaded);
11993 tree avar
11994 = create_tmp_var (TREE_TYPE (TREE_TYPE (x)));
11995 mark_addressable (avar);
11996 gimplify_assign (avar, build_fold_addr_expr (var), &ilist);
11997 talign = DECL_ALIGN_UNIT (avar);
11998 avar = build_fold_addr_expr (avar);
11999 gimplify_assign (x, avar, &ilist);
12000 }
12001 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
12002 {
12003 gcc_assert (is_gimple_omp_oacc (ctx->stmt));
12004 if (!omp_is_reference (var))
12005 {
12006 if (is_gimple_reg (var)
12007 && OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (c))
12008 TREE_NO_WARNING (var) = 1;
12009 var = build_fold_addr_expr (var);
12010 }
12011 else
12012 talign = TYPE_ALIGN_UNIT (TREE_TYPE (TREE_TYPE (ovar)));
12013 gimplify_assign (x, var, &ilist);
12014 }
12015 else if (is_gimple_reg (var))
12016 {
12017 gcc_assert (offloaded);
12018 tree avar = create_tmp_var (TREE_TYPE (var));
12019 mark_addressable (avar);
12020 enum gomp_map_kind map_kind = OMP_CLAUSE_MAP_KIND (c);
12021 if (GOMP_MAP_COPY_TO_P (map_kind)
12022 || map_kind == GOMP_MAP_POINTER
12023 || map_kind == GOMP_MAP_TO_PSET
12024 || map_kind == GOMP_MAP_FORCE_DEVICEPTR)
12025 {
12026 /* If we need to initialize a temporary
12027 with VAR because it is not addressable, and
12028 the variable hasn't been initialized yet, then
12029 we'll get a warning for the store to avar.
12030 Don't warn in that case, the mapping might
12031 be implicit. */
12032 TREE_NO_WARNING (var) = 1;
12033 gimplify_assign (avar, var, &ilist);
12034 }
12035 avar = build_fold_addr_expr (avar);
12036 gimplify_assign (x, avar, &ilist);
12037 if ((GOMP_MAP_COPY_FROM_P (map_kind)
12038 || map_kind == GOMP_MAP_FORCE_DEVICEPTR)
12039 && !TYPE_READONLY (TREE_TYPE (var)))
12040 {
12041 x = unshare_expr (x);
12042 x = build_simple_mem_ref (x);
12043 gimplify_assign (var, x, &olist);
12044 }
12045 }
12046 else
12047 {
12048 /* While MAP is handled explicitly by the FE,
12049 for 'target update', only the identifier is passed. */
12050 if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FROM
12051 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_TO)
12052 && (omp_is_allocatable_or_ptr (var)
12053 && omp_check_optional_argument (var, false)))
12054 var = build_fold_indirect_ref (var);
12055 else if ((OMP_CLAUSE_CODE (c) != OMP_CLAUSE_FROM
12056 && OMP_CLAUSE_CODE (c) != OMP_CLAUSE_TO)
12057 || (!omp_is_allocatable_or_ptr (var)
12058 && !omp_check_optional_argument (var, false)))
12059 var = build_fold_addr_expr (var);
12060 gimplify_assign (x, var, &ilist);
12061 }
12062 }
12063 s = NULL_TREE;
12064 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
12065 {
12066 gcc_checking_assert (is_gimple_omp_oacc (ctx->stmt));
12067 s = TREE_TYPE (ovar);
12068 if (TREE_CODE (s) == REFERENCE_TYPE
12069 || omp_check_optional_argument (ovar, false))
12070 s = TREE_TYPE (s);
12071 s = TYPE_SIZE_UNIT (s);
12072 }
12073 else
12074 s = OMP_CLAUSE_SIZE (c);
12075 if (s == NULL_TREE)
12076 s = TYPE_SIZE_UNIT (TREE_TYPE (ovar));
12077 s = fold_convert (size_type_node, s);
12078 purpose = size_int (map_idx++);
12079 CONSTRUCTOR_APPEND_ELT (vsize, purpose, s);
12080 if (TREE_CODE (s) != INTEGER_CST)
12081 TREE_STATIC (TREE_VEC_ELT (t, 1)) = 0;
12082
12083 unsigned HOST_WIDE_INT tkind, tkind_zero;
12084 switch (OMP_CLAUSE_CODE (c))
12085 {
12086 case OMP_CLAUSE_MAP:
12087 tkind = OMP_CLAUSE_MAP_KIND (c);
12088 tkind_zero = tkind;
12089 if (OMP_CLAUSE_MAP_MAYBE_ZERO_LENGTH_ARRAY_SECTION (c))
12090 switch (tkind)
12091 {
12092 case GOMP_MAP_ALLOC:
12093 case GOMP_MAP_IF_PRESENT:
12094 case GOMP_MAP_TO:
12095 case GOMP_MAP_FROM:
12096 case GOMP_MAP_TOFROM:
12097 case GOMP_MAP_ALWAYS_TO:
12098 case GOMP_MAP_ALWAYS_FROM:
12099 case GOMP_MAP_ALWAYS_TOFROM:
12100 case GOMP_MAP_RELEASE:
12101 case GOMP_MAP_FORCE_TO:
12102 case GOMP_MAP_FORCE_FROM:
12103 case GOMP_MAP_FORCE_TOFROM:
12104 case GOMP_MAP_FORCE_PRESENT:
12105 tkind_zero = GOMP_MAP_ZERO_LEN_ARRAY_SECTION;
12106 break;
12107 case GOMP_MAP_DELETE:
12108 tkind_zero = GOMP_MAP_DELETE_ZERO_LEN_ARRAY_SECTION;
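/* FALLTHRU */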
12109 default:
12110 break;
12111 }
12112 if (tkind_zero != tkind)
12113 {
12114 if (integer_zerop (s))
12115 tkind = tkind_zero;
12116 else if (integer_nonzerop (s))
12117 tkind_zero = tkind;
12118 }
12119 break;
12120 case OMP_CLAUSE_FIRSTPRIVATE:
12121 gcc_checking_assert (is_gimple_omp_oacc (ctx->stmt));
12122 tkind = GOMP_MAP_TO;
12123 tkind_zero = tkind;
12124 break;
12125 case OMP_CLAUSE_TO:
12126 tkind = GOMP_MAP_TO;
12127 tkind_zero = tkind;
12128 break;
12129 case OMP_CLAUSE_FROM:
12130 tkind = GOMP_MAP_FROM;
12131 tkind_zero = tkind;
12132 break;
12133 default:
12134 gcc_unreachable ();
12135 }
12136 gcc_checking_assert (tkind
12137 < (HOST_WIDE_INT_C (1U) << talign_shift));
12138 gcc_checking_assert (tkind_zero
12139 < (HOST_WIDE_INT_C (1U) << talign_shift));
12140 talign = ceil_log2 (talign);
12141 tkind |= talign << talign_shift;
12142 tkind_zero |= talign << talign_shift;
12143 gcc_checking_assert (tkind
12144 <= tree_to_uhwi (TYPE_MAX_VALUE (tkind_type)));
12145 gcc_checking_assert (tkind_zero
12146 <= tree_to_uhwi (TYPE_MAX_VALUE (tkind_type)));
12147 if (tkind == tkind_zero)
12148 x = build_int_cstu (tkind_type, tkind);
12149 else
12150 {
12151 TREE_STATIC (TREE_VEC_ELT (t, 2)) = 0;
12152 x = build3 (COND_EXPR, tkind_type,
12153 fold_build2 (EQ_EXPR, boolean_type_node,
12154 unshare_expr (s), size_zero_node),
12155 build_int_cstu (tkind_type, tkind_zero),
12156 build_int_cstu (tkind_type, tkind));
12157 }
12158 CONSTRUCTOR_APPEND_ELT (vkind, purpose, x);
12159 if (nc && nc != c)
12160 c = nc;
12161 break;
12162
12163 case OMP_CLAUSE_FIRSTPRIVATE:
12164 if (is_oacc_parallel_or_serial (ctx))
12165 goto oacc_firstprivate_map;
12166 ovar = OMP_CLAUSE_DECL (c);
12167 if (omp_is_reference (ovar))
12168 talign = TYPE_ALIGN_UNIT (TREE_TYPE (TREE_TYPE (ovar)));
12169 else
12170 talign = DECL_ALIGN_UNIT (ovar);
12171 var = lookup_decl_in_outer_ctx (ovar, ctx);
12172 x = build_sender_ref (ovar, ctx);
12173 tkind = GOMP_MAP_FIRSTPRIVATE;
12174 type = TREE_TYPE (ovar);
12175 if (omp_is_reference (ovar))
12176 type = TREE_TYPE (type);
12177 if ((INTEGRAL_TYPE_P (type)
12178 && TYPE_PRECISION (type) <= POINTER_SIZE)
12179 || TREE_CODE (type) == POINTER_TYPE)
12180 {
12181 tkind = GOMP_MAP_FIRSTPRIVATE_INT;
12182 tree t = var;
12183 if (omp_is_reference (var))
12184 t = build_simple_mem_ref (var);
12185 else if (OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (c))
12186 TREE_NO_WARNING (var) = 1;
12187 if (TREE_CODE (type) != POINTER_TYPE)
12188 t = fold_convert (pointer_sized_int_node, t);
12189 t = fold_convert (TREE_TYPE (x), t);
12190 gimplify_assign (x, t, &ilist);
12191 }
12192 else if (omp_is_reference (var))
12193 gimplify_assign (x, var, &ilist);
12194 else if (is_gimple_reg (var))
12195 {
12196 tree avar = create_tmp_var (TREE_TYPE (var));
12197 mark_addressable (avar);
12198 if (OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (c))
12199 TREE_NO_WARNING (var) = 1;
12200 gimplify_assign (avar, var, &ilist);
12201 avar = build_fold_addr_expr (avar);
12202 gimplify_assign (x, avar, &ilist);
12203 }
12204 else
12205 {
12206 var = build_fold_addr_expr (var);
12207 gimplify_assign (x, var, &ilist);
12208 }
12209 if (tkind == GOMP_MAP_FIRSTPRIVATE_INT)
12210 s = size_int (0);
12211 else if (omp_is_reference (ovar))
12212 s = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ovar)));
12213 else
12214 s = TYPE_SIZE_UNIT (TREE_TYPE (ovar));
12215 s = fold_convert (size_type_node, s);
12216 purpose = size_int (map_idx++);
12217 CONSTRUCTOR_APPEND_ELT (vsize, purpose, s);
12218 if (TREE_CODE (s) != INTEGER_CST)
12219 TREE_STATIC (TREE_VEC_ELT (t, 1)) = 0;
12220
12221 gcc_checking_assert (tkind
12222 < (HOST_WIDE_INT_C (1U) << talign_shift));
12223 talign = ceil_log2 (talign);
12224 tkind |= talign << talign_shift;
12225 gcc_checking_assert (tkind
12226 <= tree_to_uhwi (TYPE_MAX_VALUE (tkind_type)));
12227 CONSTRUCTOR_APPEND_ELT (vkind, purpose,
12228 build_int_cstu (tkind_type, tkind));
12229 break;
12230
12231 case OMP_CLAUSE_USE_DEVICE_PTR:
12232 case OMP_CLAUSE_USE_DEVICE_ADDR:
12233 case OMP_CLAUSE_IS_DEVICE_PTR:
12234 ovar = OMP_CLAUSE_DECL (c);
12235 var = lookup_decl_in_outer_ctx (ovar, ctx);
12236
12237 if (lang_hooks.decls.omp_array_data (ovar, true))
12238 {
12239 tkind = (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_IS_DEVICE_PTR
12240 ? GOMP_MAP_USE_DEVICE_PTR : GOMP_MAP_FIRSTPRIVATE_INT);
12241 x = build_sender_ref ((splay_tree_key) &DECL_NAME (ovar), ctx);
12242 }
12243 else if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_IS_DEVICE_PTR)
12244 {
12245 tkind = GOMP_MAP_USE_DEVICE_PTR;
12246 x = build_sender_ref ((splay_tree_key) &DECL_UID (ovar), ctx);
12247 }
12248 else
12249 {
12250 tkind = GOMP_MAP_FIRSTPRIVATE_INT;
12251 x = build_sender_ref (ovar, ctx);
12252 }
12253
12254 if (is_gimple_omp_oacc (ctx->stmt))
12255 {
12256 gcc_assert (tkind == GOMP_MAP_USE_DEVICE_PTR);
12257
12258 if (OMP_CLAUSE_USE_DEVICE_PTR_IF_PRESENT (c))
12259 tkind = GOMP_MAP_USE_DEVICE_PTR_IF_PRESENT;
12260 }
12261
12262 type = TREE_TYPE (ovar);
12263 if (lang_hooks.decls.omp_array_data (ovar, true))
12264 var = lang_hooks.decls.omp_array_data (ovar, false);
12265 else if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_USE_DEVICE_ADDR
12266 && !omp_is_reference (ovar)
12267 && !omp_is_allocatable_or_ptr (ovar))
12268 || TREE_CODE (type) == ARRAY_TYPE)
12269 var = build_fold_addr_expr (var);
12270 else
12271 {
12272 if (omp_is_reference (ovar)
12273 || omp_check_optional_argument (ovar, false)
12274 || omp_is_allocatable_or_ptr (ovar))
12275 {
12276 type = TREE_TYPE (type);
12277 if (TREE_CODE (type) != ARRAY_TYPE
12278 && ((OMP_CLAUSE_CODE (c) != OMP_CLAUSE_USE_DEVICE_ADDR
12279 && !omp_is_allocatable_or_ptr (ovar))
12280 || (omp_is_reference (ovar)
12281 && omp_is_allocatable_or_ptr (ovar))))
12282 var = build_simple_mem_ref (var);
12283 var = fold_convert (TREE_TYPE (x), var);
12284 }
12285 }
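/* For a Fortran OPTIONAL dummy argument, guard the send with a
presence check so that an absent argument is passed as a null
pointer.  A sketch of the sequence emitted below (labels elided):

if (present) x = var; else x = NULL;  */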
12286 tree present;
12287 present = omp_check_optional_argument (ovar, true);
12288 if (present)
12289 {
12290 tree null_label = create_artificial_label (UNKNOWN_LOCATION);
12291 tree notnull_label = create_artificial_label (UNKNOWN_LOCATION);
12292 tree opt_arg_label = create_artificial_label (UNKNOWN_LOCATION);
12293 tree new_x = unshare_expr (x);
12294 gimplify_expr (&present, &ilist, NULL, is_gimple_val,
12295 fb_rvalue);
12296 gcond *cond = gimple_build_cond_from_tree (present,
12297 notnull_label,
12298 null_label);
12299 gimple_seq_add_stmt (&ilist, cond);
12300 gimple_seq_add_stmt (&ilist, gimple_build_label (null_label));
12301 gimplify_assign (new_x, null_pointer_node, &ilist);
12302 gimple_seq_add_stmt (&ilist, gimple_build_goto (opt_arg_label));
12303 gimple_seq_add_stmt (&ilist,
12304 gimple_build_label (notnull_label));
12305 gimplify_assign (x, var, &ilist);
12306 gimple_seq_add_stmt (&ilist,
12307 gimple_build_label (opt_arg_label));
12308 }
12309 else
12310 gimplify_assign (x, var, &ilist);
12311 s = size_int (0);
12312 purpose = size_int (map_idx++);
12313 CONSTRUCTOR_APPEND_ELT (vsize, purpose, s);
12314 gcc_checking_assert (tkind
12315 < (HOST_WIDE_INT_C (1U) << talign_shift));
12316 gcc_checking_assert (tkind
12317 <= tree_to_uhwi (TYPE_MAX_VALUE (tkind_type)));
12318 CONSTRUCTOR_APPEND_ELT (vkind, purpose,
12319 build_int_cstu (tkind_type, tkind));
12320 break;
12321 }
12322
12323 gcc_assert (map_idx == map_cnt);
12324
12325 DECL_INITIAL (TREE_VEC_ELT (t, 1))
12326 = build_constructor (TREE_TYPE (TREE_VEC_ELT (t, 1)), vsize);
12327 DECL_INITIAL (TREE_VEC_ELT (t, 2))
12328 = build_constructor (TREE_TYPE (TREE_VEC_ELT (t, 2)), vkind);
12329 for (int i = 1; i <= 2; i++)
12330 if (!TREE_STATIC (TREE_VEC_ELT (t, i)))
12331 {
12332 gimple_seq initlist = NULL;
12333 force_gimple_operand (build1 (DECL_EXPR, void_type_node,
12334 TREE_VEC_ELT (t, i)),
12335 &initlist, true, NULL_TREE);
12336 gimple_seq_add_seq (&ilist, initlist);
12337
12338 tree clobber = build_clobber (TREE_TYPE (TREE_VEC_ELT (t, i)));
12339 gimple_seq_add_stmt (&olist,
12340 gimple_build_assign (TREE_VEC_ELT (t, i),
12341 clobber));
12342 }
12343
12344 tree clobber = build_clobber (ctx->record_type);
12345 gimple_seq_add_stmt (&olist, gimple_build_assign (ctx->sender_decl,
12346 clobber));
12347 }
12348
12349 /* Once all the expansions are done, sequence all the different
12350 fragments inside gimple_omp_body. */
12351
12352 new_body = NULL;
12353
12354 if (offloaded
12355 && ctx->record_type)
12356 {
12357 t = build_fold_addr_expr_loc (loc, ctx->sender_decl);
12358 /* fixup_child_record_type might have changed receiver_decl's type. */
12359 t = fold_convert_loc (loc, TREE_TYPE (ctx->receiver_decl), t);
12360 gimple_seq_add_stmt (&new_body,
12361 gimple_build_assign (ctx->receiver_decl, t));
12362 }
12363 gimple_seq_add_seq (&new_body, fplist);
12364
12365 if (offloaded || data_region)
12366 {
12367 tree prev = NULL_TREE;
12368 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
12369 switch (OMP_CLAUSE_CODE (c))
12370 {
12371 tree var, x;
12372 default:
12373 break;
12374 case OMP_CLAUSE_FIRSTPRIVATE:
12375 if (is_gimple_omp_oacc (ctx->stmt))
12376 break;
12377 var = OMP_CLAUSE_DECL (c);
12378 if (omp_is_reference (var)
12379 || is_gimple_reg_type (TREE_TYPE (var)))
12380 {
12381 tree new_var = lookup_decl (var, ctx);
12382 tree type;
12383 type = TREE_TYPE (var);
12384 if (omp_is_reference (var))
12385 type = TREE_TYPE (type);
12386 if ((INTEGRAL_TYPE_P (type)
12387 && TYPE_PRECISION (type) <= POINTER_SIZE)
12388 || TREE_CODE (type) == POINTER_TYPE)
12389 {
12390 x = build_receiver_ref (var, false, ctx);
12391 if (TREE_CODE (type) != POINTER_TYPE)
12392 x = fold_convert (pointer_sized_int_node, x);
12393 x = fold_convert (type, x);
12394 gimplify_expr (&x, &new_body, NULL, is_gimple_val,
12395 fb_rvalue);
12396 if (omp_is_reference (var))
12397 {
12398 tree v = create_tmp_var_raw (type, get_name (var));
12399 gimple_add_tmp_var (v);
12400 TREE_ADDRESSABLE (v) = 1;
12401 gimple_seq_add_stmt (&new_body,
12402 gimple_build_assign (v, x));
12403 x = build_fold_addr_expr (v);
12404 }
12405 gimple_seq_add_stmt (&new_body,
12406 gimple_build_assign (new_var, x));
12407 }
12408 else
12409 {
12410 x = build_receiver_ref (var, !omp_is_reference (var), ctx);
12411 gimplify_expr (&x, &new_body, NULL, is_gimple_val,
12412 fb_rvalue);
12413 gimple_seq_add_stmt (&new_body,
12414 gimple_build_assign (new_var, x));
12415 }
12416 }
12417 else if (is_variable_sized (var))
12418 {
12419 tree pvar = DECL_VALUE_EXPR (var);
12420 gcc_assert (TREE_CODE (pvar) == INDIRECT_REF);
12421 pvar = TREE_OPERAND (pvar, 0);
12422 gcc_assert (DECL_P (pvar));
12423 tree new_var = lookup_decl (pvar, ctx);
12424 x = build_receiver_ref (var, false, ctx);
12425 gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
12426 gimple_seq_add_stmt (&new_body,
12427 gimple_build_assign (new_var, x));
12428 }
12429 break;
12430 case OMP_CLAUSE_PRIVATE:
12431 if (is_gimple_omp_oacc (ctx->stmt))
12432 break;
12433 var = OMP_CLAUSE_DECL (c);
12434 if (omp_is_reference (var))
12435 {
12436 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
12437 tree new_var = lookup_decl (var, ctx);
12438 x = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (new_var)));
12439 if (TREE_CONSTANT (x))
12440 {
12441 x = create_tmp_var_raw (TREE_TYPE (TREE_TYPE (new_var)),
12442 get_name (var));
12443 gimple_add_tmp_var (x);
12444 TREE_ADDRESSABLE (x) = 1;
12445 x = build_fold_addr_expr_loc (clause_loc, x);
12446 }
12447 else
12448 break;
12449
12450 x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x);
12451 gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
12452 gimple_seq_add_stmt (&new_body,
12453 gimple_build_assign (new_var, x));
12454 }
12455 break;
12456 case OMP_CLAUSE_USE_DEVICE_PTR:
12457 case OMP_CLAUSE_USE_DEVICE_ADDR:
12458 case OMP_CLAUSE_IS_DEVICE_PTR:
12459 tree new_var;
12460 gimple_seq assign_body;
12461 bool is_array_data;
12462 bool do_optional_check;
12463 assign_body = NULL;
12464 do_optional_check = false;
12465 var = OMP_CLAUSE_DECL (c);
12466 is_array_data = lang_hooks.decls.omp_array_data (var, true) != NULL;
12467
12468 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_IS_DEVICE_PTR)
12469 x = build_sender_ref (is_array_data
12470 ? (splay_tree_key) &DECL_NAME (var)
12471 : (splay_tree_key) &DECL_UID (var), ctx);
12472 else
12473 x = build_receiver_ref (var, false, ctx);
12474
12475 if (is_array_data)
12476 {
12477 bool is_ref = omp_is_reference (var);
12478 do_optional_check = true;
12479 /* First, we copy the descriptor data from the host; then
12480 we update its data to point to the target address. */
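/* E.g. for a Fortran array descriptor (a sketch): the whole
descriptor is copied into NEW_VAR first, and then its data
component, obtained via lang_hooks.decls.omp_array_data, is
overwritten with the target address received in X.  */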
12481 new_var = lookup_decl (var, ctx);
12482 new_var = DECL_VALUE_EXPR (new_var);
12483 tree v = new_var;
12484
12485 if (is_ref)
12486 {
12487 var = build_fold_indirect_ref (var);
12488 gimplify_expr (&var, &assign_body, NULL, is_gimple_val,
12489 fb_rvalue);
12490 v = create_tmp_var_raw (TREE_TYPE (var), get_name (var));
12491 gimple_add_tmp_var (v);
12492 TREE_ADDRESSABLE (v) = 1;
12493 gimple_seq_add_stmt (&assign_body,
12494 gimple_build_assign (v, var));
12495 tree rhs = build_fold_addr_expr (v);
12496 gimple_seq_add_stmt (&assign_body,
12497 gimple_build_assign (new_var, rhs));
12498 }
12499 else
12500 gimple_seq_add_stmt (&assign_body,
12501 gimple_build_assign (new_var, var));
12502
12503 tree v2 = lang_hooks.decls.omp_array_data (unshare_expr (v), false);
12504 gcc_assert (v2);
12505 gimplify_expr (&x, &assign_body, NULL, is_gimple_val, fb_rvalue);
12506 gimple_seq_add_stmt (&assign_body,
12507 gimple_build_assign (v2, x));
12508 }
12509 else if (is_variable_sized (var))
12510 {
12511 tree pvar = DECL_VALUE_EXPR (var);
12512 gcc_assert (TREE_CODE (pvar) == INDIRECT_REF);
12513 pvar = TREE_OPERAND (pvar, 0);
12514 gcc_assert (DECL_P (pvar));
12515 new_var = lookup_decl (pvar, ctx);
12516 gimplify_expr (&x, &assign_body, NULL, is_gimple_val, fb_rvalue);
12517 gimple_seq_add_stmt (&assign_body,
12518 gimple_build_assign (new_var, x));
12519 }
12520 else if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_USE_DEVICE_ADDR
12521 && !omp_is_reference (var)
12522 && !omp_is_allocatable_or_ptr (var))
12523 || TREE_CODE (TREE_TYPE (var)) == ARRAY_TYPE)
12524 {
12525 new_var = lookup_decl (var, ctx);
12526 new_var = DECL_VALUE_EXPR (new_var);
12527 gcc_assert (TREE_CODE (new_var) == MEM_REF);
12528 new_var = TREE_OPERAND (new_var, 0);
12529 gcc_assert (DECL_P (new_var));
12530 gimplify_expr (&x, &assign_body, NULL, is_gimple_val, fb_rvalue);
12531 gimple_seq_add_stmt (&assign_body,
12532 gimple_build_assign (new_var, x));
12533 }
12534 else
12535 {
12536 tree type = TREE_TYPE (var);
12537 new_var = lookup_decl (var, ctx);
12538 if (omp_is_reference (var))
12539 {
12540 type = TREE_TYPE (type);
12541 if (TREE_CODE (type) != ARRAY_TYPE
12542 && (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_USE_DEVICE_ADDR
12543 || (omp_is_reference (var)
12544 && omp_is_allocatable_or_ptr (var))))
12545 {
12546 tree v = create_tmp_var_raw (type, get_name (var));
12547 gimple_add_tmp_var (v);
12548 TREE_ADDRESSABLE (v) = 1;
12549 x = fold_convert (type, x);
12550 gimplify_expr (&x, &assign_body, NULL, is_gimple_val,
12551 fb_rvalue);
12552 gimple_seq_add_stmt (&assign_body,
12553 gimple_build_assign (v, x));
12554 x = build_fold_addr_expr (v);
12555 do_optional_check = true;
12556 }
12557 }
12558 new_var = DECL_VALUE_EXPR (new_var);
12559 x = fold_convert (TREE_TYPE (new_var), x);
12560 gimplify_expr (&x, &assign_body, NULL, is_gimple_val, fb_rvalue);
12561 gimple_seq_add_stmt (&assign_body,
12562 gimple_build_assign (new_var, x));
12563 }
12564 tree present;
12565 present = (do_optional_check
12566 ? omp_check_optional_argument (OMP_CLAUSE_DECL (c), true)
12567 : NULL_TREE);
12568 if (present)
12569 {
12570 tree null_label = create_artificial_label (UNKNOWN_LOCATION);
12571 tree notnull_label = create_artificial_label (UNKNOWN_LOCATION);
12572 tree opt_arg_label = create_artificial_label (UNKNOWN_LOCATION);
12573 glabel *null_glabel = gimple_build_label (null_label);
12574 glabel *notnull_glabel = gimple_build_label (notnull_label);
12575 ggoto *opt_arg_ggoto = gimple_build_goto (opt_arg_label);
12576 gimplify_expr (&x, &new_body, NULL, is_gimple_val,
12577 fb_rvalue);
12578 gimplify_expr (&present, &new_body, NULL, is_gimple_val,
12579 fb_rvalue);
12580 gcond *cond = gimple_build_cond_from_tree (present,
12581 notnull_label,
12582 null_label);
12583 gimple_seq_add_stmt (&new_body, cond);
12584 gimple_seq_add_stmt (&new_body, null_glabel);
12585 gimplify_assign (new_var, null_pointer_node, &new_body);
12586 gimple_seq_add_stmt (&new_body, opt_arg_ggoto);
12587 gimple_seq_add_stmt (&new_body, notnull_glabel);
12588 gimple_seq_add_seq (&new_body, assign_body);
12589 gimple_seq_add_stmt (&new_body,
12590 gimple_build_label (opt_arg_label));
12591 }
12592 else
12593 gimple_seq_add_seq (&new_body, assign_body);
12594 break;
12595 }
/* Handle GOMP_MAP_FIRSTPRIVATE_{POINTER,REFERENCE} in a second pass,
so that any firstprivate vars needed to hold OMP_CLAUSE_SIZE are
already handled.  Similarly OMP_CLAUSE_PRIVATE for VLAs or
references to VLAs.  */
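/* For instance (a sketch; the exact clause list is FE-dependent),
given

int *p;
#pragma omp target map(tofrom: p[0:n])

the FE chains a GOMP_MAP_FIRSTPRIVATE_POINTER clause for P right
after the array-section map; below, the private copy of P is
initialized from the receiver field of that preceding map,
adjusted by the bias recorded in OMP_CLAUSE_SIZE.  */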
12600 for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
12601 switch (OMP_CLAUSE_CODE (c))
12602 {
12603 tree var;
12604 default:
12605 break;
12606 case OMP_CLAUSE_MAP:
12607 if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER
12608 || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_REFERENCE)
12609 {
12610 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
12611 poly_int64 offset = 0;
12612 gcc_assert (prev);
12613 var = OMP_CLAUSE_DECL (c);
12614 if (DECL_P (var)
12615 && TREE_CODE (TREE_TYPE (var)) == ARRAY_TYPE
12616 && is_global_var (maybe_lookup_decl_in_outer_ctx (var,
12617 ctx))
12618 && varpool_node::get_create (var)->offloadable)
12619 break;
12620 if (TREE_CODE (var) == INDIRECT_REF
12621 && TREE_CODE (TREE_OPERAND (var, 0)) == COMPONENT_REF)
12622 var = TREE_OPERAND (var, 0);
12623 if (TREE_CODE (var) == COMPONENT_REF)
12624 {
12625 var = get_addr_base_and_unit_offset (var, &offset);
12626 gcc_assert (var != NULL_TREE && DECL_P (var));
12627 }
12628 else if (DECL_SIZE (var)
12629 && TREE_CODE (DECL_SIZE (var)) != INTEGER_CST)
12630 {
12631 tree var2 = DECL_VALUE_EXPR (var);
12632 gcc_assert (TREE_CODE (var2) == INDIRECT_REF);
12633 var2 = TREE_OPERAND (var2, 0);
12634 gcc_assert (DECL_P (var2));
12635 var = var2;
12636 }
12637 tree new_var = lookup_decl (var, ctx), x;
12638 tree type = TREE_TYPE (new_var);
12639 bool is_ref;
12640 if (TREE_CODE (OMP_CLAUSE_DECL (c)) == INDIRECT_REF
12641 && (TREE_CODE (TREE_OPERAND (OMP_CLAUSE_DECL (c), 0))
12642 == COMPONENT_REF))
12643 {
12644 type = TREE_TYPE (TREE_OPERAND (OMP_CLAUSE_DECL (c), 0));
12645 is_ref = true;
12646 new_var = build2 (MEM_REF, type,
12647 build_fold_addr_expr (new_var),
12648 build_int_cst (build_pointer_type (type),
12649 offset));
12650 }
12651 else if (TREE_CODE (OMP_CLAUSE_DECL (c)) == COMPONENT_REF)
12652 {
12653 type = TREE_TYPE (OMP_CLAUSE_DECL (c));
12654 is_ref = TREE_CODE (type) == REFERENCE_TYPE;
12655 new_var = build2 (MEM_REF, type,
12656 build_fold_addr_expr (new_var),
12657 build_int_cst (build_pointer_type (type),
12658 offset));
12659 }
12660 else
12661 is_ref = omp_is_reference (var);
12662 if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_REFERENCE)
12663 is_ref = false;
12664 bool ref_to_array = false;
12665 if (is_ref)
12666 {
12667 type = TREE_TYPE (type);
12668 if (TREE_CODE (type) == ARRAY_TYPE)
12669 {
12670 type = build_pointer_type (type);
12671 ref_to_array = true;
12672 }
12673 }
12674 else if (TREE_CODE (type) == ARRAY_TYPE)
12675 {
12676 tree decl2 = DECL_VALUE_EXPR (new_var);
12677 gcc_assert (TREE_CODE (decl2) == MEM_REF);
12678 decl2 = TREE_OPERAND (decl2, 0);
12679 gcc_assert (DECL_P (decl2));
12680 new_var = decl2;
12681 type = TREE_TYPE (new_var);
12682 }
12683 x = build_receiver_ref (OMP_CLAUSE_DECL (prev), false, ctx);
12684 x = fold_convert_loc (clause_loc, type, x);
12685 if (!integer_zerop (OMP_CLAUSE_SIZE (c)))
12686 {
12687 tree bias = OMP_CLAUSE_SIZE (c);
12688 if (DECL_P (bias))
12689 bias = lookup_decl (bias, ctx);
12690 bias = fold_convert_loc (clause_loc, sizetype, bias);
12691 bias = fold_build1_loc (clause_loc, NEGATE_EXPR, sizetype,
12692 bias);
12693 x = fold_build2_loc (clause_loc, POINTER_PLUS_EXPR,
12694 TREE_TYPE (x), x, bias);
12695 }
12696 if (ref_to_array)
12697 x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x);
12698 gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
12699 if (is_ref && !ref_to_array)
12700 {
12701 tree t = create_tmp_var_raw (type, get_name (var));
12702 gimple_add_tmp_var (t);
12703 TREE_ADDRESSABLE (t) = 1;
12704 gimple_seq_add_stmt (&new_body,
12705 gimple_build_assign (t, x));
12706 x = build_fold_addr_expr_loc (clause_loc, t);
12707 }
12708 gimple_seq_add_stmt (&new_body,
12709 gimple_build_assign (new_var, x));
12710 prev = NULL_TREE;
12711 }
12712 else if (OMP_CLAUSE_CHAIN (c)
12713 && OMP_CLAUSE_CODE (OMP_CLAUSE_CHAIN (c))
12714 == OMP_CLAUSE_MAP
12715 && (OMP_CLAUSE_MAP_KIND (OMP_CLAUSE_CHAIN (c))
12716 == GOMP_MAP_FIRSTPRIVATE_POINTER
12717 || (OMP_CLAUSE_MAP_KIND (OMP_CLAUSE_CHAIN (c))
12718 == GOMP_MAP_FIRSTPRIVATE_REFERENCE)))
12719 prev = c;
12720 break;
12721 case OMP_CLAUSE_PRIVATE:
12722 var = OMP_CLAUSE_DECL (c);
12723 if (is_variable_sized (var))
12724 {
12725 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
12726 tree new_var = lookup_decl (var, ctx);
12727 tree pvar = DECL_VALUE_EXPR (var);
12728 gcc_assert (TREE_CODE (pvar) == INDIRECT_REF);
12729 pvar = TREE_OPERAND (pvar, 0);
12730 gcc_assert (DECL_P (pvar));
12731 tree new_pvar = lookup_decl (pvar, ctx);
12732 tree atmp = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
12733 tree al = size_int (DECL_ALIGN (var));
12734 tree x = TYPE_SIZE_UNIT (TREE_TYPE (new_var));
12735 x = build_call_expr_loc (clause_loc, atmp, 2, x, al);
12736 x = fold_convert_loc (clause_loc, TREE_TYPE (new_pvar), x);
12737 gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
12738 gimple_seq_add_stmt (&new_body,
12739 gimple_build_assign (new_pvar, x));
12740 }
12741 else if (omp_is_reference (var) && !is_gimple_omp_oacc (ctx->stmt))
12742 {
12743 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
12744 tree new_var = lookup_decl (var, ctx);
12745 tree x = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (new_var)));
12746 if (TREE_CONSTANT (x))
12747 break;
12748 else
12749 {
12750 tree atmp
12751 = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
12752 tree rtype = TREE_TYPE (TREE_TYPE (new_var));
12753 tree al = size_int (TYPE_ALIGN (rtype));
12754 x = build_call_expr_loc (clause_loc, atmp, 2, x, al);
12755 }
12756
12757 x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x);
12758 gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
12759 gimple_seq_add_stmt (&new_body,
12760 gimple_build_assign (new_var, x));
12761 }
12762 break;
12763 }
12764
12765 gimple_seq fork_seq = NULL;
12766 gimple_seq join_seq = NULL;
12767
12768 if (is_oacc_parallel_or_serial (ctx))
12769 {
12770 /* If there are reductions on the offloaded region itself, treat
12771 them as a dummy GANG loop. */
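/* E.g. (a sketch) for '#pragma acc parallel reduction (+:sum)',
the reduction setup and teardown land in FORK_SEQ and JOIN_SEQ
around the region body, as if SUM belonged to a gang-partitioned
loop.  */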
12772 tree level = build_int_cst (integer_type_node, GOMP_DIM_GANG);
12773
12774 lower_oacc_reductions (gimple_location (ctx->stmt), clauses, level,
12775 false, NULL, NULL, &fork_seq, &join_seq, ctx);
12776 }
12777
12778 gimple_seq_add_seq (&new_body, fork_seq);
12779 gimple_seq_add_seq (&new_body, tgt_body);
12780 gimple_seq_add_seq (&new_body, join_seq);
12781
12782 if (offloaded)
12783 new_body = maybe_catch_exception (new_body);
12784
12785 gimple_seq_add_stmt (&new_body, gimple_build_omp_return (false));
12786 gimple_omp_set_body (stmt, new_body);
12787 }
12788
12789 bind = gimple_build_bind (NULL, NULL,
12790 tgt_bind ? gimple_bind_block (tgt_bind)
12791 : NULL_TREE);
12792 gsi_replace (gsi_p, dep_bind ? dep_bind : bind, true);
12793 gimple_bind_add_seq (bind, ilist);
12794 gimple_bind_add_stmt (bind, stmt);
12795 gimple_bind_add_seq (bind, olist);
12796
12797 pop_gimplify_context (NULL);
12798
12799 if (dep_bind)
12800 {
12801 gimple_bind_add_seq (dep_bind, dep_ilist);
12802 gimple_bind_add_stmt (dep_bind, bind);
12803 gimple_bind_add_seq (dep_bind, dep_olist);
12804 pop_gimplify_context (dep_bind);
12805 }
12806 }
12807
/* Lower code for an OpenMP teams directive.  */
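/* A rough sketch of the lowering, with clause and privatization
handling elided:

#pragma omp teams num_teams (N) thread_limit (M)
BODY;

becomes

GOMP_teams (N, M);
BODY;
#pragma omp return

where an absent clause contributes 0 and GOMP_teams is the libgomp
entry point behind BUILT_IN_GOMP_TEAMS.  */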
12809
12810 static void
12811 lower_omp_teams (gimple_stmt_iterator *gsi_p, omp_context *ctx)
12812 {
12813 gomp_teams *teams_stmt = as_a <gomp_teams *> (gsi_stmt (*gsi_p));
12814 push_gimplify_context ();
12815
12816 tree block = make_node (BLOCK);
12817 gbind *bind = gimple_build_bind (NULL, NULL, block);
12818 gsi_replace (gsi_p, bind, true);
12819 gimple_seq bind_body = NULL;
12820 gimple_seq dlist = NULL;
12821 gimple_seq olist = NULL;
12822
12823 tree num_teams = omp_find_clause (gimple_omp_teams_clauses (teams_stmt),
12824 OMP_CLAUSE_NUM_TEAMS);
12825 if (num_teams == NULL_TREE)
12826 num_teams = build_int_cst (unsigned_type_node, 0);
12827 else
12828 {
12829 num_teams = OMP_CLAUSE_NUM_TEAMS_EXPR (num_teams);
12830 num_teams = fold_convert (unsigned_type_node, num_teams);
12831 gimplify_expr (&num_teams, &bind_body, NULL, is_gimple_val, fb_rvalue);
12832 }
12833 tree thread_limit = omp_find_clause (gimple_omp_teams_clauses (teams_stmt),
12834 OMP_CLAUSE_THREAD_LIMIT);
12835 if (thread_limit == NULL_TREE)
12836 thread_limit = build_int_cst (unsigned_type_node, 0);
12837 else
12838 {
12839 thread_limit = OMP_CLAUSE_THREAD_LIMIT_EXPR (thread_limit);
12840 thread_limit = fold_convert (unsigned_type_node, thread_limit);
12841 gimplify_expr (&thread_limit, &bind_body, NULL, is_gimple_val,
12842 fb_rvalue);
12843 }
12844
12845 lower_rec_input_clauses (gimple_omp_teams_clauses (teams_stmt),
12846 &bind_body, &dlist, ctx, NULL);
12847 lower_omp (gimple_omp_body_ptr (teams_stmt), ctx);
12848 lower_reduction_clauses (gimple_omp_teams_clauses (teams_stmt), &olist,
12849 NULL, ctx);
12850 gimple_seq_add_stmt (&bind_body, teams_stmt);
12851
12852 location_t loc = gimple_location (teams_stmt);
12853 tree decl = builtin_decl_explicit (BUILT_IN_GOMP_TEAMS);
12854 gimple *call = gimple_build_call (decl, 2, num_teams, thread_limit);
12855 gimple_set_location (call, loc);
12856 gimple_seq_add_stmt (&bind_body, call);
12857
12858 gimple_seq_add_seq (&bind_body, gimple_omp_body (teams_stmt));
12859 gimple_omp_set_body (teams_stmt, NULL);
12860 gimple_seq_add_seq (&bind_body, olist);
12861 gimple_seq_add_seq (&bind_body, dlist);
12862 gimple_seq_add_stmt (&bind_body, gimple_build_omp_return (true));
12863 gimple_bind_set_body (bind, bind_body);
12864
12865 pop_gimplify_context (bind);
12866
12867 gimple_bind_append_vars (bind, ctx->block_vars);
12868 BLOCK_VARS (block) = ctx->block_vars;
12869 if (BLOCK_VARS (block))
12870 TREE_USED (block) = 1;
12871 }
12872
12873 /* Callback for lower_omp_1. Return non-NULL if *tp needs to be
regimplified.  If DATA is non-NULL, lower_omp_1 is called outside
of an OMP context, but with task_shared_vars set.  */
12876
12877 static tree
12878 lower_omp_regimplify_p (tree *tp, int *walk_subtrees,
12879 void *data)
12880 {
12881 tree t = *tp;
12882
12883 /* Any variable with DECL_VALUE_EXPR needs to be regimplified. */
12884 if (VAR_P (t) && data == NULL && DECL_HAS_VALUE_EXPR_P (t))
12885 return t;
12886
12887 if (task_shared_vars
12888 && DECL_P (t)
12889 && bitmap_bit_p (task_shared_vars, DECL_UID (t)))
12890 return t;
12891
12892 /* If a global variable has been privatized, TREE_CONSTANT on
12893 ADDR_EXPR might be wrong. */
12894 if (data == NULL && TREE_CODE (t) == ADDR_EXPR)
12895 recompute_tree_invariant_for_addr_expr (t);
12896
12897 *walk_subtrees = !IS_TYPE_OR_DECL_P (t);
12898 return NULL_TREE;
12899 }
12900
12901 /* Data to be communicated between lower_omp_regimplify_operands and
12902 lower_omp_regimplify_operands_p. */
12903
12904 struct lower_omp_regimplify_operands_data
12905 {
12906 omp_context *ctx;
12907 vec<tree> *decls;
12908 };
12909
12910 /* Helper function for lower_omp_regimplify_operands. Find
omp_member_access_dummy_var vars and temporarily adjust their
DECL_VALUE_EXPRs if needed.  */
12913
12914 static tree
12915 lower_omp_regimplify_operands_p (tree *tp, int *walk_subtrees,
12916 void *data)
12917 {
12918 tree t = omp_member_access_dummy_var (*tp);
12919 if (t)
12920 {
12921 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
12922 lower_omp_regimplify_operands_data *ldata
12923 = (lower_omp_regimplify_operands_data *) wi->info;
12924 tree o = maybe_lookup_decl (t, ldata->ctx);
12925 if (o != t)
12926 {
12927 ldata->decls->safe_push (DECL_VALUE_EXPR (*tp));
12928 ldata->decls->safe_push (*tp);
12929 tree v = unshare_and_remap (DECL_VALUE_EXPR (*tp), t, o);
12930 SET_DECL_VALUE_EXPR (*tp, v);
12931 }
12932 }
12933 *walk_subtrees = !IS_TYPE_OR_DECL_P (*tp);
12934 return NULL_TREE;
12935 }
12936
12937 /* Wrapper around gimple_regimplify_operands that adjusts DECL_VALUE_EXPRs
12938 of omp_member_access_dummy_var vars during regimplification. */
12939
12940 static void
12941 lower_omp_regimplify_operands (omp_context *ctx, gimple *stmt,
12942 gimple_stmt_iterator *gsi_p)
12943 {
12944 auto_vec<tree, 10> decls;
12945 if (ctx)
12946 {
12947 struct walk_stmt_info wi;
12948 memset (&wi, '\0', sizeof (wi));
12949 struct lower_omp_regimplify_operands_data data;
12950 data.ctx = ctx;
12951 data.decls = &decls;
12952 wi.info = &data;
12953 walk_gimple_op (stmt, lower_omp_regimplify_operands_p, &wi);
12954 }
12955 gimple_regimplify_operands (stmt, gsi_p);
12956 while (!decls.is_empty ())
12957 {
12958 tree t = decls.pop ();
12959 tree v = decls.pop ();
12960 SET_DECL_VALUE_EXPR (t, v);
12961 }
12962 }
12963
12964 static void
12965 lower_omp_1 (gimple_stmt_iterator *gsi_p, omp_context *ctx)
12966 {
12967 gimple *stmt = gsi_stmt (*gsi_p);
12968 struct walk_stmt_info wi;
12969 gcall *call_stmt;
12970
12971 if (gimple_has_location (stmt))
12972 input_location = gimple_location (stmt);
12973
12974 if (task_shared_vars)
12975 memset (&wi, '\0', sizeof (wi));
12976
12977 /* If we have issued syntax errors, avoid doing any heavy lifting.
12978 Just replace the OMP directives with a NOP to avoid
12979 confusing RTL expansion. */
12980 if (seen_error () && is_gimple_omp (stmt))
12981 {
12982 gsi_replace (gsi_p, gimple_build_nop (), true);
12983 return;
12984 }
12985
12986 switch (gimple_code (stmt))
12987 {
12988 case GIMPLE_COND:
12989 {
12990 gcond *cond_stmt = as_a <gcond *> (stmt);
12991 if ((ctx || task_shared_vars)
12992 && (walk_tree (gimple_cond_lhs_ptr (cond_stmt),
12993 lower_omp_regimplify_p,
12994 ctx ? NULL : &wi, NULL)
12995 || walk_tree (gimple_cond_rhs_ptr (cond_stmt),
12996 lower_omp_regimplify_p,
12997 ctx ? NULL : &wi, NULL)))
12998 lower_omp_regimplify_operands (ctx, cond_stmt, gsi_p);
12999 }
13000 break;
13001 case GIMPLE_CATCH:
13002 lower_omp (gimple_catch_handler_ptr (as_a <gcatch *> (stmt)), ctx);
13003 break;
13004 case GIMPLE_EH_FILTER:
13005 lower_omp (gimple_eh_filter_failure_ptr (stmt), ctx);
13006 break;
13007 case GIMPLE_TRY:
13008 lower_omp (gimple_try_eval_ptr (stmt), ctx);
13009 lower_omp (gimple_try_cleanup_ptr (stmt), ctx);
13010 break;
13011 case GIMPLE_TRANSACTION:
13012 lower_omp (gimple_transaction_body_ptr (as_a <gtransaction *> (stmt)),
13013 ctx);
13014 break;
13015 case GIMPLE_BIND:
13016 lower_omp (gimple_bind_body_ptr (as_a <gbind *> (stmt)), ctx);
13017 maybe_remove_omp_member_access_dummy_vars (as_a <gbind *> (stmt));
13018 break;
13019 case GIMPLE_OMP_PARALLEL:
13020 case GIMPLE_OMP_TASK:
13021 ctx = maybe_lookup_ctx (stmt);
13022 gcc_assert (ctx);
13023 if (ctx->cancellable)
13024 ctx->cancel_label = create_artificial_label (UNKNOWN_LOCATION);
13025 lower_omp_taskreg (gsi_p, ctx);
13026 break;
13027 case GIMPLE_OMP_FOR:
13028 ctx = maybe_lookup_ctx (stmt);
13029 gcc_assert (ctx);
13030 if (ctx->cancellable)
13031 ctx->cancel_label = create_artificial_label (UNKNOWN_LOCATION);
13032 lower_omp_for (gsi_p, ctx);
13033 break;
13034 case GIMPLE_OMP_SECTIONS:
13035 ctx = maybe_lookup_ctx (stmt);
13036 gcc_assert (ctx);
13037 if (ctx->cancellable)
13038 ctx->cancel_label = create_artificial_label (UNKNOWN_LOCATION);
13039 lower_omp_sections (gsi_p, ctx);
13040 break;
13041 case GIMPLE_OMP_SINGLE:
13042 ctx = maybe_lookup_ctx (stmt);
13043 gcc_assert (ctx);
13044 lower_omp_single (gsi_p, ctx);
13045 break;
13046 case GIMPLE_OMP_MASTER:
13047 ctx = maybe_lookup_ctx (stmt);
13048 gcc_assert (ctx);
13049 lower_omp_master (gsi_p, ctx);
13050 break;
13051 case GIMPLE_OMP_TASKGROUP:
13052 ctx = maybe_lookup_ctx (stmt);
13053 gcc_assert (ctx);
13054 lower_omp_taskgroup (gsi_p, ctx);
13055 break;
13056 case GIMPLE_OMP_ORDERED:
13057 ctx = maybe_lookup_ctx (stmt);
13058 gcc_assert (ctx);
13059 lower_omp_ordered (gsi_p, ctx);
13060 break;
13061 case GIMPLE_OMP_SCAN:
13062 ctx = maybe_lookup_ctx (stmt);
13063 gcc_assert (ctx);
13064 lower_omp_scan (gsi_p, ctx);
13065 break;
13066 case GIMPLE_OMP_CRITICAL:
13067 ctx = maybe_lookup_ctx (stmt);
13068 gcc_assert (ctx);
13069 lower_omp_critical (gsi_p, ctx);
13070 break;
13071 case GIMPLE_OMP_ATOMIC_LOAD:
13072 if ((ctx || task_shared_vars)
13073 && walk_tree (gimple_omp_atomic_load_rhs_ptr (
13074 as_a <gomp_atomic_load *> (stmt)),
13075 lower_omp_regimplify_p, ctx ? NULL : &wi, NULL))
13076 lower_omp_regimplify_operands (ctx, stmt, gsi_p);
13077 break;
13078 case GIMPLE_OMP_TARGET:
13079 ctx = maybe_lookup_ctx (stmt);
13080 gcc_assert (ctx);
13081 lower_omp_target (gsi_p, ctx);
13082 break;
13083 case GIMPLE_OMP_TEAMS:
13084 ctx = maybe_lookup_ctx (stmt);
13085 gcc_assert (ctx);
13086 if (gimple_omp_teams_host (as_a <gomp_teams *> (stmt)))
13087 lower_omp_taskreg (gsi_p, ctx);
13088 else
13089 lower_omp_teams (gsi_p, ctx);
13090 break;
13091 case GIMPLE_CALL:
13092 tree fndecl;
13093 call_stmt = as_a <gcall *> (stmt);
13094 fndecl = gimple_call_fndecl (call_stmt);
13095 if (fndecl
13096 && fndecl_built_in_p (fndecl, BUILT_IN_NORMAL))
13097 switch (DECL_FUNCTION_CODE (fndecl))
13098 {
13099 case BUILT_IN_GOMP_BARRIER:
13100 if (ctx == NULL)
13101 break;
13102 /* FALLTHRU */
13103 case BUILT_IN_GOMP_CANCEL:
13104 case BUILT_IN_GOMP_CANCELLATION_POINT:
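/* In a cancellable region, give the call a result and branch on
it (a sketch):

lhs = GOMP_cancel (...);
if (lhs != 0) goto <cancel_label>; else goto <fallthru_label>;

GOMP_barrier is first rewritten to GOMP_barrier_cancel so that it
too reports cancellation.  */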
13105 omp_context *cctx;
13106 cctx = ctx;
13107 if (gimple_code (cctx->stmt) == GIMPLE_OMP_SECTION)
13108 cctx = cctx->outer;
13109 gcc_assert (gimple_call_lhs (call_stmt) == NULL_TREE);
13110 if (!cctx->cancellable)
13111 {
13112 if (DECL_FUNCTION_CODE (fndecl)
13113 == BUILT_IN_GOMP_CANCELLATION_POINT)
13114 {
13115 stmt = gimple_build_nop ();
13116 gsi_replace (gsi_p, stmt, false);
13117 }
13118 break;
13119 }
13120 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_GOMP_BARRIER)
13121 {
13122 fndecl = builtin_decl_explicit (BUILT_IN_GOMP_BARRIER_CANCEL);
13123 gimple_call_set_fndecl (call_stmt, fndecl);
13124 gimple_call_set_fntype (call_stmt, TREE_TYPE (fndecl));
13125 }
13126 tree lhs;
13127 lhs = create_tmp_var (TREE_TYPE (TREE_TYPE (fndecl)));
13128 gimple_call_set_lhs (call_stmt, lhs);
13129 tree fallthru_label;
13130 fallthru_label = create_artificial_label (UNKNOWN_LOCATION);
13131 gimple *g;
13132 g = gimple_build_label (fallthru_label);
13133 gsi_insert_after (gsi_p, g, GSI_SAME_STMT);
13134 g = gimple_build_cond (NE_EXPR, lhs,
13135 fold_convert (TREE_TYPE (lhs),
13136 boolean_false_node),
13137 cctx->cancel_label, fallthru_label);
13138 gsi_insert_after (gsi_p, g, GSI_SAME_STMT);
13139 break;
13140 default:
13141 break;
13142 }
13143 goto regimplify;
13144
13145 case GIMPLE_ASSIGN:
13146 for (omp_context *up = ctx; up; up = up->outer)
13147 {
13148 if (gimple_code (up->stmt) == GIMPLE_OMP_ORDERED
13149 || gimple_code (up->stmt) == GIMPLE_OMP_CRITICAL
13150 || gimple_code (up->stmt) == GIMPLE_OMP_TASKGROUP
13151 || gimple_code (up->stmt) == GIMPLE_OMP_SECTION
13152 || gimple_code (up->stmt) == GIMPLE_OMP_SCAN
13153 || (gimple_code (up->stmt) == GIMPLE_OMP_TARGET
13154 && (gimple_omp_target_kind (up->stmt)
13155 == GF_OMP_TARGET_KIND_DATA)))
13156 continue;
13157 else if (!up->lastprivate_conditional_map)
13158 break;
13159 tree lhs = get_base_address (gimple_assign_lhs (stmt));
13160 if (TREE_CODE (lhs) == MEM_REF
13161 && DECL_P (TREE_OPERAND (lhs, 0))
13162 && TREE_CODE (TREE_TYPE (TREE_OPERAND (lhs,
13163 0))) == REFERENCE_TYPE)
13164 lhs = TREE_OPERAND (lhs, 0);
13165 if (DECL_P (lhs))
13166 if (tree *v = up->lastprivate_conditional_map->get (lhs))
13167 {
13168 tree clauses;
13169 if (up->combined_into_simd_safelen1)
13170 {
13171 up = up->outer;
13172 if (gimple_code (up->stmt) == GIMPLE_OMP_SCAN)
13173 up = up->outer;
13174 }
13175 if (gimple_code (up->stmt) == GIMPLE_OMP_FOR)
13176 clauses = gimple_omp_for_clauses (up->stmt);
13177 else
13178 clauses = gimple_omp_sections_clauses (up->stmt);
13179 tree c = omp_find_clause (clauses, OMP_CLAUSE__CONDTEMP_);
13180 if (!OMP_CLAUSE__CONDTEMP__ITER (c))
13181 c = omp_find_clause (OMP_CLAUSE_CHAIN (c),
13182 OMP_CLAUSE__CONDTEMP_);
13183 gcc_assert (OMP_CLAUSE__CONDTEMP__ITER (c));
13184 gimple *g = gimple_build_assign (*v, OMP_CLAUSE_DECL (c));
13185 gsi_insert_after (gsi_p, g, GSI_SAME_STMT);
13186 }
13187 }
13188 /* FALLTHRU */
13189
13190 default:
13191 regimplify:
13192 if ((ctx || task_shared_vars)
13193 && walk_gimple_op (stmt, lower_omp_regimplify_p,
13194 ctx ? NULL : &wi))
13195 {
/* Just remove clobbers.  This should happen only if we have
"privatized" local addressable variables in SIMD regions; the
clobber isn't needed in that case, and gimplifying the address
of the ARRAY_REF into a pointer and creating a MEM_REF-based
clobber would create worse code than we get with the clobber
dropped.  */
13202 if (gimple_clobber_p (stmt))
13203 {
13204 gsi_replace (gsi_p, gimple_build_nop (), true);
13205 break;
13206 }
13207 lower_omp_regimplify_operands (ctx, stmt, gsi_p);
13208 }
13209 break;
13210 }
13211 }
13212
13213 static void
13214 lower_omp (gimple_seq *body, omp_context *ctx)
13215 {
13216 location_t saved_location = input_location;
13217 gimple_stmt_iterator gsi;
13218 for (gsi = gsi_start (*body); !gsi_end_p (gsi); gsi_next (&gsi))
13219 lower_omp_1 (&gsi, ctx);
/* During gimplification, we haven't folded statements inside offloading
or taskreg regions (gimplify.c:maybe_fold_stmt); do that now.  */
13222 if (target_nesting_level || taskreg_nesting_level)
13223 for (gsi = gsi_start (*body); !gsi_end_p (gsi); gsi_next (&gsi))
13224 fold_stmt (&gsi);
13225 input_location = saved_location;
13226 }
13227
13228 /* Main entry point. */
13229
13230 static unsigned int
13231 execute_lower_omp (void)
13232 {
13233 gimple_seq body;
13234 int i;
13235 omp_context *ctx;
13236
13237 /* This pass always runs, to provide PROP_gimple_lomp.
13238 But often, there is nothing to do. */
13239 if (flag_openacc == 0 && flag_openmp == 0
13240 && flag_openmp_simd == 0)
13241 return 0;
13242
13243 all_contexts = splay_tree_new (splay_tree_compare_pointers, 0,
13244 delete_omp_context);
13245
13246 body = gimple_body (current_function_decl);
13247
13248 scan_omp (&body, NULL);
13249 gcc_assert (taskreg_nesting_level == 0);
13250 FOR_EACH_VEC_ELT (taskreg_contexts, i, ctx)
13251 finish_taskreg_scan (ctx);
13252 taskreg_contexts.release ();
13253
13254 if (all_contexts->root)
13255 {
13256 if (task_shared_vars)
13257 push_gimplify_context ();
13258 lower_omp (&body, NULL);
13259 if (task_shared_vars)
13260 pop_gimplify_context (NULL);
13261 }
13262
13263 if (all_contexts)
13264 {
13265 splay_tree_delete (all_contexts);
13266 all_contexts = NULL;
13267 }
13268 BITMAP_FREE (task_shared_vars);
13269 BITMAP_FREE (global_nonaddressable_vars);
13270
/* If the current function is a method, remove the artificial dummy
VAR_DECLs created for non-static data member privatization.  They
aren't needed for debuginfo or anything else, have already been
replaced everywhere in the IL, and cause problems with LTO.  */
13275 if (DECL_ARGUMENTS (current_function_decl)
13276 && DECL_ARTIFICIAL (DECL_ARGUMENTS (current_function_decl))
13277 && (TREE_CODE (TREE_TYPE (DECL_ARGUMENTS (current_function_decl)))
13278 == POINTER_TYPE))
13279 remove_member_access_dummy_vars (DECL_INITIAL (current_function_decl));
13280 return 0;
13281 }
13282
13283 namespace {
13284
13285 const pass_data pass_data_lower_omp =
13286 {
13287 GIMPLE_PASS, /* type */
13288 "omplower", /* name */
13289 OPTGROUP_OMP, /* optinfo_flags */
13290 TV_NONE, /* tv_id */
13291 PROP_gimple_any, /* properties_required */
13292 PROP_gimple_lomp | PROP_gimple_lomp_dev, /* properties_provided */
13293 0, /* properties_destroyed */
13294 0, /* todo_flags_start */
13295 0, /* todo_flags_finish */
13296 };
13297
13298 class pass_lower_omp : public gimple_opt_pass
13299 {
13300 public:
13301 pass_lower_omp (gcc::context *ctxt)
13302 : gimple_opt_pass (pass_data_lower_omp, ctxt)
13303 {}
13304
13305 /* opt_pass methods: */
13306 virtual unsigned int execute (function *) { return execute_lower_omp (); }
13307
13308 }; // class pass_lower_omp
13309
13310 } // anon namespace
13311
13312 gimple_opt_pass *
13313 make_pass_lower_omp (gcc::context *ctxt)
13314 {
13315 return new pass_lower_omp (ctxt);
13316 }
13317 \f
13318 /* The following is a utility to diagnose structured block violations.
13319 It is not part of the "omplower" pass, as that's invoked too late. It
13320 should be invoked by the respective front ends after gimplification. */
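/* E.g. (hypothetical user code)

#pragma omp parallel
{ goto l; }
l:;

branches out of the structured block and must be rejected.  */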
13321
13322 static splay_tree all_labels;
13323
13324 /* Check for mismatched contexts and generate an error if needed. Return
13325 true if an error is detected. */
13326
13327 static bool
13328 diagnose_sb_0 (gimple_stmt_iterator *gsi_p,
13329 gimple *branch_ctx, gimple *label_ctx)
13330 {
13331 gcc_checking_assert (!branch_ctx || is_gimple_omp (branch_ctx));
13332 gcc_checking_assert (!label_ctx || is_gimple_omp (label_ctx));
13333
13334 if (label_ctx == branch_ctx)
13335 return false;
13336
const char *kind = NULL;
13338
13339 if (flag_openacc)
13340 {
13341 if ((branch_ctx && is_gimple_omp_oacc (branch_ctx))
13342 || (label_ctx && is_gimple_omp_oacc (label_ctx)))
13343 {
13344 gcc_checking_assert (kind == NULL);
13345 kind = "OpenACC";
13346 }
13347 }
13348 if (kind == NULL)
13349 {
13350 gcc_checking_assert (flag_openmp || flag_openmp_simd);
13351 kind = "OpenMP";
13352 }
13353
13354 /* Previously we kept track of the label's entire context in diagnose_sb_[12]
13355 so we could traverse it and issue a correct "exit" or "enter" error
13356 message upon a structured block violation.
13357
13358 We built the context by building a list with tree_cons'ing, but there is
13359 no easy counterpart in gimple tuples. It seems like far too much work
13360 for issuing exit/enter error messages. If someone really misses the
13361 distinct error message... patches welcome. */
13362
13363 #if 0
/* Try to avoid confusing the user by producing an error message
13365 with correct "exit" or "enter" verbiage. We prefer "exit"
13366 unless we can show that LABEL_CTX is nested within BRANCH_CTX. */
13367 if (branch_ctx == NULL)
13368 exit_p = false;
13369 else
13370 {
13371 while (label_ctx)
13372 {
13373 if (TREE_VALUE (label_ctx) == branch_ctx)
13374 {
13375 exit_p = false;
13376 break;
13377 }
13378 label_ctx = TREE_CHAIN (label_ctx);
13379 }
13380 }
13381
13382 if (exit_p)
13383 error ("invalid exit from %s structured block", kind);
13384 else
13385 error ("invalid entry to %s structured block", kind);
13386 #endif
13387
13388 /* If it's obvious we have an invalid entry, be specific about the error. */
13389 if (branch_ctx == NULL)
13390 error ("invalid entry to %s structured block", kind);
13391 else
13392 {
13393 /* Otherwise, be vague and lazy, but efficient. */
13394 error ("invalid branch to/from %s structured block", kind);
13395 }
13396
13397 gsi_replace (gsi_p, gimple_build_nop (), false);
13398 return true;
13399 }
13400
13401 /* Pass 1: Create a minimal tree of structured blocks, and record
13402 where each label is found. */
13403
13404 static tree
13405 diagnose_sb_1 (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
13406 struct walk_stmt_info *wi)
13407 {
13408 gimple *context = (gimple *) wi->info;
13409 gimple *inner_context;
13410 gimple *stmt = gsi_stmt (*gsi_p);
13411
13412 *handled_ops_p = true;
13413
13414 switch (gimple_code (stmt))
13415 {
13416 WALK_SUBSTMTS;
13417
13418 case GIMPLE_OMP_PARALLEL:
13419 case GIMPLE_OMP_TASK:
13420 case GIMPLE_OMP_SECTIONS:
13421 case GIMPLE_OMP_SINGLE:
13422 case GIMPLE_OMP_SECTION:
13423 case GIMPLE_OMP_MASTER:
13424 case GIMPLE_OMP_ORDERED:
13425 case GIMPLE_OMP_SCAN:
13426 case GIMPLE_OMP_CRITICAL:
13427 case GIMPLE_OMP_TARGET:
13428 case GIMPLE_OMP_TEAMS:
13429 case GIMPLE_OMP_TASKGROUP:
13430 /* The minimal context here is just the current OMP construct. */
13431 inner_context = stmt;
13432 wi->info = inner_context;
13433 walk_gimple_seq (gimple_omp_body (stmt), diagnose_sb_1, NULL, wi);
13434 wi->info = context;
13435 break;
13436
13437 case GIMPLE_OMP_FOR:
13438 inner_context = stmt;
13439 wi->info = inner_context;
13440 /* gimple_omp_for_{index,initial,final} are all DECLs; no need to
13441 walk them. */
13442 walk_gimple_seq (gimple_omp_for_pre_body (stmt),
13443 diagnose_sb_1, NULL, wi);
13444 walk_gimple_seq (gimple_omp_body (stmt), diagnose_sb_1, NULL, wi);
13445 wi->info = context;
13446 break;
13447
13448 case GIMPLE_LABEL:
13449 splay_tree_insert (all_labels,
13450 (splay_tree_key) gimple_label_label (
13451 as_a <glabel *> (stmt)),
13452 (splay_tree_value) context);
13453 break;
13454
13455 default:
13456 break;
13457 }
13458
13459 return NULL_TREE;
13460 }
13461
/* Pass 2: Check each branch and see if its context differs from the
destination label's context.  */
13464
13465 static tree
13466 diagnose_sb_2 (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
13467 struct walk_stmt_info *wi)
13468 {
13469 gimple *context = (gimple *) wi->info;
13470 splay_tree_node n;
13471 gimple *stmt = gsi_stmt (*gsi_p);
13472
13473 *handled_ops_p = true;
13474
13475 switch (gimple_code (stmt))
13476 {
13477 WALK_SUBSTMTS;
13478
13479 case GIMPLE_OMP_PARALLEL:
13480 case GIMPLE_OMP_TASK:
13481 case GIMPLE_OMP_SECTIONS:
13482 case GIMPLE_OMP_SINGLE:
13483 case GIMPLE_OMP_SECTION:
13484 case GIMPLE_OMP_MASTER:
13485 case GIMPLE_OMP_ORDERED:
13486 case GIMPLE_OMP_SCAN:
13487 case GIMPLE_OMP_CRITICAL:
13488 case GIMPLE_OMP_TARGET:
13489 case GIMPLE_OMP_TEAMS:
13490 case GIMPLE_OMP_TASKGROUP:
13491 wi->info = stmt;
13492 walk_gimple_seq_mod (gimple_omp_body_ptr (stmt), diagnose_sb_2, NULL, wi);
13493 wi->info = context;
13494 break;
13495
13496 case GIMPLE_OMP_FOR:
13497 wi->info = stmt;
13498 /* gimple_omp_for_{index,initial,final} are all DECLs; no need to
13499 walk them. */
13500 walk_gimple_seq_mod (gimple_omp_for_pre_body_ptr (stmt),
13501 diagnose_sb_2, NULL, wi);
13502 walk_gimple_seq_mod (gimple_omp_body_ptr (stmt), diagnose_sb_2, NULL, wi);
13503 wi->info = context;
13504 break;
13505
13506 case GIMPLE_COND:
13507 {
13508 gcond *cond_stmt = as_a <gcond *> (stmt);
13509 tree lab = gimple_cond_true_label (cond_stmt);
13510 if (lab)
13511 {
13512 n = splay_tree_lookup (all_labels,
13513 (splay_tree_key) lab);
13514 diagnose_sb_0 (gsi_p, context,
13515 n ? (gimple *) n->value : NULL);
13516 }
13517 lab = gimple_cond_false_label (cond_stmt);
13518 if (lab)
13519 {
13520 n = splay_tree_lookup (all_labels,
13521 (splay_tree_key) lab);
13522 diagnose_sb_0 (gsi_p, context,
13523 n ? (gimple *) n->value : NULL);
13524 }
13525 }
13526 break;
13527
13528 case GIMPLE_GOTO:
13529 {
13530 tree lab = gimple_goto_dest (stmt);
13531 if (TREE_CODE (lab) != LABEL_DECL)
13532 break;
13533
13534 n = splay_tree_lookup (all_labels, (splay_tree_key) lab);
13535 diagnose_sb_0 (gsi_p, context, n ? (gimple *) n->value : NULL);
13536 }
13537 break;
13538
13539 case GIMPLE_SWITCH:
13540 {
13541 gswitch *switch_stmt = as_a <gswitch *> (stmt);
13542 unsigned int i;
13543 for (i = 0; i < gimple_switch_num_labels (switch_stmt); ++i)
13544 {
13545 tree lab = CASE_LABEL (gimple_switch_label (switch_stmt, i));
13546 n = splay_tree_lookup (all_labels, (splay_tree_key) lab);
13547 if (n && diagnose_sb_0 (gsi_p, context, (gimple *) n->value))
13548 break;
13549 }
13550 }
13551 break;
13552
13553 case GIMPLE_RETURN:
13554 diagnose_sb_0 (gsi_p, context, NULL);
13555 break;
13556
13557 default:
13558 break;
13559 }
13560
13561 return NULL_TREE;
13562 }
13563
13564 static unsigned int
13565 diagnose_omp_structured_block_errors (void)
13566 {
13567 struct walk_stmt_info wi;
13568 gimple_seq body = gimple_body (current_function_decl);
13569
13570 all_labels = splay_tree_new (splay_tree_compare_pointers, 0, 0);
13571
13572 memset (&wi, 0, sizeof (wi));
13573 walk_gimple_seq (body, diagnose_sb_1, NULL, &wi);
13574
13575 memset (&wi, 0, sizeof (wi));
13576 wi.want_locations = true;
13577 walk_gimple_seq_mod (&body, diagnose_sb_2, NULL, &wi);
13578
13579 gimple_set_body (current_function_decl, body);
13580
13581 splay_tree_delete (all_labels);
13582 all_labels = NULL;
13583
13584 return 0;
13585 }
13586
13587 namespace {
13588
13589 const pass_data pass_data_diagnose_omp_blocks =
13590 {
13591 GIMPLE_PASS, /* type */
13592 "*diagnose_omp_blocks", /* name */
13593 OPTGROUP_OMP, /* optinfo_flags */
13594 TV_NONE, /* tv_id */
13595 PROP_gimple_any, /* properties_required */
13596 0, /* properties_provided */
13597 0, /* properties_destroyed */
13598 0, /* todo_flags_start */
13599 0, /* todo_flags_finish */
13600 };
13601
13602 class pass_diagnose_omp_blocks : public gimple_opt_pass
13603 {
13604 public:
13605 pass_diagnose_omp_blocks (gcc::context *ctxt)
13606 : gimple_opt_pass (pass_data_diagnose_omp_blocks, ctxt)
13607 {}
13608
13609 /* opt_pass methods: */
13610 virtual bool gate (function *)
13611 {
13612 return flag_openacc || flag_openmp || flag_openmp_simd;
13613 }
13614 virtual unsigned int execute (function *)
13615 {
13616 return diagnose_omp_structured_block_errors ();
13617 }
13618
13619 }; // class pass_diagnose_omp_blocks
13620
13621 } // anon namespace
13622
13623 gimple_opt_pass *
13624 make_pass_diagnose_omp_blocks (gcc::context *ctxt)
13625 {
13626 return new pass_diagnose_omp_blocks (ctxt);
13627 }
13628 \f
13629
13630 #include "gt-omp-low.h"