Daily bump.
[gcc.git] / gcc / omp-low.c
1 /* Lowering pass for OMP directives. Converts OMP directives into explicit
2 calls to the runtime library (libgomp), data marshalling to implement data
3 sharing and copying clauses, offloading to accelerators, and more.
4
5 Contributed by Diego Novillo <dnovillo@redhat.com>
6
7 Copyright (C) 2005-2019 Free Software Foundation, Inc.
8
9 This file is part of GCC.
10
11 GCC is free software; you can redistribute it and/or modify it under
12 the terms of the GNU General Public License as published by the Free
13 Software Foundation; either version 3, or (at your option) any later
14 version.
15
16 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
17 WARRANTY; without even the implied warranty of MERCHANTABILITY or
18 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
19 for more details.
20
21 You should have received a copy of the GNU General Public License
22 along with GCC; see the file COPYING3. If not see
23 <http://www.gnu.org/licenses/>. */
24
25 #include "config.h"
26 #include "system.h"
27 #include "coretypes.h"
28 #include "backend.h"
29 #include "target.h"
30 #include "tree.h"
31 #include "gimple.h"
32 #include "tree-pass.h"
33 #include "ssa.h"
34 #include "cgraph.h"
35 #include "pretty-print.h"
36 #include "diagnostic-core.h"
37 #include "fold-const.h"
38 #include "stor-layout.h"
39 #include "internal-fn.h"
40 #include "gimple-fold.h"
41 #include "gimplify.h"
42 #include "gimple-iterator.h"
43 #include "gimplify-me.h"
44 #include "gimple-walk.h"
45 #include "tree-iterator.h"
46 #include "tree-inline.h"
47 #include "langhooks.h"
48 #include "tree-dfa.h"
49 #include "tree-ssa.h"
50 #include "splay-tree.h"
51 #include "omp-general.h"
52 #include "omp-low.h"
53 #include "omp-grid.h"
54 #include "gimple-low.h"
55 #include "symbol-summary.h"
56 #include "tree-nested.h"
57 #include "context.h"
58 #include "gomp-constants.h"
59 #include "gimple-pretty-print.h"
60 #include "hsa-common.h"
61 #include "stringpool.h"
62 #include "attribs.h"
63
64 /* Lowering of OMP parallel and workshare constructs proceeds in two
65 phases. The first phase scans the function looking for OMP statements
66 and then for variables that must be replaced to satisfy data sharing
67 clauses. The second phase expands code for the constructs, as well as
68 re-gimplifying things when variables have been replaced with complex
69 expressions.
70
71 Final code generation is done by pass_expand_omp. The flowgraph is
72 scanned for regions which are then moved to a new
73 function, to be invoked by the thread library, or offloaded. */
74
/* Context structure.  Used to store information about each parallel
   directive in the code.  One omp_context is created per OMP statement
   encountered during scanning (see new_omp_context).  */

struct omp_context
{
  /* This field must be at the beginning, as we do "inheritance": Some
     callback functions for tree-inline.c (e.g., omp_copy_decl)
     receive a copy_body_data pointer that is up-casted to an
     omp_context pointer.  */
  copy_body_data cb;

  /* The tree of contexts corresponding to the encountered constructs.  */
  struct omp_context *outer;
  gimple *stmt;

  /* Map variables to fields in a structure that allows communication
     between sending and receiving threads.  */
  splay_tree field_map;
  tree record_type;
  tree sender_decl;
  tree receiver_decl;

  /* These are used just by task contexts, if task firstprivate fn is
     needed.  srecord_type is used to communicate from the thread
     that encountered the task construct to task firstprivate fn,
     record_type is allocated by GOMP_task, initialized by task firstprivate
     fn and passed to the task body fn.  */
  splay_tree sfield_map;
  tree srecord_type;

  /* A chain of variables to add to the top-level block surrounding the
     construct.  In the case of a parallel, this is in the child function.  */
  tree block_vars;

  /* Label to which GOMP_cancel{,llation_point} and explicit and implicit
     barriers should jump to during omplower pass.  */
  tree cancel_label;

  /* The sibling GIMPLE_OMP_FOR simd with _simt_ clause or NULL
     otherwise.  */
  gimple *simt_stmt;

  /* For task reductions registered in this context, a vector containing
     the length of the private copies block (if constant, otherwise NULL)
     and then offsets (if constant, otherwise NULL) for each entry.  */
  vec<tree> task_reductions;

  /* And a hash map from the reduction clauses to the registered array
     elts.  */
  hash_map<tree, unsigned> *task_reduction_map;

  /* Nesting depth of this context.  Used to beautify error messages re
     invalid gotos.  The outermost ctx is depth 1, with depth 0 being
     reserved for the main body of the function.  */
  int depth;

  /* True if this parallel directive is nested within another.  */
  bool is_nested;

  /* True if this construct can be cancelled.  */
  bool cancellable;
};
137
/* Splay tree mapping each OMP statement to its omp_context (see
   new_omp_context / delete_omp_context).  */
static splay_tree all_contexts;
/* Nesting level counters maintained during scanning of taskreg
   (parallel/task) and target constructs respectively.  */
static int taskreg_nesting_level;
static int target_nesting_level;
/* DECL_UIDs of variables that were made addressable only because a task
   needs to take their address (see use_pointer_for_field).  */
static bitmap task_shared_vars;
/* Contexts of taskreg constructs collected during scanning — consumed
   elsewhere in this file.  */
static vec<omp_context *> taskreg_contexts;

static void scan_omp (gimple_seq *, omp_context *);
static tree scan_omp_1_op (tree *, int *, void *);
146
/* Case labels for GIMPLE codes that merely wrap other statements;
   expanded inside switches over gimple_code in walk callbacks so the
   walker descends into their bodies.  Relies on a HANDLED_OPS_P
   variable being in scope at the expansion site.  */
#define WALK_SUBSTMTS \
    case GIMPLE_BIND: \
    case GIMPLE_TRY: \
    case GIMPLE_CATCH: \
    case GIMPLE_EH_FILTER: \
    case GIMPLE_TRANSACTION: \
      /* The sub-statements for these should be walked.  */ \
      *handled_ops_p = false; \
      break;
156
157 /* Return true if CTX corresponds to an oacc parallel region. */
158
159 static bool
160 is_oacc_parallel (omp_context *ctx)
161 {
162 enum gimple_code outer_type = gimple_code (ctx->stmt);
163 return ((outer_type == GIMPLE_OMP_TARGET)
164 && (gimple_omp_target_kind (ctx->stmt)
165 == GF_OMP_TARGET_KIND_OACC_PARALLEL));
166 }
167
168 /* Return true if CTX corresponds to an oacc kernels region. */
169
170 static bool
171 is_oacc_kernels (omp_context *ctx)
172 {
173 enum gimple_code outer_type = gimple_code (ctx->stmt);
174 return ((outer_type == GIMPLE_OMP_TARGET)
175 && (gimple_omp_target_kind (ctx->stmt)
176 == GF_OMP_TARGET_KIND_OACC_KERNELS));
177 }
178
179 /* If DECL is the artificial dummy VAR_DECL created for non-static
180 data member privatization, return the underlying "this" parameter,
181 otherwise return NULL. */
182
183 tree
184 omp_member_access_dummy_var (tree decl)
185 {
186 if (!VAR_P (decl)
187 || !DECL_ARTIFICIAL (decl)
188 || !DECL_IGNORED_P (decl)
189 || !DECL_HAS_VALUE_EXPR_P (decl)
190 || !lang_hooks.decls.omp_disregard_value_expr (decl, false))
191 return NULL_TREE;
192
193 tree v = DECL_VALUE_EXPR (decl);
194 if (TREE_CODE (v) != COMPONENT_REF)
195 return NULL_TREE;
196
197 while (1)
198 switch (TREE_CODE (v))
199 {
200 case COMPONENT_REF:
201 case MEM_REF:
202 case INDIRECT_REF:
203 CASE_CONVERT:
204 case POINTER_PLUS_EXPR:
205 v = TREE_OPERAND (v, 0);
206 continue;
207 case PARM_DECL:
208 if (DECL_CONTEXT (v) == current_function_decl
209 && DECL_ARTIFICIAL (v)
210 && TREE_CODE (TREE_TYPE (v)) == POINTER_TYPE)
211 return v;
212 return NULL_TREE;
213 default:
214 return NULL_TREE;
215 }
216 }
217
218 /* Helper for unshare_and_remap, called through walk_tree. */
219
220 static tree
221 unshare_and_remap_1 (tree *tp, int *walk_subtrees, void *data)
222 {
223 tree *pair = (tree *) data;
224 if (*tp == pair[0])
225 {
226 *tp = unshare_expr (pair[1]);
227 *walk_subtrees = 0;
228 }
229 else if (IS_TYPE_OR_DECL_P (*tp))
230 *walk_subtrees = 0;
231 return NULL_TREE;
232 }
233
234 /* Return unshare_expr (X) with all occurrences of FROM
235 replaced with TO. */
236
237 static tree
238 unshare_and_remap (tree x, tree from, tree to)
239 {
240 tree pair[2] = { from, to };
241 x = unshare_expr (x);
242 walk_tree (&x, unshare_and_remap_1, pair, NULL);
243 return x;
244 }
245
246 /* Convenience function for calling scan_omp_1_op on tree operands. */
247
248 static inline tree
249 scan_omp_op (tree *tp, omp_context *ctx)
250 {
251 struct walk_stmt_info wi;
252
253 memset (&wi, 0, sizeof (wi));
254 wi.info = ctx;
255 wi.want_locations = true;
256
257 return walk_tree (tp, scan_omp_1_op, &wi, NULL);
258 }
259
260 static void lower_omp (gimple_seq *, omp_context *);
261 static tree lookup_decl_in_outer_ctx (tree, omp_context *);
262 static tree maybe_lookup_decl_in_outer_ctx (tree, omp_context *);
263
264 /* Return true if CTX is for an omp parallel. */
265
266 static inline bool
267 is_parallel_ctx (omp_context *ctx)
268 {
269 return gimple_code (ctx->stmt) == GIMPLE_OMP_PARALLEL;
270 }
271
272
273 /* Return true if CTX is for an omp task. */
274
275 static inline bool
276 is_task_ctx (omp_context *ctx)
277 {
278 return gimple_code (ctx->stmt) == GIMPLE_OMP_TASK;
279 }
280
281
282 /* Return true if CTX is for an omp taskloop. */
283
284 static inline bool
285 is_taskloop_ctx (omp_context *ctx)
286 {
287 return gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
288 && gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_TASKLOOP;
289 }
290
291
292 /* Return true if CTX is for a host omp teams. */
293
294 static inline bool
295 is_host_teams_ctx (omp_context *ctx)
296 {
297 return gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS
298 && gimple_omp_teams_host (as_a <gomp_teams *> (ctx->stmt));
299 }
300
301 /* Return true if CTX is for an omp parallel or omp task or host omp teams
302 (the last one is strictly not a task region in OpenMP speak, but we
303 need to treat it similarly). */
304
305 static inline bool
306 is_taskreg_ctx (omp_context *ctx)
307 {
308 return is_parallel_ctx (ctx) || is_task_ctx (ctx) || is_host_teams_ctx (ctx);
309 }
310
311 /* Return true if EXPR is variable sized. */
312
313 static inline bool
314 is_variable_sized (const_tree expr)
315 {
316 return !TREE_CONSTANT (TYPE_SIZE_UNIT (TREE_TYPE (expr)));
317 }
318
319 /* Lookup variables. The "maybe" form
320 allows for the variable form to not have been entered, otherwise we
321 assert that the variable must have been entered. */
322
323 static inline tree
324 lookup_decl (tree var, omp_context *ctx)
325 {
326 tree *n = ctx->cb.decl_map->get (var);
327 return *n;
328 }
329
330 static inline tree
331 maybe_lookup_decl (const_tree var, omp_context *ctx)
332 {
333 tree *n = ctx->cb.decl_map->get (const_cast<tree> (var));
334 return n ? *n : NULL_TREE;
335 }
336
337 static inline tree
338 lookup_field (tree var, omp_context *ctx)
339 {
340 splay_tree_node n;
341 n = splay_tree_lookup (ctx->field_map, (splay_tree_key) var);
342 return (tree) n->value;
343 }
344
345 static inline tree
346 lookup_sfield (splay_tree_key key, omp_context *ctx)
347 {
348 splay_tree_node n;
349 n = splay_tree_lookup (ctx->sfield_map
350 ? ctx->sfield_map : ctx->field_map, key);
351 return (tree) n->value;
352 }
353
354 static inline tree
355 lookup_sfield (tree var, omp_context *ctx)
356 {
357 return lookup_sfield ((splay_tree_key) var, ctx);
358 }
359
360 static inline tree
361 maybe_lookup_field (splay_tree_key key, omp_context *ctx)
362 {
363 splay_tree_node n;
364 n = splay_tree_lookup (ctx->field_map, key);
365 return n ? (tree) n->value : NULL_TREE;
366 }
367
368 static inline tree
369 maybe_lookup_field (tree var, omp_context *ctx)
370 {
371 return maybe_lookup_field ((splay_tree_key) var, ctx);
372 }
373
/* Return true if DECL should be copied by pointer.  SHARED_CTX is
   the parallel context if DECL is to be shared.  Side effect: for task
   contexts this may mark the outer variable addressable and record its
   DECL_UID in TASK_SHARED_VARS.  */

static bool
use_pointer_for_field (tree decl, omp_context *shared_ctx)
{
  /* Aggregates and atomics are always passed by reference.  */
  if (AGGREGATE_TYPE_P (TREE_TYPE (decl))
      || TYPE_ATOMIC (TREE_TYPE (decl)))
    return true;

  /* We can only use copy-in/copy-out semantics for shared variables
     when we know the value is not accessible from an outer scope.  */
  if (shared_ctx)
    {
      gcc_assert (!is_gimple_omp_oacc (shared_ctx->stmt));

      /* ??? Trivially accessible from anywhere.  But why would we even
	 be passing an address in this case?  Should we simply assert
	 this to be false, or should we have a cleanup pass that removes
	 these from the list of mappings?  */
      if (is_global_var (maybe_lookup_decl_in_outer_ctx (decl, shared_ctx)))
	return true;

      /* For variables with DECL_HAS_VALUE_EXPR_P set, we cannot tell
	 without analyzing the expression whether or not its location
	 is accessible to anyone else.  In the case of nested parallel
	 regions it certainly may be.  */
      if (TREE_CODE (decl) != RESULT_DECL && DECL_HAS_VALUE_EXPR_P (decl))
	return true;

      /* Do not use copy-in/copy-out for variables that have their
	 address taken.  */
      if (TREE_ADDRESSABLE (decl))
	return true;

      /* lower_send_shared_vars only uses copy-in, but not copy-out
	 for these.  */
      if (TREE_READONLY (decl)
	  || ((TREE_CODE (decl) == RESULT_DECL
	       || TREE_CODE (decl) == PARM_DECL)
	      && DECL_BY_REFERENCE (decl)))
	return false;

      /* Disallow copy-in/out in nested parallel if
	 decl is shared in outer parallel, otherwise
	 each thread could store the shared variable
	 in its own copy-in location, making the
	 variable no longer really shared.  */
      if (shared_ctx->is_nested)
	{
	  omp_context *up;

	  for (up = shared_ctx->outer; up; up = up->outer)
	    if (is_taskreg_ctx (up) && maybe_lookup_decl (decl, up))
	      break;

	  if (up)
	    {
	      tree c;

	      for (c = gimple_omp_taskreg_clauses (up->stmt);
		   c; c = OMP_CLAUSE_CHAIN (c))
		if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED
		    && OMP_CLAUSE_DECL (c) == decl)
		  break;

	      if (c)
		goto maybe_mark_addressable_and_ret;
	    }
	}

      /* For tasks avoid using copy-in/out.  As tasks can be
	 deferred or executed in different thread, when GOMP_task
	 returns, the task hasn't necessarily terminated.  */
      if (is_task_ctx (shared_ctx))
	{
	  tree outer;
	maybe_mark_addressable_and_ret:
	  outer = maybe_lookup_decl_in_outer_ctx (decl, shared_ctx);
	  if (is_gimple_reg (outer) && !omp_member_access_dummy_var (outer))
	    {
	      /* Taking address of OUTER in lower_send_shared_vars
		 might need regimplification of everything that uses the
		 variable.  */
	      if (!task_shared_vars)
		task_shared_vars = BITMAP_ALLOC (NULL);
	      bitmap_set_bit (task_shared_vars, DECL_UID (outer));
	      TREE_ADDRESSABLE (outer) = 1;
	    }
	  return true;
	}
    }

  return false;
}
469
/* Construct a new automatic decl similar to VAR, chained onto
   CTX->BLOCK_VARS.  */

static tree
omp_copy_decl_2 (tree var, tree name, tree type, omp_context *ctx)
{
  tree copy = copy_var_decl (var, name, type);

  DECL_CONTEXT (copy) = current_function_decl;
  DECL_CHAIN (copy) = ctx->block_vars;
  /* If VAR is listed in task_shared_vars, it means it wasn't
     originally addressable and is that way only because a task needed
     to take its address.  The privatized copy does not need to be
     addressable.  */
  if (TREE_ADDRESSABLE (var)
      && task_shared_vars
      && bitmap_bit_p (task_shared_vars, DECL_UID (var)))
    TREE_ADDRESSABLE (copy) = 0;
  ctx->block_vars = copy;

  return copy;
}
491
492 static tree
493 omp_copy_decl_1 (tree var, omp_context *ctx)
494 {
495 return omp_copy_decl_2 (var, DECL_NAME (var), TREE_TYPE (var), ctx);
496 }
497
498 /* Build COMPONENT_REF and set TREE_THIS_VOLATILE and TREE_READONLY on it
499 as appropriate. */
500 static tree
501 omp_build_component_ref (tree obj, tree field)
502 {
503 tree ret = build3 (COMPONENT_REF, TREE_TYPE (field), obj, field, NULL);
504 if (TREE_THIS_VOLATILE (field))
505 TREE_THIS_VOLATILE (ret) |= 1;
506 if (TREE_READONLY (field))
507 TREE_READONLY (ret) |= 1;
508 return ret;
509 }
510
511 /* Build tree nodes to access the field for VAR on the receiver side. */
512
513 static tree
514 build_receiver_ref (tree var, bool by_ref, omp_context *ctx)
515 {
516 tree x, field = lookup_field (var, ctx);
517
518 /* If the receiver record type was remapped in the child function,
519 remap the field into the new record type. */
520 x = maybe_lookup_field (field, ctx);
521 if (x != NULL)
522 field = x;
523
524 x = build_simple_mem_ref (ctx->receiver_decl);
525 TREE_THIS_NOTRAP (x) = 1;
526 x = omp_build_component_ref (x, field);
527 if (by_ref)
528 {
529 x = build_simple_mem_ref (x);
530 TREE_THIS_NOTRAP (x) = 1;
531 }
532
533 return x;
534 }
535
/* Build tree nodes to access VAR in the scope outer to CTX.  In the case
   of a parallel, this is a component reference; for workshare constructs
   this is some variable.  CODE is the clause code the reference appears
   in, or OMP_CLAUSE_ERROR when there is none.  */

static tree
build_outer_var_ref (tree var, omp_context *ctx,
		     enum omp_clause_code code = OMP_CLAUSE_ERROR)
{
  tree x;
  omp_context *outer = ctx->outer;
  /* Look through GIMPLE_OMP_TASKGROUP contexts when searching outward.  */
  while (outer && gimple_code (outer->stmt) == GIMPLE_OMP_TASKGROUP)
    outer = outer->outer;

  if (is_global_var (maybe_lookup_decl_in_outer_ctx (var, ctx)))
    /* Globals are referenced directly.  */
    x = var;
  else if (is_variable_sized (var))
    {
      /* For variable-sized decls, recurse on the pointer stored in the
	 value expression and dereference the result.  */
      x = TREE_OPERAND (DECL_VALUE_EXPR (var), 0);
      x = build_outer_var_ref (x, ctx, code);
      x = build_simple_mem_ref (x);
    }
  else if (is_taskreg_ctx (ctx))
    {
      bool by_ref = use_pointer_for_field (var, NULL);
      x = build_receiver_ref (var, by_ref, ctx);
    }
  else if ((gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
	    && gimple_omp_for_kind (ctx->stmt) & GF_OMP_FOR_SIMD)
	   || (code == OMP_CLAUSE_PRIVATE
	       && (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
		   || gimple_code (ctx->stmt) == GIMPLE_OMP_SECTIONS
		   || gimple_code (ctx->stmt) == GIMPLE_OMP_SINGLE)))
    {
      /* #pragma omp simd isn't a worksharing construct, and can reference
	 even private vars in its linear etc. clauses.
	 Similarly for OMP_CLAUSE_PRIVATE with outer ref, that can refer
	 to private vars in all worksharing constructs.  */
      x = NULL_TREE;
      if (outer && is_taskreg_ctx (outer))
	x = lookup_decl (var, outer);
      else if (outer)
	x = maybe_lookup_decl_in_outer_ctx (var, ctx);
      if (x == NULL_TREE)
	x = var;
    }
  else if (code == OMP_CLAUSE_LASTPRIVATE && is_taskloop_ctx (ctx))
    {
      /* Taskloop keys its field map by &DECL_UID (see install_var_field
	 with mask bit 8).  */
      gcc_assert (outer);
      splay_tree_node n
	= splay_tree_lookup (outer->field_map,
			     (splay_tree_key) &DECL_UID (var));
      if (n == NULL)
	{
	  if (is_global_var (maybe_lookup_decl_in_outer_ctx (var, outer)))
	    x = var;
	  else
	    x = lookup_decl (var, outer);
	}
      else
	{
	  tree field = (tree) n->value;
	  /* If the receiver record type was remapped in the child function,
	     remap the field into the new record type.  */
	  x = maybe_lookup_field (field, outer);
	  if (x != NULL)
	    field = x;

	  x = build_simple_mem_ref (outer->receiver_decl);
	  x = omp_build_component_ref (x, field);
	  if (use_pointer_for_field (var, outer))
	    x = build_simple_mem_ref (x);
	}
    }
  else if (outer)
    {
      /* Skip a GIMPLE_OMP_GRID_BODY context and use its enclosing one.  */
      if (gimple_code (outer->stmt) == GIMPLE_OMP_GRID_BODY)
	{
	  outer = outer->outer;
	  gcc_assert (outer
		      && gimple_code (outer->stmt) != GIMPLE_OMP_GRID_BODY);
	}
      x = lookup_decl (var, outer);
    }
  else if (omp_is_reference (var))
    /* This can happen with orphaned constructs.  If var is reference, it is
       possible it is shared and as such valid.  */
    x = var;
  else if (omp_member_access_dummy_var (var))
    x = var;
  else
    gcc_unreachable ();

  if (x == var)
    {
      /* For member-access dummy vars, substitute the value expression
	 with the underlying "this" remapped to the outer context.  */
      tree t = omp_member_access_dummy_var (var);
      if (t)
	{
	  x = DECL_VALUE_EXPR (var);
	  tree o = maybe_lookup_decl_in_outer_ctx (t, ctx);
	  if (o != t)
	    x = unshare_and_remap (x, t, o);
	  else
	    x = unshare_expr (x);
	}
    }

  if (omp_is_reference (var))
    x = build_simple_mem_ref (x);

  return x;
}
647
648 /* Build tree nodes to access the field for VAR on the sender side. */
649
650 static tree
651 build_sender_ref (splay_tree_key key, omp_context *ctx)
652 {
653 tree field = lookup_sfield (key, ctx);
654 return omp_build_component_ref (ctx->sender_decl, field);
655 }
656
657 static tree
658 build_sender_ref (tree var, omp_context *ctx)
659 {
660 return build_sender_ref ((splay_tree_key) var, ctx);
661 }
662
/* Add a new field for VAR inside the structure CTX->SENDER_DECL.  MASK
   is a bitmask of properties:
     1 - install the field into CTX->RECORD_TYPE / CTX->FIELD_MAP
     2 - install the field into CTX->SRECORD_TYPE / CTX->SFIELD_MAP
     4 - VAR has array type; install a pointer-to-pointer field instead
     8 - key the splay trees by &DECL_UID (VAR) rather than VAR itself.  */

static void
install_var_field (tree var, bool by_ref, int mask, omp_context *ctx)
{
  tree field, type, sfield = NULL_TREE;
  splay_tree_key key = (splay_tree_key) var;

  if ((mask & 8) != 0)
    {
      key = (splay_tree_key) &DECL_UID (var);
      gcc_checking_assert (key != (splay_tree_key) var);
    }
  /* The field must not have been installed already.  */
  gcc_assert ((mask & 1) == 0
	      || !splay_tree_lookup (ctx->field_map, key));
  gcc_assert ((mask & 2) == 0 || !ctx->sfield_map
	      || !splay_tree_lookup (ctx->sfield_map, key));
  gcc_assert ((mask & 3) == 3
	      || !is_gimple_omp_oacc (ctx->stmt));

  type = TREE_TYPE (var);
  /* Prevent redeclaring the var in the split-off function with a restrict
     pointer type.  Note that we only clear type itself, restrict qualifiers in
     the pointed-to type will be ignored by points-to analysis.  */
  if (POINTER_TYPE_P (type)
      && TYPE_RESTRICT (type))
    type = build_qualified_type (type, TYPE_QUALS (type) & ~TYPE_QUAL_RESTRICT);

  if (mask & 4)
    {
      gcc_assert (TREE_CODE (type) == ARRAY_TYPE);
      type = build_pointer_type (build_pointer_type (type));
    }
  else if (by_ref)
    type = build_pointer_type (type);
  else if ((mask & 3) == 1 && omp_is_reference (var))
    type = TREE_TYPE (type);

  field = build_decl (DECL_SOURCE_LOCATION (var),
		      FIELD_DECL, DECL_NAME (var), type);

  /* Remember what variable this field was created for.  This does have a
     side effect of making dwarf2out ignore this member, so for helpful
     debugging we clear it later in delete_omp_context.  */
  DECL_ABSTRACT_ORIGIN (field) = var;
  if (type == TREE_TYPE (var))
    {
      SET_DECL_ALIGN (field, DECL_ALIGN (var));
      DECL_USER_ALIGN (field) = DECL_USER_ALIGN (var);
      TREE_THIS_VOLATILE (field) = TREE_THIS_VOLATILE (var);
    }
  else
    SET_DECL_ALIGN (field, TYPE_ALIGN (type));

  if ((mask & 3) == 3)
    {
      insert_field_into_struct (ctx->record_type, field);
      if (ctx->srecord_type)
	{
	  sfield = build_decl (DECL_SOURCE_LOCATION (var),
			       FIELD_DECL, DECL_NAME (var), type);
	  DECL_ABSTRACT_ORIGIN (sfield) = var;
	  SET_DECL_ALIGN (sfield, DECL_ALIGN (field));
	  DECL_USER_ALIGN (sfield) = DECL_USER_ALIGN (field);
	  TREE_THIS_VOLATILE (sfield) = TREE_THIS_VOLATILE (field);
	  insert_field_into_struct (ctx->srecord_type, sfield);
	}
    }
  else
    {
      if (ctx->srecord_type == NULL_TREE)
	{
	  tree t;

	  /* Lazily create the sender record type, mirroring all fields
	     installed so far in record_type.  */
	  ctx->srecord_type = lang_hooks.types.make_type (RECORD_TYPE);
	  ctx->sfield_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
	  for (t = TYPE_FIELDS (ctx->record_type); t ; t = TREE_CHAIN (t))
	    {
	      sfield = build_decl (DECL_SOURCE_LOCATION (t),
				   FIELD_DECL, DECL_NAME (t), TREE_TYPE (t));
	      DECL_ABSTRACT_ORIGIN (sfield) = DECL_ABSTRACT_ORIGIN (t);
	      insert_field_into_struct (ctx->srecord_type, sfield);
	      splay_tree_insert (ctx->sfield_map,
				 (splay_tree_key) DECL_ABSTRACT_ORIGIN (t),
				 (splay_tree_value) sfield);
	    }
	}
      sfield = field;
      insert_field_into_struct ((mask & 1) ? ctx->record_type
				: ctx->srecord_type, field);
    }

  if (mask & 1)
    splay_tree_insert (ctx->field_map, key, (splay_tree_value) field);
  if ((mask & 2) && ctx->sfield_map)
    splay_tree_insert (ctx->sfield_map, key, (splay_tree_value) sfield);
}
761
762 static tree
763 install_var_local (tree var, omp_context *ctx)
764 {
765 tree new_var = omp_copy_decl_1 (var, ctx);
766 insert_decl_map (&ctx->cb, var, new_var);
767 return new_var;
768 }
769
/* Adjust the replacement for DECL in CTX for the new context.  This means
   copying the DECL_VALUE_EXPR, and fixing up the type.  */

static void
fixup_remapped_decl (tree decl, omp_context *ctx, bool private_debug)
{
  tree new_decl, size;

  new_decl = lookup_decl (decl, ctx);

  TREE_TYPE (new_decl) = remap_type (TREE_TYPE (decl), &ctx->cb);

  if ((!TREE_CONSTANT (DECL_SIZE (new_decl)) || private_debug)
      && DECL_HAS_VALUE_EXPR_P (decl))
    {
      /* Remap the value expression into the new context.  */
      tree ve = DECL_VALUE_EXPR (decl);
      walk_tree (&ve, copy_tree_body_r, &ctx->cb, NULL);
      SET_DECL_VALUE_EXPR (new_decl, ve);
      DECL_HAS_VALUE_EXPR_P (new_decl) = 1;
    }

  if (!TREE_CONSTANT (DECL_SIZE (new_decl)))
    {
      /* Variable-sized decl: remap its size trees too, falling back to
	 the (already remapped) type's sizes if remapping fails.  */
      size = remap_decl (DECL_SIZE (decl), &ctx->cb);
      if (size == error_mark_node)
	size = TYPE_SIZE (TREE_TYPE (new_decl));
      DECL_SIZE (new_decl) = size;

      size = remap_decl (DECL_SIZE_UNIT (decl), &ctx->cb);
      if (size == error_mark_node)
	size = TYPE_SIZE_UNIT (TREE_TYPE (new_decl));
      DECL_SIZE_UNIT (new_decl) = size;
    }
}
804
/* The callback for remap_decl.  Search all containing contexts for a
   mapping of the variable; this avoids having to duplicate the splay
   tree ahead of time.  We know a mapping doesn't already exist in the
   given context.  Create new mappings to implement default semantics.  */

static tree
omp_copy_decl (tree var, copy_body_data *cb)
{
  /* CB is really an omp_context; see the comment on omp_context::cb.  */
  omp_context *ctx = (omp_context *) cb;
  tree new_var;

  if (TREE_CODE (var) == LABEL_DECL)
    {
      /* Non-local/forced labels keep their identity; others get a
	 fresh artificial label in the current function.  */
      if (FORCED_LABEL (var) || DECL_NONLOCAL (var))
	return var;
      new_var = create_artificial_label (DECL_SOURCE_LOCATION (var));
      DECL_CONTEXT (new_var) = current_function_decl;
      insert_decl_map (&ctx->cb, var, new_var);
      return new_var;
    }

  /* Walk outward to the nearest taskreg context, returning any mapping
     found in an intermediate context along the way.  */
  while (!is_taskreg_ctx (ctx))
    {
      ctx = ctx->outer;
      if (ctx == NULL)
	return var;
      new_var = maybe_lookup_decl (var, ctx);
      if (new_var)
	return new_var;
    }

  if (is_global_var (var) || decl_function_context (var) != ctx->cb.src_fn)
    return var;

  /* NOTE(review): reaching here means a local of the source function
     had no mapping installed — presumably a scan-phase invariant;
     confirm with callers before relying on it.  */
  return error_mark_node;
}
841
842 /* Create a new context, with OUTER_CTX being the surrounding context. */
843
844 static omp_context *
845 new_omp_context (gimple *stmt, omp_context *outer_ctx)
846 {
847 omp_context *ctx = XCNEW (omp_context);
848
849 splay_tree_insert (all_contexts, (splay_tree_key) stmt,
850 (splay_tree_value) ctx);
851 ctx->stmt = stmt;
852
853 if (outer_ctx)
854 {
855 ctx->outer = outer_ctx;
856 ctx->cb = outer_ctx->cb;
857 ctx->cb.block = NULL;
858 ctx->depth = outer_ctx->depth + 1;
859 }
860 else
861 {
862 ctx->cb.src_fn = current_function_decl;
863 ctx->cb.dst_fn = current_function_decl;
864 ctx->cb.src_node = cgraph_node::get (current_function_decl);
865 gcc_checking_assert (ctx->cb.src_node);
866 ctx->cb.dst_node = ctx->cb.src_node;
867 ctx->cb.src_cfun = cfun;
868 ctx->cb.copy_decl = omp_copy_decl;
869 ctx->cb.eh_lp_nr = 0;
870 ctx->cb.transform_call_graph_edges = CB_CGE_MOVE;
871 ctx->cb.adjust_array_error_bounds = true;
872 ctx->cb.dont_remap_vla_if_no_change = true;
873 ctx->depth = 1;
874 }
875
876 ctx->cb.decl_map = new hash_map<tree, tree>;
877
878 return ctx;
879 }
880
881 static gimple_seq maybe_catch_exception (gimple_seq);
882
/* Finalize task copyfn.  Gimplify the copy function of TASK_STMT (if it
   has one), wrap its body for exception handling if needed, and
   register the function with the callgraph.  */

static void
finalize_task_copyfn (gomp_task *task_stmt)
{
  struct function *child_cfun;
  tree child_fn;
  gimple_seq seq = NULL, new_seq;
  gbind *bind;

  child_fn = gimple_omp_task_copy_fn (task_stmt);
  if (child_fn == NULL_TREE)
    return;

  child_cfun = DECL_STRUCT_FUNCTION (child_fn);
  DECL_STRUCT_FUNCTION (child_fn)->curr_properties = cfun->curr_properties;

  /* Gimplify the copy function body within its own function context.  */
  push_cfun (child_cfun);
  bind = gimplify_body (child_fn, false);
  gimple_seq_add_stmt (&seq, bind);
  new_seq = maybe_catch_exception (seq);
  if (new_seq != seq)
    {
      /* EH wrapping produced a new sequence; re-bind it.  */
      bind = gimple_build_bind (NULL, new_seq, NULL);
      seq = NULL;
      gimple_seq_add_stmt (&seq, bind);
    }
  gimple_set_body (child_fn, seq);
  pop_cfun ();

  /* Inform the callgraph about the new function.  */
  cgraph_node *node = cgraph_node::get_create (child_fn);
  node->parallelized_function = 1;
  cgraph_node::add_new_function (child_fn, false);
}
918
919 /* Destroy a omp_context data structures. Called through the splay tree
920 value delete callback. */
921
922 static void
923 delete_omp_context (splay_tree_value value)
924 {
925 omp_context *ctx = (omp_context *) value;
926
927 delete ctx->cb.decl_map;
928
929 if (ctx->field_map)
930 splay_tree_delete (ctx->field_map);
931 if (ctx->sfield_map)
932 splay_tree_delete (ctx->sfield_map);
933
934 /* We hijacked DECL_ABSTRACT_ORIGIN earlier. We need to clear it before
935 it produces corrupt debug information. */
936 if (ctx->record_type)
937 {
938 tree t;
939 for (t = TYPE_FIELDS (ctx->record_type); t ; t = DECL_CHAIN (t))
940 DECL_ABSTRACT_ORIGIN (t) = NULL;
941 }
942 if (ctx->srecord_type)
943 {
944 tree t;
945 for (t = TYPE_FIELDS (ctx->srecord_type); t ; t = DECL_CHAIN (t))
946 DECL_ABSTRACT_ORIGIN (t) = NULL;
947 }
948
949 if (is_task_ctx (ctx))
950 finalize_task_copyfn (as_a <gomp_task *> (ctx->stmt));
951
952 if (ctx->task_reduction_map)
953 {
954 ctx->task_reductions.release ();
955 delete ctx->task_reduction_map;
956 }
957
958 XDELETE (ctx);
959 }
960
/* Fix up RECEIVER_DECL with a type that has been remapped to the child
   context.  */

static void
fixup_child_record_type (omp_context *ctx)
{
  tree f, type = ctx->record_type;

  if (!ctx->receiver_decl)
    return;
  /* ??? It isn't sufficient to just call remap_type here, because
     variably_modified_type_p doesn't work the way we expect for
     record types.  Testing each field for whether it needs remapping
     and creating a new record by hand works, however.  */
  for (f = TYPE_FIELDS (type); f ; f = DECL_CHAIN (f))
    if (variably_modified_type_p (TREE_TYPE (f), ctx->cb.src_fn))
      break;
  if (f)
    {
      /* At least one field needs remapping: build a fresh record type
	 with copies of every field, remapped into the child.  */
      tree name, new_fields = NULL;

      type = lang_hooks.types.make_type (RECORD_TYPE);
      name = DECL_NAME (TYPE_NAME (ctx->record_type));
      name = build_decl (DECL_SOURCE_LOCATION (ctx->receiver_decl),
			 TYPE_DECL, name, type);
      TYPE_NAME (type) = name;

      for (f = TYPE_FIELDS (ctx->record_type); f ; f = DECL_CHAIN (f))
	{
	  tree new_f = copy_node (f);
	  DECL_CONTEXT (new_f) = type;
	  TREE_TYPE (new_f) = remap_type (TREE_TYPE (f), &ctx->cb);
	  DECL_CHAIN (new_f) = new_fields;
	  /* Remap any size/offset trees that refer to remapped decls.  */
	  walk_tree (&DECL_SIZE (new_f), copy_tree_body_r, &ctx->cb, NULL);
	  walk_tree (&DECL_SIZE_UNIT (new_f), copy_tree_body_r,
		     &ctx->cb, NULL);
	  walk_tree (&DECL_FIELD_OFFSET (new_f), copy_tree_body_r,
		     &ctx->cb, NULL);
	  new_fields = new_f;

	  /* Arrange to be able to look up the receiver field
	     given the sender field.  */
	  splay_tree_insert (ctx->field_map, (splay_tree_key) f,
			     (splay_tree_value) new_f);
	}
      TYPE_FIELDS (type) = nreverse (new_fields);
      layout_type (type);
    }

  /* In a target region we never modify any of the pointers in *.omp_data_i,
     so attempt to help the optimizers.  */
  if (is_gimple_omp_offloaded (ctx->stmt))
    type = build_qualified_type (type, TYPE_QUAL_CONST);

  TREE_TYPE (ctx->receiver_decl)
    = build_qualified_type (build_reference_type (type), TYPE_QUAL_RESTRICT);
}
1018
/* Instantiate decls as necessary in CTX to satisfy the data sharing
   specified by CLAUSES.  Two passes are made over CLAUSES: the first
   installs sender-record fields (install_var_field) and per-context
   local copies (install_var_local); the second fixes up decls that were
   remapped (fixup_remapped_decl) and records whether any clause carries
   a stashed GIMPLE sequence that must itself be scanned.  */

static void
scan_sharing_clauses (tree clauses, omp_context *ctx)
{
  tree c, decl;
  bool scan_array_reductions = false;

  /* Pass 1: create fields and local copies for each clause.  */
  for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
    {
      bool by_ref;

      switch (OMP_CLAUSE_CODE (c))
	{
	case OMP_CLAUSE_PRIVATE:
	  decl = OMP_CLAUSE_DECL (c);
	  if (OMP_CLAUSE_PRIVATE_OUTER_REF (c))
	    goto do_private;
	  else if (!is_variable_sized (decl))
	    install_var_local (decl, ctx);
	  break;

	case OMP_CLAUSE_SHARED:
	  decl = OMP_CLAUSE_DECL (c);
	  /* Ignore shared directives in teams construct inside of
	     target construct.  */
	  if (gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS
	      && !is_host_teams_ctx (ctx))
	    {
	      /* Global variables don't need to be copied,
		 the receiver side will use them directly.  */
	      tree odecl = maybe_lookup_decl_in_outer_ctx (decl, ctx);
	      if (is_global_var (odecl))
		break;
	      insert_decl_map (&ctx->cb, decl, odecl);
	      break;
	    }
	  gcc_assert (is_taskreg_ctx (ctx));
	  gcc_assert (!COMPLETE_TYPE_P (TREE_TYPE (decl))
		      || !is_variable_sized (decl));
	  /* Global variables don't need to be copied,
	     the receiver side will use them directly.  */
	  if (is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx)))
	    break;
	  if (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
	    {
	      use_pointer_for_field (decl, ctx);
	      break;
	    }
	  by_ref = use_pointer_for_field (decl, NULL);
	  if ((! TREE_READONLY (decl) && !OMP_CLAUSE_SHARED_READONLY (c))
	      || TREE_ADDRESSABLE (decl)
	      || by_ref
	      || omp_is_reference (decl))
	    {
	      by_ref = use_pointer_for_field (decl, ctx);
	      install_var_field (decl, by_ref, 3, ctx);
	      install_var_local (decl, ctx);
	      break;
	    }
	  /* We don't need to copy const scalar vars back.  */
	  OMP_CLAUSE_SET_CODE (c, OMP_CLAUSE_FIRSTPRIVATE);
	  goto do_private;

	case OMP_CLAUSE_REDUCTION:
	case OMP_CLAUSE_IN_REDUCTION:
	  decl = OMP_CLAUSE_DECL (c);
	  if (TREE_CODE (decl) == MEM_REF)
	    {
	      /* Array-section reduction: peel back the address
		 computation to reach the underlying base decl.  */
	      tree t = TREE_OPERAND (decl, 0);
	      if (TREE_CODE (t) == POINTER_PLUS_EXPR)
		t = TREE_OPERAND (t, 0);
	      if (TREE_CODE (t) == INDIRECT_REF
		  || TREE_CODE (t) == ADDR_EXPR)
		t = TREE_OPERAND (t, 0);
	      install_var_local (t, ctx);
	      if (is_taskreg_ctx (ctx)
		  && (!is_global_var (maybe_lookup_decl_in_outer_ctx (t, ctx))
		      || (is_task_ctx (ctx)
			  && (TREE_CODE (TREE_TYPE (t)) == POINTER_TYPE
			      || (TREE_CODE (TREE_TYPE (t)) == REFERENCE_TYPE
				  && (TREE_CODE (TREE_TYPE (TREE_TYPE (t)))
				      == POINTER_TYPE)))))
		  && !is_variable_sized (t)
		  && (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION
		      || (!OMP_CLAUSE_REDUCTION_TASK (c)
			  && !is_task_ctx (ctx))))
		{
		  by_ref = use_pointer_for_field (t, NULL);
		  if (is_task_ctx (ctx)
		      && TREE_CODE (TREE_TYPE (t)) == REFERENCE_TYPE
		      && TREE_CODE (TREE_TYPE (TREE_TYPE (t))) == POINTER_TYPE)
		    {
		      install_var_field (t, false, 1, ctx);
		      install_var_field (t, by_ref, 2, ctx);
		    }
		  else
		    install_var_field (t, by_ref, 3, ctx);
		}
	      break;
	    }
	  if (is_task_ctx (ctx)
	      || (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
		  && OMP_CLAUSE_REDUCTION_TASK (c)
		  && is_parallel_ctx (ctx)))
	    {
	      /* Global variables don't need to be copied,
		 the receiver side will use them directly.  */
	      if (!is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx)))
		{
		  by_ref = use_pointer_for_field (decl, ctx);
		  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_IN_REDUCTION)
		    install_var_field (decl, by_ref, 3, ctx);
		}
	      install_var_local (decl, ctx);
	      break;
	    }
	  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
	      && OMP_CLAUSE_REDUCTION_TASK (c))
	    {
	      install_var_local (decl, ctx);
	      break;
	    }
	  goto do_private;

	case OMP_CLAUSE_LASTPRIVATE:
	  /* Let the corresponding firstprivate clause create
	     the variable.  */
	  if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
	    break;
	  /* FALLTHRU */

	case OMP_CLAUSE_FIRSTPRIVATE:
	case OMP_CLAUSE_LINEAR:
	  decl = OMP_CLAUSE_DECL (c);
	/* Common handling for every clause kind above that needs a
	   private copy of DECL in this context.  */
	do_private:
	  if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE
	       || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_IS_DEVICE_PTR)
	      && is_gimple_omp_offloaded (ctx->stmt))
	    {
	      if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
		install_var_field (decl, !omp_is_reference (decl), 3, ctx);
	      else if (TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
		install_var_field (decl, true, 3, ctx);
	      else
		install_var_field (decl, false, 3, ctx);
	    }
	  if (is_variable_sized (decl))
	    {
	      if (is_task_ctx (ctx))
		install_var_field (decl, false, 1, ctx);
	      break;
	    }
	  else if (is_taskreg_ctx (ctx))
	    {
	      bool global
		= is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx));
	      by_ref = use_pointer_for_field (decl, NULL);

	      if (is_task_ctx (ctx)
		  && (global || by_ref || omp_is_reference (decl)))
		{
		  install_var_field (decl, false, 1, ctx);
		  if (!global)
		    install_var_field (decl, by_ref, 2, ctx);
		}
	      else if (!global)
		install_var_field (decl, by_ref, 3, ctx);
	    }
	  install_var_local (decl, ctx);
	  break;

	case OMP_CLAUSE_USE_DEVICE_PTR:
	  decl = OMP_CLAUSE_DECL (c);
	  if (TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
	    install_var_field (decl, true, 3, ctx);
	  else
	    install_var_field (decl, false, 3, ctx);
	  if (DECL_SIZE (decl)
	      && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
	    {
	      /* Variable-sized decl: also install the base pointer that
		 its DECL_VALUE_EXPR dereferences.  */
	      tree decl2 = DECL_VALUE_EXPR (decl);
	      gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
	      decl2 = TREE_OPERAND (decl2, 0);
	      gcc_assert (DECL_P (decl2));
	      install_var_local (decl2, ctx);
	    }
	  install_var_local (decl, ctx);
	  break;

	case OMP_CLAUSE_IS_DEVICE_PTR:
	  decl = OMP_CLAUSE_DECL (c);
	  goto do_private;

	case OMP_CLAUSE__LOOPTEMP_:
	case OMP_CLAUSE__REDUCTEMP_:
	  gcc_assert (is_taskreg_ctx (ctx));
	  decl = OMP_CLAUSE_DECL (c);
	  install_var_field (decl, false, 3, ctx);
	  install_var_local (decl, ctx);
	  break;

	case OMP_CLAUSE_COPYPRIVATE:
	case OMP_CLAUSE_COPYIN:
	  decl = OMP_CLAUSE_DECL (c);
	  by_ref = use_pointer_for_field (decl, NULL);
	  install_var_field (decl, by_ref, 3, ctx);
	  break;

	case OMP_CLAUSE_FINAL:
	case OMP_CLAUSE_IF:
	case OMP_CLAUSE_NUM_THREADS:
	case OMP_CLAUSE_NUM_TEAMS:
	case OMP_CLAUSE_THREAD_LIMIT:
	case OMP_CLAUSE_DEVICE:
	case OMP_CLAUSE_SCHEDULE:
	case OMP_CLAUSE_DIST_SCHEDULE:
	case OMP_CLAUSE_DEPEND:
	case OMP_CLAUSE_PRIORITY:
	case OMP_CLAUSE_GRAINSIZE:
	case OMP_CLAUSE_NUM_TASKS:
	case OMP_CLAUSE_NUM_GANGS:
	case OMP_CLAUSE_NUM_WORKERS:
	case OMP_CLAUSE_VECTOR_LENGTH:
	  /* The clause operand is evaluated in the enclosing context.  */
	  if (ctx->outer)
	    scan_omp_op (&OMP_CLAUSE_OPERAND (c, 0), ctx->outer);
	  break;

	case OMP_CLAUSE_TO:
	case OMP_CLAUSE_FROM:
	case OMP_CLAUSE_MAP:
	  if (ctx->outer)
	    scan_omp_op (&OMP_CLAUSE_SIZE (c), ctx->outer);
	  decl = OMP_CLAUSE_DECL (c);
	  /* Global variables with "omp declare target" attribute
	     don't need to be copied, the receiver side will use them
	     directly.  However, global variables with "omp declare target link"
	     attribute need to be copied.  Or when ALWAYS modifier is used.  */
	  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
	      && DECL_P (decl)
	      && ((OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_FIRSTPRIVATE_POINTER
		   && (OMP_CLAUSE_MAP_KIND (c)
		       != GOMP_MAP_FIRSTPRIVATE_REFERENCE))
		  || TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
	      && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_ALWAYS_TO
	      && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_ALWAYS_FROM
	      && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_ALWAYS_TOFROM
	      && is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx))
	      && varpool_node::get_create (decl)->offloadable
	      && !lookup_attribute ("omp declare target link",
				    DECL_ATTRIBUTES (decl)))
	    break;
	  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
	      && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_POINTER)
	    {
	      /* Ignore GOMP_MAP_POINTER kind for arrays in regions that are
		 not offloaded; there is nothing to map for those.  */
	      if (!is_gimple_omp_offloaded (ctx->stmt)
		  && !POINTER_TYPE_P (TREE_TYPE (decl))
		  && !OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c))
		break;
	    }
	  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
	      && (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER
		  || (OMP_CLAUSE_MAP_KIND (c)
		      == GOMP_MAP_FIRSTPRIVATE_REFERENCE)))
	    {
	      if (TREE_CODE (decl) == COMPONENT_REF
		  || (TREE_CODE (decl) == INDIRECT_REF
		      && TREE_CODE (TREE_OPERAND (decl, 0)) == COMPONENT_REF
		      && (TREE_CODE (TREE_TYPE (TREE_OPERAND (decl, 0)))
			  == REFERENCE_TYPE)))
		break;
	      if (DECL_SIZE (decl)
		  && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
		{
		  tree decl2 = DECL_VALUE_EXPR (decl);
		  gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
		  decl2 = TREE_OPERAND (decl2, 0);
		  gcc_assert (DECL_P (decl2));
		  install_var_local (decl2, ctx);
		}
	      install_var_local (decl, ctx);
	      break;
	    }
	  if (DECL_P (decl))
	    {
	      if (DECL_SIZE (decl)
		  && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
		{
		  tree decl2 = DECL_VALUE_EXPR (decl);
		  gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
		  decl2 = TREE_OPERAND (decl2, 0);
		  gcc_assert (DECL_P (decl2));
		  install_var_field (decl2, true, 3, ctx);
		  install_var_local (decl2, ctx);
		  install_var_local (decl, ctx);
		}
	      else
		{
		  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
		      && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_POINTER
		      && !OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c)
		      && TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
		    install_var_field (decl, true, 7, ctx);
		  else
		    install_var_field (decl, true, 3, ctx);
		  if (is_gimple_omp_offloaded (ctx->stmt)
		      && !OMP_CLAUSE_MAP_IN_REDUCTION (c))
		    install_var_local (decl, ctx);
		}
	    }
	  else
	    {
	      /* DECL is not a decl (e.g. an array section).  If the next
		 clause is a zero-sized GOMP_MAP_POINTER for its base decl,
		 mark both as a zero-bias array section; otherwise create an
		 anonymous pointer field keyed on the expression itself.  */
	      tree base = get_base_address (decl);
	      tree nc = OMP_CLAUSE_CHAIN (c);
	      if (DECL_P (base)
		  && nc != NULL_TREE
		  && OMP_CLAUSE_CODE (nc) == OMP_CLAUSE_MAP
		  && OMP_CLAUSE_DECL (nc) == base
		  && OMP_CLAUSE_MAP_KIND (nc) == GOMP_MAP_POINTER
		  && integer_zerop (OMP_CLAUSE_SIZE (nc)))
		{
		  OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c) = 1;
		  OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (nc) = 1;
		}
	      else
		{
		  if (ctx->outer)
		    {
		      scan_omp_op (&OMP_CLAUSE_DECL (c), ctx->outer);
		      decl = OMP_CLAUSE_DECL (c);
		    }
		  gcc_assert (!splay_tree_lookup (ctx->field_map,
						  (splay_tree_key) decl));
		  tree field
		    = build_decl (OMP_CLAUSE_LOCATION (c),
				  FIELD_DECL, NULL_TREE, ptr_type_node);
		  SET_DECL_ALIGN (field, TYPE_ALIGN (ptr_type_node));
		  insert_field_into_struct (ctx->record_type, field);
		  splay_tree_insert (ctx->field_map, (splay_tree_key) decl,
				     (splay_tree_value) field);
		}
	    }
	  break;

	case OMP_CLAUSE__GRIDDIM_:
	  if (ctx->outer)
	    {
	      scan_omp_op (&OMP_CLAUSE__GRIDDIM__SIZE (c), ctx->outer);
	      scan_omp_op (&OMP_CLAUSE__GRIDDIM__GROUP (c), ctx->outer);
	    }
	  break;

	case OMP_CLAUSE_NOWAIT:
	case OMP_CLAUSE_ORDERED:
	case OMP_CLAUSE_COLLAPSE:
	case OMP_CLAUSE_UNTIED:
	case OMP_CLAUSE_MERGEABLE:
	case OMP_CLAUSE_PROC_BIND:
	case OMP_CLAUSE_SAFELEN:
	case OMP_CLAUSE_SIMDLEN:
	case OMP_CLAUSE_THREADS:
	case OMP_CLAUSE_SIMD:
	case OMP_CLAUSE_NOGROUP:
	case OMP_CLAUSE_DEFAULTMAP:
	case OMP_CLAUSE_ASYNC:
	case OMP_CLAUSE_WAIT:
	case OMP_CLAUSE_GANG:
	case OMP_CLAUSE_WORKER:
	case OMP_CLAUSE_VECTOR:
	case OMP_CLAUSE_INDEPENDENT:
	case OMP_CLAUSE_AUTO:
	case OMP_CLAUSE_SEQ:
	case OMP_CLAUSE_TILE:
	case OMP_CLAUSE__SIMT_:
	case OMP_CLAUSE_DEFAULT:
	case OMP_CLAUSE_NONTEMPORAL:
	case OMP_CLAUSE_IF_PRESENT:
	case OMP_CLAUSE_FINALIZE:
	case OMP_CLAUSE_TASK_REDUCTION:
	  break;

	case OMP_CLAUSE_ALIGNED:
	  decl = OMP_CLAUSE_DECL (c);
	  if (is_global_var (decl)
	      && TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
	    install_var_local (decl, ctx);
	  break;

	case OMP_CLAUSE__CACHE_:
	default:
	  gcc_unreachable ();
	}
    }

  /* Pass 2: fix up decls that were remapped during pass 1 and note which
     clauses carry GIMPLE sequences that still need scanning.  */
  for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
    {
      switch (OMP_CLAUSE_CODE (c))
	{
	case OMP_CLAUSE_LASTPRIVATE:
	  /* Let the corresponding firstprivate clause create
	     the variable.  */
	  if (OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c))
	    scan_array_reductions = true;
	  if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
	    break;
	  /* FALLTHRU */

	case OMP_CLAUSE_FIRSTPRIVATE:
	case OMP_CLAUSE_PRIVATE:
	case OMP_CLAUSE_LINEAR:
	case OMP_CLAUSE_IS_DEVICE_PTR:
	  decl = OMP_CLAUSE_DECL (c);
	  if (is_variable_sized (decl))
	    {
	      if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE
		   || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_IS_DEVICE_PTR)
		  && is_gimple_omp_offloaded (ctx->stmt))
		{
		  tree decl2 = DECL_VALUE_EXPR (decl);
		  gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
		  decl2 = TREE_OPERAND (decl2, 0);
		  gcc_assert (DECL_P (decl2));
		  install_var_local (decl2, ctx);
		  fixup_remapped_decl (decl2, ctx, false);
		}
	      install_var_local (decl, ctx);
	    }
	  fixup_remapped_decl (decl, ctx,
			       OMP_CLAUSE_CODE (c) == OMP_CLAUSE_PRIVATE
			       && OMP_CLAUSE_PRIVATE_DEBUG (c));
	  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
	      && OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c))
	    scan_array_reductions = true;
	  break;

	case OMP_CLAUSE_REDUCTION:
	case OMP_CLAUSE_IN_REDUCTION:
	  decl = OMP_CLAUSE_DECL (c);
	  if (TREE_CODE (decl) != MEM_REF)
	    {
	      if (is_variable_sized (decl))
		install_var_local (decl, ctx);
	      fixup_remapped_decl (decl, ctx, false);
	    }
	  if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
	    scan_array_reductions = true;
	  break;

	case OMP_CLAUSE_TASK_REDUCTION:
	  if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
	    scan_array_reductions = true;
	  break;

	case OMP_CLAUSE_SHARED:
	  /* Ignore shared directives in teams construct inside of
	     target construct.  */
	  if (gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS
	      && !is_host_teams_ctx (ctx))
	    break;
	  decl = OMP_CLAUSE_DECL (c);
	  if (is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx)))
	    break;
	  if (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
	    {
	      if (is_global_var (maybe_lookup_decl_in_outer_ctx (decl,
								 ctx->outer)))
		break;
	      bool by_ref = use_pointer_for_field (decl, ctx);
	      install_var_field (decl, by_ref, 11, ctx);
	      break;
	    }
	  fixup_remapped_decl (decl, ctx, false);
	  break;

	case OMP_CLAUSE_MAP:
	  if (!is_gimple_omp_offloaded (ctx->stmt))
	    break;
	  decl = OMP_CLAUSE_DECL (c);
	  if (DECL_P (decl)
	      && ((OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_FIRSTPRIVATE_POINTER
		   && (OMP_CLAUSE_MAP_KIND (c)
		       != GOMP_MAP_FIRSTPRIVATE_REFERENCE))
		  || TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
	      && is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx))
	      && varpool_node::get_create (decl)->offloadable)
	    break;
	  if (DECL_P (decl))
	    {
	      if ((OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_POINTER
		   || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER)
		  && TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE
		  && !COMPLETE_TYPE_P (TREE_TYPE (decl)))
		{
		  tree new_decl = lookup_decl (decl, ctx);
		  TREE_TYPE (new_decl)
		    = remap_type (TREE_TYPE (decl), &ctx->cb);
		}
	      else if (DECL_SIZE (decl)
		       && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
		{
		  tree decl2 = DECL_VALUE_EXPR (decl);
		  gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
		  decl2 = TREE_OPERAND (decl2, 0);
		  gcc_assert (DECL_P (decl2));
		  fixup_remapped_decl (decl2, ctx, false);
		  fixup_remapped_decl (decl, ctx, true);
		}
	      else
		fixup_remapped_decl (decl, ctx, false);
	    }
	  break;

	case OMP_CLAUSE_COPYPRIVATE:
	case OMP_CLAUSE_COPYIN:
	case OMP_CLAUSE_DEFAULT:
	case OMP_CLAUSE_IF:
	case OMP_CLAUSE_NUM_THREADS:
	case OMP_CLAUSE_NUM_TEAMS:
	case OMP_CLAUSE_THREAD_LIMIT:
	case OMP_CLAUSE_DEVICE:
	case OMP_CLAUSE_SCHEDULE:
	case OMP_CLAUSE_DIST_SCHEDULE:
	case OMP_CLAUSE_NOWAIT:
	case OMP_CLAUSE_ORDERED:
	case OMP_CLAUSE_COLLAPSE:
	case OMP_CLAUSE_UNTIED:
	case OMP_CLAUSE_FINAL:
	case OMP_CLAUSE_MERGEABLE:
	case OMP_CLAUSE_PROC_BIND:
	case OMP_CLAUSE_SAFELEN:
	case OMP_CLAUSE_SIMDLEN:
	case OMP_CLAUSE_ALIGNED:
	case OMP_CLAUSE_DEPEND:
	case OMP_CLAUSE__LOOPTEMP_:
	case OMP_CLAUSE__REDUCTEMP_:
	case OMP_CLAUSE_TO:
	case OMP_CLAUSE_FROM:
	case OMP_CLAUSE_PRIORITY:
	case OMP_CLAUSE_GRAINSIZE:
	case OMP_CLAUSE_NUM_TASKS:
	case OMP_CLAUSE_THREADS:
	case OMP_CLAUSE_SIMD:
	case OMP_CLAUSE_NOGROUP:
	case OMP_CLAUSE_DEFAULTMAP:
	case OMP_CLAUSE_USE_DEVICE_PTR:
	case OMP_CLAUSE_NONTEMPORAL:
	case OMP_CLAUSE_ASYNC:
	case OMP_CLAUSE_WAIT:
	case OMP_CLAUSE_NUM_GANGS:
	case OMP_CLAUSE_NUM_WORKERS:
	case OMP_CLAUSE_VECTOR_LENGTH:
	case OMP_CLAUSE_GANG:
	case OMP_CLAUSE_WORKER:
	case OMP_CLAUSE_VECTOR:
	case OMP_CLAUSE_INDEPENDENT:
	case OMP_CLAUSE_AUTO:
	case OMP_CLAUSE_SEQ:
	case OMP_CLAUSE_TILE:
	case OMP_CLAUSE__GRIDDIM_:
	case OMP_CLAUSE__SIMT_:
	case OMP_CLAUSE_IF_PRESENT:
	case OMP_CLAUSE_FINALIZE:
	  break;

	case OMP_CLAUSE__CACHE_:
	default:
	  gcc_unreachable ();
	}
    }

  /* Finally scan any GIMPLE sequences stashed inside reduction,
     lastprivate or linear clauses, now that all decls are installed.  */
  gcc_checking_assert (!scan_array_reductions
		       || !is_gimple_omp_oacc (ctx->stmt));
  if (scan_array_reductions)
    {
      for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
	if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
	     || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_IN_REDUCTION
	     || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_TASK_REDUCTION)
	    && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
	  {
	    scan_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c), ctx);
	    scan_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c), ctx);
	  }
	else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
		 && OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c))
	  scan_omp (&OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c), ctx);
	else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
		 && OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c))
	  scan_omp (&OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c), ctx);
    }
}
1613
1614 /* Create a new name for omp child function. Returns an identifier. */
1615
1616 static tree
1617 create_omp_child_function_name (bool task_copy)
1618 {
1619 return clone_function_name_numbered (current_function_decl,
1620 task_copy ? "_omp_cpyfn" : "_omp_fn");
1621 }
1622
1623 /* Return true if CTX may belong to offloaded code: either if current function
1624 is offloaded, or any enclosing context corresponds to a target region. */
1625
1626 static bool
1627 omp_maybe_offloaded_ctx (omp_context *ctx)
1628 {
1629 if (cgraph_node::get (current_function_decl)->offloadable)
1630 return true;
1631 for (; ctx; ctx = ctx->outer)
1632 if (is_gimple_omp_offloaded (ctx->stmt))
1633 return true;
1634 return false;
1635 }
1636
/* Build a decl for the omp child function.  It'll not contain a body
   yet, just the bare decl.  For TASK_COPY the function takes two pointer
   arguments (.omp_data_o, .omp_data_i) and becomes the task's copy
   function; otherwise it takes a single pointer argument (.omp_data_i)
   and becomes CTX's destination function (ctx->cb.dst_fn) with the
   argument recorded as ctx->receiver_decl.  */

static void
create_omp_child_function (omp_context *ctx, bool task_copy)
{
  tree decl, type, name, t;

  name = create_omp_child_function_name (task_copy);
  /* A task copy function receives both source and destination data
     blocks; a regular child function only the receiver block.  */
  if (task_copy)
    type = build_function_type_list (void_type_node, ptr_type_node,
				    ptr_type_node, NULL_TREE);
  else
    type = build_function_type_list (void_type_node, ptr_type_node, NULL_TREE);

  decl = build_decl (gimple_location (ctx->stmt), FUNCTION_DECL, name, type);

  gcc_checking_assert (!is_gimple_omp_oacc (ctx->stmt)
		       || !task_copy);
  if (!task_copy)
    ctx->cb.dst_fn = decl;
  else
    gimple_omp_task_set_copy_fn (ctx->stmt, decl);

  /* The child function is an internal, non-inlinable artifact with
     its own block tree, inheriting the parent's attributes.  */
  TREE_STATIC (decl) = 1;
  TREE_USED (decl) = 1;
  DECL_ARTIFICIAL (decl) = 1;
  DECL_IGNORED_P (decl) = 0;
  TREE_PUBLIC (decl) = 0;
  DECL_UNINLINABLE (decl) = 1;
  DECL_EXTERNAL (decl) = 0;
  DECL_CONTEXT (decl) = NULL_TREE;
  DECL_INITIAL (decl) = make_node (BLOCK);
  BLOCK_SUPERCONTEXT (DECL_INITIAL (decl)) = decl;
  DECL_ATTRIBUTES (decl) = DECL_ATTRIBUTES (current_function_decl);
  /* Remove omp declare simd attribute from the new attributes.  */
  if (tree a = lookup_attribute ("omp declare simd", DECL_ATTRIBUTES (decl)))
    {
      /* Advance A past the last "omp declare simd" attribute; everything
	 from A onward is kept and shared with the parent's list.  */
      while (tree a2 = lookup_attribute ("omp declare simd", TREE_CHAIN (a)))
	a = a2;
      a = TREE_CHAIN (a);
      /* Copy the prefix of the list up to A, dropping the "omp declare
	 simd" entries, so the parent's attribute list is not modified.  */
      for (tree *p = &DECL_ATTRIBUTES (decl); *p != a;)
	if (is_attribute_p ("omp declare simd", get_attribute_name (*p)))
	  *p = TREE_CHAIN (*p);
	else
	  {
	    tree chain = TREE_CHAIN (*p);
	    *p = copy_node (*p);
	    p = &TREE_CHAIN (*p);
	    *p = chain;
	  }
    }
  DECL_FUNCTION_SPECIFIC_OPTIMIZATION (decl)
    = DECL_FUNCTION_SPECIFIC_OPTIMIZATION (current_function_decl);
  DECL_FUNCTION_SPECIFIC_TARGET (decl)
    = DECL_FUNCTION_SPECIFIC_TARGET (current_function_decl);
  DECL_FUNCTION_VERSIONED (decl)
    = DECL_FUNCTION_VERSIONED (current_function_decl);

  if (omp_maybe_offloaded_ctx (ctx))
    {
      cgraph_node::get_create (decl)->offloadable = 1;
      if (ENABLE_OFFLOADING)
	g->have_offload = true;
    }

  if (cgraph_node::get_create (decl)->offloadable
      && !lookup_attribute ("omp declare target",
			    DECL_ATTRIBUTES (current_function_decl)))
    {
      const char *target_attr = (is_gimple_omp_offloaded (ctx->stmt)
				 ? "omp target entrypoint"
				 : "omp declare target");
      DECL_ATTRIBUTES (decl)
	= tree_cons (get_identifier (target_attr),
		     NULL_TREE, DECL_ATTRIBUTES (decl));
    }

  /* The child function returns void.  */
  t = build_decl (DECL_SOURCE_LOCATION (decl),
		  RESULT_DECL, NULL_TREE, void_type_node);
  DECL_ARTIFICIAL (t) = 1;
  DECL_IGNORED_P (t) = 1;
  DECL_CONTEXT (t) = decl;
  DECL_RESULT (decl) = t;

  tree data_name = get_identifier (".omp_data_i");
  t = build_decl (DECL_SOURCE_LOCATION (decl), PARM_DECL, data_name,
		  ptr_type_node);
  DECL_ARTIFICIAL (t) = 1;
  DECL_NAMELESS (t) = 1;
  DECL_ARG_TYPE (t) = ptr_type_node;
  /* NOTE(review): the PARM_DECL context is set to the *current* function
     here, not DECL — presumably fixed up when the body is moved; confirm
     against expand_omp_taskreg/move_sese_region callers.  */
  DECL_CONTEXT (t) = current_function_decl;
  TREE_USED (t) = 1;
  TREE_READONLY (t) = 1;
  DECL_ARGUMENTS (decl) = t;
  if (!task_copy)
    ctx->receiver_decl = t;
  else
    {
      /* Task copy functions get a second, addressable .omp_data_o
	 argument chained in front of .omp_data_i.  */
      t = build_decl (DECL_SOURCE_LOCATION (decl),
		      PARM_DECL, get_identifier (".omp_data_o"),
		      ptr_type_node);
      DECL_ARTIFICIAL (t) = 1;
      DECL_NAMELESS (t) = 1;
      DECL_ARG_TYPE (t) = ptr_type_node;
      DECL_CONTEXT (t) = current_function_decl;
      TREE_USED (t) = 1;
      TREE_ADDRESSABLE (t) = 1;
      DECL_CHAIN (t) = DECL_ARGUMENTS (decl);
      DECL_ARGUMENTS (decl) = t;
    }

  /* Allocate memory for the function structure.  The call to
     allocate_struct_function clobbers CFUN, so we need to restore
     it afterward.  */
  push_struct_function (decl);
  cfun->function_end_locus = gimple_location (ctx->stmt);
  init_tree_ssa (cfun);
  pop_cfun ();
}
1757
/* Callback for walk_gimple_seq.  Check if combined parallel
   contains gimple_omp_for_combined_into_p OMP_FOR.  On entry WI->info
   points to the enum gf_mask loop kind being searched for; on a match
   WI->info is overwritten with the found statement and the walk stops
   (non-NULL return).  */

tree
omp_find_combined_for (gimple_stmt_iterator *gsi_p,
		       bool *handled_ops_p,
		       struct walk_stmt_info *wi)
{
  gimple *stmt = gsi_stmt (*gsi_p);

  *handled_ops_p = true;
  switch (gimple_code (stmt))
    {
    /* Recurse into the bodies of nested OMP constructs.  */
    WALK_SUBSTMTS;

    case GIMPLE_OMP_FOR:
      if (gimple_omp_for_combined_into_p (stmt)
	  && gimple_omp_for_kind (stmt)
	     == *(const enum gf_mask *) (wi->info))
	{
	  /* Found it: stash the statement and terminate the walk.  */
	  wi->info = stmt;
	  return integer_zero_node;
	}
      break;
    default:
      break;
    }
  return NULL;
}
1787
/* Add _LOOPTEMP_/_REDUCTEMP_ clauses on OpenMP parallel or task STMT,
   whose body contains a combined GIMPLE_OMP_FOR of kind MSK.  The
   temporaries are registered in OUTER_CTX's copy map so they survive
   remapping.  */

static void
add_taskreg_looptemp_clauses (enum gf_mask msk, gimple *stmt,
			      omp_context *outer_ctx)
{
  struct walk_stmt_info wi;

  /* Locate the combined inner GIMPLE_OMP_FOR of kind MSK; on success
     omp_find_combined_for replaces wi.info with the statement.  */
  memset (&wi, 0, sizeof (wi));
  wi.val_only = true;
  wi.info = (void *) &msk;
  walk_gimple_seq (gimple_omp_body (stmt), omp_find_combined_for, NULL, &wi);
  if (wi.info != (void *) &msk)
    {
      gomp_for *for_stmt = as_a <gomp_for *> ((gimple *) wi.info);
      struct omp_for_data fd;
      omp_extract_for_data (for_stmt, &fd, NULL);
      /* We need two temporaries with fd.loop.v type (istart/iend)
	 and then (fd.collapse - 1) temporaries with the same
	 type for count2 ... countN-1 vars if not constant.  */
      size_t count = 2, i;
      tree type = fd.iter_type;
      if (fd.collapse > 1
	  && TREE_CODE (fd.loop.n2) != INTEGER_CST)
	{
	  count += fd.collapse - 1;
	  /* If there are lastprivate clauses on the inner
	     GIMPLE_OMP_FOR, add one more temporary for the total number
	     of iterations (product of count1 ... countN-1).  */
	  if (omp_find_clause (gimple_omp_for_clauses (for_stmt),
			       OMP_CLAUSE_LASTPRIVATE))
	    count++;
	  else if (msk == GF_OMP_FOR_KIND_FOR
		   && omp_find_clause (gimple_omp_parallel_clauses (stmt),
				       OMP_CLAUSE_LASTPRIVATE))
	    count++;
	}
      /* Prepend one _LOOPTEMP_ clause per needed temporary.  */
      for (i = 0; i < count; i++)
	{
	  tree temp = create_tmp_var (type);
	  tree c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__LOOPTEMP_);
	  insert_decl_map (&outer_ctx->cb, temp, temp);
	  OMP_CLAUSE_DECL (c) = temp;
	  OMP_CLAUSE_CHAIN (c) = gimple_omp_taskreg_clauses (stmt);
	  gimple_omp_taskreg_set_clauses (stmt, c);
	}
    }
  /* Taskloops with reductions additionally need a _REDUCTEMP_ pointer
     temporary.  */
  if (msk == GF_OMP_FOR_KIND_TASKLOOP
      && omp_find_clause (gimple_omp_task_clauses (stmt),
			  OMP_CLAUSE_REDUCTION))
    {
      tree type = build_pointer_type (pointer_sized_int_node);
      tree temp = create_tmp_var (type);
      tree c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__REDUCTEMP_);
      insert_decl_map (&outer_ctx->cb, temp, temp);
      OMP_CLAUSE_DECL (c) = temp;
      OMP_CLAUSE_CHAIN (c) = gimple_omp_task_clauses (stmt);
      gimple_omp_task_set_clauses (stmt, c);
    }
}
1848
/* Scan an OpenMP parallel directive.  Builds the omp_context, the
   .omp_data_s record type and the child function decl, then scans the
   clauses and body.  */

static void
scan_omp_parallel (gimple_stmt_iterator *gsi, omp_context *outer_ctx)
{
  omp_context *ctx;
  tree name;
  gomp_parallel *stmt = as_a <gomp_parallel *> (gsi_stmt (*gsi));

  /* Ignore parallel directives with empty bodies, unless there
     are copyin clauses.  */
  if (optimize > 0
      && empty_body_p (gimple_omp_body (stmt))
      && omp_find_clause (gimple_omp_parallel_clauses (stmt),
			  OMP_CLAUSE_COPYIN) == NULL)
    {
      gsi_replace (gsi, gimple_build_nop (), false);
      return;
    }

  if (gimple_omp_parallel_combined_p (stmt))
    add_taskreg_looptemp_clauses (GF_OMP_FOR_KIND_FOR, stmt, outer_ctx);
  /* If any reduction clause is marked as a task reduction, prepend a
     single _REDUCTEMP_ pointer clause for it.  */
  for (tree c = omp_find_clause (gimple_omp_parallel_clauses (stmt),
				 OMP_CLAUSE_REDUCTION);
       c; c = omp_find_clause (OMP_CLAUSE_CHAIN (c), OMP_CLAUSE_REDUCTION))
    if (OMP_CLAUSE_REDUCTION_TASK (c))
      {
	tree type = build_pointer_type (pointer_sized_int_node);
	tree temp = create_tmp_var (type);
	tree c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__REDUCTEMP_);
	if (outer_ctx)
	  insert_decl_map (&outer_ctx->cb, temp, temp);
	OMP_CLAUSE_DECL (c) = temp;
	OMP_CLAUSE_CHAIN (c) = gimple_omp_parallel_clauses (stmt);
	gimple_omp_parallel_set_clauses (stmt, c);
	break;
      }
    else if (OMP_CLAUSE_CHAIN (c) == NULL_TREE)
      break;

  ctx = new_omp_context (stmt, outer_ctx);
  taskreg_contexts.safe_push (ctx);
  if (taskreg_nesting_level > 1)
    ctx->is_nested = true;
  ctx->field_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
  /* Build the .omp_data_s record type that will carry shared data.  */
  ctx->record_type = lang_hooks.types.make_type (RECORD_TYPE);
  name = create_tmp_var_name (".omp_data_s");
  name = build_decl (gimple_location (stmt),
		     TYPE_DECL, name, ctx->record_type);
  DECL_ARTIFICIAL (name) = 1;
  DECL_NAMELESS (name) = 1;
  TYPE_NAME (ctx->record_type) = name;
  TYPE_ARTIFICIAL (ctx->record_type) = 1;
  if (!gimple_omp_parallel_grid_phony (stmt))
    {
      create_omp_child_function (ctx, false);
      gimple_omp_parallel_set_child_fn (stmt, ctx->cb.dst_fn);
    }

  scan_sharing_clauses (gimple_omp_parallel_clauses (stmt), ctx);
  scan_omp (gimple_omp_body_ptr (stmt), ctx);

  /* If no fields were created, nothing needs to be marshalled.  */
  if (TYPE_FIELDS (ctx->record_type) == NULL)
    ctx->record_type = ctx->receiver_decl = NULL;
}
1914
/* Scan an OpenMP task directive.  Builds the omp_context, the
   .omp_data_s record type (and a separate .omp_data_a sender record plus
   copy function when ctx->srecord_type is created), the child function
   decl, and scans the clauses and body.  */

static void
scan_omp_task (gimple_stmt_iterator *gsi, omp_context *outer_ctx)
{
  omp_context *ctx;
  tree name, t;
  gomp_task *stmt = as_a <gomp_task *> (gsi_stmt (*gsi));

  /* Ignore task directives with empty bodies, unless they have depend
     clause.  */
  if (optimize > 0
      && gimple_omp_body (stmt)
      && empty_body_p (gimple_omp_body (stmt))
      && !omp_find_clause (gimple_omp_task_clauses (stmt), OMP_CLAUSE_DEPEND))
    {
      gsi_replace (gsi, gimple_build_nop (), false);
      return;
    }

  if (gimple_omp_task_taskloop_p (stmt))
    add_taskreg_looptemp_clauses (GF_OMP_FOR_KIND_TASKLOOP, stmt, outer_ctx);

  ctx = new_omp_context (stmt, outer_ctx);

  /* A bare taskwait needs no record type or child function.  */
  if (gimple_omp_task_taskwait_p (stmt))
    {
      scan_sharing_clauses (gimple_omp_task_clauses (stmt), ctx);
      return;
    }

  taskreg_contexts.safe_push (ctx);
  if (taskreg_nesting_level > 1)
    ctx->is_nested = true;
  ctx->field_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
  ctx->record_type = lang_hooks.types.make_type (RECORD_TYPE);
  name = create_tmp_var_name (".omp_data_s");
  name = build_decl (gimple_location (stmt),
		     TYPE_DECL, name, ctx->record_type);
  DECL_ARTIFICIAL (name) = 1;
  DECL_NAMELESS (name) = 1;
  TYPE_NAME (ctx->record_type) = name;
  TYPE_ARTIFICIAL (ctx->record_type) = 1;
  create_omp_child_function (ctx, false);
  gimple_omp_task_set_child_fn (stmt, ctx->cb.dst_fn);

  scan_sharing_clauses (gimple_omp_task_clauses (stmt), ctx);

  /* scan_sharing_clauses may have created a sender record; give it a
     name and build the task copy function for it.  */
  if (ctx->srecord_type)
    {
      name = create_tmp_var_name (".omp_data_a");
      name = build_decl (gimple_location (stmt),
			 TYPE_DECL, name, ctx->srecord_type);
      DECL_ARTIFICIAL (name) = 1;
      DECL_NAMELESS (name) = 1;
      TYPE_NAME (ctx->srecord_type) = name;
      TYPE_ARTIFICIAL (ctx->srecord_type) = 1;
      create_omp_child_function (ctx, true);
    }

  scan_omp (gimple_omp_body_ptr (stmt), ctx);

  /* With no fields to marshal, the task carries a zero-sized,
     byte-aligned argument block.  */
  if (TYPE_FIELDS (ctx->record_type) == NULL)
    {
      ctx->record_type = ctx->receiver_decl = NULL;
      t = build_int_cst (long_integer_type_node, 0);
      gimple_omp_task_set_arg_size (stmt, t);
      t = build_int_cst (long_integer_type_node, 1);
      gimple_omp_task_set_arg_align (stmt, t);
    }
}
1986
1987 /* Helper function for finish_taskreg_scan, called through walk_tree.
1988 If maybe_lookup_decl_in_outer_context returns non-NULL for some
1989 tree, replace it in the expression. */
1990
1991 static tree
1992 finish_taskreg_remap (tree *tp, int *walk_subtrees, void *data)
1993 {
1994 if (VAR_P (*tp))
1995 {
1996 omp_context *ctx = (omp_context *) data;
1997 tree t = maybe_lookup_decl_in_outer_ctx (*tp, ctx);
1998 if (t != *tp)
1999 {
2000 if (DECL_HAS_VALUE_EXPR_P (t))
2001 t = unshare_expr (DECL_VALUE_EXPR (t));
2002 *tp = t;
2003 }
2004 *walk_subtrees = 0;
2005 }
2006 else if (IS_TYPE_OR_DECL_P (*tp))
2007 *walk_subtrees = 0;
2008 return NULL_TREE;
2009 }
2010
2011 /* If any decls have been made addressable during scan_omp,
2012 adjust their fields if needed, and layout record types
2013 of parallel/task constructs. */
2014
static void
finish_taskreg_scan (omp_context *ctx)
{
  /* Nothing to do if this context has no data-sharing record.  */
  if (ctx->record_type == NULL_TREE)
    return;

  /* If any task_shared_vars were needed, verify all
     OMP_CLAUSE_SHARED clauses on GIMPLE_OMP_{PARALLEL,TASK,TEAMS}
     statements if use_pointer_for_field hasn't changed
     because of that.  If it did, update field types now.  */
  if (task_shared_vars)
    {
      tree c;

      for (c = gimple_omp_taskreg_clauses (ctx->stmt);
	   c; c = OMP_CLAUSE_CHAIN (c))
	if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED
	    && !OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
	  {
	    tree decl = OMP_CLAUSE_DECL (c);

	    /* Global variables don't need to be copied,
	       the receiver side will use them directly.  */
	    if (is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx)))
	      continue;
	    /* Skip vars that were not made addressable, or that already
	       get copied by value rather than by reference.  */
	    if (!bitmap_bit_p (task_shared_vars, DECL_UID (decl))
		|| !use_pointer_for_field (decl, ctx))
	      continue;
	    tree field = lookup_field (decl, ctx);
	    /* Already a pointer to the right type: nothing to update.  */
	    if (TREE_CODE (TREE_TYPE (field)) == POINTER_TYPE
		&& TREE_TYPE (TREE_TYPE (field)) == TREE_TYPE (decl))
	      continue;
	    /* Demote the by-value field to a pointer field and reset
	       alignment/volatility that only made sense for the value.  */
	    TREE_TYPE (field) = build_pointer_type (TREE_TYPE (decl));
	    TREE_THIS_VOLATILE (field) = 0;
	    DECL_USER_ALIGN (field) = 0;
	    SET_DECL_ALIGN (field, TYPE_ALIGN (TREE_TYPE (field)));
	    if (TYPE_ALIGN (ctx->record_type) < DECL_ALIGN (field))
	      SET_TYPE_ALIGN (ctx->record_type, DECL_ALIGN (field));
	    /* Keep the sender-side record in sync with the receiver's.  */
	    if (ctx->srecord_type)
	      {
		tree sfield = lookup_sfield (decl, ctx);
		TREE_TYPE (sfield) = TREE_TYPE (field);
		TREE_THIS_VOLATILE (sfield) = 0;
		DECL_USER_ALIGN (sfield) = 0;
		SET_DECL_ALIGN (sfield, DECL_ALIGN (field));
		if (TYPE_ALIGN (ctx->srecord_type) < DECL_ALIGN (sfield))
		  SET_TYPE_ALIGN (ctx->srecord_type, DECL_ALIGN (sfield));
	      }
	  }
    }

  if (gimple_code (ctx->stmt) == GIMPLE_OMP_PARALLEL)
    {
      tree clauses = gimple_omp_parallel_clauses (ctx->stmt);
      tree c = omp_find_clause (clauses, OMP_CLAUSE__REDUCTEMP_);
      if (c)
	{
	  /* Move the _reductemp_ clause first.  GOMP_parallel_reductions
	     expects to find it at the start of data.  */
	  tree f = lookup_field (OMP_CLAUSE_DECL (c), ctx);
	  tree *p = &TYPE_FIELDS (ctx->record_type);
	  /* Unlink F from wherever it is in the field chain...  */
	  while (*p)
	    if (*p == f)
	      {
		*p = DECL_CHAIN (*p);
		break;
	      }
	    else
	      p = &DECL_CHAIN (*p);
	  /* ... and relink it at the head.  */
	  DECL_CHAIN (f) = TYPE_FIELDS (ctx->record_type);
	  TYPE_FIELDS (ctx->record_type) = f;
	}
      layout_type (ctx->record_type);
      fixup_child_record_type (ctx);
    }
  else if (gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS)
    {
      layout_type (ctx->record_type);
      fixup_child_record_type (ctx);
    }
  else
    {
      /* Task (and taskloop) contexts.  */
      location_t loc = gimple_location (ctx->stmt);
      tree *p, vla_fields = NULL_TREE, *q = &vla_fields;
      /* Move VLA fields to the end.  */
      p = &TYPE_FIELDS (ctx->record_type);
      while (*p)
	if (!TYPE_SIZE_UNIT (TREE_TYPE (*p))
	    || ! TREE_CONSTANT (TYPE_SIZE_UNIT (TREE_TYPE (*p))))
	  {
	    /* Variable-sized field: detach it onto the VLA list.  */
	    *q = *p;
	    *p = TREE_CHAIN (*p);
	    TREE_CHAIN (*q) = NULL_TREE;
	    q = &TREE_CHAIN (*q);
	  }
	else
	  p = &DECL_CHAIN (*p);
      /* Append the collected VLA fields after the fixed-size ones.  */
      *p = vla_fields;
      if (gimple_omp_task_taskloop_p (ctx->stmt))
	{
	  /* Move fields corresponding to first and second _looptemp_
	     clause first.  These are filled by GOMP_taskloop
	     and thus need to be in specific positions.  */
	  tree clauses = gimple_omp_task_clauses (ctx->stmt);
	  tree c1 = omp_find_clause (clauses, OMP_CLAUSE__LOOPTEMP_);
	  tree c2 = omp_find_clause (OMP_CLAUSE_CHAIN (c1),
				     OMP_CLAUSE__LOOPTEMP_);
	  tree c3 = omp_find_clause (clauses, OMP_CLAUSE__REDUCTEMP_);
	  tree f1 = lookup_field (OMP_CLAUSE_DECL (c1), ctx);
	  tree f2 = lookup_field (OMP_CLAUSE_DECL (c2), ctx);
	  tree f3 = c3 ? lookup_field (OMP_CLAUSE_DECL (c3), ctx) : NULL_TREE;
	  /* Unlink f1/f2 (and f3 if present) from the field chain.  */
	  p = &TYPE_FIELDS (ctx->record_type);
	  while (*p)
	    if (*p == f1 || *p == f2 || *p == f3)
	      *p = DECL_CHAIN (*p);
	    else
	      p = &DECL_CHAIN (*p);
	  /* Relink them at the head, in order f1, f2[, f3].  */
	  DECL_CHAIN (f1) = f2;
	  if (c3)
	    {
	      DECL_CHAIN (f2) = f3;
	      DECL_CHAIN (f3) = TYPE_FIELDS (ctx->record_type);
	    }
	  else
	    DECL_CHAIN (f2) = TYPE_FIELDS (ctx->record_type);
	  TYPE_FIELDS (ctx->record_type) = f1;
	  /* Mirror the same reordering on the sender-side record.  */
	  if (ctx->srecord_type)
	    {
	      f1 = lookup_sfield (OMP_CLAUSE_DECL (c1), ctx);
	      f2 = lookup_sfield (OMP_CLAUSE_DECL (c2), ctx);
	      if (c3)
		f3 = lookup_sfield (OMP_CLAUSE_DECL (c3), ctx);
	      p = &TYPE_FIELDS (ctx->srecord_type);
	      while (*p)
		if (*p == f1 || *p == f2 || *p == f3)
		  *p = DECL_CHAIN (*p);
		else
		  p = &DECL_CHAIN (*p);
	      DECL_CHAIN (f1) = f2;
	      DECL_CHAIN (f2) = TYPE_FIELDS (ctx->srecord_type);
	      if (c3)
		{
		  DECL_CHAIN (f2) = f3;
		  DECL_CHAIN (f3) = TYPE_FIELDS (ctx->srecord_type);
		}
	      else
		DECL_CHAIN (f2) = TYPE_FIELDS (ctx->srecord_type);
	      TYPE_FIELDS (ctx->srecord_type) = f1;
	    }
	}
      layout_type (ctx->record_type);
      fixup_child_record_type (ctx);
      if (ctx->srecord_type)
	layout_type (ctx->srecord_type);
      /* Record the (possibly runtime-computed) size and alignment of
	 the data block on the task statement.  */
      tree t = fold_convert_loc (loc, long_integer_type_node,
				 TYPE_SIZE_UNIT (ctx->record_type));
      if (TREE_CODE (t) != INTEGER_CST)
	{
	  /* A VLA-dependent size may reference decls remapped into this
	     context; rewrite it in terms of the outer decls.  */
	  t = unshare_expr (t);
	  walk_tree (&t, finish_taskreg_remap, ctx, NULL);
	}
      gimple_omp_task_set_arg_size (ctx->stmt, t);
      t = build_int_cst (long_integer_type_node,
			 TYPE_ALIGN_UNIT (ctx->record_type));
      gimple_omp_task_set_arg_align (ctx->stmt, t);
    }
}
2182
2183 /* Find the enclosing offload context. */
2184
2185 static omp_context *
2186 enclosing_target_ctx (omp_context *ctx)
2187 {
2188 for (; ctx; ctx = ctx->outer)
2189 if (gimple_code (ctx->stmt) == GIMPLE_OMP_TARGET)
2190 break;
2191
2192 return ctx;
2193 }
2194
2195 /* Return true if ctx is part of an oacc kernels region. */
2196
2197 static bool
2198 ctx_in_oacc_kernels_region (omp_context *ctx)
2199 {
2200 for (;ctx != NULL; ctx = ctx->outer)
2201 {
2202 gimple *stmt = ctx->stmt;
2203 if (gimple_code (stmt) == GIMPLE_OMP_TARGET
2204 && gimple_omp_target_kind (stmt) == GF_OMP_TARGET_KIND_OACC_KERNELS)
2205 return true;
2206 }
2207
2208 return false;
2209 }
2210
/* Check the parallelism clauses inside a kernels region.
   Until kernels handling moves to use the same loop indirection
   scheme as parallel, we need to do this checking early.  */
2214
2215 static unsigned
2216 check_oacc_kernel_gwv (gomp_for *stmt, omp_context *ctx)
2217 {
2218 bool checking = true;
2219 unsigned outer_mask = 0;
2220 unsigned this_mask = 0;
2221 bool has_seq = false, has_auto = false;
2222
2223 if (ctx->outer)
2224 outer_mask = check_oacc_kernel_gwv (NULL, ctx->outer);
2225 if (!stmt)
2226 {
2227 checking = false;
2228 if (gimple_code (ctx->stmt) != GIMPLE_OMP_FOR)
2229 return outer_mask;
2230 stmt = as_a <gomp_for *> (ctx->stmt);
2231 }
2232
2233 for (tree c = gimple_omp_for_clauses (stmt); c; c = OMP_CLAUSE_CHAIN (c))
2234 {
2235 switch (OMP_CLAUSE_CODE (c))
2236 {
2237 case OMP_CLAUSE_GANG:
2238 this_mask |= GOMP_DIM_MASK (GOMP_DIM_GANG);
2239 break;
2240 case OMP_CLAUSE_WORKER:
2241 this_mask |= GOMP_DIM_MASK (GOMP_DIM_WORKER);
2242 break;
2243 case OMP_CLAUSE_VECTOR:
2244 this_mask |= GOMP_DIM_MASK (GOMP_DIM_VECTOR);
2245 break;
2246 case OMP_CLAUSE_SEQ:
2247 has_seq = true;
2248 break;
2249 case OMP_CLAUSE_AUTO:
2250 has_auto = true;
2251 break;
2252 default:
2253 break;
2254 }
2255 }
2256
2257 if (checking)
2258 {
2259 if (has_seq && (this_mask || has_auto))
2260 error_at (gimple_location (stmt), "%<seq%> overrides other"
2261 " OpenACC loop specifiers");
2262 else if (has_auto && this_mask)
2263 error_at (gimple_location (stmt), "%<auto%> conflicts with other"
2264 " OpenACC loop specifiers");
2265
2266 if (this_mask & outer_mask)
2267 error_at (gimple_location (stmt), "inner loop uses same"
2268 " OpenACC parallelism as containing loop");
2269 }
2270
2271 return outer_mask | this_mask;
2272 }
2273
2274 /* Scan a GIMPLE_OMP_FOR. */
2275
static omp_context *
scan_omp_for (gomp_for *stmt, omp_context *outer_ctx)
{
  omp_context *ctx;
  size_t i;
  tree clauses = gimple_omp_for_clauses (stmt);

  ctx = new_omp_context (stmt, outer_ctx);

  if (is_gimple_omp_oacc (stmt))
    {
      omp_context *tgt = enclosing_target_ctx (outer_ctx);

      /* In an OpenACC parallel region (or an orphaned loop), the
	 gang/worker/vector clauses may not carry an argument.  */
      if (!tgt || is_oacc_parallel (tgt))
	for (tree c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
	  {
	    char const *check = NULL;

	    switch (OMP_CLAUSE_CODE (c))
	      {
	      case OMP_CLAUSE_GANG:
		check = "gang";
		break;

	      case OMP_CLAUSE_WORKER:
		check = "worker";
		break;

	      case OMP_CLAUSE_VECTOR:
		check = "vector";
		break;

	      default:
		break;
	      }

	    /* OMP_CLAUSE_OPERAND (c, 0) is the clause argument,
	       if the user supplied one.  */
	    if (check && OMP_CLAUSE_OPERAND (c, 0))
	      error_at (gimple_location (stmt),
			"argument not permitted on %qs clause in"
			" OpenACC %<parallel%>", check);
	  }

      if (tgt && is_oacc_kernels (tgt))
	{
	  /* Strip out reductions, as they are not handled yet.  */
	  tree *prev_ptr = &clauses;

	  /* Unlink every REDUCTION clause from the chain in place.  */
	  while (tree probe = *prev_ptr)
	    {
	      tree *next_ptr = &OMP_CLAUSE_CHAIN (probe);

	      if (OMP_CLAUSE_CODE (probe) == OMP_CLAUSE_REDUCTION)
		*prev_ptr = *next_ptr;
	      else
		prev_ptr = next_ptr;
	    }

	  /* The chain head may have changed; store it back.  */
	  gimple_omp_for_set_clauses (stmt, clauses);
	  check_oacc_kernel_gwv (stmt, ctx);
	}
    }

  scan_sharing_clauses (clauses, ctx);

  /* Scan the pre-body, then the control operands of every collapsed
     loop level, then the loop body itself.  */
  scan_omp (gimple_omp_for_pre_body_ptr (stmt), ctx);
  for (i = 0; i < gimple_omp_for_collapse (stmt); i++)
    {
      scan_omp_op (gimple_omp_for_index_ptr (stmt, i), ctx);
      scan_omp_op (gimple_omp_for_initial_ptr (stmt, i), ctx);
      scan_omp_op (gimple_omp_for_final_ptr (stmt, i), ctx);
      scan_omp_op (gimple_omp_for_incr_ptr (stmt, i), ctx);
    }
  scan_omp (gimple_omp_body_ptr (stmt), ctx);
  return ctx;
}
2351
2352 /* Duplicate #pragma omp simd, one for SIMT, another one for SIMD. */
2353
static void
scan_omp_simd (gimple_stmt_iterator *gsi, gomp_for *stmt,
	       omp_context *outer_ctx)
{
  /* Replace STMT in place with a bind of the shape:
       cond = GOMP_USE_SIMT ();
       if (cond) goto lab1; else goto lab2;
       lab1: <copy of STMT with an added _simt_ clause>; goto lab3;
       lab2: <original STMT>;
       lab3:
     so the runtime can pick the SIMT or the SIMD variant.  */
  gbind *bind = gimple_build_bind (NULL, NULL, NULL);
  gsi_replace (gsi, bind, false);
  gimple_seq seq = NULL;
  gimple *g = gimple_build_call_internal (IFN_GOMP_USE_SIMT, 0);
  tree cond = create_tmp_var_raw (integer_type_node);
  DECL_CONTEXT (cond) = current_function_decl;
  DECL_SEEN_IN_BIND_EXPR_P (cond) = 1;
  gimple_bind_set_vars (bind, cond);
  gimple_call_set_lhs (g, cond);
  gimple_seq_add_stmt (&seq, g);
  tree lab1 = create_artificial_label (UNKNOWN_LOCATION);
  tree lab2 = create_artificial_label (UNKNOWN_LOCATION);
  tree lab3 = create_artificial_label (UNKNOWN_LOCATION);
  g = gimple_build_cond (NE_EXPR, cond, integer_zero_node, lab1, lab2);
  gimple_seq_add_stmt (&seq, g);
  g = gimple_build_label (lab1);
  gimple_seq_add_stmt (&seq, g);
  /* Duplicate the loop with fresh local decls for the SIMT arm.  */
  gimple_seq new_seq = copy_gimple_seq_and_replace_locals (stmt);
  gomp_for *new_stmt = as_a <gomp_for *> (new_seq);
  /* Prepend an artificial _simt_ clause so later passes can tell the
     SIMT copy apart from the original.  */
  tree clause = build_omp_clause (gimple_location (stmt), OMP_CLAUSE__SIMT_);
  OMP_CLAUSE_CHAIN (clause) = gimple_omp_for_clauses (new_stmt);
  gimple_omp_for_set_clauses (new_stmt, clause);
  gimple_seq_add_stmt (&seq, new_stmt);
  g = gimple_build_goto (lab3);
  gimple_seq_add_stmt (&seq, g);
  g = gimple_build_label (lab2);
  gimple_seq_add_stmt (&seq, g);
  gimple_seq_add_stmt (&seq, stmt);
  g = gimple_build_label (lab3);
  gimple_seq_add_stmt (&seq, g);
  gimple_bind_set_body (bind, seq);
  update_stmt (bind);
  /* Scan both copies; remember the SIMT twin on the original's ctx.  */
  scan_omp_for (new_stmt, outer_ctx);
  scan_omp_for (stmt, outer_ctx)->simt_stmt = new_stmt;
}
2393
2394 /* Scan an OpenMP sections directive. */
2395
2396 static void
2397 scan_omp_sections (gomp_sections *stmt, omp_context *outer_ctx)
2398 {
2399 omp_context *ctx;
2400
2401 ctx = new_omp_context (stmt, outer_ctx);
2402 scan_sharing_clauses (gimple_omp_sections_clauses (stmt), ctx);
2403 scan_omp (gimple_omp_body_ptr (stmt), ctx);
2404 }
2405
2406 /* Scan an OpenMP single directive. */
2407
2408 static void
2409 scan_omp_single (gomp_single *stmt, omp_context *outer_ctx)
2410 {
2411 omp_context *ctx;
2412 tree name;
2413
2414 ctx = new_omp_context (stmt, outer_ctx);
2415 ctx->field_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
2416 ctx->record_type = lang_hooks.types.make_type (RECORD_TYPE);
2417 name = create_tmp_var_name (".omp_copy_s");
2418 name = build_decl (gimple_location (stmt),
2419 TYPE_DECL, name, ctx->record_type);
2420 TYPE_NAME (ctx->record_type) = name;
2421
2422 scan_sharing_clauses (gimple_omp_single_clauses (stmt), ctx);
2423 scan_omp (gimple_omp_body_ptr (stmt), ctx);
2424
2425 if (TYPE_FIELDS (ctx->record_type) == NULL)
2426 ctx->record_type = NULL;
2427 else
2428 layout_type (ctx->record_type);
2429 }
2430
2431 /* Scan a GIMPLE_OMP_TARGET. */
2432
2433 static void
2434 scan_omp_target (gomp_target *stmt, omp_context *outer_ctx)
2435 {
2436 omp_context *ctx;
2437 tree name;
2438 bool offloaded = is_gimple_omp_offloaded (stmt);
2439 tree clauses = gimple_omp_target_clauses (stmt);
2440
2441 ctx = new_omp_context (stmt, outer_ctx);
2442 ctx->field_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
2443 ctx->record_type = lang_hooks.types.make_type (RECORD_TYPE);
2444 name = create_tmp_var_name (".omp_data_t");
2445 name = build_decl (gimple_location (stmt),
2446 TYPE_DECL, name, ctx->record_type);
2447 DECL_ARTIFICIAL (name) = 1;
2448 DECL_NAMELESS (name) = 1;
2449 TYPE_NAME (ctx->record_type) = name;
2450 TYPE_ARTIFICIAL (ctx->record_type) = 1;
2451
2452 if (offloaded)
2453 {
2454 create_omp_child_function (ctx, false);
2455 gimple_omp_target_set_child_fn (stmt, ctx->cb.dst_fn);
2456 }
2457
2458 scan_sharing_clauses (clauses, ctx);
2459 scan_omp (gimple_omp_body_ptr (stmt), ctx);
2460
2461 if (TYPE_FIELDS (ctx->record_type) == NULL)
2462 ctx->record_type = ctx->receiver_decl = NULL;
2463 else
2464 {
2465 TYPE_FIELDS (ctx->record_type)
2466 = nreverse (TYPE_FIELDS (ctx->record_type));
2467 if (flag_checking)
2468 {
2469 unsigned int align = DECL_ALIGN (TYPE_FIELDS (ctx->record_type));
2470 for (tree field = TYPE_FIELDS (ctx->record_type);
2471 field;
2472 field = DECL_CHAIN (field))
2473 gcc_assert (DECL_ALIGN (field) == align);
2474 }
2475 layout_type (ctx->record_type);
2476 if (offloaded)
2477 fixup_child_record_type (ctx);
2478 }
2479 }
2480
2481 /* Scan an OpenMP teams directive. */
2482
2483 static void
2484 scan_omp_teams (gomp_teams *stmt, omp_context *outer_ctx)
2485 {
2486 omp_context *ctx = new_omp_context (stmt, outer_ctx);
2487
2488 if (!gimple_omp_teams_host (stmt))
2489 {
2490 scan_sharing_clauses (gimple_omp_teams_clauses (stmt), ctx);
2491 scan_omp (gimple_omp_body_ptr (stmt), ctx);
2492 return;
2493 }
2494 taskreg_contexts.safe_push (ctx);
2495 gcc_assert (taskreg_nesting_level == 1);
2496 ctx->field_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
2497 ctx->record_type = lang_hooks.types.make_type (RECORD_TYPE);
2498 tree name = create_tmp_var_name (".omp_data_s");
2499 name = build_decl (gimple_location (stmt),
2500 TYPE_DECL, name, ctx->record_type);
2501 DECL_ARTIFICIAL (name) = 1;
2502 DECL_NAMELESS (name) = 1;
2503 TYPE_NAME (ctx->record_type) = name;
2504 TYPE_ARTIFICIAL (ctx->record_type) = 1;
2505 create_omp_child_function (ctx, false);
2506 gimple_omp_teams_set_child_fn (stmt, ctx->cb.dst_fn);
2507
2508 scan_sharing_clauses (gimple_omp_teams_clauses (stmt), ctx);
2509 scan_omp (gimple_omp_body_ptr (stmt), ctx);
2510
2511 if (TYPE_FIELDS (ctx->record_type) == NULL)
2512 ctx->record_type = ctx->receiver_decl = NULL;
2513 }
2514
2515 /* Check nesting restrictions. */
2516 static bool
2517 check_omp_nesting_restrictions (gimple *stmt, omp_context *ctx)
2518 {
2519 tree c;
2520
2521 if (ctx && gimple_code (ctx->stmt) == GIMPLE_OMP_GRID_BODY)
2522 /* GRID_BODY is an artificial construct, nesting rules will be checked in
2523 the original copy of its contents. */
2524 return true;
2525
2526 /* No nesting of non-OpenACC STMT (that is, an OpenMP one, or a GOMP builtin)
2527 inside an OpenACC CTX. */
2528 if (!(is_gimple_omp (stmt)
2529 && is_gimple_omp_oacc (stmt))
2530 /* Except for atomic codes that we share with OpenMP. */
2531 && !(gimple_code (stmt) == GIMPLE_OMP_ATOMIC_LOAD
2532 || gimple_code (stmt) == GIMPLE_OMP_ATOMIC_STORE))
2533 {
2534 if (oacc_get_fn_attrib (cfun->decl) != NULL)
2535 {
2536 error_at (gimple_location (stmt),
2537 "non-OpenACC construct inside of OpenACC routine");
2538 return false;
2539 }
2540 else
2541 for (omp_context *octx = ctx; octx != NULL; octx = octx->outer)
2542 if (is_gimple_omp (octx->stmt)
2543 && is_gimple_omp_oacc (octx->stmt))
2544 {
2545 error_at (gimple_location (stmt),
2546 "non-OpenACC construct inside of OpenACC region");
2547 return false;
2548 }
2549 }
2550
2551 if (ctx != NULL)
2552 {
2553 if (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
2554 && gimple_omp_for_kind (ctx->stmt) & GF_OMP_FOR_SIMD)
2555 {
2556 c = NULL_TREE;
2557 if (gimple_code (stmt) == GIMPLE_OMP_ORDERED)
2558 {
2559 c = gimple_omp_ordered_clauses (as_a <gomp_ordered *> (stmt));
2560 if (omp_find_clause (c, OMP_CLAUSE_SIMD))
2561 {
2562 if (omp_find_clause (c, OMP_CLAUSE_THREADS)
2563 && (ctx->outer == NULL
2564 || !gimple_omp_for_combined_into_p (ctx->stmt)
2565 || gimple_code (ctx->outer->stmt) != GIMPLE_OMP_FOR
2566 || (gimple_omp_for_kind (ctx->outer->stmt)
2567 != GF_OMP_FOR_KIND_FOR)
2568 || !gimple_omp_for_combined_p (ctx->outer->stmt)))
2569 {
2570 error_at (gimple_location (stmt),
2571 "%<ordered simd threads%> must be closely "
2572 "nested inside of %<for simd%> region");
2573 return false;
2574 }
2575 return true;
2576 }
2577 }
2578 else if (gimple_code (stmt) == GIMPLE_OMP_ATOMIC_LOAD
2579 || gimple_code (stmt) == GIMPLE_OMP_ATOMIC_STORE)
2580 return true;
2581 error_at (gimple_location (stmt),
2582 "OpenMP constructs other than %<#pragma omp ordered simd%>"
2583 " or %<#pragma omp atomic%> may not be nested inside"
2584 " %<simd%> region");
2585 return false;
2586 }
2587 else if (gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS)
2588 {
2589 if ((gimple_code (stmt) != GIMPLE_OMP_FOR
2590 || ((gimple_omp_for_kind (stmt) != GF_OMP_FOR_KIND_DISTRIBUTE)
2591 && (gimple_omp_for_kind (stmt) != GF_OMP_FOR_KIND_GRID_LOOP)))
2592 && gimple_code (stmt) != GIMPLE_OMP_PARALLEL)
2593 {
2594 error_at (gimple_location (stmt),
2595 "only %<distribute%> or %<parallel%> regions are "
2596 "allowed to be strictly nested inside %<teams%> "
2597 "region");
2598 return false;
2599 }
2600 }
2601 }
2602 switch (gimple_code (stmt))
2603 {
2604 case GIMPLE_OMP_FOR:
2605 if (gimple_omp_for_kind (stmt) & GF_OMP_FOR_SIMD)
2606 return true;
2607 if (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_DISTRIBUTE)
2608 {
2609 if (ctx != NULL && gimple_code (ctx->stmt) != GIMPLE_OMP_TEAMS)
2610 {
2611 error_at (gimple_location (stmt),
2612 "%<distribute%> region must be strictly nested "
2613 "inside %<teams%> construct");
2614 return false;
2615 }
2616 return true;
2617 }
2618 /* We split taskloop into task and nested taskloop in it. */
2619 if (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_TASKLOOP)
2620 return true;
2621 if (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_OACC_LOOP)
2622 {
2623 bool ok = false;
2624
2625 if (ctx)
2626 switch (gimple_code (ctx->stmt))
2627 {
2628 case GIMPLE_OMP_FOR:
2629 ok = (gimple_omp_for_kind (ctx->stmt)
2630 == GF_OMP_FOR_KIND_OACC_LOOP);
2631 break;
2632
2633 case GIMPLE_OMP_TARGET:
2634 switch (gimple_omp_target_kind (ctx->stmt))
2635 {
2636 case GF_OMP_TARGET_KIND_OACC_PARALLEL:
2637 case GF_OMP_TARGET_KIND_OACC_KERNELS:
2638 ok = true;
2639 break;
2640
2641 default:
2642 break;
2643 }
2644
2645 default:
2646 break;
2647 }
2648 else if (oacc_get_fn_attrib (current_function_decl))
2649 ok = true;
2650 if (!ok)
2651 {
2652 error_at (gimple_location (stmt),
2653 "OpenACC loop directive must be associated with"
2654 " an OpenACC compute region");
2655 return false;
2656 }
2657 }
2658 /* FALLTHRU */
2659 case GIMPLE_CALL:
2660 if (is_gimple_call (stmt)
2661 && (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
2662 == BUILT_IN_GOMP_CANCEL
2663 || DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
2664 == BUILT_IN_GOMP_CANCELLATION_POINT))
2665 {
2666 const char *bad = NULL;
2667 const char *kind = NULL;
2668 const char *construct
2669 = (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
2670 == BUILT_IN_GOMP_CANCEL)
2671 ? "#pragma omp cancel"
2672 : "#pragma omp cancellation point";
2673 if (ctx == NULL)
2674 {
2675 error_at (gimple_location (stmt), "orphaned %qs construct",
2676 construct);
2677 return false;
2678 }
2679 switch (tree_fits_shwi_p (gimple_call_arg (stmt, 0))
2680 ? tree_to_shwi (gimple_call_arg (stmt, 0))
2681 : 0)
2682 {
2683 case 1:
2684 if (gimple_code (ctx->stmt) != GIMPLE_OMP_PARALLEL)
2685 bad = "#pragma omp parallel";
2686 else if (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
2687 == BUILT_IN_GOMP_CANCEL
2688 && !integer_zerop (gimple_call_arg (stmt, 1)))
2689 ctx->cancellable = true;
2690 kind = "parallel";
2691 break;
2692 case 2:
2693 if (gimple_code (ctx->stmt) != GIMPLE_OMP_FOR
2694 || gimple_omp_for_kind (ctx->stmt) != GF_OMP_FOR_KIND_FOR)
2695 bad = "#pragma omp for";
2696 else if (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
2697 == BUILT_IN_GOMP_CANCEL
2698 && !integer_zerop (gimple_call_arg (stmt, 1)))
2699 {
2700 ctx->cancellable = true;
2701 if (omp_find_clause (gimple_omp_for_clauses (ctx->stmt),
2702 OMP_CLAUSE_NOWAIT))
2703 warning_at (gimple_location (stmt), 0,
2704 "%<#pragma omp cancel for%> inside "
2705 "%<nowait%> for construct");
2706 if (omp_find_clause (gimple_omp_for_clauses (ctx->stmt),
2707 OMP_CLAUSE_ORDERED))
2708 warning_at (gimple_location (stmt), 0,
2709 "%<#pragma omp cancel for%> inside "
2710 "%<ordered%> for construct");
2711 }
2712 kind = "for";
2713 break;
2714 case 4:
2715 if (gimple_code (ctx->stmt) != GIMPLE_OMP_SECTIONS
2716 && gimple_code (ctx->stmt) != GIMPLE_OMP_SECTION)
2717 bad = "#pragma omp sections";
2718 else if (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
2719 == BUILT_IN_GOMP_CANCEL
2720 && !integer_zerop (gimple_call_arg (stmt, 1)))
2721 {
2722 if (gimple_code (ctx->stmt) == GIMPLE_OMP_SECTIONS)
2723 {
2724 ctx->cancellable = true;
2725 if (omp_find_clause (gimple_omp_sections_clauses
2726 (ctx->stmt),
2727 OMP_CLAUSE_NOWAIT))
2728 warning_at (gimple_location (stmt), 0,
2729 "%<#pragma omp cancel sections%> inside "
2730 "%<nowait%> sections construct");
2731 }
2732 else
2733 {
2734 gcc_assert (ctx->outer
2735 && gimple_code (ctx->outer->stmt)
2736 == GIMPLE_OMP_SECTIONS);
2737 ctx->outer->cancellable = true;
2738 if (omp_find_clause (gimple_omp_sections_clauses
2739 (ctx->outer->stmt),
2740 OMP_CLAUSE_NOWAIT))
2741 warning_at (gimple_location (stmt), 0,
2742 "%<#pragma omp cancel sections%> inside "
2743 "%<nowait%> sections construct");
2744 }
2745 }
2746 kind = "sections";
2747 break;
2748 case 8:
2749 if (!is_task_ctx (ctx)
2750 && (!is_taskloop_ctx (ctx)
2751 || ctx->outer == NULL
2752 || !is_task_ctx (ctx->outer)))
2753 bad = "#pragma omp task";
2754 else
2755 {
2756 for (omp_context *octx = ctx->outer;
2757 octx; octx = octx->outer)
2758 {
2759 switch (gimple_code (octx->stmt))
2760 {
2761 case GIMPLE_OMP_TASKGROUP:
2762 break;
2763 case GIMPLE_OMP_TARGET:
2764 if (gimple_omp_target_kind (octx->stmt)
2765 != GF_OMP_TARGET_KIND_REGION)
2766 continue;
2767 /* FALLTHRU */
2768 case GIMPLE_OMP_PARALLEL:
2769 case GIMPLE_OMP_TEAMS:
2770 error_at (gimple_location (stmt),
2771 "%<%s taskgroup%> construct not closely "
2772 "nested inside of %<taskgroup%> region",
2773 construct);
2774 return false;
2775 case GIMPLE_OMP_TASK:
2776 if (gimple_omp_task_taskloop_p (octx->stmt)
2777 && octx->outer
2778 && is_taskloop_ctx (octx->outer))
2779 {
2780 tree clauses
2781 = gimple_omp_for_clauses (octx->outer->stmt);
2782 if (!omp_find_clause (clauses, OMP_CLAUSE_NOGROUP))
2783 break;
2784 }
2785 continue;
2786 default:
2787 continue;
2788 }
2789 break;
2790 }
2791 ctx->cancellable = true;
2792 }
2793 kind = "taskgroup";
2794 break;
2795 default:
2796 error_at (gimple_location (stmt), "invalid arguments");
2797 return false;
2798 }
2799 if (bad)
2800 {
2801 error_at (gimple_location (stmt),
2802 "%<%s %s%> construct not closely nested inside of %qs",
2803 construct, kind, bad);
2804 return false;
2805 }
2806 }
2807 /* FALLTHRU */
2808 case GIMPLE_OMP_SECTIONS:
2809 case GIMPLE_OMP_SINGLE:
2810 for (; ctx != NULL; ctx = ctx->outer)
2811 switch (gimple_code (ctx->stmt))
2812 {
2813 case GIMPLE_OMP_FOR:
2814 if (gimple_omp_for_kind (ctx->stmt) != GF_OMP_FOR_KIND_FOR
2815 && gimple_omp_for_kind (ctx->stmt) != GF_OMP_FOR_KIND_TASKLOOP)
2816 break;
2817 /* FALLTHRU */
2818 case GIMPLE_OMP_SECTIONS:
2819 case GIMPLE_OMP_SINGLE:
2820 case GIMPLE_OMP_ORDERED:
2821 case GIMPLE_OMP_MASTER:
2822 case GIMPLE_OMP_TASK:
2823 case GIMPLE_OMP_CRITICAL:
2824 if (is_gimple_call (stmt))
2825 {
2826 if (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
2827 != BUILT_IN_GOMP_BARRIER)
2828 return true;
2829 error_at (gimple_location (stmt),
2830 "barrier region may not be closely nested inside "
2831 "of work-sharing, %<critical%>, %<ordered%>, "
2832 "%<master%>, explicit %<task%> or %<taskloop%> "
2833 "region");
2834 return false;
2835 }
2836 error_at (gimple_location (stmt),
2837 "work-sharing region may not be closely nested inside "
2838 "of work-sharing, %<critical%>, %<ordered%>, "
2839 "%<master%>, explicit %<task%> or %<taskloop%> region");
2840 return false;
2841 case GIMPLE_OMP_PARALLEL:
2842 case GIMPLE_OMP_TEAMS:
2843 return true;
2844 case GIMPLE_OMP_TARGET:
2845 if (gimple_omp_target_kind (ctx->stmt)
2846 == GF_OMP_TARGET_KIND_REGION)
2847 return true;
2848 break;
2849 default:
2850 break;
2851 }
2852 break;
2853 case GIMPLE_OMP_MASTER:
2854 for (; ctx != NULL; ctx = ctx->outer)
2855 switch (gimple_code (ctx->stmt))
2856 {
2857 case GIMPLE_OMP_FOR:
2858 if (gimple_omp_for_kind (ctx->stmt) != GF_OMP_FOR_KIND_FOR
2859 && gimple_omp_for_kind (ctx->stmt) != GF_OMP_FOR_KIND_TASKLOOP)
2860 break;
2861 /* FALLTHRU */
2862 case GIMPLE_OMP_SECTIONS:
2863 case GIMPLE_OMP_SINGLE:
2864 case GIMPLE_OMP_TASK:
2865 error_at (gimple_location (stmt),
2866 "%<master%> region may not be closely nested inside "
2867 "of work-sharing, explicit %<task%> or %<taskloop%> "
2868 "region");
2869 return false;
2870 case GIMPLE_OMP_PARALLEL:
2871 case GIMPLE_OMP_TEAMS:
2872 return true;
2873 case GIMPLE_OMP_TARGET:
2874 if (gimple_omp_target_kind (ctx->stmt)
2875 == GF_OMP_TARGET_KIND_REGION)
2876 return true;
2877 break;
2878 default:
2879 break;
2880 }
2881 break;
2882 case GIMPLE_OMP_TASK:
2883 for (c = gimple_omp_task_clauses (stmt); c; c = OMP_CLAUSE_CHAIN (c))
2884 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND
2885 && (OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SOURCE
2886 || OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SINK))
2887 {
2888 enum omp_clause_depend_kind kind = OMP_CLAUSE_DEPEND_KIND (c);
2889 error_at (OMP_CLAUSE_LOCATION (c),
2890 "%<depend(%s)%> is only allowed in %<omp ordered%>",
2891 kind == OMP_CLAUSE_DEPEND_SOURCE ? "source" : "sink");
2892 return false;
2893 }
2894 break;
2895 case GIMPLE_OMP_ORDERED:
2896 for (c = gimple_omp_ordered_clauses (as_a <gomp_ordered *> (stmt));
2897 c; c = OMP_CLAUSE_CHAIN (c))
2898 {
2899 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_DEPEND)
2900 {
2901 gcc_assert (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_THREADS
2902 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SIMD);
2903 continue;
2904 }
2905 enum omp_clause_depend_kind kind = OMP_CLAUSE_DEPEND_KIND (c);
2906 if (kind == OMP_CLAUSE_DEPEND_SOURCE
2907 || kind == OMP_CLAUSE_DEPEND_SINK)
2908 {
2909 tree oclause;
2910 /* Look for containing ordered(N) loop. */
2911 if (ctx == NULL
2912 || gimple_code (ctx->stmt) != GIMPLE_OMP_FOR
2913 || (oclause
2914 = omp_find_clause (gimple_omp_for_clauses (ctx->stmt),
2915 OMP_CLAUSE_ORDERED)) == NULL_TREE)
2916 {
2917 error_at (OMP_CLAUSE_LOCATION (c),
2918 "%<ordered%> construct with %<depend%> clause "
2919 "must be closely nested inside an %<ordered%> "
2920 "loop");
2921 return false;
2922 }
2923 else if (OMP_CLAUSE_ORDERED_EXPR (oclause) == NULL_TREE)
2924 {
2925 error_at (OMP_CLAUSE_LOCATION (c),
2926 "%<ordered%> construct with %<depend%> clause "
2927 "must be closely nested inside a loop with "
2928 "%<ordered%> clause with a parameter");
2929 return false;
2930 }
2931 }
2932 else
2933 {
2934 error_at (OMP_CLAUSE_LOCATION (c),
2935 "invalid depend kind in omp %<ordered%> %<depend%>");
2936 return false;
2937 }
2938 }
2939 c = gimple_omp_ordered_clauses (as_a <gomp_ordered *> (stmt));
2940 if (omp_find_clause (c, OMP_CLAUSE_SIMD))
2941 {
2942 /* ordered simd must be closely nested inside of simd region,
2943 and simd region must not encounter constructs other than
2944 ordered simd, therefore ordered simd may be either orphaned,
2945 or ctx->stmt must be simd. The latter case is handled already
2946 earlier. */
2947 if (ctx != NULL)
2948 {
2949 error_at (gimple_location (stmt),
2950 "%<ordered%> %<simd%> must be closely nested inside "
2951 "%<simd%> region");
2952 return false;
2953 }
2954 }
2955 for (; ctx != NULL; ctx = ctx->outer)
2956 switch (gimple_code (ctx->stmt))
2957 {
2958 case GIMPLE_OMP_CRITICAL:
2959 case GIMPLE_OMP_TASK:
2960 case GIMPLE_OMP_ORDERED:
2961 ordered_in_taskloop:
2962 error_at (gimple_location (stmt),
2963 "%<ordered%> region may not be closely nested inside "
2964 "of %<critical%>, %<ordered%>, explicit %<task%> or "
2965 "%<taskloop%> region");
2966 return false;
2967 case GIMPLE_OMP_FOR:
2968 if (gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_TASKLOOP)
2969 goto ordered_in_taskloop;
2970 tree o;
2971 o = omp_find_clause (gimple_omp_for_clauses (ctx->stmt),
2972 OMP_CLAUSE_ORDERED);
2973 if (o == NULL)
2974 {
2975 error_at (gimple_location (stmt),
2976 "%<ordered%> region must be closely nested inside "
2977 "a loop region with an %<ordered%> clause");
2978 return false;
2979 }
2980 if (OMP_CLAUSE_ORDERED_EXPR (o) != NULL_TREE
2981 && omp_find_clause (c, OMP_CLAUSE_DEPEND) == NULL_TREE)
2982 {
2983 error_at (gimple_location (stmt),
2984 "%<ordered%> region without %<depend%> clause may "
2985 "not be closely nested inside a loop region with "
2986 "an %<ordered%> clause with a parameter");
2987 return false;
2988 }
2989 return true;
2990 case GIMPLE_OMP_TARGET:
2991 if (gimple_omp_target_kind (ctx->stmt)
2992 != GF_OMP_TARGET_KIND_REGION)
2993 break;
2994 /* FALLTHRU */
2995 case GIMPLE_OMP_PARALLEL:
2996 case GIMPLE_OMP_TEAMS:
2997 error_at (gimple_location (stmt),
2998 "%<ordered%> region must be closely nested inside "
2999 "a loop region with an %<ordered%> clause");
3000 return false;
3001 default:
3002 break;
3003 }
3004 break;
3005 case GIMPLE_OMP_CRITICAL:
3006 {
3007 tree this_stmt_name
3008 = gimple_omp_critical_name (as_a <gomp_critical *> (stmt));
3009 for (; ctx != NULL; ctx = ctx->outer)
3010 if (gomp_critical *other_crit
3011 = dyn_cast <gomp_critical *> (ctx->stmt))
3012 if (this_stmt_name == gimple_omp_critical_name (other_crit))
3013 {
3014 error_at (gimple_location (stmt),
3015 "%<critical%> region may not be nested inside "
3016 "a %<critical%> region with the same name");
3017 return false;
3018 }
3019 }
3020 break;
3021 case GIMPLE_OMP_TEAMS:
3022 if (ctx == NULL)
3023 break;
3024 else if (gimple_code (ctx->stmt) != GIMPLE_OMP_TARGET
3025 || (gimple_omp_target_kind (ctx->stmt)
3026 != GF_OMP_TARGET_KIND_REGION))
3027 {
3028 /* Teams construct can appear either strictly nested inside of
3029 target construct with no intervening stmts, or can be encountered
3030 only by initial task (so must not appear inside any OpenMP
3031 construct. */
3032 error_at (gimple_location (stmt),
3033 "%<teams%> construct must be closely nested inside of "
3034 "%<target%> construct or not nested in any OpenMP "
3035 "construct");
3036 return false;
3037 }
3038 break;
3039 case GIMPLE_OMP_TARGET:
3040 for (c = gimple_omp_target_clauses (stmt); c; c = OMP_CLAUSE_CHAIN (c))
3041 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND
3042 && (OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SOURCE
3043 || OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SINK))
3044 {
3045 enum omp_clause_depend_kind kind = OMP_CLAUSE_DEPEND_KIND (c);
3046 error_at (OMP_CLAUSE_LOCATION (c),
3047 "%<depend(%s)%> is only allowed in %<omp ordered%>",
3048 kind == OMP_CLAUSE_DEPEND_SOURCE ? "source" : "sink");
3049 return false;
3050 }
3051 if (is_gimple_omp_offloaded (stmt)
3052 && oacc_get_fn_attrib (cfun->decl) != NULL)
3053 {
3054 error_at (gimple_location (stmt),
3055 "OpenACC region inside of OpenACC routine, nested "
3056 "parallelism not supported yet");
3057 return false;
3058 }
3059 for (; ctx != NULL; ctx = ctx->outer)
3060 {
3061 if (gimple_code (ctx->stmt) != GIMPLE_OMP_TARGET)
3062 {
3063 if (is_gimple_omp (stmt)
3064 && is_gimple_omp_oacc (stmt)
3065 && is_gimple_omp (ctx->stmt))
3066 {
3067 error_at (gimple_location (stmt),
3068 "OpenACC construct inside of non-OpenACC region");
3069 return false;
3070 }
3071 continue;
3072 }
3073
3074 const char *stmt_name, *ctx_stmt_name;
3075 switch (gimple_omp_target_kind (stmt))
3076 {
3077 case GF_OMP_TARGET_KIND_REGION: stmt_name = "target"; break;
3078 case GF_OMP_TARGET_KIND_DATA: stmt_name = "target data"; break;
3079 case GF_OMP_TARGET_KIND_UPDATE: stmt_name = "target update"; break;
3080 case GF_OMP_TARGET_KIND_ENTER_DATA:
3081 stmt_name = "target enter data"; break;
3082 case GF_OMP_TARGET_KIND_EXIT_DATA:
3083 stmt_name = "target exit data"; break;
3084 case GF_OMP_TARGET_KIND_OACC_PARALLEL: stmt_name = "parallel"; break;
3085 case GF_OMP_TARGET_KIND_OACC_KERNELS: stmt_name = "kernels"; break;
3086 case GF_OMP_TARGET_KIND_OACC_DATA: stmt_name = "data"; break;
3087 case GF_OMP_TARGET_KIND_OACC_UPDATE: stmt_name = "update"; break;
3088 case GF_OMP_TARGET_KIND_OACC_ENTER_EXIT_DATA:
3089 stmt_name = "enter/exit data"; break;
3090 case GF_OMP_TARGET_KIND_OACC_HOST_DATA: stmt_name = "host_data";
3091 break;
3092 default: gcc_unreachable ();
3093 }
3094 switch (gimple_omp_target_kind (ctx->stmt))
3095 {
3096 case GF_OMP_TARGET_KIND_REGION: ctx_stmt_name = "target"; break;
3097 case GF_OMP_TARGET_KIND_DATA: ctx_stmt_name = "target data"; break;
3098 case GF_OMP_TARGET_KIND_OACC_PARALLEL:
3099 ctx_stmt_name = "parallel"; break;
3100 case GF_OMP_TARGET_KIND_OACC_KERNELS:
3101 ctx_stmt_name = "kernels"; break;
3102 case GF_OMP_TARGET_KIND_OACC_DATA: ctx_stmt_name = "data"; break;
3103 case GF_OMP_TARGET_KIND_OACC_HOST_DATA:
3104 ctx_stmt_name = "host_data"; break;
3105 default: gcc_unreachable ();
3106 }
3107
3108 /* OpenACC/OpenMP mismatch? */
3109 if (is_gimple_omp_oacc (stmt)
3110 != is_gimple_omp_oacc (ctx->stmt))
3111 {
3112 error_at (gimple_location (stmt),
3113 "%s %qs construct inside of %s %qs region",
3114 (is_gimple_omp_oacc (stmt)
3115 ? "OpenACC" : "OpenMP"), stmt_name,
3116 (is_gimple_omp_oacc (ctx->stmt)
3117 ? "OpenACC" : "OpenMP"), ctx_stmt_name);
3118 return false;
3119 }
3120 if (is_gimple_omp_offloaded (ctx->stmt))
3121 {
3122 /* No GIMPLE_OMP_TARGET inside offloaded OpenACC CTX. */
3123 if (is_gimple_omp_oacc (ctx->stmt))
3124 {
3125 error_at (gimple_location (stmt),
3126 "%qs construct inside of %qs region",
3127 stmt_name, ctx_stmt_name);
3128 return false;
3129 }
3130 else
3131 {
3132 warning_at (gimple_location (stmt), 0,
3133 "%qs construct inside of %qs region",
3134 stmt_name, ctx_stmt_name);
3135 }
3136 }
3137 }
3138 break;
3139 default:
3140 break;
3141 }
3142 return true;
3143 }
3144
3145
3146 /* Helper function scan_omp.
3147
3148 Callback for walk_tree or operators in walk_gimple_stmt used to
3149 scan for OMP directives in TP. */
3150
static tree
scan_omp_1_op (tree *tp, int *walk_subtrees, void *data)
{
  struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
  omp_context *ctx = (omp_context *) wi->info;
  tree t = *tp;

  switch (TREE_CODE (t))
    {
    case VAR_DECL:
    case PARM_DECL:
    case LABEL_DECL:
    case RESULT_DECL:
      if (ctx)
	{
	  /* Replace the decl with its remapped copy from CTX's copy-body
	     data; the replacement is written back through TP.  */
	  tree repl = remap_decl (t, &ctx->cb);
	  gcc_checking_assert (TREE_CODE (repl) != ERROR_MARK);
	  *tp = repl;
	}
      break;

    default:
      /* Types themselves are remapped directly; for other non-decl
	 trees, walk into the operands and fix up the node's type.  */
      if (ctx && TYPE_P (t))
	*tp = remap_type (t, &ctx->cb);
      else if (!DECL_P (t))
	{
	  *walk_subtrees = 1;
	  if (ctx)
	    {
	      tree tem = remap_type (TREE_TYPE (t), &ctx->cb);
	      if (tem != TREE_TYPE (t))
		{
		  /* INTEGER_CST nodes are shared, so build a fresh
		     constant in the remapped type rather than mutating
		     the shared node in place.  */
		  if (TREE_CODE (t) == INTEGER_CST)
		    *tp = wide_int_to_tree (tem, wi::to_wide (t));
		  else
		    TREE_TYPE (t) = tem;
		}
	    }
	}
      break;
    }

  return NULL_TREE;
}
3195
3196 /* Return true if FNDECL is a setjmp or a longjmp. */
3197
3198 static bool
3199 setjmp_or_longjmp_p (const_tree fndecl)
3200 {
3201 if (fndecl_built_in_p (fndecl, BUILT_IN_SETJMP)
3202 || fndecl_built_in_p (fndecl, BUILT_IN_LONGJMP))
3203 return true;
3204
3205 tree declname = DECL_NAME (fndecl);
3206 if (!declname)
3207 return false;
3208 const char *name = IDENTIFIER_POINTER (declname);
3209 return !strcmp (name, "setjmp") || !strcmp (name, "longjmp");
3210 }
3211
3212
3213 /* Helper function for scan_omp.
3214
3215 Callback for walk_gimple_stmt used to scan for OMP directives in
3216 the current statement in GSI. */
3217
static tree
scan_omp_1_stmt (gimple_stmt_iterator *gsi, bool *handled_ops_p,
		 struct walk_stmt_info *wi)
{
  gimple *stmt = gsi_stmt (*gsi);
  omp_context *ctx = (omp_context *) wi->info;

  if (gimple_has_location (stmt))
    input_location = gimple_location (stmt);

  /* Check the nesting restrictions.  A statement that violates them is
     replaced by a nop below so later passes never see it.  */
  bool remove = false;
  if (is_gimple_omp (stmt))
    remove = !check_omp_nesting_restrictions (stmt, ctx);
  else if (is_gimple_call (stmt))
    {
      tree fndecl = gimple_call_fndecl (stmt);
      if (fndecl)
	{
	  /* setjmp/longjmp are not allowed inside simd regions.  */
	  if (setjmp_or_longjmp_p (fndecl)
	      && ctx
	      && gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
	      && gimple_omp_for_kind (ctx->stmt) & GF_OMP_FOR_SIMD)
	    {
	      remove = true;
	      error_at (gimple_location (stmt),
			"setjmp/longjmp inside simd construct");
	    }
	  /* Certain libgomp calls stand for OpenMP constructs and are
	     subject to the same nesting restrictions.  */
	  else if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
	    switch (DECL_FUNCTION_CODE (fndecl))
	      {
	      case BUILT_IN_GOMP_BARRIER:
	      case BUILT_IN_GOMP_CANCEL:
	      case BUILT_IN_GOMP_CANCELLATION_POINT:
	      case BUILT_IN_GOMP_TASKYIELD:
	      case BUILT_IN_GOMP_TASKWAIT:
	      case BUILT_IN_GOMP_TASKGROUP_START:
	      case BUILT_IN_GOMP_TASKGROUP_END:
		remove = !check_omp_nesting_restrictions (stmt, ctx);
		break;
	      default:
		break;
	      }
	}
    }
  if (remove)
    {
      stmt = gimple_build_nop ();
      gsi_replace (gsi, stmt, false);
    }

  *handled_ops_p = true;

  /* Dispatch to the construct-specific scanner; most of them create a
     new omp_context for the construct's body.  */
  switch (gimple_code (stmt))
    {
    case GIMPLE_OMP_PARALLEL:
      taskreg_nesting_level++;
      scan_omp_parallel (gsi, ctx);
      taskreg_nesting_level--;
      break;

    case GIMPLE_OMP_TASK:
      taskreg_nesting_level++;
      scan_omp_task (gsi, ctx);
      taskreg_nesting_level--;
      break;

    case GIMPLE_OMP_FOR:
      /* Simd loops in (maybe) offloaded regions get SIMT treatment when
	 the target supports a SIMT vectorization factor.  */
      if (((gimple_omp_for_kind (as_a <gomp_for *> (stmt))
	    & GF_OMP_FOR_KIND_MASK) == GF_OMP_FOR_KIND_SIMD)
	  && omp_maybe_offloaded_ctx (ctx)
	  && omp_max_simt_vf ())
	scan_omp_simd (gsi, as_a <gomp_for *> (stmt), ctx);
      else
	scan_omp_for (as_a <gomp_for *> (stmt), ctx);
      break;

    case GIMPLE_OMP_SECTIONS:
      scan_omp_sections (as_a <gomp_sections *> (stmt), ctx);
      break;

    case GIMPLE_OMP_SINGLE:
      scan_omp_single (as_a <gomp_single *> (stmt), ctx);
      break;

    case GIMPLE_OMP_SECTION:
    case GIMPLE_OMP_MASTER:
    case GIMPLE_OMP_ORDERED:
    case GIMPLE_OMP_CRITICAL:
    case GIMPLE_OMP_GRID_BODY:
      /* These constructs need a context but no clause scanning.  */
      ctx = new_omp_context (stmt, ctx);
      scan_omp (gimple_omp_body_ptr (stmt), ctx);
      break;

    case GIMPLE_OMP_TASKGROUP:
      ctx = new_omp_context (stmt, ctx);
      scan_sharing_clauses (gimple_omp_taskgroup_clauses (stmt), ctx);
      scan_omp (gimple_omp_body_ptr (stmt), ctx);
      break;

    case GIMPLE_OMP_TARGET:
      scan_omp_target (as_a <gomp_target *> (stmt), ctx);
      break;

    case GIMPLE_OMP_TEAMS:
      /* Host teams behave like a task region for nesting purposes.  */
      if (gimple_omp_teams_host (as_a <gomp_teams *> (stmt)))
	{
	  taskreg_nesting_level++;
	  scan_omp_teams (as_a <gomp_teams *> (stmt), ctx);
	  taskreg_nesting_level--;
	}
      else
	scan_omp_teams (as_a <gomp_teams *> (stmt), ctx);
      break;

    case GIMPLE_BIND:
      {
	tree var;

	/* Let the walker descend into the bind body; just record an
	   identity mapping for each bind-local variable.  */
	*handled_ops_p = false;
	if (ctx)
	  for (var = gimple_bind_vars (as_a <gbind *> (stmt));
	       var ;
	       var = DECL_CHAIN (var))
	    insert_decl_map (&ctx->cb, var, var);
      }
      break;
    default:
      *handled_ops_p = false;
      break;
    }

  return NULL_TREE;
}
3352
3353
3354 /* Scan all the statements starting at the current statement. CTX
3355 contains context information about the OMP directives and
3356 clauses found during the scan. */
3357
3358 static void
3359 scan_omp (gimple_seq *body_p, omp_context *ctx)
3360 {
3361 location_t saved_location;
3362 struct walk_stmt_info wi;
3363
3364 memset (&wi, 0, sizeof (wi));
3365 wi.info = ctx;
3366 wi.want_locations = true;
3367
3368 saved_location = input_location;
3369 walk_gimple_seq_mod (body_p, scan_omp_1_stmt, scan_omp_1_op, &wi);
3370 input_location = saved_location;
3371 }
3372 \f
3373 /* Re-gimplification and code generation routines. */
3374
3375 /* Remove omp_member_access_dummy_var variables from gimple_bind_vars
3376 of BIND if in a method. */
3377
3378 static void
3379 maybe_remove_omp_member_access_dummy_vars (gbind *bind)
3380 {
3381 if (DECL_ARGUMENTS (current_function_decl)
3382 && DECL_ARTIFICIAL (DECL_ARGUMENTS (current_function_decl))
3383 && (TREE_CODE (TREE_TYPE (DECL_ARGUMENTS (current_function_decl)))
3384 == POINTER_TYPE))
3385 {
3386 tree vars = gimple_bind_vars (bind);
3387 for (tree *pvar = &vars; *pvar; )
3388 if (omp_member_access_dummy_var (*pvar))
3389 *pvar = DECL_CHAIN (*pvar);
3390 else
3391 pvar = &DECL_CHAIN (*pvar);
3392 gimple_bind_set_vars (bind, vars);
3393 }
3394 }
3395
3396 /* Remove omp_member_access_dummy_var variables from BLOCK_VARS of
3397 block and its subblocks. */
3398
3399 static void
3400 remove_member_access_dummy_vars (tree block)
3401 {
3402 for (tree *pvar = &BLOCK_VARS (block); *pvar; )
3403 if (omp_member_access_dummy_var (*pvar))
3404 *pvar = DECL_CHAIN (*pvar);
3405 else
3406 pvar = &DECL_CHAIN (*pvar);
3407
3408 for (block = BLOCK_SUBBLOCKS (block); block; block = BLOCK_CHAIN (block))
3409 remove_member_access_dummy_vars (block);
3410 }
3411
3412 /* If a context was created for STMT when it was scanned, return it. */
3413
3414 static omp_context *
3415 maybe_lookup_ctx (gimple *stmt)
3416 {
3417 splay_tree_node n;
3418 n = splay_tree_lookup (all_contexts, (splay_tree_key) stmt);
3419 return n ? (omp_context *) n->value : NULL;
3420 }
3421
3422
3423 /* Find the mapping for DECL in CTX or the immediately enclosing
3424 context that has a mapping for DECL.
3425
3426 If CTX is a nested parallel directive, we may have to use the decl
3427 mappings created in CTX's parent context. Suppose that we have the
3428 following parallel nesting (variable UIDs showed for clarity):
3429
3430 iD.1562 = 0;
3431 #omp parallel shared(iD.1562) -> outer parallel
3432 iD.1562 = iD.1562 + 1;
3433
3434 #omp parallel shared (iD.1562) -> inner parallel
3435 iD.1562 = iD.1562 - 1;
3436
3437 Each parallel structure will create a distinct .omp_data_s structure
3438 for copying iD.1562 in/out of the directive:
3439
3440 outer parallel .omp_data_s.1.i -> iD.1562
3441 inner parallel .omp_data_s.2.i -> iD.1562
3442
3443 A shared variable mapping will produce a copy-out operation before
3444 the parallel directive and a copy-in operation after it. So, in
3445 this case we would have:
3446
3447 iD.1562 = 0;
3448 .omp_data_o.1.i = iD.1562;
3449 #omp parallel shared(iD.1562) -> outer parallel
3450 .omp_data_i.1 = &.omp_data_o.1
3451 .omp_data_i.1->i = .omp_data_i.1->i + 1;
3452
3453 .omp_data_o.2.i = iD.1562; -> **
3454 #omp parallel shared(iD.1562) -> inner parallel
3455 .omp_data_i.2 = &.omp_data_o.2
3456 .omp_data_i.2->i = .omp_data_i.2->i - 1;
3457
3458
3459 ** This is a problem. The symbol iD.1562 cannot be referenced
3460 inside the body of the outer parallel region. But since we are
3461 emitting this copy operation while expanding the inner parallel
3462 directive, we need to access the CTX structure of the outer
3463 parallel directive to get the correct mapping:
3464
3465 .omp_data_o.2.i = .omp_data_i.1->i
3466
3467 Since there may be other workshare or parallel directives enclosing
3468 the parallel directive, it may be necessary to walk up the context
3469 parent chain. This is not a problem in general because nested
3470 parallelism happens only rarely. */
3471
3472 static tree
3473 lookup_decl_in_outer_ctx (tree decl, omp_context *ctx)
3474 {
3475 tree t;
3476 omp_context *up;
3477
3478 for (up = ctx->outer, t = NULL; up && t == NULL; up = up->outer)
3479 t = maybe_lookup_decl (decl, up);
3480
3481 gcc_assert (!ctx->is_nested || t || is_global_var (decl));
3482
3483 return t ? t : decl;
3484 }
3485
3486
3487 /* Similar to lookup_decl_in_outer_ctx, but return DECL if not found
3488 in outer contexts. */
3489
3490 static tree
3491 maybe_lookup_decl_in_outer_ctx (tree decl, omp_context *ctx)
3492 {
3493 tree t = NULL;
3494 omp_context *up;
3495
3496 for (up = ctx->outer, t = NULL; up && t == NULL; up = up->outer)
3497 t = maybe_lookup_decl (decl, up);
3498
3499 return t ? t : decl;
3500 }
3501
3502
3503 /* Construct the initialization value for reduction operation OP. */
3504
tree
omp_reduction_init_op (location_t loc, enum tree_code op, tree type)
{
  switch (op)
    {
    /* Zero is the identity for these operations.  */
    case PLUS_EXPR:
    case MINUS_EXPR:
    case BIT_IOR_EXPR:
    case BIT_XOR_EXPR:
    case TRUTH_OR_EXPR:
    case TRUTH_ORIF_EXPR:
    case TRUTH_XOR_EXPR:
    case NE_EXPR:
      return build_zero_cst (type);

    /* One is the identity for these operations.  */
    case MULT_EXPR:
    case TRUTH_AND_EXPR:
    case TRUTH_ANDIF_EXPR:
    case EQ_EXPR:
      return fold_convert_loc (loc, type, integer_one_node);

    /* All-bits-set (-1) is the identity for bitwise AND.  */
    case BIT_AND_EXPR:
      return fold_convert_loc (loc, type, integer_minus_one_node);

    /* For MAX the identity is the smallest representable value: -inf
       for floats when infinities are honored, otherwise the most
       negative finite value; TYPE_MIN_VALUE for integral types.  */
    case MAX_EXPR:
      if (SCALAR_FLOAT_TYPE_P (type))
	{
	  REAL_VALUE_TYPE max, min;
	  if (HONOR_INFINITIES (type))
	    {
	      real_inf (&max);
	      real_arithmetic (&min, NEGATE_EXPR, &max, NULL);
	    }
	  else
	    real_maxval (&min, 1, TYPE_MODE (type));
	  return build_real (type, min);
	}
      else if (POINTER_TYPE_P (type))
	{
	  wide_int min
	    = wi::min_value (TYPE_PRECISION (type), TYPE_SIGN (type));
	  return wide_int_to_tree (type, min);
	}
      else
	{
	  gcc_assert (INTEGRAL_TYPE_P (type));
	  return TYPE_MIN_VALUE (type);
	}

    /* For MIN the identity is the largest representable value,
       symmetrically to the MAX case above.  */
    case MIN_EXPR:
      if (SCALAR_FLOAT_TYPE_P (type))
	{
	  REAL_VALUE_TYPE max;
	  if (HONOR_INFINITIES (type))
	    real_inf (&max);
	  else
	    real_maxval (&max, 0, TYPE_MODE (type));
	  return build_real (type, max);
	}
      else if (POINTER_TYPE_P (type))
	{
	  wide_int max
	    = wi::max_value (TYPE_PRECISION (type), TYPE_SIGN (type));
	  return wide_int_to_tree (type, max);
	}
      else
	{
	  gcc_assert (INTEGRAL_TYPE_P (type));
	  return TYPE_MAX_VALUE (type);
	}

    default:
      gcc_unreachable ();
    }
}
3580
3581 /* Construct the initialization value for reduction CLAUSE. */
3582
3583 tree
3584 omp_reduction_init (tree clause, tree type)
3585 {
3586 return omp_reduction_init_op (OMP_CLAUSE_LOCATION (clause),
3587 OMP_CLAUSE_REDUCTION_CODE (clause), type);
3588 }
3589
3590 /* Return alignment to be assumed for var in CLAUSE, which should be
3591 OMP_CLAUSE_ALIGNED. */
3592
static tree
omp_clause_aligned_alignment (tree clause)
{
  /* If the clause spells out an alignment, use it.  */
  if (OMP_CLAUSE_ALIGNED_ALIGNMENT (clause))
    return OMP_CLAUSE_ALIGNED_ALIGNMENT (clause);

  /* Otherwise return implementation defined alignment.  */
  unsigned int al = 1;
  opt_scalar_mode mode_iter;
  /* VS becomes the largest vector size the target can autovectorize
     with; 0 if the target reported none.  */
  auto_vector_sizes sizes;
  targetm.vectorize.autovectorize_vector_sizes (&sizes);
  poly_uint64 vs = 0;
  for (unsigned int i = 0; i < sizes.length (); ++i)
    vs = ordered_max (vs, sizes[i]);
  /* classes[] pairs each scalar mode class with its vector counterpart;
     the loop below steps by 2 so MODE_ITER only walks the scalar
     classes, while classes[i + 1] names the expected vector class.  */
  static enum mode_class classes[]
    = { MODE_INT, MODE_VECTOR_INT, MODE_FLOAT, MODE_VECTOR_FLOAT };
  for (int i = 0; i < 4; i += 2)
    FOR_EACH_MODE_IN_CLASS (mode_iter, classes[i])
      {
	scalar_mode mode = mode_iter.require ();
	machine_mode vmode = targetm.vectorize.preferred_simd_mode (mode);
	if (GET_MODE_CLASS (vmode) != classes[i + 1])
	  continue;
	/* Widen VMODE until it reaches the target's maximum vector
	   size VS (when VS is known).  */
	while (maybe_ne (vs, 0U)
	       && known_lt (GET_MODE_SIZE (vmode), vs)
	       && GET_MODE_2XWIDER_MODE (vmode).exists ())
	  vmode = GET_MODE_2XWIDER_MODE (vmode).require ();

	/* Build the corresponding vector type and record its alignment
	   if it beats what we have so far.  */
	tree type = lang_hooks.types.type_for_mode (mode, 1);
	if (type == NULL_TREE || TYPE_MODE (type) != mode)
	  continue;
	poly_uint64 nelts = exact_div (GET_MODE_SIZE (vmode),
				       GET_MODE_SIZE (mode));
	type = build_vector_type (type, nelts);
	if (TYPE_MODE (type) != vmode)
	  continue;
	if (TYPE_ALIGN_UNIT (type) > al)
	  al = TYPE_ALIGN_UNIT (type);
      }
  return build_int_cst (integer_type_node, al);
}
3635
3636
3637 /* This structure is part of the interface between lower_rec_simd_input_clauses
3638 and lower_rec_input_clauses. */
3639
struct omplow_simd_context {
  omplow_simd_context () { memset (this, 0, sizeof (*this)); }
  /* Per-iteration index into the "omp simd array" temporaries.  */
  tree idx;
  /* Lane variable used for the DECL_VALUE_EXPR of privatized vars.  */
  tree lane;
  /* Extra arguments collected for the SIMT entry (addresses of SIMT
     private variables; a simduid placeholder is pushed first).  */
  vec<tree, va_heap> simt_eargs;
  /* Statements clobbering SIMT privatized variables at the end.  */
  gimple_seq simt_dlist;
  /* Maximum vectorization factor; 0 = not yet computed, 1 = no simd
     privatization possible.  */
  poly_uint64_pod max_vf;
  /* True when lowering for SIMT rather than regular simd.  */
  bool is_simt;
};
3649
3650 /* Helper function of lower_rec_input_clauses, used for #pragma omp simd
3651 privatization. */
3652
static bool
lower_rec_simd_input_clauses (tree new_var, omp_context *ctx,
			      omplow_simd_context *sctx, tree &ivar, tree &lvar)
{
  /* Compute max_vf lazily on the first call; a safelen clause caps it.  */
  if (known_eq (sctx->max_vf, 0U))
    {
      sctx->max_vf = sctx->is_simt ? omp_max_simt_vf () : omp_max_vf ();
      if (maybe_gt (sctx->max_vf, 1U))
	{
	  tree c = omp_find_clause (gimple_omp_for_clauses (ctx->stmt),
				    OMP_CLAUSE_SAFELEN);
	  if (c)
	    {
	      poly_uint64 safe_len;
	      /* A non-constant or sub-1 safelen disables privatization.  */
	      if (!poly_int_tree_p (OMP_CLAUSE_SAFELEN_EXPR (c), &safe_len)
		  || maybe_lt (safe_len, 1U))
		sctx->max_vf = 1;
	      else
		sctx->max_vf = lower_bound (sctx->max_vf, safe_len);
	    }
	}
      if (maybe_gt (sctx->max_vf, 1U))
	{
	  /* Shared index and lane variables used by all privatized vars.  */
	  sctx->idx = create_tmp_var (unsigned_type_node);
	  sctx->lane = create_tmp_var (unsigned_type_node);
	}
    }
  if (known_eq (sctx->max_vf, 1U))
    return false;

  if (sctx->is_simt)
    {
      /* For SIMT, registers need no special handling; addressable vars
	 become "omp simt private" temporaries whose address is passed
	 to the SIMT entry, clobbered again at the end.  */
      if (is_gimple_reg (new_var))
	{
	  ivar = lvar = new_var;
	  return true;
	}
      tree type = TREE_TYPE (new_var), ptype = build_pointer_type (type);
      ivar = lvar = create_tmp_var (type);
      TREE_ADDRESSABLE (ivar) = 1;
      DECL_ATTRIBUTES (ivar) = tree_cons (get_identifier ("omp simt private"),
					  NULL, DECL_ATTRIBUTES (ivar));
      sctx->simt_eargs.safe_push (build1 (ADDR_EXPR, ptype, ivar));
      tree clobber = build_constructor (type, NULL);
      TREE_THIS_VOLATILE (clobber) = 1;
      gimple *g = gimple_build_assign (ivar, clobber);
      gimple_seq_add_stmt (&sctx->simt_dlist, g);
    }
  else
    {
      /* For simd, privatize into a max_vf-element "omp simd array";
	 IVAR indexes it with the iteration index, LVAR with the lane.  */
      tree atype = build_array_type_nelts (TREE_TYPE (new_var), sctx->max_vf);
      tree avar = create_tmp_var_raw (atype);
      if (TREE_ADDRESSABLE (new_var))
	TREE_ADDRESSABLE (avar) = 1;
      DECL_ATTRIBUTES (avar)
	= tree_cons (get_identifier ("omp simd array"), NULL,
		     DECL_ATTRIBUTES (avar));
      gimple_add_tmp_var (avar);
      ivar = build4 (ARRAY_REF, TREE_TYPE (new_var), avar, sctx->idx,
		     NULL_TREE, NULL_TREE);
      lvar = build4 (ARRAY_REF, TREE_TYPE (new_var), avar, sctx->lane,
		     NULL_TREE, NULL_TREE);
    }
  /* References to NEW_VAR elsewhere are redirected to the lane slot.  */
  if (DECL_P (new_var))
    {
      SET_DECL_VALUE_EXPR (new_var, lvar);
      DECL_HAS_VALUE_EXPR_P (new_var) = 1;
    }
  return true;
}
3723
3724 /* Helper function of lower_rec_input_clauses. For a reference
3725 in simd reduction, add an underlying variable it will reference. */
3726
3727 static void
3728 handle_simd_reference (location_t loc, tree new_vard, gimple_seq *ilist)
3729 {
3730 tree z = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (new_vard)));
3731 if (TREE_CONSTANT (z))
3732 {
3733 z = create_tmp_var_raw (TREE_TYPE (TREE_TYPE (new_vard)),
3734 get_name (new_vard));
3735 gimple_add_tmp_var (z);
3736 TREE_ADDRESSABLE (z) = 1;
3737 z = build_fold_addr_expr_loc (loc, z);
3738 gimplify_assign (new_vard, z, ilist);
3739 }
3740 }
3741
/* Helper function for lower_rec_input_clauses.  Emit code into the ILIST
   sequence that loads (type) (tskred_temp[idx]) and return the result.  */
3744
3745 static tree
3746 task_reduction_read (gimple_seq *ilist, tree tskred_temp, tree type,
3747 unsigned idx)
3748 {
3749 unsigned HOST_WIDE_INT sz
3750 = tree_to_uhwi (TYPE_SIZE_UNIT (pointer_sized_int_node));
3751 tree r = build2 (MEM_REF, pointer_sized_int_node,
3752 tskred_temp, build_int_cst (TREE_TYPE (tskred_temp),
3753 idx * sz));
3754 tree v = create_tmp_var (pointer_sized_int_node);
3755 gimple *g = gimple_build_assign (v, r);
3756 gimple_seq_add_stmt (ilist, g);
3757 if (!useless_type_conversion_p (type, pointer_sized_int_node))
3758 {
3759 v = create_tmp_var (type);
3760 g = gimple_build_assign (v, NOP_EXPR, gimple_assign_lhs (g));
3761 gimple_seq_add_stmt (ilist, g);
3762 }
3763 return v;
3764 }
3765
3766 /* Generate code to implement the input clauses, FIRSTPRIVATE and COPYIN,
3767 from the receiver (aka child) side and initializers for REFERENCE_TYPE
3768 private variables. Initialization statements go in ILIST, while calls
3769 to destructors go in DLIST. */
3770
3771 static void
3772 lower_rec_input_clauses (tree clauses, gimple_seq *ilist, gimple_seq *dlist,
3773 omp_context *ctx, struct omp_for_data *fd)
3774 {
3775 tree c, dtor, copyin_seq, x, ptr;
3776 bool copyin_by_ref = false;
3777 bool lastprivate_firstprivate = false;
3778 bool reduction_omp_orig_ref = false;
3779 int pass;
3780 bool is_simd = (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
3781 && gimple_omp_for_kind (ctx->stmt) & GF_OMP_FOR_SIMD);
3782 omplow_simd_context sctx = omplow_simd_context ();
3783 tree simt_lane = NULL_TREE, simtrec = NULL_TREE;
3784 tree ivar = NULL_TREE, lvar = NULL_TREE, uid = NULL_TREE;
3785 gimple_seq llist[3] = { };
3786
3787 copyin_seq = NULL;
3788 sctx.is_simt = is_simd && omp_find_clause (clauses, OMP_CLAUSE__SIMT_);
3789
3790 /* Set max_vf=1 (which will later enforce safelen=1) in simd loops
3791 with data sharing clauses referencing variable sized vars. That
3792 is unnecessarily hard to support and very unlikely to result in
3793 vectorized code anyway. */
3794 if (is_simd)
3795 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
3796 switch (OMP_CLAUSE_CODE (c))
3797 {
3798 case OMP_CLAUSE_LINEAR:
3799 if (OMP_CLAUSE_LINEAR_ARRAY (c))
3800 sctx.max_vf = 1;
3801 /* FALLTHRU */
3802 case OMP_CLAUSE_PRIVATE:
3803 case OMP_CLAUSE_FIRSTPRIVATE:
3804 case OMP_CLAUSE_LASTPRIVATE:
3805 if (is_variable_sized (OMP_CLAUSE_DECL (c)))
3806 sctx.max_vf = 1;
3807 break;
3808 case OMP_CLAUSE_REDUCTION:
3809 case OMP_CLAUSE_IN_REDUCTION:
3810 if (TREE_CODE (OMP_CLAUSE_DECL (c)) == MEM_REF
3811 || is_variable_sized (OMP_CLAUSE_DECL (c)))
3812 sctx.max_vf = 1;
3813 break;
3814 default:
3815 continue;
3816 }
3817
3818 /* Add a placeholder for simduid. */
3819 if (sctx.is_simt && maybe_ne (sctx.max_vf, 1U))
3820 sctx.simt_eargs.safe_push (NULL_TREE);
3821
3822 unsigned task_reduction_cnt = 0;
3823 unsigned task_reduction_cntorig = 0;
3824 unsigned task_reduction_cnt_full = 0;
3825 unsigned task_reduction_cntorig_full = 0;
3826 unsigned task_reduction_other_cnt = 0;
3827 tree tskred_atype = NULL_TREE, tskred_avar = NULL_TREE;
3828 tree tskred_base = NULL_TREE, tskred_temp = NULL_TREE;
3829 /* Do all the fixed sized types in the first pass, and the variable sized
3830 types in the second pass. This makes sure that the scalar arguments to
3831 the variable sized types are processed before we use them in the
3832 variable sized operations. For task reductions we use 4 passes, in the
3833 first two we ignore them, in the third one gather arguments for
3834 GOMP_task_reduction_remap call and in the last pass actually handle
3835 the task reductions. */
3836 for (pass = 0; pass < ((task_reduction_cnt || task_reduction_other_cnt)
3837 ? 4 : 2); ++pass)
3838 {
3839 if (pass == 2 && task_reduction_cnt)
3840 {
3841 tskred_atype
3842 = build_array_type_nelts (ptr_type_node, task_reduction_cnt
3843 + task_reduction_cntorig);
3844 tskred_avar = create_tmp_var_raw (tskred_atype);
3845 gimple_add_tmp_var (tskred_avar);
3846 TREE_ADDRESSABLE (tskred_avar) = 1;
3847 task_reduction_cnt_full = task_reduction_cnt;
3848 task_reduction_cntorig_full = task_reduction_cntorig;
3849 }
3850 else if (pass == 3 && task_reduction_cnt)
3851 {
3852 x = builtin_decl_explicit (BUILT_IN_GOMP_TASK_REDUCTION_REMAP);
3853 gimple *g
3854 = gimple_build_call (x, 3, size_int (task_reduction_cnt),
3855 size_int (task_reduction_cntorig),
3856 build_fold_addr_expr (tskred_avar));
3857 gimple_seq_add_stmt (ilist, g);
3858 }
3859 if (pass == 3 && task_reduction_other_cnt)
3860 {
3861 /* For reduction clauses, build
3862 tskred_base = (void *) tskred_temp[2]
3863 + omp_get_thread_num () * tskred_temp[1]
3864 or if tskred_temp[1] is known to be constant, that constant
3865 directly. This is the start of the private reduction copy block
3866 for the current thread. */
3867 tree v = create_tmp_var (integer_type_node);
3868 x = builtin_decl_explicit (BUILT_IN_OMP_GET_THREAD_NUM);
3869 gimple *g = gimple_build_call (x, 0);
3870 gimple_call_set_lhs (g, v);
3871 gimple_seq_add_stmt (ilist, g);
3872 c = omp_find_clause (clauses, OMP_CLAUSE__REDUCTEMP_);
3873 tskred_temp = OMP_CLAUSE_DECL (c);
3874 if (is_taskreg_ctx (ctx))
3875 tskred_temp = lookup_decl (tskred_temp, ctx);
3876 tree v2 = create_tmp_var (sizetype);
3877 g = gimple_build_assign (v2, NOP_EXPR, v);
3878 gimple_seq_add_stmt (ilist, g);
3879 if (ctx->task_reductions[0])
3880 v = fold_convert (sizetype, ctx->task_reductions[0]);
3881 else
3882 v = task_reduction_read (ilist, tskred_temp, sizetype, 1);
3883 tree v3 = create_tmp_var (sizetype);
3884 g = gimple_build_assign (v3, MULT_EXPR, v2, v);
3885 gimple_seq_add_stmt (ilist, g);
3886 v = task_reduction_read (ilist, tskred_temp, ptr_type_node, 2);
3887 tskred_base = create_tmp_var (ptr_type_node);
3888 g = gimple_build_assign (tskred_base, POINTER_PLUS_EXPR, v, v3);
3889 gimple_seq_add_stmt (ilist, g);
3890 }
3891 task_reduction_cnt = 0;
3892 task_reduction_cntorig = 0;
3893 task_reduction_other_cnt = 0;
3894 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
3895 {
3896 enum omp_clause_code c_kind = OMP_CLAUSE_CODE (c);
3897 tree var, new_var;
3898 bool by_ref;
3899 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
3900 bool task_reduction_p = false;
3901 bool task_reduction_needs_orig_p = false;
3902 tree cond = NULL_TREE;
3903
3904 switch (c_kind)
3905 {
3906 case OMP_CLAUSE_PRIVATE:
3907 if (OMP_CLAUSE_PRIVATE_DEBUG (c))
3908 continue;
3909 break;
3910 case OMP_CLAUSE_SHARED:
3911 /* Ignore shared directives in teams construct inside
3912 of target construct. */
3913 if (gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS
3914 && !is_host_teams_ctx (ctx))
3915 continue;
3916 if (maybe_lookup_decl (OMP_CLAUSE_DECL (c), ctx) == NULL)
3917 {
3918 gcc_assert (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c)
3919 || is_global_var (OMP_CLAUSE_DECL (c)));
3920 continue;
3921 }
3922 case OMP_CLAUSE_FIRSTPRIVATE:
3923 case OMP_CLAUSE_COPYIN:
3924 break;
3925 case OMP_CLAUSE_LINEAR:
3926 if (!OMP_CLAUSE_LINEAR_NO_COPYIN (c)
3927 && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c))
3928 lastprivate_firstprivate = true;
3929 break;
3930 case OMP_CLAUSE_REDUCTION:
3931 case OMP_CLAUSE_IN_REDUCTION:
3932 if (is_task_ctx (ctx) || OMP_CLAUSE_REDUCTION_TASK (c))
3933 {
3934 task_reduction_p = true;
3935 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION)
3936 {
3937 task_reduction_other_cnt++;
3938 if (pass == 2)
3939 continue;
3940 }
3941 else
3942 task_reduction_cnt++;
3943 if (OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c))
3944 {
3945 var = OMP_CLAUSE_DECL (c);
3946 /* If var is a global variable that isn't privatized
3947 in outer contexts, we don't need to look up the
3948 original address, it is always the address of the
3949 global variable itself. */
3950 if (!DECL_P (var)
3951 || omp_is_reference (var)
3952 || !is_global_var
3953 (maybe_lookup_decl_in_outer_ctx (var, ctx)))
3954 {
3955 task_reduction_needs_orig_p = true;
3956 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION)
3957 task_reduction_cntorig++;
3958 }
3959 }
3960 }
3961 else if (OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c))
3962 reduction_omp_orig_ref = true;
3963 break;
3964 case OMP_CLAUSE__REDUCTEMP_:
3965 if (!is_taskreg_ctx (ctx))
3966 continue;
3967 /* FALLTHRU */
3968 case OMP_CLAUSE__LOOPTEMP_:
3969 /* Handle _looptemp_/_reductemp_ clauses only on
3970 parallel/task. */
3971 if (fd)
3972 continue;
3973 break;
3974 case OMP_CLAUSE_LASTPRIVATE:
3975 if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
3976 {
3977 lastprivate_firstprivate = true;
3978 if (pass != 0 || is_taskloop_ctx (ctx))
3979 continue;
3980 }
3981 /* Even without corresponding firstprivate, if
3982 decl is Fortran allocatable, it needs outer var
3983 reference. */
3984 else if (pass == 0
3985 && lang_hooks.decls.omp_private_outer_ref
3986 (OMP_CLAUSE_DECL (c)))
3987 lastprivate_firstprivate = true;
3988 break;
3989 case OMP_CLAUSE_ALIGNED:
3990 if (pass != 1)
3991 continue;
3992 var = OMP_CLAUSE_DECL (c);
3993 if (TREE_CODE (TREE_TYPE (var)) == POINTER_TYPE
3994 && !is_global_var (var))
3995 {
3996 new_var = maybe_lookup_decl (var, ctx);
3997 if (new_var == NULL_TREE)
3998 new_var = maybe_lookup_decl_in_outer_ctx (var, ctx);
3999 x = builtin_decl_explicit (BUILT_IN_ASSUME_ALIGNED);
4000 tree alarg = omp_clause_aligned_alignment (c);
4001 alarg = fold_convert_loc (clause_loc, size_type_node, alarg);
4002 x = build_call_expr_loc (clause_loc, x, 2, new_var, alarg);
4003 x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x);
4004 x = build2 (MODIFY_EXPR, TREE_TYPE (new_var), new_var, x);
4005 gimplify_and_add (x, ilist);
4006 }
4007 else if (TREE_CODE (TREE_TYPE (var)) == ARRAY_TYPE
4008 && is_global_var (var))
4009 {
4010 tree ptype = build_pointer_type (TREE_TYPE (var)), t, t2;
4011 new_var = lookup_decl (var, ctx);
4012 t = maybe_lookup_decl_in_outer_ctx (var, ctx);
4013 t = build_fold_addr_expr_loc (clause_loc, t);
4014 t2 = builtin_decl_explicit (BUILT_IN_ASSUME_ALIGNED);
4015 tree alarg = omp_clause_aligned_alignment (c);
4016 alarg = fold_convert_loc (clause_loc, size_type_node, alarg);
4017 t = build_call_expr_loc (clause_loc, t2, 2, t, alarg);
4018 t = fold_convert_loc (clause_loc, ptype, t);
4019 x = create_tmp_var (ptype);
4020 t = build2 (MODIFY_EXPR, ptype, x, t);
4021 gimplify_and_add (t, ilist);
4022 t = build_simple_mem_ref_loc (clause_loc, x);
4023 SET_DECL_VALUE_EXPR (new_var, t);
4024 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
4025 }
4026 continue;
4027 default:
4028 continue;
4029 }
4030
4031 if (task_reduction_p != (pass >= 2))
4032 continue;
4033
4034 new_var = var = OMP_CLAUSE_DECL (c);
4035 if ((c_kind == OMP_CLAUSE_REDUCTION
4036 || c_kind == OMP_CLAUSE_IN_REDUCTION)
4037 && TREE_CODE (var) == MEM_REF)
4038 {
4039 var = TREE_OPERAND (var, 0);
4040 if (TREE_CODE (var) == POINTER_PLUS_EXPR)
4041 var = TREE_OPERAND (var, 0);
4042 if (TREE_CODE (var) == INDIRECT_REF
4043 || TREE_CODE (var) == ADDR_EXPR)
4044 var = TREE_OPERAND (var, 0);
4045 if (is_variable_sized (var))
4046 {
4047 gcc_assert (DECL_HAS_VALUE_EXPR_P (var));
4048 var = DECL_VALUE_EXPR (var);
4049 gcc_assert (TREE_CODE (var) == INDIRECT_REF);
4050 var = TREE_OPERAND (var, 0);
4051 gcc_assert (DECL_P (var));
4052 }
4053 new_var = var;
4054 }
4055 if (c_kind != OMP_CLAUSE_COPYIN)
4056 new_var = lookup_decl (var, ctx);
4057
4058 if (c_kind == OMP_CLAUSE_SHARED || c_kind == OMP_CLAUSE_COPYIN)
4059 {
4060 if (pass != 0)
4061 continue;
4062 }
4063 /* C/C++ array section reductions. */
4064 else if ((c_kind == OMP_CLAUSE_REDUCTION
4065 || c_kind == OMP_CLAUSE_IN_REDUCTION)
4066 && var != OMP_CLAUSE_DECL (c))
4067 {
4068 if (pass == 0)
4069 continue;
4070
4071 tree bias = TREE_OPERAND (OMP_CLAUSE_DECL (c), 1);
4072 tree orig_var = TREE_OPERAND (OMP_CLAUSE_DECL (c), 0);
4073
4074 if (TREE_CODE (orig_var) == POINTER_PLUS_EXPR)
4075 {
4076 tree b = TREE_OPERAND (orig_var, 1);
4077 b = maybe_lookup_decl (b, ctx);
4078 if (b == NULL)
4079 {
4080 b = TREE_OPERAND (orig_var, 1);
4081 b = maybe_lookup_decl_in_outer_ctx (b, ctx);
4082 }
4083 if (integer_zerop (bias))
4084 bias = b;
4085 else
4086 {
4087 bias = fold_convert_loc (clause_loc,
4088 TREE_TYPE (b), bias);
4089 bias = fold_build2_loc (clause_loc, PLUS_EXPR,
4090 TREE_TYPE (b), b, bias);
4091 }
4092 orig_var = TREE_OPERAND (orig_var, 0);
4093 }
4094 if (pass == 2)
4095 {
4096 tree out = maybe_lookup_decl_in_outer_ctx (var, ctx);
4097 if (is_global_var (out)
4098 && TREE_CODE (TREE_TYPE (out)) != POINTER_TYPE
4099 && (TREE_CODE (TREE_TYPE (out)) != REFERENCE_TYPE
4100 || (TREE_CODE (TREE_TYPE (TREE_TYPE (out)))
4101 != POINTER_TYPE)))
4102 x = var;
4103 else
4104 {
4105 bool by_ref = use_pointer_for_field (var, NULL);
4106 x = build_receiver_ref (var, by_ref, ctx);
4107 if (TREE_CODE (TREE_TYPE (var)) == REFERENCE_TYPE
4108 && (TREE_CODE (TREE_TYPE (TREE_TYPE (var)))
4109 == POINTER_TYPE))
4110 x = build_fold_addr_expr (x);
4111 }
4112 if (TREE_CODE (orig_var) == INDIRECT_REF)
4113 x = build_simple_mem_ref (x);
4114 else if (TREE_CODE (orig_var) == ADDR_EXPR)
4115 {
4116 if (var == TREE_OPERAND (orig_var, 0))
4117 x = build_fold_addr_expr (x);
4118 }
4119 bias = fold_convert (sizetype, bias);
4120 x = fold_convert (ptr_type_node, x);
4121 x = fold_build2_loc (clause_loc, POINTER_PLUS_EXPR,
4122 TREE_TYPE (x), x, bias);
4123 unsigned cnt = task_reduction_cnt - 1;
4124 if (!task_reduction_needs_orig_p)
4125 cnt += (task_reduction_cntorig_full
4126 - task_reduction_cntorig);
4127 else
4128 cnt = task_reduction_cntorig - 1;
4129 tree r = build4 (ARRAY_REF, ptr_type_node, tskred_avar,
4130 size_int (cnt), NULL_TREE, NULL_TREE);
4131 gimplify_assign (r, x, ilist);
4132 continue;
4133 }
4134
4135 if (TREE_CODE (orig_var) == INDIRECT_REF
4136 || TREE_CODE (orig_var) == ADDR_EXPR)
4137 orig_var = TREE_OPERAND (orig_var, 0);
4138 tree d = OMP_CLAUSE_DECL (c);
4139 tree type = TREE_TYPE (d);
4140 gcc_assert (TREE_CODE (type) == ARRAY_TYPE);
4141 tree v = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
4142 const char *name = get_name (orig_var);
4143 if (pass == 3)
4144 {
4145 tree xv = create_tmp_var (ptr_type_node);
4146 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION)
4147 {
4148 unsigned cnt = task_reduction_cnt - 1;
4149 if (!task_reduction_needs_orig_p)
4150 cnt += (task_reduction_cntorig_full
4151 - task_reduction_cntorig);
4152 else
4153 cnt = task_reduction_cntorig - 1;
4154 x = build4 (ARRAY_REF, ptr_type_node, tskred_avar,
4155 size_int (cnt), NULL_TREE, NULL_TREE);
4156
4157 gimple *g = gimple_build_assign (xv, x);
4158 gimple_seq_add_stmt (ilist, g);
4159 }
4160 else
4161 {
4162 unsigned int idx = *ctx->task_reduction_map->get (c);
4163 tree off;
4164 if (ctx->task_reductions[1 + idx])
4165 off = fold_convert (sizetype,
4166 ctx->task_reductions[1 + idx]);
4167 else
4168 off = task_reduction_read (ilist, tskred_temp, sizetype,
4169 7 + 3 * idx + 1);
4170 gimple *g = gimple_build_assign (xv, POINTER_PLUS_EXPR,
4171 tskred_base, off);
4172 gimple_seq_add_stmt (ilist, g);
4173 }
4174 x = fold_convert (build_pointer_type (boolean_type_node),
4175 xv);
4176 if (TREE_CONSTANT (v))
4177 x = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (x), x,
4178 TYPE_SIZE_UNIT (type));
4179 else
4180 {
4181 tree t = maybe_lookup_decl (v, ctx);
4182 if (t)
4183 v = t;
4184 else
4185 v = maybe_lookup_decl_in_outer_ctx (v, ctx);
4186 gimplify_expr (&v, ilist, NULL, is_gimple_val,
4187 fb_rvalue);
4188 t = fold_build2_loc (clause_loc, PLUS_EXPR,
4189 TREE_TYPE (v), v,
4190 build_int_cst (TREE_TYPE (v), 1));
4191 t = fold_build2_loc (clause_loc, MULT_EXPR,
4192 TREE_TYPE (v), t,
4193 TYPE_SIZE_UNIT (TREE_TYPE (type)));
4194 x = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (x), x, t);
4195 }
4196 cond = create_tmp_var (TREE_TYPE (x));
4197 gimplify_assign (cond, x, ilist);
4198 x = xv;
4199 }
4200 else if (TREE_CONSTANT (v))
4201 {
4202 x = create_tmp_var_raw (type, name);
4203 gimple_add_tmp_var (x);
4204 TREE_ADDRESSABLE (x) = 1;
4205 x = build_fold_addr_expr_loc (clause_loc, x);
4206 }
4207 else
4208 {
4209 tree atmp
4210 = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
4211 tree t = maybe_lookup_decl (v, ctx);
4212 if (t)
4213 v = t;
4214 else
4215 v = maybe_lookup_decl_in_outer_ctx (v, ctx);
4216 gimplify_expr (&v, ilist, NULL, is_gimple_val, fb_rvalue);
4217 t = fold_build2_loc (clause_loc, PLUS_EXPR,
4218 TREE_TYPE (v), v,
4219 build_int_cst (TREE_TYPE (v), 1));
4220 t = fold_build2_loc (clause_loc, MULT_EXPR,
4221 TREE_TYPE (v), t,
4222 TYPE_SIZE_UNIT (TREE_TYPE (type)));
4223 tree al = size_int (TYPE_ALIGN (TREE_TYPE (type)));
4224 x = build_call_expr_loc (clause_loc, atmp, 2, t, al);
4225 }
4226
4227 tree ptype = build_pointer_type (TREE_TYPE (type));
4228 x = fold_convert_loc (clause_loc, ptype, x);
4229 tree y = create_tmp_var (ptype, name);
4230 gimplify_assign (y, x, ilist);
4231 x = y;
4232 tree yb = y;
4233
4234 if (!integer_zerop (bias))
4235 {
4236 bias = fold_convert_loc (clause_loc, pointer_sized_int_node,
4237 bias);
4238 yb = fold_convert_loc (clause_loc, pointer_sized_int_node,
4239 x);
4240 yb = fold_build2_loc (clause_loc, MINUS_EXPR,
4241 pointer_sized_int_node, yb, bias);
4242 x = fold_convert_loc (clause_loc, TREE_TYPE (x), yb);
4243 yb = create_tmp_var (ptype, name);
4244 gimplify_assign (yb, x, ilist);
4245 x = yb;
4246 }
4247
4248 d = TREE_OPERAND (d, 0);
4249 if (TREE_CODE (d) == POINTER_PLUS_EXPR)
4250 d = TREE_OPERAND (d, 0);
4251 if (TREE_CODE (d) == ADDR_EXPR)
4252 {
4253 if (orig_var != var)
4254 {
4255 gcc_assert (is_variable_sized (orig_var));
4256 x = fold_convert_loc (clause_loc, TREE_TYPE (new_var),
4257 x);
4258 gimplify_assign (new_var, x, ilist);
4259 tree new_orig_var = lookup_decl (orig_var, ctx);
4260 tree t = build_fold_indirect_ref (new_var);
4261 DECL_IGNORED_P (new_var) = 0;
4262 TREE_THIS_NOTRAP (t) = 1;
4263 SET_DECL_VALUE_EXPR (new_orig_var, t);
4264 DECL_HAS_VALUE_EXPR_P (new_orig_var) = 1;
4265 }
4266 else
4267 {
4268 x = build2 (MEM_REF, TREE_TYPE (new_var), x,
4269 build_int_cst (ptype, 0));
4270 SET_DECL_VALUE_EXPR (new_var, x);
4271 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
4272 }
4273 }
4274 else
4275 {
4276 gcc_assert (orig_var == var);
4277 if (TREE_CODE (d) == INDIRECT_REF)
4278 {
4279 x = create_tmp_var (ptype, name);
4280 TREE_ADDRESSABLE (x) = 1;
4281 gimplify_assign (x, yb, ilist);
4282 x = build_fold_addr_expr_loc (clause_loc, x);
4283 }
4284 x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x);
4285 gimplify_assign (new_var, x, ilist);
4286 }
4287 /* GOMP_taskgroup_reduction_register memsets the whole
4288 array to zero. If the initializer is zero, we don't
4289 need to initialize it again, just mark it as ever
4290 used unconditionally, i.e. cond = true. */
4291 if (cond
4292 && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) == NULL_TREE
4293 && initializer_zerop (omp_reduction_init (c,
4294 TREE_TYPE (type))))
4295 {
4296 gimple *g = gimple_build_assign (build_simple_mem_ref (cond),
4297 boolean_true_node);
4298 gimple_seq_add_stmt (ilist, g);
4299 continue;
4300 }
4301 tree end = create_artificial_label (UNKNOWN_LOCATION);
4302 if (cond)
4303 {
4304 gimple *g;
4305 if (!is_parallel_ctx (ctx))
4306 {
4307 tree condv = create_tmp_var (boolean_type_node);
4308 g = gimple_build_assign (condv,
4309 build_simple_mem_ref (cond));
4310 gimple_seq_add_stmt (ilist, g);
4311 tree lab1 = create_artificial_label (UNKNOWN_LOCATION);
4312 g = gimple_build_cond (NE_EXPR, condv,
4313 boolean_false_node, end, lab1);
4314 gimple_seq_add_stmt (ilist, g);
4315 gimple_seq_add_stmt (ilist, gimple_build_label (lab1));
4316 }
4317 g = gimple_build_assign (build_simple_mem_ref (cond),
4318 boolean_true_node);
4319 gimple_seq_add_stmt (ilist, g);
4320 }
4321
4322 tree y1 = create_tmp_var (ptype);
4323 gimplify_assign (y1, y, ilist);
4324 tree i2 = NULL_TREE, y2 = NULL_TREE;
4325 tree body2 = NULL_TREE, end2 = NULL_TREE;
4326 tree y3 = NULL_TREE, y4 = NULL_TREE;
4327 if (task_reduction_needs_orig_p)
4328 {
4329 y3 = create_tmp_var (ptype);
4330 tree ref;
4331 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION)
4332 ref = build4 (ARRAY_REF, ptr_type_node, tskred_avar,
4333 size_int (task_reduction_cnt_full
4334 + task_reduction_cntorig - 1),
4335 NULL_TREE, NULL_TREE);
4336 else
4337 {
4338 unsigned int idx = *ctx->task_reduction_map->get (c);
4339 ref = task_reduction_read (ilist, tskred_temp, ptype,
4340 7 + 3 * idx);
4341 }
4342 gimplify_assign (y3, ref, ilist);
4343 }
4344 else if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) || is_simd)
4345 {
4346 if (pass != 3)
4347 {
4348 y2 = create_tmp_var (ptype);
4349 gimplify_assign (y2, y, ilist);
4350 }
4351 if (is_simd || OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c))
4352 {
4353 tree ref = build_outer_var_ref (var, ctx);
4354 /* For ref build_outer_var_ref already performs this. */
4355 if (TREE_CODE (d) == INDIRECT_REF)
4356 gcc_assert (omp_is_reference (var));
4357 else if (TREE_CODE (d) == ADDR_EXPR)
4358 ref = build_fold_addr_expr (ref);
4359 else if (omp_is_reference (var))
4360 ref = build_fold_addr_expr (ref);
4361 ref = fold_convert_loc (clause_loc, ptype, ref);
4362 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c)
4363 && OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c))
4364 {
4365 y3 = create_tmp_var (ptype);
4366 gimplify_assign (y3, unshare_expr (ref), ilist);
4367 }
4368 if (is_simd)
4369 {
4370 y4 = create_tmp_var (ptype);
4371 gimplify_assign (y4, ref, dlist);
4372 }
4373 }
4374 }
4375 tree i = create_tmp_var (TREE_TYPE (v));
4376 gimplify_assign (i, build_int_cst (TREE_TYPE (v), 0), ilist);
4377 tree body = create_artificial_label (UNKNOWN_LOCATION);
4378 gimple_seq_add_stmt (ilist, gimple_build_label (body));
4379 if (y2)
4380 {
4381 i2 = create_tmp_var (TREE_TYPE (v));
4382 gimplify_assign (i2, build_int_cst (TREE_TYPE (v), 0), dlist);
4383 body2 = create_artificial_label (UNKNOWN_LOCATION);
4384 end2 = create_artificial_label (UNKNOWN_LOCATION);
4385 gimple_seq_add_stmt (dlist, gimple_build_label (body2));
4386 }
4387 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
4388 {
4389 tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
4390 tree decl_placeholder
4391 = OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c);
4392 SET_DECL_VALUE_EXPR (decl_placeholder,
4393 build_simple_mem_ref (y1));
4394 DECL_HAS_VALUE_EXPR_P (decl_placeholder) = 1;
4395 SET_DECL_VALUE_EXPR (placeholder,
4396 y3 ? build_simple_mem_ref (y3)
4397 : error_mark_node);
4398 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
4399 x = lang_hooks.decls.omp_clause_default_ctor
4400 (c, build_simple_mem_ref (y1),
4401 y3 ? build_simple_mem_ref (y3) : NULL_TREE);
4402 if (x)
4403 gimplify_and_add (x, ilist);
4404 if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c))
4405 {
4406 gimple_seq tseq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c);
4407 lower_omp (&tseq, ctx);
4408 gimple_seq_add_seq (ilist, tseq);
4409 }
4410 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL;
4411 if (is_simd)
4412 {
4413 SET_DECL_VALUE_EXPR (decl_placeholder,
4414 build_simple_mem_ref (y2));
4415 SET_DECL_VALUE_EXPR (placeholder,
4416 build_simple_mem_ref (y4));
4417 gimple_seq tseq = OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c);
4418 lower_omp (&tseq, ctx);
4419 gimple_seq_add_seq (dlist, tseq);
4420 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
4421 }
4422 DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
4423 DECL_HAS_VALUE_EXPR_P (decl_placeholder) = 0;
4424 if (y2)
4425 {
4426 x = lang_hooks.decls.omp_clause_dtor
4427 (c, build_simple_mem_ref (y2));
4428 if (x)
4429 {
4430 gimple_seq tseq = NULL;
4431 dtor = x;
4432 gimplify_stmt (&dtor, &tseq);
4433 gimple_seq_add_seq (dlist, tseq);
4434 }
4435 }
4436 }
4437 else
4438 {
4439 x = omp_reduction_init (c, TREE_TYPE (type));
4440 enum tree_code code = OMP_CLAUSE_REDUCTION_CODE (c);
4441
4442 /* reduction(-:var) sums up the partial results, so it
4443 acts identically to reduction(+:var). */
4444 if (code == MINUS_EXPR)
4445 code = PLUS_EXPR;
4446
4447 gimplify_assign (build_simple_mem_ref (y1), x, ilist);
4448 if (is_simd)
4449 {
4450 x = build2 (code, TREE_TYPE (type),
4451 build_simple_mem_ref (y4),
4452 build_simple_mem_ref (y2));
4453 gimplify_assign (build_simple_mem_ref (y4), x, dlist);
4454 }
4455 }
4456 gimple *g
4457 = gimple_build_assign (y1, POINTER_PLUS_EXPR, y1,
4458 TYPE_SIZE_UNIT (TREE_TYPE (type)));
4459 gimple_seq_add_stmt (ilist, g);
4460 if (y3)
4461 {
4462 g = gimple_build_assign (y3, POINTER_PLUS_EXPR, y3,
4463 TYPE_SIZE_UNIT (TREE_TYPE (type)));
4464 gimple_seq_add_stmt (ilist, g);
4465 }
4466 g = gimple_build_assign (i, PLUS_EXPR, i,
4467 build_int_cst (TREE_TYPE (i), 1));
4468 gimple_seq_add_stmt (ilist, g);
4469 g = gimple_build_cond (LE_EXPR, i, v, body, end);
4470 gimple_seq_add_stmt (ilist, g);
4471 gimple_seq_add_stmt (ilist, gimple_build_label (end));
4472 if (y2)
4473 {
4474 g = gimple_build_assign (y2, POINTER_PLUS_EXPR, y2,
4475 TYPE_SIZE_UNIT (TREE_TYPE (type)));
4476 gimple_seq_add_stmt (dlist, g);
4477 if (y4)
4478 {
4479 g = gimple_build_assign
4480 (y4, POINTER_PLUS_EXPR, y4,
4481 TYPE_SIZE_UNIT (TREE_TYPE (type)));
4482 gimple_seq_add_stmt (dlist, g);
4483 }
4484 g = gimple_build_assign (i2, PLUS_EXPR, i2,
4485 build_int_cst (TREE_TYPE (i2), 1));
4486 gimple_seq_add_stmt (dlist, g);
4487 g = gimple_build_cond (LE_EXPR, i2, v, body2, end2);
4488 gimple_seq_add_stmt (dlist, g);
4489 gimple_seq_add_stmt (dlist, gimple_build_label (end2));
4490 }
4491 continue;
4492 }
4493 else if (pass == 2)
4494 {
4495 if (is_global_var (maybe_lookup_decl_in_outer_ctx (var, ctx)))
4496 x = var;
4497 else
4498 {
4499 bool by_ref = use_pointer_for_field (var, ctx);
4500 x = build_receiver_ref (var, by_ref, ctx);
4501 }
4502 if (!omp_is_reference (var))
4503 x = build_fold_addr_expr (x);
4504 x = fold_convert (ptr_type_node, x);
4505 unsigned cnt = task_reduction_cnt - 1;
4506 if (!task_reduction_needs_orig_p)
4507 cnt += task_reduction_cntorig_full - task_reduction_cntorig;
4508 else
4509 cnt = task_reduction_cntorig - 1;
4510 tree r = build4 (ARRAY_REF, ptr_type_node, tskred_avar,
4511 size_int (cnt), NULL_TREE, NULL_TREE);
4512 gimplify_assign (r, x, ilist);
4513 continue;
4514 }
4515 else if (pass == 3)
4516 {
4517 tree type = TREE_TYPE (new_var);
4518 if (!omp_is_reference (var))
4519 type = build_pointer_type (type);
4520 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION)
4521 {
4522 unsigned cnt = task_reduction_cnt - 1;
4523 if (!task_reduction_needs_orig_p)
4524 cnt += (task_reduction_cntorig_full
4525 - task_reduction_cntorig);
4526 else
4527 cnt = task_reduction_cntorig - 1;
4528 x = build4 (ARRAY_REF, ptr_type_node, tskred_avar,
4529 size_int (cnt), NULL_TREE, NULL_TREE);
4530 }
4531 else
4532 {
4533 unsigned int idx = *ctx->task_reduction_map->get (c);
4534 tree off;
4535 if (ctx->task_reductions[1 + idx])
4536 off = fold_convert (sizetype,
4537 ctx->task_reductions[1 + idx]);
4538 else
4539 off = task_reduction_read (ilist, tskred_temp, sizetype,
4540 7 + 3 * idx + 1);
4541 x = fold_build2 (POINTER_PLUS_EXPR, ptr_type_node,
4542 tskred_base, off);
4543 }
4544 x = fold_convert (type, x);
4545 tree t;
4546 if (omp_is_reference (var))
4547 {
4548 gimplify_assign (new_var, x, ilist);
4549 t = new_var;
4550 new_var = build_simple_mem_ref (new_var);
4551 }
4552 else
4553 {
4554 t = create_tmp_var (type);
4555 gimplify_assign (t, x, ilist);
4556 SET_DECL_VALUE_EXPR (new_var, build_simple_mem_ref (t));
4557 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
4558 }
4559 t = fold_convert (build_pointer_type (boolean_type_node), t);
4560 t = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (t), t,
4561 TYPE_SIZE_UNIT (TREE_TYPE (type)));
4562 cond = create_tmp_var (TREE_TYPE (t));
4563 gimplify_assign (cond, t, ilist);
4564 }
4565 else if (is_variable_sized (var))
4566 {
4567 /* For variable sized types, we need to allocate the
4568 actual storage here. Call alloca and store the
4569 result in the pointer decl that we created elsewhere. */
4570 if (pass == 0)
4571 continue;
4572
4573 if (c_kind != OMP_CLAUSE_FIRSTPRIVATE || !is_task_ctx (ctx))
4574 {
4575 gcall *stmt;
4576 tree tmp, atmp;
4577
4578 ptr = DECL_VALUE_EXPR (new_var);
4579 gcc_assert (TREE_CODE (ptr) == INDIRECT_REF);
4580 ptr = TREE_OPERAND (ptr, 0);
4581 gcc_assert (DECL_P (ptr));
4582 x = TYPE_SIZE_UNIT (TREE_TYPE (new_var));
4583
4584 /* void *tmp = __builtin_alloca */
4585 atmp = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
4586 stmt = gimple_build_call (atmp, 2, x,
4587 size_int (DECL_ALIGN (var)));
4588 tmp = create_tmp_var_raw (ptr_type_node);
4589 gimple_add_tmp_var (tmp);
4590 gimple_call_set_lhs (stmt, tmp);
4591
4592 gimple_seq_add_stmt (ilist, stmt);
4593
4594 x = fold_convert_loc (clause_loc, TREE_TYPE (ptr), tmp);
4595 gimplify_assign (ptr, x, ilist);
4596 }
4597 }
4598 else if (omp_is_reference (var)
4599 && (c_kind != OMP_CLAUSE_FIRSTPRIVATE
4600 || !OMP_CLAUSE_FIRSTPRIVATE_NO_REFERENCE (c)))
4601 {
4602 /* For references that are being privatized for Fortran,
4603 allocate new backing storage for the new pointer
4604 variable. This allows us to avoid changing all the
4605 code that expects a pointer to something that expects
4606 a direct variable. */
4607 if (pass == 0)
4608 continue;
4609
4610 x = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (new_var)));
4611 if (c_kind == OMP_CLAUSE_FIRSTPRIVATE && is_task_ctx (ctx))
4612 {
4613 x = build_receiver_ref (var, false, ctx);
4614 x = build_fold_addr_expr_loc (clause_loc, x);
4615 }
4616 else if (TREE_CONSTANT (x))
4617 {
4618 /* For reduction in SIMD loop, defer adding the
4619 initialization of the reference, because if we decide
4620 		 to use SIMD array for it, the initialization could cause
4621 expansion ICE. */
4622 if (c_kind == OMP_CLAUSE_REDUCTION && is_simd)
4623 x = NULL_TREE;
4624 else
4625 {
4626 x = create_tmp_var_raw (TREE_TYPE (TREE_TYPE (new_var)),
4627 get_name (var));
4628 gimple_add_tmp_var (x);
4629 TREE_ADDRESSABLE (x) = 1;
4630 x = build_fold_addr_expr_loc (clause_loc, x);
4631 }
4632 }
4633 else
4634 {
4635 tree atmp
4636 = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
4637 tree rtype = TREE_TYPE (TREE_TYPE (new_var));
4638 tree al = size_int (TYPE_ALIGN (rtype));
4639 x = build_call_expr_loc (clause_loc, atmp, 2, x, al);
4640 }
4641
4642 if (x)
4643 {
4644 x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x);
4645 gimplify_assign (new_var, x, ilist);
4646 }
4647
4648 new_var = build_simple_mem_ref_loc (clause_loc, new_var);
4649 }
4650 else if ((c_kind == OMP_CLAUSE_REDUCTION
4651 || c_kind == OMP_CLAUSE_IN_REDUCTION)
4652 && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
4653 {
4654 if (pass == 0)
4655 continue;
4656 }
4657 else if (pass != 0)
4658 continue;
4659
4660 switch (OMP_CLAUSE_CODE (c))
4661 {
4662 case OMP_CLAUSE_SHARED:
4663 /* Ignore shared directives in teams construct inside
4664 target construct. */
4665 if (gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS
4666 && !is_host_teams_ctx (ctx))
4667 continue;
4668 /* Shared global vars are just accessed directly. */
4669 if (is_global_var (new_var))
4670 break;
4671 /* For taskloop firstprivate/lastprivate, represented
4672 as firstprivate and shared clause on the task, new_var
4673 is the firstprivate var. */
4674 if (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
4675 break;
4676 /* Set up the DECL_VALUE_EXPR for shared variables now. This
4677 needs to be delayed until after fixup_child_record_type so
4678 that we get the correct type during the dereference. */
4679 by_ref = use_pointer_for_field (var, ctx);
4680 x = build_receiver_ref (var, by_ref, ctx);
4681 SET_DECL_VALUE_EXPR (new_var, x);
4682 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
4683
4684 /* ??? If VAR is not passed by reference, and the variable
4685 hasn't been initialized yet, then we'll get a warning for
4686 the store into the omp_data_s structure. Ideally, we'd be
4687 able to notice this and not store anything at all, but
4688 we're generating code too early. Suppress the warning. */
4689 if (!by_ref)
4690 TREE_NO_WARNING (var) = 1;
4691 break;
4692
4693 case OMP_CLAUSE_LASTPRIVATE:
4694 if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
4695 break;
4696 /* FALLTHRU */
4697
4698 case OMP_CLAUSE_PRIVATE:
4699 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_PRIVATE)
4700 x = build_outer_var_ref (var, ctx);
4701 else if (OMP_CLAUSE_PRIVATE_OUTER_REF (c))
4702 {
4703 if (is_task_ctx (ctx))
4704 x = build_receiver_ref (var, false, ctx);
4705 else
4706 x = build_outer_var_ref (var, ctx, OMP_CLAUSE_PRIVATE);
4707 }
4708 else
4709 x = NULL;
4710 do_private:
4711 tree nx;
4712 nx = lang_hooks.decls.omp_clause_default_ctor
4713 (c, unshare_expr (new_var), x);
4714 if (is_simd)
4715 {
4716 tree y = lang_hooks.decls.omp_clause_dtor (c, new_var);
4717 if ((TREE_ADDRESSABLE (new_var) || nx || y
4718 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE)
4719 && lower_rec_simd_input_clauses (new_var, ctx, &sctx,
4720 ivar, lvar))
4721 {
4722 if (nx)
4723 x = lang_hooks.decls.omp_clause_default_ctor
4724 (c, unshare_expr (ivar), x);
4725 if (nx && x)
4726 gimplify_and_add (x, &llist[0]);
4727 if (y)
4728 {
4729 y = lang_hooks.decls.omp_clause_dtor (c, ivar);
4730 if (y)
4731 {
4732 gimple_seq tseq = NULL;
4733
4734 dtor = y;
4735 gimplify_stmt (&dtor, &tseq);
4736 gimple_seq_add_seq (&llist[1], tseq);
4737 }
4738 }
4739 break;
4740 }
4741 }
4742 if (nx)
4743 gimplify_and_add (nx, ilist);
4744 /* FALLTHRU */
4745
4746 do_dtor:
4747 x = lang_hooks.decls.omp_clause_dtor (c, new_var);
4748 if (x)
4749 {
4750 gimple_seq tseq = NULL;
4751
4752 dtor = x;
4753 gimplify_stmt (&dtor, &tseq);
4754 gimple_seq_add_seq (dlist, tseq);
4755 }
4756 break;
4757
4758 case OMP_CLAUSE_LINEAR:
4759 if (!OMP_CLAUSE_LINEAR_NO_COPYIN (c))
4760 goto do_firstprivate;
4761 if (OMP_CLAUSE_LINEAR_NO_COPYOUT (c))
4762 x = NULL;
4763 else
4764 x = build_outer_var_ref (var, ctx);
4765 goto do_private;
4766
4767 case OMP_CLAUSE_FIRSTPRIVATE:
4768 if (is_task_ctx (ctx))
4769 {
4770 if ((omp_is_reference (var)
4771 && !OMP_CLAUSE_FIRSTPRIVATE_NO_REFERENCE (c))
4772 || is_variable_sized (var))
4773 goto do_dtor;
4774 else if (is_global_var (maybe_lookup_decl_in_outer_ctx (var,
4775 ctx))
4776 || use_pointer_for_field (var, NULL))
4777 {
4778 x = build_receiver_ref (var, false, ctx);
4779 SET_DECL_VALUE_EXPR (new_var, x);
4780 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
4781 goto do_dtor;
4782 }
4783 }
4784 if (OMP_CLAUSE_FIRSTPRIVATE_NO_REFERENCE (c)
4785 && omp_is_reference (var))
4786 {
4787 x = build_outer_var_ref (var, ctx);
4788 gcc_assert (TREE_CODE (x) == MEM_REF
4789 && integer_zerop (TREE_OPERAND (x, 1)));
4790 x = TREE_OPERAND (x, 0);
4791 x = lang_hooks.decls.omp_clause_copy_ctor
4792 (c, unshare_expr (new_var), x);
4793 gimplify_and_add (x, ilist);
4794 goto do_dtor;
4795 }
4796 do_firstprivate:
4797 x = build_outer_var_ref (var, ctx);
4798 if (is_simd)
4799 {
4800 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
4801 && gimple_omp_for_combined_into_p (ctx->stmt))
4802 {
4803 tree t = OMP_CLAUSE_LINEAR_STEP (c);
4804 tree stept = TREE_TYPE (t);
4805 tree ct = omp_find_clause (clauses,
4806 OMP_CLAUSE__LOOPTEMP_);
4807 gcc_assert (ct);
4808 tree l = OMP_CLAUSE_DECL (ct);
4809 tree n1 = fd->loop.n1;
4810 tree step = fd->loop.step;
4811 tree itype = TREE_TYPE (l);
4812 if (POINTER_TYPE_P (itype))
4813 itype = signed_type_for (itype);
4814 l = fold_build2 (MINUS_EXPR, itype, l, n1);
4815 if (TYPE_UNSIGNED (itype)
4816 && fd->loop.cond_code == GT_EXPR)
4817 l = fold_build2 (TRUNC_DIV_EXPR, itype,
4818 fold_build1 (NEGATE_EXPR, itype, l),
4819 fold_build1 (NEGATE_EXPR,
4820 itype, step));
4821 else
4822 l = fold_build2 (TRUNC_DIV_EXPR, itype, l, step);
4823 t = fold_build2 (MULT_EXPR, stept,
4824 fold_convert (stept, l), t);
4825
4826 if (OMP_CLAUSE_LINEAR_ARRAY (c))
4827 {
4828 x = lang_hooks.decls.omp_clause_linear_ctor
4829 (c, new_var, x, t);
4830 gimplify_and_add (x, ilist);
4831 goto do_dtor;
4832 }
4833
4834 if (POINTER_TYPE_P (TREE_TYPE (x)))
4835 x = fold_build2 (POINTER_PLUS_EXPR,
4836 TREE_TYPE (x), x, t);
4837 else
4838 x = fold_build2 (PLUS_EXPR, TREE_TYPE (x), x, t);
4839 }
4840
4841 if ((OMP_CLAUSE_CODE (c) != OMP_CLAUSE_LINEAR
4842 || TREE_ADDRESSABLE (new_var))
4843 && lower_rec_simd_input_clauses (new_var, ctx, &sctx,
4844 ivar, lvar))
4845 {
4846 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR)
4847 {
4848 tree iv = create_tmp_var (TREE_TYPE (new_var));
4849 x = lang_hooks.decls.omp_clause_copy_ctor (c, iv, x);
4850 gimplify_and_add (x, ilist);
4851 gimple_stmt_iterator gsi
4852 = gsi_start_1 (gimple_omp_body_ptr (ctx->stmt));
4853 gassign *g
4854 = gimple_build_assign (unshare_expr (lvar), iv);
4855 gsi_insert_before_without_update (&gsi, g,
4856 GSI_SAME_STMT);
4857 tree t = OMP_CLAUSE_LINEAR_STEP (c);
4858 enum tree_code code = PLUS_EXPR;
4859 if (POINTER_TYPE_P (TREE_TYPE (new_var)))
4860 code = POINTER_PLUS_EXPR;
4861 g = gimple_build_assign (iv, code, iv, t);
4862 gsi_insert_before_without_update (&gsi, g,
4863 GSI_SAME_STMT);
4864 break;
4865 }
4866 x = lang_hooks.decls.omp_clause_copy_ctor
4867 (c, unshare_expr (ivar), x);
4868 gimplify_and_add (x, &llist[0]);
4869 x = lang_hooks.decls.omp_clause_dtor (c, ivar);
4870 if (x)
4871 {
4872 gimple_seq tseq = NULL;
4873
4874 dtor = x;
4875 gimplify_stmt (&dtor, &tseq);
4876 gimple_seq_add_seq (&llist[1], tseq);
4877 }
4878 break;
4879 }
4880 }
4881 x = lang_hooks.decls.omp_clause_copy_ctor
4882 (c, unshare_expr (new_var), x);
4883 gimplify_and_add (x, ilist);
4884 goto do_dtor;
4885
4886 case OMP_CLAUSE__LOOPTEMP_:
4887 case OMP_CLAUSE__REDUCTEMP_:
4888 gcc_assert (is_taskreg_ctx (ctx));
4889 x = build_outer_var_ref (var, ctx);
4890 x = build2 (MODIFY_EXPR, TREE_TYPE (new_var), new_var, x);
4891 gimplify_and_add (x, ilist);
4892 break;
4893
4894 case OMP_CLAUSE_COPYIN:
4895 by_ref = use_pointer_for_field (var, NULL);
4896 x = build_receiver_ref (var, by_ref, ctx);
4897 x = lang_hooks.decls.omp_clause_assign_op (c, new_var, x);
4898 append_to_statement_list (x, &copyin_seq);
4899 copyin_by_ref |= by_ref;
4900 break;
4901
4902 case OMP_CLAUSE_REDUCTION:
4903 case OMP_CLAUSE_IN_REDUCTION:
4904 /* OpenACC reductions are initialized using the
4905 GOACC_REDUCTION internal function. */
4906 if (is_gimple_omp_oacc (ctx->stmt))
4907 break;
4908 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
4909 {
4910 tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
4911 gimple *tseq;
4912 tree ptype = TREE_TYPE (placeholder);
4913 if (cond)
4914 {
4915 x = error_mark_node;
4916 if (OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c)
4917 && !task_reduction_needs_orig_p)
4918 x = var;
4919 else if (OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c))
4920 {
4921 tree pptype = build_pointer_type (ptype);
4922 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION)
4923 x = build4 (ARRAY_REF, ptr_type_node, tskred_avar,
4924 size_int (task_reduction_cnt_full
4925 + task_reduction_cntorig - 1),
4926 NULL_TREE, NULL_TREE);
4927 else
4928 {
4929 unsigned int idx
4930 = *ctx->task_reduction_map->get (c);
4931 x = task_reduction_read (ilist, tskred_temp,
4932 pptype, 7 + 3 * idx);
4933 }
4934 x = fold_convert (pptype, x);
4935 x = build_simple_mem_ref (x);
4936 }
4937 }
4938 else
4939 {
4940 x = build_outer_var_ref (var, ctx);
4941
4942 if (omp_is_reference (var)
4943 && !useless_type_conversion_p (ptype, TREE_TYPE (x)))
4944 x = build_fold_addr_expr_loc (clause_loc, x);
4945 }
4946 SET_DECL_VALUE_EXPR (placeholder, x);
4947 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
4948 tree new_vard = new_var;
4949 if (omp_is_reference (var))
4950 {
4951 gcc_assert (TREE_CODE (new_var) == MEM_REF);
4952 new_vard = TREE_OPERAND (new_var, 0);
4953 gcc_assert (DECL_P (new_vard));
4954 }
4955 if (is_simd
4956 && lower_rec_simd_input_clauses (new_var, ctx, &sctx,
4957 ivar, lvar))
4958 {
4959 if (new_vard == new_var)
4960 {
4961 gcc_assert (DECL_VALUE_EXPR (new_var) == lvar);
4962 SET_DECL_VALUE_EXPR (new_var, ivar);
4963 }
4964 else
4965 {
4966 SET_DECL_VALUE_EXPR (new_vard,
4967 build_fold_addr_expr (ivar));
4968 DECL_HAS_VALUE_EXPR_P (new_vard) = 1;
4969 }
4970 x = lang_hooks.decls.omp_clause_default_ctor
4971 (c, unshare_expr (ivar),
4972 build_outer_var_ref (var, ctx));
4973 if (x)
4974 gimplify_and_add (x, &llist[0]);
4975 if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c))
4976 {
4977 tseq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c);
4978 lower_omp (&tseq, ctx);
4979 gimple_seq_add_seq (&llist[0], tseq);
4980 }
4981 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL;
4982 tseq = OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c);
4983 lower_omp (&tseq, ctx);
4984 gimple_seq_add_seq (&llist[1], tseq);
4985 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
4986 DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
4987 if (new_vard == new_var)
4988 SET_DECL_VALUE_EXPR (new_var, lvar);
4989 else
4990 SET_DECL_VALUE_EXPR (new_vard,
4991 build_fold_addr_expr (lvar));
4992 x = lang_hooks.decls.omp_clause_dtor (c, ivar);
4993 if (x)
4994 {
4995 tseq = NULL;
4996 dtor = x;
4997 gimplify_stmt (&dtor, &tseq);
4998 gimple_seq_add_seq (&llist[1], tseq);
4999 }
5000 break;
5001 }
5002 /* If this is a reference to constant size reduction var
5003 with placeholder, we haven't emitted the initializer
5004 for it because it is undesirable if SIMD arrays are used.
5005 But if they aren't used, we need to emit the deferred
5006 initialization now. */
5007 else if (omp_is_reference (var) && is_simd)
5008 handle_simd_reference (clause_loc, new_vard, ilist);
5009
5010 tree lab2 = NULL_TREE;
5011 if (cond)
5012 {
5013 gimple *g;
5014 if (!is_parallel_ctx (ctx))
5015 {
5016 tree condv = create_tmp_var (boolean_type_node);
5017 tree m = build_simple_mem_ref (cond);
5018 g = gimple_build_assign (condv, m);
5019 gimple_seq_add_stmt (ilist, g);
5020 tree lab1
5021 = create_artificial_label (UNKNOWN_LOCATION);
5022 lab2 = create_artificial_label (UNKNOWN_LOCATION);
5023 g = gimple_build_cond (NE_EXPR, condv,
5024 boolean_false_node,
5025 lab2, lab1);
5026 gimple_seq_add_stmt (ilist, g);
5027 gimple_seq_add_stmt (ilist,
5028 gimple_build_label (lab1));
5029 }
5030 g = gimple_build_assign (build_simple_mem_ref (cond),
5031 boolean_true_node);
5032 gimple_seq_add_stmt (ilist, g);
5033 }
5034 x = lang_hooks.decls.omp_clause_default_ctor
5035 (c, unshare_expr (new_var),
5036 cond ? NULL_TREE
5037 : build_outer_var_ref (var, ctx));
5038 if (x)
5039 gimplify_and_add (x, ilist);
5040 if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c))
5041 {
5042 tseq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c);
5043 lower_omp (&tseq, ctx);
5044 gimple_seq_add_seq (ilist, tseq);
5045 }
5046 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL;
5047 if (is_simd)
5048 {
5049 tseq = OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c);
5050 lower_omp (&tseq, ctx);
5051 gimple_seq_add_seq (dlist, tseq);
5052 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
5053 }
5054 DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
5055 if (cond)
5056 {
5057 if (lab2)
5058 gimple_seq_add_stmt (ilist, gimple_build_label (lab2));
5059 break;
5060 }
5061 goto do_dtor;
5062 }
5063 else
5064 {
5065 x = omp_reduction_init (c, TREE_TYPE (new_var));
5066 gcc_assert (TREE_CODE (TREE_TYPE (new_var)) != ARRAY_TYPE);
5067 enum tree_code code = OMP_CLAUSE_REDUCTION_CODE (c);
5068
5069 if (cond)
5070 {
5071 gimple *g;
5072 tree lab2 = NULL_TREE;
5073 /* GOMP_taskgroup_reduction_register memsets the whole
5074 array to zero. If the initializer is zero, we don't
5075 need to initialize it again, just mark it as ever
5076 used unconditionally, i.e. cond = true. */
5077 if (initializer_zerop (x))
5078 {
5079 g = gimple_build_assign (build_simple_mem_ref (cond),
5080 boolean_true_node);
5081 gimple_seq_add_stmt (ilist, g);
5082 break;
5083 }
5084
5085 /* Otherwise, emit
5086 if (!cond) { cond = true; new_var = x; } */
5087 if (!is_parallel_ctx (ctx))
5088 {
5089 tree condv = create_tmp_var (boolean_type_node);
5090 tree m = build_simple_mem_ref (cond);
5091 g = gimple_build_assign (condv, m);
5092 gimple_seq_add_stmt (ilist, g);
5093 tree lab1
5094 = create_artificial_label (UNKNOWN_LOCATION);
5095 lab2 = create_artificial_label (UNKNOWN_LOCATION);
5096 g = gimple_build_cond (NE_EXPR, condv,
5097 boolean_false_node,
5098 lab2, lab1);
5099 gimple_seq_add_stmt (ilist, g);
5100 gimple_seq_add_stmt (ilist,
5101 gimple_build_label (lab1));
5102 }
5103 g = gimple_build_assign (build_simple_mem_ref (cond),
5104 boolean_true_node);
5105 gimple_seq_add_stmt (ilist, g);
5106 gimplify_assign (new_var, x, ilist);
5107 if (lab2)
5108 gimple_seq_add_stmt (ilist, gimple_build_label (lab2));
5109 break;
5110 }
5111
5112 /* reduction(-:var) sums up the partial results, so it
5113 acts identically to reduction(+:var). */
5114 if (code == MINUS_EXPR)
5115 code = PLUS_EXPR;
5116
5117 tree new_vard = new_var;
5118 if (is_simd && omp_is_reference (var))
5119 {
5120 gcc_assert (TREE_CODE (new_var) == MEM_REF);
5121 new_vard = TREE_OPERAND (new_var, 0);
5122 gcc_assert (DECL_P (new_vard));
5123 }
5124 if (is_simd
5125 && lower_rec_simd_input_clauses (new_var, ctx, &sctx,
5126 ivar, lvar))
5127 {
5128 tree ref = build_outer_var_ref (var, ctx);
5129
5130 gimplify_assign (unshare_expr (ivar), x, &llist[0]);
5131
5132 if (sctx.is_simt)
5133 {
5134 if (!simt_lane)
5135 simt_lane = create_tmp_var (unsigned_type_node);
5136 x = build_call_expr_internal_loc
5137 (UNKNOWN_LOCATION, IFN_GOMP_SIMT_XCHG_BFLY,
5138 TREE_TYPE (ivar), 2, ivar, simt_lane);
5139 x = build2 (code, TREE_TYPE (ivar), ivar, x);
5140 gimplify_assign (ivar, x, &llist[2]);
5141 }
5142 x = build2 (code, TREE_TYPE (ref), ref, ivar);
5143 ref = build_outer_var_ref (var, ctx);
5144 gimplify_assign (ref, x, &llist[1]);
5145
5146 if (new_vard != new_var)
5147 {
5148 SET_DECL_VALUE_EXPR (new_vard,
5149 build_fold_addr_expr (lvar));
5150 DECL_HAS_VALUE_EXPR_P (new_vard) = 1;
5151 }
5152 }
5153 else
5154 {
5155 if (omp_is_reference (var) && is_simd)
5156 handle_simd_reference (clause_loc, new_vard, ilist);
5157 gimplify_assign (new_var, x, ilist);
5158 if (is_simd)
5159 {
5160 tree ref = build_outer_var_ref (var, ctx);
5161
5162 x = build2 (code, TREE_TYPE (ref), ref, new_var);
5163 ref = build_outer_var_ref (var, ctx);
5164 gimplify_assign (ref, x, dlist);
5165 }
5166 }
5167 }
5168 break;
5169
5170 default:
5171 gcc_unreachable ();
5172 }
5173 }
5174 }
5175 if (tskred_avar)
5176 {
5177 tree clobber = build_constructor (TREE_TYPE (tskred_avar), NULL);
5178 TREE_THIS_VOLATILE (clobber) = 1;
5179 gimple_seq_add_stmt (ilist, gimple_build_assign (tskred_avar, clobber));
5180 }
5181
5182 if (known_eq (sctx.max_vf, 1U))
5183 sctx.is_simt = false;
5184
5185 if (sctx.lane || sctx.is_simt)
5186 {
5187 uid = create_tmp_var (ptr_type_node, "simduid");
5188 /* Don't want uninit warnings on simduid, it is always uninitialized,
5189 but we use it not for the value, but for the DECL_UID only. */
5190 TREE_NO_WARNING (uid) = 1;
5191 c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__SIMDUID_);
5192 OMP_CLAUSE__SIMDUID__DECL (c) = uid;
5193 OMP_CLAUSE_CHAIN (c) = gimple_omp_for_clauses (ctx->stmt);
5194 gimple_omp_for_set_clauses (ctx->stmt, c);
5195 }
5196 /* Emit calls denoting privatized variables and initializing a pointer to
5197 structure that holds private variables as fields after ompdevlow pass. */
5198 if (sctx.is_simt)
5199 {
5200 sctx.simt_eargs[0] = uid;
5201 gimple *g
5202 = gimple_build_call_internal_vec (IFN_GOMP_SIMT_ENTER, sctx.simt_eargs);
5203 gimple_call_set_lhs (g, uid);
5204 gimple_seq_add_stmt (ilist, g);
5205 sctx.simt_eargs.release ();
5206
5207 simtrec = create_tmp_var (ptr_type_node, ".omp_simt");
5208 g = gimple_build_call_internal (IFN_GOMP_SIMT_ENTER_ALLOC, 1, uid);
5209 gimple_call_set_lhs (g, simtrec);
5210 gimple_seq_add_stmt (ilist, g);
5211 }
5212 if (sctx.lane)
5213 {
5214 gimple *g
5215 = gimple_build_call_internal (IFN_GOMP_SIMD_LANE, 1, uid);
5216 gimple_call_set_lhs (g, sctx.lane);
5217 gimple_stmt_iterator gsi = gsi_start_1 (gimple_omp_body_ptr (ctx->stmt));
5218 gsi_insert_before_without_update (&gsi, g, GSI_SAME_STMT);
5219 g = gimple_build_assign (sctx.lane, INTEGER_CST,
5220 build_int_cst (unsigned_type_node, 0));
5221 gimple_seq_add_stmt (ilist, g);
5222 /* Emit reductions across SIMT lanes in log_2(simt_vf) steps. */
5223 if (llist[2])
5224 {
5225 tree simt_vf = create_tmp_var (unsigned_type_node);
5226 g = gimple_build_call_internal (IFN_GOMP_SIMT_VF, 0);
5227 gimple_call_set_lhs (g, simt_vf);
5228 gimple_seq_add_stmt (dlist, g);
5229
5230 tree t = build_int_cst (unsigned_type_node, 1);
5231 g = gimple_build_assign (simt_lane, INTEGER_CST, t);
5232 gimple_seq_add_stmt (dlist, g);
5233
5234 t = build_int_cst (unsigned_type_node, 0);
5235 g = gimple_build_assign (sctx.idx, INTEGER_CST, t);
5236 gimple_seq_add_stmt (dlist, g);
5237
5238 tree body = create_artificial_label (UNKNOWN_LOCATION);
5239 tree header = create_artificial_label (UNKNOWN_LOCATION);
5240 tree end = create_artificial_label (UNKNOWN_LOCATION);
5241 gimple_seq_add_stmt (dlist, gimple_build_goto (header));
5242 gimple_seq_add_stmt (dlist, gimple_build_label (body));
5243
5244 gimple_seq_add_seq (dlist, llist[2]);
5245
5246 g = gimple_build_assign (simt_lane, LSHIFT_EXPR, simt_lane, integer_one_node);
5247 gimple_seq_add_stmt (dlist, g);
5248
5249 gimple_seq_add_stmt (dlist, gimple_build_label (header));
5250 g = gimple_build_cond (LT_EXPR, simt_lane, simt_vf, body, end);
5251 gimple_seq_add_stmt (dlist, g);
5252
5253 gimple_seq_add_stmt (dlist, gimple_build_label (end));
5254 }
5255 for (int i = 0; i < 2; i++)
5256 if (llist[i])
5257 {
5258 tree vf = create_tmp_var (unsigned_type_node);
5259 g = gimple_build_call_internal (IFN_GOMP_SIMD_VF, 1, uid);
5260 gimple_call_set_lhs (g, vf);
5261 gimple_seq *seq = i == 0 ? ilist : dlist;
5262 gimple_seq_add_stmt (seq, g);
5263 tree t = build_int_cst (unsigned_type_node, 0);
5264 g = gimple_build_assign (sctx.idx, INTEGER_CST, t);
5265 gimple_seq_add_stmt (seq, g);
5266 tree body = create_artificial_label (UNKNOWN_LOCATION);
5267 tree header = create_artificial_label (UNKNOWN_LOCATION);
5268 tree end = create_artificial_label (UNKNOWN_LOCATION);
5269 gimple_seq_add_stmt (seq, gimple_build_goto (header));
5270 gimple_seq_add_stmt (seq, gimple_build_label (body));
5271 gimple_seq_add_seq (seq, llist[i]);
5272 t = build_int_cst (unsigned_type_node, 1);
5273 g = gimple_build_assign (sctx.idx, PLUS_EXPR, sctx.idx, t);
5274 gimple_seq_add_stmt (seq, g);
5275 gimple_seq_add_stmt (seq, gimple_build_label (header));
5276 g = gimple_build_cond (LT_EXPR, sctx.idx, vf, body, end);
5277 gimple_seq_add_stmt (seq, g);
5278 gimple_seq_add_stmt (seq, gimple_build_label (end));
5279 }
5280 }
5281 if (sctx.is_simt)
5282 {
5283 gimple_seq_add_seq (dlist, sctx.simt_dlist);
5284 gimple *g
5285 = gimple_build_call_internal (IFN_GOMP_SIMT_EXIT, 1, simtrec);
5286 gimple_seq_add_stmt (dlist, g);
5287 }
5288
5289 /* The copyin sequence is not to be executed by the main thread, since
5290 that would result in self-copies. Perhaps not visible to scalars,
5291 but it certainly is to C++ operator=. */
5292 if (copyin_seq)
5293 {
5294 x = build_call_expr (builtin_decl_explicit (BUILT_IN_OMP_GET_THREAD_NUM),
5295 0);
5296 x = build2 (NE_EXPR, boolean_type_node, x,
5297 build_int_cst (TREE_TYPE (x), 0));
5298 x = build3 (COND_EXPR, void_type_node, x, copyin_seq, NULL);
5299 gimplify_and_add (x, ilist);
5300 }
5301
5302 /* If any copyin variable is passed by reference, we must ensure the
5303 master thread doesn't modify it before it is copied over in all
5304 threads. Similarly for variables in both firstprivate and
5305 lastprivate clauses we need to ensure the lastprivate copying
5306 happens after firstprivate copying in all threads. And similarly
5307 for UDRs if initializer expression refers to omp_orig. */
5308 if (copyin_by_ref || lastprivate_firstprivate || reduction_omp_orig_ref)
5309 {
5310 /* Don't add any barrier for #pragma omp simd or
5311 #pragma omp distribute. */
5312 if (!is_task_ctx (ctx)
5313 && (gimple_code (ctx->stmt) != GIMPLE_OMP_FOR
5314 || gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_FOR))
5315 gimple_seq_add_stmt (ilist, omp_build_barrier (NULL_TREE));
5316 }
5317
5318 /* If max_vf is non-zero, then we can use only a vectorization factor
5319 up to the max_vf we chose. So stick it into the safelen clause. */
5320 if (maybe_ne (sctx.max_vf, 0U))
5321 {
5322 tree c = omp_find_clause (gimple_omp_for_clauses (ctx->stmt),
5323 OMP_CLAUSE_SAFELEN);
5324 poly_uint64 safe_len;
5325 if (c == NULL_TREE
5326 || (poly_int_tree_p (OMP_CLAUSE_SAFELEN_EXPR (c), &safe_len)
5327 && maybe_gt (safe_len, sctx.max_vf)))
5328 {
5329 c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE_SAFELEN);
5330 OMP_CLAUSE_SAFELEN_EXPR (c) = build_int_cst (integer_type_node,
5331 sctx.max_vf);
5332 OMP_CLAUSE_CHAIN (c) = gimple_omp_for_clauses (ctx->stmt);
5333 gimple_omp_for_set_clauses (ctx->stmt, c);
5334 }
5335 }
5336 }
5337
5338
/* Generate code to implement the LASTPRIVATE clauses.  This is used for
   both parallel and workshare constructs.  PREDICATE may be NULL if it's
   always true.  The generated copy-out statements are appended to
   STMT_LIST; CLAUSES is the clause chain of the construct described by
   CTX.  */

static void
lower_lastprivate_clauses (tree clauses, tree predicate, gimple_seq *stmt_list,
			   omp_context *ctx)
{
  tree x, c, label = NULL, orig_clauses = clauses;
  bool par_clauses = false;
  tree simduid = NULL, lastlane = NULL, simtcond = NULL, simtlast = NULL;

  /* Early exit if there are no lastprivate or linear clauses.  */
  for (; clauses ; clauses = OMP_CLAUSE_CHAIN (clauses))
    if (OMP_CLAUSE_CODE (clauses) == OMP_CLAUSE_LASTPRIVATE
	|| (OMP_CLAUSE_CODE (clauses) == OMP_CLAUSE_LINEAR
	    && !OMP_CLAUSE_LINEAR_NO_COPYOUT (clauses)))
      break;
  if (clauses == NULL)
    {
      /* If this was a workshare clause, see if it had been combined
	 with its parallel.  In that case, look for the clauses on the
	 parallel statement itself.  */
      if (is_parallel_ctx (ctx))
	return;

      ctx = ctx->outer;
      if (ctx == NULL || !is_parallel_ctx (ctx))
	return;

      clauses = omp_find_clause (gimple_omp_parallel_clauses (ctx->stmt),
				 OMP_CLAUSE_LASTPRIVATE);
      if (clauses == NULL)
	return;
      par_clauses = true;
    }

  /* For SIMD loops, detect whether this may execute on SIMT hardware
     (OMP_CLAUSE__SIMT_ present) and fetch the simduid used to identify
     the "omp simd array" privatized copies.  */
  bool maybe_simt = false;
  if (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
      && gimple_omp_for_kind (ctx->stmt) & GF_OMP_FOR_SIMD)
    {
      maybe_simt = omp_find_clause (orig_clauses, OMP_CLAUSE__SIMT_);
      simduid = omp_find_clause (orig_clauses, OMP_CLAUSE__SIMDUID_);
      if (simduid)
	simduid = OMP_CLAUSE__SIMDUID__DECL (simduid);
    }

  if (predicate)
    {
      /* Guard all the copy-out assignments with
	 "if (PREDICATE) { ... }", emitted as a conditional branch to
	 LABEL_TRUE/LABEL.  */
      gcond *stmt;
      tree label_true, arm1, arm2;
      enum tree_code pred_code = TREE_CODE (predicate);

      label = create_artificial_label (UNKNOWN_LOCATION);
      label_true = create_artificial_label (UNKNOWN_LOCATION);
      if (TREE_CODE_CLASS (pred_code) == tcc_comparison)
	{
	  arm1 = TREE_OPERAND (predicate, 0);
	  arm2 = TREE_OPERAND (predicate, 1);
	  gimplify_expr (&arm1, stmt_list, NULL, is_gimple_val, fb_rvalue);
	  gimplify_expr (&arm2, stmt_list, NULL, is_gimple_val, fb_rvalue);
	}
      else
	{
	  /* Not a comparison; test PREDICATE != false instead.  */
	  arm1 = predicate;
	  gimplify_expr (&arm1, stmt_list, NULL, is_gimple_val, fb_rvalue);
	  arm2 = boolean_false_node;
	  pred_code = NE_EXPR;
	}
      if (maybe_simt)
	{
	  /* On SIMT, the lane that saw the last iteration might not be
	     the current one; vote across lanes whether any of them had
	     the predicate true.  */
	  c = build2 (pred_code, boolean_type_node, arm1, arm2);
	  c = fold_convert (integer_type_node, c);
	  simtcond = create_tmp_var (integer_type_node);
	  gimplify_assign (simtcond, c, stmt_list);
	  gcall *g = gimple_build_call_internal (IFN_GOMP_SIMT_VOTE_ANY,
						 1, simtcond);
	  c = create_tmp_var (integer_type_node);
	  gimple_call_set_lhs (g, c);
	  gimple_seq_add_stmt (stmt_list, g);
	  stmt = gimple_build_cond (NE_EXPR, c, integer_zero_node,
				    label_true, label);
	}
      else
	stmt = gimple_build_cond (pred_code, arm1, arm2, label_true, label);
      gimple_seq_add_stmt (stmt_list, stmt);
      gimple_seq_add_stmt (stmt_list, gimple_build_label (label_true));
    }

  for (c = clauses; c ;)
    {
      tree var, new_var;
      location_t clause_loc = OMP_CLAUSE_LOCATION (c);

      if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
	  || (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
	      && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c)))
	{
	  var = OMP_CLAUSE_DECL (c);
	  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
	      && OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c)
	      && is_taskloop_ctx (ctx))
	    {
	      /* For taskloop firstprivate+lastprivate, the private copy
		 lives in the enclosing task context.  */
	      gcc_checking_assert (ctx->outer && is_task_ctx (ctx->outer));
	      new_var = lookup_decl (var, ctx->outer);
	    }
	  else
	    {
	      new_var = lookup_decl (var, ctx);
	      /* Avoid uninitialized warnings for lastprivate and
		 for linear iterators.  */
	      if (predicate
		  && (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
		      || OMP_CLAUSE_LINEAR_NO_COPYIN (c)))
		TREE_NO_WARNING (new_var) = 1;
	    }

	  if (!maybe_simt && simduid && DECL_HAS_VALUE_EXPR_P (new_var))
	    {
	      /* The private copy is an element of an "omp simd array";
		 copy out of the element written by the lane that ran
		 the sequentially-last iteration, obtained once via
		 GOMP_SIMD_LAST_LANE and cached in LASTLANE.  */
	      tree val = DECL_VALUE_EXPR (new_var);
	      if (TREE_CODE (val) == ARRAY_REF
		  && VAR_P (TREE_OPERAND (val, 0))
		  && lookup_attribute ("omp simd array",
				       DECL_ATTRIBUTES (TREE_OPERAND (val,
								      0))))
		{
		  if (lastlane == NULL)
		    {
		      lastlane = create_tmp_var (unsigned_type_node);
		      gcall *g
			= gimple_build_call_internal (IFN_GOMP_SIMD_LAST_LANE,
						      2, simduid,
						      TREE_OPERAND (val, 1));
		      gimple_call_set_lhs (g, lastlane);
		      gimple_seq_add_stmt (stmt_list, g);
		    }
		  new_var = build4 (ARRAY_REF, TREE_TYPE (val),
				    TREE_OPERAND (val, 0), lastlane,
				    NULL_TREE, NULL_TREE);
		}
	    }
	  else if (maybe_simt)
	    {
	      /* On SIMT, fetch the value from the last active lane
		 (computed once into SIMTLAST) via a lane shuffle.  */
	      tree val = (DECL_HAS_VALUE_EXPR_P (new_var)
			  ? DECL_VALUE_EXPR (new_var)
			  : new_var);
	      if (simtlast == NULL)
		{
		  simtlast = create_tmp_var (unsigned_type_node);
		  gcall *g = gimple_build_call_internal
		    (IFN_GOMP_SIMT_LAST_LANE, 1, simtcond);
		  gimple_call_set_lhs (g, simtlast);
		  gimple_seq_add_stmt (stmt_list, g);
		}
	      x = build_call_expr_internal_loc
		(UNKNOWN_LOCATION, IFN_GOMP_SIMT_XCHG_IDX,
		 TREE_TYPE (val), 2, val, simtlast);
	      new_var = unshare_expr (new_var);
	      gimplify_assign (new_var, x, stmt_list);
	      new_var = unshare_expr (new_var);
	    }

	  /* Emit any deferred lowering sequences attached to the clause
	     (e.g. class destructors/assignments) before the copy-out.  */
	  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
	      && OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c))
	    {
	      lower_omp (&OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c), ctx);
	      gimple_seq_add_seq (stmt_list,
				  OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c));
	      OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c) = NULL;
	    }
	  else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
		   && OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c))
	    {
	      lower_omp (&OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c), ctx);
	      gimple_seq_add_seq (stmt_list,
				  OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c));
	      OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c) = NULL;
	    }

	  /* Build the reference to the original (outer) variable, then
	     assign the private copy back into it.  */
	  x = NULL_TREE;
	  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
	      && OMP_CLAUSE_LASTPRIVATE_TASKLOOP_IV (c))
	    {
	      gcc_checking_assert (is_taskloop_ctx (ctx));
	      tree ovar = maybe_lookup_decl_in_outer_ctx (var,
							  ctx->outer->outer);
	      if (is_global_var (ovar))
		x = ovar;
	    }
	  if (!x)
	    x = build_outer_var_ref (var, ctx, OMP_CLAUSE_LASTPRIVATE);
	  if (omp_is_reference (var))
	    new_var = build_simple_mem_ref_loc (clause_loc, new_var);
	  x = lang_hooks.decls.omp_clause_assign_op (c, x, new_var);
	  gimplify_and_add (x, stmt_list);
	}
      c = OMP_CLAUSE_CHAIN (c);
      if (c == NULL && !par_clauses)
	{
	  /* If this was a workshare clause, see if it had been combined
	     with its parallel.  In that case, continue looking for the
	     clauses also on the parallel statement itself.  */
	  if (is_parallel_ctx (ctx))
	    break;

	  ctx = ctx->outer;
	  if (ctx == NULL || !is_parallel_ctx (ctx))
	    break;

	  c = omp_find_clause (gimple_omp_parallel_clauses (ctx->stmt),
			       OMP_CLAUSE_LASTPRIVATE);
	  par_clauses = true;
	}
    }

  if (label)
    gimple_seq_add_stmt (stmt_list, gimple_build_label (label));
}
5557
/* Lower the OpenACC reductions of CLAUSES for compute axis LEVEL
   (which might be a placeholder).  INNER is true if this is an inner
   axis of a multi-axis loop.  FORK and JOIN are (optional) fork and
   join markers.  Generate the before-loop forking sequence in
   FORK_SEQ and the after-loop joining sequence to JOIN_SEQ.  The
   general form of these sequences is

     GOACC_REDUCTION_SETUP
     GOACC_FORK
     GOACC_REDUCTION_INIT
     ...
     GOACC_REDUCTION_FINI
     GOACC_JOIN
     GOACC_REDUCTION_TEARDOWN.  */

static void
lower_oacc_reductions (location_t loc, tree clauses, tree level, bool inner,
		       gcall *fork, gcall *join, gimple_seq *fork_seq,
		       gimple_seq *join_seq, omp_context *ctx)
{
  /* Per-phase statement sequences, stitched around FORK/JOIN at the
     end of this function.  */
  gimple_seq before_fork = NULL;
  gimple_seq after_fork = NULL;
  gimple_seq before_join = NULL;
  gimple_seq after_join = NULL;
  /* Cached INTEGER_CSTs selecting the GOACC_REDUCTION sub-operation;
     built lazily for the first reduction clause.  */
  tree init_code = NULL_TREE, fini_code = NULL_TREE,
    setup_code = NULL_TREE, teardown_code = NULL_TREE;
  /* Running byte offset into the shared reduction buffer.  */
  unsigned offset = 0;

  for (tree c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
    if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION)
      {
	tree orig = OMP_CLAUSE_DECL (c);
	tree var = maybe_lookup_decl (orig, ctx);
	tree ref_to_res = NULL_TREE;
	tree incoming, outgoing, v1, v2, v3;
	bool is_private = false;

	/* Canonicalize the reduction operator: '-' accumulates like
	   '+', and the short-circuit logicals become bitwise ops.  */
	enum tree_code rcode = OMP_CLAUSE_REDUCTION_CODE (c);
	if (rcode == MINUS_EXPR)
	  rcode = PLUS_EXPR;
	else if (rcode == TRUTH_ANDIF_EXPR)
	  rcode = BIT_AND_EXPR;
	else if (rcode == TRUTH_ORIF_EXPR)
	  rcode = BIT_IOR_EXPR;
	tree op = build_int_cst (unsigned_type_node, rcode);

	if (!var)
	  var = orig;

	incoming = outgoing = var;

	if (!inner)
	  {
	    /* See if an outer construct also reduces this variable.  */
	    omp_context *outer = ctx;

	    while (omp_context *probe = outer->outer)
	      {
		enum gimple_code type = gimple_code (probe->stmt);
		tree cls;

		switch (type)
		  {
		  case GIMPLE_OMP_FOR:
		    cls = gimple_omp_for_clauses (probe->stmt);
		    break;

		  case GIMPLE_OMP_TARGET:
		    if (gimple_omp_target_kind (probe->stmt)
			!= GF_OMP_TARGET_KIND_OACC_PARALLEL)
		      goto do_lookup;

		    cls = gimple_omp_target_clauses (probe->stmt);
		    break;

		  default:
		    goto do_lookup;
		  }

		outer = probe;
		for (; cls;  cls = OMP_CLAUSE_CHAIN (cls))
		  if (OMP_CLAUSE_CODE (cls) == OMP_CLAUSE_REDUCTION
		      && orig == OMP_CLAUSE_DECL (cls))
		    {
		      /* An outer reduction of the same variable: chain
			 into its private copy.  */
		      incoming = outgoing = lookup_decl (orig, probe);
		      goto has_outer_reduction;
		    }
		  else if ((OMP_CLAUSE_CODE (cls) == OMP_CLAUSE_FIRSTPRIVATE
			    || OMP_CLAUSE_CODE (cls) == OMP_CLAUSE_PRIVATE)
			   && orig == OMP_CLAUSE_DECL (cls))
		    {
		      is_private = true;
		      goto do_lookup;
		    }
	      }

	  do_lookup:
	    /* This is the outermost construct with this reduction,
	       see if there's a mapping for it.  */
	    if (gimple_code (outer->stmt) == GIMPLE_OMP_TARGET
		&& maybe_lookup_field (orig, outer) && !is_private)
	      {
		/* Mapped on the target: result goes back through the
		   receiver record; start from the operator's identity
		   value.  */
		ref_to_res = build_receiver_ref (orig, false, outer);
		if (omp_is_reference (orig))
		  ref_to_res = build_simple_mem_ref (ref_to_res);

		tree type = TREE_TYPE (var);
		if (POINTER_TYPE_P (type))
		  type = TREE_TYPE (type);

		outgoing = var;
		incoming = omp_reduction_init_op (loc, rcode, type);
	      }
	    else
	      {
		/* Try to look at enclosing contexts for reduction var,
		   use original if no mapping found.  */
		tree t = NULL_TREE;
		omp_context *c = ctx->outer;
		while (c && !t)
		  {
		    t = maybe_lookup_decl (orig, c);
		    c = c->outer;
		  }
		incoming = outgoing = (t ? t : orig);
	      }

	  has_outer_reduction:;
	  }

	if (!ref_to_res)
	  ref_to_res = integer_zero_node;

	if (omp_is_reference (orig))
	  {
	    /* Reference-typed reduction: materialize pointer temps V1-V3
	       for the three GOACC_REDUCTION phases, then operate on the
	       pointed-to objects.  */
	    tree type = TREE_TYPE (var);
	    const char *id = IDENTIFIER_POINTER (DECL_NAME (var));

	    if (!inner)
	      {
		tree x = create_tmp_var (TREE_TYPE (type), id);
		gimplify_assign (var, build_fold_addr_expr (x), fork_seq);
	      }

	    v1 = create_tmp_var (type, id);
	    v2 = create_tmp_var (type, id);
	    v3 = create_tmp_var (type, id);

	    gimplify_assign (v1, var, fork_seq);
	    gimplify_assign (v2, var, fork_seq);
	    gimplify_assign (v3, var, fork_seq);

	    var = build_simple_mem_ref (var);
	    v1 = build_simple_mem_ref (v1);
	    v2 = build_simple_mem_ref (v2);
	    v3 = build_simple_mem_ref (v3);
	    outgoing = build_simple_mem_ref (outgoing);

	    if (!TREE_CONSTANT (incoming))
	      incoming = build_simple_mem_ref (incoming);
	  }
	else
	  v1 = v2 = v3 = var;

	/* Determine position in reduction buffer, which may be used
	   by target.  The parser has ensured that this is not a
	   variable-sized type.  */
	fixed_size_mode mode
	  = as_a <fixed_size_mode> (TYPE_MODE (TREE_TYPE (var)));
	unsigned align = GET_MODE_ALIGNMENT (mode) /  BITS_PER_UNIT;
	/* Round OFFSET up to the natural alignment of this entry.  */
	offset = (offset + align - 1) & ~(align - 1);
	tree off = build_int_cst (sizetype, offset);
	offset += GET_MODE_SIZE (mode);

	if (!init_code)
	  {
	    init_code = build_int_cst (integer_type_node,
				       IFN_GOACC_REDUCTION_INIT);
	    fini_code = build_int_cst (integer_type_node,
				       IFN_GOACC_REDUCTION_FINI);
	    setup_code = build_int_cst (integer_type_node,
					IFN_GOACC_REDUCTION_SETUP);
	    teardown_code = build_int_cst (integer_type_node,
					   IFN_GOACC_REDUCTION_TEARDOWN);
	  }

	/* Build the four GOACC_REDUCTION calls; they are distinguished
	   only by their sub-operation code and value operand.  */
	tree setup_call
	  = build_call_expr_internal_loc (loc, IFN_GOACC_REDUCTION,
					  TREE_TYPE (var), 6, setup_code,
					  unshare_expr (ref_to_res),
					  incoming, level, op, off);
	tree init_call
	  = build_call_expr_internal_loc (loc, IFN_GOACC_REDUCTION,
					  TREE_TYPE (var), 6, init_code,
					  unshare_expr (ref_to_res),
					  v1, level, op, off);
	tree fini_call
	  = build_call_expr_internal_loc (loc, IFN_GOACC_REDUCTION,
					  TREE_TYPE (var), 6, fini_code,
					  unshare_expr (ref_to_res),
					  v2, level, op, off);
	tree teardown_call
	  = build_call_expr_internal_loc (loc, IFN_GOACC_REDUCTION,
					  TREE_TYPE (var), 6, teardown_code,
					  ref_to_res, v3, level, op, off);

	gimplify_assign (v1, setup_call, &before_fork);
	gimplify_assign (v2, init_call, &after_fork);
	gimplify_assign (v3, fini_call, &before_join);
	gimplify_assign (outgoing, teardown_call, &after_join);
      }

  /* Now stitch things together.  */
  gimple_seq_add_seq (fork_seq, before_fork);
  if (fork)
    gimple_seq_add_stmt (fork_seq, fork);
  gimple_seq_add_seq (fork_seq, after_fork);

  gimple_seq_add_seq (join_seq, before_join);
  if (join)
    gimple_seq_add_stmt (join_seq, join);
  gimple_seq_add_seq (join_seq, after_join);
}
5781
/* Generate code to implement the REDUCTION clauses.  The merge code is
   appended to STMT_SEQP; with a single scalar reduction an atomic update
   is used, otherwise the per-clause merges are wrapped in a
   GOMP_atomic_start/GOMP_atomic_end critical section.  */

static void
lower_reduction_clauses (tree clauses, gimple_seq *stmt_seqp, omp_context *ctx)
{
  gimple_seq sub_seq = NULL;
  gimple *stmt;
  tree x, c;
  int count = 0;

  /* OpenACC loop reductions are handled elsewhere.  */
  if (is_gimple_omp_oacc (ctx->stmt))
    return;

  /* SIMD reductions are handled in lower_rec_input_clauses.  */
  if (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
      && gimple_omp_for_kind (ctx->stmt) & GF_OMP_FOR_SIMD)
    return;

  /* First see if there is exactly one reduction clause.  Use OMP_ATOMIC
     update in that case, otherwise use a lock.  */
  for (c = clauses; c && count < 2; c = OMP_CLAUSE_CHAIN (c))
    if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
	&& !OMP_CLAUSE_REDUCTION_TASK (c))
      {
	if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c)
	    || TREE_CODE (OMP_CLAUSE_DECL (c)) == MEM_REF)
	  {
	    /* Never use OMP_ATOMIC for array reductions or UDRs.  */
	    count = -1;
	    break;
	  }
	count++;
      }

  if (count == 0)
    return;

  for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
    {
      tree var, ref, new_var, orig_var;
      enum tree_code code;
      location_t clause_loc = OMP_CLAUSE_LOCATION (c);

      if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION
	  || OMP_CLAUSE_REDUCTION_TASK (c))
	continue;

      enum omp_clause_code ccode = OMP_CLAUSE_REDUCTION;
      orig_var = var = OMP_CLAUSE_DECL (c);
      if (TREE_CODE (var) == MEM_REF)
	{
	  /* Array-section reduction: peel the MEM_REF down to the
	     underlying base decl.  */
	  var = TREE_OPERAND (var, 0);
	  if (TREE_CODE (var) == POINTER_PLUS_EXPR)
	    var = TREE_OPERAND (var, 0);
	  if (TREE_CODE (var) == ADDR_EXPR)
	    var = TREE_OPERAND (var, 0);
	  else
	    {
	      /* If this is a pointer or referenced based array
		 section, the var could be private in the outer
		 context e.g. on orphaned loop construct.  Pretend this
		 is private variable's outer reference.  */
	      ccode = OMP_CLAUSE_PRIVATE;
	      if (TREE_CODE (var) == INDIRECT_REF)
		var = TREE_OPERAND (var, 0);
	    }
	  orig_var = var;
	  if (is_variable_sized (var))
	    {
	      /* Variable-sized bases live behind their DECL_VALUE_EXPR;
		 dig out the actual pointer decl.  */
	      gcc_assert (DECL_HAS_VALUE_EXPR_P (var));
	      var = DECL_VALUE_EXPR (var);
	      gcc_assert (TREE_CODE (var) == INDIRECT_REF);
	      var = TREE_OPERAND (var, 0);
	      gcc_assert (DECL_P (var));
	    }
	}
      new_var = lookup_decl (var, ctx);
      if (var == OMP_CLAUSE_DECL (c) && omp_is_reference (var))
	new_var = build_simple_mem_ref_loc (clause_loc, new_var);
      ref = build_outer_var_ref (var, ctx, ccode);
      code = OMP_CLAUSE_REDUCTION_CODE (c);

      /* reduction(-:var) sums up the partial results, so it acts
	 identically to reduction(+:var).  */
      if (code == MINUS_EXPR)
	code = PLUS_EXPR;

      if (count == 1)
	{
	  /* Single scalar reduction: emit "#pragma omp atomic" style
	     update *addr = *addr OP new_var and we're done.  */
	  tree addr = build_fold_addr_expr_loc (clause_loc, ref);

	  addr = save_expr (addr);
	  ref = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (addr)), addr);
	  x = fold_build2_loc (clause_loc, code, TREE_TYPE (ref), ref, new_var);
	  x = build2 (OMP_ATOMIC, void_type_node, addr, x);
	  OMP_ATOMIC_MEMORY_ORDER (x) = OMP_MEMORY_ORDER_RELAXED;
	  gimplify_and_add (x, stmt_seqp);
	  return;
	}
      else if (TREE_CODE (OMP_CLAUSE_DECL (c)) == MEM_REF)
	{
	  /* Array-section reduction: iterate element-wise over the
	     section, merging private into outer with a manual loop
	     I = 0 .. V.  */
	  tree d = OMP_CLAUSE_DECL (c);
	  tree type = TREE_TYPE (d);
	  tree v = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
	  tree i = create_tmp_var (TREE_TYPE (v));
	  tree ptype = build_pointer_type (TREE_TYPE (type));
	  tree bias = TREE_OPERAND (d, 1);
	  d = TREE_OPERAND (d, 0);
	  if (TREE_CODE (d) == POINTER_PLUS_EXPR)
	    {
	      /* Fold the section's start offset into BIAS, using the
		 in-context copy of the offset decl when there is one.  */
	      tree b = TREE_OPERAND (d, 1);
	      b = maybe_lookup_decl (b, ctx);
	      if (b == NULL)
		{
		  b = TREE_OPERAND (d, 1);
		  b = maybe_lookup_decl_in_outer_ctx (b, ctx);
		}
	      if (integer_zerop (bias))
		bias = b;
	      else
		{
		  bias = fold_convert_loc (clause_loc, TREE_TYPE (b), bias);
		  bias = fold_build2_loc (clause_loc, PLUS_EXPR,
					  TREE_TYPE (b), b, bias);
		}
	      d = TREE_OPERAND (d, 0);
	    }
	  /* For ref build_outer_var_ref already performs this, so
	     only new_var needs a dereference.  */
	  if (TREE_CODE (d) == INDIRECT_REF)
	    {
	      new_var = build_simple_mem_ref_loc (clause_loc, new_var);
	      gcc_assert (omp_is_reference (var) && var == orig_var);
	    }
	  else if (TREE_CODE (d) == ADDR_EXPR)
	    {
	      if (orig_var == var)
		{
		  new_var = build_fold_addr_expr (new_var);
		  ref = build_fold_addr_expr (ref);
		}
	    }
	  else
	    {
	      gcc_assert (orig_var == var);
	      if (omp_is_reference (var))
		ref = build_fold_addr_expr (ref);
	    }
	  if (DECL_P (v))
	    {
	      /* The section length may itself be a privatized decl;
		 resolve it relative to this context.  */
	      tree t = maybe_lookup_decl (v, ctx);
	      if (t)
		v = t;
	      else
		v = maybe_lookup_decl_in_outer_ctx (v, ctx);
	      gimplify_expr (&v, stmt_seqp, NULL, is_gimple_val, fb_rvalue);
	    }
	  if (!integer_zerop (bias))
	    {
	      bias = fold_convert_loc (clause_loc, sizetype, bias);
	      new_var = fold_build2_loc (clause_loc, POINTER_PLUS_EXPR,
					 TREE_TYPE (new_var), new_var,
					 unshare_expr (bias));
	      ref = fold_build2_loc (clause_loc, POINTER_PLUS_EXPR,
				     TREE_TYPE (ref), ref, bias);
	    }
	  new_var = fold_convert_loc (clause_loc, ptype, new_var);
	  ref = fold_convert_loc (clause_loc, ptype, ref);
	  /* Use fresh pointer temps M as loop induction pointers so the
	     originals stay untouched.  */
	  tree m = create_tmp_var (ptype);
	  gimplify_assign (m, new_var, stmt_seqp);
	  new_var = m;
	  m = create_tmp_var (ptype);
	  gimplify_assign (m, ref, stmt_seqp);
	  ref = m;
	  gimplify_assign (i, build_int_cst (TREE_TYPE (v), 0), stmt_seqp);
	  tree body = create_artificial_label (UNKNOWN_LOCATION);
	  tree end = create_artificial_label (UNKNOWN_LOCATION);
	  gimple_seq_add_stmt (&sub_seq, gimple_build_label (body));
	  tree priv = build_simple_mem_ref_loc (clause_loc, new_var);
	  tree out = build_simple_mem_ref_loc (clause_loc, ref);
	  if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
	    {
	      /* User-defined reduction: splice in the combiner with the
		 placeholders bound to the out/priv elements.  */
	      tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
	      tree decl_placeholder
		= OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c);
	      SET_DECL_VALUE_EXPR (placeholder, out);
	      DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
	      SET_DECL_VALUE_EXPR (decl_placeholder, priv);
	      DECL_HAS_VALUE_EXPR_P (decl_placeholder) = 1;
	      lower_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c), ctx);
	      gimple_seq_add_seq (&sub_seq,
				  OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c));
	      OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
	      OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) = NULL;
	      OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c) = NULL;
	    }
	  else
	    {
	      x = build2 (code, TREE_TYPE (out), out, priv);
	      out = unshare_expr (out);
	      gimplify_assign (out, x, &sub_seq);
	    }
	  /* Advance both pointers by one element and loop while
	     I <= V.  */
	  gimple *g = gimple_build_assign (new_var, POINTER_PLUS_EXPR, new_var,
					   TYPE_SIZE_UNIT (TREE_TYPE (type)));
	  gimple_seq_add_stmt (&sub_seq, g);
	  g = gimple_build_assign (ref, POINTER_PLUS_EXPR, ref,
				   TYPE_SIZE_UNIT (TREE_TYPE (type)));
	  gimple_seq_add_stmt (&sub_seq, g);
	  g = gimple_build_assign (i, PLUS_EXPR, i,
				   build_int_cst (TREE_TYPE (i), 1));
	  gimple_seq_add_stmt (&sub_seq, g);
	  g = gimple_build_cond (LE_EXPR, i, v, body, end);
	  gimple_seq_add_stmt (&sub_seq, g);
	  gimple_seq_add_stmt (&sub_seq, gimple_build_label (end));
	}
      else if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
	{
	  /* Scalar user-defined reduction: bind the placeholder to the
	     outer reference and splice in the lowered combiner.  */
	  tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);

	  if (omp_is_reference (var)
	      && !useless_type_conversion_p (TREE_TYPE (placeholder),
					     TREE_TYPE (ref)))
	    ref = build_fold_addr_expr_loc (clause_loc, ref);
	  SET_DECL_VALUE_EXPR (placeholder, ref);
	  DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
	  lower_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c), ctx);
	  gimple_seq_add_seq (&sub_seq, OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c));
	  OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
	  OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) = NULL;
	}
      else
	{
	  /* Plain scalar reduction: ref = ref OP new_var.  */
	  x = build2 (code, TREE_TYPE (ref), ref, new_var);
	  ref = build_outer_var_ref (var, ctx);
	  gimplify_assign (ref, x, &sub_seq);
	}
    }

  /* Protect the accumulated merges with the global atomic lock.  */
  stmt = gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_ATOMIC_START),
			    0);
  gimple_seq_add_stmt (stmt_seqp, stmt);

  gimple_seq_add_seq (stmt_seqp, sub_seq);

  stmt = gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_ATOMIC_END),
			    0);
  gimple_seq_add_stmt (stmt_seqp, stmt);
}
6031
6032
6033 /* Generate code to implement the COPYPRIVATE clauses. */
6034
6035 static void
6036 lower_copyprivate_clauses (tree clauses, gimple_seq *slist, gimple_seq *rlist,
6037 omp_context *ctx)
6038 {
6039 tree c;
6040
6041 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
6042 {
6043 tree var, new_var, ref, x;
6044 bool by_ref;
6045 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
6046
6047 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_COPYPRIVATE)
6048 continue;
6049
6050 var = OMP_CLAUSE_DECL (c);
6051 by_ref = use_pointer_for_field (var, NULL);
6052
6053 ref = build_sender_ref (var, ctx);
6054 x = new_var = lookup_decl_in_outer_ctx (var, ctx);
6055 if (by_ref)
6056 {
6057 x = build_fold_addr_expr_loc (clause_loc, new_var);
6058 x = fold_convert_loc (clause_loc, TREE_TYPE (ref), x);
6059 }
6060 gimplify_assign (ref, x, slist);
6061
6062 ref = build_receiver_ref (var, false, ctx);
6063 if (by_ref)
6064 {
6065 ref = fold_convert_loc (clause_loc,
6066 build_pointer_type (TREE_TYPE (new_var)),
6067 ref);
6068 ref = build_fold_indirect_ref_loc (clause_loc, ref);
6069 }
6070 if (omp_is_reference (var))
6071 {
6072 ref = fold_convert_loc (clause_loc, TREE_TYPE (new_var), ref);
6073 ref = build_simple_mem_ref_loc (clause_loc, ref);
6074 new_var = build_simple_mem_ref_loc (clause_loc, new_var);
6075 }
6076 x = lang_hooks.decls.omp_clause_assign_op (c, new_var, ref);
6077 gimplify_and_add (x, rlist);
6078 }
6079 }
6080
6081
6082 /* Generate code to implement the clauses, FIRSTPRIVATE, COPYIN, LASTPRIVATE,
6083 and REDUCTION from the sender (aka parent) side. */
6084
static void
lower_send_clauses (tree clauses, gimple_seq *ilist, gimple_seq *olist,
		    omp_context *ctx)
{
  tree c, t;
  int ignored_looptemp = 0;
  bool is_taskloop = false;

  /* For taskloop, ignore first two _looptemp_ clauses, those are initialized
     by GOMP_taskloop.  */
  if (is_task_ctx (ctx) && gimple_omp_task_taskloop_p (ctx->stmt))
    {
      ignored_looptemp = 2;
      is_taskloop = true;
    }

  for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
    {
      tree val, ref, x, var;
      bool by_ref, do_in = false, do_out = false;
      location_t clause_loc = OMP_CLAUSE_LOCATION (c);

      /* First filter: decide which clause kinds require any data
	 movement between sender and receiver at all.  */
      switch (OMP_CLAUSE_CODE (c))
	{
	case OMP_CLAUSE_PRIVATE:
	  if (OMP_CLAUSE_PRIVATE_OUTER_REF (c))
	    break;
	  continue;
	case OMP_CLAUSE_FIRSTPRIVATE:
	case OMP_CLAUSE_COPYIN:
	case OMP_CLAUSE_LASTPRIVATE:
	case OMP_CLAUSE_IN_REDUCTION:
	case OMP_CLAUSE__REDUCTEMP_:
	  break;
	case OMP_CLAUSE_REDUCTION:
	  if (is_task_ctx (ctx) || OMP_CLAUSE_REDUCTION_TASK (c))
	    continue;
	  break;
	case OMP_CLAUSE_SHARED:
	  if (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
	    break;
	  continue;
	case OMP_CLAUSE__LOOPTEMP_:
	  if (ignored_looptemp)
	    {
	      ignored_looptemp--;
	      continue;
	    }
	  break;
	default:
	  continue;
	}

      val = OMP_CLAUSE_DECL (c);
      /* Array-section reductions carry a MEM_REF decl; peel it back to
	 the underlying base variable.  */
      if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
	   || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_IN_REDUCTION)
	  && TREE_CODE (val) == MEM_REF)
	{
	  val = TREE_OPERAND (val, 0);
	  if (TREE_CODE (val) == POINTER_PLUS_EXPR)
	    val = TREE_OPERAND (val, 0);
	  if (TREE_CODE (val) == INDIRECT_REF
	      || TREE_CODE (val) == ADDR_EXPR)
	    val = TREE_OPERAND (val, 0);
	  if (is_variable_sized (val))
	    continue;
	}

      /* For OMP_CLAUSE_SHARED_FIRSTPRIVATE, look beyond the
	 outer taskloop region.  */
      omp_context *ctx_for_o = ctx;
      if (is_taskloop
	  && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED
	  && OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
	ctx_for_o = ctx->outer;

      var = lookup_decl_in_outer_ctx (val, ctx_for_o);

      /* Global variables are generally visible without marshalling;
	 skip them except in the cases where copying is still needed.  */
      if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_COPYIN
	  && is_global_var (var)
	  && (val == OMP_CLAUSE_DECL (c)
	      || !is_task_ctx (ctx)
	      || (TREE_CODE (TREE_TYPE (val)) != POINTER_TYPE
		  && (TREE_CODE (TREE_TYPE (val)) != REFERENCE_TYPE
		      || (TREE_CODE (TREE_TYPE (TREE_TYPE (val)))
			  != POINTER_TYPE)))))
	continue;

      /* Member accesses are represented through a dummy variable whose
	 DECL_VALUE_EXPR gives the real access expression.  */
      t = omp_member_access_dummy_var (var);
      if (t)
	{
	  var = DECL_VALUE_EXPR (var);
	  tree o = maybe_lookup_decl_in_outer_ctx (t, ctx_for_o);
	  if (o != t)
	    var = unshare_and_remap (var, t, o);
	  else
	    var = unshare_expr (var);
	}

      if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED)
	{
	  /* Handle taskloop firstprivate/lastprivate, where the
	     lastprivate on GIMPLE_OMP_TASK is represented as
	     OMP_CLAUSE_SHARED_FIRSTPRIVATE.  */
	  tree f = lookup_sfield ((splay_tree_key) &DECL_UID (val), ctx);
	  x = omp_build_component_ref (ctx->sender_decl, f);
	  if (use_pointer_for_field (val, ctx))
	    var = build_fold_addr_expr (var);
	  gimplify_assign (x, var, ilist);
	  DECL_ABSTRACT_ORIGIN (f) = NULL;
	  continue;
	}

      if (((OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION
	    && OMP_CLAUSE_CODE (c) != OMP_CLAUSE_IN_REDUCTION)
	   || val == OMP_CLAUSE_DECL (c))
	  && is_variable_sized (val))
	continue;
      by_ref = use_pointer_for_field (val, NULL);

      /* Second filter: work out the copy direction (in to the region,
	 out of it, or both) for each clause kind.  */
      switch (OMP_CLAUSE_CODE (c))
	{
	case OMP_CLAUSE_FIRSTPRIVATE:
	  if (OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (c)
	      && !by_ref
	      && is_task_ctx (ctx))
	    TREE_NO_WARNING (var) = 1;
	  do_in = true;
	  break;

	case OMP_CLAUSE_PRIVATE:
	case OMP_CLAUSE_COPYIN:
	case OMP_CLAUSE__LOOPTEMP_:
	case OMP_CLAUSE__REDUCTEMP_:
	  do_in = true;
	  break;

	case OMP_CLAUSE_LASTPRIVATE:
	  if (by_ref || omp_is_reference (val))
	    {
	      if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
		continue;
	      do_in = true;
	    }
	  else
	    {
	      do_out = true;
	      if (lang_hooks.decls.omp_private_outer_ref (val))
		do_in = true;
	    }
	  break;

	case OMP_CLAUSE_REDUCTION:
	case OMP_CLAUSE_IN_REDUCTION:
	  do_in = true;
	  if (val == OMP_CLAUSE_DECL (c))
	    {
	      if (is_task_ctx (ctx))
		by_ref = use_pointer_for_field (val, ctx);
	      else
		do_out = !(by_ref || omp_is_reference (val));
	    }
	  else
	    by_ref = TREE_CODE (TREE_TYPE (val)) == ARRAY_TYPE;
	  break;

	default:
	  gcc_unreachable ();
	}

      if (do_in)
	{
	  ref = build_sender_ref (val, ctx);
	  x = by_ref ? build_fold_addr_expr_loc (clause_loc, var) : var;
	  gimplify_assign (ref, x, ilist);
	  if (is_task_ctx (ctx))
	    DECL_ABSTRACT_ORIGIN (TREE_OPERAND (ref, 1)) = NULL;
	}

      if (do_out)
	{
	  ref = build_sender_ref (val, ctx);
	  gimplify_assign (var, ref, olist);
	}
    }
}
6271
6272 /* Generate code to implement SHARED from the sender (aka parent)
6273 side. This is trickier, since GIMPLE_OMP_PARALLEL_CLAUSES doesn't
6274 list things that got automatically shared. */
6275
static void
lower_send_shared_vars (gimple_seq *ilist, gimple_seq *olist, omp_context *ctx)
{
  tree var, ovar, nvar, t, f, x, record_type;

  if (ctx->record_type == NULL)
    return;

  /* Walk the fields of the communication record; each field created
     for an implicitly shared variable has that variable as its
     DECL_ABSTRACT_ORIGIN.  */
  record_type = ctx->srecord_type ? ctx->srecord_type : ctx->record_type;
  for (f = TYPE_FIELDS (record_type); f ; f = DECL_CHAIN (f))
    {
      ovar = DECL_ABSTRACT_ORIGIN (f);
      if (!ovar || TREE_CODE (ovar) == FIELD_DECL)
	continue;

      nvar = maybe_lookup_decl (ovar, ctx);
      if (!nvar || !DECL_HAS_VALUE_EXPR_P (nvar))
	continue;

      /* If CTX is a nested parallel directive.  Find the immediately
	 enclosing parallel or workshare construct that contains a
	 mapping for OVAR.  */
      var = lookup_decl_in_outer_ctx (ovar, ctx);

      /* Member accesses go through a dummy variable; substitute the
	 real access expression.  */
      t = omp_member_access_dummy_var (var);
      if (t)
	{
	  var = DECL_VALUE_EXPR (var);
	  tree o = maybe_lookup_decl_in_outer_ctx (t, ctx);
	  if (o != t)
	    var = unshare_and_remap (var, t, o);
	  else
	    var = unshare_expr (var);
	}

      if (use_pointer_for_field (ovar, ctx))
	{
	  /* Pass the variable's address; no copy-back is needed.  */
	  x = build_sender_ref (ovar, ctx);
	  var = build_fold_addr_expr (var);
	  gimplify_assign (x, var, ilist);
	}
      else
	{
	  /* Pass by value: copy in before the region, and copy back
	     out afterwards where a store is legal and useful.  */
	  x = build_sender_ref (ovar, ctx);
	  gimplify_assign (x, var, ilist);

	  if (!TREE_READONLY (var)
	      /* We don't need to receive a new reference to a result
	         or parm decl.  In fact we may not store to it as we will
		 invalidate any pending RSO and generate wrong gimple
		 during inlining.  */
	      && !((TREE_CODE (var) == RESULT_DECL
		    || TREE_CODE (var) == PARM_DECL)
		   && DECL_BY_REFERENCE (var)))
	    {
	      x = build_sender_ref (ovar, ctx);
	      gimplify_assign (var, x, olist);
	    }
	}
    }
}
6337
/* Emit an OpenACC head marker call, encapsulating the partitioning and
   other information that must be processed by the target compiler.
   Return the maximum number of dimensions the associated loop might
   be partitioned over.  */
6342
static unsigned
lower_oacc_head_mark (location_t loc, tree ddvar, tree clauses,
		      gimple_seq *seq, omp_context *ctx)
{
  unsigned levels = 0;
  unsigned tag = 0;
  tree gang_static = NULL_TREE;
  auto_vec<tree, 5> args;

  args.quick_push (build_int_cst
		   (integer_type_node, IFN_UNIQUE_OACC_HEAD_MARK));
  args.quick_push (ddvar);
  /* Accumulate the OLF_* partitioning tags and count how many
     partitioning levels (gang/worker/vector) are explicitly
     requested.  */
  for (tree c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
    {
      switch (OMP_CLAUSE_CODE (c))
	{
	case OMP_CLAUSE_GANG:
	  tag |= OLF_DIM_GANG;
	  gang_static = OMP_CLAUSE_GANG_STATIC_EXPR (c);
	  /* static:* is represented by -1, and we can ignore it, as
	     scheduling is always static.  */
	  if (gang_static && integer_minus_onep (gang_static))
	    gang_static = NULL_TREE;
	  levels++;
	  break;

	case OMP_CLAUSE_WORKER:
	  tag |= OLF_DIM_WORKER;
	  levels++;
	  break;

	case OMP_CLAUSE_VECTOR:
	  tag |= OLF_DIM_VECTOR;
	  levels++;
	  break;

	case OMP_CLAUSE_SEQ:
	  tag |= OLF_SEQ;
	  break;

	case OMP_CLAUSE_AUTO:
	  tag |= OLF_AUTO;
	  break;

	case OMP_CLAUSE_INDEPENDENT:
	  tag |= OLF_INDEPENDENT;
	  break;

	case OMP_CLAUSE_TILE:
	  tag |= OLF_TILE;
	  break;

	default:
	  continue;
	}
    }

  if (gang_static)
    {
      if (DECL_P (gang_static))
	gang_static = build_outer_var_ref (gang_static, ctx);
      tag |= OLF_GANG_STATIC;
    }

  /* In a parallel region, loops are implicitly INDEPENDENT.  */
  omp_context *tgt = enclosing_target_ctx (ctx);
  if (!tgt || is_oacc_parallel (tgt))
    tag |= OLF_INDEPENDENT;

  if (tag & OLF_TILE)
    /* Tiling could use all 3 levels.  */
    levels = 3;
  else
    {
      /* A loop lacking SEQ, GANG, WORKER and/or VECTOR could be AUTO.
	 Ensure at least one level, or 2 for possible auto
	 partitioning */
      bool maybe_auto = !(tag & (((GOMP_DIM_MASK (GOMP_DIM_MAX) - 1)
				  << OLF_DIM_BASE) | OLF_SEQ));

      if (levels < 1u + maybe_auto)
	levels = 1u + maybe_auto;
    }

  args.quick_push (build_int_cst (integer_type_node, levels));
  args.quick_push (build_int_cst (integer_type_node, tag));
  if (gang_static)
    args.quick_push (gang_static);

  /* Emit IFN_UNIQUE (HEAD_MARK, ddvar, levels, tag [, gang_static]).  */
  gcall *call = gimple_build_call_internal_vec (IFN_UNIQUE, args);
  gimple_set_location (call, loc);
  gimple_set_lhs (call, ddvar);
  gimple_seq_add_stmt (seq, call);

  return levels;
}
6439
/* Emit an OpenACC loop head or tail marker to SEQ.  HEAD selects between
   the head and tail marker; TOFOLLOW, when non-NULL, gives the
   partitioning level of the enclosed region.  */
6442
6443 static void
6444 lower_oacc_loop_marker (location_t loc, tree ddvar, bool head,
6445 tree tofollow, gimple_seq *seq)
6446 {
6447 int marker_kind = (head ? IFN_UNIQUE_OACC_HEAD_MARK
6448 : IFN_UNIQUE_OACC_TAIL_MARK);
6449 tree marker = build_int_cst (integer_type_node, marker_kind);
6450 int nargs = 2 + (tofollow != NULL_TREE);
6451 gcall *call = gimple_build_call_internal (IFN_UNIQUE, nargs,
6452 marker, ddvar, tofollow);
6453 gimple_set_location (call, loc);
6454 gimple_set_lhs (call, ddvar);
6455 gimple_seq_add_stmt (seq, call);
6456 }
6457
6458 /* Generate the before and after OpenACC loop sequences. CLAUSES are
6459 the loop clauses, from which we extract reductions. Initialize
6460 HEAD and TAIL. */
6461
static void
lower_oacc_head_tail (location_t loc, tree clauses,
		      gimple_seq *head, gimple_seq *tail, omp_context *ctx)
{
  bool inner = false;
  tree ddvar = create_tmp_var (integer_type_node, ".data_dep");
  gimple_seq_add_stmt (head, gimple_build_assign (ddvar, integer_zero_node));

  unsigned count = lower_oacc_head_mark (loc, ddvar, clauses, head, ctx);
  tree fork_kind = build_int_cst (unsigned_type_node, IFN_UNIQUE_OACC_FORK);
  tree join_kind = build_int_cst (unsigned_type_node, IFN_UNIQUE_OACC_JOIN);

  gcc_assert (count);
  /* Emit one fork/join pair per partitioning level: forks are appended
     to HEAD outermost-first, joins are prepended to TAIL so the levels
     nest correctly.  */
  for (unsigned done = 1; count; count--, done++)
    {
      gimple_seq fork_seq = NULL;
      gimple_seq join_seq = NULL;

      tree place = build_int_cst (integer_type_node, -1);
      gcall *fork = gimple_build_call_internal (IFN_UNIQUE, 3,
						fork_kind, ddvar, place);
      gimple_set_location (fork, loc);
      gimple_set_lhs (fork, ddvar);

      gcall *join = gimple_build_call_internal (IFN_UNIQUE, 3,
						join_kind, ddvar, place);
      gimple_set_location (join, loc);
      gimple_set_lhs (join, ddvar);

      /* Mark the beginning of this level sequence.  */
      if (inner)
	lower_oacc_loop_marker (loc, ddvar, true,
				build_int_cst (integer_type_node, count),
				&fork_seq);
      lower_oacc_loop_marker (loc, ddvar, false,
			      build_int_cst (integer_type_node, done),
			      &join_seq);

      lower_oacc_reductions (loc, clauses, place, inner,
			     fork, join, &fork_seq, &join_seq, ctx);

      /* Append this level to head.  */
      gimple_seq_add_seq (head, fork_seq);
      /* Prepend it to tail.  */
      gimple_seq_add_seq (&join_seq, *tail);
      *tail = join_seq;

      inner = true;
    }

  /* Mark the end of the sequence.  */
  lower_oacc_loop_marker (loc, ddvar, true, NULL_TREE, head);
  lower_oacc_loop_marker (loc, ddvar, false, NULL_TREE, tail);
}
6516
6517 /* If exceptions are enabled, wrap the statements in BODY in a MUST_NOT_THROW
6518 catch handler and return it. This prevents programs from violating the
6519 structured block semantics with throws. */
6520
6521 static gimple_seq
6522 maybe_catch_exception (gimple_seq body)
6523 {
6524 gimple *g;
6525 tree decl;
6526
6527 if (!flag_exceptions)
6528 return body;
6529
6530 if (lang_hooks.eh_protect_cleanup_actions != NULL)
6531 decl = lang_hooks.eh_protect_cleanup_actions ();
6532 else
6533 decl = builtin_decl_explicit (BUILT_IN_TRAP);
6534
6535 g = gimple_build_eh_must_not_throw (decl);
6536 g = gimple_build_try (body, gimple_seq_alloc_with_stmt (g),
6537 GIMPLE_TRY_CATCH);
6538
6539 return gimple_seq_alloc_with_stmt (g);
6540 }
6541
6542 \f
6543 /* Routines to lower OMP directives into OMP-GIMPLE. */
6544
6545 /* If ctx is a worksharing context inside of a cancellable parallel
6546 region and it isn't nowait, add lhs to its GIMPLE_OMP_RETURN
6547 and conditional branch to parallel's cancel_label to handle
6548 cancellation in the implicit barrier. */
6549
static void
maybe_add_implicit_barrier_cancel (omp_context *ctx, gimple *omp_return,
				   gimple_seq *body)
{
  gcc_assert (gimple_code (omp_return) == GIMPLE_OMP_RETURN);
  /* A nowait construct has no implicit barrier, so nothing to do.  */
  if (gimple_omp_return_nowait_p (omp_return))
    return;
  /* Walk outwards through enclosing contexts, looking through
     taskgroups, until the innermost parallel is found; stop at any
     other construct.  */
  for (omp_context *outer = ctx->outer; outer; outer = outer->outer)
    if (gimple_code (outer->stmt) == GIMPLE_OMP_PARALLEL
	&& outer->cancellable)
      {
	tree fndecl = builtin_decl_explicit (BUILT_IN_GOMP_CANCEL);
	tree c_bool_type = TREE_TYPE (TREE_TYPE (fndecl));
	tree lhs = create_tmp_var (c_bool_type);
	gimple_omp_return_set_lhs (omp_return, lhs);
	tree fallthru_label = create_artificial_label (UNKNOWN_LOCATION);
	/* If the barrier's return value reports cancellation, branch
	   to the enclosing parallel's cancel label.  */
	gimple *g = gimple_build_cond (NE_EXPR, lhs,
				       fold_convert (c_bool_type,
						     boolean_false_node),
				       outer->cancel_label, fallthru_label);
	gimple_seq_add_stmt (body, g);
	gimple_seq_add_stmt (body, gimple_build_label (fallthru_label));
      }
    else if (gimple_code (outer->stmt) != GIMPLE_OMP_TASKGROUP)
      return;
}
6576
6577 /* Find the first task_reduction or reduction clause or return NULL
6578 if there are none. */
6579
6580 static inline tree
6581 omp_task_reductions_find_first (tree clauses, enum tree_code code,
6582 enum omp_clause_code ccode)
6583 {
6584 while (1)
6585 {
6586 clauses = omp_find_clause (clauses, ccode);
6587 if (clauses == NULL_TREE)
6588 return NULL_TREE;
6589 if (ccode != OMP_CLAUSE_REDUCTION
6590 || code == OMP_TASKLOOP
6591 || OMP_CLAUSE_REDUCTION_TASK (clauses))
6592 return clauses;
6593 clauses = OMP_CLAUSE_CHAIN (clauses);
6594 }
6595 }
6596
6597 static void lower_omp_task_reductions (omp_context *, enum tree_code, tree,
6598 gimple_seq *, gimple_seq *);
6599
6600 /* Lower the OpenMP sections directive in the current statement in GSI_P.
6601 CTX is the enclosing OMP context for the current statement. */
6602
static void
lower_omp_sections (gimple_stmt_iterator *gsi_p, omp_context *ctx)
{
  tree block, control;
  gimple_stmt_iterator tgsi;
  gomp_sections *stmt;
  gimple *t;
  gbind *new_stmt, *bind;
  gimple_seq ilist, dlist, olist, tred_dlist = NULL, new_body;

  stmt = as_a <gomp_sections *> (gsi_stmt (*gsi_p));

  push_gimplify_context ();

  dlist = NULL;
  ilist = NULL;

  /* If there are task reductions, prepend a _reductemp_ clause that
     will hold the runtime-allocated reduction buffer.  */
  tree rclauses
    = omp_task_reductions_find_first (gimple_omp_sections_clauses (stmt),
				      OMP_SECTIONS, OMP_CLAUSE_REDUCTION);
  tree rtmp = NULL_TREE;
  if (rclauses)
    {
      tree type = build_pointer_type (pointer_sized_int_node);
      tree temp = create_tmp_var (type);
      tree c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__REDUCTEMP_);
      OMP_CLAUSE_DECL (c) = temp;
      OMP_CLAUSE_CHAIN (c) = gimple_omp_sections_clauses (stmt);
      gimple_omp_sections_set_clauses (stmt, c);
      lower_omp_task_reductions (ctx, OMP_SECTIONS,
				 gimple_omp_sections_clauses (stmt),
				 &ilist, &tred_dlist);
      rclauses = c;
      rtmp = make_ssa_name (type);
      gimple_seq_add_stmt (&ilist, gimple_build_assign (rtmp, temp));
    }

  lower_rec_input_clauses (gimple_omp_sections_clauses (stmt),
			   &ilist, &dlist, ctx, NULL);

  /* Lower every GIMPLE_OMP_SECTION body in place, attach lastprivate
     handling to the final one, and terminate each with a return.  */
  new_body = gimple_omp_body (stmt);
  gimple_omp_set_body (stmt, NULL);
  tgsi = gsi_start (new_body);
  for (; !gsi_end_p (tgsi); gsi_next (&tgsi))
    {
      omp_context *sctx;
      gimple *sec_start;

      sec_start = gsi_stmt (tgsi);
      sctx = maybe_lookup_ctx (sec_start);
      gcc_assert (sctx);

      lower_omp (gimple_omp_body_ptr (sec_start), sctx);
      gsi_insert_seq_after (&tgsi, gimple_omp_body (sec_start),
			    GSI_CONTINUE_LINKING);
      gimple_omp_set_body (sec_start, NULL);

      if (gsi_one_before_end_p (tgsi))
	{
	  gimple_seq l = NULL;
	  lower_lastprivate_clauses (gimple_omp_sections_clauses (stmt), NULL,
				     &l, ctx);
	  gsi_insert_seq_after (&tgsi, l, GSI_CONTINUE_LINKING);
	  gimple_omp_section_set_last (sec_start);
	}

      gsi_insert_after (&tgsi, gimple_build_omp_return (false),
			GSI_CONTINUE_LINKING);
    }

  block = make_node (BLOCK);
  bind = gimple_build_bind (NULL, new_body, block);

  olist = NULL;
  lower_reduction_clauses (gimple_omp_sections_clauses (stmt), &olist, ctx);

  block = make_node (BLOCK);
  new_stmt = gimple_build_bind (NULL, NULL, block);
  gsi_replace (gsi_p, new_stmt, true);

  pop_gimplify_context (new_stmt);
  gimple_bind_append_vars (new_stmt, ctx->block_vars);
  BLOCK_VARS (block) = gimple_bind_vars (bind);
  if (BLOCK_VARS (block))
    TREE_USED (block) = 1;

  /* Assemble the replacement body: input clauses, the sections stmt
     and switch, the lowered section bodies, the continue/exit
     protocol, reductions and cleanups.  */
  new_body = NULL;
  gimple_seq_add_seq (&new_body, ilist);
  gimple_seq_add_stmt (&new_body, stmt);
  gimple_seq_add_stmt (&new_body, gimple_build_omp_sections_switch ());
  gimple_seq_add_stmt (&new_body, bind);

  control = create_tmp_var (unsigned_type_node, ".section");
  t = gimple_build_omp_continue (control, control);
  gimple_omp_sections_set_control (stmt, control);
  gimple_seq_add_stmt (&new_body, t);

  gimple_seq_add_seq (&new_body, olist);
  if (ctx->cancellable)
    gimple_seq_add_stmt (&new_body, gimple_build_label (ctx->cancel_label));
  gimple_seq_add_seq (&new_body, dlist);

  new_body = maybe_catch_exception (new_body);

  bool nowait = omp_find_clause (gimple_omp_sections_clauses (stmt),
				 OMP_CLAUSE_NOWAIT) != NULL_TREE;
  t = gimple_build_omp_return (nowait);
  gimple_seq_add_stmt (&new_body, t);
  gimple_seq_add_seq (&new_body, tred_dlist);
  maybe_add_implicit_barrier_cancel (ctx, t, &new_body);

  if (rclauses)
    OMP_CLAUSE_DECL (rclauses) = rtmp;

  gimple_bind_set_body (new_stmt, new_body);
}
6719
6720
6721 /* A subroutine of lower_omp_single. Expand the simple form of
6722 a GIMPLE_OMP_SINGLE, without a copyprivate clause:
6723
6724 if (GOMP_single_start ())
6725 BODY;
6726 [ GOMP_barrier (); ] -> unless 'nowait' is present.
6727
6728 FIXME. It may be better to delay expanding the logic of this until
6729 pass_expand_omp. The expanded logic may make the job more difficult
6730 to a synchronization analysis pass. */
6731
6732 static void
6733 lower_omp_single_simple (gomp_single *single_stmt, gimple_seq *pre_p)
6734 {
6735 location_t loc = gimple_location (single_stmt);
6736 tree tlabel = create_artificial_label (loc);
6737 tree flabel = create_artificial_label (loc);
6738 gimple *call, *cond;
6739 tree lhs, decl;
6740
6741 decl = builtin_decl_explicit (BUILT_IN_GOMP_SINGLE_START);
6742 lhs = create_tmp_var (TREE_TYPE (TREE_TYPE (decl)));
6743 call = gimple_build_call (decl, 0);
6744 gimple_call_set_lhs (call, lhs);
6745 gimple_seq_add_stmt (pre_p, call);
6746
6747 cond = gimple_build_cond (EQ_EXPR, lhs,
6748 fold_convert_loc (loc, TREE_TYPE (lhs),
6749 boolean_true_node),
6750 tlabel, flabel);
6751 gimple_seq_add_stmt (pre_p, cond);
6752 gimple_seq_add_stmt (pre_p, gimple_build_label (tlabel));
6753 gimple_seq_add_seq (pre_p, gimple_omp_body (single_stmt));
6754 gimple_seq_add_stmt (pre_p, gimple_build_label (flabel));
6755 }
6756
6757
6758 /* A subroutine of lower_omp_single. Expand the simple form of
6759 a GIMPLE_OMP_SINGLE, with a copyprivate clause:
6760
6761 #pragma omp single copyprivate (a, b, c)
6762
6763 Create a new structure to hold copies of 'a', 'b' and 'c' and emit:
6764
6765 {
6766 if ((copyout_p = GOMP_single_copy_start ()) == NULL)
6767 {
6768 BODY;
6769 copyout.a = a;
6770 copyout.b = b;
6771 copyout.c = c;
6772 GOMP_single_copy_end (&copyout);
6773 }
6774 else
6775 {
6776 a = copyout_p->a;
6777 b = copyout_p->b;
6778 c = copyout_p->c;
6779 }
6780 GOMP_barrier ();
6781 }
6782
6783 FIXME. It may be better to delay expanding the logic of this until
6784 pass_expand_omp. The expanded logic may make the job more difficult
6785 to a synchronization analysis pass. */
6786
static void
lower_omp_single_copy (gomp_single *single_stmt, gimple_seq *pre_p,
		       omp_context *ctx)
{
  tree ptr_type, t, l0, l1, l2, bfn_decl;
  gimple_seq copyin_seq;
  location_t loc = gimple_location (single_stmt);

  ctx->sender_decl = create_tmp_var (ctx->record_type, ".omp_copy_o");

  ptr_type = build_pointer_type (ctx->record_type);
  ctx->receiver_decl = create_tmp_var (ptr_type, ".omp_copy_i");

  l0 = create_artificial_label (loc);
  l1 = create_artificial_label (loc);
  l2 = create_artificial_label (loc);

  /* receiver = GOMP_single_copy_start ();  NULL means this thread won
     and must execute BODY; otherwise it points at the winner's
     record.  */
  bfn_decl = builtin_decl_explicit (BUILT_IN_GOMP_SINGLE_COPY_START);
  t = build_call_expr_loc (loc, bfn_decl, 0);
  t = fold_convert_loc (loc, ptr_type, t);
  gimplify_assign (ctx->receiver_decl, t, pre_p);

  t = build2 (EQ_EXPR, boolean_type_node, ctx->receiver_decl,
	      build_int_cst (ptr_type, 0));
  t = build3 (COND_EXPR, void_type_node, t,
	      build_and_jump (&l0), build_and_jump (&l1));
  gimplify_and_add (t, pre_p);

  /* l0: the winning thread runs BODY and publishes its copyprivate
     values through the sender record.  */
  gimple_seq_add_stmt (pre_p, gimple_build_label (l0));

  gimple_seq_add_seq (pre_p, gimple_omp_body (single_stmt));

  copyin_seq = NULL;
  lower_copyprivate_clauses (gimple_omp_single_clauses (single_stmt), pre_p,
			      &copyin_seq, ctx);

  t = build_fold_addr_expr_loc (loc, ctx->sender_decl);
  bfn_decl = builtin_decl_explicit (BUILT_IN_GOMP_SINGLE_COPY_END);
  t = build_call_expr_loc (loc, bfn_decl, 1, t);
  gimplify_and_add (t, pre_p);

  t = build_and_jump (&l2);
  gimplify_and_add (t, pre_p);

  /* l1: every other thread copies the broadcast values back in.  */
  gimple_seq_add_stmt (pre_p, gimple_build_label (l1));

  gimple_seq_add_seq (pre_p, copyin_seq);

  gimple_seq_add_stmt (pre_p, gimple_build_label (l2));
}
6837
6838
6839 /* Expand code for an OpenMP single directive. */
6840
static void
lower_omp_single (gimple_stmt_iterator *gsi_p, omp_context *ctx)
{
  tree block;
  gomp_single *single_stmt = as_a <gomp_single *> (gsi_stmt (*gsi_p));
  gbind *bind;
  gimple_seq bind_body, bind_body_tail = NULL, dlist;

  push_gimplify_context ();

  block = make_node (BLOCK);
  bind = gimple_build_bind (NULL, NULL, block);
  gsi_replace (gsi_p, bind, true);
  bind_body = NULL;
  dlist = NULL;
  lower_rec_input_clauses (gimple_omp_single_clauses (single_stmt),
			   &bind_body, &dlist, ctx, NULL);
  lower_omp (gimple_omp_body_ptr (single_stmt), ctx);

  gimple_seq_add_stmt (&bind_body, single_stmt);

  /* A copyprivate clause creates a communication record; use the
     broadcast form in that case, the simple GOMP_single_start form
     otherwise.  */
  if (ctx->record_type)
    lower_omp_single_copy (single_stmt, &bind_body, ctx);
  else
    lower_omp_single_simple (single_stmt, &bind_body);

  gimple_omp_set_body (single_stmt, NULL);

  gimple_seq_add_seq (&bind_body, dlist);

  bind_body = maybe_catch_exception (bind_body);

  bool nowait = omp_find_clause (gimple_omp_single_clauses (single_stmt),
				 OMP_CLAUSE_NOWAIT) != NULL_TREE;
  gimple *g = gimple_build_omp_return (nowait);
  gimple_seq_add_stmt (&bind_body_tail, g);
  maybe_add_implicit_barrier_cancel (ctx, g, &bind_body_tail);
  if (ctx->record_type)
    {
      /* Clobber the communication record after the construct so later
	 passes know its lifetime has ended.  */
      gimple_stmt_iterator gsi = gsi_start (bind_body_tail);
      tree clobber = build_constructor (ctx->record_type, NULL);
      TREE_THIS_VOLATILE (clobber) = 1;
      gsi_insert_after (&gsi, gimple_build_assign (ctx->sender_decl,
						   clobber), GSI_SAME_STMT);
    }
  gimple_seq_add_seq (&bind_body, bind_body_tail);
  gimple_bind_set_body (bind, bind_body);

  pop_gimplify_context (bind);

  gimple_bind_append_vars (bind, ctx->block_vars);
  BLOCK_VARS (block) = ctx->block_vars;
  if (BLOCK_VARS (block))
    TREE_USED (block) = 1;
}
6896
6897
6898 /* Expand code for an OpenMP master directive. */
6899
static void
lower_omp_master (gimple_stmt_iterator *gsi_p, omp_context *ctx)
{
  tree block, lab = NULL, x, bfn_decl;
  gimple *stmt = gsi_stmt (*gsi_p);
  gbind *bind;
  location_t loc = gimple_location (stmt);
  gimple_seq tseq;

  push_gimplify_context ();

  block = make_node (BLOCK);
  bind = gimple_build_bind (NULL, NULL, block);
  gsi_replace (gsi_p, bind, true);
  gimple_bind_add_stmt (bind, stmt);

  /* if (omp_get_thread_num () != 0) skip over the body.  */
  bfn_decl = builtin_decl_explicit (BUILT_IN_OMP_GET_THREAD_NUM);
  x = build_call_expr_loc (loc, bfn_decl, 0);
  x = build2 (EQ_EXPR, boolean_type_node, x, integer_zero_node);
  x = build3 (COND_EXPR, void_type_node, x, NULL, build_and_jump (&lab));
  tseq = NULL;
  gimplify_and_add (x, &tseq);
  gimple_bind_add_seq (bind, tseq);

  lower_omp (gimple_omp_body_ptr (stmt), ctx);
  gimple_omp_set_body (stmt, maybe_catch_exception (gimple_omp_body (stmt)));
  gimple_bind_add_seq (bind, gimple_omp_body (stmt));
  gimple_omp_set_body (stmt, NULL);

  gimple_bind_add_stmt (bind, gimple_build_label (lab));

  /* master has no implicit barrier, so the return is always nowait.  */
  gimple_bind_add_stmt (bind, gimple_build_omp_return (true));

  pop_gimplify_context (bind);

  gimple_bind_append_vars (bind, ctx->block_vars);
  BLOCK_VARS (block) = ctx->block_vars;
}
6938
/* Helper function for lower_omp_task_reductions.  For a specific PASS,
   find the next clause that should be processed, or return false
   if all have been processed already.  */
6942
6943 static inline bool
6944 omp_task_reduction_iterate (int pass, enum tree_code code,
6945 enum omp_clause_code ccode, tree *c, tree *decl,
6946 tree *type, tree *next)
6947 {
6948 for (; *c; *c = omp_find_clause (OMP_CLAUSE_CHAIN (*c), ccode))
6949 {
6950 if (ccode == OMP_CLAUSE_REDUCTION
6951 && code != OMP_TASKLOOP
6952 && !OMP_CLAUSE_REDUCTION_TASK (*c))
6953 continue;
6954 *decl = OMP_CLAUSE_DECL (*c);
6955 *type = TREE_TYPE (*decl);
6956 if (TREE_CODE (*decl) == MEM_REF)
6957 {
6958 if (pass != 1)
6959 continue;
6960 }
6961 else
6962 {
6963 if (omp_is_reference (*decl))
6964 *type = TREE_TYPE (*type);
6965 if (pass != (!TREE_CONSTANT (TYPE_SIZE_UNIT (*type))))
6966 continue;
6967 }
6968 *next = omp_find_clause (OMP_CLAUSE_CHAIN (*c), ccode);
6969 return true;
6970 }
6971 *decl = NULL_TREE;
6972 *type = NULL_TREE;
6973 *next = NULL_TREE;
6974 return false;
6975 }
6976
6977 /* Lower task_reduction and reduction clauses (the latter unless CODE is
6978 OMP_TASKGROUP only with task modifier). Register mapping of those in
6979 START sequence and reducing them and unregister them in the END sequence. */
6980
6981 static void
6982 lower_omp_task_reductions (omp_context *ctx, enum tree_code code, tree clauses,
6983 gimple_seq *start, gimple_seq *end)
6984 {
6985 enum omp_clause_code ccode
6986 = (code == OMP_TASKGROUP
6987 ? OMP_CLAUSE_TASK_REDUCTION : OMP_CLAUSE_REDUCTION);
6988 tree cancellable = NULL_TREE;
6989 clauses = omp_task_reductions_find_first (clauses, code, ccode);
6990 if (clauses == NULL_TREE)
6991 return;
6992 if (code == OMP_FOR || code == OMP_SECTIONS)
6993 {
6994 for (omp_context *outer = ctx->outer; outer; outer = outer->outer)
6995 if (gimple_code (outer->stmt) == GIMPLE_OMP_PARALLEL
6996 && outer->cancellable)
6997 {
6998 cancellable = error_mark_node;
6999 break;
7000 }
7001 else if (gimple_code (outer->stmt) != GIMPLE_OMP_TASKGROUP)
7002 break;
7003 }
7004 tree record_type = lang_hooks.types.make_type (RECORD_TYPE);
7005 tree *last = &TYPE_FIELDS (record_type);
7006 unsigned cnt = 0;
7007 if (cancellable)
7008 {
7009 tree field = build_decl (UNKNOWN_LOCATION, FIELD_DECL, NULL_TREE,
7010 ptr_type_node);
7011 tree ifield = build_decl (UNKNOWN_LOCATION, FIELD_DECL, NULL_TREE,
7012 integer_type_node);
7013 *last = field;
7014 DECL_CHAIN (field) = ifield;
7015 last = &DECL_CHAIN (ifield);
7016 DECL_CONTEXT (field) = record_type;
7017 if (TYPE_ALIGN (record_type) < DECL_ALIGN (field))
7018 SET_TYPE_ALIGN (record_type, DECL_ALIGN (field));
7019 DECL_CONTEXT (ifield) = record_type;
7020 if (TYPE_ALIGN (record_type) < DECL_ALIGN (ifield))
7021 SET_TYPE_ALIGN (record_type, DECL_ALIGN (ifield));
7022 }
7023 for (int pass = 0; pass < 2; pass++)
7024 {
7025 tree decl, type, next;
7026 for (tree c = clauses;
7027 omp_task_reduction_iterate (pass, code, ccode,
7028 &c, &decl, &type, &next); c = next)
7029 {
7030 ++cnt;
7031 tree new_type = type;
7032 if (ctx->outer)
7033 new_type = remap_type (type, &ctx->outer->cb);
7034 tree field
7035 = build_decl (OMP_CLAUSE_LOCATION (c), FIELD_DECL,
7036 DECL_P (decl) ? DECL_NAME (decl) : NULL_TREE,
7037 new_type);
7038 if (DECL_P (decl) && type == TREE_TYPE (decl))
7039 {
7040 SET_DECL_ALIGN (field, DECL_ALIGN (decl));
7041 DECL_USER_ALIGN (field) = DECL_USER_ALIGN (decl);
7042 TREE_THIS_VOLATILE (field) = TREE_THIS_VOLATILE (decl);
7043 }
7044 else
7045 SET_DECL_ALIGN (field, TYPE_ALIGN (type));
7046 DECL_CONTEXT (field) = record_type;
7047 if (TYPE_ALIGN (record_type) < DECL_ALIGN (field))
7048 SET_TYPE_ALIGN (record_type, DECL_ALIGN (field));
7049 *last = field;
7050 last = &DECL_CHAIN (field);
7051 tree bfield
7052 = build_decl (OMP_CLAUSE_LOCATION (c), FIELD_DECL, NULL_TREE,
7053 boolean_type_node);
7054 DECL_CONTEXT (bfield) = record_type;
7055 if (TYPE_ALIGN (record_type) < DECL_ALIGN (bfield))
7056 SET_TYPE_ALIGN (record_type, DECL_ALIGN (bfield));
7057 *last = bfield;
7058 last = &DECL_CHAIN (bfield);
7059 }
7060 }
7061 *last = NULL_TREE;
7062 layout_type (record_type);
7063
7064 /* Build up an array which registers with the runtime all the reductions
7065 and deregisters them at the end. Format documented in libgomp/task.c. */
7066 tree atype = build_array_type_nelts (pointer_sized_int_node, 7 + cnt * 3);
7067 tree avar = create_tmp_var_raw (atype);
7068 gimple_add_tmp_var (avar);
7069 TREE_ADDRESSABLE (avar) = 1;
7070 tree r = build4 (ARRAY_REF, pointer_sized_int_node, avar, size_zero_node,
7071 NULL_TREE, NULL_TREE);
7072 tree t = build_int_cst (pointer_sized_int_node, cnt);
7073 gimple_seq_add_stmt (start, gimple_build_assign (r, t));
7074 gimple_seq seq = NULL;
7075 tree sz = fold_convert (pointer_sized_int_node,
7076 TYPE_SIZE_UNIT (record_type));
7077 int cachesz = 64;
7078 sz = fold_build2 (PLUS_EXPR, pointer_sized_int_node, sz,
7079 build_int_cst (pointer_sized_int_node, cachesz - 1));
7080 sz = fold_build2 (BIT_AND_EXPR, pointer_sized_int_node, sz,
7081 build_int_cst (pointer_sized_int_node, ~(cachesz - 1)));
7082 ctx->task_reductions.create (1 + cnt);
7083 ctx->task_reduction_map = new hash_map<tree, unsigned>;
7084 ctx->task_reductions.quick_push (TREE_CODE (sz) == INTEGER_CST
7085 ? sz : NULL_TREE);
7086 sz = force_gimple_operand (sz, &seq, true, NULL_TREE);
7087 gimple_seq_add_seq (start, seq);
7088 r = build4 (ARRAY_REF, pointer_sized_int_node, avar, size_one_node,
7089 NULL_TREE, NULL_TREE);
7090 gimple_seq_add_stmt (start, gimple_build_assign (r, sz));
7091 r = build4 (ARRAY_REF, pointer_sized_int_node, avar, size_int (2),
7092 NULL_TREE, NULL_TREE);
7093 t = build_int_cst (pointer_sized_int_node,
7094 MAX (TYPE_ALIGN_UNIT (record_type), (unsigned) cachesz));
7095 gimple_seq_add_stmt (start, gimple_build_assign (r, t));
7096 r = build4 (ARRAY_REF, pointer_sized_int_node, avar, size_int (3),
7097 NULL_TREE, NULL_TREE);
7098 t = build_int_cst (pointer_sized_int_node, -1);
7099 gimple_seq_add_stmt (start, gimple_build_assign (r, t));
7100 r = build4 (ARRAY_REF, pointer_sized_int_node, avar, size_int (4),
7101 NULL_TREE, NULL_TREE);
7102 t = build_int_cst (pointer_sized_int_node, 0);
7103 gimple_seq_add_stmt (start, gimple_build_assign (r, t));
7104
7105 /* In end, build a loop that iterates from 0 to < omp_get_num_threads ()
7106 and for each task reduction checks a bool right after the private variable
7107 within that thread's chunk; if the bool is clear, it hasn't been
7108 initialized and thus isn't going to be reduced nor destructed, otherwise
7109 reduce and destruct it. */
7110 tree idx = create_tmp_var (size_type_node);
7111 gimple_seq_add_stmt (end, gimple_build_assign (idx, size_zero_node));
7112 tree num_thr_sz = create_tmp_var (size_type_node);
7113 tree lab1 = create_artificial_label (UNKNOWN_LOCATION);
7114 tree lab2 = create_artificial_label (UNKNOWN_LOCATION);
7115 tree lab3 = NULL_TREE;
7116 gimple *g;
7117 if (code == OMP_FOR || code == OMP_SECTIONS)
7118 {
7119 /* For worksharing constructs, only perform it in the master thread,
7120 with the exception of cancelled implicit barriers - then only handle
7121 the current thread. */
7122 tree lab4 = create_artificial_label (UNKNOWN_LOCATION);
7123 t = builtin_decl_explicit (BUILT_IN_OMP_GET_THREAD_NUM);
7124 tree thr_num = create_tmp_var (integer_type_node);
7125 g = gimple_build_call (t, 0);
7126 gimple_call_set_lhs (g, thr_num);
7127 gimple_seq_add_stmt (end, g);
7128 if (cancellable)
7129 {
7130 tree c;
7131 tree lab5 = create_artificial_label (UNKNOWN_LOCATION);
7132 tree lab6 = create_artificial_label (UNKNOWN_LOCATION);
7133 lab3 = create_artificial_label (UNKNOWN_LOCATION);
7134 if (code == OMP_FOR)
7135 c = gimple_omp_for_clauses (ctx->stmt);
7136 else /* if (code == OMP_SECTIONS) */
7137 c = gimple_omp_sections_clauses (ctx->stmt);
7138 c = OMP_CLAUSE_DECL (omp_find_clause (c, OMP_CLAUSE__REDUCTEMP_));
7139 cancellable = c;
7140 g = gimple_build_cond (NE_EXPR, c, build_zero_cst (TREE_TYPE (c)),
7141 lab5, lab6);
7142 gimple_seq_add_stmt (end, g);
7143 gimple_seq_add_stmt (end, gimple_build_label (lab5));
7144 g = gimple_build_assign (idx, NOP_EXPR, thr_num);
7145 gimple_seq_add_stmt (end, g);
7146 g = gimple_build_assign (num_thr_sz, PLUS_EXPR, idx,
7147 build_one_cst (TREE_TYPE (idx)));
7148 gimple_seq_add_stmt (end, g);
7149 gimple_seq_add_stmt (end, gimple_build_goto (lab3));
7150 gimple_seq_add_stmt (end, gimple_build_label (lab6));
7151 }
7152 g = gimple_build_cond (NE_EXPR, thr_num, integer_zero_node, lab2, lab4);
7153 gimple_seq_add_stmt (end, g);
7154 gimple_seq_add_stmt (end, gimple_build_label (lab4));
7155 }
7156 if (code != OMP_PARALLEL)
7157 {
7158 t = builtin_decl_explicit (BUILT_IN_OMP_GET_NUM_THREADS);
7159 tree num_thr = create_tmp_var (integer_type_node);
7160 g = gimple_build_call (t, 0);
7161 gimple_call_set_lhs (g, num_thr);
7162 gimple_seq_add_stmt (end, g);
7163 g = gimple_build_assign (num_thr_sz, NOP_EXPR, num_thr);
7164 gimple_seq_add_stmt (end, g);
7165 if (cancellable)
7166 gimple_seq_add_stmt (end, gimple_build_label (lab3));
7167 }
7168 else
7169 {
7170 tree c = omp_find_clause (gimple_omp_parallel_clauses (ctx->stmt),
7171 OMP_CLAUSE__REDUCTEMP_);
7172 t = fold_convert (pointer_sized_int_node, OMP_CLAUSE_DECL (c));
7173 t = fold_convert (size_type_node, t);
7174 gimplify_assign (num_thr_sz, t, end);
7175 }
7176 t = build4 (ARRAY_REF, pointer_sized_int_node, avar, size_int (2),
7177 NULL_TREE, NULL_TREE);
7178 tree data = create_tmp_var (pointer_sized_int_node);
7179 gimple_seq_add_stmt (end, gimple_build_assign (data, t));
7180 gimple_seq_add_stmt (end, gimple_build_label (lab1));
7181 tree ptr;
7182 if (TREE_CODE (TYPE_SIZE_UNIT (record_type)) == INTEGER_CST)
7183 ptr = create_tmp_var (build_pointer_type (record_type));
7184 else
7185 ptr = create_tmp_var (ptr_type_node);
7186 gimple_seq_add_stmt (end, gimple_build_assign (ptr, NOP_EXPR, data));
7187
7188 tree field = TYPE_FIELDS (record_type);
7189 cnt = 0;
7190 if (cancellable)
7191 field = DECL_CHAIN (DECL_CHAIN (field));
7192 for (int pass = 0; pass < 2; pass++)
7193 {
7194 tree decl, type, next;
7195 for (tree c = clauses;
7196 omp_task_reduction_iterate (pass, code, ccode,
7197 &c, &decl, &type, &next); c = next)
7198 {
7199 tree var = decl, ref;
7200 if (TREE_CODE (decl) == MEM_REF)
7201 {
7202 var = TREE_OPERAND (var, 0);
7203 if (TREE_CODE (var) == POINTER_PLUS_EXPR)
7204 var = TREE_OPERAND (var, 0);
7205 tree v = var;
7206 if (TREE_CODE (var) == ADDR_EXPR)
7207 var = TREE_OPERAND (var, 0);
7208 else if (TREE_CODE (var) == INDIRECT_REF)
7209 var = TREE_OPERAND (var, 0);
7210 tree orig_var = var;
7211 if (is_variable_sized (var))
7212 {
7213 gcc_assert (DECL_HAS_VALUE_EXPR_P (var));
7214 var = DECL_VALUE_EXPR (var);
7215 gcc_assert (TREE_CODE (var) == INDIRECT_REF);
7216 var = TREE_OPERAND (var, 0);
7217 gcc_assert (DECL_P (var));
7218 }
7219 t = ref = maybe_lookup_decl_in_outer_ctx (var, ctx);
7220 if (orig_var != var)
7221 gcc_assert (TREE_CODE (v) == ADDR_EXPR);
7222 else if (TREE_CODE (v) == ADDR_EXPR)
7223 t = build_fold_addr_expr (t);
7224 else if (TREE_CODE (v) == INDIRECT_REF)
7225 t = build_fold_indirect_ref (t);
7226 if (TREE_CODE (TREE_OPERAND (decl, 0)) == POINTER_PLUS_EXPR)
7227 {
7228 tree b = TREE_OPERAND (TREE_OPERAND (decl, 0), 1);
7229 b = maybe_lookup_decl_in_outer_ctx (b, ctx);
7230 t = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (t), t, b);
7231 }
7232 if (!integer_zerop (TREE_OPERAND (decl, 1)))
7233 t = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (t), t,
7234 fold_convert (size_type_node,
7235 TREE_OPERAND (decl, 1)));
7236 }
7237 else
7238 {
7239 t = ref = maybe_lookup_decl_in_outer_ctx (var, ctx);
7240 if (!omp_is_reference (decl))
7241 t = build_fold_addr_expr (t);
7242 }
7243 t = fold_convert (pointer_sized_int_node, t);
7244 seq = NULL;
7245 t = force_gimple_operand (t, &seq, true, NULL_TREE);
7246 gimple_seq_add_seq (start, seq);
7247 r = build4 (ARRAY_REF, pointer_sized_int_node, avar,
7248 size_int (7 + cnt * 3), NULL_TREE, NULL_TREE);
7249 gimple_seq_add_stmt (start, gimple_build_assign (r, t));
7250 t = unshare_expr (byte_position (field));
7251 t = fold_convert (pointer_sized_int_node, t);
7252 ctx->task_reduction_map->put (c, cnt);
7253 ctx->task_reductions.quick_push (TREE_CODE (t) == INTEGER_CST
7254 ? t : NULL_TREE);
7255 seq = NULL;
7256 t = force_gimple_operand (t, &seq, true, NULL_TREE);
7257 gimple_seq_add_seq (start, seq);
7258 r = build4 (ARRAY_REF, pointer_sized_int_node, avar,
7259 size_int (7 + cnt * 3 + 1), NULL_TREE, NULL_TREE);
7260 gimple_seq_add_stmt (start, gimple_build_assign (r, t));
7261
7262 tree bfield = DECL_CHAIN (field);
7263 tree cond;
7264 if (code == OMP_PARALLEL || code == OMP_FOR || code == OMP_SECTIONS)
7265 /* In parallel or worksharing all threads unconditionally
7266 initialize all their task reduction private variables. */
7267 cond = boolean_true_node;
7268 else if (TREE_TYPE (ptr) == ptr_type_node)
7269 {
7270 cond = build2 (POINTER_PLUS_EXPR, ptr_type_node, ptr,
7271 unshare_expr (byte_position (bfield)));
7272 seq = NULL;
7273 cond = force_gimple_operand (cond, &seq, true, NULL_TREE);
7274 gimple_seq_add_seq (end, seq);
7275 tree pbool = build_pointer_type (TREE_TYPE (bfield));
7276 cond = build2 (MEM_REF, TREE_TYPE (bfield), cond,
7277 build_int_cst (pbool, 0));
7278 }
7279 else
7280 cond = build3 (COMPONENT_REF, TREE_TYPE (bfield),
7281 build_simple_mem_ref (ptr), bfield, NULL_TREE);
7282 tree lab3 = create_artificial_label (UNKNOWN_LOCATION);
7283 tree lab4 = create_artificial_label (UNKNOWN_LOCATION);
7284 tree condv = create_tmp_var (boolean_type_node);
7285 gimple_seq_add_stmt (end, gimple_build_assign (condv, cond));
7286 g = gimple_build_cond (NE_EXPR, condv, boolean_false_node,
7287 lab3, lab4);
7288 gimple_seq_add_stmt (end, g);
7289 gimple_seq_add_stmt (end, gimple_build_label (lab3));
7290 if (cancellable && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) == NULL_TREE)
7291 {
7292 /* If this reduction doesn't need destruction and parallel
7293 has been cancelled, there is nothing to do for this
7294 reduction, so jump around the merge operation. */
7295 tree lab5 = create_artificial_label (UNKNOWN_LOCATION);
7296 g = gimple_build_cond (NE_EXPR, cancellable,
7297 build_zero_cst (TREE_TYPE (cancellable)),
7298 lab4, lab5);
7299 gimple_seq_add_stmt (end, g);
7300 gimple_seq_add_stmt (end, gimple_build_label (lab5));
7301 }
7302
7303 tree new_var;
7304 if (TREE_TYPE (ptr) == ptr_type_node)
7305 {
7306 new_var = build2 (POINTER_PLUS_EXPR, ptr_type_node, ptr,
7307 unshare_expr (byte_position (field)));
7308 seq = NULL;
7309 new_var = force_gimple_operand (new_var, &seq, true, NULL_TREE);
7310 gimple_seq_add_seq (end, seq);
7311 tree pbool = build_pointer_type (TREE_TYPE (field));
7312 new_var = build2 (MEM_REF, TREE_TYPE (field), new_var,
7313 build_int_cst (pbool, 0));
7314 }
7315 else
7316 new_var = build3 (COMPONENT_REF, TREE_TYPE (field),
7317 build_simple_mem_ref (ptr), field, NULL_TREE);
7318
7319 enum tree_code rcode = OMP_CLAUSE_REDUCTION_CODE (c);
7320 if (TREE_CODE (decl) != MEM_REF && omp_is_reference (decl))
7321 ref = build_simple_mem_ref (ref);
7322 /* reduction(-:var) sums up the partial results, so it acts
7323 identically to reduction(+:var). */
7324 if (rcode == MINUS_EXPR)
7325 rcode = PLUS_EXPR;
7326 if (TREE_CODE (decl) == MEM_REF)
7327 {
7328 tree type = TREE_TYPE (new_var);
7329 tree v = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
7330 tree i = create_tmp_var (TREE_TYPE (v));
7331 tree ptype = build_pointer_type (TREE_TYPE (type));
7332 if (DECL_P (v))
7333 {
7334 v = maybe_lookup_decl_in_outer_ctx (v, ctx);
7335 tree vv = create_tmp_var (TREE_TYPE (v));
7336 gimplify_assign (vv, v, start);
7337 v = vv;
7338 }
7339 ref = build4 (ARRAY_REF, pointer_sized_int_node, avar,
7340 size_int (7 + cnt * 3), NULL_TREE, NULL_TREE);
7341 new_var = build_fold_addr_expr (new_var);
7342 new_var = fold_convert (ptype, new_var);
7343 ref = fold_convert (ptype, ref);
7344 tree m = create_tmp_var (ptype);
7345 gimplify_assign (m, new_var, end);
7346 new_var = m;
7347 m = create_tmp_var (ptype);
7348 gimplify_assign (m, ref, end);
7349 ref = m;
7350 gimplify_assign (i, build_int_cst (TREE_TYPE (v), 0), end);
7351 tree body = create_artificial_label (UNKNOWN_LOCATION);
7352 tree endl = create_artificial_label (UNKNOWN_LOCATION);
7353 gimple_seq_add_stmt (end, gimple_build_label (body));
7354 tree priv = build_simple_mem_ref (new_var);
7355 tree out = build_simple_mem_ref (ref);
7356 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
7357 {
7358 tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
7359 tree decl_placeholder
7360 = OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c);
7361 tree lab6 = NULL_TREE;
7362 if (cancellable)
7363 {
7364 /* If this reduction needs destruction and parallel
7365 has been cancelled, jump around the merge operation
7366 to the destruction. */
7367 tree lab5 = create_artificial_label (UNKNOWN_LOCATION);
7368 lab6 = create_artificial_label (UNKNOWN_LOCATION);
7369 tree zero = build_zero_cst (TREE_TYPE (cancellable));
7370 g = gimple_build_cond (NE_EXPR, cancellable, zero,
7371 lab6, lab5);
7372 gimple_seq_add_stmt (end, g);
7373 gimple_seq_add_stmt (end, gimple_build_label (lab5));
7374 }
7375 SET_DECL_VALUE_EXPR (placeholder, out);
7376 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
7377 SET_DECL_VALUE_EXPR (decl_placeholder, priv);
7378 DECL_HAS_VALUE_EXPR_P (decl_placeholder) = 1;
7379 lower_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c), ctx);
7380 gimple_seq_add_seq (end,
7381 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c));
7382 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
7383 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_TASK_REDUCTION)
7384 {
7385 OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) = NULL;
7386 OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c) = NULL;
7387 }
7388 if (cancellable)
7389 gimple_seq_add_stmt (end, gimple_build_label (lab6));
7390 tree x = lang_hooks.decls.omp_clause_dtor (c, priv);
7391 if (x)
7392 {
7393 gimple_seq tseq = NULL;
7394 gimplify_stmt (&x, &tseq);
7395 gimple_seq_add_seq (end, tseq);
7396 }
7397 }
7398 else
7399 {
7400 tree x = build2 (rcode, TREE_TYPE (out), out, priv);
7401 out = unshare_expr (out);
7402 gimplify_assign (out, x, end);
7403 }
7404 gimple *g
7405 = gimple_build_assign (new_var, POINTER_PLUS_EXPR, new_var,
7406 TYPE_SIZE_UNIT (TREE_TYPE (type)));
7407 gimple_seq_add_stmt (end, g);
7408 g = gimple_build_assign (ref, POINTER_PLUS_EXPR, ref,
7409 TYPE_SIZE_UNIT (TREE_TYPE (type)));
7410 gimple_seq_add_stmt (end, g);
7411 g = gimple_build_assign (i, PLUS_EXPR, i,
7412 build_int_cst (TREE_TYPE (i), 1));
7413 gimple_seq_add_stmt (end, g);
7414 g = gimple_build_cond (LE_EXPR, i, v, body, endl);
7415 gimple_seq_add_stmt (end, g);
7416 gimple_seq_add_stmt (end, gimple_build_label (endl));
7417 }
7418 else if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
7419 {
7420 tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
7421 tree oldv = NULL_TREE;
7422 tree lab6 = NULL_TREE;
7423 if (cancellable)
7424 {
7425 /* If this reduction needs destruction and parallel
7426 has been cancelled, jump around the merge operation
7427 to the destruction. */
7428 tree lab5 = create_artificial_label (UNKNOWN_LOCATION);
7429 lab6 = create_artificial_label (UNKNOWN_LOCATION);
7430 tree zero = build_zero_cst (TREE_TYPE (cancellable));
7431 g = gimple_build_cond (NE_EXPR, cancellable, zero,
7432 lab6, lab5);
7433 gimple_seq_add_stmt (end, g);
7434 gimple_seq_add_stmt (end, gimple_build_label (lab5));
7435 }
7436 if (omp_is_reference (decl)
7437 && !useless_type_conversion_p (TREE_TYPE (placeholder),
7438 TREE_TYPE (ref)))
7439 ref = build_fold_addr_expr_loc (OMP_CLAUSE_LOCATION (c), ref);
7440 ref = build_fold_addr_expr_loc (OMP_CLAUSE_LOCATION (c), ref);
7441 tree refv = create_tmp_var (TREE_TYPE (ref));
7442 gimplify_assign (refv, ref, end);
7443 ref = build_simple_mem_ref_loc (OMP_CLAUSE_LOCATION (c), refv);
7444 SET_DECL_VALUE_EXPR (placeholder, ref);
7445 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
7446 tree d = maybe_lookup_decl (decl, ctx);
7447 gcc_assert (d);
7448 if (DECL_HAS_VALUE_EXPR_P (d))
7449 oldv = DECL_VALUE_EXPR (d);
7450 if (omp_is_reference (var))
7451 {
7452 tree v = fold_convert (TREE_TYPE (d),
7453 build_fold_addr_expr (new_var));
7454 SET_DECL_VALUE_EXPR (d, v);
7455 }
7456 else
7457 SET_DECL_VALUE_EXPR (d, new_var);
7458 DECL_HAS_VALUE_EXPR_P (d) = 1;
7459 lower_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c), ctx);
7460 if (oldv)
7461 SET_DECL_VALUE_EXPR (d, oldv);
7462 else
7463 {
7464 SET_DECL_VALUE_EXPR (d, NULL_TREE);
7465 DECL_HAS_VALUE_EXPR_P (d) = 0;
7466 }
7467 gimple_seq_add_seq (end, OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c));
7468 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
7469 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_TASK_REDUCTION)
7470 OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) = NULL;
7471 if (cancellable)
7472 gimple_seq_add_stmt (end, gimple_build_label (lab6));
7473 tree x = lang_hooks.decls.omp_clause_dtor (c, new_var);
7474 if (x)
7475 {
7476 gimple_seq tseq = NULL;
7477 gimplify_stmt (&x, &tseq);
7478 gimple_seq_add_seq (end, tseq);
7479 }
7480 }
7481 else
7482 {
7483 tree x = build2 (rcode, TREE_TYPE (ref), ref, new_var);
7484 ref = unshare_expr (ref);
7485 gimplify_assign (ref, x, end);
7486 }
7487 gimple_seq_add_stmt (end, gimple_build_label (lab4));
7488 ++cnt;
7489 field = DECL_CHAIN (bfield);
7490 }
7491 }
7492
7493 if (code == OMP_TASKGROUP)
7494 {
7495 t = builtin_decl_explicit (BUILT_IN_GOMP_TASKGROUP_REDUCTION_REGISTER);
7496 g = gimple_build_call (t, 1, build_fold_addr_expr (avar));
7497 gimple_seq_add_stmt (start, g);
7498 }
7499 else
7500 {
7501 tree c;
7502 if (code == OMP_FOR)
7503 c = gimple_omp_for_clauses (ctx->stmt);
7504 else if (code == OMP_SECTIONS)
7505 c = gimple_omp_sections_clauses (ctx->stmt);
7506 else
7507 c = gimple_omp_taskreg_clauses (ctx->stmt);
7508 c = omp_find_clause (c, OMP_CLAUSE__REDUCTEMP_);
7509 t = fold_convert (TREE_TYPE (OMP_CLAUSE_DECL (c)),
7510 build_fold_addr_expr (avar));
7511 gimplify_assign (OMP_CLAUSE_DECL (c), t, start);
7512 }
7513
7514 gimple_seq_add_stmt (end, gimple_build_assign (data, PLUS_EXPR, data, sz));
7515 gimple_seq_add_stmt (end, gimple_build_assign (idx, PLUS_EXPR, idx,
7516 size_one_node));
7517 g = gimple_build_cond (NE_EXPR, idx, num_thr_sz, lab1, lab2);
7518 gimple_seq_add_stmt (end, g);
7519 gimple_seq_add_stmt (end, gimple_build_label (lab2));
7520 if (code == OMP_FOR || code == OMP_SECTIONS)
7521 {
7522 enum built_in_function bfn
7523 = BUILT_IN_GOMP_WORKSHARE_TASK_REDUCTION_UNREGISTER;
7524 t = builtin_decl_explicit (bfn);
7525 tree c_bool_type = TREE_VALUE (TYPE_ARG_TYPES (TREE_TYPE (t)));
7526 tree arg;
7527 if (cancellable)
7528 {
7529 arg = create_tmp_var (c_bool_type);
7530 gimple_seq_add_stmt (end, gimple_build_assign (arg, NOP_EXPR,
7531 cancellable));
7532 }
7533 else
7534 arg = build_int_cst (c_bool_type, 0);
7535 g = gimple_build_call (t, 1, arg);
7536 }
7537 else
7538 {
7539 t = builtin_decl_explicit (BUILT_IN_GOMP_TASKGROUP_REDUCTION_UNREGISTER);
7540 g = gimple_build_call (t, 1, build_fold_addr_expr (avar));
7541 }
7542 gimple_seq_add_stmt (end, g);
7543 t = build_constructor (atype, NULL);
7544 TREE_THIS_VOLATILE (t) = 1;
7545 gimple_seq_add_stmt (end, gimple_build_assign (avar, t));
7546 }
7547
7548 /* Expand code for an OpenMP taskgroup directive. */
7549
7550 static void
7551 lower_omp_taskgroup (gimple_stmt_iterator *gsi_p, omp_context *ctx)
7552 {
7553 gimple *stmt = gsi_stmt (*gsi_p);
7554 gcall *x;
7555 gbind *bind;
7556 gimple_seq dseq = NULL;
7557 tree block = make_node (BLOCK);
7558
7559 bind = gimple_build_bind (NULL, NULL, block);
7560 gsi_replace (gsi_p, bind, true);
7561 gimple_bind_add_stmt (bind, stmt);
7562
7563 push_gimplify_context ();
7564
7565 x = gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_TASKGROUP_START),
7566 0);
7567 gimple_bind_add_stmt (bind, x);
7568
7569 lower_omp_task_reductions (ctx, OMP_TASKGROUP,
7570 gimple_omp_taskgroup_clauses (stmt),
7571 gimple_bind_body_ptr (bind), &dseq);
7572
7573 lower_omp (gimple_omp_body_ptr (stmt), ctx);
7574 gimple_bind_add_seq (bind, gimple_omp_body (stmt));
7575 gimple_omp_set_body (stmt, NULL);
7576
7577 gimple_bind_add_stmt (bind, gimple_build_omp_return (true));
7578 gimple_bind_add_seq (bind, dseq);
7579
7580 pop_gimplify_context (bind);
7581
7582 gimple_bind_append_vars (bind, ctx->block_vars);
7583 BLOCK_VARS (block) = ctx->block_vars;
7584 }
7585
7586
/* Fold the OMP_ORDERED_CLAUSES for the OMP_ORDERED in ORD_STMT if possible.
   GSI_P points at ORD_STMT; CTX is its omp context, whose immediate outer
   context must be the enclosing GIMPLE_OMP_FOR.  Adjacent
   #pragma omp ordered depend(sink:...) constructs following ORD_STMT are
   merged into it, and all sink vectors are then folded into a single
   conservative dependence vector where possible.  */

static void
lower_omp_ordered_clauses (gimple_stmt_iterator *gsi_p, gomp_ordered *ord_stmt,
			   omp_context *ctx)
{
  struct omp_for_data fd;
  /* Only meaningful directly inside an omp for.  */
  if (!ctx->outer || gimple_code (ctx->outer->stmt) != GIMPLE_OMP_FOR)
    return;

  unsigned int len = gimple_omp_for_collapse (ctx->outer->stmt);
  struct omp_for_data_loop *loops = XALLOCAVEC (struct omp_for_data_loop, len);
  omp_extract_for_data (as_a <gomp_for *> (ctx->outer->stmt), &fd, loops);
  /* Nothing to do unless the loop has an ordered(n) clause.  */
  if (!fd.ordered)
    return;

  tree *list_p = gimple_omp_ordered_clauses_ptr (ord_stmt);
  tree c = gimple_omp_ordered_clauses (ord_stmt);
  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND
      && OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SINK)
    {
      /* Merge depend clauses from multiple adjacent
	 #pragma omp ordered depend(sink:...) constructs
	 into one #pragma omp ordered depend(sink:...), so that
	 we can optimize them together.  */
      gimple_stmt_iterator gsi = *gsi_p;
      gsi_next (&gsi);
      while (!gsi_end_p (gsi))
	{
	  gimple *stmt = gsi_stmt (gsi);
	  /* Skip over debug stmts and nops between the ordered
	     constructs.  */
	  if (is_gimple_debug (stmt)
	      || gimple_code (stmt) == GIMPLE_NOP)
	    {
	      gsi_next (&gsi);
	      continue;
	    }
	  if (gimple_code (stmt) != GIMPLE_OMP_ORDERED)
	    break;
	  gomp_ordered *ord_stmt2 = as_a <gomp_ordered *> (stmt);
	  c = gimple_omp_ordered_clauses (ord_stmt2);
	  /* Only merge further ordered constructs that are themselves
	     depend(sink:...).  */
	  if (c == NULL_TREE
	      || OMP_CLAUSE_CODE (c) != OMP_CLAUSE_DEPEND
	      || OMP_CLAUSE_DEPEND_KIND (c) != OMP_CLAUSE_DEPEND_SINK)
	    break;
	  /* Splice the merged construct's clauses onto the end of
	     ORD_STMT's clause chain and delete the construct.  */
	  while (*list_p)
	    list_p = &OMP_CLAUSE_CHAIN (*list_p);
	  *list_p = c;
	  gsi_remove (&gsi, true);
	}
    }

  /* Canonicalize sink dependence clauses into one folded clause if
     possible.

     The basic algorithm is to create a sink vector whose first
     element is the GCD of all the first elements, and whose remaining
     elements are the minimum of the subsequent columns.

     We ignore dependence vectors whose first element is zero because
     such dependencies are known to be executed by the same thread.

     We take into account the direction of the loop, so a minimum
     becomes a maximum if the loop is iterating forwards.  We also
     ignore sink clauses where the loop direction is unknown, or where
     the offsets are clearly invalid because they are not a multiple
     of the loop increment.

     For example:

	#pragma omp for ordered(2)
	for (i=0; i < N; ++i)
	  for (j=0; j < M; ++j)
	    {
	      #pragma omp ordered \
		depend(sink:i-8,j-2) \
		depend(sink:i,j-1) \	// Completely ignored because i+0.
		depend(sink:i-4,j-3) \
		depend(sink:i-6,j-4)
	      #pragma omp ordered depend(source)
	    }

     Folded clause is:

	depend(sink:-gcd(8,4,6),-min(2,3,4))
	  -or-
	depend(sink:-2,-2)
   */

  /* FIXME: Computing GCD's where the first element is zero is
     non-trivial in the presence of collapsed loops.  Do this later.  */
  if (fd.collapse > 1)
    return;

  /* Scratch space layout: folded_deps[0 .. len-1] is the dependence
     vector folded so far; folded_deps[len .. 2*len-2] temporarily holds
     the current clause's offsets for dimensions 1 .. len-1 so they can
     be copied wholesale into the folded vector if the current clause
     turns out to be the lexically latest candidate.  */
  wide_int *folded_deps = XALLOCAVEC (wide_int, 2 * len - 1);

  /* wide_int is not a POD so it must be default-constructed.  */
  for (unsigned i = 0; i != 2 * len - 1; ++i)
    new (static_cast<void*>(folded_deps + i)) wide_int ();

  /* The clause whose vector is currently the folded result; NULL until
     the first usable sink vector is seen.  */
  tree folded_dep = NULL_TREE;
  /* TRUE if the first dimension's offset is negative.  */
  bool neg_offset_p = false;

  /* Walk every clause, folding usable sink vectors into FOLDED_DEP and
     unlinking clauses that have been folded or must be ignored.  */
  list_p = gimple_omp_ordered_clauses_ptr (ord_stmt);
  unsigned int i;
  while ((c = *list_p) != NULL)
    {
      bool remove = false;

      gcc_assert (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND);
      if (OMP_CLAUSE_DEPEND_KIND (c) != OMP_CLAUSE_DEPEND_SINK)
	goto next_ordered_clause;

      /* OMP_CLAUSE_DECL is a TREE_LIST: one (offset, iterator) pair per
	 ordered dimension.  */
      tree vec;
      for (vec = OMP_CLAUSE_DECL (c), i = 0;
	   vec && TREE_CODE (vec) == TREE_LIST;
	   vec = TREE_CHAIN (vec), ++i)
	{
	  gcc_assert (i < len);

	  /* omp_extract_for_data has canonicalized the condition.  */
	  gcc_assert (fd.loops[i].cond_code == LT_EXPR
		      || fd.loops[i].cond_code == GT_EXPR);
	  bool forward = fd.loops[i].cond_code == LT_EXPR;
	  bool maybe_lexically_later = true;

	  /* While the committee makes up its mind, bail if we have any
	     non-constant steps.  */
	  if (TREE_CODE (fd.loops[i].step) != INTEGER_CST)
	    goto lower_omp_ordered_ret;

	  tree itype = TREE_TYPE (TREE_VALUE (vec));
	  if (POINTER_TYPE_P (itype))
	    itype = sizetype;
	  wide_int offset = wide_int::from (wi::to_wide (TREE_PURPOSE (vec)),
					    TYPE_PRECISION (itype),
					    TYPE_SIGN (itype));

	  /* Ignore invalid offsets that are not multiples of the step.  */
	  if (!wi::multiple_of_p (wi::abs (offset),
				  wi::abs (wi::to_wide (fd.loops[i].step)),
				  UNSIGNED))
	    {
	      warning_at (OMP_CLAUSE_LOCATION (c), 0,
			  "ignoring sink clause with offset that is not "
			  "a multiple of the loop step");
	      remove = true;
	      goto next_ordered_clause;
	    }

	  /* Calculate the first dimension.  The first dimension of
	     the folded dependency vector is the GCD of the first
	     elements, while ignoring any first elements whose offset
	     is 0.  */
	  if (i == 0)
	    {
	      /* Ignore dependence vectors whose first dimension is 0.  */
	      if (offset == 0)
		{
		  remove = true;
		  goto next_ordered_clause;
		}
	      else
		{
		  /* A valid first offset must point at an earlier
		     iteration, i.e. opposite the loop direction.  */
		  if (!TYPE_UNSIGNED (itype) && (forward ^ wi::neg_p (offset)))
		    {
		      error_at (OMP_CLAUSE_LOCATION (c),
				"first offset must be in opposite direction "
				"of loop iterations");
		      goto lower_omp_ordered_ret;
		    }
		  /* Normalize so folded_deps[0] is a positive magnitude;
		     NEG_OFFSET_P records whether to negate it back when
		     emitting the folded clause.  */
		  if (forward)
		    offset = -offset;
		  neg_offset_p = forward;
		  /* Initialize the first time around.  */
		  if (folded_dep == NULL_TREE)
		    {
		      folded_dep = c;
		      folded_deps[0] = offset;
		    }
		  else
		    folded_deps[0] = wi::gcd (folded_deps[0],
					      offset, UNSIGNED);
		}
	    }
	  /* Calculate minimum for the remaining dimensions.  */
	  else
	    {
	      /* Stash this clause's offset in the scratch half in case
		 it becomes the new lexically-latest vector below.  */
	      folded_deps[len + i - 1] = offset;
	      if (folded_dep == c)
		folded_deps[i] = offset;
	      else if (maybe_lexically_later
		       && !wi::eq_p (folded_deps[i], offset))
		{
		  if (forward ^ wi::gts_p (folded_deps[i], offset))
		    {
		      /* This clause is lexically later than the folded
			 vector so far; adopt its offsets for dimensions
			 1 .. i from the scratch half.  */
		      unsigned int j;
		      folded_dep = c;
		      for (j = 1; j <= i; j++)
			folded_deps[j] = folded_deps[len + j - 1];
		    }
		  else
		    maybe_lexically_later = false;
		}
	    }
	}
      gcc_assert (i == len);

      /* This sink clause has been folded; drop it from the chain.  */
      remove = true;

    next_ordered_clause:
      if (remove)
	*list_p = OMP_CLAUSE_CHAIN (c);
      else
	list_p = &OMP_CLAUSE_CHAIN (c);
    }

  if (folded_dep)
    {
      /* Undo the direction normalization applied above.  */
      if (neg_offset_p)
	folded_deps[0] = -folded_deps[0];

      tree itype = TREE_TYPE (TREE_VALUE (OMP_CLAUSE_DECL (folded_dep)));
      if (POINTER_TYPE_P (itype))
	itype = sizetype;

      /* Rewrite FOLDED_DEP's first offset with the folded value and
	 re-link it at the head of ORD_STMT's clause chain.  */
      TREE_PURPOSE (OMP_CLAUSE_DECL (folded_dep))
	= wide_int_to_tree (itype, folded_deps[0]);
      OMP_CLAUSE_CHAIN (folded_dep) = gimple_omp_ordered_clauses (ord_stmt);
      *gimple_omp_ordered_clauses_ptr (ord_stmt) = folded_dep;
    }

 lower_omp_ordered_ret:

  /* Ordered without clauses is #pragma omp threads, while we want
     a nop instead if we remove all clauses.  */
  if (gimple_omp_ordered_clauses (ord_stmt) == NULL_TREE)
    gsi_replace (gsi_p, gimple_build_nop (), true);
}
7826
7827
/* Expand code for an OpenMP ordered directive.  Replaces the directive
   at GSI_P with a GIMPLE_BIND that brackets the lowered body with
   ordered-start/ordered-end calls (library calls in the non-simd case,
   internal functions for ordered simd), and for possibly-SIMT targets
   wraps the body in a lane-serialization loop.  */

static void
lower_omp_ordered (gimple_stmt_iterator *gsi_p, omp_context *ctx)
{
  tree block;
  gimple *stmt = gsi_stmt (*gsi_p), *g;
  gomp_ordered *ord_stmt = as_a <gomp_ordered *> (stmt);
  gcall *x;
  gbind *bind;
  /* ordered simd uses internal functions instead of libgomp calls.  */
  bool simd = omp_find_clause (gimple_omp_ordered_clauses (ord_stmt),
			       OMP_CLAUSE_SIMD);
  /* FIXME: this should check presence of OMP_CLAUSE__SIMT_ on the enclosing
     loop.  */
  bool maybe_simt
    = simd && omp_maybe_offloaded_ctx (ctx) && omp_max_simt_vf () > 1;
  bool threads = omp_find_clause (gimple_omp_ordered_clauses (ord_stmt),
				  OMP_CLAUSE_THREADS);

  if (omp_find_clause (gimple_omp_ordered_clauses (ord_stmt),
		       OMP_CLAUSE_DEPEND))
    {
      /* FIXME: This needs to be moved to the expansion to verify various
	 conditions only testable on cfg with dominators computed, and also
	 all the depend clauses to be merged still might need to be available
	 for the runtime checks.  */
      if (0)
	lower_omp_ordered_clauses (gsi_p, ord_stmt, ctx);
      /* depend clauses are handled elsewhere; leave the stmt alone.  */
      return;
    }

  push_gimplify_context ();

  /* Replace the ordered stmt with a bind wrapping it.  */
  block = make_node (BLOCK);
  bind = gimple_build_bind (NULL, NULL, block);
  gsi_replace (gsi_p, bind, true);
  gimple_bind_add_stmt (bind, stmt);

  if (simd)
    {
      /* THREADS is encoded as the integer argument of the internal fn.  */
      x = gimple_build_call_internal (IFN_GOMP_SIMD_ORDERED_START, 1,
				      build_int_cst (NULL_TREE, threads));
      cfun->has_simduid_loops = true;
    }
  else
    x = gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_ORDERED_START),
			   0);
  gimple_bind_add_stmt (bind, x);

  tree counter = NULL_TREE, test = NULL_TREE, body = NULL_TREE;
  if (maybe_simt)
    {
      /* On a SIMT target, serialize lanes through the body: COUNTER
	 starts at this lane's number; each round, only the lane for
	 which GOMP_SIMT_ORDERED_PRED returns 0 executes the body, and
	 the loop below repeats while any lane's counter is still
	 non-negative.  */
      counter = create_tmp_var (integer_type_node);
      g = gimple_build_call_internal (IFN_GOMP_SIMT_LANE, 0);
      gimple_call_set_lhs (g, counter);
      gimple_bind_add_stmt (bind, g);

      body = create_artificial_label (UNKNOWN_LOCATION);
      test = create_artificial_label (UNKNOWN_LOCATION);
      gimple_bind_add_stmt (bind, gimple_build_label (body));

      tree simt_pred = create_tmp_var (integer_type_node);
      g = gimple_build_call_internal (IFN_GOMP_SIMT_ORDERED_PRED, 1, counter);
      gimple_call_set_lhs (g, simt_pred);
      gimple_bind_add_stmt (bind, g);

      /* If SIMT_PRED is non-zero, this lane skips the body this round.  */
      tree t = create_artificial_label (UNKNOWN_LOCATION);
      g = gimple_build_cond (EQ_EXPR, simt_pred, integer_zero_node, t, test);
      gimple_bind_add_stmt (bind, g);

      gimple_bind_add_stmt (bind, gimple_build_label (t));
    }
  /* Lower the body, wrap it for EH, and move it into the bind.  */
  lower_omp (gimple_omp_body_ptr (stmt), ctx);
  gimple_omp_set_body (stmt, maybe_catch_exception (gimple_omp_body (stmt)));
  gimple_bind_add_seq (bind, gimple_omp_body (stmt));
  gimple_omp_set_body (stmt, NULL);

  if (maybe_simt)
    {
      /* Loop back-edge: decrement COUNTER and repeat while any lane
	 still has COUNTER >= 0 (GOMP_SIMT_VOTE_ANY).  */
      gimple_bind_add_stmt (bind, gimple_build_label (test));
      g = gimple_build_assign (counter, MINUS_EXPR, counter, integer_one_node);
      gimple_bind_add_stmt (bind, g);

      tree c = build2 (GE_EXPR, boolean_type_node, counter, integer_zero_node);
      tree nonneg = create_tmp_var (integer_type_node);
      gimple_seq tseq = NULL;
      gimplify_assign (nonneg, fold_convert (integer_type_node, c), &tseq);
      gimple_bind_add_seq (bind, tseq);

      g = gimple_build_call_internal (IFN_GOMP_SIMT_VOTE_ANY, 1, nonneg);
      gimple_call_set_lhs (g, nonneg);
      gimple_bind_add_stmt (bind, g);

      tree end = create_artificial_label (UNKNOWN_LOCATION);
      g = gimple_build_cond (NE_EXPR, nonneg, integer_zero_node, body, end);
      gimple_bind_add_stmt (bind, g);

      gimple_bind_add_stmt (bind, gimple_build_label (end));
    }
  /* Matching ordered-end call.  */
  if (simd)
    x = gimple_build_call_internal (IFN_GOMP_SIMD_ORDERED_END, 1,
				    build_int_cst (NULL_TREE, threads));
  else
    x = gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_ORDERED_END),
			   0);
  gimple_bind_add_stmt (bind, x);

  gimple_bind_add_stmt (bind, gimple_build_omp_return (true));

  pop_gimplify_context (bind);

  gimple_bind_append_vars (bind, ctx->block_vars);
  BLOCK_VARS (block) = gimple_bind_vars (bind);
}
7942
7943
/* Lower a GIMPLE_OMP_CRITICAL statement.  This is a relatively simple
   substitution of a couple of function calls.  But in the NAMED case,
   requires that languages coordinate a symbol name.  It is therefore
   best put here in common code.  */

/* Map from the IDENTIFIER_NODE of a user-supplied critical section name
   to the artificial mutex VAR_DECL created for it, so every use of the
   same name locks the same symbol.  GTY-marked so it survives garbage
   collection between invocations.  */
static GTY(()) hash_map<tree, tree> *critical_name_mutexes;
7950
static void
lower_omp_critical (gimple_stmt_iterator *gsi_p, omp_context *ctx)
{
  tree block;
  tree name, lock, unlock;
  gomp_critical *stmt = as_a <gomp_critical *> (gsi_stmt (*gsi_p));
  gbind *bind;
  location_t loc = gimple_location (stmt);
  gimple_seq tbody;

  name = gimple_omp_critical_name (stmt);
  if (name)
    {
      /* Named critical: lazily create (or reuse) one public, common
	 mutex symbol per name, and call the *_NAME_* libgomp entry
	 points with its address.  */
      tree decl;

      if (!critical_name_mutexes)
	critical_name_mutexes = hash_map<tree, tree>::create_ggc (10);

      tree *n = critical_name_mutexes->get (name);
      if (n == NULL)
	{
	  char *new_str;

	  decl = create_tmp_var_raw (ptr_type_node);

	  new_str = ACONCAT ((".gomp_critical_user_",
			      IDENTIFIER_POINTER (name), NULL));
	  DECL_NAME (decl) = get_identifier (new_str);
	  /* Public + common so all translation units using the same
	     critical name link against a single mutex object.  */
	  TREE_PUBLIC (decl) = 1;
	  TREE_STATIC (decl) = 1;
	  DECL_COMMON (decl) = 1;
	  DECL_ARTIFICIAL (decl) = 1;
	  DECL_IGNORED_P (decl) = 1;

	  varpool_node::finalize_decl (decl);

	  critical_name_mutexes->put (name, decl);
	}
      else
	decl = *n;

      /* If '#pragma omp critical' is inside offloaded region or
	 inside function marked as offloadable, the symbol must be
	 marked as offloadable too.  */
      omp_context *octx;
      if (cgraph_node::get (current_function_decl)->offloadable)
	varpool_node::get_create (decl)->offloadable = 1;
      else
	for (octx = ctx->outer; octx; octx = octx->outer)
	  if (is_gimple_omp_offloaded (octx->stmt))
	    {
	      varpool_node::get_create (decl)->offloadable = 1;
	      break;
	    }

      lock = builtin_decl_explicit (BUILT_IN_GOMP_CRITICAL_NAME_START);
      lock = build_call_expr_loc (loc, lock, 1,
				  build_fold_addr_expr_loc (loc, decl));

      unlock = builtin_decl_explicit (BUILT_IN_GOMP_CRITICAL_NAME_END);
      unlock = build_call_expr_loc (loc, unlock, 1,
				    build_fold_addr_expr_loc (loc, decl));
    }
  else
    {
      /* Unnamed critical: libgomp keeps a single implicit mutex for
	 all anonymous critical sections.  */
      lock = builtin_decl_explicit (BUILT_IN_GOMP_CRITICAL_START);
      lock = build_call_expr_loc (loc, lock, 0);

      unlock = builtin_decl_explicit (BUILT_IN_GOMP_CRITICAL_END);
      unlock = build_call_expr_loc (loc, unlock, 0);
    }

  /* Wrap the lowered body as "lock; body; unlock;" inside a new bind
     that replaces the GIMPLE_OMP_CRITICAL at GSI_P.  */
  push_gimplify_context ();

  block = make_node (BLOCK);
  bind = gimple_build_bind (NULL, NULL, block);
  gsi_replace (gsi_p, bind, true);
  gimple_bind_add_stmt (bind, stmt);

  tbody = gimple_bind_body (bind);
  gimplify_and_add (lock, &tbody);
  gimple_bind_set_body (bind, tbody);

  lower_omp (gimple_omp_body_ptr (stmt), ctx);
  gimple_omp_set_body (stmt, maybe_catch_exception (gimple_omp_body (stmt)));
  gimple_bind_add_seq (bind, gimple_omp_body (stmt));
  gimple_omp_set_body (stmt, NULL);

  tbody = gimple_bind_body (bind);
  gimplify_and_add (unlock, &tbody);
  gimple_bind_set_body (bind, tbody);

  /* Region end marker; TRUE = nowait, a critical has no closing barrier.  */
  gimple_bind_add_stmt (bind, gimple_build_omp_return (true));

  pop_gimplify_context (bind);
  gimple_bind_append_vars (bind, ctx->block_vars);
  BLOCK_VARS (block) = gimple_bind_vars (bind);
}
8049
8050 /* A subroutine of lower_omp_for. Generate code to emit the predicate
8051 for a lastprivate clause. Given a loop control predicate of (V
8052 cond N2), we gate the clause on (!(V cond N2)). The lowered form
8053 is appended to *DLIST, iterator initialization is appended to
8054 *BODY_P. */
8055
static void
lower_omp_for_lastprivate (struct omp_for_data *fd, gimple_seq *body_p,
			   gimple_seq *dlist, struct omp_context *ctx)
{
  tree clauses, cond, vinit;
  enum tree_code cond_code;
  gimple_seq stmts;

  /* Negate the loop-continuation test: lastprivate fires for the
     thread whose iterator has run past the end value, i.e. V < N2
     becomes V >= N2, otherwise V <= N2.  */
  cond_code = fd->loop.cond_code;
  cond_code = cond_code == LT_EXPR ? GE_EXPR : LE_EXPR;

  /* When possible, use a strict equality expression.  This can let VRP
     type optimizations deduce the value and remove a copy.  */
  if (tree_fits_shwi_p (fd->loop.step))
    {
      HOST_WIDE_INT step = tree_to_shwi (fd->loop.step);
      if (step == 1 || step == -1)
	cond_code = EQ_EXPR;
    }

  if (gimple_omp_for_kind (fd->for_stmt) == GF_OMP_FOR_KIND_GRID_LOOP
      || gimple_omp_for_grid_phony (fd->for_stmt))
    cond = omp_grid_lastprivate_predicate (fd);
  else
    {
      tree n2 = fd->loop.n2;
      if (fd->collapse > 1
	  && TREE_CODE (n2) != INTEGER_CST
	  && gimple_omp_for_combined_into_p (fd->for_stmt))
	{
	  /* Non-constant end value of a collapsed loop combined into an
	     outer construct: the real end value lives in a _looptemp_
	     clause of the enclosing parallel/task region, or must be
	     recomputed from the outer GIMPLE_OMP_FOR.  */
	  struct omp_context *taskreg_ctx = NULL;
	  if (gimple_code (ctx->outer->stmt) == GIMPLE_OMP_FOR)
	    {
	      gomp_for *gfor = as_a <gomp_for *> (ctx->outer->stmt);
	      if (gimple_omp_for_kind (gfor) == GF_OMP_FOR_KIND_FOR
		  || gimple_omp_for_kind (gfor) == GF_OMP_FOR_KIND_DISTRIBUTE)
		{
		  if (gimple_omp_for_combined_into_p (gfor))
		    {
		      gcc_assert (ctx->outer->outer
				  && is_parallel_ctx (ctx->outer->outer));
		      taskreg_ctx = ctx->outer->outer;
		    }
		  else
		    {
		      struct omp_for_data outer_fd;
		      omp_extract_for_data (gfor, &outer_fd, NULL);
		      n2 = fold_convert (TREE_TYPE (n2), outer_fd.loop.n2);
		    }
		}
	      else if (gimple_omp_for_kind (gfor) == GF_OMP_FOR_KIND_TASKLOOP)
		taskreg_ctx = ctx->outer->outer;
	    }
	  else if (is_taskreg_ctx (ctx->outer))
	    taskreg_ctx = ctx->outer;
	  if (taskreg_ctx)
	    {
	      int i;
	      tree taskreg_clauses
		= gimple_omp_taskreg_clauses (taskreg_ctx->stmt);
	      tree innerc = omp_find_clause (taskreg_clauses,
					     OMP_CLAUSE__LOOPTEMP_);
	      gcc_assert (innerc);
	      /* Step over the _looptemp_ clauses holding istart/iend and
		 the count temporaries; the next one, if present, carries
		 the adjusted end value.  */
	      for (i = 0; i < fd->collapse; i++)
		{
		  innerc = omp_find_clause (OMP_CLAUSE_CHAIN (innerc),
					    OMP_CLAUSE__LOOPTEMP_);
		  gcc_assert (innerc);
		}
	      innerc = omp_find_clause (OMP_CLAUSE_CHAIN (innerc),
					OMP_CLAUSE__LOOPTEMP_);
	      if (innerc)
		n2 = fold_convert (TREE_TYPE (n2),
				   lookup_decl (OMP_CLAUSE_DECL (innerc),
						taskreg_ctx));
	    }
	}
      cond = build2 (cond_code, boolean_type_node, fd->loop.v, n2);
    }

  clauses = gimple_omp_for_clauses (fd->for_stmt);
  stmts = NULL;
  lower_lastprivate_clauses (clauses, cond, &stmts, ctx);
  if (!gimple_seq_empty_p (stmts))
    {
      gimple_seq_add_seq (&stmts, *dlist);
      *dlist = stmts;

      /* Optimize: v = 0; is usually cheaper than v = some_other_constant.  */
      vinit = fd->loop.n1;
      if (cond_code == EQ_EXPR
	  && tree_fits_shwi_p (fd->loop.n2)
	  && ! integer_zerop (fd->loop.n2))
	vinit = build_int_cst (TREE_TYPE (fd->loop.v), 0);
      else
	vinit = unshare_expr (vinit);

      /* Initialize the iterator variable, so that threads that don't execute
	 any iterations don't execute the lastprivate clauses by accident.  */
      gimplify_assign (fd->loop.v, vinit, body_p);
    }
}
8158
8159
8160 /* Lower code for an OMP loop directive. */
8161
static void
lower_omp_for (gimple_stmt_iterator *gsi_p, omp_context *ctx)
{
  tree *rhs_p, block;
  struct omp_for_data fd, *fdp = NULL;
  gomp_for *stmt = as_a <gomp_for *> (gsi_stmt (*gsi_p));
  gbind *new_stmt;
  gimple_seq omp_for_body, body, dlist, tred_ilist = NULL, tred_dlist = NULL;
  gimple_seq cnt_list = NULL;
  gimple_seq oacc_head = NULL, oacc_tail = NULL;
  size_t i;

  push_gimplify_context ();

  lower_omp (gimple_omp_for_pre_body_ptr (stmt), ctx);

  block = make_node (BLOCK);
  new_stmt = gimple_build_bind (NULL, NULL, block);
  /* Replace at gsi right away, so that 'stmt' is no member
     of a sequence anymore as we're going to add to a different
     one below.  */
  gsi_replace (gsi_p, new_stmt, true);

  /* Move declaration of temporaries in the loop body before we make
     it go away.  */
  omp_for_body = gimple_omp_body (stmt);
  if (!gimple_seq_empty_p (omp_for_body)
      && gimple_code (gimple_seq_first_stmt (omp_for_body)) == GIMPLE_BIND)
    {
      gbind *inner_bind
	= as_a <gbind *> (gimple_seq_first_stmt (omp_for_body));
      tree vars = gimple_bind_vars (inner_bind);
      gimple_bind_append_vars (new_stmt, vars);
      /* bind_vars/BLOCK_VARS are being moved to new_stmt/block, don't
	 keep them on the inner_bind and its block.  */
      gimple_bind_set_vars (inner_bind, NULL_TREE);
      if (gimple_bind_block (inner_bind))
	BLOCK_VARS (gimple_bind_block (inner_bind)) = NULL_TREE;
    }

  if (gimple_omp_for_combined_into_p (stmt))
    {
      omp_extract_for_data (stmt, &fd, NULL);
      fdp = &fd;

      /* We need two temporaries with fd.loop.v type (istart/iend)
	 and then (fd.collapse - 1) temporaries with the same
	 type for count2 ... countN-1 vars if not constant.  */
      size_t count = 2;
      tree type = fd.iter_type;
      if (fd.collapse > 1
	  && TREE_CODE (fd.loop.n2) != INTEGER_CST)
	count += fd.collapse - 1;
      bool taskreg_for
	= (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_FOR
	   || gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_TASKLOOP);
      tree outerc = NULL, *pc = gimple_omp_for_clauses_ptr (stmt);
      tree simtc = NULL;
      tree clauses = *pc;
      /* For a for/taskloop combined into a parallel/task, reuse the
	 decls already recorded in the outer construct's _looptemp_
	 clauses instead of creating new temporaries.  */
      if (taskreg_for)
	outerc
	  = omp_find_clause (gimple_omp_taskreg_clauses (ctx->outer->stmt),
			     OMP_CLAUSE__LOOPTEMP_);
      if (ctx->simt_stmt)
	simtc = omp_find_clause (gimple_omp_for_clauses (ctx->simt_stmt),
				 OMP_CLAUSE__LOOPTEMP_);
      for (i = 0; i < count; i++)
	{
	  tree temp;
	  if (taskreg_for)
	    {
	      gcc_assert (outerc);
	      temp = lookup_decl (OMP_CLAUSE_DECL (outerc), ctx->outer);
	      outerc = omp_find_clause (OMP_CLAUSE_CHAIN (outerc),
					OMP_CLAUSE__LOOPTEMP_);
	    }
	  else
	    {
	      /* If there are 2 adjacent SIMD stmts, one with _simt_
		 clause, another without, make sure they have the same
		 decls in _looptemp_ clauses, because the outer stmt
		 they are combined into will look up just one inner_stmt.  */
	      if (ctx->simt_stmt)
		temp = OMP_CLAUSE_DECL (simtc);
	      else
		temp = create_tmp_var (type);
	      insert_decl_map (&ctx->outer->cb, temp, temp);
	    }
	  /* Prepend a fresh _looptemp_ clause for TEMP to this stmt's
	     clause list.  */
	  *pc = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__LOOPTEMP_);
	  OMP_CLAUSE_DECL (*pc) = temp;
	  pc = &OMP_CLAUSE_CHAIN (*pc);
	  if (ctx->simt_stmt)
	    simtc = omp_find_clause (OMP_CLAUSE_CHAIN (simtc),
				     OMP_CLAUSE__LOOPTEMP_);
	}
      *pc = clauses;
    }

  /* The pre-body and input clauses go before the lowered GIMPLE_OMP_FOR.  */
  dlist = NULL;
  body = NULL;
  tree rclauses
    = omp_task_reductions_find_first (gimple_omp_for_clauses (stmt), OMP_FOR,
				      OMP_CLAUSE_REDUCTION);
  tree rtmp = NULL_TREE;
  if (rclauses)
    {
      /* Task reductions: prepend a _reductemp_ clause holding a fresh
	 temporary, lower the reduction bookkeeping into tred_ilist /
	 tred_dlist, and remember the SSA name carrying the temp.  */
      tree type = build_pointer_type (pointer_sized_int_node);
      tree temp = create_tmp_var (type);
      tree c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__REDUCTEMP_);
      OMP_CLAUSE_DECL (c) = temp;
      OMP_CLAUSE_CHAIN (c) = gimple_omp_for_clauses (stmt);
      gimple_omp_for_set_clauses (stmt, c);
      lower_omp_task_reductions (ctx, OMP_FOR,
				 gimple_omp_for_clauses (stmt),
				 &tred_ilist, &tred_dlist);
      rclauses = c;
      rtmp = make_ssa_name (type);
      gimple_seq_add_stmt (&body, gimple_build_assign (rtmp, temp));
    }

  lower_rec_input_clauses (gimple_omp_for_clauses (stmt), &body, &dlist, ctx,
			   fdp);
  gimple_seq_add_seq (rclauses ? &tred_ilist : &body,
		      gimple_omp_for_pre_body (stmt));

  lower_omp (gimple_omp_body_ptr (stmt), ctx);

  /* Lower the header expressions.  At this point, we can assume that
     the header is of the form:

	#pragma omp for (V = VAL1; V {<|>|<=|>=} VAL2; V = V [+-] VAL3)

     We just need to make sure that VAL1, VAL2 and VAL3 are lowered
     using the .omp_data_s mapping, if needed.  */
  for (i = 0; i < gimple_omp_for_collapse (stmt); i++)
    {
      rhs_p = gimple_omp_for_initial_ptr (stmt, i);
      if (!is_gimple_min_invariant (*rhs_p))
	*rhs_p = get_formal_tmp_var (*rhs_p, &cnt_list);
      else if (TREE_CODE (*rhs_p) == ADDR_EXPR)
	recompute_tree_invariant_for_addr_expr (*rhs_p);

      rhs_p = gimple_omp_for_final_ptr (stmt, i);
      if (!is_gimple_min_invariant (*rhs_p))
	*rhs_p = get_formal_tmp_var (*rhs_p, &cnt_list);
      else if (TREE_CODE (*rhs_p) == ADDR_EXPR)
	recompute_tree_invariant_for_addr_expr (*rhs_p);

      rhs_p = &TREE_OPERAND (gimple_omp_for_incr (stmt, i), 1);
      if (!is_gimple_min_invariant (*rhs_p))
	*rhs_p = get_formal_tmp_var (*rhs_p, &cnt_list);
    }
  if (rclauses)
    gimple_seq_add_seq (&tred_ilist, cnt_list);
  else
    gimple_seq_add_seq (&body, cnt_list);

  /* Once lowered, extract the bounds and clauses.  */
  omp_extract_for_data (stmt, &fd, NULL);

  if (is_gimple_omp_oacc (ctx->stmt)
      && !ctx_in_oacc_kernels_region (ctx))
    lower_oacc_head_tail (gimple_location (stmt),
			  gimple_omp_for_clauses (stmt),
			  &oacc_head, &oacc_tail, ctx);

  /* Add OpenACC partitioning and reduction markers just before the loop.  */
  if (oacc_head)
    gimple_seq_add_seq (&body, oacc_head);

  lower_omp_for_lastprivate (&fd, &body, &dlist, ctx);

  /* Remap linear clause decls (and their steps, when those are decls)
     into this context for worksharing loops with copy-in.  */
  if (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_FOR)
    for (tree c = gimple_omp_for_clauses (stmt); c; c = OMP_CLAUSE_CHAIN (c))
      if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
	  && !OMP_CLAUSE_LINEAR_NO_COPYIN (c))
	{
	  OMP_CLAUSE_DECL (c) = lookup_decl (OMP_CLAUSE_DECL (c), ctx);
	  if (DECL_P (OMP_CLAUSE_LINEAR_STEP (c)))
	    OMP_CLAUSE_LINEAR_STEP (c)
	      = maybe_lookup_decl_in_outer_ctx (OMP_CLAUSE_LINEAR_STEP (c),
						ctx);
	}

  /* A phony (gridified) loop keeps its body but drops the OMP_FOR,
     OMP_CONTINUE and OMP_RETURN markers.  */
  bool phony_loop = (gimple_omp_for_kind (stmt) != GF_OMP_FOR_KIND_GRID_LOOP
		     && gimple_omp_for_grid_phony (stmt));
  if (!phony_loop)
    gimple_seq_add_stmt (&body, stmt);
  gimple_seq_add_seq (&body, gimple_omp_body (stmt));

  if (!phony_loop)
    gimple_seq_add_stmt (&body, gimple_build_omp_continue (fd.loop.v,
							   fd.loop.v));

  /* After the loop, add exit clauses.  */
  lower_reduction_clauses (gimple_omp_for_clauses (stmt), &body, ctx);

  if (ctx->cancellable)
    gimple_seq_add_stmt (&body, gimple_build_label (ctx->cancel_label));

  gimple_seq_add_seq (&body, dlist);

  if (rclauses)
    {
      gimple_seq_add_seq (&tred_ilist, body);
      body = tred_ilist;
    }

  body = maybe_catch_exception (body);

  if (!phony_loop)
    {
      /* Region exit marker goes at the end of the loop body.  */
      gimple *g = gimple_build_omp_return (fd.have_nowait);
      gimple_seq_add_stmt (&body, g);

      gimple_seq_add_seq (&body, tred_dlist);

      maybe_add_implicit_barrier_cancel (ctx, g, &body);

      if (rclauses)
	OMP_CLAUSE_DECL (rclauses) = rtmp;
    }

  /* Add OpenACC joining and reduction markers just after the loop.  */
  if (oacc_tail)
    gimple_seq_add_seq (&body, oacc_tail);

  pop_gimplify_context (new_stmt);

  gimple_bind_append_vars (new_stmt, ctx->block_vars);
  maybe_remove_omp_member_access_dummy_vars (new_stmt);
  BLOCK_VARS (block) = gimple_bind_vars (new_stmt);
  if (BLOCK_VARS (block))
    TREE_USED (block) = 1;

  gimple_bind_set_body (new_stmt, body);
  gimple_omp_set_body (stmt, NULL);
  gimple_omp_for_set_pre_body (stmt, NULL);
}
8403
8404 /* Callback for walk_stmts. Check if the current statement only contains
8405 GIMPLE_OMP_FOR or GIMPLE_OMP_SECTIONS. */
8406
8407 static tree
8408 check_combined_parallel (gimple_stmt_iterator *gsi_p,
8409 bool *handled_ops_p,
8410 struct walk_stmt_info *wi)
8411 {
8412 int *info = (int *) wi->info;
8413 gimple *stmt = gsi_stmt (*gsi_p);
8414
8415 *handled_ops_p = true;
8416 switch (gimple_code (stmt))
8417 {
8418 WALK_SUBSTMTS;
8419
8420 case GIMPLE_DEBUG:
8421 break;
8422 case GIMPLE_OMP_FOR:
8423 case GIMPLE_OMP_SECTIONS:
8424 *info = *info == 0 ? 1 : -1;
8425 break;
8426 default:
8427 *info = -1;
8428 break;
8429 }
8430 return NULL;
8431 }
8432
/* State carried while generating a task copy function; wraps the
   tree-inline copy machinery together with the task's omp_context.  */
struct omp_taskcopy_context
{
  /* This field must be at the beginning, as we do "inheritance": Some
     callback functions for tree-inline.c (e.g., omp_copy_decl)
     receive a copy_body_data pointer that is up-casted to an
     omp_context pointer.  */
  copy_body_data cb;
  /* Context of the GIMPLE_OMP_TASK whose copyfn is being built.  */
  omp_context *ctx;
};
8442
8443 static tree
8444 task_copyfn_copy_decl (tree var, copy_body_data *cb)
8445 {
8446 struct omp_taskcopy_context *tcctx = (struct omp_taskcopy_context *) cb;
8447
8448 if (splay_tree_lookup (tcctx->ctx->sfield_map, (splay_tree_key) var))
8449 return create_tmp_var (TREE_TYPE (var));
8450
8451 return var;
8452 }
8453
/* Build a copy of record type ORIG_TYPE for use in TCCTX's task copyfn,
   remapping each field's type through TCCTX->cb.  Every original field
   is recorded in TCCTX->cb.decl_map so later passes can translate field
   references.  Returns the freshly laid-out record type.  */

static tree
task_copyfn_remap_type (struct omp_taskcopy_context *tcctx, tree orig_type)
{
  tree name, new_fields = NULL, type, f;

  type = lang_hooks.types.make_type (RECORD_TYPE);
  name = DECL_NAME (TYPE_NAME (orig_type));
  name = build_decl (gimple_location (tcctx->ctx->stmt),
		     TYPE_DECL, name, type);
  TYPE_NAME (type) = name;

  for (f = TYPE_FIELDS (orig_type); f ; f = TREE_CHAIN (f))
    {
      tree new_f = copy_node (f);
      DECL_CONTEXT (new_f) = type;
      TREE_TYPE (new_f) = remap_type (TREE_TYPE (f), &tcctx->cb);
      /* Fields are accumulated in reverse and flipped back below.  */
      TREE_CHAIN (new_f) = new_fields;
      /* Field sizes and offsets may reference decls that were remapped;
	 rewrite those expressions through the copy tables as well.  */
      walk_tree (&DECL_SIZE (new_f), copy_tree_body_r, &tcctx->cb, NULL);
      walk_tree (&DECL_SIZE_UNIT (new_f), copy_tree_body_r, &tcctx->cb, NULL);
      walk_tree (&DECL_FIELD_OFFSET (new_f), copy_tree_body_r,
		 &tcctx->cb, NULL);
      new_fields = new_f;
      tcctx->cb.decl_map->put (f, new_f);
    }
  TYPE_FIELDS (type) = nreverse (new_fields);
  layout_type (type);
  return type;
}
8482
8483 /* Create task copyfn. */
8484
static void
create_task_copyfn (gomp_task *task_stmt, omp_context *ctx)
{
  struct function *child_cfun;
  tree child_fn, t, c, src, dst, f, sf, arg, sarg, decl;
  tree record_type, srecord_type, bind, list;
  bool record_needs_remap = false, srecord_needs_remap = false;
  splay_tree_node n;
  struct omp_taskcopy_context tcctx;
  location_t loc = gimple_location (task_stmt);
  size_t looptempno = 0;

  child_fn = gimple_omp_task_copy_fn (task_stmt);
  child_cfun = DECL_STRUCT_FUNCTION (child_fn);
  gcc_assert (child_cfun->cfg == NULL);
  DECL_SAVED_TREE (child_fn) = alloc_stmt_list ();

  /* Reset DECL_CONTEXT on function arguments.  */
  for (t = DECL_ARGUMENTS (child_fn); t; t = DECL_CHAIN (t))
    DECL_CONTEXT (t) = child_fn;

  /* Populate the function.  */
  push_gimplify_context ();
  push_cfun (child_cfun);

  bind = build3 (BIND_EXPR, void_type_node, NULL, NULL, NULL);
  TREE_SIDE_EFFECTS (bind) = 1;
  list = NULL;
  DECL_SAVED_TREE (child_fn) = bind;
  DECL_SOURCE_LOCATION (child_fn) = gimple_location (task_stmt);

  /* Remap src and dst argument types if needed.  A remap is needed
     whenever either record contains a variably modified field type
     (e.g. a VLA whose size decl must be translated).  */
  record_type = ctx->record_type;
  srecord_type = ctx->srecord_type;
  for (f = TYPE_FIELDS (record_type); f ; f = DECL_CHAIN (f))
    if (variably_modified_type_p (TREE_TYPE (f), ctx->cb.src_fn))
      {
	record_needs_remap = true;
	break;
      }
  for (f = TYPE_FIELDS (srecord_type); f ; f = DECL_CHAIN (f))
    if (variably_modified_type_p (TREE_TYPE (f), ctx->cb.src_fn))
      {
	srecord_needs_remap = true;
	break;
      }

  if (record_needs_remap || srecord_needs_remap)
    {
      /* Set up a copy_body_data so task_copyfn_remap_type and the
	 walk_tree calls inside it can translate decls into CHILD_FN.  */
      memset (&tcctx, '\0', sizeof (tcctx));
      tcctx.cb.src_fn = ctx->cb.src_fn;
      tcctx.cb.dst_fn = child_fn;
      tcctx.cb.src_node = cgraph_node::get (tcctx.cb.src_fn);
      gcc_checking_assert (tcctx.cb.src_node);
      tcctx.cb.dst_node = tcctx.cb.src_node;
      tcctx.cb.src_cfun = ctx->cb.src_cfun;
      tcctx.cb.copy_decl = task_copyfn_copy_decl;
      tcctx.cb.eh_lp_nr = 0;
      tcctx.cb.transform_call_graph_edges = CB_CGE_MOVE;
      tcctx.cb.decl_map = new hash_map<tree, tree>;
      tcctx.ctx = ctx;

      if (record_needs_remap)
	record_type = task_copyfn_remap_type (&tcctx, record_type);
      if (srecord_needs_remap)
	srecord_type = task_copyfn_remap_type (&tcctx, srecord_type);
    }
  else
    tcctx.cb.decl_map = NULL;

  /* ARG is the destination record pointer, SARG the sender (source)
     record pointer; adjust their types to the possibly remapped
     records.  */
  arg = DECL_ARGUMENTS (child_fn);
  TREE_TYPE (arg) = build_pointer_type (record_type);
  sarg = DECL_CHAIN (arg);
  TREE_TYPE (sarg) = build_pointer_type (srecord_type);

  /* First pass: initialize temporaries used in record_type and srecord_type
     sizes and field offsets.  */
  if (tcctx.cb.decl_map)
    for (c = gimple_omp_task_clauses (task_stmt); c; c = OMP_CLAUSE_CHAIN (c))
      if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
	{
	  tree *p;

	  decl = OMP_CLAUSE_DECL (c);
	  p = tcctx.cb.decl_map->get (decl);
	  if (p == NULL)
	    continue;
	  n = splay_tree_lookup (ctx->sfield_map, (splay_tree_key) decl);
	  sf = (tree) n->value;
	  sf = *tcctx.cb.decl_map->get (sf);
	  src = build_simple_mem_ref_loc (loc, sarg);
	  src = omp_build_component_ref (src, sf);
	  t = build2 (MODIFY_EXPR, TREE_TYPE (*p), *p, src);
	  append_to_statement_list (t, &list);
	}

  /* Second pass: copy shared var pointers and copy construct non-VLA
     firstprivate vars.  */
  for (c = gimple_omp_task_clauses (task_stmt); c; c = OMP_CLAUSE_CHAIN (c))
    switch (OMP_CLAUSE_CODE (c))
      {
	splay_tree_key key;
      case OMP_CLAUSE_SHARED:
	decl = OMP_CLAUSE_DECL (c);
	key = (splay_tree_key) decl;
	/* SHARED FIRSTPRIVATE decls are keyed by &DECL_UID in the
	   field maps, not by the decl itself.  */
	if (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
	  key = (splay_tree_key) &DECL_UID (decl);
	n = splay_tree_lookup (ctx->field_map, key);
	if (n == NULL)
	  break;
	f = (tree) n->value;
	if (tcctx.cb.decl_map)
	  f = *tcctx.cb.decl_map->get (f);
	n = splay_tree_lookup (ctx->sfield_map, key);
	sf = (tree) n->value;
	if (tcctx.cb.decl_map)
	  sf = *tcctx.cb.decl_map->get (sf);
	src = build_simple_mem_ref_loc (loc, sarg);
	src = omp_build_component_ref (src, sf);
	dst = build_simple_mem_ref_loc (loc, arg);
	dst = omp_build_component_ref (dst, f);
	t = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
	append_to_statement_list (t, &list);
	break;
      case OMP_CLAUSE_REDUCTION:
      case OMP_CLAUSE_IN_REDUCTION:
	/* Strip the address computation wrapped around array-section
	   reductions to get at the underlying decl used as map key.  */
	decl = OMP_CLAUSE_DECL (c);
	if (TREE_CODE (decl) == MEM_REF)
	  {
	    decl = TREE_OPERAND (decl, 0);
	    if (TREE_CODE (decl) == POINTER_PLUS_EXPR)
	      decl = TREE_OPERAND (decl, 0);
	    if (TREE_CODE (decl) == INDIRECT_REF
		|| TREE_CODE (decl) == ADDR_EXPR)
	      decl = TREE_OPERAND (decl, 0);
	  }
	key = (splay_tree_key) decl;
	n = splay_tree_lookup (ctx->field_map, key);
	if (n == NULL)
	  break;
	f = (tree) n->value;
	if (tcctx.cb.decl_map)
	  f = *tcctx.cb.decl_map->get (f);
	n = splay_tree_lookup (ctx->sfield_map, key);
	sf = (tree) n->value;
	if (tcctx.cb.decl_map)
	  sf = *tcctx.cb.decl_map->get (sf);
	src = build_simple_mem_ref_loc (loc, sarg);
	src = omp_build_component_ref (src, sf);
	if (decl != OMP_CLAUSE_DECL (c)
	    && TREE_CODE (TREE_TYPE (decl)) == REFERENCE_TYPE
	    && TREE_CODE (TREE_TYPE (TREE_TYPE (decl))) == POINTER_TYPE)
	  src = build_simple_mem_ref_loc (loc, src);
	dst = build_simple_mem_ref_loc (loc, arg);
	dst = omp_build_component_ref (dst, f);
	t = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
	append_to_statement_list (t, &list);
	break;
      case OMP_CLAUSE__LOOPTEMP_:
	/* Fields for first two _looptemp_ clauses are initialized by
	   GOMP_taskloop*, the rest are handled like firstprivate.  */
	if (looptempno < 2)
	  {
	    looptempno++;
	    break;
	  }
	/* FALLTHRU */
      case OMP_CLAUSE__REDUCTEMP_:
      case OMP_CLAUSE_FIRSTPRIVATE:
	decl = OMP_CLAUSE_DECL (c);
	/* VLAs are deferred to the last pass below.  */
	if (is_variable_sized (decl))
	  break;
	n = splay_tree_lookup (ctx->field_map, (splay_tree_key) decl);
	if (n == NULL)
	  break;
	f = (tree) n->value;
	if (tcctx.cb.decl_map)
	  f = *tcctx.cb.decl_map->get (f);
	n = splay_tree_lookup (ctx->sfield_map, (splay_tree_key) decl);
	if (n != NULL)
	  {
	    sf = (tree) n->value;
	    if (tcctx.cb.decl_map)
	      sf = *tcctx.cb.decl_map->get (sf);
	    src = build_simple_mem_ref_loc (loc, sarg);
	    src = omp_build_component_ref (src, sf);
	    if (use_pointer_for_field (decl, NULL) || omp_is_reference (decl))
	      src = build_simple_mem_ref_loc (loc, src);
	  }
	else
	  src = decl;
	dst = build_simple_mem_ref_loc (loc, arg);
	dst = omp_build_component_ref (dst, f);
	/* Firstprivate uses the language's copy constructor hook;
	   the internal temp clauses are plain assignments.  */
	if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_FIRSTPRIVATE)
	  t = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
	else
	  t = lang_hooks.decls.omp_clause_copy_ctor (c, dst, src);
	append_to_statement_list (t, &list);
	break;
      case OMP_CLAUSE_PRIVATE:
	if (! OMP_CLAUSE_PRIVATE_OUTER_REF (c))
	  break;
	decl = OMP_CLAUSE_DECL (c);
	n = splay_tree_lookup (ctx->field_map, (splay_tree_key) decl);
	f = (tree) n->value;
	if (tcctx.cb.decl_map)
	  f = *tcctx.cb.decl_map->get (f);
	n = splay_tree_lookup (ctx->sfield_map, (splay_tree_key) decl);
	if (n != NULL)
	  {
	    sf = (tree) n->value;
	    if (tcctx.cb.decl_map)
	      sf = *tcctx.cb.decl_map->get (sf);
	    src = build_simple_mem_ref_loc (loc, sarg);
	    src = omp_build_component_ref (src, sf);
	    if (use_pointer_for_field (decl, NULL))
	      src = build_simple_mem_ref_loc (loc, src);
	  }
	else
	  src = decl;
	dst = build_simple_mem_ref_loc (loc, arg);
	dst = omp_build_component_ref (dst, f);
	t = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
	append_to_statement_list (t, &list);
	break;
      default:
	break;
      }

  /* Last pass: handle VLA firstprivates.  */
  if (tcctx.cb.decl_map)
    for (c = gimple_omp_task_clauses (task_stmt); c; c = OMP_CLAUSE_CHAIN (c))
      if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
	{
	  tree ind, ptr, df;

	  decl = OMP_CLAUSE_DECL (c);
	  if (!is_variable_sized (decl))
	    continue;
	  n = splay_tree_lookup (ctx->field_map, (splay_tree_key) decl);
	  if (n == NULL)
	    continue;
	  f = (tree) n->value;
	  f = *tcctx.cb.decl_map->get (f);
	  /* A VLA firstprivate decl has a DECL_VALUE_EXPR of the form
	     *ptr_decl; copy-construct the data, then point the dest
	     record's pointer field at the new copy.  */
	  gcc_assert (DECL_HAS_VALUE_EXPR_P (decl));
	  ind = DECL_VALUE_EXPR (decl);
	  gcc_assert (TREE_CODE (ind) == INDIRECT_REF);
	  gcc_assert (DECL_P (TREE_OPERAND (ind, 0)));
	  n = splay_tree_lookup (ctx->sfield_map,
				 (splay_tree_key) TREE_OPERAND (ind, 0));
	  sf = (tree) n->value;
	  sf = *tcctx.cb.decl_map->get (sf);
	  src = build_simple_mem_ref_loc (loc, sarg);
	  src = omp_build_component_ref (src, sf);
	  src = build_simple_mem_ref_loc (loc, src);
	  dst = build_simple_mem_ref_loc (loc, arg);
	  dst = omp_build_component_ref (dst, f);
	  t = lang_hooks.decls.omp_clause_copy_ctor (c, dst, src);
	  append_to_statement_list (t, &list);
	  n = splay_tree_lookup (ctx->field_map,
				 (splay_tree_key) TREE_OPERAND (ind, 0));
	  df = (tree) n->value;
	  df = *tcctx.cb.decl_map->get (df);
	  ptr = build_simple_mem_ref_loc (loc, arg);
	  ptr = omp_build_component_ref (ptr, df);
	  t = build2 (MODIFY_EXPR, TREE_TYPE (ptr), ptr,
		      build_fold_addr_expr_loc (loc, dst));
	  append_to_statement_list (t, &list);
	}

  t = build1 (RETURN_EXPR, void_type_node, NULL);
  append_to_statement_list (t, &list);

  if (tcctx.cb.decl_map)
    delete tcctx.cb.decl_map;
  pop_gimplify_context (NULL);
  BIND_EXPR_BODY (bind) = list;
  pop_cfun ();
}
8764
/* Lower the OMP_CLAUSE_DEPEND clauses in *PCLAUSES into a single
   addressable array of pointers, prepending a new DEPEND clause of
   kind OMP_CLAUSE_DEPEND_LAST that holds the array's address.
   Initialization of the array is emitted into *ISEQ and a clobber of
   it into *OSEQ.  The array starts with a small header: with only
   in/out/inout dependences it is { total, out-count }, while the
   presence of mutexinoutset or depobj dependences switches to the
   5-entry header { 0, total, out-count, mutexinoutset-count,
   in-count }; the dependence addresses follow, grouped by kind.  */

static void
lower_depend_clauses (tree *pclauses, gimple_seq *iseq, gimple_seq *oseq)
{
  tree c, clauses;
  gimple *g;
  /* cnt[0] = out/inout, cnt[1] = mutexinoutset, cnt[2] = in,
     cnt[3] = depobj.  IDX is the header length (2 or 5) and later the
     running index of the next free array slot.  */
  size_t cnt[4] = { 0, 0, 0, 0 }, idx = 2, i;

  clauses = omp_find_clause (*pclauses, OMP_CLAUSE_DEPEND);
  gcc_assert (clauses);
  for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
    if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND)
      switch (OMP_CLAUSE_DEPEND_KIND (c))
	{
	case OMP_CLAUSE_DEPEND_LAST:
	  /* Lowering already done at gimplification.  */
	  return;
	case OMP_CLAUSE_DEPEND_IN:
	  cnt[2]++;
	  break;
	case OMP_CLAUSE_DEPEND_OUT:
	case OMP_CLAUSE_DEPEND_INOUT:
	  cnt[0]++;
	  break;
	case OMP_CLAUSE_DEPEND_MUTEXINOUTSET:
	  cnt[1]++;
	  break;
	case OMP_CLAUSE_DEPEND_DEPOBJ:
	  cnt[3]++;
	  break;
	case OMP_CLAUSE_DEPEND_SOURCE:
	case OMP_CLAUSE_DEPEND_SINK:
	  /* FALLTHRU */
	default:
	  gcc_unreachable ();
	}
  /* The extended header format is needed as soon as mutexinoutset or
     depobj dependences appear.  */
  if (cnt[1] || cnt[3])
    idx = 5;
  size_t total = cnt[0] + cnt[1] + cnt[2] + cnt[3];
  tree type = build_array_type_nelts (ptr_type_node, total + idx);
  tree array = create_tmp_var (type);
  TREE_ADDRESSABLE (array) = 1;
  tree r = build4 (ARRAY_REF, ptr_type_node, array, size_int (0), NULL_TREE,
		   NULL_TREE);
  if (idx == 5)
    {
      /* A leading zero distinguishes the new format from the old one,
	 where slot 0 holds the (non-zero) total count.  */
      g = gimple_build_assign (r, build_int_cst (ptr_type_node, 0));
      gimple_seq_add_stmt (iseq, g);
      r = build4 (ARRAY_REF, ptr_type_node, array, size_int (1), NULL_TREE,
		  NULL_TREE);
    }
  g = gimple_build_assign (r, build_int_cst (ptr_type_node, total));
  gimple_seq_add_stmt (iseq, g);
  /* Store the per-kind counts that belong in the header: just cnt[0]
     for the old format, cnt[0..2] for the extended one.  */
  for (i = 0; i < (idx == 5 ? 3 : 1); i++)
    {
      r = build4 (ARRAY_REF, ptr_type_node, array,
		  size_int (i + 1 + (idx == 5)), NULL_TREE, NULL_TREE);
      g = gimple_build_assign (r, build_int_cst (ptr_type_node, cnt[i]));
      gimple_seq_add_stmt (iseq, g);
    }
  /* Fill in the addresses, one kind at a time, in the header's order.  */
  for (i = 0; i < 4; i++)
    {
      if (cnt[i] == 0)
	continue;
      for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
	if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_DEPEND)
	  continue;
	else
	  {
	    switch (OMP_CLAUSE_DEPEND_KIND (c))
	      {
	      case OMP_CLAUSE_DEPEND_IN:
		if (i != 2)
		  continue;
		break;
	      case OMP_CLAUSE_DEPEND_OUT:
	      case OMP_CLAUSE_DEPEND_INOUT:
		if (i != 0)
		  continue;
		break;
	      case OMP_CLAUSE_DEPEND_MUTEXINOUTSET:
		if (i != 1)
		  continue;
		break;
	      case OMP_CLAUSE_DEPEND_DEPOBJ:
		if (i != 3)
		  continue;
		break;
	      default:
		gcc_unreachable ();
	      }
	    tree t = OMP_CLAUSE_DECL (c);
	    t = fold_convert (ptr_type_node, t);
	    gimplify_expr (&t, iseq, NULL, is_gimple_val, fb_rvalue);
	    r = build4 (ARRAY_REF, ptr_type_node, array, size_int (idx++),
			NULL_TREE, NULL_TREE);
	    g = gimple_build_assign (r, t);
	    gimple_seq_add_stmt (iseq, g);
	  }
    }
  /* Replace the individual DEPEND clauses with one DEPEND_LAST clause
     carrying the address of the array.  */
  c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE_DEPEND);
  OMP_CLAUSE_DEPEND_KIND (c) = OMP_CLAUSE_DEPEND_LAST;
  OMP_CLAUSE_DECL (c) = build_fold_addr_expr (array);
  OMP_CLAUSE_CHAIN (c) = *pclauses;
  *pclauses = c;
  /* Clobber the array afterwards so its stack slot can be reused.  */
  tree clobber = build_constructor (type, NULL);
  TREE_THIS_VOLATILE (clobber) = 1;
  g = gimple_build_assign (array, clobber);
  gimple_seq_add_stmt (oseq, g);
}
8874
8875 /* Lower the OpenMP parallel or task directive in the current statement
8876 in GSI_P. CTX holds context information for the directive. */
8877
8878 static void
8879 lower_omp_taskreg (gimple_stmt_iterator *gsi_p, omp_context *ctx)
8880 {
8881 tree clauses;
8882 tree child_fn, t;
8883 gimple *stmt = gsi_stmt (*gsi_p);
8884 gbind *bind, *dep_bind = NULL;
/* par_bind/par_body: the GIMPLE_BIND forming the construct's body, or NULL
   for a "#pragma omp taskwait depend(...)" stand-alone task (see below).  */
8884 gbind *par_bind, *bind, *dep_bind = NULL;
8885 gimple_seq par_body;
8886 location_t loc = gimple_location (stmt);
8887
8888 clauses = gimple_omp_taskreg_clauses (stmt);
/* A taskwait-with-depend task has no body at all; otherwise the body's
   first statement is the bind holding the user code.  */
8889 if (gimple_code (stmt) == GIMPLE_OMP_TASK
8890 && gimple_omp_task_taskwait_p (stmt))
8891 {
8892 par_bind = NULL;
8893 par_body = NULL;
8894 }
8895 else
8896 {
8897 par_bind
8898 = as_a <gbind *> (gimple_seq_first_stmt (gimple_omp_body (stmt)))
8899 par_body = gimple_bind_body (par_bind);
8900 }
8901 child_fn = ctx->cb.dst_fn;
/* For a parallel not already marked combined, walk the body with
   check_combined_parallel; if it reports exactly one worksharing
   construct, mark the parallel as combined so expansion can use the
   combined GOMP_parallel_* entry points.  */
8902 if (gimple_code (stmt) == GIMPLE_OMP_PARALLEL
8903 && !gimple_omp_parallel_combined_p (stmt))
8904 {
8905 struct walk_stmt_info wi;
8906 int ws_num = 0;
8907
8908 memset (&wi, 0, sizeof (wi));
8909 wi.info = &ws_num;
8910 wi.val_only = true;
8911 walk_gimple_seq (par_body, check_combined_parallel, NULL, &wi);
8912 if (ws_num == 1)
8913 gimple_omp_parallel_set_combined_p (stmt, true);
8914 }
/* For a task with depend clauses, lower them into an on-stack depend
   array; dep_ilist initializes it before the task, dep_olist clobbers
   it afterwards.  dep_bind will wrap everything.  */
8915 gimple_seq dep_ilist = NULL;
8916 gimple_seq dep_olist = NULL;
8917 if (gimple_code (stmt) == GIMPLE_OMP_TASK
8918 && omp_find_clause (clauses, OMP_CLAUSE_DEPEND))
8919 {
8920 push_gimplify_context ();
8921 dep_bind = gimple_build_bind (NULL, NULL, make_node (BLOCK));
8922 lower_depend_clauses (gimple_omp_task_clauses_ptr (stmt),
8923 &dep_ilist, &dep_olist);
8924 }
8925
/* Stand-alone taskwait-with-depend: there is no body to lower, so just
   wrap the statement between the depend setup/teardown and return.  */
8926 if (gimple_code (stmt) == GIMPLE_OMP_TASK
8927 && gimple_omp_task_taskwait_p (stmt))
8928 {
8929 if (dep_bind)
8930 {
8931 gsi_replace (gsi_p, dep_bind, true);
8932 gimple_bind_add_seq (dep_bind, dep_ilist);
8933 gimple_bind_add_stmt (dep_bind, stmt);
8934 gimple_bind_add_seq (dep_bind, dep_olist);
8935 pop_gimplify_context (dep_bind);
8936 }
8937 return;
8938 }
8939
/* srecord_type is only set for tasks needing a sender record distinct
   from the receiver record; emit the copy function that marshals it.  */
8940 if (ctx->srecord_type)
8941 create_task_copyfn (as_a <gomp_task *> (stmt), ctx);
8942
/* Task reductions (taskloop reduction / parallel _REDUCTEMP_) need
   setup/teardown sequences placed outside the construct, inside dep_bind;
   create dep_bind here if depend handling above did not.  */
8943 gimple_seq tskred_ilist = NULL;
8944 gimple_seq tskred_olist = NULL;
8945 if ((is_task_ctx (ctx)
8946 && gimple_omp_task_taskloop_p (ctx->stmt)
8947 && omp_find_clause (gimple_omp_task_clauses (ctx->stmt),
8948 OMP_CLAUSE_REDUCTION))
8949 || (is_parallel_ctx (ctx)
8950 && omp_find_clause (gimple_omp_parallel_clauses (stmt),
8951 OMP_CLAUSE__REDUCTEMP_)))
8952 {
8953 if (dep_bind == NULL)
8954 {
8955 push_gimplify_context ();
8956 dep_bind = gimple_build_bind (NULL, NULL, make_node (BLOCK));
8957 }
8958 lower_omp_task_reductions (ctx, is_task_ctx (ctx) ? OMP_TASKLOOP
8959 : OMP_PARALLEL,
8960 gimple_omp_taskreg_clauses (ctx->stmt),
8961 &tskred_ilist, &tskred_olist);
8962 }
8963
8964 push_gimplify_context ();
8965
/* Lower the data-sharing clauses and the construct body itself.
   par_ilist: receiver-side setup; par_olist: receiver-side teardown;
   par_rlist: reduction combination code (parallel only).  */
8966 gimple_seq par_olist = NULL;
8967 gimple_seq par_ilist = NULL;
8968 gimple_seq par_rlist = NULL;
/* A "phony" construct comes from the HSA/grid lowering: the parallel is
   kept inline in the current function rather than outlined, so it gets
   a locally created receiver_decl and its body is emitted in place.  */
8969 bool phony_construct = gimple_code (stmt) == GIMPLE_OMP_PARALLEL
8970 && gimple_omp_parallel_grid_phony (as_a <gomp_parallel *> (stmt));
8971 if (phony_construct && ctx->record_type)
8972 {
8973 gcc_checking_assert (!ctx->receiver_decl);
8974 ctx->receiver_decl = create_tmp_var
8975 (build_reference_type (ctx->record_type), ".omp_rec");
8976 }
8977 lower_rec_input_clauses (clauses, &par_ilist, &par_olist, ctx, NULL);
8978 lower_omp (&par_body, ctx);
8979 if (gimple_code (stmt) == GIMPLE_OMP_PARALLEL)
8980 lower_reduction_clauses (clauses, &par_rlist, ctx);
8981
8982 /* Declare all the variables created by mapping and the variables
8983 declared in the scope of the parallel body.  */
8984 record_vars_into (ctx->block_vars, child_fn)
8985 maybe_remove_omp_member_access_dummy_vars (par_bind);
8986 record_vars_into (gimple_bind_vars (par_bind), child_fn);
8987
/* Build the .omp_data_o sender record passed to the outlined child
   function (srecord_type takes precedence for tasks with a copyfn).  */
8988 if (ctx->record_type)
8989 {
8990 ctx->sender_decl
8991 = create_tmp_var (ctx->srecord_type ? ctx->srecord_type
8992 : ctx->record_type, ".omp_data_o");
8993 DECL_NAMELESS (ctx->sender_decl) = 1;
8994 TREE_ADDRESSABLE (ctx->sender_decl) = 1;
8995 gimple_omp_taskreg_set_data_arg (stmt, ctx->sender_decl);
8996 }
8997
/* Sender-side sequences: ilist fills the sender record before the
   construct, olist copies back / cleans up after it.  */
8998 gimple_seq olist = NULL;
8999 gimple_seq ilist = NULL;
9000 lower_send_clauses (clauses, &ilist, &olist, ctx);
9001 lower_send_shared_vars (&ilist, &olist, ctx);
9002
/* Clobber the sender record after the construct so its stack slot can
   be reused (an empty-CONSTRUCTOR volatile assignment is GCC's clobber
   representation).  */
9003 if (ctx->record_type)
9004 {
9005 tree clobber = build_constructor (TREE_TYPE (ctx->sender_decl), NULL);
9006 TREE_THIS_VOLATILE (clobber) = 1;
9007 gimple_seq_add_stmt (&olist, gimple_build_assign (ctx->sender_decl,
9008 clobber));
9009 }
9010
9011 /* Once all the expansions are done, sequence all the different
9012 fragments inside gimple_omp_body.  */
9013
9014 gimple_seq new_body = NULL;
9015
/* First statement of the child body: receiver_decl = &sender_decl.  */
9016 if (ctx->record_type)
9017 {
9018 t = build_fold_addr_expr_loc (loc, ctx->sender_decl);
9019 /* fixup_child_record_type might have changed receiver_decl's type.  */
9020 t = fold_convert_loc (loc, TREE_TYPE (ctx->receiver_decl), t);
9021 gimple_seq_add_stmt (&new_body,
9022 gimple_build_assign (ctx->receiver_decl, t));
9023 }
9024
/* Order inside the child body: clause setup, user code, reduction
   combination, optional cancellation label, clause teardown.  */
9025 gimple_seq_add_seq (&new_body, par_ilist);
9026 gimple_seq_add_seq (&new_body, par_body);
9027 gimple_seq_add_seq (&new_body, par_rlist);
9028 if (ctx->cancellable)
9029 gimple_seq_add_stmt (&new_body, gimple_build_label (ctx->cancel_label));
9030 gimple_seq_add_seq (&new_body, par_olist);
9031 new_body = maybe_catch_exception (new_body);
9032 if (gimple_code (stmt) == GIMPLE_OMP_TASK)
9033 gimple_seq_add_stmt (&new_body,
9034 gimple_build_omp_continue (integer_zero_node,
9035 integer_zero_node));
/* For a phony (inline) construct the body is emitted directly into the
   surrounding bind below instead of being kept as the construct's body.  */
9036 if (!phony_construct)
9037 {
9038 gimple_seq_add_stmt (&new_body, gimple_build_omp_return (false));
9039 gimple_omp_set_body (stmt, new_body);
9040 }
9041
/* Replace the original statement with BIND (sender setup + construct +
   sender teardown), itself nested inside DEP_BIND when depend clauses
   or task reductions are present.  */
9042 if (dep_bind && gimple_bind_block (par_bind) == NULL_TREE)
9043 bind = gimple_build_bind (NULL, NULL, make_node (BLOCK));
9044 else
9045 bind = gimple_build_bind (NULL, NULL, gimple_bind_block (par_bind));
9046 gsi_replace (gsi_p, dep_bind ? dep_bind : bind, true);
9047 gimple_bind_add_seq (bind, ilist);
9048 if (!phony_construct)
9049 gimple_bind_add_stmt (bind, stmt);
9050 else
9051 gimple_bind_add_seq (bind, new_body);
9052 gimple_bind_add_seq (bind, olist);
9053
9054 pop_gimplify_context (NULL);
9055
/* dep_bind layout: depend setup, task-reduction setup, BIND,
   task-reduction teardown, depend teardown.  The second
   pop_gimplify_context pairs with the push done when dep_bind was
   created above.  */
9056 if (dep_bind)
9057 {
9058 gimple_bind_add_seq (dep_bind, dep_ilist);
9059 gimple_bind_add_seq (dep_bind, tskred_ilist);
9060 gimple_bind_add_stmt (dep_bind, bind);
9061 gimple_bind_add_seq (dep_bind, tskred_olist);
9062 gimple_bind_add_seq (dep_bind, dep_olist);
9063 pop_gimplify_context (dep_bind);
9064 }
9065 }
9066
9067 /* Lower the GIMPLE_OMP_TARGET in the current statement
9068 in GSI_P. CTX holds context information for the directive. */
9069
9070 static void
9071 lower_omp_target (gimple_stmt_iterator *gsi_p, omp_context *ctx)
9072 {
9073 tree clauses;
9074 tree child_fn, t, c;
9075 gomp_target *stmt = as_a <gomp_target *> (gsi_stmt (*gsi_p));
9076 gbind *tgt_bind, *bind, *dep_bind = NULL;
9077 gimple_seq tgt_body, olist, ilist, fplist, new_body;
9078 location_t loc = gimple_location (stmt);
9079 bool offloaded, data_region;
9080 unsigned int map_cnt = 0;
9081
9082 offloaded = is_gimple_omp_offloaded (stmt);
9083 switch (gimple_omp_target_kind (stmt))
9084 {
9085 case GF_OMP_TARGET_KIND_REGION:
9086 case GF_OMP_TARGET_KIND_UPDATE:
9087 case GF_OMP_TARGET_KIND_ENTER_DATA:
9088 case GF_OMP_TARGET_KIND_EXIT_DATA:
9089 case GF_OMP_TARGET_KIND_OACC_PARALLEL:
9090 case GF_OMP_TARGET_KIND_OACC_KERNELS:
9091 case GF_OMP_TARGET_KIND_OACC_UPDATE:
9092 case GF_OMP_TARGET_KIND_OACC_ENTER_EXIT_DATA:
9093 case GF_OMP_TARGET_KIND_OACC_DECLARE:
9094 data_region = false;
9095 break;
9096 case GF_OMP_TARGET_KIND_DATA:
9097 case GF_OMP_TARGET_KIND_OACC_DATA:
9098 case GF_OMP_TARGET_KIND_OACC_HOST_DATA:
9099 data_region = true;
9100 break;
9101 default:
9102 gcc_unreachable ();
9103 }
9104
9105 clauses = gimple_omp_target_clauses (stmt);
9106
9107 gimple_seq dep_ilist = NULL;
9108 gimple_seq dep_olist = NULL;
9109 if (omp_find_clause (clauses, OMP_CLAUSE_DEPEND))
9110 {
9111 push_gimplify_context ();
9112 dep_bind = gimple_build_bind (NULL, NULL, make_node (BLOCK));
9113 lower_depend_clauses (gimple_omp_target_clauses_ptr (stmt),
9114 &dep_ilist, &dep_olist);
9115 }
9116
9117 tgt_bind = NULL;
9118 tgt_body = NULL;
9119 if (offloaded)
9120 {
9121 tgt_bind = gimple_seq_first_stmt_as_a_bind (gimple_omp_body (stmt));
9122 tgt_body = gimple_bind_body (tgt_bind);
9123 }
9124 else if (data_region)
9125 tgt_body = gimple_omp_body (stmt);
9126 child_fn = ctx->cb.dst_fn;
9127
9128 push_gimplify_context ();
9129 fplist = NULL;
9130
9131 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
9132 switch (OMP_CLAUSE_CODE (c))
9133 {
9134 tree var, x;
9135
9136 default:
9137 break;
9138 case OMP_CLAUSE_MAP:
9139 #if CHECKING_P
9140 /* First check what we're prepared to handle in the following. */
9141 switch (OMP_CLAUSE_MAP_KIND (c))
9142 {
9143 case GOMP_MAP_ALLOC:
9144 case GOMP_MAP_TO:
9145 case GOMP_MAP_FROM:
9146 case GOMP_MAP_TOFROM:
9147 case GOMP_MAP_POINTER:
9148 case GOMP_MAP_TO_PSET:
9149 case GOMP_MAP_DELETE:
9150 case GOMP_MAP_RELEASE:
9151 case GOMP_MAP_ALWAYS_TO:
9152 case GOMP_MAP_ALWAYS_FROM:
9153 case GOMP_MAP_ALWAYS_TOFROM:
9154 case GOMP_MAP_FIRSTPRIVATE_POINTER:
9155 case GOMP_MAP_FIRSTPRIVATE_REFERENCE:
9156 case GOMP_MAP_STRUCT:
9157 case GOMP_MAP_ALWAYS_POINTER:
9158 break;
9159 case GOMP_MAP_FORCE_ALLOC:
9160 case GOMP_MAP_FORCE_TO:
9161 case GOMP_MAP_FORCE_FROM:
9162 case GOMP_MAP_FORCE_TOFROM:
9163 case GOMP_MAP_FORCE_PRESENT:
9164 case GOMP_MAP_FORCE_DEVICEPTR:
9165 case GOMP_MAP_DEVICE_RESIDENT:
9166 case GOMP_MAP_LINK:
9167 gcc_assert (is_gimple_omp_oacc (stmt));
9168 break;
9169 default:
9170 gcc_unreachable ();
9171 }
9172 #endif
9173 /* FALLTHRU */
9174 case OMP_CLAUSE_TO:
9175 case OMP_CLAUSE_FROM:
9176 oacc_firstprivate:
9177 var = OMP_CLAUSE_DECL (c);
9178 if (!DECL_P (var))
9179 {
9180 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_MAP
9181 || (!OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c)
9182 && (OMP_CLAUSE_MAP_KIND (c)
9183 != GOMP_MAP_FIRSTPRIVATE_POINTER)))
9184 map_cnt++;
9185 continue;
9186 }
9187
9188 if (DECL_SIZE (var)
9189 && TREE_CODE (DECL_SIZE (var)) != INTEGER_CST)
9190 {
9191 tree var2 = DECL_VALUE_EXPR (var);
9192 gcc_assert (TREE_CODE (var2) == INDIRECT_REF);
9193 var2 = TREE_OPERAND (var2, 0);
9194 gcc_assert (DECL_P (var2));
9195 var = var2;
9196 }
9197
9198 if (offloaded
9199 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
9200 && (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER
9201 || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_REFERENCE))
9202 {
9203 if (TREE_CODE (TREE_TYPE (var)) == ARRAY_TYPE)
9204 {
9205 if (is_global_var (maybe_lookup_decl_in_outer_ctx (var, ctx))
9206 && varpool_node::get_create (var)->offloadable)
9207 continue;
9208
9209 tree type = build_pointer_type (TREE_TYPE (var));
9210 tree new_var = lookup_decl (var, ctx);
9211 x = create_tmp_var_raw (type, get_name (new_var));
9212 gimple_add_tmp_var (x);
9213 x = build_simple_mem_ref (x);
9214 SET_DECL_VALUE_EXPR (new_var, x);
9215 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
9216 }
9217 continue;
9218 }
9219
9220 if (!maybe_lookup_field (var, ctx))
9221 continue;
9222
9223 /* Don't remap oacc parallel reduction variables, because the
9224 intermediate result must be local to each gang. */
9225 if (offloaded && !(OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
9226 && OMP_CLAUSE_MAP_IN_REDUCTION (c)))
9227 {
9228 x = build_receiver_ref (var, true, ctx);
9229 tree new_var = lookup_decl (var, ctx);
9230
9231 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
9232 && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_POINTER
9233 && !OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c)
9234 && TREE_CODE (TREE_TYPE (var)) == ARRAY_TYPE)
9235 x = build_simple_mem_ref (x);
9236 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
9237 {
9238 gcc_assert (is_gimple_omp_oacc (ctx->stmt));
9239 if (omp_is_reference (new_var)
9240 && TREE_CODE (TREE_TYPE (new_var)) != POINTER_TYPE)
9241 {
9242 /* Create a local object to hold the instance
9243 value. */
9244 tree type = TREE_TYPE (TREE_TYPE (new_var));
9245 const char *id = IDENTIFIER_POINTER (DECL_NAME (new_var));
9246 tree inst = create_tmp_var (type, id);
9247 gimplify_assign (inst, fold_indirect_ref (x), &fplist);
9248 x = build_fold_addr_expr (inst);
9249 }
9250 gimplify_assign (new_var, x, &fplist);
9251 }
9252 else if (DECL_P (new_var))
9253 {
9254 SET_DECL_VALUE_EXPR (new_var, x);
9255 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
9256 }
9257 else
9258 gcc_unreachable ();
9259 }
9260 map_cnt++;
9261 break;
9262
9263 case OMP_CLAUSE_FIRSTPRIVATE:
9264 if (is_oacc_parallel (ctx))
9265 goto oacc_firstprivate;
9266 map_cnt++;
9267 var = OMP_CLAUSE_DECL (c);
9268 if (!omp_is_reference (var)
9269 && !is_gimple_reg_type (TREE_TYPE (var)))
9270 {
9271 tree new_var = lookup_decl (var, ctx);
9272 if (is_variable_sized (var))
9273 {
9274 tree pvar = DECL_VALUE_EXPR (var);
9275 gcc_assert (TREE_CODE (pvar) == INDIRECT_REF);
9276 pvar = TREE_OPERAND (pvar, 0);
9277 gcc_assert (DECL_P (pvar));
9278 tree new_pvar = lookup_decl (pvar, ctx);
9279 x = build_fold_indirect_ref (new_pvar);
9280 TREE_THIS_NOTRAP (x) = 1;
9281 }
9282 else
9283 x = build_receiver_ref (var, true, ctx);
9284 SET_DECL_VALUE_EXPR (new_var, x);
9285 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
9286 }
9287 break;
9288
9289 case OMP_CLAUSE_PRIVATE:
9290 if (is_gimple_omp_oacc (ctx->stmt))
9291 break;
9292 var = OMP_CLAUSE_DECL (c);
9293 if (is_variable_sized (var))
9294 {
9295 tree new_var = lookup_decl (var, ctx);
9296 tree pvar = DECL_VALUE_EXPR (var);
9297 gcc_assert (TREE_CODE (pvar) == INDIRECT_REF);
9298 pvar = TREE_OPERAND (pvar, 0);
9299 gcc_assert (DECL_P (pvar));
9300 tree new_pvar = lookup_decl (pvar, ctx);
9301 x = build_fold_indirect_ref (new_pvar);
9302 TREE_THIS_NOTRAP (x) = 1;
9303 SET_DECL_VALUE_EXPR (new_var, x);
9304 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
9305 }
9306 break;
9307
9308 case OMP_CLAUSE_USE_DEVICE_PTR:
9309 case OMP_CLAUSE_IS_DEVICE_PTR:
9310 var = OMP_CLAUSE_DECL (c);
9311 map_cnt++;
9312 if (is_variable_sized (var))
9313 {
9314 tree new_var = lookup_decl (var, ctx);
9315 tree pvar = DECL_VALUE_EXPR (var);
9316 gcc_assert (TREE_CODE (pvar) == INDIRECT_REF);
9317 pvar = TREE_OPERAND (pvar, 0);
9318 gcc_assert (DECL_P (pvar));
9319 tree new_pvar = lookup_decl (pvar, ctx);
9320 x = build_fold_indirect_ref (new_pvar);
9321 TREE_THIS_NOTRAP (x) = 1;
9322 SET_DECL_VALUE_EXPR (new_var, x);
9323 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
9324 }
9325 else if (TREE_CODE (TREE_TYPE (var)) == ARRAY_TYPE)
9326 {
9327 tree new_var = lookup_decl (var, ctx);
9328 tree type = build_pointer_type (TREE_TYPE (var));
9329 x = create_tmp_var_raw (type, get_name (new_var));
9330 gimple_add_tmp_var (x);
9331 x = build_simple_mem_ref (x);
9332 SET_DECL_VALUE_EXPR (new_var, x);
9333 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
9334 }
9335 else
9336 {
9337 tree new_var = lookup_decl (var, ctx);
9338 x = create_tmp_var_raw (TREE_TYPE (new_var), get_name (new_var));
9339 gimple_add_tmp_var (x);
9340 SET_DECL_VALUE_EXPR (new_var, x);
9341 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
9342 }
9343 break;
9344 }
9345
9346 if (offloaded)
9347 {
9348 target_nesting_level++;
9349 lower_omp (&tgt_body, ctx);
9350 target_nesting_level--;
9351 }
9352 else if (data_region)
9353 lower_omp (&tgt_body, ctx);
9354
9355 if (offloaded)
9356 {
9357 /* Declare all the variables created by mapping and the variables
9358 declared in the scope of the target body. */
9359 record_vars_into (ctx->block_vars, child_fn);
9360 maybe_remove_omp_member_access_dummy_vars (tgt_bind);
9361 record_vars_into (gimple_bind_vars (tgt_bind), child_fn);
9362 }
9363
9364 olist = NULL;
9365 ilist = NULL;
9366 if (ctx->record_type)
9367 {
9368 ctx->sender_decl
9369 = create_tmp_var (ctx->record_type, ".omp_data_arr");
9370 DECL_NAMELESS (ctx->sender_decl) = 1;
9371 TREE_ADDRESSABLE (ctx->sender_decl) = 1;
9372 t = make_tree_vec (3);
9373 TREE_VEC_ELT (t, 0) = ctx->sender_decl;
9374 TREE_VEC_ELT (t, 1)
9375 = create_tmp_var (build_array_type_nelts (size_type_node, map_cnt),
9376 ".omp_data_sizes");
9377 DECL_NAMELESS (TREE_VEC_ELT (t, 1)) = 1;
9378 TREE_ADDRESSABLE (TREE_VEC_ELT (t, 1)) = 1;
9379 TREE_STATIC (TREE_VEC_ELT (t, 1)) = 1;
9380 tree tkind_type = short_unsigned_type_node;
9381 int talign_shift = 8;
9382 TREE_VEC_ELT (t, 2)
9383 = create_tmp_var (build_array_type_nelts (tkind_type, map_cnt),
9384 ".omp_data_kinds");
9385 DECL_NAMELESS (TREE_VEC_ELT (t, 2)) = 1;
9386 TREE_ADDRESSABLE (TREE_VEC_ELT (t, 2)) = 1;
9387 TREE_STATIC (TREE_VEC_ELT (t, 2)) = 1;
9388 gimple_omp_target_set_data_arg (stmt, t);
9389
9390 vec<constructor_elt, va_gc> *vsize;
9391 vec<constructor_elt, va_gc> *vkind;
9392 vec_alloc (vsize, map_cnt);
9393 vec_alloc (vkind, map_cnt);
9394 unsigned int map_idx = 0;
9395
9396 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
9397 switch (OMP_CLAUSE_CODE (c))
9398 {
9399 tree ovar, nc, s, purpose, var, x, type;
9400 unsigned int talign;
9401
9402 default:
9403 break;
9404
9405 case OMP_CLAUSE_MAP:
9406 case OMP_CLAUSE_TO:
9407 case OMP_CLAUSE_FROM:
9408 oacc_firstprivate_map:
9409 nc = c;
9410 ovar = OMP_CLAUSE_DECL (c);
9411 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
9412 && (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER
9413 || (OMP_CLAUSE_MAP_KIND (c)
9414 == GOMP_MAP_FIRSTPRIVATE_REFERENCE)))
9415 break;
9416 if (!DECL_P (ovar))
9417 {
9418 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
9419 && OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c))
9420 {
9421 gcc_checking_assert (OMP_CLAUSE_DECL (OMP_CLAUSE_CHAIN (c))
9422 == get_base_address (ovar));
9423 nc = OMP_CLAUSE_CHAIN (c);
9424 ovar = OMP_CLAUSE_DECL (nc);
9425 }
9426 else
9427 {
9428 tree x = build_sender_ref (ovar, ctx);
9429 tree v
9430 = build_fold_addr_expr_with_type (ovar, ptr_type_node);
9431 gimplify_assign (x, v, &ilist);
9432 nc = NULL_TREE;
9433 }
9434 }
9435 else
9436 {
9437 if (DECL_SIZE (ovar)
9438 && TREE_CODE (DECL_SIZE (ovar)) != INTEGER_CST)
9439 {
9440 tree ovar2 = DECL_VALUE_EXPR (ovar);
9441 gcc_assert (TREE_CODE (ovar2) == INDIRECT_REF);
9442 ovar2 = TREE_OPERAND (ovar2, 0);
9443 gcc_assert (DECL_P (ovar2));
9444 ovar = ovar2;
9445 }
9446 if (!maybe_lookup_field (ovar, ctx))
9447 continue;
9448 }
9449
9450 talign = TYPE_ALIGN_UNIT (TREE_TYPE (ovar));
9451 if (DECL_P (ovar) && DECL_ALIGN_UNIT (ovar) > talign)
9452 talign = DECL_ALIGN_UNIT (ovar);
9453 if (nc)
9454 {
9455 var = lookup_decl_in_outer_ctx (ovar, ctx);
9456 x = build_sender_ref (ovar, ctx);
9457
9458 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
9459 && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_POINTER
9460 && !OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c)
9461 && TREE_CODE (TREE_TYPE (ovar)) == ARRAY_TYPE)
9462 {
9463 gcc_assert (offloaded);
9464 tree avar
9465 = create_tmp_var (TREE_TYPE (TREE_TYPE (x)));
9466 mark_addressable (avar);
9467 gimplify_assign (avar, build_fold_addr_expr (var), &ilist);
9468 talign = DECL_ALIGN_UNIT (avar);
9469 avar = build_fold_addr_expr (avar);
9470 gimplify_assign (x, avar, &ilist);
9471 }
9472 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
9473 {
9474 gcc_assert (is_gimple_omp_oacc (ctx->stmt));
9475 if (!omp_is_reference (var))
9476 {
9477 if (is_gimple_reg (var)
9478 && OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (c))
9479 TREE_NO_WARNING (var) = 1;
9480 var = build_fold_addr_expr (var);
9481 }
9482 else
9483 talign = TYPE_ALIGN_UNIT (TREE_TYPE (TREE_TYPE (ovar)));
9484 gimplify_assign (x, var, &ilist);
9485 }
9486 else if (is_gimple_reg (var))
9487 {
9488 gcc_assert (offloaded);
9489 tree avar = create_tmp_var (TREE_TYPE (var));
9490 mark_addressable (avar);
9491 enum gomp_map_kind map_kind = OMP_CLAUSE_MAP_KIND (c);
9492 if (GOMP_MAP_COPY_TO_P (map_kind)
9493 || map_kind == GOMP_MAP_POINTER
9494 || map_kind == GOMP_MAP_TO_PSET
9495 || map_kind == GOMP_MAP_FORCE_DEVICEPTR)
9496 {
9497 /* If we need to initialize a temporary
9498 with VAR because it is not addressable, and
9499 the variable hasn't been initialized yet, then
9500 we'll get a warning for the store to avar.
9501 Don't warn in that case, the mapping might
9502 be implicit. */
9503 TREE_NO_WARNING (var) = 1;
9504 gimplify_assign (avar, var, &ilist);
9505 }
9506 avar = build_fold_addr_expr (avar);
9507 gimplify_assign (x, avar, &ilist);
9508 if ((GOMP_MAP_COPY_FROM_P (map_kind)
9509 || map_kind == GOMP_MAP_FORCE_DEVICEPTR)
9510 && !TYPE_READONLY (TREE_TYPE (var)))
9511 {
9512 x = unshare_expr (x);
9513 x = build_simple_mem_ref (x);
9514 gimplify_assign (var, x, &olist);
9515 }
9516 }
9517 else
9518 {
9519 var = build_fold_addr_expr (var);
9520 gimplify_assign (x, var, &ilist);
9521 }
9522 }
9523 s = NULL_TREE;
9524 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
9525 {
9526 gcc_checking_assert (is_gimple_omp_oacc (ctx->stmt));
9527 s = TREE_TYPE (ovar);
9528 if (TREE_CODE (s) == REFERENCE_TYPE)
9529 s = TREE_TYPE (s);
9530 s = TYPE_SIZE_UNIT (s);
9531 }
9532 else
9533 s = OMP_CLAUSE_SIZE (c);
9534 if (s == NULL_TREE)
9535 s = TYPE_SIZE_UNIT (TREE_TYPE (ovar));
9536 s = fold_convert (size_type_node, s);
9537 purpose = size_int (map_idx++);
9538 CONSTRUCTOR_APPEND_ELT (vsize, purpose, s);
9539 if (TREE_CODE (s) != INTEGER_CST)
9540 TREE_STATIC (TREE_VEC_ELT (t, 1)) = 0;
9541
9542 unsigned HOST_WIDE_INT tkind, tkind_zero;
9543 switch (OMP_CLAUSE_CODE (c))
9544 {
9545 case OMP_CLAUSE_MAP:
9546 tkind = OMP_CLAUSE_MAP_KIND (c);
9547 tkind_zero = tkind;
9548 if (OMP_CLAUSE_MAP_MAYBE_ZERO_LENGTH_ARRAY_SECTION (c))
9549 switch (tkind)
9550 {
9551 case GOMP_MAP_ALLOC:
9552 case GOMP_MAP_TO:
9553 case GOMP_MAP_FROM:
9554 case GOMP_MAP_TOFROM:
9555 case GOMP_MAP_ALWAYS_TO:
9556 case GOMP_MAP_ALWAYS_FROM:
9557 case GOMP_MAP_ALWAYS_TOFROM:
9558 case GOMP_MAP_RELEASE:
9559 case GOMP_MAP_FORCE_TO:
9560 case GOMP_MAP_FORCE_FROM:
9561 case GOMP_MAP_FORCE_TOFROM:
9562 case GOMP_MAP_FORCE_PRESENT:
9563 tkind_zero = GOMP_MAP_ZERO_LEN_ARRAY_SECTION;
9564 break;
9565 case GOMP_MAP_DELETE:
9566 tkind_zero = GOMP_MAP_DELETE_ZERO_LEN_ARRAY_SECTION;
9567 default:
9568 break;
9569 }
9570 if (tkind_zero != tkind)
9571 {
9572 if (integer_zerop (s))
9573 tkind = tkind_zero;
9574 else if (integer_nonzerop (s))
9575 tkind_zero = tkind;
9576 }
9577 break;
9578 case OMP_CLAUSE_FIRSTPRIVATE:
9579 gcc_checking_assert (is_gimple_omp_oacc (ctx->stmt));
9580 tkind = GOMP_MAP_TO;
9581 tkind_zero = tkind;
9582 break;
9583 case OMP_CLAUSE_TO:
9584 tkind = GOMP_MAP_TO;
9585 tkind_zero = tkind;
9586 break;
9587 case OMP_CLAUSE_FROM:
9588 tkind = GOMP_MAP_FROM;
9589 tkind_zero = tkind;
9590 break;
9591 default:
9592 gcc_unreachable ();
9593 }
9594 gcc_checking_assert (tkind
9595 < (HOST_WIDE_INT_C (1U) << talign_shift));
9596 gcc_checking_assert (tkind_zero
9597 < (HOST_WIDE_INT_C (1U) << talign_shift));
9598 talign = ceil_log2 (talign);
9599 tkind |= talign << talign_shift;
9600 tkind_zero |= talign << talign_shift;
9601 gcc_checking_assert (tkind
9602 <= tree_to_uhwi (TYPE_MAX_VALUE (tkind_type)));
9603 gcc_checking_assert (tkind_zero
9604 <= tree_to_uhwi (TYPE_MAX_VALUE (tkind_type)));
9605 if (tkind == tkind_zero)
9606 x = build_int_cstu (tkind_type, tkind);
9607 else
9608 {
9609 TREE_STATIC (TREE_VEC_ELT (t, 2)) = 0;
9610 x = build3 (COND_EXPR, tkind_type,
9611 fold_build2 (EQ_EXPR, boolean_type_node,
9612 unshare_expr (s), size_zero_node),
9613 build_int_cstu (tkind_type, tkind_zero),
9614 build_int_cstu (tkind_type, tkind));
9615 }
9616 CONSTRUCTOR_APPEND_ELT (vkind, purpose, x);
9617 if (nc && nc != c)
9618 c = nc;
9619 break;
9620
9621 case OMP_CLAUSE_FIRSTPRIVATE:
9622 if (is_oacc_parallel (ctx))
9623 goto oacc_firstprivate_map;
9624 ovar = OMP_CLAUSE_DECL (c);
9625 if (omp_is_reference (ovar))
9626 talign = TYPE_ALIGN_UNIT (TREE_TYPE (TREE_TYPE (ovar)));
9627 else
9628 talign = DECL_ALIGN_UNIT (ovar);
9629 var = lookup_decl_in_outer_ctx (ovar, ctx);
9630 x = build_sender_ref (ovar, ctx);
9631 tkind = GOMP_MAP_FIRSTPRIVATE;
9632 type = TREE_TYPE (ovar);
9633 if (omp_is_reference (ovar))
9634 type = TREE_TYPE (type);
9635 if ((INTEGRAL_TYPE_P (type)
9636 && TYPE_PRECISION (type) <= POINTER_SIZE)
9637 || TREE_CODE (type) == POINTER_TYPE)
9638 {
9639 tkind = GOMP_MAP_FIRSTPRIVATE_INT;
9640 tree t = var;
9641 if (omp_is_reference (var))
9642 t = build_simple_mem_ref (var);
9643 else if (OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (c))
9644 TREE_NO_WARNING (var) = 1;
9645 if (TREE_CODE (type) != POINTER_TYPE)
9646 t = fold_convert (pointer_sized_int_node, t);
9647 t = fold_convert (TREE_TYPE (x), t);
9648 gimplify_assign (x, t, &ilist);
9649 }
9650 else if (omp_is_reference (var))
9651 gimplify_assign (x, var, &ilist);
9652 else if (is_gimple_reg (var))
9653 {
9654 tree avar = create_tmp_var (TREE_TYPE (var));
9655 mark_addressable (avar);
9656 if (OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (c))
9657 TREE_NO_WARNING (var) = 1;
9658 gimplify_assign (avar, var, &ilist);
9659 avar = build_fold_addr_expr (avar);
9660 gimplify_assign (x, avar, &ilist);
9661 }
9662 else
9663 {
9664 var = build_fold_addr_expr (var);
9665 gimplify_assign (x, var, &ilist);
9666 }
9667 if (tkind == GOMP_MAP_FIRSTPRIVATE_INT)
9668 s = size_int (0);
9669 else if (omp_is_reference (ovar))
9670 s = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ovar)));
9671 else
9672 s = TYPE_SIZE_UNIT (TREE_TYPE (ovar));
9673 s = fold_convert (size_type_node, s);
9674 purpose = size_int (map_idx++);
9675 CONSTRUCTOR_APPEND_ELT (vsize, purpose, s);
9676 if (TREE_CODE (s) != INTEGER_CST)
9677 TREE_STATIC (TREE_VEC_ELT (t, 1)) = 0;
9678
9679 gcc_checking_assert (tkind
9680 < (HOST_WIDE_INT_C (1U) << talign_shift));
9681 talign = ceil_log2 (talign);
9682 tkind |= talign << talign_shift;
9683 gcc_checking_assert (tkind
9684 <= tree_to_uhwi (TYPE_MAX_VALUE (tkind_type)));
9685 CONSTRUCTOR_APPEND_ELT (vkind, purpose,
9686 build_int_cstu (tkind_type, tkind));
9687 break;
9688
9689 case OMP_CLAUSE_USE_DEVICE_PTR:
9690 case OMP_CLAUSE_IS_DEVICE_PTR:
9691 ovar = OMP_CLAUSE_DECL (c);
9692 var = lookup_decl_in_outer_ctx (ovar, ctx);
9693 x = build_sender_ref (ovar, ctx);
9694 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_USE_DEVICE_PTR)
9695 tkind = GOMP_MAP_USE_DEVICE_PTR;
9696 else
9697 tkind = GOMP_MAP_FIRSTPRIVATE_INT;
9698 type = TREE_TYPE (ovar);
9699 if (TREE_CODE (type) == ARRAY_TYPE)
9700 var = build_fold_addr_expr (var);
9701 else
9702 {
9703 if (omp_is_reference (ovar))
9704 {
9705 type = TREE_TYPE (type);
9706 if (TREE_CODE (type) != ARRAY_TYPE)
9707 var = build_simple_mem_ref (var);
9708 var = fold_convert (TREE_TYPE (x), var);
9709 }
9710 }
9711 gimplify_assign (x, var, &ilist);
9712 s = size_int (0);
9713 purpose = size_int (map_idx++);
9714 CONSTRUCTOR_APPEND_ELT (vsize, purpose, s);
9715 gcc_checking_assert (tkind
9716 < (HOST_WIDE_INT_C (1U) << talign_shift));
9717 gcc_checking_assert (tkind
9718 <= tree_to_uhwi (TYPE_MAX_VALUE (tkind_type)));
9719 CONSTRUCTOR_APPEND_ELT (vkind, purpose,
9720 build_int_cstu (tkind_type, tkind));
9721 break;
9722 }
9723
9724 gcc_assert (map_idx == map_cnt);
9725
9726 DECL_INITIAL (TREE_VEC_ELT (t, 1))
9727 = build_constructor (TREE_TYPE (TREE_VEC_ELT (t, 1)), vsize);
9728 DECL_INITIAL (TREE_VEC_ELT (t, 2))
9729 = build_constructor (TREE_TYPE (TREE_VEC_ELT (t, 2)), vkind);
9730 for (int i = 1; i <= 2; i++)
9731 if (!TREE_STATIC (TREE_VEC_ELT (t, i)))
9732 {
9733 gimple_seq initlist = NULL;
9734 force_gimple_operand (build1 (DECL_EXPR, void_type_node,
9735 TREE_VEC_ELT (t, i)),
9736 &initlist, true, NULL_TREE);
9737 gimple_seq_add_seq (&ilist, initlist);
9738
9739 tree clobber = build_constructor (TREE_TYPE (TREE_VEC_ELT (t, i)),
9740 NULL);
9741 TREE_THIS_VOLATILE (clobber) = 1;
9742 gimple_seq_add_stmt (&olist,
9743 gimple_build_assign (TREE_VEC_ELT (t, i),
9744 clobber));
9745 }
9746
9747 tree clobber = build_constructor (ctx->record_type, NULL);
9748 TREE_THIS_VOLATILE (clobber) = 1;
9749 gimple_seq_add_stmt (&olist, gimple_build_assign (ctx->sender_decl,
9750 clobber));
9751 }
9752
9753 /* Once all the expansions are done, sequence all the different
9754 fragments inside gimple_omp_body. */
9755
9756 new_body = NULL;
9757
9758 if (offloaded
9759 && ctx->record_type)
9760 {
9761 t = build_fold_addr_expr_loc (loc, ctx->sender_decl);
9762 /* fixup_child_record_type might have changed receiver_decl's type. */
9763 t = fold_convert_loc (loc, TREE_TYPE (ctx->receiver_decl), t);
9764 gimple_seq_add_stmt (&new_body,
9765 gimple_build_assign (ctx->receiver_decl, t));
9766 }
9767 gimple_seq_add_seq (&new_body, fplist);
9768
9769 if (offloaded || data_region)
9770 {
9771 tree prev = NULL_TREE;
9772 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
9773 switch (OMP_CLAUSE_CODE (c))
9774 {
9775 tree var, x;
9776 default:
9777 break;
9778 case OMP_CLAUSE_FIRSTPRIVATE:
9779 if (is_gimple_omp_oacc (ctx->stmt))
9780 break;
9781 var = OMP_CLAUSE_DECL (c);
9782 if (omp_is_reference (var)
9783 || is_gimple_reg_type (TREE_TYPE (var)))
9784 {
9785 tree new_var = lookup_decl (var, ctx);
9786 tree type;
9787 type = TREE_TYPE (var);
9788 if (omp_is_reference (var))
9789 type = TREE_TYPE (type);
9790 if ((INTEGRAL_TYPE_P (type)
9791 && TYPE_PRECISION (type) <= POINTER_SIZE)
9792 || TREE_CODE (type) == POINTER_TYPE)
9793 {
9794 x = build_receiver_ref (var, false, ctx);
9795 if (TREE_CODE (type) != POINTER_TYPE)
9796 x = fold_convert (pointer_sized_int_node, x);
9797 x = fold_convert (type, x);
9798 gimplify_expr (&x, &new_body, NULL, is_gimple_val,
9799 fb_rvalue);
9800 if (omp_is_reference (var))
9801 {
9802 tree v = create_tmp_var_raw (type, get_name (var));
9803 gimple_add_tmp_var (v);
9804 TREE_ADDRESSABLE (v) = 1;
9805 gimple_seq_add_stmt (&new_body,
9806 gimple_build_assign (v, x));
9807 x = build_fold_addr_expr (v);
9808 }
9809 gimple_seq_add_stmt (&new_body,
9810 gimple_build_assign (new_var, x));
9811 }
9812 else
9813 {
9814 x = build_receiver_ref (var, !omp_is_reference (var), ctx);
9815 gimplify_expr (&x, &new_body, NULL, is_gimple_val,
9816 fb_rvalue);
9817 gimple_seq_add_stmt (&new_body,
9818 gimple_build_assign (new_var, x));
9819 }
9820 }
9821 else if (is_variable_sized (var))
9822 {
9823 tree pvar = DECL_VALUE_EXPR (var);
9824 gcc_assert (TREE_CODE (pvar) == INDIRECT_REF);
9825 pvar = TREE_OPERAND (pvar, 0);
9826 gcc_assert (DECL_P (pvar));
9827 tree new_var = lookup_decl (pvar, ctx);
9828 x = build_receiver_ref (var, false, ctx);
9829 gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
9830 gimple_seq_add_stmt (&new_body,
9831 gimple_build_assign (new_var, x));
9832 }
9833 break;
9834 case OMP_CLAUSE_PRIVATE:
9835 if (is_gimple_omp_oacc (ctx->stmt))
9836 break;
9837 var = OMP_CLAUSE_DECL (c);
9838 if (omp_is_reference (var))
9839 {
9840 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
9841 tree new_var = lookup_decl (var, ctx);
9842 x = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (new_var)));
9843 if (TREE_CONSTANT (x))
9844 {
9845 x = create_tmp_var_raw (TREE_TYPE (TREE_TYPE (new_var)),
9846 get_name (var));
9847 gimple_add_tmp_var (x);
9848 TREE_ADDRESSABLE (x) = 1;
9849 x = build_fold_addr_expr_loc (clause_loc, x);
9850 }
9851 else
9852 break;
9853
9854 x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x);
9855 gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
9856 gimple_seq_add_stmt (&new_body,
9857 gimple_build_assign (new_var, x));
9858 }
9859 break;
9860 case OMP_CLAUSE_USE_DEVICE_PTR:
9861 case OMP_CLAUSE_IS_DEVICE_PTR:
9862 var = OMP_CLAUSE_DECL (c);
9863 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_USE_DEVICE_PTR)
9864 x = build_sender_ref (var, ctx);
9865 else
9866 x = build_receiver_ref (var, false, ctx);
9867 if (is_variable_sized (var))
9868 {
9869 tree pvar = DECL_VALUE_EXPR (var);
9870 gcc_assert (TREE_CODE (pvar) == INDIRECT_REF);
9871 pvar = TREE_OPERAND (pvar, 0);
9872 gcc_assert (DECL_P (pvar));
9873 tree new_var = lookup_decl (pvar, ctx);
9874 gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
9875 gimple_seq_add_stmt (&new_body,
9876 gimple_build_assign (new_var, x));
9877 }
9878 else if (TREE_CODE (TREE_TYPE (var)) == ARRAY_TYPE)
9879 {
9880 tree new_var = lookup_decl (var, ctx);
9881 new_var = DECL_VALUE_EXPR (new_var);
9882 gcc_assert (TREE_CODE (new_var) == MEM_REF);
9883 new_var = TREE_OPERAND (new_var, 0);
9884 gcc_assert (DECL_P (new_var));
9885 gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
9886 gimple_seq_add_stmt (&new_body,
9887 gimple_build_assign (new_var, x));
9888 }
9889 else
9890 {
9891 tree type = TREE_TYPE (var);
9892 tree new_var = lookup_decl (var, ctx);
9893 if (omp_is_reference (var))
9894 {
9895 type = TREE_TYPE (type);
9896 if (TREE_CODE (type) != ARRAY_TYPE)
9897 {
9898 tree v = create_tmp_var_raw (type, get_name (var));
9899 gimple_add_tmp_var (v);
9900 TREE_ADDRESSABLE (v) = 1;
9901 x = fold_convert (type, x);
9902 gimplify_expr (&x, &new_body, NULL, is_gimple_val,
9903 fb_rvalue);
9904 gimple_seq_add_stmt (&new_body,
9905 gimple_build_assign (v, x));
9906 x = build_fold_addr_expr (v);
9907 }
9908 }
9909 new_var = DECL_VALUE_EXPR (new_var);
9910 x = fold_convert (TREE_TYPE (new_var), x);
9911 gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
9912 gimple_seq_add_stmt (&new_body,
9913 gimple_build_assign (new_var, x));
9914 }
9915 break;
9916 }
9917 /* Handle GOMP_MAP_FIRSTPRIVATE_{POINTER,REFERENCE} in second pass,
9918 so that firstprivate vars holding OMP_CLAUSE_SIZE if needed
9919 are already handled. Similarly OMP_CLAUSE_PRIVATE for VLAs
9920 or references to VLAs. */
9921 for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
9922 switch (OMP_CLAUSE_CODE (c))
9923 {
9924 tree var;
9925 default:
9926 break;
9927 case OMP_CLAUSE_MAP:
9928 if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER
9929 || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_REFERENCE)
9930 {
9931 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
9932 poly_int64 offset = 0;
9933 gcc_assert (prev);
9934 var = OMP_CLAUSE_DECL (c);
9935 if (DECL_P (var)
9936 && TREE_CODE (TREE_TYPE (var)) == ARRAY_TYPE
9937 && is_global_var (maybe_lookup_decl_in_outer_ctx (var,
9938 ctx))
9939 && varpool_node::get_create (var)->offloadable)
9940 break;
9941 if (TREE_CODE (var) == INDIRECT_REF
9942 && TREE_CODE (TREE_OPERAND (var, 0)) == COMPONENT_REF)
9943 var = TREE_OPERAND (var, 0);
9944 if (TREE_CODE (var) == COMPONENT_REF)
9945 {
9946 var = get_addr_base_and_unit_offset (var, &offset);
9947 gcc_assert (var != NULL_TREE && DECL_P (var));
9948 }
9949 else if (DECL_SIZE (var)
9950 && TREE_CODE (DECL_SIZE (var)) != INTEGER_CST)
9951 {
9952 tree var2 = DECL_VALUE_EXPR (var);
9953 gcc_assert (TREE_CODE (var2) == INDIRECT_REF);
9954 var2 = TREE_OPERAND (var2, 0);
9955 gcc_assert (DECL_P (var2));
9956 var = var2;
9957 }
9958 tree new_var = lookup_decl (var, ctx), x;
9959 tree type = TREE_TYPE (new_var);
9960 bool is_ref;
9961 if (TREE_CODE (OMP_CLAUSE_DECL (c)) == INDIRECT_REF
9962 && (TREE_CODE (TREE_OPERAND (OMP_CLAUSE_DECL (c), 0))
9963 == COMPONENT_REF))
9964 {
9965 type = TREE_TYPE (TREE_OPERAND (OMP_CLAUSE_DECL (c), 0));
9966 is_ref = true;
9967 new_var = build2 (MEM_REF, type,
9968 build_fold_addr_expr (new_var),
9969 build_int_cst (build_pointer_type (type),
9970 offset));
9971 }
9972 else if (TREE_CODE (OMP_CLAUSE_DECL (c)) == COMPONENT_REF)
9973 {
9974 type = TREE_TYPE (OMP_CLAUSE_DECL (c));
9975 is_ref = TREE_CODE (type) == REFERENCE_TYPE;
9976 new_var = build2 (MEM_REF, type,
9977 build_fold_addr_expr (new_var),
9978 build_int_cst (build_pointer_type (type),
9979 offset));
9980 }
9981 else
9982 is_ref = omp_is_reference (var);
9983 if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_REFERENCE)
9984 is_ref = false;
9985 bool ref_to_array = false;
9986 if (is_ref)
9987 {
9988 type = TREE_TYPE (type);
9989 if (TREE_CODE (type) == ARRAY_TYPE)
9990 {
9991 type = build_pointer_type (type);
9992 ref_to_array = true;
9993 }
9994 }
9995 else if (TREE_CODE (type) == ARRAY_TYPE)
9996 {
9997 tree decl2 = DECL_VALUE_EXPR (new_var);
9998 gcc_assert (TREE_CODE (decl2) == MEM_REF);
9999 decl2 = TREE_OPERAND (decl2, 0);
10000 gcc_assert (DECL_P (decl2));
10001 new_var = decl2;
10002 type = TREE_TYPE (new_var);
10003 }
10004 x = build_receiver_ref (OMP_CLAUSE_DECL (prev), false, ctx);
10005 x = fold_convert_loc (clause_loc, type, x);
10006 if (!integer_zerop (OMP_CLAUSE_SIZE (c)))
10007 {
10008 tree bias = OMP_CLAUSE_SIZE (c);
10009 if (DECL_P (bias))
10010 bias = lookup_decl (bias, ctx);
10011 bias = fold_convert_loc (clause_loc, sizetype, bias);
10012 bias = fold_build1_loc (clause_loc, NEGATE_EXPR, sizetype,
10013 bias);
10014 x = fold_build2_loc (clause_loc, POINTER_PLUS_EXPR,
10015 TREE_TYPE (x), x, bias);
10016 }
10017 if (ref_to_array)
10018 x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x);
10019 gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
10020 if (is_ref && !ref_to_array)
10021 {
10022 tree t = create_tmp_var_raw (type, get_name (var));
10023 gimple_add_tmp_var (t);
10024 TREE_ADDRESSABLE (t) = 1;
10025 gimple_seq_add_stmt (&new_body,
10026 gimple_build_assign (t, x));
10027 x = build_fold_addr_expr_loc (clause_loc, t);
10028 }
10029 gimple_seq_add_stmt (&new_body,
10030 gimple_build_assign (new_var, x));
10031 prev = NULL_TREE;
10032 }
10033 else if (OMP_CLAUSE_CHAIN (c)
10034 && OMP_CLAUSE_CODE (OMP_CLAUSE_CHAIN (c))
10035 == OMP_CLAUSE_MAP
10036 && (OMP_CLAUSE_MAP_KIND (OMP_CLAUSE_CHAIN (c))
10037 == GOMP_MAP_FIRSTPRIVATE_POINTER
10038 || (OMP_CLAUSE_MAP_KIND (OMP_CLAUSE_CHAIN (c))
10039 == GOMP_MAP_FIRSTPRIVATE_REFERENCE)))
10040 prev = c;
10041 break;
10042 case OMP_CLAUSE_PRIVATE:
10043 var = OMP_CLAUSE_DECL (c);
10044 if (is_variable_sized (var))
10045 {
10046 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
10047 tree new_var = lookup_decl (var, ctx);
10048 tree pvar = DECL_VALUE_EXPR (var);
10049 gcc_assert (TREE_CODE (pvar) == INDIRECT_REF);
10050 pvar = TREE_OPERAND (pvar, 0);
10051 gcc_assert (DECL_P (pvar));
10052 tree new_pvar = lookup_decl (pvar, ctx);
10053 tree atmp = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
10054 tree al = size_int (DECL_ALIGN (var));
10055 tree x = TYPE_SIZE_UNIT (TREE_TYPE (new_var));
10056 x = build_call_expr_loc (clause_loc, atmp, 2, x, al);
10057 x = fold_convert_loc (clause_loc, TREE_TYPE (new_pvar), x);
10058 gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
10059 gimple_seq_add_stmt (&new_body,
10060 gimple_build_assign (new_pvar, x));
10061 }
10062 else if (omp_is_reference (var) && !is_gimple_omp_oacc (ctx->stmt))
10063 {
10064 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
10065 tree new_var = lookup_decl (var, ctx);
10066 tree x = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (new_var)));
10067 if (TREE_CONSTANT (x))
10068 break;
10069 else
10070 {
10071 tree atmp
10072 = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
10073 tree rtype = TREE_TYPE (TREE_TYPE (new_var));
10074 tree al = size_int (TYPE_ALIGN (rtype));
10075 x = build_call_expr_loc (clause_loc, atmp, 2, x, al);
10076 }
10077
10078 x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x);
10079 gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
10080 gimple_seq_add_stmt (&new_body,
10081 gimple_build_assign (new_var, x));
10082 }
10083 break;
10084 }
10085
10086 gimple_seq fork_seq = NULL;
10087 gimple_seq join_seq = NULL;
10088
10089 if (is_oacc_parallel (ctx))
10090 {
10091 /* If there are reductions on the offloaded region itself, treat
10092 them as a dummy GANG loop. */
10093 tree level = build_int_cst (integer_type_node, GOMP_DIM_GANG);
10094
10095 lower_oacc_reductions (gimple_location (ctx->stmt), clauses, level,
10096 false, NULL, NULL, &fork_seq, &join_seq, ctx);
10097 }
10098
10099 gimple_seq_add_seq (&new_body, fork_seq);
10100 gimple_seq_add_seq (&new_body, tgt_body);
10101 gimple_seq_add_seq (&new_body, join_seq);
10102
10103 if (offloaded)
10104 new_body = maybe_catch_exception (new_body);
10105
10106 gimple_seq_add_stmt (&new_body, gimple_build_omp_return (false));
10107 gimple_omp_set_body (stmt, new_body);
10108 }
10109
10110 bind = gimple_build_bind (NULL, NULL,
10111 tgt_bind ? gimple_bind_block (tgt_bind)
10112 : NULL_TREE);
10113 gsi_replace (gsi_p, dep_bind ? dep_bind : bind, true);
10114 gimple_bind_add_seq (bind, ilist);
10115 gimple_bind_add_stmt (bind, stmt);
10116 gimple_bind_add_seq (bind, olist);
10117
10118 pop_gimplify_context (NULL);
10119
10120 if (dep_bind)
10121 {
10122 gimple_bind_add_seq (dep_bind, dep_ilist);
10123 gimple_bind_add_stmt (dep_bind, bind);
10124 gimple_bind_add_seq (dep_bind, dep_olist);
10125 pop_gimplify_context (dep_bind);
10126 }
10127 }
10128
/* Expand code for an OpenMP teams directive.  */

static void
lower_omp_teams (gimple_stmt_iterator *gsi_p, omp_context *ctx)
{
  gomp_teams *teams_stmt = as_a <gomp_teams *> (gsi_stmt (*gsi_p));
  push_gimplify_context ();

  /* Wrap the whole construct in a new GIMPLE_BIND with its own BLOCK.  */
  tree block = make_node (BLOCK);
  gbind *bind = gimple_build_bind (NULL, NULL, block);
  gsi_replace (gsi_p, bind, true);
  gimple_seq bind_body = NULL;
  gimple_seq dlist = NULL;
  gimple_seq olist = NULL;

  /* Evaluate the num_teams clause expression into a gimple value; 0
     (used when the clause is absent) tells the runtime to choose.  */
  tree num_teams = omp_find_clause (gimple_omp_teams_clauses (teams_stmt),
				    OMP_CLAUSE_NUM_TEAMS);
  if (num_teams == NULL_TREE)
    num_teams = build_int_cst (unsigned_type_node, 0);
  else
    {
      num_teams = OMP_CLAUSE_NUM_TEAMS_EXPR (num_teams);
      num_teams = fold_convert (unsigned_type_node, num_teams);
      gimplify_expr (&num_teams, &bind_body, NULL, is_gimple_val, fb_rvalue);
    }
  /* Likewise for the thread_limit clause.  */
  tree thread_limit = omp_find_clause (gimple_omp_teams_clauses (teams_stmt),
				       OMP_CLAUSE_THREAD_LIMIT);
  if (thread_limit == NULL_TREE)
    thread_limit = build_int_cst (unsigned_type_node, 0);
  else
    {
      thread_limit = OMP_CLAUSE_THREAD_LIMIT_EXPR (thread_limit);
      thread_limit = fold_convert (unsigned_type_node, thread_limit);
      gimplify_expr (&thread_limit, &bind_body, NULL, is_gimple_val,
		     fb_rvalue);
    }

  /* Lower data-sharing clauses (into BIND_BODY/DLIST), the region body,
     and reduction clauses (into OLIST).  */
  lower_rec_input_clauses (gimple_omp_teams_clauses (teams_stmt),
			   &bind_body, &dlist, ctx, NULL);
  lower_omp (gimple_omp_body_ptr (teams_stmt), ctx);
  lower_reduction_clauses (gimple_omp_teams_clauses (teams_stmt), &olist, ctx);
  if (!gimple_omp_teams_grid_phony (teams_stmt))
    {
      /* Emit the GOMP_teams (num_teams, thread_limit) runtime call right
         after the teams statement itself.  */
      gimple_seq_add_stmt (&bind_body, teams_stmt);
      location_t loc = gimple_location (teams_stmt);
      tree decl = builtin_decl_explicit (BUILT_IN_GOMP_TEAMS);
      gimple *call = gimple_build_call (decl, 2, num_teams, thread_limit);
      gimple_set_location (call, loc);
      gimple_seq_add_stmt (&bind_body, call);
    }

  /* Assemble the final sequence: body, reduction epilogue, destructor
     list, and (unless grid-phony) the closing OMP return.  */
  gimple_seq_add_seq (&bind_body, gimple_omp_body (teams_stmt));
  gimple_omp_set_body (teams_stmt, NULL);
  gimple_seq_add_seq (&bind_body, olist);
  gimple_seq_add_seq (&bind_body, dlist);
  if (!gimple_omp_teams_grid_phony (teams_stmt))
    gimple_seq_add_stmt (&bind_body, gimple_build_omp_return (true));
  gimple_bind_set_body (bind, bind_body);

  pop_gimplify_context (bind);

  /* Hook the context's variables into the new bind and its BLOCK.  */
  gimple_bind_append_vars (bind, ctx->block_vars);
  BLOCK_VARS (block) = ctx->block_vars;
  if (BLOCK_VARS (block))
    TREE_USED (block) = 1;
}
10195
10196 /* Expand code within an artificial GIMPLE_OMP_GRID_BODY OMP construct. */
10197
10198 static void
10199 lower_omp_grid_body (gimple_stmt_iterator *gsi_p, omp_context *ctx)
10200 {
10201 gimple *stmt = gsi_stmt (*gsi_p);
10202 lower_omp (gimple_omp_body_ptr (stmt), ctx);
10203 gimple_seq_add_stmt (gimple_omp_body_ptr (stmt),
10204 gimple_build_omp_return (false));
10205 }
10206
10207
/* Callback for lower_omp_1.  Return non-NULL if *tp needs to be
   regimplified.  If DATA is non-NULL, lower_omp_1 is outside
   of OMP context, but with task_shared_vars set.  */

static tree
lower_omp_regimplify_p (tree *tp, int *walk_subtrees,
			void *data)
{
  tree t = *tp;

  /* Any variable with DECL_VALUE_EXPR needs to be regimplified.  */
  if (VAR_P (t) && data == NULL && DECL_HAS_VALUE_EXPR_P (t))
    return t;

  /* Likewise any decl recorded in task_shared_vars.  */
  if (task_shared_vars
      && DECL_P (t)
      && bitmap_bit_p (task_shared_vars, DECL_UID (t)))
    return t;

  /* If a global variable has been privatized, TREE_CONSTANT on
     ADDR_EXPR might be wrong.  Note: this mutates T in place even
     when no regimplification is requested.  */
  if (data == NULL && TREE_CODE (t) == ADDR_EXPR)
    recompute_tree_invariant_for_addr_expr (t);

  /* Do not descend into types or (other) decls.  */
  *walk_subtrees = !IS_TYPE_OR_DECL_P (t);
  return NULL_TREE;
}
10235
/* Data to be communicated between lower_omp_regimplify_operands and
   lower_omp_regimplify_operands_p.  */

struct lower_omp_regimplify_operands_data
{
  /* Context of the construct whose statement is being regimplified.  */
  omp_context *ctx;
  /* Pairs pushed as <saved DECL_VALUE_EXPR, VAR_DECL> so the caller can
     restore the original value exprs after regimplification.  */
  vec<tree> *decls;
};
10244
/* Helper function for lower_omp_regimplify_operands.  Find
   omp_member_access_dummy_var vars and adjust temporarily their
   DECL_VALUE_EXPRs if needed.  */

static tree
lower_omp_regimplify_operands_p (tree *tp, int *walk_subtrees,
				 void *data)
{
  /* T is the underlying dummy var if *TP is based on one, else NULL.  */
  tree t = omp_member_access_dummy_var (*tp);
  if (t)
    {
      struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
      lower_omp_regimplify_operands_data *ldata
	= (lower_omp_regimplify_operands_data *) wi->info;
      tree o = maybe_lookup_decl (t, ldata->ctx);
      if (o != t)
	{
	  /* Record the old DECL_VALUE_EXPR and the var itself so the
	     caller can undo this temporary remapping.  */
	  ldata->decls->safe_push (DECL_VALUE_EXPR (*tp));
	  ldata->decls->safe_push (*tp);
	  /* Install a copy of the value expr with T replaced by its
	     counterpart O from the current context.  */
	  tree v = unshare_and_remap (DECL_VALUE_EXPR (*tp), t, o);
	  SET_DECL_VALUE_EXPR (*tp, v);
	}
    }
  /* Do not descend into types or decls.  */
  *walk_subtrees = !IS_TYPE_OR_DECL_P (*tp);
  return NULL_TREE;
}
10271
10272 /* Wrapper around gimple_regimplify_operands that adjusts DECL_VALUE_EXPRs
10273 of omp_member_access_dummy_var vars during regimplification. */
10274
10275 static void
10276 lower_omp_regimplify_operands (omp_context *ctx, gimple *stmt,
10277 gimple_stmt_iterator *gsi_p)
10278 {
10279 auto_vec<tree, 10> decls;
10280 if (ctx)
10281 {
10282 struct walk_stmt_info wi;
10283 memset (&wi, '\0', sizeof (wi));
10284 struct lower_omp_regimplify_operands_data data;
10285 data.ctx = ctx;
10286 data.decls = &decls;
10287 wi.info = &data;
10288 walk_gimple_op (stmt, lower_omp_regimplify_operands_p, &wi);
10289 }
10290 gimple_regimplify_operands (stmt, gsi_p);
10291 while (!decls.is_empty ())
10292 {
10293 tree t = decls.pop ();
10294 tree v = decls.pop ();
10295 SET_DECL_VALUE_EXPR (t, v);
10296 }
10297 }
10298
/* Lower the statement at *GSI_P inside omp context CTX (NULL when the
   statement is outside any OMP region).  Dispatches OMP constructs to
   their specific lowering routines and regimplifies other statements
   that mention privatized variables.  */

static void
lower_omp_1 (gimple_stmt_iterator *gsi_p, omp_context *ctx)
{
  gimple *stmt = gsi_stmt (*gsi_p);
  struct walk_stmt_info wi;
  gcall *call_stmt;

  if (gimple_has_location (stmt))
    input_location = gimple_location (stmt);

  /* WI is only consulted when CTX is NULL but task_shared_vars is set;
     see lower_omp_regimplify_p.  */
  if (task_shared_vars)
    memset (&wi, '\0', sizeof (wi));

  /* If we have issued syntax errors, avoid doing any heavy lifting.
     Just replace the OMP directives with a NOP to avoid
     confusing RTL expansion.  */
  if (seen_error () && is_gimple_omp (stmt))
    {
      gsi_replace (gsi_p, gimple_build_nop (), true);
      return;
    }

  switch (gimple_code (stmt))
    {
    case GIMPLE_COND:
      {
	gcond *cond_stmt = as_a <gcond *> (stmt);
	/* Regimplify the condition's operands if either side mentions a
	   privatized variable.  */
	if ((ctx || task_shared_vars)
	    && (walk_tree (gimple_cond_lhs_ptr (cond_stmt),
			   lower_omp_regimplify_p,
			   ctx ? NULL : &wi, NULL)
		|| walk_tree (gimple_cond_rhs_ptr (cond_stmt),
			      lower_omp_regimplify_p,
			      ctx ? NULL : &wi, NULL)))
	  lower_omp_regimplify_operands (ctx, cond_stmt, gsi_p);
      }
      break;
    case GIMPLE_CATCH:
      lower_omp (gimple_catch_handler_ptr (as_a <gcatch *> (stmt)), ctx);
      break;
    case GIMPLE_EH_FILTER:
      lower_omp (gimple_eh_filter_failure_ptr (stmt), ctx);
      break;
    case GIMPLE_TRY:
      lower_omp (gimple_try_eval_ptr (stmt), ctx);
      lower_omp (gimple_try_cleanup_ptr (stmt), ctx);
      break;
    case GIMPLE_TRANSACTION:
      lower_omp (gimple_transaction_body_ptr (as_a <gtransaction *> (stmt)),
		 ctx);
      break;
    case GIMPLE_BIND:
      lower_omp (gimple_bind_body_ptr (as_a <gbind *> (stmt)), ctx);
      maybe_remove_omp_member_access_dummy_vars (as_a <gbind *> (stmt));
      break;
    case GIMPLE_OMP_PARALLEL:
    case GIMPLE_OMP_TASK:
      ctx = maybe_lookup_ctx (stmt);
      gcc_assert (ctx);
      /* Cancellable regions get a label for GOMP_cancel* calls to
	 branch to; see the GIMPLE_CALL handling below.  */
      if (ctx->cancellable)
	ctx->cancel_label = create_artificial_label (UNKNOWN_LOCATION);
      lower_omp_taskreg (gsi_p, ctx);
      break;
    case GIMPLE_OMP_FOR:
      ctx = maybe_lookup_ctx (stmt);
      gcc_assert (ctx);
      if (ctx->cancellable)
	ctx->cancel_label = create_artificial_label (UNKNOWN_LOCATION);
      lower_omp_for (gsi_p, ctx);
      break;
    case GIMPLE_OMP_SECTIONS:
      ctx = maybe_lookup_ctx (stmt);
      gcc_assert (ctx);
      if (ctx->cancellable)
	ctx->cancel_label = create_artificial_label (UNKNOWN_LOCATION);
      lower_omp_sections (gsi_p, ctx);
      break;
    case GIMPLE_OMP_SINGLE:
      ctx = maybe_lookup_ctx (stmt);
      gcc_assert (ctx);
      lower_omp_single (gsi_p, ctx);
      break;
    case GIMPLE_OMP_MASTER:
      ctx = maybe_lookup_ctx (stmt);
      gcc_assert (ctx);
      lower_omp_master (gsi_p, ctx);
      break;
    case GIMPLE_OMP_TASKGROUP:
      ctx = maybe_lookup_ctx (stmt);
      gcc_assert (ctx);
      lower_omp_taskgroup (gsi_p, ctx);
      break;
    case GIMPLE_OMP_ORDERED:
      ctx = maybe_lookup_ctx (stmt);
      gcc_assert (ctx);
      lower_omp_ordered (gsi_p, ctx);
      break;
    case GIMPLE_OMP_CRITICAL:
      ctx = maybe_lookup_ctx (stmt);
      gcc_assert (ctx);
      lower_omp_critical (gsi_p, ctx);
      break;
    case GIMPLE_OMP_ATOMIC_LOAD:
      /* Only the load RHS may need regimplification here.  */
      if ((ctx || task_shared_vars)
	  && walk_tree (gimple_omp_atomic_load_rhs_ptr (
			  as_a <gomp_atomic_load *> (stmt)),
			lower_omp_regimplify_p, ctx ? NULL : &wi, NULL))
	lower_omp_regimplify_operands (ctx, stmt, gsi_p);
      break;
    case GIMPLE_OMP_TARGET:
      ctx = maybe_lookup_ctx (stmt);
      gcc_assert (ctx);
      lower_omp_target (gsi_p, ctx);
      break;
    case GIMPLE_OMP_TEAMS:
      ctx = maybe_lookup_ctx (stmt);
      gcc_assert (ctx);
      /* Host teams constructs are lowered like parallel/task regions.  */
      if (gimple_omp_teams_host (as_a <gomp_teams *> (stmt)))
	lower_omp_taskreg (gsi_p, ctx);
      else
	lower_omp_teams (gsi_p, ctx);
      break;
    case GIMPLE_OMP_GRID_BODY:
      ctx = maybe_lookup_ctx (stmt);
      gcc_assert (ctx);
      lower_omp_grid_body (gsi_p, ctx);
      break;
    case GIMPLE_CALL:
      tree fndecl;
      call_stmt = as_a <gcall *> (stmt);
      fndecl = gimple_call_fndecl (call_stmt);
      if (fndecl
	  && fndecl_built_in_p (fndecl, BUILT_IN_NORMAL))
	switch (DECL_FUNCTION_CODE (fndecl))
	  {
	  case BUILT_IN_GOMP_BARRIER:
	    if (ctx == NULL)
	      break;
	    /* FALLTHRU */
	  case BUILT_IN_GOMP_CANCEL:
	  case BUILT_IN_GOMP_CANCELLATION_POINT:
	    omp_context *cctx;
	    cctx = ctx;
	    /* Sections are cancelled via their enclosing sections
	       construct's context.  */
	    if (gimple_code (cctx->stmt) == GIMPLE_OMP_SECTION)
	      cctx = cctx->outer;
	    gcc_assert (gimple_call_lhs (call_stmt) == NULL_TREE);
	    if (!cctx->cancellable)
	      {
		/* In a non-cancellable region a cancellation point is a
		   no-op; a barrier/cancel call is left unchanged.  */
		if (DECL_FUNCTION_CODE (fndecl)
		    == BUILT_IN_GOMP_CANCELLATION_POINT)
		  {
		    stmt = gimple_build_nop ();
		    gsi_replace (gsi_p, stmt, false);
		  }
		break;
	      }
	    if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_GOMP_BARRIER)
	      {
		/* In a cancellable region, use the cancellable barrier
		   variant so the barrier can report cancellation.  */
		fndecl = builtin_decl_explicit (BUILT_IN_GOMP_BARRIER_CANCEL);
		gimple_call_set_fndecl (call_stmt, fndecl);
		gimple_call_set_fntype (call_stmt, TREE_TYPE (fndecl));
	      }
	    /* Give the call an lhs and branch to the region's cancel
	       label when it returns non-zero (i.e. cancelled).  */
	    tree lhs;
	    lhs = create_tmp_var (TREE_TYPE (TREE_TYPE (fndecl)));
	    gimple_call_set_lhs (call_stmt, lhs);
	    tree fallthru_label;
	    fallthru_label = create_artificial_label (UNKNOWN_LOCATION);
	    gimple *g;
	    g = gimple_build_label (fallthru_label);
	    gsi_insert_after (gsi_p, g, GSI_SAME_STMT);
	    g = gimple_build_cond (NE_EXPR, lhs,
				   fold_convert (TREE_TYPE (lhs),
						 boolean_false_node),
				   cctx->cancel_label, fallthru_label);
	    gsi_insert_after (gsi_p, g, GSI_SAME_STMT);
	    break;
	  default:
	    break;
	  }
      /* FALLTHRU */
    default:
      if ((ctx || task_shared_vars)
	  && walk_gimple_op (stmt, lower_omp_regimplify_p,
			     ctx ? NULL : &wi))
	{
	  /* Just remove clobbers, this should happen only if we have
	     "privatized" local addressable variables in SIMD regions,
	     the clobber isn't needed in that case and gimplifying address
	     of the ARRAY_REF into a pointer and creating MEM_REF based
	     clobber would create worse code than we get with the clobber
	     dropped.  */
	  if (gimple_clobber_p (stmt))
	    {
	      gsi_replace (gsi_p, gimple_build_nop (), true);
	      break;
	    }
	  lower_omp_regimplify_operands (ctx, stmt, gsi_p);
	}
      break;
    }
}
10500
10501 static void
10502 lower_omp (gimple_seq *body, omp_context *ctx)
10503 {
10504 location_t saved_location = input_location;
10505 gimple_stmt_iterator gsi;
10506 for (gsi = gsi_start (*body); !gsi_end_p (gsi); gsi_next (&gsi))
10507 lower_omp_1 (&gsi, ctx);
10508 /* During gimplification, we haven't folded statments inside offloading
10509 or taskreg regions (gimplify.c:maybe_fold_stmt); do that now. */
10510 if (target_nesting_level || taskreg_nesting_level)
10511 for (gsi = gsi_start (*body); !gsi_end_p (gsi); gsi_next (&gsi))
10512 fold_stmt (&gsi);
10513 input_location = saved_location;
10514 }
10515
/* Main entry point.  Scan the current function's body for OMP regions,
   lower them all, and release the per-function OMP state.  */

static unsigned int
execute_lower_omp (void)
{
  gimple_seq body;
  int i;
  omp_context *ctx;

  /* This pass always runs, to provide PROP_gimple_lomp.
     But often, there is nothing to do.  */
  if (flag_openacc == 0 && flag_openmp == 0
      && flag_openmp_simd == 0)
    return 0;

  all_contexts = splay_tree_new (splay_tree_compare_pointers, 0,
				 delete_omp_context);

  body = gimple_body (current_function_decl);

  /* If HSA code generation was requested, convert target regions to
     grid form before scanning.  */
  if (hsa_gen_requested_p ())
    omp_grid_gridify_all_targets (&body);

  /* Build omp_context structures for every OMP region in the body.  */
  scan_omp (&body, NULL);
  gcc_assert (taskreg_nesting_level == 0);
  FOR_EACH_VEC_ELT (taskreg_contexts, i, ctx)
    finish_taskreg_scan (ctx);
  taskreg_contexts.release ();

  /* Only lower if scanning actually recorded any contexts.  */
  if (all_contexts->root)
    {
      if (task_shared_vars)
	push_gimplify_context ();
      lower_omp (&body, NULL);
      if (task_shared_vars)
	pop_gimplify_context (NULL);
    }

  if (all_contexts)
    {
      splay_tree_delete (all_contexts);
      all_contexts = NULL;
    }
  BITMAP_FREE (task_shared_vars);

  /* If current function is a method, remove artificial dummy VAR_DECL created
     for non-static data member privatization, they aren't needed for
     debuginfo nor anything else, have been already replaced everywhere in the
     IL and cause problems with LTO.  */
  if (DECL_ARGUMENTS (current_function_decl)
      && DECL_ARTIFICIAL (DECL_ARGUMENTS (current_function_decl))
      && (TREE_CODE (TREE_TYPE (DECL_ARGUMENTS (current_function_decl)))
	  == POINTER_TYPE))
    remove_member_access_dummy_vars (DECL_INITIAL (current_function_decl));
  return 0;
}
10572
namespace {

/* Pass descriptor for the "omplower" pass.  */

const pass_data pass_data_lower_omp =
{
  GIMPLE_PASS, /* type */
  "omplower", /* name */
  OPTGROUP_OMP, /* optinfo_flags */
  TV_NONE, /* tv_id */
  PROP_gimple_any, /* properties_required */
  PROP_gimple_lomp | PROP_gimple_lomp_dev, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

/* The "omplower" pass.  It has no gate, so it always executes; the
   early bail-out lives in execute_lower_omp itself, which must still
   run to provide PROP_gimple_lomp.  */

class pass_lower_omp : public gimple_opt_pass
{
public:
  pass_lower_omp (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_lower_omp, ctxt)
  {}

  /* opt_pass methods: */
  virtual unsigned int execute (function *) { return execute_lower_omp (); }

}; // class pass_lower_omp

} // anon namespace
10601
10602 gimple_opt_pass *
10603 make_pass_lower_omp (gcc::context *ctxt)
10604 {
10605 return new pass_lower_omp (ctxt);
10606 }
10607 \f
/* The following is a utility to diagnose structured block violations.
   It is not part of the "omplower" pass, as that's invoked too late.  It
   should be invoked by the respective front ends after gimplification.  */

/* Map from each LABEL_DECL to the innermost OMP region statement that
   contains it; built by diagnose_sb_1, consulted by diagnose_sb_2.  */
static splay_tree all_labels;
10613
/* Check for mismatched contexts and generate an error if needed.  Return
   true if an error is detected.  BRANCH_CTX and LABEL_CTX are the
   innermost OMP regions (or NULL) of the branch and its target label;
   on error, the branch at *GSI_P is replaced with a NOP.  */

static bool
diagnose_sb_0 (gimple_stmt_iterator *gsi_p,
	       gimple *branch_ctx, gimple *label_ctx)
{
  gcc_checking_assert (!branch_ctx || is_gimple_omp (branch_ctx));
  gcc_checking_assert (!label_ctx || is_gimple_omp (label_ctx));

  /* A branch staying within the same (possibly NULL) region is fine.  */
  if (label_ctx == branch_ctx)
    return false;

  /* Decide which spec name to use in the diagnostic.  */
  const char* kind = NULL;

  if (flag_openacc)
    {
      if ((branch_ctx && is_gimple_omp_oacc (branch_ctx))
	  || (label_ctx && is_gimple_omp_oacc (label_ctx)))
	{
	  gcc_checking_assert (kind == NULL);
	  kind = "OpenACC";
	}
    }
  if (kind == NULL)
    {
      gcc_checking_assert (flag_openmp || flag_openmp_simd);
      kind = "OpenMP";
    }

  /* Previously we kept track of the label's entire context in diagnose_sb_[12]
     so we could traverse it and issue a correct "exit" or "enter" error
     message upon a structured block violation.

     We built the context by building a list with tree_cons'ing, but there is
     no easy counterpart in gimple tuples.  It seems like far too much work
     for issuing exit/enter error messages.  If someone really misses the
     distinct error message... patches welcome.  */

#if 0
  /* Try to avoid confusing the user by producing an error message
     with correct "exit" or "enter" verbiage.  We prefer "exit"
     unless we can show that LABEL_CTX is nested within BRANCH_CTX.  */
  if (branch_ctx == NULL)
    exit_p = false;
  else
    {
      while (label_ctx)
	{
	  if (TREE_VALUE (label_ctx) == branch_ctx)
	    {
	      exit_p = false;
	      break;
	    }
	  label_ctx = TREE_CHAIN (label_ctx);
	}
    }

  if (exit_p)
    error ("invalid exit from %s structured block", kind);
  else
    error ("invalid entry to %s structured block", kind);
#endif

  /* If it's obvious we have an invalid entry, be specific about the error.  */
  if (branch_ctx == NULL)
    error ("invalid entry to %s structured block", kind);
  else
    {
      /* Otherwise, be vague and lazy, but efficient.  */
      error ("invalid branch to/from %s structured block", kind);
    }

  /* Drop the offending branch to keep the IL consistent.  */
  gsi_replace (gsi_p, gimple_build_nop (), false);
  return true;
}
10690
/* Pass 1: Create a minimal tree of structured blocks, and record
   where each label is found.  wi->info carries the innermost enclosing
   OMP construct (or NULL at the outermost level).  */

static tree
diagnose_sb_1 (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
	       struct walk_stmt_info *wi)
{
  gimple *context = (gimple *) wi->info;
  gimple *inner_context;
  gimple *stmt = gsi_stmt (*gsi_p);

  *handled_ops_p = true;

  switch (gimple_code (stmt))
    {
    WALK_SUBSTMTS;

    case GIMPLE_OMP_PARALLEL:
    case GIMPLE_OMP_TASK:
    case GIMPLE_OMP_SECTIONS:
    case GIMPLE_OMP_SINGLE:
    case GIMPLE_OMP_SECTION:
    case GIMPLE_OMP_MASTER:
    case GIMPLE_OMP_ORDERED:
    case GIMPLE_OMP_CRITICAL:
    case GIMPLE_OMP_TARGET:
    case GIMPLE_OMP_TEAMS:
    case GIMPLE_OMP_TASKGROUP:
      /* The minimal context here is just the current OMP construct.  */
      inner_context = stmt;
      wi->info = inner_context;
      walk_gimple_seq (gimple_omp_body (stmt), diagnose_sb_1, NULL, wi);
      wi->info = context;
      break;

    case GIMPLE_OMP_FOR:
      inner_context = stmt;
      wi->info = inner_context;
      /* gimple_omp_for_{index,initial,final} are all DECLs; no need to
	 walk them.  */
      walk_gimple_seq (gimple_omp_for_pre_body (stmt),
		       diagnose_sb_1, NULL, wi);
      walk_gimple_seq (gimple_omp_body (stmt), diagnose_sb_1, NULL, wi);
      wi->info = context;
      break;

    case GIMPLE_LABEL:
      /* Record which OMP construct (if any) contains this label.  */
      splay_tree_insert (all_labels,
			 (splay_tree_key) gimple_label_label (
					    as_a <glabel *> (stmt)),
			 (splay_tree_value) context);
      break;

    default:
      break;
    }

  return NULL_TREE;
}
10750
/* Pass 2: Check each branch and see if its context differs from that of
   the destination label's context.  Uses the label->context map built
   by diagnose_sb_1 and reports via diagnose_sb_0.  */

static tree
diagnose_sb_2 (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
	       struct walk_stmt_info *wi)
{
  gimple *context = (gimple *) wi->info;
  splay_tree_node n;
  gimple *stmt = gsi_stmt (*gsi_p);

  *handled_ops_p = true;

  switch (gimple_code (stmt))
    {
    WALK_SUBSTMTS;

    case GIMPLE_OMP_PARALLEL:
    case GIMPLE_OMP_TASK:
    case GIMPLE_OMP_SECTIONS:
    case GIMPLE_OMP_SINGLE:
    case GIMPLE_OMP_SECTION:
    case GIMPLE_OMP_MASTER:
    case GIMPLE_OMP_ORDERED:
    case GIMPLE_OMP_CRITICAL:
    case GIMPLE_OMP_TARGET:
    case GIMPLE_OMP_TEAMS:
    case GIMPLE_OMP_TASKGROUP:
      /* Walk the construct's body with this construct as the context.  */
      wi->info = stmt;
      walk_gimple_seq_mod (gimple_omp_body_ptr (stmt), diagnose_sb_2, NULL, wi);
      wi->info = context;
      break;

    case GIMPLE_OMP_FOR:
      wi->info = stmt;
      /* gimple_omp_for_{index,initial,final} are all DECLs; no need to
	 walk them.  */
      walk_gimple_seq_mod (gimple_omp_for_pre_body_ptr (stmt),
			   diagnose_sb_2, NULL, wi);
      walk_gimple_seq_mod (gimple_omp_body_ptr (stmt), diagnose_sb_2, NULL, wi);
      wi->info = context;
      break;

    case GIMPLE_COND:
      {
	/* Check both edges of the conditional branch.  */
	gcond *cond_stmt = as_a <gcond *> (stmt);
	tree lab = gimple_cond_true_label (cond_stmt);
	if (lab)
	  {
	    n = splay_tree_lookup (all_labels,
				   (splay_tree_key) lab);
	    diagnose_sb_0 (gsi_p, context,
			   n ? (gimple *) n->value : NULL);
	  }
	lab = gimple_cond_false_label (cond_stmt);
	if (lab)
	  {
	    n = splay_tree_lookup (all_labels,
				   (splay_tree_key) lab);
	    diagnose_sb_0 (gsi_p, context,
			   n ? (gimple *) n->value : NULL);
	  }
      }
      break;

    case GIMPLE_GOTO:
      {
	tree lab = gimple_goto_dest (stmt);
	/* Computed gotos cannot be checked here.  */
	if (TREE_CODE (lab) != LABEL_DECL)
	  break;

	n = splay_tree_lookup (all_labels, (splay_tree_key) lab);
	diagnose_sb_0 (gsi_p, context, n ? (gimple *) n->value : NULL);
      }
      break;

    case GIMPLE_SWITCH:
      {
	gswitch *switch_stmt = as_a <gswitch *> (stmt);
	unsigned int i;
	/* Check every case label; one diagnostic per statement suffices.  */
	for (i = 0; i < gimple_switch_num_labels (switch_stmt); ++i)
	  {
	    tree lab = CASE_LABEL (gimple_switch_label (switch_stmt, i));
	    n = splay_tree_lookup (all_labels, (splay_tree_key) lab);
	    if (n && diagnose_sb_0 (gsi_p, context, (gimple *) n->value))
	      break;
	  }
      }
      break;

    case GIMPLE_RETURN:
      /* A return inside any OMP construct leaves it — always invalid.  */
      diagnose_sb_0 (gsi_p, context, NULL);
      break;

    default:
      break;
    }

  return NULL_TREE;
}
10851
10852 static unsigned int
10853 diagnose_omp_structured_block_errors (void)
10854 {
10855 struct walk_stmt_info wi;
10856 gimple_seq body = gimple_body (current_function_decl);
10857
10858 all_labels = splay_tree_new (splay_tree_compare_pointers, 0, 0);
10859
10860 memset (&wi, 0, sizeof (wi));
10861 walk_gimple_seq (body, diagnose_sb_1, NULL, &wi);
10862
10863 memset (&wi, 0, sizeof (wi));
10864 wi.want_locations = true;
10865 walk_gimple_seq_mod (&body, diagnose_sb_2, NULL, &wi);
10866
10867 gimple_set_body (current_function_decl, body);
10868
10869 splay_tree_delete (all_labels);
10870 all_labels = NULL;
10871
10872 return 0;
10873 }
10874
namespace {

/* Pass descriptor for the structured-block diagnostic pass.  */

const pass_data pass_data_diagnose_omp_blocks =
{
  GIMPLE_PASS, /* type */
  "*diagnose_omp_blocks", /* name */
  OPTGROUP_OMP, /* optinfo_flags */
  TV_NONE, /* tv_id */
  PROP_gimple_any, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

/* Diagnostic pass; gated on any of the OpenACC/OpenMP/OpenMP-SIMD
   options being enabled.  */

class pass_diagnose_omp_blocks : public gimple_opt_pass
{
public:
  pass_diagnose_omp_blocks (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_diagnose_omp_blocks, ctxt)
  {}

  /* opt_pass methods: */
  virtual bool gate (function *)
  {
    return flag_openacc || flag_openmp || flag_openmp_simd;
  }
  virtual unsigned int execute (function *)
  {
    return diagnose_omp_structured_block_errors ();
  }

}; // class pass_diagnose_omp_blocks

} // anon namespace
10910
10911 gimple_opt_pass *
10912 make_pass_diagnose_omp_blocks (gcc::context *ctxt)
10913 {
10914 return new pass_diagnose_omp_blocks (ctxt);
10915 }
10916 \f
10917
10918 #include "gt-omp-low.h"