a7f35ffe416764b777ed30b9a869c3b89626fcbd
[gcc.git] / gcc / omp-low.c
1 /* Lowering pass for OMP directives. Converts OMP directives into explicit
2 calls to the runtime library (libgomp), data marshalling to implement data
3 sharing and copying clauses, offloading to accelerators, and more.
4
5 Contributed by Diego Novillo <dnovillo@redhat.com>
6
7 Copyright (C) 2005-2019 Free Software Foundation, Inc.
8
9 This file is part of GCC.
10
11 GCC is free software; you can redistribute it and/or modify it under
12 the terms of the GNU General Public License as published by the Free
13 Software Foundation; either version 3, or (at your option) any later
14 version.
15
16 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
17 WARRANTY; without even the implied warranty of MERCHANTABILITY or
18 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
19 for more details.
20
21 You should have received a copy of the GNU General Public License
22 along with GCC; see the file COPYING3. If not see
23 <http://www.gnu.org/licenses/>. */
24
25 #include "config.h"
26 #include "system.h"
27 #include "coretypes.h"
28 #include "backend.h"
29 #include "target.h"
30 #include "tree.h"
31 #include "gimple.h"
32 #include "tree-pass.h"
33 #include "ssa.h"
34 #include "cgraph.h"
35 #include "pretty-print.h"
36 #include "diagnostic-core.h"
37 #include "fold-const.h"
38 #include "stor-layout.h"
39 #include "internal-fn.h"
40 #include "gimple-fold.h"
41 #include "gimplify.h"
42 #include "gimple-iterator.h"
43 #include "gimplify-me.h"
44 #include "gimple-walk.h"
45 #include "tree-iterator.h"
46 #include "tree-inline.h"
47 #include "langhooks.h"
48 #include "tree-dfa.h"
49 #include "tree-ssa.h"
50 #include "splay-tree.h"
51 #include "omp-general.h"
52 #include "omp-low.h"
53 #include "omp-grid.h"
54 #include "gimple-low.h"
55 #include "symbol-summary.h"
56 #include "tree-nested.h"
57 #include "context.h"
58 #include "gomp-constants.h"
59 #include "gimple-pretty-print.h"
60 #include "hsa-common.h"
61 #include "stringpool.h"
62 #include "attribs.h"
63
64 /* Lowering of OMP parallel and workshare constructs proceeds in two
65 phases. The first phase scans the function looking for OMP statements
66 and then for variables that must be replaced to satisfy data sharing
67 clauses. The second phase expands code for the constructs, as well as
68 re-gimplifying things when variables have been replaced with complex
69 expressions.
70
71 Final code generation is done by pass_expand_omp. The flowgraph is
72 scanned for regions which are then moved to a new
73 function, to be invoked by the thread library, or offloaded. */
74
/* Context structure.  Used to store information about each parallel
   directive in the code.  */

struct omp_context
{
  /* This field must be at the beginning, as we do "inheritance": Some
     callback functions for tree-inline.c (e.g., omp_copy_decl)
     receive a copy_body_data pointer that is up-casted to an
     omp_context pointer.  */
  copy_body_data cb;

  /* The tree of contexts corresponding to the encountered constructs.  */
  struct omp_context *outer;
  /* The GIMPLE_OMP_* statement this context was created for.  */
  gimple *stmt;

  /* Map variables to fields in a structure that allows communication
     between sending and receiving threads.  */
  splay_tree field_map;
  tree record_type;
  tree sender_decl;
  tree receiver_decl;

  /* These are used just by task contexts, if task firstprivate fn is
     needed.  srecord_type is used to communicate from the thread
     that encountered the task construct to task firstprivate fn,
     record_type is allocated by GOMP_task, initialized by task firstprivate
     fn and passed to the task body fn.  */
  splay_tree sfield_map;
  tree srecord_type;

  /* A chain of variables to add to the top-level block surrounding the
     construct.  In the case of a parallel, this is in the child function.  */
  tree block_vars;

  /* Label to which GOMP_cancel{,llation_point} and explicit and implicit
     barriers should jump to during omplower pass.  */
  tree cancel_label;

  /* The sibling GIMPLE_OMP_FOR simd with _simt_ clause or NULL
     otherwise.  */
  gimple *simt_stmt;

  /* For task reductions registered in this context, a vector containing
     the length of the private copies block (if constant, otherwise NULL)
     and then offsets (if constant, otherwise NULL) for each entry.  */
  vec<tree> task_reductions;

  /* A hash map from the reduction clauses to the registered array
     elts.  */
  hash_map<tree, unsigned> *task_reduction_map;

  /* And a hash map from the lastprivate(conditional:) variables to their
     corresponding tracking loop iteration variables.  */
  hash_map<tree, tree> *lastprivate_conditional_map;

  /* Nesting depth of this context.  Used to beautify error messages re
     invalid gotos.  The outermost ctx is depth 1, with depth 0 being
     reserved for the main body of the function.  */
  int depth;

  /* True if this parallel directive is nested within another.  */
  bool is_nested;

  /* True if this construct can be cancelled.  */
  bool cancellable;

  /* True if lower_omp_1 should look up lastprivate conditional in parent
     context.  */
  bool combined_into_simd_safelen0;
};
145
/* Splay tree mapping each OMP statement to its omp_context.  */
static splay_tree all_contexts;
/* Nesting depth counters maintained while scanning taskreg and target
   regions respectively.  */
static int taskreg_nesting_level;
static int target_nesting_level;
/* DECL_UIDs of variables made addressable on behalf of task constructs;
   see use_pointer_for_field.  */
static bitmap task_shared_vars;
/* Contexts of taskreg constructs collected during scanning.  */
static vec<omp_context *> taskreg_contexts;

static void scan_omp (gimple_seq *, omp_context *);
static tree scan_omp_1_op (tree *, int *, void *);
154
/* Shared case labels for container statements whose sub-statements
   should be walked by the caller's gimple walker.  */
#define WALK_SUBSTMTS \
    case GIMPLE_BIND: \
    case GIMPLE_TRY: \
    case GIMPLE_CATCH: \
    case GIMPLE_EH_FILTER: \
    case GIMPLE_TRANSACTION: \
      /* The sub-statements for these should be walked.  */ \
      *handled_ops_p = false; \
      break;
164
165 /* Return true if CTX corresponds to an oacc parallel region. */
166
167 static bool
168 is_oacc_parallel (omp_context *ctx)
169 {
170 enum gimple_code outer_type = gimple_code (ctx->stmt);
171 return ((outer_type == GIMPLE_OMP_TARGET)
172 && (gimple_omp_target_kind (ctx->stmt)
173 == GF_OMP_TARGET_KIND_OACC_PARALLEL));
174 }
175
176 /* Return true if CTX corresponds to an oacc kernels region. */
177
178 static bool
179 is_oacc_kernels (omp_context *ctx)
180 {
181 enum gimple_code outer_type = gimple_code (ctx->stmt);
182 return ((outer_type == GIMPLE_OMP_TARGET)
183 && (gimple_omp_target_kind (ctx->stmt)
184 == GF_OMP_TARGET_KIND_OACC_KERNELS));
185 }
186
/* If DECL is the artificial dummy VAR_DECL created for non-static
   data member privatization, return the underlying "this" parameter,
   otherwise return NULL.  */

tree
omp_member_access_dummy_var (tree decl)
{
  /* The dummy must be an artificial, ignored VAR_DECL carrying a
     DECL_VALUE_EXPR that the frontend wants disregarded here.  */
  if (!VAR_P (decl)
      || !DECL_ARTIFICIAL (decl)
      || !DECL_IGNORED_P (decl)
      || !DECL_HAS_VALUE_EXPR_P (decl)
      || !lang_hooks.decls.omp_disregard_value_expr (decl, false))
    return NULL_TREE;

  tree v = DECL_VALUE_EXPR (decl);
  if (TREE_CODE (v) != COMPONENT_REF)
    return NULL_TREE;

  /* Strip the access path (component refs, dereferences, conversions,
     pointer arithmetic) down to its base; accept only an artificial
     pointer-typed PARM_DECL of the current function.  */
  while (1)
    switch (TREE_CODE (v))
      {
      case COMPONENT_REF:
      case MEM_REF:
      case INDIRECT_REF:
      CASE_CONVERT:
      case POINTER_PLUS_EXPR:
	v = TREE_OPERAND (v, 0);
	continue;
      case PARM_DECL:
	if (DECL_CONTEXT (v) == current_function_decl
	    && DECL_ARTIFICIAL (v)
	    && TREE_CODE (TREE_TYPE (v)) == POINTER_TYPE)
	  return v;
	return NULL_TREE;
      default:
	return NULL_TREE;
      }
}
225
226 /* Helper for unshare_and_remap, called through walk_tree. */
227
228 static tree
229 unshare_and_remap_1 (tree *tp, int *walk_subtrees, void *data)
230 {
231 tree *pair = (tree *) data;
232 if (*tp == pair[0])
233 {
234 *tp = unshare_expr (pair[1]);
235 *walk_subtrees = 0;
236 }
237 else if (IS_TYPE_OR_DECL_P (*tp))
238 *walk_subtrees = 0;
239 return NULL_TREE;
240 }
241
242 /* Return unshare_expr (X) with all occurrences of FROM
243 replaced with TO. */
244
245 static tree
246 unshare_and_remap (tree x, tree from, tree to)
247 {
248 tree pair[2] = { from, to };
249 x = unshare_expr (x);
250 walk_tree (&x, unshare_and_remap_1, pair, NULL);
251 return x;
252 }
253
254 /* Convenience function for calling scan_omp_1_op on tree operands. */
255
256 static inline tree
257 scan_omp_op (tree *tp, omp_context *ctx)
258 {
259 struct walk_stmt_info wi;
260
261 memset (&wi, 0, sizeof (wi));
262 wi.info = ctx;
263 wi.want_locations = true;
264
265 return walk_tree (tp, scan_omp_1_op, &wi, NULL);
266 }
267
/* Forward declarations for functions defined later in this file.  */
static void lower_omp (gimple_seq *, omp_context *);
static tree lookup_decl_in_outer_ctx (tree, omp_context *);
static tree maybe_lookup_decl_in_outer_ctx (tree, omp_context *);
271
272 /* Return true if CTX is for an omp parallel. */
273
274 static inline bool
275 is_parallel_ctx (omp_context *ctx)
276 {
277 return gimple_code (ctx->stmt) == GIMPLE_OMP_PARALLEL;
278 }
279
280
281 /* Return true if CTX is for an omp task. */
282
283 static inline bool
284 is_task_ctx (omp_context *ctx)
285 {
286 return gimple_code (ctx->stmt) == GIMPLE_OMP_TASK;
287 }
288
289
290 /* Return true if CTX is for an omp taskloop. */
291
292 static inline bool
293 is_taskloop_ctx (omp_context *ctx)
294 {
295 return gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
296 && gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_TASKLOOP;
297 }
298
299
300 /* Return true if CTX is for a host omp teams. */
301
302 static inline bool
303 is_host_teams_ctx (omp_context *ctx)
304 {
305 return gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS
306 && gimple_omp_teams_host (as_a <gomp_teams *> (ctx->stmt));
307 }
308
309 /* Return true if CTX is for an omp parallel or omp task or host omp teams
310 (the last one is strictly not a task region in OpenMP speak, but we
311 need to treat it similarly). */
312
313 static inline bool
314 is_taskreg_ctx (omp_context *ctx)
315 {
316 return is_parallel_ctx (ctx) || is_task_ctx (ctx) || is_host_teams_ctx (ctx);
317 }
318
319 /* Return true if EXPR is variable sized. */
320
321 static inline bool
322 is_variable_sized (const_tree expr)
323 {
324 return !TREE_CONSTANT (TYPE_SIZE_UNIT (TREE_TYPE (expr)));
325 }
326
327 /* Lookup variables. The "maybe" form
328 allows for the variable form to not have been entered, otherwise we
329 assert that the variable must have been entered. */
330
331 static inline tree
332 lookup_decl (tree var, omp_context *ctx)
333 {
334 tree *n = ctx->cb.decl_map->get (var);
335 return *n;
336 }
337
338 static inline tree
339 maybe_lookup_decl (const_tree var, omp_context *ctx)
340 {
341 tree *n = ctx->cb.decl_map->get (const_cast<tree> (var));
342 return n ? *n : NULL_TREE;
343 }
344
345 static inline tree
346 lookup_field (tree var, omp_context *ctx)
347 {
348 splay_tree_node n;
349 n = splay_tree_lookup (ctx->field_map, (splay_tree_key) var);
350 return (tree) n->value;
351 }
352
353 static inline tree
354 lookup_sfield (splay_tree_key key, omp_context *ctx)
355 {
356 splay_tree_node n;
357 n = splay_tree_lookup (ctx->sfield_map
358 ? ctx->sfield_map : ctx->field_map, key);
359 return (tree) n->value;
360 }
361
362 static inline tree
363 lookup_sfield (tree var, omp_context *ctx)
364 {
365 return lookup_sfield ((splay_tree_key) var, ctx);
366 }
367
368 static inline tree
369 maybe_lookup_field (splay_tree_key key, omp_context *ctx)
370 {
371 splay_tree_node n;
372 n = splay_tree_lookup (ctx->field_map, key);
373 return n ? (tree) n->value : NULL_TREE;
374 }
375
376 static inline tree
377 maybe_lookup_field (tree var, omp_context *ctx)
378 {
379 return maybe_lookup_field ((splay_tree_key) var, ctx);
380 }
381
/* Return true if DECL should be copied by pointer.  SHARED_CTX is
   the parallel context if DECL is to be shared.  As a side effect, when
   SHARED_CTX is a task context (or a nested parallel sharing DECL from
   an outer taskreg), DECL's outer copy may be marked TREE_ADDRESSABLE
   and recorded in task_shared_vars.  */

static bool
use_pointer_for_field (tree decl, omp_context *shared_ctx)
{
  /* Aggregates and atomics are always passed by reference.  */
  if (AGGREGATE_TYPE_P (TREE_TYPE (decl))
      || TYPE_ATOMIC (TREE_TYPE (decl)))
    return true;

  /* We can only use copy-in/copy-out semantics for shared variables
     when we know the value is not accessible from an outer scope.  */
  if (shared_ctx)
    {
      gcc_assert (!is_gimple_omp_oacc (shared_ctx->stmt));

      /* ??? Trivially accessible from anywhere.  But why would we even
	 be passing an address in this case?  Should we simply assert
	 this to be false, or should we have a cleanup pass that removes
	 these from the list of mappings?  */
      if (is_global_var (maybe_lookup_decl_in_outer_ctx (decl, shared_ctx)))
	return true;

      /* For variables with DECL_HAS_VALUE_EXPR_P set, we cannot tell
	 without analyzing the expression whether or not its location
	 is accessible to anyone else.  In the case of nested parallel
	 regions it certainly may be.  */
      if (TREE_CODE (decl) != RESULT_DECL && DECL_HAS_VALUE_EXPR_P (decl))
	return true;

      /* Do not use copy-in/copy-out for variables that have their
	 address taken.  */
      if (TREE_ADDRESSABLE (decl))
	return true;

      /* lower_send_shared_vars only uses copy-in, but not copy-out
	 for these.  */
      if (TREE_READONLY (decl)
	  || ((TREE_CODE (decl) == RESULT_DECL
	       || TREE_CODE (decl) == PARM_DECL)
	      && DECL_BY_REFERENCE (decl)))
	return false;

      /* Disallow copy-in/out in nested parallel if
	 decl is shared in outer parallel, otherwise
	 each thread could store the shared variable
	 in its own copy-in location, making the
	 variable no longer really shared.  */
      if (shared_ctx->is_nested)
	{
	  omp_context *up;

	  /* Find the closest enclosing taskreg context that maps DECL.  */
	  for (up = shared_ctx->outer; up; up = up->outer)
	    if (is_taskreg_ctx (up) && maybe_lookup_decl (decl, up))
	      break;

	  if (up)
	    {
	      tree c;

	      /* Check whether DECL is explicitly shared there.  */
	      for (c = gimple_omp_taskreg_clauses (up->stmt);
		   c; c = OMP_CLAUSE_CHAIN (c))
		if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED
		    && OMP_CLAUSE_DECL (c) == decl)
		  break;

	      if (c)
		goto maybe_mark_addressable_and_ret;
	    }
	}

      /* For tasks avoid using copy-in/out.  As tasks can be
	 deferred or executed in different thread, when GOMP_task
	 returns, the task hasn't necessarily terminated.  */
      if (is_task_ctx (shared_ctx))
	{
	  tree outer;
	maybe_mark_addressable_and_ret:
	  outer = maybe_lookup_decl_in_outer_ctx (decl, shared_ctx);
	  if (is_gimple_reg (outer) && !omp_member_access_dummy_var (outer))
	    {
	      /* Taking address of OUTER in lower_send_shared_vars
		 might need regimplification of everything that uses the
		 variable.  */
	      if (!task_shared_vars)
		task_shared_vars = BITMAP_ALLOC (NULL);
	      bitmap_set_bit (task_shared_vars, DECL_UID (outer));
	      TREE_ADDRESSABLE (outer) = 1;
	    }
	  return true;
	}
    }

  return false;
}
477
/* Construct a new automatic decl similar to VAR, named NAME and of type
   TYPE, chained onto CTX->block_vars.  */

static tree
omp_copy_decl_2 (tree var, tree name, tree type, omp_context *ctx)
{
  tree copy = copy_var_decl (var, name, type);

  DECL_CONTEXT (copy) = current_function_decl;
  DECL_CHAIN (copy) = ctx->block_vars;
  /* If VAR is listed in task_shared_vars, it means it wasn't
     originally addressable and is just because task needs to take
     its address.  But we don't need to take address of privatizations
     from that var.  */
  if (TREE_ADDRESSABLE (var)
      && task_shared_vars
      && bitmap_bit_p (task_shared_vars, DECL_UID (var)))
    TREE_ADDRESSABLE (copy) = 0;
  ctx->block_vars = copy;

  return copy;
}
499
500 static tree
501 omp_copy_decl_1 (tree var, omp_context *ctx)
502 {
503 return omp_copy_decl_2 (var, DECL_NAME (var), TREE_TYPE (var), ctx);
504 }
505
506 /* Build COMPONENT_REF and set TREE_THIS_VOLATILE and TREE_READONLY on it
507 as appropriate. */
508 static tree
509 omp_build_component_ref (tree obj, tree field)
510 {
511 tree ret = build3 (COMPONENT_REF, TREE_TYPE (field), obj, field, NULL);
512 if (TREE_THIS_VOLATILE (field))
513 TREE_THIS_VOLATILE (ret) |= 1;
514 if (TREE_READONLY (field))
515 TREE_READONLY (ret) |= 1;
516 return ret;
517 }
518
519 /* Build tree nodes to access the field for VAR on the receiver side. */
520
521 static tree
522 build_receiver_ref (tree var, bool by_ref, omp_context *ctx)
523 {
524 tree x, field = lookup_field (var, ctx);
525
526 /* If the receiver record type was remapped in the child function,
527 remap the field into the new record type. */
528 x = maybe_lookup_field (field, ctx);
529 if (x != NULL)
530 field = x;
531
532 x = build_simple_mem_ref (ctx->receiver_decl);
533 TREE_THIS_NOTRAP (x) = 1;
534 x = omp_build_component_ref (x, field);
535 if (by_ref)
536 {
537 x = build_simple_mem_ref (x);
538 TREE_THIS_NOTRAP (x) = 1;
539 }
540
541 return x;
542 }
543
/* Build tree nodes to access VAR in the scope outer to CTX.  In the case
   of a parallel, this is a component reference; for workshare constructs
   this is some variable.  CODE, if not OMP_CLAUSE_ERROR, is the clause
   kind on whose behalf the reference is being built.  */

static tree
build_outer_var_ref (tree var, omp_context *ctx,
		     enum omp_clause_code code = OMP_CLAUSE_ERROR)
{
  tree x;
  omp_context *outer = ctx->outer;
  /* Taskgroup contexts are transparent for outer lookups.  */
  while (outer && gimple_code (outer->stmt) == GIMPLE_OMP_TASKGROUP)
    outer = outer->outer;

  if (is_global_var (maybe_lookup_decl_in_outer_ctx (var, ctx)))
    x = var;
  else if (is_variable_sized (var))
    {
      /* Variable-sized decls are accessed through the pointer at the
	 base of their DECL_VALUE_EXPR; resolve that pointer in the
	 outer scope and dereference it.  */
      x = TREE_OPERAND (DECL_VALUE_EXPR (var), 0);
      x = build_outer_var_ref (x, ctx, code);
      x = build_simple_mem_ref (x);
    }
  else if (is_taskreg_ctx (ctx))
    {
      bool by_ref = use_pointer_for_field (var, NULL);
      x = build_receiver_ref (var, by_ref, ctx);
    }
  else if ((gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
	    && gimple_omp_for_kind (ctx->stmt) & GF_OMP_FOR_SIMD)
	   || (code == OMP_CLAUSE_PRIVATE
	       && (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
		   || gimple_code (ctx->stmt) == GIMPLE_OMP_SECTIONS
		   || gimple_code (ctx->stmt) == GIMPLE_OMP_SINGLE)))
    {
      /* #pragma omp simd isn't a worksharing construct, and can reference
	 even private vars in its linear etc. clauses.
	 Similarly for OMP_CLAUSE_PRIVATE with outer ref, that can refer
	 to private vars in all worksharing constructs.  */
      x = NULL_TREE;
      if (outer && is_taskreg_ctx (outer))
	x = lookup_decl (var, outer);
      else if (outer)
	x = maybe_lookup_decl_in_outer_ctx (var, ctx);
      if (x == NULL_TREE)
	x = var;
    }
  else if (code == OMP_CLAUSE_LASTPRIVATE && is_taskloop_ctx (ctx))
    {
      gcc_assert (outer);
      /* Taskloop fields are keyed off &DECL_UID (var); see
	 install_var_field with bit 3 of MASK set.  */
      splay_tree_node n
	= splay_tree_lookup (outer->field_map,
			     (splay_tree_key) &DECL_UID (var));
      if (n == NULL)
	{
	  if (is_global_var (maybe_lookup_decl_in_outer_ctx (var, outer)))
	    x = var;
	  else
	    x = lookup_decl (var, outer);
	}
      else
	{
	  tree field = (tree) n->value;
	  /* If the receiver record type was remapped in the child function,
	     remap the field into the new record type.  */
	  x = maybe_lookup_field (field, outer);
	  if (x != NULL)
	    field = x;

	  x = build_simple_mem_ref (outer->receiver_decl);
	  x = omp_build_component_ref (x, field);
	  if (use_pointer_for_field (var, outer))
	    x = build_simple_mem_ref (x);
	}
    }
  else if (outer)
    {
      if (gimple_code (outer->stmt) == GIMPLE_OMP_GRID_BODY)
	{
	  /* Grid body contexts never hold the mapping; use their
	     parent context instead.  */
	  outer = outer->outer;
	  gcc_assert (outer
		      && gimple_code (outer->stmt) != GIMPLE_OMP_GRID_BODY);
	}
      x = lookup_decl (var, outer);
    }
  else if (omp_is_reference (var))
    /* This can happen with orphaned constructs.  If var is reference, it is
       possible it is shared and as such valid.  */
    x = var;
  else if (omp_member_access_dummy_var (var))
    x = var;
  else
    gcc_unreachable ();

  if (x == var)
    {
      /* For member-access dummy vars, substitute the outer "this"
	 parameter into an unshared copy of the value expr.  */
      tree t = omp_member_access_dummy_var (var);
      if (t)
	{
	  x = DECL_VALUE_EXPR (var);
	  tree o = maybe_lookup_decl_in_outer_ctx (t, ctx);
	  if (o != t)
	    x = unshare_and_remap (x, t, o);
	  else
	    x = unshare_expr (x);
	}
    }

  if (omp_is_reference (var))
    x = build_simple_mem_ref (x);

  return x;
}
655
656 /* Build tree nodes to access the field for VAR on the sender side. */
657
658 static tree
659 build_sender_ref (splay_tree_key key, omp_context *ctx)
660 {
661 tree field = lookup_sfield (key, ctx);
662 return omp_build_component_ref (ctx->sender_decl, field);
663 }
664
665 static tree
666 build_sender_ref (tree var, omp_context *ctx)
667 {
668 return build_sender_ref ((splay_tree_key) var, ctx);
669 }
670
/* Add a new field for VAR inside the structure CTX->SENDER_DECL.  MASK is
   a bit set: bit 0 - enter the field into CTX->field_map/record_type,
   bit 1 - enter it into CTX->sfield_map/srecord_type, bit 2 - VAR is an
   array and the field is declared pointer-to-pointer to it, bit 3 - key
   the maps off &DECL_UID (VAR) instead of VAR itself.  */

static void
install_var_field (tree var, bool by_ref, int mask, omp_context *ctx)
{
  tree field, type, sfield = NULL_TREE;
  splay_tree_key key = (splay_tree_key) var;

  if ((mask & 8) != 0)
    {
      key = (splay_tree_key) &DECL_UID (var);
      gcc_checking_assert (key != (splay_tree_key) var);
    }
  /* Each key may be installed in each map at most once.  */
  gcc_assert ((mask & 1) == 0
	      || !splay_tree_lookup (ctx->field_map, key));
  gcc_assert ((mask & 2) == 0 || !ctx->sfield_map
	      || !splay_tree_lookup (ctx->sfield_map, key));
  gcc_assert ((mask & 3) == 3
	      || !is_gimple_omp_oacc (ctx->stmt));

  type = TREE_TYPE (var);
  /* Prevent redeclaring the var in the split-off function with a restrict
     pointer type.  Note that we only clear type itself, restrict qualifiers in
     the pointed-to type will be ignored by points-to analysis.  */
  if (POINTER_TYPE_P (type)
      && TYPE_RESTRICT (type))
    type = build_qualified_type (type, TYPE_QUALS (type) & ~TYPE_QUAL_RESTRICT);

  if (mask & 4)
    {
      gcc_assert (TREE_CODE (type) == ARRAY_TYPE);
      type = build_pointer_type (build_pointer_type (type));
    }
  else if (by_ref)
    type = build_pointer_type (type);
  else if ((mask & 3) == 1 && omp_is_reference (var))
    type = TREE_TYPE (type);

  field = build_decl (DECL_SOURCE_LOCATION (var),
		      FIELD_DECL, DECL_NAME (var), type);

  /* Remember what variable this field was created for.  This does have a
     side effect of making dwarf2out ignore this member, so for helpful
     debugging we clear it later in delete_omp_context.  */
  DECL_ABSTRACT_ORIGIN (field) = var;
  if (type == TREE_TYPE (var))
    {
      SET_DECL_ALIGN (field, DECL_ALIGN (var));
      DECL_USER_ALIGN (field) = DECL_USER_ALIGN (var);
      TREE_THIS_VOLATILE (field) = TREE_THIS_VOLATILE (var);
    }
  else
    SET_DECL_ALIGN (field, TYPE_ALIGN (type));

  if ((mask & 3) == 3)
    {
      insert_field_into_struct (ctx->record_type, field);
      if (ctx->srecord_type)
	{
	  sfield = build_decl (DECL_SOURCE_LOCATION (var),
			       FIELD_DECL, DECL_NAME (var), type);
	  DECL_ABSTRACT_ORIGIN (sfield) = var;
	  SET_DECL_ALIGN (sfield, DECL_ALIGN (field));
	  DECL_USER_ALIGN (sfield) = DECL_USER_ALIGN (field);
	  TREE_THIS_VOLATILE (sfield) = TREE_THIS_VOLATILE (field);
	  insert_field_into_struct (ctx->srecord_type, sfield);
	}
    }
  else
    {
      if (ctx->srecord_type == NULL_TREE)
	{
	  tree t;

	  /* Lazily create srecord_type, mirroring every field already
	     present in record_type.  */
	  ctx->srecord_type = lang_hooks.types.make_type (RECORD_TYPE);
	  ctx->sfield_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
	  for (t = TYPE_FIELDS (ctx->record_type); t ; t = TREE_CHAIN (t))
	    {
	      sfield = build_decl (DECL_SOURCE_LOCATION (t),
				   FIELD_DECL, DECL_NAME (t), TREE_TYPE (t));
	      DECL_ABSTRACT_ORIGIN (sfield) = DECL_ABSTRACT_ORIGIN (t);
	      insert_field_into_struct (ctx->srecord_type, sfield);
	      splay_tree_insert (ctx->sfield_map,
				 (splay_tree_key) DECL_ABSTRACT_ORIGIN (t),
				 (splay_tree_value) sfield);
	    }
	}
      sfield = field;
      insert_field_into_struct ((mask & 1) ? ctx->record_type
				: ctx->srecord_type, field);
    }

  if (mask & 1)
    splay_tree_insert (ctx->field_map, key, (splay_tree_value) field);
  if ((mask & 2) && ctx->sfield_map)
    splay_tree_insert (ctx->sfield_map, key, (splay_tree_value) sfield);
}
769
770 static tree
771 install_var_local (tree var, omp_context *ctx)
772 {
773 tree new_var = omp_copy_decl_1 (var, ctx);
774 insert_decl_map (&ctx->cb, var, new_var);
775 return new_var;
776 }
777
/* Adjust the replacement for DECL in CTX for the new context.  This means
   copying the DECL_VALUE_EXPR, and fixing up the type.  If PRIVATE_DEBUG,
   the value expr is copied even when the decl's size is constant.  */

static void
fixup_remapped_decl (tree decl, omp_context *ctx, bool private_debug)
{
  tree new_decl, size;

  new_decl = lookup_decl (decl, ctx);

  TREE_TYPE (new_decl) = remap_type (TREE_TYPE (decl), &ctx->cb);

  if ((!TREE_CONSTANT (DECL_SIZE (new_decl)) || private_debug)
      && DECL_HAS_VALUE_EXPR_P (decl))
    {
      tree ve = DECL_VALUE_EXPR (decl);
      walk_tree (&ve, copy_tree_body_r, &ctx->cb, NULL);
      SET_DECL_VALUE_EXPR (new_decl, ve);
      DECL_HAS_VALUE_EXPR_P (new_decl) = 1;
    }

  if (!TREE_CONSTANT (DECL_SIZE (new_decl)))
    {
      /* Variable-sized decls need their size trees remapped too; fall
	 back to the type's size when remapping fails.  */
      size = remap_decl (DECL_SIZE (decl), &ctx->cb);
      if (size == error_mark_node)
	size = TYPE_SIZE (TREE_TYPE (new_decl));
      DECL_SIZE (new_decl) = size;

      size = remap_decl (DECL_SIZE_UNIT (decl), &ctx->cb);
      if (size == error_mark_node)
	size = TYPE_SIZE_UNIT (TREE_TYPE (new_decl));
      DECL_SIZE_UNIT (new_decl) = size;
    }
}
812
/* The callback for remap_decl.  Search all containing contexts for a
   mapping of the variable; this avoids having to duplicate the splay
   tree ahead of time.  We know a mapping doesn't already exist in the
   given context.  Create new mappings to implement default semantics.  */

static tree
omp_copy_decl (tree var, copy_body_data *cb)
{
  /* CB is really the omp_context; see the comment on omp_context::cb.  */
  omp_context *ctx = (omp_context *) cb;
  tree new_var;

  if (TREE_CODE (var) == LABEL_DECL)
    {
      /* Non-local and forced labels must stay shared; others get a
	 fresh artificial label in the current function.  */
      if (FORCED_LABEL (var) || DECL_NONLOCAL (var))
	return var;
      new_var = create_artificial_label (DECL_SOURCE_LOCATION (var));
      DECL_CONTEXT (new_var) = current_function_decl;
      insert_decl_map (&ctx->cb, var, new_var);
      return new_var;
    }

  /* Walk outward through non-taskreg contexts, returning any existing
     mapping found on the way.  */
  while (!is_taskreg_ctx (ctx))
    {
      ctx = ctx->outer;
      if (ctx == NULL)
	return var;
      new_var = maybe_lookup_decl (var, ctx);
      if (new_var)
	return new_var;
    }

  if (is_global_var (var) || decl_function_context (var) != ctx->cb.src_fn)
    return var;

  return error_mark_node;
}
849
/* Create a new context, with OUTER_CTX being the surrounding context.  */

static omp_context *
new_omp_context (gimple *stmt, omp_context *outer_ctx)
{
  omp_context *ctx = XCNEW (omp_context);

  /* Register the context so it can be found from its statement.  */
  splay_tree_insert (all_contexts, (splay_tree_key) stmt,
		     (splay_tree_value) ctx);
  ctx->stmt = stmt;

  if (outer_ctx)
    {
      /* Inherit the copy_body_data from the surrounding context.  */
      ctx->outer = outer_ctx;
      ctx->cb = outer_ctx->cb;
      ctx->cb.block = NULL;
      ctx->depth = outer_ctx->depth + 1;
    }
  else
    {
      /* Outermost context: initialize the copy_body_data from scratch,
	 copying within the current function.  */
      ctx->cb.src_fn = current_function_decl;
      ctx->cb.dst_fn = current_function_decl;
      ctx->cb.src_node = cgraph_node::get (current_function_decl);
      gcc_checking_assert (ctx->cb.src_node);
      ctx->cb.dst_node = ctx->cb.src_node;
      ctx->cb.src_cfun = cfun;
      ctx->cb.copy_decl = omp_copy_decl;
      ctx->cb.eh_lp_nr = 0;
      ctx->cb.transform_call_graph_edges = CB_CGE_MOVE;
      ctx->cb.adjust_array_error_bounds = true;
      ctx->cb.dont_remap_vla_if_no_change = true;
      ctx->depth = 1;
    }

  ctx->cb.decl_map = new hash_map<tree, tree>;

  return ctx;
}
888
889 static gimple_seq maybe_catch_exception (gimple_seq);
890
/* Finalize task copyfn.  Gimplify the copy function of TASK_STMT (if
   any), wrap its body in exception handling when needed, and register
   the function with the callgraph.  */

static void
finalize_task_copyfn (gomp_task *task_stmt)
{
  struct function *child_cfun;
  tree child_fn;
  gimple_seq seq = NULL, new_seq;
  gbind *bind;

  child_fn = gimple_omp_task_copy_fn (task_stmt);
  if (child_fn == NULL_TREE)
    return;

  child_cfun = DECL_STRUCT_FUNCTION (child_fn);
  DECL_STRUCT_FUNCTION (child_fn)->curr_properties = cfun->curr_properties;

  /* Gimplify within the child function's context.  */
  push_cfun (child_cfun);
  bind = gimplify_body (child_fn, false);
  gimple_seq_add_stmt (&seq, bind);
  new_seq = maybe_catch_exception (seq);
  if (new_seq != seq)
    {
      /* EH wrapping happened; re-wrap the result in a fresh bind.  */
      bind = gimple_build_bind (NULL, new_seq, NULL);
      seq = NULL;
      gimple_seq_add_stmt (&seq, bind);
    }
  gimple_set_body (child_fn, seq);
  pop_cfun ();

  /* Inform the callgraph about the new function.  */
  cgraph_node *node = cgraph_node::get_create (child_fn);
  node->parallelized_function = 1;
  cgraph_node::add_new_function (child_fn, false);
}
926
927 /* Destroy a omp_context data structures. Called through the splay tree
928 value delete callback. */
929
930 static void
931 delete_omp_context (splay_tree_value value)
932 {
933 omp_context *ctx = (omp_context *) value;
934
935 delete ctx->cb.decl_map;
936
937 if (ctx->field_map)
938 splay_tree_delete (ctx->field_map);
939 if (ctx->sfield_map)
940 splay_tree_delete (ctx->sfield_map);
941
942 /* We hijacked DECL_ABSTRACT_ORIGIN earlier. We need to clear it before
943 it produces corrupt debug information. */
944 if (ctx->record_type)
945 {
946 tree t;
947 for (t = TYPE_FIELDS (ctx->record_type); t ; t = DECL_CHAIN (t))
948 DECL_ABSTRACT_ORIGIN (t) = NULL;
949 }
950 if (ctx->srecord_type)
951 {
952 tree t;
953 for (t = TYPE_FIELDS (ctx->srecord_type); t ; t = DECL_CHAIN (t))
954 DECL_ABSTRACT_ORIGIN (t) = NULL;
955 }
956
957 if (is_task_ctx (ctx))
958 finalize_task_copyfn (as_a <gomp_task *> (ctx->stmt));
959
960 if (ctx->task_reduction_map)
961 {
962 ctx->task_reductions.release ();
963 delete ctx->task_reduction_map;
964 }
965
966 delete ctx->lastprivate_conditional_map;
967
968 XDELETE (ctx);
969 }
970
/* Fix up RECEIVER_DECL with a type that has been remapped to the child
   context.  */

static void
fixup_child_record_type (omp_context *ctx)
{
  tree f, type = ctx->record_type;

  if (!ctx->receiver_decl)
    return;
  /* ??? It isn't sufficient to just call remap_type here, because
     variably_modified_type_p doesn't work the way we expect for
     record types.  Testing each field for whether it needs remapping
     and creating a new record by hand works, however.  */
  for (f = TYPE_FIELDS (type); f ; f = DECL_CHAIN (f))
    if (variably_modified_type_p (TREE_TYPE (f), ctx->cb.src_fn))
      break;
  if (f)
    {
      /* At least one field needs remapping: build a fresh record type
	 with remapped field types, sizes and offsets.  */
      tree name, new_fields = NULL;

      type = lang_hooks.types.make_type (RECORD_TYPE);
      name = DECL_NAME (TYPE_NAME (ctx->record_type));
      name = build_decl (DECL_SOURCE_LOCATION (ctx->receiver_decl),
			 TYPE_DECL, name, type);
      TYPE_NAME (type) = name;

      for (f = TYPE_FIELDS (ctx->record_type); f ; f = DECL_CHAIN (f))
	{
	  tree new_f = copy_node (f);
	  DECL_CONTEXT (new_f) = type;
	  TREE_TYPE (new_f) = remap_type (TREE_TYPE (f), &ctx->cb);
	  DECL_CHAIN (new_f) = new_fields;
	  walk_tree (&DECL_SIZE (new_f), copy_tree_body_r, &ctx->cb, NULL);
	  walk_tree (&DECL_SIZE_UNIT (new_f), copy_tree_body_r,
		     &ctx->cb, NULL);
	  walk_tree (&DECL_FIELD_OFFSET (new_f), copy_tree_body_r,
		     &ctx->cb, NULL);
	  new_fields = new_f;

	  /* Arrange to be able to look up the receiver field
	     given the sender field.  */
	  splay_tree_insert (ctx->field_map, (splay_tree_key) f,
			     (splay_tree_value) new_f);
	}
      TYPE_FIELDS (type) = nreverse (new_fields);
      layout_type (type);
    }

  /* In a target region we never modify any of the pointers in *.omp_data_i,
     so attempt to help the optimizers.  */
  if (is_gimple_omp_offloaded (ctx->stmt))
    type = build_qualified_type (type, TYPE_QUAL_CONST);

  TREE_TYPE (ctx->receiver_decl)
    = build_qualified_type (build_reference_type (type), TYPE_QUAL_RESTRICT);
}
1028
1029 /* Instantiate decls as necessary in CTX to satisfy the data sharing
1030 specified by CLAUSES. */
1031
static void
scan_sharing_clauses (tree clauses, omp_context *ctx)
{
  tree c, decl;
  bool scan_array_reductions = false;

  /* NOTE(review): the third argument to install_var_field is a bit mask
     selecting which record(s) the field goes into; the values 1, 2, 3, 7
     and 11 used below encode sender/receiver/extra variants -- confirm
     exact semantics against install_var_field's definition.  */

  /* First pass: for each clause, install record fields
     (install_var_field) and context-local copies (install_var_local)
     for the decls the clause references.  */
  for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
    {
      bool by_ref;

      switch (OMP_CLAUSE_CODE (c))
	{
	case OMP_CLAUSE_PRIVATE:
	  decl = OMP_CLAUSE_DECL (c);
	  if (OMP_CLAUSE_PRIVATE_OUTER_REF (c))
	    goto do_private;
	  else if (!is_variable_sized (decl))
	    install_var_local (decl, ctx);
	  break;

	case OMP_CLAUSE_SHARED:
	  decl = OMP_CLAUSE_DECL (c);
	  /* Ignore shared directives in teams construct inside of
	     target construct.  */
	  if (gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS
	      && !is_host_teams_ctx (ctx))
	    {
	      /* Global variables don't need to be copied,
		 the receiver side will use them directly.  */
	      tree odecl = maybe_lookup_decl_in_outer_ctx (decl, ctx);
	      if (is_global_var (odecl))
		break;
	      insert_decl_map (&ctx->cb, decl, odecl);
	      break;
	    }
	  gcc_assert (is_taskreg_ctx (ctx));
	  gcc_assert (!COMPLETE_TYPE_P (TREE_TYPE (decl))
		      || !is_variable_sized (decl));
	  /* Global variables don't need to be copied,
	     the receiver side will use them directly.  */
	  if (is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx)))
	    break;
	  if (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
	    {
	      /* Called here only for its side effects on the decl;
		 the field itself is installed in the second pass.  */
	      use_pointer_for_field (decl, ctx);
	      break;
	    }
	  by_ref = use_pointer_for_field (decl, NULL);
	  if ((! TREE_READONLY (decl) && !OMP_CLAUSE_SHARED_READONLY (c))
	      || TREE_ADDRESSABLE (decl)
	      || by_ref
	      || omp_is_reference (decl))
	    {
	      by_ref = use_pointer_for_field (decl, ctx);
	      install_var_field (decl, by_ref, 3, ctx);
	      install_var_local (decl, ctx);
	      break;
	    }
	  /* We don't need to copy const scalar vars back.  */
	  OMP_CLAUSE_SET_CODE (c, OMP_CLAUSE_FIRSTPRIVATE);
	  goto do_private;

	case OMP_CLAUSE_REDUCTION:
	case OMP_CLAUSE_IN_REDUCTION:
	  decl = OMP_CLAUSE_DECL (c);
	  if (TREE_CODE (decl) == MEM_REF)
	    {
	      /* Array-section reduction: dig the base decl out from under
		 POINTER_PLUS_EXPR / INDIRECT_REF / ADDR_EXPR wrappers.  */
	      tree t = TREE_OPERAND (decl, 0);
	      if (TREE_CODE (t) == POINTER_PLUS_EXPR)
		t = TREE_OPERAND (t, 0);
	      if (TREE_CODE (t) == INDIRECT_REF
		  || TREE_CODE (t) == ADDR_EXPR)
		t = TREE_OPERAND (t, 0);
	      install_var_local (t, ctx);
	      if (is_taskreg_ctx (ctx)
		  && (!is_global_var (maybe_lookup_decl_in_outer_ctx (t, ctx))
		      || (is_task_ctx (ctx)
			  && (TREE_CODE (TREE_TYPE (t)) == POINTER_TYPE
			      || (TREE_CODE (TREE_TYPE (t)) == REFERENCE_TYPE
				  && (TREE_CODE (TREE_TYPE (TREE_TYPE (t)))
				      == POINTER_TYPE)))))
		  && !is_variable_sized (t)
		  && (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION
		      || (!OMP_CLAUSE_REDUCTION_TASK (c)
			  && !is_task_ctx (ctx))))
		{
		  by_ref = use_pointer_for_field (t, NULL);
		  if (is_task_ctx (ctx)
		      && TREE_CODE (TREE_TYPE (t)) == REFERENCE_TYPE
		      && TREE_CODE (TREE_TYPE (TREE_TYPE (t))) == POINTER_TYPE)
		    {
		      install_var_field (t, false, 1, ctx);
		      install_var_field (t, by_ref, 2, ctx);
		    }
		  else
		    install_var_field (t, by_ref, 3, ctx);
		}
	      break;
	    }
	  if (is_task_ctx (ctx)
	      || (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
		  && OMP_CLAUSE_REDUCTION_TASK (c)
		  && is_parallel_ctx (ctx)))
	    {
	      /* Global variables don't need to be copied,
		 the receiver side will use them directly.  */
	      if (!is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx)))
		{
		  by_ref = use_pointer_for_field (decl, ctx);
		  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_IN_REDUCTION)
		    install_var_field (decl, by_ref, 3, ctx);
		}
	      install_var_local (decl, ctx);
	      break;
	    }
	  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
	      && OMP_CLAUSE_REDUCTION_TASK (c))
	    {
	      install_var_local (decl, ctx);
	      break;
	    }
	  goto do_private;

	case OMP_CLAUSE_LASTPRIVATE:
	  /* Let the corresponding firstprivate clause create
	     the variable.  */
	  if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
	    break;
	  /* FALLTHRU */

	case OMP_CLAUSE_FIRSTPRIVATE:
	case OMP_CLAUSE_LINEAR:
	  decl = OMP_CLAUSE_DECL (c);
	/* Shared privatization path, also entered via goto from the
	   PRIVATE, SHARED, REDUCTION and IS_DEVICE_PTR cases above.  */
	do_private:
	  if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE
	       || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_IS_DEVICE_PTR)
	      && is_gimple_omp_offloaded (ctx->stmt))
	    {
	      if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
		install_var_field (decl, !omp_is_reference (decl), 3, ctx);
	      else if (TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
		install_var_field (decl, true, 3, ctx);
	      else
		install_var_field (decl, false, 3, ctx);
	    }
	  if (is_variable_sized (decl))
	    {
	      if (is_task_ctx (ctx))
		install_var_field (decl, false, 1, ctx);
	      break;
	    }
	  else if (is_taskreg_ctx (ctx))
	    {
	      bool global
		= is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx));
	      by_ref = use_pointer_for_field (decl, NULL);

	      if (is_task_ctx (ctx)
		  && (global || by_ref || omp_is_reference (decl)))
		{
		  install_var_field (decl, false, 1, ctx);
		  if (!global)
		    install_var_field (decl, by_ref, 2, ctx);
		}
	      else if (!global)
		install_var_field (decl, by_ref, 3, ctx);
	    }
	  install_var_local (decl, ctx);
	  break;

	case OMP_CLAUSE_USE_DEVICE_PTR:
	  decl = OMP_CLAUSE_DECL (c);
	  if (TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
	    install_var_field (decl, true, 3, ctx);
	  else
	    install_var_field (decl, false, 3, ctx);
	  if (DECL_SIZE (decl)
	      && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
	    {
	      /* Variable-sized decl: also install the underlying pointer
		 found through its DECL_VALUE_EXPR (*ptr).  */
	      tree decl2 = DECL_VALUE_EXPR (decl);
	      gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
	      decl2 = TREE_OPERAND (decl2, 0);
	      gcc_assert (DECL_P (decl2));
	      install_var_local (decl2, ctx);
	    }
	  install_var_local (decl, ctx);
	  break;

	case OMP_CLAUSE_IS_DEVICE_PTR:
	  decl = OMP_CLAUSE_DECL (c);
	  goto do_private;

	case OMP_CLAUSE__LOOPTEMP_:
	case OMP_CLAUSE__REDUCTEMP_:
	  gcc_assert (is_taskreg_ctx (ctx));
	  decl = OMP_CLAUSE_DECL (c);
	  install_var_field (decl, false, 3, ctx);
	  install_var_local (decl, ctx);
	  break;

	case OMP_CLAUSE_COPYPRIVATE:
	case OMP_CLAUSE_COPYIN:
	  decl = OMP_CLAUSE_DECL (c);
	  by_ref = use_pointer_for_field (decl, NULL);
	  install_var_field (decl, by_ref, 3, ctx);
	  break;

	/* Clauses whose single operand is an expression evaluated in the
	   enclosing context; just scan that operand there.  */
	case OMP_CLAUSE_FINAL:
	case OMP_CLAUSE_IF:
	case OMP_CLAUSE_NUM_THREADS:
	case OMP_CLAUSE_NUM_TEAMS:
	case OMP_CLAUSE_THREAD_LIMIT:
	case OMP_CLAUSE_DEVICE:
	case OMP_CLAUSE_SCHEDULE:
	case OMP_CLAUSE_DIST_SCHEDULE:
	case OMP_CLAUSE_DEPEND:
	case OMP_CLAUSE_PRIORITY:
	case OMP_CLAUSE_GRAINSIZE:
	case OMP_CLAUSE_NUM_TASKS:
	case OMP_CLAUSE_NUM_GANGS:
	case OMP_CLAUSE_NUM_WORKERS:
	case OMP_CLAUSE_VECTOR_LENGTH:
	  if (ctx->outer)
	    scan_omp_op (&OMP_CLAUSE_OPERAND (c, 0), ctx->outer);
	  break;

	case OMP_CLAUSE_TO:
	case OMP_CLAUSE_FROM:
	case OMP_CLAUSE_MAP:
	  if (ctx->outer)
	    scan_omp_op (&OMP_CLAUSE_SIZE (c), ctx->outer);
	  decl = OMP_CLAUSE_DECL (c);
	  /* Global variables with "omp declare target" attribute
	     don't need to be copied, the receiver side will use them
	     directly.  However, global variables with "omp declare target link"
	     attribute need to be copied.  Or when ALWAYS modifier is used.  */
	  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
	      && DECL_P (decl)
	      && ((OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_FIRSTPRIVATE_POINTER
		   && (OMP_CLAUSE_MAP_KIND (c)
		       != GOMP_MAP_FIRSTPRIVATE_REFERENCE))
		  || TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
	      && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_ALWAYS_TO
	      && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_ALWAYS_FROM
	      && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_ALWAYS_TOFROM
	      && is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx))
	      && varpool_node::get_create (decl)->offloadable
	      && !lookup_attribute ("omp declare target link",
				    DECL_ATTRIBUTES (decl)))
	    break;
	  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
	      && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_POINTER)
	    {
	      /* Ignore GOMP_MAP_POINTER kind for arrays in regions that are
		 not offloaded; there is nothing to map for those.  */
	      if (!is_gimple_omp_offloaded (ctx->stmt)
		  && !POINTER_TYPE_P (TREE_TYPE (decl))
		  && !OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c))
		break;
	    }
	  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
	      && (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER
		  || (OMP_CLAUSE_MAP_KIND (c)
		      == GOMP_MAP_FIRSTPRIVATE_REFERENCE)))
	    {
	      if (TREE_CODE (decl) == COMPONENT_REF
		  || (TREE_CODE (decl) == INDIRECT_REF
		      && TREE_CODE (TREE_OPERAND (decl, 0)) == COMPONENT_REF
		      && (TREE_CODE (TREE_TYPE (TREE_OPERAND (decl, 0)))
			  == REFERENCE_TYPE)))
		break;
	      if (DECL_SIZE (decl)
		  && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
		{
		  tree decl2 = DECL_VALUE_EXPR (decl);
		  gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
		  decl2 = TREE_OPERAND (decl2, 0);
		  gcc_assert (DECL_P (decl2));
		  install_var_local (decl2, ctx);
		}
	      install_var_local (decl, ctx);
	      break;
	    }
	  if (DECL_P (decl))
	    {
	      if (DECL_SIZE (decl)
		  && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
		{
		  tree decl2 = DECL_VALUE_EXPR (decl);
		  gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
		  decl2 = TREE_OPERAND (decl2, 0);
		  gcc_assert (DECL_P (decl2));
		  install_var_field (decl2, true, 3, ctx);
		  install_var_local (decl2, ctx);
		  install_var_local (decl, ctx);
		}
	      else
		{
		  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
		      && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_POINTER
		      && !OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c)
		      && TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
		    install_var_field (decl, true, 7, ctx);
		  else
		    install_var_field (decl, true, 3, ctx);
		  if (is_gimple_omp_offloaded (ctx->stmt)
		      && !OMP_CLAUSE_MAP_IN_REDUCTION (c))
		    install_var_local (decl, ctx);
		}
	    }
	  else
	    {
	      /* Mapped expression is not a bare decl.  If it is an array
		 section immediately followed by a zero-sized POINTER map
		 of its base, mark both as a zero-bias array section;
		 otherwise give it an anonymous pointer field of its own.  */
	      tree base = get_base_address (decl);
	      tree nc = OMP_CLAUSE_CHAIN (c);
	      if (DECL_P (base)
		  && nc != NULL_TREE
		  && OMP_CLAUSE_CODE (nc) == OMP_CLAUSE_MAP
		  && OMP_CLAUSE_DECL (nc) == base
		  && OMP_CLAUSE_MAP_KIND (nc) == GOMP_MAP_POINTER
		  && integer_zerop (OMP_CLAUSE_SIZE (nc)))
		{
		  OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c) = 1;
		  OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (nc) = 1;
		}
	      else
		{
		  if (ctx->outer)
		    {
		      scan_omp_op (&OMP_CLAUSE_DECL (c), ctx->outer);
		      decl = OMP_CLAUSE_DECL (c);
		    }
		  gcc_assert (!splay_tree_lookup (ctx->field_map,
						  (splay_tree_key) decl));
		  tree field
		    = build_decl (OMP_CLAUSE_LOCATION (c),
				  FIELD_DECL, NULL_TREE, ptr_type_node);
		  SET_DECL_ALIGN (field, TYPE_ALIGN (ptr_type_node));
		  insert_field_into_struct (ctx->record_type, field);
		  splay_tree_insert (ctx->field_map, (splay_tree_key) decl,
				     (splay_tree_value) field);
		}
	    }
	  break;

	case OMP_CLAUSE__GRIDDIM_:
	  if (ctx->outer)
	    {
	      scan_omp_op (&OMP_CLAUSE__GRIDDIM__SIZE (c), ctx->outer);
	      scan_omp_op (&OMP_CLAUSE__GRIDDIM__GROUP (c), ctx->outer);
	    }
	  break;

	/* Clauses that need no data-sharing setup in this pass.  */
	case OMP_CLAUSE_NOWAIT:
	case OMP_CLAUSE_ORDERED:
	case OMP_CLAUSE_COLLAPSE:
	case OMP_CLAUSE_UNTIED:
	case OMP_CLAUSE_MERGEABLE:
	case OMP_CLAUSE_PROC_BIND:
	case OMP_CLAUSE_SAFELEN:
	case OMP_CLAUSE_SIMDLEN:
	case OMP_CLAUSE_THREADS:
	case OMP_CLAUSE_SIMD:
	case OMP_CLAUSE_NOGROUP:
	case OMP_CLAUSE_DEFAULTMAP:
	case OMP_CLAUSE_ASYNC:
	case OMP_CLAUSE_WAIT:
	case OMP_CLAUSE_GANG:
	case OMP_CLAUSE_WORKER:
	case OMP_CLAUSE_VECTOR:
	case OMP_CLAUSE_INDEPENDENT:
	case OMP_CLAUSE_AUTO:
	case OMP_CLAUSE_SEQ:
	case OMP_CLAUSE_TILE:
	case OMP_CLAUSE__SIMT_:
	case OMP_CLAUSE_DEFAULT:
	case OMP_CLAUSE_NONTEMPORAL:
	case OMP_CLAUSE_IF_PRESENT:
	case OMP_CLAUSE_FINALIZE:
	case OMP_CLAUSE_TASK_REDUCTION:
	  break;

	case OMP_CLAUSE_ALIGNED:
	  decl = OMP_CLAUSE_DECL (c);
	  if (is_global_var (decl)
	      && TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
	    install_var_local (decl, ctx);
	  break;

	case OMP_CLAUSE__CONDTEMP_:
	  decl = OMP_CLAUSE_DECL (c);
	  if (is_parallel_ctx (ctx))
	    {
	      install_var_field (decl, false, 3, ctx);
	      install_var_local (decl, ctx);
	    }
	  else if (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
		   && (gimple_omp_for_kind (ctx->stmt) & GF_OMP_FOR_SIMD)
		   && !OMP_CLAUSE__CONDTEMP__ITER (c))
	    install_var_local (decl, ctx);
	  break;

	case OMP_CLAUSE__CACHE_:
	default:
	  gcc_unreachable ();
	}
    }

  /* Second pass: now that all decls are installed, fix up remapped decls
     (fixup_remapped_decl) and note which clauses carry GIMPLE sequences
     that must be scanned afterwards.  */
  for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
    {
      switch (OMP_CLAUSE_CODE (c))
	{
	case OMP_CLAUSE_LASTPRIVATE:
	  /* Let the corresponding firstprivate clause create
	     the variable.  */
	  if (OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c))
	    scan_array_reductions = true;
	  if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
	    break;
	  /* FALLTHRU */

	case OMP_CLAUSE_FIRSTPRIVATE:
	case OMP_CLAUSE_PRIVATE:
	case OMP_CLAUSE_LINEAR:
	case OMP_CLAUSE_IS_DEVICE_PTR:
	  decl = OMP_CLAUSE_DECL (c);
	  if (is_variable_sized (decl))
	    {
	      if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE
		   || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_IS_DEVICE_PTR)
		  && is_gimple_omp_offloaded (ctx->stmt))
		{
		  tree decl2 = DECL_VALUE_EXPR (decl);
		  gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
		  decl2 = TREE_OPERAND (decl2, 0);
		  gcc_assert (DECL_P (decl2));
		  install_var_local (decl2, ctx);
		  fixup_remapped_decl (decl2, ctx, false);
		}
	      install_var_local (decl, ctx);
	    }
	  fixup_remapped_decl (decl, ctx,
			       OMP_CLAUSE_CODE (c) == OMP_CLAUSE_PRIVATE
			       && OMP_CLAUSE_PRIVATE_DEBUG (c));
	  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
	      && OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c))
	    scan_array_reductions = true;
	  break;

	case OMP_CLAUSE_REDUCTION:
	case OMP_CLAUSE_IN_REDUCTION:
	  decl = OMP_CLAUSE_DECL (c);
	  if (TREE_CODE (decl) != MEM_REF)
	    {
	      if (is_variable_sized (decl))
		install_var_local (decl, ctx);
	      fixup_remapped_decl (decl, ctx, false);
	    }
	  if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
	    scan_array_reductions = true;
	  break;

	case OMP_CLAUSE_TASK_REDUCTION:
	  if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
	    scan_array_reductions = true;
	  break;

	case OMP_CLAUSE_SHARED:
	  /* Ignore shared directives in teams construct inside of
	     target construct.  */
	  if (gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS
	      && !is_host_teams_ctx (ctx))
	    break;
	  decl = OMP_CLAUSE_DECL (c);
	  if (is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx)))
	    break;
	  if (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
	    {
	      if (is_global_var (maybe_lookup_decl_in_outer_ctx (decl,
								 ctx->outer)))
		break;
	      bool by_ref = use_pointer_for_field (decl, ctx);
	      install_var_field (decl, by_ref, 11, ctx);
	      break;
	    }
	  fixup_remapped_decl (decl, ctx, false);
	  break;

	case OMP_CLAUSE_MAP:
	  if (!is_gimple_omp_offloaded (ctx->stmt))
	    break;
	  decl = OMP_CLAUSE_DECL (c);
	  if (DECL_P (decl)
	      && ((OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_FIRSTPRIVATE_POINTER
		   && (OMP_CLAUSE_MAP_KIND (c)
		       != GOMP_MAP_FIRSTPRIVATE_REFERENCE))
		  || TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
	      && is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx))
	      && varpool_node::get_create (decl)->offloadable)
	    break;
	  if (DECL_P (decl))
	    {
	      if ((OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_POINTER
		   || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER)
		  && TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE
		  && !COMPLETE_TYPE_P (TREE_TYPE (decl)))
		{
		  /* Incomplete array type: give the remapped decl the
		     remapped type rather than fixing up the original.  */
		  tree new_decl = lookup_decl (decl, ctx);
		  TREE_TYPE (new_decl)
		    = remap_type (TREE_TYPE (decl), &ctx->cb);
		}
	      else if (DECL_SIZE (decl)
		       && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
		{
		  tree decl2 = DECL_VALUE_EXPR (decl);
		  gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
		  decl2 = TREE_OPERAND (decl2, 0);
		  gcc_assert (DECL_P (decl2));
		  fixup_remapped_decl (decl2, ctx, false);
		  fixup_remapped_decl (decl, ctx, true);
		}
	      else
		fixup_remapped_decl (decl, ctx, false);
	    }
	  break;

	/* Nothing left to do for these in the second pass.  */
	case OMP_CLAUSE_COPYPRIVATE:
	case OMP_CLAUSE_COPYIN:
	case OMP_CLAUSE_DEFAULT:
	case OMP_CLAUSE_IF:
	case OMP_CLAUSE_NUM_THREADS:
	case OMP_CLAUSE_NUM_TEAMS:
	case OMP_CLAUSE_THREAD_LIMIT:
	case OMP_CLAUSE_DEVICE:
	case OMP_CLAUSE_SCHEDULE:
	case OMP_CLAUSE_DIST_SCHEDULE:
	case OMP_CLAUSE_NOWAIT:
	case OMP_CLAUSE_ORDERED:
	case OMP_CLAUSE_COLLAPSE:
	case OMP_CLAUSE_UNTIED:
	case OMP_CLAUSE_FINAL:
	case OMP_CLAUSE_MERGEABLE:
	case OMP_CLAUSE_PROC_BIND:
	case OMP_CLAUSE_SAFELEN:
	case OMP_CLAUSE_SIMDLEN:
	case OMP_CLAUSE_ALIGNED:
	case OMP_CLAUSE_DEPEND:
	case OMP_CLAUSE__LOOPTEMP_:
	case OMP_CLAUSE__REDUCTEMP_:
	case OMP_CLAUSE_TO:
	case OMP_CLAUSE_FROM:
	case OMP_CLAUSE_PRIORITY:
	case OMP_CLAUSE_GRAINSIZE:
	case OMP_CLAUSE_NUM_TASKS:
	case OMP_CLAUSE_THREADS:
	case OMP_CLAUSE_SIMD:
	case OMP_CLAUSE_NOGROUP:
	case OMP_CLAUSE_DEFAULTMAP:
	case OMP_CLAUSE_USE_DEVICE_PTR:
	case OMP_CLAUSE_NONTEMPORAL:
	case OMP_CLAUSE_ASYNC:
	case OMP_CLAUSE_WAIT:
	case OMP_CLAUSE_NUM_GANGS:
	case OMP_CLAUSE_NUM_WORKERS:
	case OMP_CLAUSE_VECTOR_LENGTH:
	case OMP_CLAUSE_GANG:
	case OMP_CLAUSE_WORKER:
	case OMP_CLAUSE_VECTOR:
	case OMP_CLAUSE_INDEPENDENT:
	case OMP_CLAUSE_AUTO:
	case OMP_CLAUSE_SEQ:
	case OMP_CLAUSE_TILE:
	case OMP_CLAUSE__GRIDDIM_:
	case OMP_CLAUSE__SIMT_:
	case OMP_CLAUSE_IF_PRESENT:
	case OMP_CLAUSE_FINALIZE:
	case OMP_CLAUSE__CONDTEMP_:
	  break;

	case OMP_CLAUSE__CACHE_:
	default:
	  gcc_unreachable ();
	}
    }

  /* Finally scan the GIMPLE sequences stashed in clauses (reduction
     init/merge sequences, lastprivate and linear sequences) so decls
     referenced there are processed in this context as well.  */
  gcc_checking_assert (!scan_array_reductions
		       || !is_gimple_omp_oacc (ctx->stmt));
  if (scan_array_reductions)
    {
      for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
	if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
	     || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_IN_REDUCTION
	     || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_TASK_REDUCTION)
	    && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
	  {
	    scan_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c), ctx);
	    scan_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c), ctx);
	  }
	else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
		 && OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c))
	  scan_omp (&OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c), ctx);
	else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
		 && OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c))
	  scan_omp (&OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c), ctx);
    }
}
1637
1638 /* Create a new name for omp child function. Returns an identifier. */
1639
1640 static tree
1641 create_omp_child_function_name (bool task_copy)
1642 {
1643 return clone_function_name_numbered (current_function_decl,
1644 task_copy ? "_omp_cpyfn" : "_omp_fn");
1645 }
1646
1647 /* Return true if CTX may belong to offloaded code: either if current function
1648 is offloaded, or any enclosing context corresponds to a target region. */
1649
1650 static bool
1651 omp_maybe_offloaded_ctx (omp_context *ctx)
1652 {
1653 if (cgraph_node::get (current_function_decl)->offloadable)
1654 return true;
1655 for (; ctx; ctx = ctx->outer)
1656 if (is_gimple_omp_offloaded (ctx->stmt))
1657 return true;
1658 return false;
1659 }
1660
/* Build a decl for the omp child function.  It'll not contain a body
   yet, just the bare decl.  For TASK_COPY the function has signature
   void (void *, void *) and is recorded as the task's copy function;
   otherwise it is void (void *) and becomes CTX->cb.dst_fn.  Also sets
   up the result decl, the .omp_data_i parameter (CTX->receiver_decl for
   the non-copy case), offloadability attributes, and allocates the
   struct function.  */

static void
create_omp_child_function (omp_context *ctx, bool task_copy)
{
  tree decl, type, name, t;

  name = create_omp_child_function_name (task_copy);
  /* Task copy functions take both a destination and a source data block;
     the outlined body only receives the .omp_data_i block.  */
  if (task_copy)
    type = build_function_type_list (void_type_node, ptr_type_node,
				    ptr_type_node, NULL_TREE);
  else
    type = build_function_type_list (void_type_node, ptr_type_node, NULL_TREE);

  decl = build_decl (gimple_location (ctx->stmt), FUNCTION_DECL, name, type);

  gcc_checking_assert (!is_gimple_omp_oacc (ctx->stmt)
		       || !task_copy);
  if (!task_copy)
    ctx->cb.dst_fn = decl;
  else
    gimple_omp_task_set_copy_fn (ctx->stmt, decl);

  /* Local, artificial, non-inlinable function with a definition.  */
  TREE_STATIC (decl) = 1;
  TREE_USED (decl) = 1;
  DECL_ARTIFICIAL (decl) = 1;
  DECL_IGNORED_P (decl) = 0;
  TREE_PUBLIC (decl) = 0;
  DECL_UNINLINABLE (decl) = 1;
  DECL_EXTERNAL (decl) = 0;
  DECL_CONTEXT (decl) = NULL_TREE;
  DECL_INITIAL (decl) = make_node (BLOCK);
  BLOCK_SUPERCONTEXT (DECL_INITIAL (decl)) = decl;
  DECL_ATTRIBUTES (decl) = DECL_ATTRIBUTES (current_function_decl);
  /* Remove omp declare simd attribute from the new attributes.
     The attribute chain is shared with current_function_decl (assigned
     just above), so nodes kept before the last "omp declare simd" are
     unshared with copy_node rather than spliced in place.  */
  if (tree a = lookup_attribute ("omp declare simd", DECL_ATTRIBUTES (decl)))
    {
      /* Advance A past the last "omp declare simd" attribute; everything
	 from A on can be shared unmodified.  */
      while (tree a2 = lookup_attribute ("omp declare simd", TREE_CHAIN (a)))
	a = a2;
      a = TREE_CHAIN (a);
      for (tree *p = &DECL_ATTRIBUTES (decl); *p != a;)
	if (is_attribute_p ("omp declare simd", get_attribute_name (*p)))
	  *p = TREE_CHAIN (*p);
	else
	  {
	    tree chain = TREE_CHAIN (*p);
	    *p = copy_node (*p);
	    p = &TREE_CHAIN (*p);
	    *p = chain;
	  }
    }
  /* Inherit optimization/target options and versioning from the parent.  */
  DECL_FUNCTION_SPECIFIC_OPTIMIZATION (decl)
    = DECL_FUNCTION_SPECIFIC_OPTIMIZATION (current_function_decl);
  DECL_FUNCTION_SPECIFIC_TARGET (decl)
    = DECL_FUNCTION_SPECIFIC_TARGET (current_function_decl);
  DECL_FUNCTION_VERSIONED (decl)
    = DECL_FUNCTION_VERSIONED (current_function_decl);

  if (omp_maybe_offloaded_ctx (ctx))
    {
      cgraph_node::get_create (decl)->offloadable = 1;
      if (ENABLE_OFFLOADING)
	g->have_offload = true;
    }

  /* Mark offloadable child functions so later passes recognize them,
     unless the parent already carries "omp declare target".  */
  if (cgraph_node::get_create (decl)->offloadable
      && !lookup_attribute ("omp declare target",
			    DECL_ATTRIBUTES (current_function_decl)))
    {
      const char *target_attr = (is_gimple_omp_offloaded (ctx->stmt)
				 ? "omp target entrypoint"
				 : "omp declare target");
      DECL_ATTRIBUTES (decl)
	= tree_cons (get_identifier (target_attr),
		     NULL_TREE, DECL_ATTRIBUTES (decl));
    }

  /* void result.  */
  t = build_decl (DECL_SOURCE_LOCATION (decl),
		  RESULT_DECL, NULL_TREE, void_type_node);
  DECL_ARTIFICIAL (t) = 1;
  DECL_IGNORED_P (t) = 1;
  DECL_CONTEXT (t) = decl;
  DECL_RESULT (decl) = t;

  /* The .omp_data_i parameter: the pointer through which the child reads
     the marshalled data.  */
  tree data_name = get_identifier (".omp_data_i");
  t = build_decl (DECL_SOURCE_LOCATION (decl), PARM_DECL, data_name,
		  ptr_type_node);
  DECL_ARTIFICIAL (t) = 1;
  DECL_NAMELESS (t) = 1;
  DECL_ARG_TYPE (t) = ptr_type_node;
  /* NOTE(review): DECL_CONTEXT is set to current_function_decl here, not
     to DECL -- presumably fixed up later when the body is moved; confirm
     against the expand/lowering code.  */
  DECL_CONTEXT (t) = current_function_decl;
  TREE_USED (t) = 1;
  TREE_READONLY (t) = 1;
  DECL_ARGUMENTS (decl) = t;
  if (!task_copy)
    ctx->receiver_decl = t;
  else
    {
      /* Task copy functions additionally take the destination block
	 .omp_data_o as their first argument.  */
      t = build_decl (DECL_SOURCE_LOCATION (decl),
		      PARM_DECL, get_identifier (".omp_data_o"),
		      ptr_type_node);
      DECL_ARTIFICIAL (t) = 1;
      DECL_NAMELESS (t) = 1;
      DECL_ARG_TYPE (t) = ptr_type_node;
      DECL_CONTEXT (t) = current_function_decl;
      TREE_USED (t) = 1;
      TREE_ADDRESSABLE (t) = 1;
      DECL_CHAIN (t) = DECL_ARGUMENTS (decl);
      DECL_ARGUMENTS (decl) = t;
    }

  /* Allocate memory for the function structure.  The call to
     allocate_struct_function clobbers CFUN, so we need to restore
     it afterward.  */
  push_struct_function (decl);
  cfun->function_end_locus = gimple_location (ctx->stmt);
  init_tree_ssa (cfun);
  pop_cfun ();
}
1781
/* Callback for walk_gimple_seq.  Check if combined parallel
   contains gimple_omp_for_combined_into_p OMP_FOR.  On entry WI->INFO
   points at the gf_mask loop kind to search for; on a match WI->INFO is
   overwritten with the found GIMPLE_OMP_FOR statement and the walk is
   terminated by returning integer_zero_node.  */

tree
omp_find_combined_for (gimple_stmt_iterator *gsi_p,
		       bool *handled_ops_p,
		       struct walk_stmt_info *wi)
{
  gimple *stmt = gsi_stmt (*gsi_p);

  *handled_ops_p = true;
  switch (gimple_code (stmt))
    {
    /* Recurse into nested OMP constructs' bodies.  */
    WALK_SUBSTMTS;

    case GIMPLE_OMP_FOR:
      if (gimple_omp_for_combined_into_p (stmt)
	  && gimple_omp_for_kind (stmt)
	     == *(const enum gf_mask *) (wi->info))
	{
	  /* Report the statement back to the caller and stop walking.  */
	  wi->info = stmt;
	  return integer_zero_node;
	}
      break;
    default:
      break;
    }
  return NULL;
}
1811
/* Add _LOOPTEMP_/_REDUCTEMP_ clauses on OpenMP parallel or task.
   MSK is the loop kind to search for in STMT's body (see
   omp_find_combined_for); STMT is the GIMPLE_OMP_PARALLEL or
   GIMPLE_OMP_TASK whose clause chain is extended; OUTER_CTX is the
   enclosing context whose decl map receives identity mappings for the
   new temporaries.  */

static void
add_taskreg_looptemp_clauses (enum gf_mask msk, gimple *stmt,
			      omp_context *outer_ctx)
{
  struct walk_stmt_info wi;

  memset (&wi, 0, sizeof (wi));
  wi.val_only = true;
  wi.info = (void *) &msk;
  walk_gimple_seq (gimple_omp_body (stmt), omp_find_combined_for, NULL, &wi);
  /* wi.info no longer pointing at &msk means the walk found a matching
     combined GIMPLE_OMP_FOR and stored it there.  */
  if (wi.info != (void *) &msk)
    {
      gomp_for *for_stmt = as_a <gomp_for *> ((gimple *) wi.info);
      struct omp_for_data fd;
      omp_extract_for_data (for_stmt, &fd, NULL);
      /* We need two temporaries with fd.loop.v type (istart/iend)
	 and then (fd.collapse - 1) temporaries with the same
	 type for count2 ... countN-1 vars if not constant.  */
      size_t count = 2, i;
      tree type = fd.iter_type;
      if (fd.collapse > 1
	  && TREE_CODE (fd.loop.n2) != INTEGER_CST)
	{
	  count += fd.collapse - 1;
	  /* If there are lastprivate clauses on the inner
	     GIMPLE_OMP_FOR, add one more temporaries for the total number
	     of iterations (product of count1 ... countN-1).  */
	  if (omp_find_clause (gimple_omp_for_clauses (for_stmt),
			       OMP_CLAUSE_LASTPRIVATE))
	    count++;
	  else if (msk == GF_OMP_FOR_KIND_FOR
		   && omp_find_clause (gimple_omp_parallel_clauses (stmt),
				       OMP_CLAUSE_LASTPRIVATE))
	    count++;
	}
      /* Prepend one _LOOPTEMP_ clause per temporary.  */
      for (i = 0; i < count; i++)
	{
	  tree temp = create_tmp_var (type);
	  tree c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__LOOPTEMP_);
	  insert_decl_map (&outer_ctx->cb, temp, temp);
	  OMP_CLAUSE_DECL (c) = temp;
	  OMP_CLAUSE_CHAIN (c) = gimple_omp_taskreg_clauses (stmt);
	  gimple_omp_taskreg_set_clauses (stmt, c);
	}
    }
  /* Taskloops with reductions additionally need a _REDUCTEMP_ pointer
     temporary.  */
  if (msk == GF_OMP_FOR_KIND_TASKLOOP
      && omp_find_clause (gimple_omp_task_clauses (stmt),
			  OMP_CLAUSE_REDUCTION))
    {
      tree type = build_pointer_type (pointer_sized_int_node);
      tree temp = create_tmp_var (type);
      tree c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__REDUCTEMP_);
      insert_decl_map (&outer_ctx->cb, temp, temp);
      OMP_CLAUSE_DECL (c) = temp;
      OMP_CLAUSE_CHAIN (c) = gimple_omp_task_clauses (stmt);
      gimple_omp_task_set_clauses (stmt, c);
    }
}
1872
/* Scan an OpenMP parallel directive.  Creates an omp_context, the
   .omp_data_s record describing the data to marshal, and the child
   function decl, then scans the clauses and body.  */

static void
scan_omp_parallel (gimple_stmt_iterator *gsi, omp_context *outer_ctx)
{
  omp_context *ctx;
  tree name;
  gomp_parallel *stmt = as_a <gomp_parallel *> (gsi_stmt (*gsi));

  /* Ignore parallel directives with empty bodies, unless there
     are copyin clauses.  */
  if (optimize > 0
      && empty_body_p (gimple_omp_body (stmt))
      && omp_find_clause (gimple_omp_parallel_clauses (stmt),
			  OMP_CLAUSE_COPYIN) == NULL)
    {
      gsi_replace (gsi, gimple_build_nop (), false);
      return;
    }

  if (gimple_omp_parallel_combined_p (stmt))
    add_taskreg_looptemp_clauses (GF_OMP_FOR_KIND_FOR, stmt, outer_ctx);
  /* If any reduction clause has the task modifier, prepend a single
     _REDUCTEMP_ clause for it.  */
  for (tree c = omp_find_clause (gimple_omp_parallel_clauses (stmt),
				 OMP_CLAUSE_REDUCTION);
       c; c = omp_find_clause (OMP_CLAUSE_CHAIN (c), OMP_CLAUSE_REDUCTION))
    if (OMP_CLAUSE_REDUCTION_TASK (c))
      {
	tree type = build_pointer_type (pointer_sized_int_node);
	tree temp = create_tmp_var (type);
	tree c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__REDUCTEMP_);
	if (outer_ctx)
	  insert_decl_map (&outer_ctx->cb, temp, temp);
	OMP_CLAUSE_DECL (c) = temp;
	OMP_CLAUSE_CHAIN (c) = gimple_omp_parallel_clauses (stmt);
	gimple_omp_parallel_set_clauses (stmt, c);
	break;
      }
    else if (OMP_CLAUSE_CHAIN (c) == NULL_TREE)
      break;

  /* Build the context and the .omp_data_s record type whose fields the
     clause scan will populate.  */
  ctx = new_omp_context (stmt, outer_ctx);
  taskreg_contexts.safe_push (ctx);
  if (taskreg_nesting_level > 1)
    ctx->is_nested = true;
  ctx->field_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
  ctx->record_type = lang_hooks.types.make_type (RECORD_TYPE);
  name = create_tmp_var_name (".omp_data_s");
  name = build_decl (gimple_location (stmt),
		     TYPE_DECL, name, ctx->record_type);
  DECL_ARTIFICIAL (name) = 1;
  DECL_NAMELESS (name) = 1;
  TYPE_NAME (ctx->record_type) = name;
  TYPE_ARTIFICIAL (ctx->record_type) = 1;
  if (!gimple_omp_parallel_grid_phony (stmt))
    {
      create_omp_child_function (ctx, false);
      gimple_omp_parallel_set_child_fn (stmt, ctx->cb.dst_fn);
    }

  scan_sharing_clauses (gimple_omp_parallel_clauses (stmt), ctx);
  scan_omp (gimple_omp_body_ptr (stmt), ctx);

  /* No fields means nothing needs to be marshalled at all.  */
  if (TYPE_FIELDS (ctx->record_type) == NULL)
    ctx->record_type = ctx->receiver_decl = NULL;
}
1938
/* Scan an OpenMP task directive.  Like scan_omp_parallel, builds the
   context, the .omp_data_s record and the child function, plus a task
   copy function when a sender record (.omp_data_a) is needed.  */

static void
scan_omp_task (gimple_stmt_iterator *gsi, omp_context *outer_ctx)
{
  omp_context *ctx;
  tree name, t;
  gomp_task *stmt = as_a <gomp_task *> (gsi_stmt (*gsi));

  /* Ignore task directives with empty bodies, unless they have depend
     clause.  */
  if (optimize > 0
      && gimple_omp_body (stmt)
      && empty_body_p (gimple_omp_body (stmt))
      && !omp_find_clause (gimple_omp_task_clauses (stmt), OMP_CLAUSE_DEPEND))
    {
      gsi_replace (gsi, gimple_build_nop (), false);
      return;
    }

  if (gimple_omp_task_taskloop_p (stmt))
    add_taskreg_looptemp_clauses (GF_OMP_FOR_KIND_TASKLOOP, stmt, outer_ctx);

  ctx = new_omp_context (stmt, outer_ctx);

  /* A bare taskwait (no body): only the clauses need scanning.  */
  if (gimple_omp_task_taskwait_p (stmt))
    {
      scan_sharing_clauses (gimple_omp_task_clauses (stmt), ctx);
      return;
    }

  taskreg_contexts.safe_push (ctx);
  if (taskreg_nesting_level > 1)
    ctx->is_nested = true;
  ctx->field_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
  ctx->record_type = lang_hooks.types.make_type (RECORD_TYPE);
  name = create_tmp_var_name (".omp_data_s");
  name = build_decl (gimple_location (stmt),
		     TYPE_DECL, name, ctx->record_type);
  DECL_ARTIFICIAL (name) = 1;
  DECL_NAMELESS (name) = 1;
  TYPE_NAME (ctx->record_type) = name;
  TYPE_ARTIFICIAL (ctx->record_type) = 1;
  create_omp_child_function (ctx, false);
  gimple_omp_task_set_child_fn (stmt, ctx->cb.dst_fn);

  scan_sharing_clauses (gimple_omp_task_clauses (stmt), ctx);

  /* srecord_type is set as a side effect of the clause scan when a
     separate sender record is required -- presumably by
     install_var_field; confirm against its definition.  In that case a
     task copy function is needed as well.  */
  if (ctx->srecord_type)
    {
      name = create_tmp_var_name (".omp_data_a");
      name = build_decl (gimple_location (stmt),
			 TYPE_DECL, name, ctx->srecord_type);
      DECL_ARTIFICIAL (name) = 1;
      DECL_NAMELESS (name) = 1;
      TYPE_NAME (ctx->srecord_type) = name;
      TYPE_ARTIFICIAL (ctx->srecord_type) = 1;
      create_omp_child_function (ctx, true);
    }

  scan_omp (gimple_omp_body_ptr (stmt), ctx);

  /* No fields: nothing to marshal, so record a zero-sized, byte-aligned
     argument block on the task.  */
  if (TYPE_FIELDS (ctx->record_type) == NULL)
    {
      ctx->record_type = ctx->receiver_decl = NULL;
      t = build_int_cst (long_integer_type_node, 0);
      gimple_omp_task_set_arg_size (stmt, t);
      t = build_int_cst (long_integer_type_node, 1);
      gimple_omp_task_set_arg_align (stmt, t);
    }
}
2010
2011 /* Helper function for finish_taskreg_scan, called through walk_tree.
2012 If maybe_lookup_decl_in_outer_context returns non-NULL for some
2013 tree, replace it in the expression. */
2014
2015 static tree
2016 finish_taskreg_remap (tree *tp, int *walk_subtrees, void *data)
2017 {
2018 if (VAR_P (*tp))
2019 {
2020 omp_context *ctx = (omp_context *) data;
2021 tree t = maybe_lookup_decl_in_outer_ctx (*tp, ctx);
2022 if (t != *tp)
2023 {
2024 if (DECL_HAS_VALUE_EXPR_P (t))
2025 t = unshare_expr (DECL_VALUE_EXPR (t));
2026 *tp = t;
2027 }
2028 *walk_subtrees = 0;
2029 }
2030 else if (IS_TYPE_OR_DECL_P (*tp))
2031 *walk_subtrees = 0;
2032 return NULL_TREE;
2033 }
2034
2035 /* If any decls have been made addressable during scan_omp,
2036 adjust their fields if needed, and layout record types
2037 of parallel/task constructs. */
2038
static void
finish_taskreg_scan (omp_context *ctx)
{
  /* Nothing to do for constructs that carry no data-sharing record.  */
  if (ctx->record_type == NULL_TREE)
    return;

  /* If any task_shared_vars were needed, verify all
     OMP_CLAUSE_SHARED clauses on GIMPLE_OMP_{PARALLEL,TASK,TEAMS}
     statements if use_pointer_for_field hasn't changed
     because of that.  If it did, update field types now.  */
  if (task_shared_vars)
    {
      tree c;

      for (c = gimple_omp_taskreg_clauses (ctx->stmt);
	   c; c = OMP_CLAUSE_CHAIN (c))
	if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED
	    && !OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
	  {
	    tree decl = OMP_CLAUSE_DECL (c);

	    /* Global variables don't need to be copied,
	       the receiver side will use them directly.  */
	    if (is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx)))
	      continue;
	    /* Only decls that were made addressable during scanning and
	       now need pass-by-reference require a field fixup.  */
	    if (!bitmap_bit_p (task_shared_vars, DECL_UID (decl))
		|| !use_pointer_for_field (decl, ctx))
	      continue;
	    tree field = lookup_field (decl, ctx);
	    /* Field is already a pointer to the decl's type: done.  */
	    if (TREE_CODE (TREE_TYPE (field)) == POINTER_TYPE
		&& TREE_TYPE (TREE_TYPE (field)) == TREE_TYPE (decl))
	      continue;
	    /* Retype the field as a pointer; drop volatility and any
	       user alignment, which applied to the by-value copy.  */
	    TREE_TYPE (field) = build_pointer_type (TREE_TYPE (decl));
	    TREE_THIS_VOLATILE (field) = 0;
	    DECL_USER_ALIGN (field) = 0;
	    SET_DECL_ALIGN (field, TYPE_ALIGN (TREE_TYPE (field)));
	    /* Keep the record's alignment at least as strict as the
	       retyped field's.  */
	    if (TYPE_ALIGN (ctx->record_type) < DECL_ALIGN (field))
	      SET_TYPE_ALIGN (ctx->record_type, DECL_ALIGN (field));
	    if (ctx->srecord_type)
	      {
		/* Mirror the change on the sender-side record.  */
		tree sfield = lookup_sfield (decl, ctx);
		TREE_TYPE (sfield) = TREE_TYPE (field);
		TREE_THIS_VOLATILE (sfield) = 0;
		DECL_USER_ALIGN (sfield) = 0;
		SET_DECL_ALIGN (sfield, DECL_ALIGN (field));
		if (TYPE_ALIGN (ctx->srecord_type) < DECL_ALIGN (sfield))
		  SET_TYPE_ALIGN (ctx->srecord_type, DECL_ALIGN (sfield));
	      }
	  }
    }

  if (gimple_code (ctx->stmt) == GIMPLE_OMP_PARALLEL)
    {
      tree clauses = gimple_omp_parallel_clauses (ctx->stmt);
      tree c = omp_find_clause (clauses, OMP_CLAUSE__REDUCTEMP_);
      if (c)
	{
	  /* Move the _reductemp_ clause first.  GOMP_parallel_reductions
	     expects to find it at the start of data.  */
	  tree f = lookup_field (OMP_CLAUSE_DECL (c), ctx);
	  tree *p = &TYPE_FIELDS (ctx->record_type);
	  /* Unlink F from wherever it sits in the field chain...  */
	  while (*p)
	    if (*p == f)
	      {
		*p = DECL_CHAIN (*p);
		break;
	      }
	    else
	      p = &DECL_CHAIN (*p);
	  /* ...and re-link it at the head.  */
	  DECL_CHAIN (f) = TYPE_FIELDS (ctx->record_type);
	  TYPE_FIELDS (ctx->record_type) = f;
	}
      layout_type (ctx->record_type);
      fixup_child_record_type (ctx);
    }
  else if (gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS)
    {
      layout_type (ctx->record_type);
      fixup_child_record_type (ctx);
    }
  else
    {
      /* Task (including taskloop) contexts.  */
      location_t loc = gimple_location (ctx->stmt);
      tree *p, vla_fields = NULL_TREE, *q = &vla_fields;
      /* Move VLA fields to the end.  */
      p = &TYPE_FIELDS (ctx->record_type);
      while (*p)
	if (!TYPE_SIZE_UNIT (TREE_TYPE (*p))
	    || ! TREE_CONSTANT (TYPE_SIZE_UNIT (TREE_TYPE (*p))))
	  {
	    /* Variable-sized field: splice it onto the VLA list.  */
	    *q = *p;
	    *p = TREE_CHAIN (*p);
	    TREE_CHAIN (*q) = NULL_TREE;
	    q = &TREE_CHAIN (*q);
	  }
	else
	  p = &DECL_CHAIN (*p);
      /* Append the collected VLA fields after all fixed-size ones.  */
      *p = vla_fields;
      if (gimple_omp_task_taskloop_p (ctx->stmt))
	{
	  /* Move fields corresponding to first and second _looptemp_
	     clause first.  There are filled by GOMP_taskloop
	     and thus need to be in specific positions.  */
	  tree clauses = gimple_omp_task_clauses (ctx->stmt);
	  tree c1 = omp_find_clause (clauses, OMP_CLAUSE__LOOPTEMP_);
	  tree c2 = omp_find_clause (OMP_CLAUSE_CHAIN (c1),
				     OMP_CLAUSE__LOOPTEMP_);
	  tree c3 = omp_find_clause (clauses, OMP_CLAUSE__REDUCTEMP_);
	  tree f1 = lookup_field (OMP_CLAUSE_DECL (c1), ctx);
	  tree f2 = lookup_field (OMP_CLAUSE_DECL (c2), ctx);
	  tree f3 = c3 ? lookup_field (OMP_CLAUSE_DECL (c3), ctx) : NULL_TREE;
	  /* Unlink f1/f2 (and f3 if present) from the field chain.  */
	  p = &TYPE_FIELDS (ctx->record_type);
	  while (*p)
	    if (*p == f1 || *p == f2 || *p == f3)
	      *p = DECL_CHAIN (*p);
	    else
	      p = &DECL_CHAIN (*p);
	  /* Re-link them at the head in the order f1, f2[, f3].  */
	  DECL_CHAIN (f1) = f2;
	  if (c3)
	    {
	      DECL_CHAIN (f2) = f3;
	      DECL_CHAIN (f3) = TYPE_FIELDS (ctx->record_type);
	    }
	  else
	    DECL_CHAIN (f2) = TYPE_FIELDS (ctx->record_type);
	  TYPE_FIELDS (ctx->record_type) = f1;
	  if (ctx->srecord_type)
	    {
	      /* Perform the same reordering on the sender record.  */
	      f1 = lookup_sfield (OMP_CLAUSE_DECL (c1), ctx);
	      f2 = lookup_sfield (OMP_CLAUSE_DECL (c2), ctx);
	      if (c3)
		f3 = lookup_sfield (OMP_CLAUSE_DECL (c3), ctx);
	      p = &TYPE_FIELDS (ctx->srecord_type);
	      while (*p)
		if (*p == f1 || *p == f2 || *p == f3)
		  *p = DECL_CHAIN (*p);
		else
		  p = &DECL_CHAIN (*p);
	      DECL_CHAIN (f1) = f2;
	      DECL_CHAIN (f2) = TYPE_FIELDS (ctx->srecord_type);
	      if (c3)
		{
		  DECL_CHAIN (f2) = f3;
		  DECL_CHAIN (f3) = TYPE_FIELDS (ctx->srecord_type);
		}
	      else
		DECL_CHAIN (f2) = TYPE_FIELDS (ctx->srecord_type);
	      TYPE_FIELDS (ctx->srecord_type) = f1;
	    }
	}
      layout_type (ctx->record_type);
      fixup_child_record_type (ctx);
      if (ctx->srecord_type)
	layout_type (ctx->srecord_type);
      /* Record the (possibly variable) size and alignment of the task
	 data block on the task statement for the expansion phase.  */
      tree t = fold_convert_loc (loc, long_integer_type_node,
				 TYPE_SIZE_UNIT (ctx->record_type));
      if (TREE_CODE (t) != INTEGER_CST)
	{
	  /* A VLA-sized record: the size expression may reference decls
	     from inner contexts; remap them to the outer context.  */
	  t = unshare_expr (t);
	  walk_tree (&t, finish_taskreg_remap, ctx, NULL);
	}
      gimple_omp_task_set_arg_size (ctx->stmt, t);
      t = build_int_cst (long_integer_type_node,
			 TYPE_ALIGN_UNIT (ctx->record_type));
      gimple_omp_task_set_arg_align (ctx->stmt, t);
    }
}
2206
2207 /* Find the enclosing offload context. */
2208
2209 static omp_context *
2210 enclosing_target_ctx (omp_context *ctx)
2211 {
2212 for (; ctx; ctx = ctx->outer)
2213 if (gimple_code (ctx->stmt) == GIMPLE_OMP_TARGET)
2214 break;
2215
2216 return ctx;
2217 }
2218
2219 /* Return true if ctx is part of an oacc kernels region. */
2220
2221 static bool
2222 ctx_in_oacc_kernels_region (omp_context *ctx)
2223 {
2224 for (;ctx != NULL; ctx = ctx->outer)
2225 {
2226 gimple *stmt = ctx->stmt;
2227 if (gimple_code (stmt) == GIMPLE_OMP_TARGET
2228 && gimple_omp_target_kind (stmt) == GF_OMP_TARGET_KIND_OACC_KERNELS)
2229 return true;
2230 }
2231
2232 return false;
2233 }
2234
2235 /* Check the parallelism clauses inside a kernels regions.
2236 Until kernels handling moves to use the same loop indirection
2237 scheme as parallel, we need to do this checking early. */
2238
2239 static unsigned
2240 check_oacc_kernel_gwv (gomp_for *stmt, omp_context *ctx)
2241 {
2242 bool checking = true;
2243 unsigned outer_mask = 0;
2244 unsigned this_mask = 0;
2245 bool has_seq = false, has_auto = false;
2246
2247 if (ctx->outer)
2248 outer_mask = check_oacc_kernel_gwv (NULL, ctx->outer);
2249 if (!stmt)
2250 {
2251 checking = false;
2252 if (gimple_code (ctx->stmt) != GIMPLE_OMP_FOR)
2253 return outer_mask;
2254 stmt = as_a <gomp_for *> (ctx->stmt);
2255 }
2256
2257 for (tree c = gimple_omp_for_clauses (stmt); c; c = OMP_CLAUSE_CHAIN (c))
2258 {
2259 switch (OMP_CLAUSE_CODE (c))
2260 {
2261 case OMP_CLAUSE_GANG:
2262 this_mask |= GOMP_DIM_MASK (GOMP_DIM_GANG);
2263 break;
2264 case OMP_CLAUSE_WORKER:
2265 this_mask |= GOMP_DIM_MASK (GOMP_DIM_WORKER);
2266 break;
2267 case OMP_CLAUSE_VECTOR:
2268 this_mask |= GOMP_DIM_MASK (GOMP_DIM_VECTOR);
2269 break;
2270 case OMP_CLAUSE_SEQ:
2271 has_seq = true;
2272 break;
2273 case OMP_CLAUSE_AUTO:
2274 has_auto = true;
2275 break;
2276 default:
2277 break;
2278 }
2279 }
2280
2281 if (checking)
2282 {
2283 if (has_seq && (this_mask || has_auto))
2284 error_at (gimple_location (stmt), "%<seq%> overrides other"
2285 " OpenACC loop specifiers");
2286 else if (has_auto && this_mask)
2287 error_at (gimple_location (stmt), "%<auto%> conflicts with other"
2288 " OpenACC loop specifiers");
2289
2290 if (this_mask & outer_mask)
2291 error_at (gimple_location (stmt), "inner loop uses same"
2292 " OpenACC parallelism as containing loop");
2293 }
2294
2295 return outer_mask | this_mask;
2296 }
2297
/* Scan a GIMPLE_OMP_FOR.  Creates a new omp_context for the loop,
   performs early OpenACC diagnostics/clause filtering, then scans the
   clauses, pre-body, loop-control operands and body.  Returns the new
   context.  */

static omp_context *
scan_omp_for (gomp_for *stmt, omp_context *outer_ctx)
{
  omp_context *ctx;
  size_t i;
  tree clauses = gimple_omp_for_clauses (stmt);

  ctx = new_omp_context (stmt, outer_ctx);

  if (is_gimple_omp_oacc (stmt))
    {
      omp_context *tgt = enclosing_target_ctx (outer_ctx);

      /* In a parallel region (or an orphaned loop), gang/worker/vector
	 clauses may not carry an argument expression.  */
      if (!tgt || is_oacc_parallel (tgt))
	for (tree c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
	  {
	    char const *check = NULL;

	    switch (OMP_CLAUSE_CODE (c))
	      {
	      case OMP_CLAUSE_GANG:
		check = "gang";
		break;

	      case OMP_CLAUSE_WORKER:
		check = "worker";
		break;

	      case OMP_CLAUSE_VECTOR:
		check = "vector";
		break;

	      default:
		break;
	      }

	    if (check && OMP_CLAUSE_OPERAND (c, 0))
	      error_at (gimple_location (stmt),
			"argument not permitted on %qs clause in"
			" OpenACC %<parallel%>", check);
	  }

      if (tgt && is_oacc_kernels (tgt))
	{
	  /* Strip out reductions, as they are not handled yet.  */
	  tree *prev_ptr = &clauses;

	  /* Unlink every OMP_CLAUSE_REDUCTION from the clause chain.  */
	  while (tree probe = *prev_ptr)
	    {
	      tree *next_ptr = &OMP_CLAUSE_CHAIN (probe);

	      if (OMP_CLAUSE_CODE (probe) == OMP_CLAUSE_REDUCTION)
		*prev_ptr = *next_ptr;
	      else
		prev_ptr = next_ptr;
	    }

	  /* Store the filtered clause list back on the statement before
	     checking gang/worker/vector nesting.  */
	  gimple_omp_for_set_clauses (stmt, clauses);
	  check_oacc_kernel_gwv (stmt, ctx);
	}
    }

  scan_sharing_clauses (clauses, ctx);

  /* Scan the pre-body and the per-collapse-level control operands
     (index, initial value, final value, increment) before the body.  */
  scan_omp (gimple_omp_for_pre_body_ptr (stmt), ctx);
  for (i = 0; i < gimple_omp_for_collapse (stmt); i++)
    {
      scan_omp_op (gimple_omp_for_index_ptr (stmt, i), ctx);
      scan_omp_op (gimple_omp_for_initial_ptr (stmt, i), ctx);
      scan_omp_op (gimple_omp_for_final_ptr (stmt, i), ctx);
      scan_omp_op (gimple_omp_for_incr_ptr (stmt, i), ctx);
    }
  scan_omp (gimple_omp_body_ptr (stmt), ctx);
  return ctx;
}
2375
/* Duplicate #pragma omp simd, one for SIMT, another one for SIMD.
   Replaces STMT at GSI with a GIMPLE_BIND of the shape

     cond = GOMP_USE_SIMT ();
     if (cond != 0) goto lab1; else goto lab2;
     lab1: <SIMT copy of the loop, with an added _simt_ clause>
	   goto lab3;
     lab2: <the original loop>
     lab3:

   and scans both loop copies, linking them via simt_stmt.  */

static void
scan_omp_simd (gimple_stmt_iterator *gsi, gomp_for *stmt,
	       omp_context *outer_ctx)
{
  gbind *bind = gimple_build_bind (NULL, NULL, NULL);
  gsi_replace (gsi, bind, false);
  gimple_seq seq = NULL;
  /* COND receives the runtime "use SIMT?" decision.  */
  gimple *g = gimple_build_call_internal (IFN_GOMP_USE_SIMT, 0);
  tree cond = create_tmp_var_raw (integer_type_node);
  DECL_CONTEXT (cond) = current_function_decl;
  DECL_SEEN_IN_BIND_EXPR_P (cond) = 1;
  gimple_bind_set_vars (bind, cond);
  gimple_call_set_lhs (g, cond);
  gimple_seq_add_stmt (&seq, g);
  tree lab1 = create_artificial_label (UNKNOWN_LOCATION);
  tree lab2 = create_artificial_label (UNKNOWN_LOCATION);
  tree lab3 = create_artificial_label (UNKNOWN_LOCATION);
  g = gimple_build_cond (NE_EXPR, cond, integer_zero_node, lab1, lab2);
  gimple_seq_add_stmt (&seq, g);
  g = gimple_build_label (lab1);
  gimple_seq_add_stmt (&seq, g);
  /* Deep-copy the loop (with fresh local decls) for the SIMT arm and
     prepend an artificial _simt_ clause to mark it.  */
  gimple_seq new_seq = copy_gimple_seq_and_replace_locals (stmt);
  gomp_for *new_stmt = as_a <gomp_for *> (new_seq);
  tree clause = build_omp_clause (gimple_location (stmt), OMP_CLAUSE__SIMT_);
  OMP_CLAUSE_CHAIN (clause) = gimple_omp_for_clauses (new_stmt);
  gimple_omp_for_set_clauses (new_stmt, clause);
  gimple_seq_add_stmt (&seq, new_stmt);
  g = gimple_build_goto (lab3);
  gimple_seq_add_stmt (&seq, g);
  g = gimple_build_label (lab2);
  gimple_seq_add_stmt (&seq, g);
  /* The original statement becomes the plain-SIMD arm.  */
  gimple_seq_add_stmt (&seq, stmt);
  g = gimple_build_label (lab3);
  gimple_seq_add_stmt (&seq, g);
  gimple_bind_set_body (bind, seq);
  update_stmt (bind);
  /* Scan both copies; record the SIMT variant on the SIMD context.  */
  scan_omp_for (new_stmt, outer_ctx);
  scan_omp_for (stmt, outer_ctx)->simt_stmt = new_stmt;
}
2417
2418 /* Scan an OpenMP sections directive. */
2419
2420 static void
2421 scan_omp_sections (gomp_sections *stmt, omp_context *outer_ctx)
2422 {
2423 omp_context *ctx;
2424
2425 ctx = new_omp_context (stmt, outer_ctx);
2426 scan_sharing_clauses (gimple_omp_sections_clauses (stmt), ctx);
2427 scan_omp (gimple_omp_body_ptr (stmt), ctx);
2428 }
2429
2430 /* Scan an OpenMP single directive. */
2431
2432 static void
2433 scan_omp_single (gomp_single *stmt, omp_context *outer_ctx)
2434 {
2435 omp_context *ctx;
2436 tree name;
2437
2438 ctx = new_omp_context (stmt, outer_ctx);
2439 ctx->field_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
2440 ctx->record_type = lang_hooks.types.make_type (RECORD_TYPE);
2441 name = create_tmp_var_name (".omp_copy_s");
2442 name = build_decl (gimple_location (stmt),
2443 TYPE_DECL, name, ctx->record_type);
2444 TYPE_NAME (ctx->record_type) = name;
2445
2446 scan_sharing_clauses (gimple_omp_single_clauses (stmt), ctx);
2447 scan_omp (gimple_omp_body_ptr (stmt), ctx);
2448
2449 if (TYPE_FIELDS (ctx->record_type) == NULL)
2450 ctx->record_type = NULL;
2451 else
2452 layout_type (ctx->record_type);
2453 }
2454
/* Scan a GIMPLE_OMP_TARGET.  Builds the ".omp_data_t" record used to
   marshal mapped data; for offloaded kinds also creates the child
   function that will receive the region's body.  */

static void
scan_omp_target (gomp_target *stmt, omp_context *outer_ctx)
{
  omp_context *ctx;
  tree name;
  bool offloaded = is_gimple_omp_offloaded (stmt);
  tree clauses = gimple_omp_target_clauses (stmt);

  ctx = new_omp_context (stmt, outer_ctx);
  ctx->field_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
  ctx->record_type = lang_hooks.types.make_type (RECORD_TYPE);
  name = create_tmp_var_name (".omp_data_t");
  name = build_decl (gimple_location (stmt),
		     TYPE_DECL, name, ctx->record_type);
  DECL_ARTIFICIAL (name) = 1;
  DECL_NAMELESS (name) = 1;
  TYPE_NAME (ctx->record_type) = name;
  TYPE_ARTIFICIAL (ctx->record_type) = 1;

  /* Only regions whose body actually runs on a device get an outlined
     child function; data-only constructs (target data/update/enter/exit
     data) do not.  */
  if (offloaded)
    {
      create_omp_child_function (ctx, false);
      gimple_omp_target_set_child_fn (stmt, ctx->cb.dst_fn);
    }

  scan_sharing_clauses (clauses, ctx);
  scan_omp (gimple_omp_body_ptr (stmt), ctx);

  if (TYPE_FIELDS (ctx->record_type) == NULL)
    /* Nothing is mapped: drop the record and receiver decl.  */
    ctx->record_type = ctx->receiver_decl = NULL;
  else
    {
      /* Fields were prepended during scanning; restore map order.  */
      TYPE_FIELDS (ctx->record_type)
	= nreverse (TYPE_FIELDS (ctx->record_type));
      if (flag_checking)
	{
	  /* All mapped fields are expected to share one alignment.  */
	  unsigned int align = DECL_ALIGN (TYPE_FIELDS (ctx->record_type));
	  for (tree field = TYPE_FIELDS (ctx->record_type);
	       field;
	       field = DECL_CHAIN (field))
	    gcc_assert (DECL_ALIGN (field) == align);
	}
      layout_type (ctx->record_type);
      if (offloaded)
	fixup_child_record_type (ctx);
    }
}
2504
/* Scan an OpenMP teams directive.  Non-host teams are scanned in
   place; host teams are treated like a task/parallel region: they get
   an ".omp_data_s" record, an outlined child function, and are queued
   on taskreg_contexts for later layout in finish_taskreg_scan.  */

static void
scan_omp_teams (gomp_teams *stmt, omp_context *outer_ctx)
{
  omp_context *ctx = new_omp_context (stmt, outer_ctx);

  if (!gimple_omp_teams_host (stmt))
    {
      /* Device-side teams need no data record or child function.  */
      scan_sharing_clauses (gimple_omp_teams_clauses (stmt), ctx);
      scan_omp (gimple_omp_body_ptr (stmt), ctx);
      return;
    }
  taskreg_contexts.safe_push (ctx);
  /* Host teams may only appear outside of any other taskreg region.  */
  gcc_assert (taskreg_nesting_level == 1);
  ctx->field_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
  ctx->record_type = lang_hooks.types.make_type (RECORD_TYPE);
  tree name = create_tmp_var_name (".omp_data_s");
  name = build_decl (gimple_location (stmt),
		     TYPE_DECL, name, ctx->record_type);
  DECL_ARTIFICIAL (name) = 1;
  DECL_NAMELESS (name) = 1;
  TYPE_NAME (ctx->record_type) = name;
  TYPE_ARTIFICIAL (ctx->record_type) = 1;
  create_omp_child_function (ctx, false);
  gimple_omp_teams_set_child_fn (stmt, ctx->cb.dst_fn);

  scan_sharing_clauses (gimple_omp_teams_clauses (stmt), ctx);
  scan_omp (gimple_omp_body_ptr (stmt), ctx);

  /* No shared data at all: discard the record and receiver decl.  */
  if (TYPE_FIELDS (ctx->record_type) == NULL)
    ctx->record_type = ctx->receiver_decl = NULL;
}
2538
2539 /* Check nesting restrictions. */
2540 static bool
2541 check_omp_nesting_restrictions (gimple *stmt, omp_context *ctx)
2542 {
2543 tree c;
2544
2545 if (ctx && gimple_code (ctx->stmt) == GIMPLE_OMP_GRID_BODY)
2546 /* GRID_BODY is an artificial construct, nesting rules will be checked in
2547 the original copy of its contents. */
2548 return true;
2549
2550 /* No nesting of non-OpenACC STMT (that is, an OpenMP one, or a GOMP builtin)
2551 inside an OpenACC CTX. */
2552 if (!(is_gimple_omp (stmt)
2553 && is_gimple_omp_oacc (stmt))
2554 /* Except for atomic codes that we share with OpenMP. */
2555 && !(gimple_code (stmt) == GIMPLE_OMP_ATOMIC_LOAD
2556 || gimple_code (stmt) == GIMPLE_OMP_ATOMIC_STORE))
2557 {
2558 if (oacc_get_fn_attrib (cfun->decl) != NULL)
2559 {
2560 error_at (gimple_location (stmt),
2561 "non-OpenACC construct inside of OpenACC routine");
2562 return false;
2563 }
2564 else
2565 for (omp_context *octx = ctx; octx != NULL; octx = octx->outer)
2566 if (is_gimple_omp (octx->stmt)
2567 && is_gimple_omp_oacc (octx->stmt))
2568 {
2569 error_at (gimple_location (stmt),
2570 "non-OpenACC construct inside of OpenACC region");
2571 return false;
2572 }
2573 }
2574
2575 if (ctx != NULL)
2576 {
2577 if (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
2578 && gimple_omp_for_kind (ctx->stmt) & GF_OMP_FOR_SIMD)
2579 {
2580 c = NULL_TREE;
2581 if (gimple_code (stmt) == GIMPLE_OMP_ORDERED)
2582 {
2583 c = gimple_omp_ordered_clauses (as_a <gomp_ordered *> (stmt));
2584 if (omp_find_clause (c, OMP_CLAUSE_SIMD))
2585 {
2586 if (omp_find_clause (c, OMP_CLAUSE_THREADS)
2587 && (ctx->outer == NULL
2588 || !gimple_omp_for_combined_into_p (ctx->stmt)
2589 || gimple_code (ctx->outer->stmt) != GIMPLE_OMP_FOR
2590 || (gimple_omp_for_kind (ctx->outer->stmt)
2591 != GF_OMP_FOR_KIND_FOR)
2592 || !gimple_omp_for_combined_p (ctx->outer->stmt)))
2593 {
2594 error_at (gimple_location (stmt),
2595 "%<ordered simd threads%> must be closely "
2596 "nested inside of %<for simd%> region");
2597 return false;
2598 }
2599 return true;
2600 }
2601 }
2602 else if (gimple_code (stmt) == GIMPLE_OMP_ATOMIC_LOAD
2603 || gimple_code (stmt) == GIMPLE_OMP_ATOMIC_STORE)
2604 return true;
2605 error_at (gimple_location (stmt),
2606 "OpenMP constructs other than %<#pragma omp ordered simd%>"
2607 " or %<#pragma omp atomic%> may not be nested inside"
2608 " %<simd%> region");
2609 return false;
2610 }
2611 else if (gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS)
2612 {
2613 if ((gimple_code (stmt) != GIMPLE_OMP_FOR
2614 || ((gimple_omp_for_kind (stmt) != GF_OMP_FOR_KIND_DISTRIBUTE)
2615 && (gimple_omp_for_kind (stmt) != GF_OMP_FOR_KIND_GRID_LOOP)))
2616 && gimple_code (stmt) != GIMPLE_OMP_PARALLEL)
2617 {
2618 error_at (gimple_location (stmt),
2619 "only %<distribute%> or %<parallel%> regions are "
2620 "allowed to be strictly nested inside %<teams%> "
2621 "region");
2622 return false;
2623 }
2624 }
2625 }
2626 switch (gimple_code (stmt))
2627 {
2628 case GIMPLE_OMP_FOR:
2629 if (gimple_omp_for_kind (stmt) & GF_OMP_FOR_SIMD)
2630 return true;
2631 if (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_DISTRIBUTE)
2632 {
2633 if (ctx != NULL && gimple_code (ctx->stmt) != GIMPLE_OMP_TEAMS)
2634 {
2635 error_at (gimple_location (stmt),
2636 "%<distribute%> region must be strictly nested "
2637 "inside %<teams%> construct");
2638 return false;
2639 }
2640 return true;
2641 }
2642 /* We split taskloop into task and nested taskloop in it. */
2643 if (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_TASKLOOP)
2644 return true;
2645 if (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_OACC_LOOP)
2646 {
2647 bool ok = false;
2648
2649 if (ctx)
2650 switch (gimple_code (ctx->stmt))
2651 {
2652 case GIMPLE_OMP_FOR:
2653 ok = (gimple_omp_for_kind (ctx->stmt)
2654 == GF_OMP_FOR_KIND_OACC_LOOP);
2655 break;
2656
2657 case GIMPLE_OMP_TARGET:
2658 switch (gimple_omp_target_kind (ctx->stmt))
2659 {
2660 case GF_OMP_TARGET_KIND_OACC_PARALLEL:
2661 case GF_OMP_TARGET_KIND_OACC_KERNELS:
2662 ok = true;
2663 break;
2664
2665 default:
2666 break;
2667 }
2668
2669 default:
2670 break;
2671 }
2672 else if (oacc_get_fn_attrib (current_function_decl))
2673 ok = true;
2674 if (!ok)
2675 {
2676 error_at (gimple_location (stmt),
2677 "OpenACC loop directive must be associated with"
2678 " an OpenACC compute region");
2679 return false;
2680 }
2681 }
2682 /* FALLTHRU */
2683 case GIMPLE_CALL:
2684 if (is_gimple_call (stmt)
2685 && (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
2686 == BUILT_IN_GOMP_CANCEL
2687 || DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
2688 == BUILT_IN_GOMP_CANCELLATION_POINT))
2689 {
2690 const char *bad = NULL;
2691 const char *kind = NULL;
2692 const char *construct
2693 = (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
2694 == BUILT_IN_GOMP_CANCEL)
2695 ? "#pragma omp cancel"
2696 : "#pragma omp cancellation point";
2697 if (ctx == NULL)
2698 {
2699 error_at (gimple_location (stmt), "orphaned %qs construct",
2700 construct);
2701 return false;
2702 }
2703 switch (tree_fits_shwi_p (gimple_call_arg (stmt, 0))
2704 ? tree_to_shwi (gimple_call_arg (stmt, 0))
2705 : 0)
2706 {
2707 case 1:
2708 if (gimple_code (ctx->stmt) != GIMPLE_OMP_PARALLEL)
2709 bad = "#pragma omp parallel";
2710 else if (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
2711 == BUILT_IN_GOMP_CANCEL
2712 && !integer_zerop (gimple_call_arg (stmt, 1)))
2713 ctx->cancellable = true;
2714 kind = "parallel";
2715 break;
2716 case 2:
2717 if (gimple_code (ctx->stmt) != GIMPLE_OMP_FOR
2718 || gimple_omp_for_kind (ctx->stmt) != GF_OMP_FOR_KIND_FOR)
2719 bad = "#pragma omp for";
2720 else if (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
2721 == BUILT_IN_GOMP_CANCEL
2722 && !integer_zerop (gimple_call_arg (stmt, 1)))
2723 {
2724 ctx->cancellable = true;
2725 if (omp_find_clause (gimple_omp_for_clauses (ctx->stmt),
2726 OMP_CLAUSE_NOWAIT))
2727 warning_at (gimple_location (stmt), 0,
2728 "%<#pragma omp cancel for%> inside "
2729 "%<nowait%> for construct");
2730 if (omp_find_clause (gimple_omp_for_clauses (ctx->stmt),
2731 OMP_CLAUSE_ORDERED))
2732 warning_at (gimple_location (stmt), 0,
2733 "%<#pragma omp cancel for%> inside "
2734 "%<ordered%> for construct");
2735 }
2736 kind = "for";
2737 break;
2738 case 4:
2739 if (gimple_code (ctx->stmt) != GIMPLE_OMP_SECTIONS
2740 && gimple_code (ctx->stmt) != GIMPLE_OMP_SECTION)
2741 bad = "#pragma omp sections";
2742 else if (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
2743 == BUILT_IN_GOMP_CANCEL
2744 && !integer_zerop (gimple_call_arg (stmt, 1)))
2745 {
2746 if (gimple_code (ctx->stmt) == GIMPLE_OMP_SECTIONS)
2747 {
2748 ctx->cancellable = true;
2749 if (omp_find_clause (gimple_omp_sections_clauses
2750 (ctx->stmt),
2751 OMP_CLAUSE_NOWAIT))
2752 warning_at (gimple_location (stmt), 0,
2753 "%<#pragma omp cancel sections%> inside "
2754 "%<nowait%> sections construct");
2755 }
2756 else
2757 {
2758 gcc_assert (ctx->outer
2759 && gimple_code (ctx->outer->stmt)
2760 == GIMPLE_OMP_SECTIONS);
2761 ctx->outer->cancellable = true;
2762 if (omp_find_clause (gimple_omp_sections_clauses
2763 (ctx->outer->stmt),
2764 OMP_CLAUSE_NOWAIT))
2765 warning_at (gimple_location (stmt), 0,
2766 "%<#pragma omp cancel sections%> inside "
2767 "%<nowait%> sections construct");
2768 }
2769 }
2770 kind = "sections";
2771 break;
2772 case 8:
2773 if (!is_task_ctx (ctx)
2774 && (!is_taskloop_ctx (ctx)
2775 || ctx->outer == NULL
2776 || !is_task_ctx (ctx->outer)))
2777 bad = "#pragma omp task";
2778 else
2779 {
2780 for (omp_context *octx = ctx->outer;
2781 octx; octx = octx->outer)
2782 {
2783 switch (gimple_code (octx->stmt))
2784 {
2785 case GIMPLE_OMP_TASKGROUP:
2786 break;
2787 case GIMPLE_OMP_TARGET:
2788 if (gimple_omp_target_kind (octx->stmt)
2789 != GF_OMP_TARGET_KIND_REGION)
2790 continue;
2791 /* FALLTHRU */
2792 case GIMPLE_OMP_PARALLEL:
2793 case GIMPLE_OMP_TEAMS:
2794 error_at (gimple_location (stmt),
2795 "%<%s taskgroup%> construct not closely "
2796 "nested inside of %<taskgroup%> region",
2797 construct);
2798 return false;
2799 case GIMPLE_OMP_TASK:
2800 if (gimple_omp_task_taskloop_p (octx->stmt)
2801 && octx->outer
2802 && is_taskloop_ctx (octx->outer))
2803 {
2804 tree clauses
2805 = gimple_omp_for_clauses (octx->outer->stmt);
2806 if (!omp_find_clause (clauses, OMP_CLAUSE_NOGROUP))
2807 break;
2808 }
2809 continue;
2810 default:
2811 continue;
2812 }
2813 break;
2814 }
2815 ctx->cancellable = true;
2816 }
2817 kind = "taskgroup";
2818 break;
2819 default:
2820 error_at (gimple_location (stmt), "invalid arguments");
2821 return false;
2822 }
2823 if (bad)
2824 {
2825 error_at (gimple_location (stmt),
2826 "%<%s %s%> construct not closely nested inside of %qs",
2827 construct, kind, bad);
2828 return false;
2829 }
2830 }
2831 /* FALLTHRU */
2832 case GIMPLE_OMP_SECTIONS:
2833 case GIMPLE_OMP_SINGLE:
2834 for (; ctx != NULL; ctx = ctx->outer)
2835 switch (gimple_code (ctx->stmt))
2836 {
2837 case GIMPLE_OMP_FOR:
2838 if (gimple_omp_for_kind (ctx->stmt) != GF_OMP_FOR_KIND_FOR
2839 && gimple_omp_for_kind (ctx->stmt) != GF_OMP_FOR_KIND_TASKLOOP)
2840 break;
2841 /* FALLTHRU */
2842 case GIMPLE_OMP_SECTIONS:
2843 case GIMPLE_OMP_SINGLE:
2844 case GIMPLE_OMP_ORDERED:
2845 case GIMPLE_OMP_MASTER:
2846 case GIMPLE_OMP_TASK:
2847 case GIMPLE_OMP_CRITICAL:
2848 if (is_gimple_call (stmt))
2849 {
2850 if (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
2851 != BUILT_IN_GOMP_BARRIER)
2852 return true;
2853 error_at (gimple_location (stmt),
2854 "barrier region may not be closely nested inside "
2855 "of work-sharing, %<critical%>, %<ordered%>, "
2856 "%<master%>, explicit %<task%> or %<taskloop%> "
2857 "region");
2858 return false;
2859 }
2860 error_at (gimple_location (stmt),
2861 "work-sharing region may not be closely nested inside "
2862 "of work-sharing, %<critical%>, %<ordered%>, "
2863 "%<master%>, explicit %<task%> or %<taskloop%> region");
2864 return false;
2865 case GIMPLE_OMP_PARALLEL:
2866 case GIMPLE_OMP_TEAMS:
2867 return true;
2868 case GIMPLE_OMP_TARGET:
2869 if (gimple_omp_target_kind (ctx->stmt)
2870 == GF_OMP_TARGET_KIND_REGION)
2871 return true;
2872 break;
2873 default:
2874 break;
2875 }
2876 break;
2877 case GIMPLE_OMP_MASTER:
2878 for (; ctx != NULL; ctx = ctx->outer)
2879 switch (gimple_code (ctx->stmt))
2880 {
2881 case GIMPLE_OMP_FOR:
2882 if (gimple_omp_for_kind (ctx->stmt) != GF_OMP_FOR_KIND_FOR
2883 && gimple_omp_for_kind (ctx->stmt) != GF_OMP_FOR_KIND_TASKLOOP)
2884 break;
2885 /* FALLTHRU */
2886 case GIMPLE_OMP_SECTIONS:
2887 case GIMPLE_OMP_SINGLE:
2888 case GIMPLE_OMP_TASK:
2889 error_at (gimple_location (stmt),
2890 "%<master%> region may not be closely nested inside "
2891 "of work-sharing, explicit %<task%> or %<taskloop%> "
2892 "region");
2893 return false;
2894 case GIMPLE_OMP_PARALLEL:
2895 case GIMPLE_OMP_TEAMS:
2896 return true;
2897 case GIMPLE_OMP_TARGET:
2898 if (gimple_omp_target_kind (ctx->stmt)
2899 == GF_OMP_TARGET_KIND_REGION)
2900 return true;
2901 break;
2902 default:
2903 break;
2904 }
2905 break;
2906 case GIMPLE_OMP_TASK:
2907 for (c = gimple_omp_task_clauses (stmt); c; c = OMP_CLAUSE_CHAIN (c))
2908 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND
2909 && (OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SOURCE
2910 || OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SINK))
2911 {
2912 enum omp_clause_depend_kind kind = OMP_CLAUSE_DEPEND_KIND (c);
2913 error_at (OMP_CLAUSE_LOCATION (c),
2914 "%<depend(%s)%> is only allowed in %<omp ordered%>",
2915 kind == OMP_CLAUSE_DEPEND_SOURCE ? "source" : "sink");
2916 return false;
2917 }
2918 break;
2919 case GIMPLE_OMP_ORDERED:
2920 for (c = gimple_omp_ordered_clauses (as_a <gomp_ordered *> (stmt));
2921 c; c = OMP_CLAUSE_CHAIN (c))
2922 {
2923 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_DEPEND)
2924 {
2925 gcc_assert (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_THREADS
2926 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SIMD);
2927 continue;
2928 }
2929 enum omp_clause_depend_kind kind = OMP_CLAUSE_DEPEND_KIND (c);
2930 if (kind == OMP_CLAUSE_DEPEND_SOURCE
2931 || kind == OMP_CLAUSE_DEPEND_SINK)
2932 {
2933 tree oclause;
2934 /* Look for containing ordered(N) loop. */
2935 if (ctx == NULL
2936 || gimple_code (ctx->stmt) != GIMPLE_OMP_FOR
2937 || (oclause
2938 = omp_find_clause (gimple_omp_for_clauses (ctx->stmt),
2939 OMP_CLAUSE_ORDERED)) == NULL_TREE)
2940 {
2941 error_at (OMP_CLAUSE_LOCATION (c),
2942 "%<ordered%> construct with %<depend%> clause "
2943 "must be closely nested inside an %<ordered%> "
2944 "loop");
2945 return false;
2946 }
2947 else if (OMP_CLAUSE_ORDERED_EXPR (oclause) == NULL_TREE)
2948 {
2949 error_at (OMP_CLAUSE_LOCATION (c),
2950 "%<ordered%> construct with %<depend%> clause "
2951 "must be closely nested inside a loop with "
2952 "%<ordered%> clause with a parameter");
2953 return false;
2954 }
2955 }
2956 else
2957 {
2958 error_at (OMP_CLAUSE_LOCATION (c),
2959 "invalid depend kind in omp %<ordered%> %<depend%>");
2960 return false;
2961 }
2962 }
2963 c = gimple_omp_ordered_clauses (as_a <gomp_ordered *> (stmt));
2964 if (omp_find_clause (c, OMP_CLAUSE_SIMD))
2965 {
2966 /* ordered simd must be closely nested inside of simd region,
2967 and simd region must not encounter constructs other than
2968 ordered simd, therefore ordered simd may be either orphaned,
2969 or ctx->stmt must be simd. The latter case is handled already
2970 earlier. */
2971 if (ctx != NULL)
2972 {
2973 error_at (gimple_location (stmt),
2974 "%<ordered%> %<simd%> must be closely nested inside "
2975 "%<simd%> region");
2976 return false;
2977 }
2978 }
2979 for (; ctx != NULL; ctx = ctx->outer)
2980 switch (gimple_code (ctx->stmt))
2981 {
2982 case GIMPLE_OMP_CRITICAL:
2983 case GIMPLE_OMP_TASK:
2984 case GIMPLE_OMP_ORDERED:
2985 ordered_in_taskloop:
2986 error_at (gimple_location (stmt),
2987 "%<ordered%> region may not be closely nested inside "
2988 "of %<critical%>, %<ordered%>, explicit %<task%> or "
2989 "%<taskloop%> region");
2990 return false;
2991 case GIMPLE_OMP_FOR:
2992 if (gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_TASKLOOP)
2993 goto ordered_in_taskloop;
2994 tree o;
2995 o = omp_find_clause (gimple_omp_for_clauses (ctx->stmt),
2996 OMP_CLAUSE_ORDERED);
2997 if (o == NULL)
2998 {
2999 error_at (gimple_location (stmt),
3000 "%<ordered%> region must be closely nested inside "
3001 "a loop region with an %<ordered%> clause");
3002 return false;
3003 }
3004 if (OMP_CLAUSE_ORDERED_EXPR (o) != NULL_TREE
3005 && omp_find_clause (c, OMP_CLAUSE_DEPEND) == NULL_TREE)
3006 {
3007 error_at (gimple_location (stmt),
3008 "%<ordered%> region without %<depend%> clause may "
3009 "not be closely nested inside a loop region with "
3010 "an %<ordered%> clause with a parameter");
3011 return false;
3012 }
3013 return true;
3014 case GIMPLE_OMP_TARGET:
3015 if (gimple_omp_target_kind (ctx->stmt)
3016 != GF_OMP_TARGET_KIND_REGION)
3017 break;
3018 /* FALLTHRU */
3019 case GIMPLE_OMP_PARALLEL:
3020 case GIMPLE_OMP_TEAMS:
3021 error_at (gimple_location (stmt),
3022 "%<ordered%> region must be closely nested inside "
3023 "a loop region with an %<ordered%> clause");
3024 return false;
3025 default:
3026 break;
3027 }
3028 break;
3029 case GIMPLE_OMP_CRITICAL:
3030 {
3031 tree this_stmt_name
3032 = gimple_omp_critical_name (as_a <gomp_critical *> (stmt));
3033 for (; ctx != NULL; ctx = ctx->outer)
3034 if (gomp_critical *other_crit
3035 = dyn_cast <gomp_critical *> (ctx->stmt))
3036 if (this_stmt_name == gimple_omp_critical_name (other_crit))
3037 {
3038 error_at (gimple_location (stmt),
3039 "%<critical%> region may not be nested inside "
3040 "a %<critical%> region with the same name");
3041 return false;
3042 }
3043 }
3044 break;
3045 case GIMPLE_OMP_TEAMS:
3046 if (ctx == NULL)
3047 break;
3048 else if (gimple_code (ctx->stmt) != GIMPLE_OMP_TARGET
3049 || (gimple_omp_target_kind (ctx->stmt)
3050 != GF_OMP_TARGET_KIND_REGION))
3051 {
	  /* Teams construct can appear either strictly nested inside of
	     target construct with no intervening stmts, or can be encountered
	     only by initial task (so must not appear inside any OpenMP
	     construct).  */
3056 error_at (gimple_location (stmt),
3057 "%<teams%> construct must be closely nested inside of "
3058 "%<target%> construct or not nested in any OpenMP "
3059 "construct");
3060 return false;
3061 }
3062 break;
3063 case GIMPLE_OMP_TARGET:
3064 for (c = gimple_omp_target_clauses (stmt); c; c = OMP_CLAUSE_CHAIN (c))
3065 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND
3066 && (OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SOURCE
3067 || OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SINK))
3068 {
3069 enum omp_clause_depend_kind kind = OMP_CLAUSE_DEPEND_KIND (c);
3070 error_at (OMP_CLAUSE_LOCATION (c),
3071 "%<depend(%s)%> is only allowed in %<omp ordered%>",
3072 kind == OMP_CLAUSE_DEPEND_SOURCE ? "source" : "sink");
3073 return false;
3074 }
3075 if (is_gimple_omp_offloaded (stmt)
3076 && oacc_get_fn_attrib (cfun->decl) != NULL)
3077 {
3078 error_at (gimple_location (stmt),
3079 "OpenACC region inside of OpenACC routine, nested "
3080 "parallelism not supported yet");
3081 return false;
3082 }
3083 for (; ctx != NULL; ctx = ctx->outer)
3084 {
3085 if (gimple_code (ctx->stmt) != GIMPLE_OMP_TARGET)
3086 {
3087 if (is_gimple_omp (stmt)
3088 && is_gimple_omp_oacc (stmt)
3089 && is_gimple_omp (ctx->stmt))
3090 {
3091 error_at (gimple_location (stmt),
3092 "OpenACC construct inside of non-OpenACC region");
3093 return false;
3094 }
3095 continue;
3096 }
3097
3098 const char *stmt_name, *ctx_stmt_name;
3099 switch (gimple_omp_target_kind (stmt))
3100 {
3101 case GF_OMP_TARGET_KIND_REGION: stmt_name = "target"; break;
3102 case GF_OMP_TARGET_KIND_DATA: stmt_name = "target data"; break;
3103 case GF_OMP_TARGET_KIND_UPDATE: stmt_name = "target update"; break;
3104 case GF_OMP_TARGET_KIND_ENTER_DATA:
3105 stmt_name = "target enter data"; break;
3106 case GF_OMP_TARGET_KIND_EXIT_DATA:
3107 stmt_name = "target exit data"; break;
3108 case GF_OMP_TARGET_KIND_OACC_PARALLEL: stmt_name = "parallel"; break;
3109 case GF_OMP_TARGET_KIND_OACC_KERNELS: stmt_name = "kernels"; break;
3110 case GF_OMP_TARGET_KIND_OACC_DATA: stmt_name = "data"; break;
3111 case GF_OMP_TARGET_KIND_OACC_UPDATE: stmt_name = "update"; break;
3112 case GF_OMP_TARGET_KIND_OACC_ENTER_EXIT_DATA:
3113 stmt_name = "enter/exit data"; break;
3114 case GF_OMP_TARGET_KIND_OACC_HOST_DATA: stmt_name = "host_data";
3115 break;
3116 default: gcc_unreachable ();
3117 }
3118 switch (gimple_omp_target_kind (ctx->stmt))
3119 {
3120 case GF_OMP_TARGET_KIND_REGION: ctx_stmt_name = "target"; break;
3121 case GF_OMP_TARGET_KIND_DATA: ctx_stmt_name = "target data"; break;
3122 case GF_OMP_TARGET_KIND_OACC_PARALLEL:
3123 ctx_stmt_name = "parallel"; break;
3124 case GF_OMP_TARGET_KIND_OACC_KERNELS:
3125 ctx_stmt_name = "kernels"; break;
3126 case GF_OMP_TARGET_KIND_OACC_DATA: ctx_stmt_name = "data"; break;
3127 case GF_OMP_TARGET_KIND_OACC_HOST_DATA:
3128 ctx_stmt_name = "host_data"; break;
3129 default: gcc_unreachable ();
3130 }
3131
3132 /* OpenACC/OpenMP mismatch? */
3133 if (is_gimple_omp_oacc (stmt)
3134 != is_gimple_omp_oacc (ctx->stmt))
3135 {
3136 error_at (gimple_location (stmt),
3137 "%s %qs construct inside of %s %qs region",
3138 (is_gimple_omp_oacc (stmt)
3139 ? "OpenACC" : "OpenMP"), stmt_name,
3140 (is_gimple_omp_oacc (ctx->stmt)
3141 ? "OpenACC" : "OpenMP"), ctx_stmt_name);
3142 return false;
3143 }
3144 if (is_gimple_omp_offloaded (ctx->stmt))
3145 {
3146 /* No GIMPLE_OMP_TARGET inside offloaded OpenACC CTX. */
3147 if (is_gimple_omp_oacc (ctx->stmt))
3148 {
3149 error_at (gimple_location (stmt),
3150 "%qs construct inside of %qs region",
3151 stmt_name, ctx_stmt_name);
3152 return false;
3153 }
3154 else
3155 {
3156 warning_at (gimple_location (stmt), 0,
3157 "%qs construct inside of %qs region",
3158 stmt_name, ctx_stmt_name);
3159 }
3160 }
3161 }
3162 break;
3163 default:
3164 break;
3165 }
3166 return true;
3167 }
3168
3169
3170 /* Helper function scan_omp.
3171
3172 Callback for walk_tree or operators in walk_gimple_stmt used to
3173 scan for OMP directives in TP. */
3174
static tree
scan_omp_1_op (tree *tp, int *walk_subtrees, void *data)
{
  struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
  omp_context *ctx = (omp_context *) wi->info;
  tree t = *tp;

  switch (TREE_CODE (t))
    {
    case VAR_DECL:
    case PARM_DECL:
    case LABEL_DECL:
    case RESULT_DECL:
      if (ctx)
	{
	  /* Replace the decl with its remapped copy from the enclosing
	     OMP context's copy_body_data.  */
	  tree repl = remap_decl (t, &ctx->cb);
	  gcc_checking_assert (TREE_CODE (repl) != ERROR_MARK);
	  *tp = repl;
	}
      break;

    default:
      if (ctx && TYPE_P (t))
	*tp = remap_type (t, &ctx->cb);
      else if (!DECL_P (t))
	{
	  *walk_subtrees = 1;
	  if (ctx)
	    {
	      /* If the operand's type was remapped, update the operand:
		 for an INTEGER_CST build a fresh constant in the new
		 type rather than mutating the node in place; otherwise
		 just rewrite TREE_TYPE.  */
	      tree tem = remap_type (TREE_TYPE (t), &ctx->cb);
	      if (tem != TREE_TYPE (t))
		{
		  if (TREE_CODE (t) == INTEGER_CST)
		    *tp = wide_int_to_tree (tem, wi::to_wide (t));
		  else
		    TREE_TYPE (t) = tem;
		}
	    }
	}
      break;
    }

  return NULL_TREE;
}
3219
3220 /* Return true if FNDECL is a setjmp or a longjmp. */
3221
3222 static bool
3223 setjmp_or_longjmp_p (const_tree fndecl)
3224 {
3225 if (fndecl_built_in_p (fndecl, BUILT_IN_SETJMP)
3226 || fndecl_built_in_p (fndecl, BUILT_IN_LONGJMP))
3227 return true;
3228
3229 tree declname = DECL_NAME (fndecl);
3230 if (!declname)
3231 return false;
3232 const char *name = IDENTIFIER_POINTER (declname);
3233 return !strcmp (name, "setjmp") || !strcmp (name, "longjmp");
3234 }
3235
3236
3237 /* Helper function for scan_omp.
3238
3239 Callback for walk_gimple_stmt used to scan for OMP directives in
3240 the current statement in GSI. */
3241
static tree
scan_omp_1_stmt (gimple_stmt_iterator *gsi, bool *handled_ops_p,
		 struct walk_stmt_info *wi)
{
  gimple *stmt = gsi_stmt (*gsi);
  omp_context *ctx = (omp_context *) wi->info;

  if (gimple_has_location (stmt))
    input_location = gimple_location (stmt);

  /* Check the nesting restrictions.  */
  bool remove = false;
  if (is_gimple_omp (stmt))
    remove = !check_omp_nesting_restrictions (stmt, ctx);
  else if (is_gimple_call (stmt))
    {
      tree fndecl = gimple_call_fndecl (stmt);
      if (fndecl)
	{
	  /* setjmp/longjmp may not appear inside a simd region.  */
	  if (setjmp_or_longjmp_p (fndecl)
	      && ctx
	      && gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
	      && gimple_omp_for_kind (ctx->stmt) & GF_OMP_FOR_SIMD)
	    {
	      remove = true;
	      error_at (gimple_location (stmt),
			"setjmp/longjmp inside simd construct");
	    }
	  /* These GOMP builtins stand in for OMP directives and are
	     subject to the same nesting checks.  */
	  else if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
	    switch (DECL_FUNCTION_CODE (fndecl))
	      {
	      case BUILT_IN_GOMP_BARRIER:
	      case BUILT_IN_GOMP_CANCEL:
	      case BUILT_IN_GOMP_CANCELLATION_POINT:
	      case BUILT_IN_GOMP_TASKYIELD:
	      case BUILT_IN_GOMP_TASKWAIT:
	      case BUILT_IN_GOMP_TASKGROUP_START:
	      case BUILT_IN_GOMP_TASKGROUP_END:
		remove = !check_omp_nesting_restrictions (stmt, ctx);
		break;
	      default:
		break;
	      }
	}
    }
  if (remove)
    {
      /* After diagnosing a nesting violation, neuter the statement by
	 replacing it with a no-op.  */
      stmt = gimple_build_nop ();
      gsi_replace (gsi, stmt, false);
    }

  *handled_ops_p = true;

  switch (gimple_code (stmt))
    {
    case GIMPLE_OMP_PARALLEL:
      taskreg_nesting_level++;
      scan_omp_parallel (gsi, ctx);
      taskreg_nesting_level--;
      break;

    case GIMPLE_OMP_TASK:
      taskreg_nesting_level++;
      scan_omp_task (gsi, ctx);
      taskreg_nesting_level--;
      break;

    case GIMPLE_OMP_FOR:
      /* A simd loop in a possibly-offloaded context with SIMT support
	 gets the special SIMT scanning path.  */
      if (((gimple_omp_for_kind (as_a <gomp_for *> (stmt))
	    & GF_OMP_FOR_KIND_MASK) == GF_OMP_FOR_KIND_SIMD)
	  && omp_maybe_offloaded_ctx (ctx)
	  && omp_max_simt_vf ())
	scan_omp_simd (gsi, as_a <gomp_for *> (stmt), ctx);
      else
	scan_omp_for (as_a <gomp_for *> (stmt), ctx);
      break;

    case GIMPLE_OMP_SECTIONS:
      scan_omp_sections (as_a <gomp_sections *> (stmt), ctx);
      break;

    case GIMPLE_OMP_SINGLE:
      scan_omp_single (as_a <gomp_single *> (stmt), ctx);
      break;

    case GIMPLE_OMP_SECTION:
    case GIMPLE_OMP_MASTER:
    case GIMPLE_OMP_ORDERED:
    case GIMPLE_OMP_CRITICAL:
    case GIMPLE_OMP_GRID_BODY:
      /* These constructs just get a fresh context and have their
	 bodies scanned recursively.  */
      ctx = new_omp_context (stmt, ctx);
      scan_omp (gimple_omp_body_ptr (stmt), ctx);
      break;

    case GIMPLE_OMP_TASKGROUP:
      /* Like the above, but taskgroup clauses are scanned too.  */
      ctx = new_omp_context (stmt, ctx);
      scan_sharing_clauses (gimple_omp_taskgroup_clauses (stmt), ctx);
      scan_omp (gimple_omp_body_ptr (stmt), ctx);
      break;

    case GIMPLE_OMP_TARGET:
      scan_omp_target (as_a <gomp_target *> (stmt), ctx);
      break;

    case GIMPLE_OMP_TEAMS:
      if (gimple_omp_teams_host (as_a <gomp_teams *> (stmt)))
	{
	  /* Host teams constructs count towards taskreg nesting.  */
	  taskreg_nesting_level++;
	  scan_omp_teams (as_a <gomp_teams *> (stmt), ctx);
	  taskreg_nesting_level--;
	}
      else
	scan_omp_teams (as_a <gomp_teams *> (stmt), ctx);
      break;

    case GIMPLE_BIND:
      {
	tree var;

	/* Map each bind-local variable to itself so remapping during
	   the walk leaves these decls untouched.  */
	*handled_ops_p = false;
	if (ctx)
	  for (var = gimple_bind_vars (as_a <gbind *> (stmt));
	       var ;
	       var = DECL_CHAIN (var))
	    insert_decl_map (&ctx->cb, var, var);
      }
      break;
    default:
      *handled_ops_p = false;
      break;
    }

  return NULL_TREE;
}
3376
3377
3378 /* Scan all the statements starting at the current statement. CTX
3379 contains context information about the OMP directives and
3380 clauses found during the scan. */
3381
3382 static void
3383 scan_omp (gimple_seq *body_p, omp_context *ctx)
3384 {
3385 location_t saved_location;
3386 struct walk_stmt_info wi;
3387
3388 memset (&wi, 0, sizeof (wi));
3389 wi.info = ctx;
3390 wi.want_locations = true;
3391
3392 saved_location = input_location;
3393 walk_gimple_seq_mod (body_p, scan_omp_1_stmt, scan_omp_1_op, &wi);
3394 input_location = saved_location;
3395 }
3396 \f
3397 /* Re-gimplification and code generation routines. */
3398
3399 /* Remove omp_member_access_dummy_var variables from gimple_bind_vars
3400 of BIND if in a method. */
3401
3402 static void
3403 maybe_remove_omp_member_access_dummy_vars (gbind *bind)
3404 {
3405 if (DECL_ARGUMENTS (current_function_decl)
3406 && DECL_ARTIFICIAL (DECL_ARGUMENTS (current_function_decl))
3407 && (TREE_CODE (TREE_TYPE (DECL_ARGUMENTS (current_function_decl)))
3408 == POINTER_TYPE))
3409 {
3410 tree vars = gimple_bind_vars (bind);
3411 for (tree *pvar = &vars; *pvar; )
3412 if (omp_member_access_dummy_var (*pvar))
3413 *pvar = DECL_CHAIN (*pvar);
3414 else
3415 pvar = &DECL_CHAIN (*pvar);
3416 gimple_bind_set_vars (bind, vars);
3417 }
3418 }
3419
3420 /* Remove omp_member_access_dummy_var variables from BLOCK_VARS of
3421 block and its subblocks. */
3422
3423 static void
3424 remove_member_access_dummy_vars (tree block)
3425 {
3426 for (tree *pvar = &BLOCK_VARS (block); *pvar; )
3427 if (omp_member_access_dummy_var (*pvar))
3428 *pvar = DECL_CHAIN (*pvar);
3429 else
3430 pvar = &DECL_CHAIN (*pvar);
3431
3432 for (block = BLOCK_SUBBLOCKS (block); block; block = BLOCK_CHAIN (block))
3433 remove_member_access_dummy_vars (block);
3434 }
3435
3436 /* If a context was created for STMT when it was scanned, return it. */
3437
3438 static omp_context *
3439 maybe_lookup_ctx (gimple *stmt)
3440 {
3441 splay_tree_node n;
3442 n = splay_tree_lookup (all_contexts, (splay_tree_key) stmt);
3443 return n ? (omp_context *) n->value : NULL;
3444 }
3445
3446
3447 /* Find the mapping for DECL in CTX or the immediately enclosing
3448 context that has a mapping for DECL.
3449
3450 If CTX is a nested parallel directive, we may have to use the decl
3451 mappings created in CTX's parent context. Suppose that we have the
3452 following parallel nesting (variable UIDs showed for clarity):
3453
3454 iD.1562 = 0;
3455 #omp parallel shared(iD.1562) -> outer parallel
3456 iD.1562 = iD.1562 + 1;
3457
3458 #omp parallel shared (iD.1562) -> inner parallel
3459 iD.1562 = iD.1562 - 1;
3460
3461 Each parallel structure will create a distinct .omp_data_s structure
3462 for copying iD.1562 in/out of the directive:
3463
3464 outer parallel .omp_data_s.1.i -> iD.1562
3465 inner parallel .omp_data_s.2.i -> iD.1562
3466
3467 A shared variable mapping will produce a copy-out operation before
3468 the parallel directive and a copy-in operation after it. So, in
3469 this case we would have:
3470
3471 iD.1562 = 0;
3472 .omp_data_o.1.i = iD.1562;
3473 #omp parallel shared(iD.1562) -> outer parallel
3474 .omp_data_i.1 = &.omp_data_o.1
3475 .omp_data_i.1->i = .omp_data_i.1->i + 1;
3476
3477 .omp_data_o.2.i = iD.1562; -> **
3478 #omp parallel shared(iD.1562) -> inner parallel
3479 .omp_data_i.2 = &.omp_data_o.2
3480 .omp_data_i.2->i = .omp_data_i.2->i - 1;
3481
3482
3483 ** This is a problem. The symbol iD.1562 cannot be referenced
3484 inside the body of the outer parallel region. But since we are
3485 emitting this copy operation while expanding the inner parallel
3486 directive, we need to access the CTX structure of the outer
3487 parallel directive to get the correct mapping:
3488
3489 .omp_data_o.2.i = .omp_data_i.1->i
3490
3491 Since there may be other workshare or parallel directives enclosing
3492 the parallel directive, it may be necessary to walk up the context
3493 parent chain. This is not a problem in general because nested
3494 parallelism happens only rarely. */
3495
3496 static tree
3497 lookup_decl_in_outer_ctx (tree decl, omp_context *ctx)
3498 {
3499 tree t;
3500 omp_context *up;
3501
3502 for (up = ctx->outer, t = NULL; up && t == NULL; up = up->outer)
3503 t = maybe_lookup_decl (decl, up);
3504
3505 gcc_assert (!ctx->is_nested || t || is_global_var (decl));
3506
3507 return t ? t : decl;
3508 }
3509
3510
3511 /* Similar to lookup_decl_in_outer_ctx, but return DECL if not found
3512 in outer contexts. */
3513
3514 static tree
3515 maybe_lookup_decl_in_outer_ctx (tree decl, omp_context *ctx)
3516 {
3517 tree t = NULL;
3518 omp_context *up;
3519
3520 for (up = ctx->outer, t = NULL; up && t == NULL; up = up->outer)
3521 t = maybe_lookup_decl (decl, up);
3522
3523 return t ? t : decl;
3524 }
3525
3526
3527 /* Construct the initialization value for reduction operation OP. */
3528
tree
omp_reduction_init_op (location_t loc, enum tree_code op, tree type)
{
  switch (op)
    {
    /* Operations whose identity element is zero.  */
    case PLUS_EXPR:
    case MINUS_EXPR:
    case BIT_IOR_EXPR:
    case BIT_XOR_EXPR:
    case TRUTH_OR_EXPR:
    case TRUTH_ORIF_EXPR:
    case TRUTH_XOR_EXPR:
    case NE_EXPR:
      return build_zero_cst (type);

    /* Operations whose identity element is one.  */
    case MULT_EXPR:
    case TRUTH_AND_EXPR:
    case TRUTH_ANDIF_EXPR:
    case EQ_EXPR:
      return fold_convert_loc (loc, type, integer_one_node);

    /* Bitwise AND: identity is all-ones.  */
    case BIT_AND_EXPR:
      return fold_convert_loc (loc, type, integer_minus_one_node);

    /* MAX: identity is the minimum representable value (or -inf for
       floats honoring infinities).  */
    case MAX_EXPR:
      if (SCALAR_FLOAT_TYPE_P (type))
	{
	  REAL_VALUE_TYPE max, min;
	  if (HONOR_INFINITIES (type))
	    {
	      real_inf (&max);
	      real_arithmetic (&min, NEGATE_EXPR, &max, NULL);
	    }
	  else
	    real_maxval (&min, 1, TYPE_MODE (type));
	  return build_real (type, min);
	}
      else if (POINTER_TYPE_P (type))
	{
	  wide_int min
	    = wi::min_value (TYPE_PRECISION (type), TYPE_SIGN (type));
	  return wide_int_to_tree (type, min);
	}
      else
	{
	  gcc_assert (INTEGRAL_TYPE_P (type));
	  return TYPE_MIN_VALUE (type);
	}

    /* MIN: identity is the maximum representable value (or +inf for
       floats honoring infinities).  */
    case MIN_EXPR:
      if (SCALAR_FLOAT_TYPE_P (type))
	{
	  REAL_VALUE_TYPE max;
	  if (HONOR_INFINITIES (type))
	    real_inf (&max);
	  else
	    real_maxval (&max, 0, TYPE_MODE (type));
	  return build_real (type, max);
	}
      else if (POINTER_TYPE_P (type))
	{
	  wide_int max
	    = wi::max_value (TYPE_PRECISION (type), TYPE_SIGN (type));
	  return wide_int_to_tree (type, max);
	}
      else
	{
	  gcc_assert (INTEGRAL_TYPE_P (type));
	  return TYPE_MAX_VALUE (type);
	}

    default:
      gcc_unreachable ();
    }
}
3604
3605 /* Construct the initialization value for reduction CLAUSE. */
3606
3607 tree
3608 omp_reduction_init (tree clause, tree type)
3609 {
3610 return omp_reduction_init_op (OMP_CLAUSE_LOCATION (clause),
3611 OMP_CLAUSE_REDUCTION_CODE (clause), type);
3612 }
3613
3614 /* Return alignment to be assumed for var in CLAUSE, which should be
3615 OMP_CLAUSE_ALIGNED. */
3616
static tree
omp_clause_aligned_alignment (tree clause)
{
  /* An alignment given explicitly in the clause wins.  */
  if (OMP_CLAUSE_ALIGNED_ALIGNMENT (clause))
    return OMP_CLAUSE_ALIGNED_ALIGNMENT (clause);

  /* Otherwise return implementation defined alignment.  */
  unsigned int al = 1;
  opt_scalar_mode mode_iter;
  auto_vector_sizes sizes;
  targetm.vectorize.autovectorize_vector_sizes (&sizes, true);
  /* VS is the largest of the target's autovectorization sizes.  */
  poly_uint64 vs = 0;
  for (unsigned int i = 0; i < sizes.length (); ++i)
    vs = ordered_max (vs, sizes[i]);
  /* Scalar/vector class pairs considered below: integer and float.  */
  static enum mode_class classes[]
    = { MODE_INT, MODE_VECTOR_INT, MODE_FLOAT, MODE_VECTOR_FLOAT };
  for (int i = 0; i < 4; i += 2)
    /* The for loop above dictates that we only walk through scalar classes.  */
    FOR_EACH_MODE_IN_CLASS (mode_iter, classes[i])
      {
	scalar_mode mode = mode_iter.require ();
	machine_mode vmode = targetm.vectorize.preferred_simd_mode (mode);
	if (GET_MODE_CLASS (vmode) != classes[i + 1])
	  continue;
	/* Widen the vector mode up to the largest autovectorizable
	   size, when one is known.  */
	while (maybe_ne (vs, 0U)
	       && known_lt (GET_MODE_SIZE (vmode), vs)
	       && GET_MODE_2XWIDER_MODE (vmode).exists ())
	  vmode = GET_MODE_2XWIDER_MODE (vmode).require ();

	/* Build the corresponding vector type; skip modes the
	   front end cannot represent exactly.  */
	tree type = lang_hooks.types.type_for_mode (mode, 1);
	if (type == NULL_TREE || TYPE_MODE (type) != mode)
	  continue;
	poly_uint64 nelts = exact_div (GET_MODE_SIZE (vmode),
				       GET_MODE_SIZE (mode));
	type = build_vector_type (type, nelts);
	if (TYPE_MODE (type) != vmode)
	  continue;
	/* Track the largest unit alignment seen.  */
	if (TYPE_ALIGN_UNIT (type) > al)
	  al = TYPE_ALIGN_UNIT (type);
      }
  return build_int_cst (integer_type_node, al);
}
3659
3660
3661 /* This structure is part of the interface between lower_rec_simd_input_clauses
3662 and lower_rec_input_clauses. */
3663
struct omplow_simd_context {
  /* Zero-initialize every member.  */
  omplow_simd_context () { memset (this, 0, sizeof (*this)); }
  /* Index variable used to subscript "omp simd array" privatized
     vars (see lower_rec_simd_input_clauses).  */
  tree idx;
  /* Lane variable used to subscript the same arrays for the
     DECL_VALUE_EXPR of the original variable.  */
  tree lane;
  /* Extra arguments (addresses of SIMT-private copies) collected for
     the SIMT path.  */
  vec<tree, va_heap> simt_eargs;
  /* Statements clobbering SIMT-private copies, emitted at region
     end.  */
  gimple_seq simt_dlist;
  /* Maximum vectorization factor; 0 means not yet computed, 1 means
     no per-lane privatization.  */
  poly_uint64_pod max_vf;
  /* True if lowering for SIMT execution rather than simd arrays.  */
  bool is_simt;
};
3673
3674 /* Helper function of lower_rec_input_clauses, used for #pragma omp simd
3675 privatization. */
3676
static bool
lower_rec_simd_input_clauses (tree new_var, omp_context *ctx,
			      omplow_simd_context *sctx, tree &ivar, tree &lvar)
{
  /* On first call compute the maximum vectorization factor, clamped
     by any safelen clause on the enclosing loop.  */
  if (known_eq (sctx->max_vf, 0U))
    {
      sctx->max_vf = sctx->is_simt ? omp_max_simt_vf () : omp_max_vf ();
      if (maybe_gt (sctx->max_vf, 1U))
	{
	  tree c = omp_find_clause (gimple_omp_for_clauses (ctx->stmt),
				    OMP_CLAUSE_SAFELEN);
	  if (c)
	    {
	      poly_uint64 safe_len;
	      /* A non-constant or sub-1 safelen disables per-lane
		 privatization entirely.  */
	      if (!poly_int_tree_p (OMP_CLAUSE_SAFELEN_EXPR (c), &safe_len)
		  || maybe_lt (safe_len, 1U))
		sctx->max_vf = 1;
	      else
		sctx->max_vf = lower_bound (sctx->max_vf, safe_len);
	    }
	}
      if (maybe_gt (sctx->max_vf, 1U))
	{
	  /* Index and lane variables used to subscript the per-lane
	     arrays created below.  */
	  sctx->idx = create_tmp_var (unsigned_type_node);
	  sctx->lane = create_tmp_var (unsigned_type_node);
	}
    }
  /* With max_vf == 1 there is nothing to privatize per lane.  */
  if (known_eq (sctx->max_vf, 1U))
    return false;

  if (sctx->is_simt)
    {
      /* For SIMT, gimple registers are used directly; only
	 addressable variables get a marked private copy whose address
	 is passed via simt_eargs and which is clobbered via
	 simt_dlist.  */
      if (is_gimple_reg (new_var))
	{
	  ivar = lvar = new_var;
	  return true;
	}
      tree type = TREE_TYPE (new_var), ptype = build_pointer_type (type);
      ivar = lvar = create_tmp_var (type);
      TREE_ADDRESSABLE (ivar) = 1;
      DECL_ATTRIBUTES (ivar) = tree_cons (get_identifier ("omp simt private"),
					  NULL, DECL_ATTRIBUTES (ivar));
      sctx->simt_eargs.safe_push (build1 (ADDR_EXPR, ptype, ivar));
      tree clobber = build_constructor (type, NULL);
      TREE_THIS_VOLATILE (clobber) = 1;
      gimple *g = gimple_build_assign (ivar, clobber);
      gimple_seq_add_stmt (&sctx->simt_dlist, g);
    }
  else
    {
      /* Otherwise privatize through an "omp simd array" of max_vf
	 elements; IVAR/LVAR are element references indexed by idx and
	 lane respectively.  */
      tree atype = build_array_type_nelts (TREE_TYPE (new_var), sctx->max_vf);
      tree avar = create_tmp_var_raw (atype);
      if (TREE_ADDRESSABLE (new_var))
	TREE_ADDRESSABLE (avar) = 1;
      DECL_ATTRIBUTES (avar)
	= tree_cons (get_identifier ("omp simd array"), NULL,
		     DECL_ATTRIBUTES (avar));
      gimple_add_tmp_var (avar);
      ivar = build4 (ARRAY_REF, TREE_TYPE (new_var), avar, sctx->idx,
		     NULL_TREE, NULL_TREE);
      lvar = build4 (ARRAY_REF, TREE_TYPE (new_var), avar, sctx->lane,
		     NULL_TREE, NULL_TREE);
      TREE_THIS_NOTRAP (ivar) = 1;
      TREE_THIS_NOTRAP (lvar) = 1;
    }
  if (DECL_P (new_var))
    {
      /* Make references to NEW_VAR in the body resolve to the lane
	 element.  */
      SET_DECL_VALUE_EXPR (new_var, lvar);
      DECL_HAS_VALUE_EXPR_P (new_var) = 1;
    }
  return true;
}
3749
3750 /* Helper function of lower_rec_input_clauses. For a reference
3751 in simd reduction, add an underlying variable it will reference. */
3752
3753 static void
3754 handle_simd_reference (location_t loc, tree new_vard, gimple_seq *ilist)
3755 {
3756 tree z = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (new_vard)));
3757 if (TREE_CONSTANT (z))
3758 {
3759 z = create_tmp_var_raw (TREE_TYPE (TREE_TYPE (new_vard)),
3760 get_name (new_vard));
3761 gimple_add_tmp_var (z);
3762 TREE_ADDRESSABLE (z) = 1;
3763 z = build_fold_addr_expr_loc (loc, z);
3764 gimplify_assign (new_vard, z, ilist);
3765 }
3766 }
3767
3768 /* Helper function for lower_rec_input_clauses. Emit into ilist sequence
3769 code to emit (type) (tskred_temp[idx]). */
3770
3771 static tree
3772 task_reduction_read (gimple_seq *ilist, tree tskred_temp, tree type,
3773 unsigned idx)
3774 {
3775 unsigned HOST_WIDE_INT sz
3776 = tree_to_uhwi (TYPE_SIZE_UNIT (pointer_sized_int_node));
3777 tree r = build2 (MEM_REF, pointer_sized_int_node,
3778 tskred_temp, build_int_cst (TREE_TYPE (tskred_temp),
3779 idx * sz));
3780 tree v = create_tmp_var (pointer_sized_int_node);
3781 gimple *g = gimple_build_assign (v, r);
3782 gimple_seq_add_stmt (ilist, g);
3783 if (!useless_type_conversion_p (type, pointer_sized_int_node))
3784 {
3785 v = create_tmp_var (type);
3786 g = gimple_build_assign (v, NOP_EXPR, gimple_assign_lhs (g));
3787 gimple_seq_add_stmt (ilist, g);
3788 }
3789 return v;
3790 }
3791
3792 /* Generate code to implement the input clauses, FIRSTPRIVATE and COPYIN,
3793 from the receiver (aka child) side and initializers for REFERENCE_TYPE
3794 private variables. Initialization statements go in ILIST, while calls
3795 to destructors go in DLIST. */
3796
3797 static void
3798 lower_rec_input_clauses (tree clauses, gimple_seq *ilist, gimple_seq *dlist,
3799 omp_context *ctx, struct omp_for_data *fd)
3800 {
3801 tree c, dtor, copyin_seq, x, ptr;
3802 bool copyin_by_ref = false;
3803 bool lastprivate_firstprivate = false;
3804 bool reduction_omp_orig_ref = false;
3805 int pass;
3806 bool is_simd = (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
3807 && gimple_omp_for_kind (ctx->stmt) & GF_OMP_FOR_SIMD);
3808 omplow_simd_context sctx = omplow_simd_context ();
3809 tree simt_lane = NULL_TREE, simtrec = NULL_TREE;
3810 tree ivar = NULL_TREE, lvar = NULL_TREE, uid = NULL_TREE;
3811 gimple_seq llist[3] = { };
3812 tree nonconst_simd_if = NULL_TREE;
3813
3814 copyin_seq = NULL;
3815 sctx.is_simt = is_simd && omp_find_clause (clauses, OMP_CLAUSE__SIMT_);
3816
3817 /* Set max_vf=1 (which will later enforce safelen=1) in simd loops
3818 with data sharing clauses referencing variable sized vars. That
3819 is unnecessarily hard to support and very unlikely to result in
3820 vectorized code anyway. */
3821 if (is_simd)
3822 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
3823 switch (OMP_CLAUSE_CODE (c))
3824 {
3825 case OMP_CLAUSE_LINEAR:
3826 if (OMP_CLAUSE_LINEAR_ARRAY (c))
3827 sctx.max_vf = 1;
3828 /* FALLTHRU */
3829 case OMP_CLAUSE_PRIVATE:
3830 case OMP_CLAUSE_FIRSTPRIVATE:
3831 case OMP_CLAUSE_LASTPRIVATE:
3832 if (is_variable_sized (OMP_CLAUSE_DECL (c)))
3833 sctx.max_vf = 1;
3834 else if (omp_is_reference (OMP_CLAUSE_DECL (c)))
3835 {
3836 tree rtype = TREE_TYPE (TREE_TYPE (OMP_CLAUSE_DECL (c)));
3837 if (!TREE_CONSTANT (TYPE_SIZE_UNIT (rtype)))
3838 sctx.max_vf = 1;
3839 }
3840 break;
3841 case OMP_CLAUSE_REDUCTION:
3842 case OMP_CLAUSE_IN_REDUCTION:
3843 if (TREE_CODE (OMP_CLAUSE_DECL (c)) == MEM_REF
3844 || is_variable_sized (OMP_CLAUSE_DECL (c)))
3845 sctx.max_vf = 1;
3846 else if (omp_is_reference (OMP_CLAUSE_DECL (c)))
3847 {
3848 tree rtype = TREE_TYPE (TREE_TYPE (OMP_CLAUSE_DECL (c)));
3849 if (!TREE_CONSTANT (TYPE_SIZE_UNIT (rtype)))
3850 sctx.max_vf = 1;
3851 }
3852 break;
3853 case OMP_CLAUSE_IF:
3854 if (integer_zerop (OMP_CLAUSE_IF_EXPR (c)))
3855 sctx.max_vf = 1;
3856 else if (TREE_CODE (OMP_CLAUSE_IF_EXPR (c)) != INTEGER_CST)
3857 nonconst_simd_if = OMP_CLAUSE_IF_EXPR (c);
3858 break;
3859 case OMP_CLAUSE_SIMDLEN:
3860 if (integer_onep (OMP_CLAUSE_SIMDLEN_EXPR (c)))
3861 sctx.max_vf = 1;
3862 break;
3863 case OMP_CLAUSE__CONDTEMP_:
3864 /* FIXME: lastprivate(conditional:) not handled for SIMT yet. */
3865 if (sctx.is_simt)
3866 sctx.max_vf = 1;
3867 break;
3868 default:
3869 continue;
3870 }
3871
3872 /* Add a placeholder for simduid. */
3873 if (sctx.is_simt && maybe_ne (sctx.max_vf, 1U))
3874 sctx.simt_eargs.safe_push (NULL_TREE);
3875
3876 unsigned task_reduction_cnt = 0;
3877 unsigned task_reduction_cntorig = 0;
3878 unsigned task_reduction_cnt_full = 0;
3879 unsigned task_reduction_cntorig_full = 0;
3880 unsigned task_reduction_other_cnt = 0;
3881 tree tskred_atype = NULL_TREE, tskred_avar = NULL_TREE;
3882 tree tskred_base = NULL_TREE, tskred_temp = NULL_TREE;
3883 /* Do all the fixed sized types in the first pass, and the variable sized
3884 types in the second pass. This makes sure that the scalar arguments to
3885 the variable sized types are processed before we use them in the
3886 variable sized operations. For task reductions we use 4 passes, in the
3887 first two we ignore them, in the third one gather arguments for
3888 GOMP_task_reduction_remap call and in the last pass actually handle
3889 the task reductions. */
3890 for (pass = 0; pass < ((task_reduction_cnt || task_reduction_other_cnt)
3891 ? 4 : 2); ++pass)
3892 {
3893 if (pass == 2 && task_reduction_cnt)
3894 {
3895 tskred_atype
3896 = build_array_type_nelts (ptr_type_node, task_reduction_cnt
3897 + task_reduction_cntorig);
3898 tskred_avar = create_tmp_var_raw (tskred_atype);
3899 gimple_add_tmp_var (tskred_avar);
3900 TREE_ADDRESSABLE (tskred_avar) = 1;
3901 task_reduction_cnt_full = task_reduction_cnt;
3902 task_reduction_cntorig_full = task_reduction_cntorig;
3903 }
3904 else if (pass == 3 && task_reduction_cnt)
3905 {
3906 x = builtin_decl_explicit (BUILT_IN_GOMP_TASK_REDUCTION_REMAP);
3907 gimple *g
3908 = gimple_build_call (x, 3, size_int (task_reduction_cnt),
3909 size_int (task_reduction_cntorig),
3910 build_fold_addr_expr (tskred_avar));
3911 gimple_seq_add_stmt (ilist, g);
3912 }
3913 if (pass == 3 && task_reduction_other_cnt)
3914 {
3915 /* For reduction clauses, build
3916 tskred_base = (void *) tskred_temp[2]
3917 + omp_get_thread_num () * tskred_temp[1]
3918 or if tskred_temp[1] is known to be constant, that constant
3919 directly. This is the start of the private reduction copy block
3920 for the current thread. */
3921 tree v = create_tmp_var (integer_type_node);
3922 x = builtin_decl_explicit (BUILT_IN_OMP_GET_THREAD_NUM);
3923 gimple *g = gimple_build_call (x, 0);
3924 gimple_call_set_lhs (g, v);
3925 gimple_seq_add_stmt (ilist, g);
3926 c = omp_find_clause (clauses, OMP_CLAUSE__REDUCTEMP_);
3927 tskred_temp = OMP_CLAUSE_DECL (c);
3928 if (is_taskreg_ctx (ctx))
3929 tskred_temp = lookup_decl (tskred_temp, ctx);
3930 tree v2 = create_tmp_var (sizetype);
3931 g = gimple_build_assign (v2, NOP_EXPR, v);
3932 gimple_seq_add_stmt (ilist, g);
3933 if (ctx->task_reductions[0])
3934 v = fold_convert (sizetype, ctx->task_reductions[0]);
3935 else
3936 v = task_reduction_read (ilist, tskred_temp, sizetype, 1);
3937 tree v3 = create_tmp_var (sizetype);
3938 g = gimple_build_assign (v3, MULT_EXPR, v2, v);
3939 gimple_seq_add_stmt (ilist, g);
3940 v = task_reduction_read (ilist, tskred_temp, ptr_type_node, 2);
3941 tskred_base = create_tmp_var (ptr_type_node);
3942 g = gimple_build_assign (tskred_base, POINTER_PLUS_EXPR, v, v3);
3943 gimple_seq_add_stmt (ilist, g);
3944 }
3945 task_reduction_cnt = 0;
3946 task_reduction_cntorig = 0;
3947 task_reduction_other_cnt = 0;
3948 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
3949 {
3950 enum omp_clause_code c_kind = OMP_CLAUSE_CODE (c);
3951 tree var, new_var;
3952 bool by_ref;
3953 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
3954 bool task_reduction_p = false;
3955 bool task_reduction_needs_orig_p = false;
3956 tree cond = NULL_TREE;
3957
3958 switch (c_kind)
3959 {
3960 case OMP_CLAUSE_PRIVATE:
3961 if (OMP_CLAUSE_PRIVATE_DEBUG (c))
3962 continue;
3963 break;
3964 case OMP_CLAUSE_SHARED:
3965 /* Ignore shared directives in teams construct inside
3966 of target construct. */
3967 if (gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS
3968 && !is_host_teams_ctx (ctx))
3969 continue;
3970 if (maybe_lookup_decl (OMP_CLAUSE_DECL (c), ctx) == NULL)
3971 {
3972 gcc_assert (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c)
3973 || is_global_var (OMP_CLAUSE_DECL (c)));
3974 continue;
3975 }
3976 case OMP_CLAUSE_FIRSTPRIVATE:
3977 case OMP_CLAUSE_COPYIN:
3978 break;
3979 case OMP_CLAUSE_LINEAR:
3980 if (!OMP_CLAUSE_LINEAR_NO_COPYIN (c)
3981 && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c))
3982 lastprivate_firstprivate = true;
3983 break;
3984 case OMP_CLAUSE_REDUCTION:
3985 case OMP_CLAUSE_IN_REDUCTION:
3986 if (is_task_ctx (ctx) || OMP_CLAUSE_REDUCTION_TASK (c))
3987 {
3988 task_reduction_p = true;
3989 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION)
3990 {
3991 task_reduction_other_cnt++;
3992 if (pass == 2)
3993 continue;
3994 }
3995 else
3996 task_reduction_cnt++;
3997 if (OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c))
3998 {
3999 var = OMP_CLAUSE_DECL (c);
4000 /* If var is a global variable that isn't privatized
4001 in outer contexts, we don't need to look up the
4002 original address, it is always the address of the
4003 global variable itself. */
4004 if (!DECL_P (var)
4005 || omp_is_reference (var)
4006 || !is_global_var
4007 (maybe_lookup_decl_in_outer_ctx (var, ctx)))
4008 {
4009 task_reduction_needs_orig_p = true;
4010 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION)
4011 task_reduction_cntorig++;
4012 }
4013 }
4014 }
4015 else if (OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c))
4016 reduction_omp_orig_ref = true;
4017 break;
4018 case OMP_CLAUSE__REDUCTEMP_:
4019 if (!is_taskreg_ctx (ctx))
4020 continue;
4021 /* FALLTHRU */
4022 case OMP_CLAUSE__LOOPTEMP_:
4023 /* Handle _looptemp_/_reductemp_ clauses only on
4024 parallel/task. */
4025 if (fd)
4026 continue;
4027 break;
4028 case OMP_CLAUSE_LASTPRIVATE:
4029 if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
4030 {
4031 lastprivate_firstprivate = true;
4032 if (pass != 0 || is_taskloop_ctx (ctx))
4033 continue;
4034 }
4035 /* Even without corresponding firstprivate, if
4036 decl is Fortran allocatable, it needs outer var
4037 reference. */
4038 else if (pass == 0
4039 && lang_hooks.decls.omp_private_outer_ref
4040 (OMP_CLAUSE_DECL (c)))
4041 lastprivate_firstprivate = true;
4042 break;
4043 case OMP_CLAUSE_ALIGNED:
4044 if (pass != 1)
4045 continue;
4046 var = OMP_CLAUSE_DECL (c);
4047 if (TREE_CODE (TREE_TYPE (var)) == POINTER_TYPE
4048 && !is_global_var (var))
4049 {
4050 new_var = maybe_lookup_decl (var, ctx);
4051 if (new_var == NULL_TREE)
4052 new_var = maybe_lookup_decl_in_outer_ctx (var, ctx);
4053 x = builtin_decl_explicit (BUILT_IN_ASSUME_ALIGNED);
4054 tree alarg = omp_clause_aligned_alignment (c);
4055 alarg = fold_convert_loc (clause_loc, size_type_node, alarg);
4056 x = build_call_expr_loc (clause_loc, x, 2, new_var, alarg);
4057 x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x);
4058 x = build2 (MODIFY_EXPR, TREE_TYPE (new_var), new_var, x);
4059 gimplify_and_add (x, ilist);
4060 }
4061 else if (TREE_CODE (TREE_TYPE (var)) == ARRAY_TYPE
4062 && is_global_var (var))
4063 {
4064 tree ptype = build_pointer_type (TREE_TYPE (var)), t, t2;
4065 new_var = lookup_decl (var, ctx);
4066 t = maybe_lookup_decl_in_outer_ctx (var, ctx);
4067 t = build_fold_addr_expr_loc (clause_loc, t);
4068 t2 = builtin_decl_explicit (BUILT_IN_ASSUME_ALIGNED);
4069 tree alarg = omp_clause_aligned_alignment (c);
4070 alarg = fold_convert_loc (clause_loc, size_type_node, alarg);
4071 t = build_call_expr_loc (clause_loc, t2, 2, t, alarg);
4072 t = fold_convert_loc (clause_loc, ptype, t);
4073 x = create_tmp_var (ptype);
4074 t = build2 (MODIFY_EXPR, ptype, x, t);
4075 gimplify_and_add (t, ilist);
4076 t = build_simple_mem_ref_loc (clause_loc, x);
4077 SET_DECL_VALUE_EXPR (new_var, t);
4078 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
4079 }
4080 continue;
4081 case OMP_CLAUSE__CONDTEMP_:
4082 if (is_parallel_ctx (ctx)
4083 || (is_simd && !OMP_CLAUSE__CONDTEMP__ITER (c)))
4084 break;
4085 continue;
4086 default:
4087 continue;
4088 }
4089
4090 if (task_reduction_p != (pass >= 2))
4091 continue;
4092
4093 new_var = var = OMP_CLAUSE_DECL (c);
4094 if ((c_kind == OMP_CLAUSE_REDUCTION
4095 || c_kind == OMP_CLAUSE_IN_REDUCTION)
4096 && TREE_CODE (var) == MEM_REF)
4097 {
4098 var = TREE_OPERAND (var, 0);
4099 if (TREE_CODE (var) == POINTER_PLUS_EXPR)
4100 var = TREE_OPERAND (var, 0);
4101 if (TREE_CODE (var) == INDIRECT_REF
4102 || TREE_CODE (var) == ADDR_EXPR)
4103 var = TREE_OPERAND (var, 0);
4104 if (is_variable_sized (var))
4105 {
4106 gcc_assert (DECL_HAS_VALUE_EXPR_P (var));
4107 var = DECL_VALUE_EXPR (var);
4108 gcc_assert (TREE_CODE (var) == INDIRECT_REF);
4109 var = TREE_OPERAND (var, 0);
4110 gcc_assert (DECL_P (var));
4111 }
4112 new_var = var;
4113 }
4114 if (c_kind != OMP_CLAUSE_COPYIN)
4115 new_var = lookup_decl (var, ctx);
4116
4117 if (c_kind == OMP_CLAUSE_SHARED || c_kind == OMP_CLAUSE_COPYIN)
4118 {
4119 if (pass != 0)
4120 continue;
4121 }
4122 /* C/C++ array section reductions. */
4123 else if ((c_kind == OMP_CLAUSE_REDUCTION
4124 || c_kind == OMP_CLAUSE_IN_REDUCTION)
4125 && var != OMP_CLAUSE_DECL (c))
4126 {
4127 if (pass == 0)
4128 continue;
4129
4130 tree bias = TREE_OPERAND (OMP_CLAUSE_DECL (c), 1);
4131 tree orig_var = TREE_OPERAND (OMP_CLAUSE_DECL (c), 0);
4132
4133 if (TREE_CODE (orig_var) == POINTER_PLUS_EXPR)
4134 {
4135 tree b = TREE_OPERAND (orig_var, 1);
4136 b = maybe_lookup_decl (b, ctx);
4137 if (b == NULL)
4138 {
4139 b = TREE_OPERAND (orig_var, 1);
4140 b = maybe_lookup_decl_in_outer_ctx (b, ctx);
4141 }
4142 if (integer_zerop (bias))
4143 bias = b;
4144 else
4145 {
4146 bias = fold_convert_loc (clause_loc,
4147 TREE_TYPE (b), bias);
4148 bias = fold_build2_loc (clause_loc, PLUS_EXPR,
4149 TREE_TYPE (b), b, bias);
4150 }
4151 orig_var = TREE_OPERAND (orig_var, 0);
4152 }
4153 if (pass == 2)
4154 {
4155 tree out = maybe_lookup_decl_in_outer_ctx (var, ctx);
4156 if (is_global_var (out)
4157 && TREE_CODE (TREE_TYPE (out)) != POINTER_TYPE
4158 && (TREE_CODE (TREE_TYPE (out)) != REFERENCE_TYPE
4159 || (TREE_CODE (TREE_TYPE (TREE_TYPE (out)))
4160 != POINTER_TYPE)))
4161 x = var;
4162 else
4163 {
4164 bool by_ref = use_pointer_for_field (var, NULL);
4165 x = build_receiver_ref (var, by_ref, ctx);
4166 if (TREE_CODE (TREE_TYPE (var)) == REFERENCE_TYPE
4167 && (TREE_CODE (TREE_TYPE (TREE_TYPE (var)))
4168 == POINTER_TYPE))
4169 x = build_fold_addr_expr (x);
4170 }
4171 if (TREE_CODE (orig_var) == INDIRECT_REF)
4172 x = build_simple_mem_ref (x);
4173 else if (TREE_CODE (orig_var) == ADDR_EXPR)
4174 {
4175 if (var == TREE_OPERAND (orig_var, 0))
4176 x = build_fold_addr_expr (x);
4177 }
4178 bias = fold_convert (sizetype, bias);
4179 x = fold_convert (ptr_type_node, x);
4180 x = fold_build2_loc (clause_loc, POINTER_PLUS_EXPR,
4181 TREE_TYPE (x), x, bias);
4182 unsigned cnt = task_reduction_cnt - 1;
4183 if (!task_reduction_needs_orig_p)
4184 cnt += (task_reduction_cntorig_full
4185 - task_reduction_cntorig);
4186 else
4187 cnt = task_reduction_cntorig - 1;
4188 tree r = build4 (ARRAY_REF, ptr_type_node, tskred_avar,
4189 size_int (cnt), NULL_TREE, NULL_TREE);
4190 gimplify_assign (r, x, ilist);
4191 continue;
4192 }
4193
4194 if (TREE_CODE (orig_var) == INDIRECT_REF
4195 || TREE_CODE (orig_var) == ADDR_EXPR)
4196 orig_var = TREE_OPERAND (orig_var, 0);
4197 tree d = OMP_CLAUSE_DECL (c);
4198 tree type = TREE_TYPE (d);
4199 gcc_assert (TREE_CODE (type) == ARRAY_TYPE);
4200 tree v = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
4201 const char *name = get_name (orig_var);
4202 if (pass == 3)
4203 {
4204 tree xv = create_tmp_var (ptr_type_node);
4205 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION)
4206 {
4207 unsigned cnt = task_reduction_cnt - 1;
4208 if (!task_reduction_needs_orig_p)
4209 cnt += (task_reduction_cntorig_full
4210 - task_reduction_cntorig);
4211 else
4212 cnt = task_reduction_cntorig - 1;
4213 x = build4 (ARRAY_REF, ptr_type_node, tskred_avar,
4214 size_int (cnt), NULL_TREE, NULL_TREE);
4215
4216 gimple *g = gimple_build_assign (xv, x);
4217 gimple_seq_add_stmt (ilist, g);
4218 }
4219 else
4220 {
4221 unsigned int idx = *ctx->task_reduction_map->get (c);
4222 tree off;
4223 if (ctx->task_reductions[1 + idx])
4224 off = fold_convert (sizetype,
4225 ctx->task_reductions[1 + idx]);
4226 else
4227 off = task_reduction_read (ilist, tskred_temp, sizetype,
4228 7 + 3 * idx + 1);
4229 gimple *g = gimple_build_assign (xv, POINTER_PLUS_EXPR,
4230 tskred_base, off);
4231 gimple_seq_add_stmt (ilist, g);
4232 }
4233 x = fold_convert (build_pointer_type (boolean_type_node),
4234 xv);
4235 if (TREE_CONSTANT (v))
4236 x = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (x), x,
4237 TYPE_SIZE_UNIT (type));
4238 else
4239 {
4240 tree t = maybe_lookup_decl (v, ctx);
4241 if (t)
4242 v = t;
4243 else
4244 v = maybe_lookup_decl_in_outer_ctx (v, ctx);
4245 gimplify_expr (&v, ilist, NULL, is_gimple_val,
4246 fb_rvalue);
4247 t = fold_build2_loc (clause_loc, PLUS_EXPR,
4248 TREE_TYPE (v), v,
4249 build_int_cst (TREE_TYPE (v), 1));
4250 t = fold_build2_loc (clause_loc, MULT_EXPR,
4251 TREE_TYPE (v), t,
4252 TYPE_SIZE_UNIT (TREE_TYPE (type)));
4253 x = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (x), x, t);
4254 }
4255 cond = create_tmp_var (TREE_TYPE (x));
4256 gimplify_assign (cond, x, ilist);
4257 x = xv;
4258 }
4259 else if (TREE_CONSTANT (v))
4260 {
4261 x = create_tmp_var_raw (type, name);
4262 gimple_add_tmp_var (x);
4263 TREE_ADDRESSABLE (x) = 1;
4264 x = build_fold_addr_expr_loc (clause_loc, x);
4265 }
4266 else
4267 {
4268 tree atmp
4269 = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
4270 tree t = maybe_lookup_decl (v, ctx);
4271 if (t)
4272 v = t;
4273 else
4274 v = maybe_lookup_decl_in_outer_ctx (v, ctx);
4275 gimplify_expr (&v, ilist, NULL, is_gimple_val, fb_rvalue);
4276 t = fold_build2_loc (clause_loc, PLUS_EXPR,
4277 TREE_TYPE (v), v,
4278 build_int_cst (TREE_TYPE (v), 1));
4279 t = fold_build2_loc (clause_loc, MULT_EXPR,
4280 TREE_TYPE (v), t,
4281 TYPE_SIZE_UNIT (TREE_TYPE (type)));
4282 tree al = size_int (TYPE_ALIGN (TREE_TYPE (type)));
4283 x = build_call_expr_loc (clause_loc, atmp, 2, t, al);
4284 }
4285
4286 tree ptype = build_pointer_type (TREE_TYPE (type));
4287 x = fold_convert_loc (clause_loc, ptype, x);
4288 tree y = create_tmp_var (ptype, name);
4289 gimplify_assign (y, x, ilist);
4290 x = y;
4291 tree yb = y;
4292
4293 if (!integer_zerop (bias))
4294 {
4295 bias = fold_convert_loc (clause_loc, pointer_sized_int_node,
4296 bias);
4297 yb = fold_convert_loc (clause_loc, pointer_sized_int_node,
4298 x);
4299 yb = fold_build2_loc (clause_loc, MINUS_EXPR,
4300 pointer_sized_int_node, yb, bias);
4301 x = fold_convert_loc (clause_loc, TREE_TYPE (x), yb);
4302 yb = create_tmp_var (ptype, name);
4303 gimplify_assign (yb, x, ilist);
4304 x = yb;
4305 }
4306
4307 d = TREE_OPERAND (d, 0);
4308 if (TREE_CODE (d) == POINTER_PLUS_EXPR)
4309 d = TREE_OPERAND (d, 0);
4310 if (TREE_CODE (d) == ADDR_EXPR)
4311 {
4312 if (orig_var != var)
4313 {
4314 gcc_assert (is_variable_sized (orig_var));
4315 x = fold_convert_loc (clause_loc, TREE_TYPE (new_var),
4316 x);
4317 gimplify_assign (new_var, x, ilist);
4318 tree new_orig_var = lookup_decl (orig_var, ctx);
4319 tree t = build_fold_indirect_ref (new_var);
4320 DECL_IGNORED_P (new_var) = 0;
4321 TREE_THIS_NOTRAP (t) = 1;
4322 SET_DECL_VALUE_EXPR (new_orig_var, t);
4323 DECL_HAS_VALUE_EXPR_P (new_orig_var) = 1;
4324 }
4325 else
4326 {
4327 x = build2 (MEM_REF, TREE_TYPE (new_var), x,
4328 build_int_cst (ptype, 0));
4329 SET_DECL_VALUE_EXPR (new_var, x);
4330 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
4331 }
4332 }
4333 else
4334 {
4335 gcc_assert (orig_var == var);
4336 if (TREE_CODE (d) == INDIRECT_REF)
4337 {
4338 x = create_tmp_var (ptype, name);
4339 TREE_ADDRESSABLE (x) = 1;
4340 gimplify_assign (x, yb, ilist);
4341 x = build_fold_addr_expr_loc (clause_loc, x);
4342 }
4343 x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x);
4344 gimplify_assign (new_var, x, ilist);
4345 }
4346 /* GOMP_taskgroup_reduction_register memsets the whole
4347 array to zero. If the initializer is zero, we don't
4348 need to initialize it again, just mark it as ever
4349 used unconditionally, i.e. cond = true. */
4350 if (cond
4351 && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) == NULL_TREE
4352 && initializer_zerop (omp_reduction_init (c,
4353 TREE_TYPE (type))))
4354 {
4355 gimple *g = gimple_build_assign (build_simple_mem_ref (cond),
4356 boolean_true_node);
4357 gimple_seq_add_stmt (ilist, g);
4358 continue;
4359 }
4360 tree end = create_artificial_label (UNKNOWN_LOCATION);
4361 if (cond)
4362 {
4363 gimple *g;
4364 if (!is_parallel_ctx (ctx))
4365 {
4366 tree condv = create_tmp_var (boolean_type_node);
4367 g = gimple_build_assign (condv,
4368 build_simple_mem_ref (cond));
4369 gimple_seq_add_stmt (ilist, g);
4370 tree lab1 = create_artificial_label (UNKNOWN_LOCATION);
4371 g = gimple_build_cond (NE_EXPR, condv,
4372 boolean_false_node, end, lab1);
4373 gimple_seq_add_stmt (ilist, g);
4374 gimple_seq_add_stmt (ilist, gimple_build_label (lab1));
4375 }
4376 g = gimple_build_assign (build_simple_mem_ref (cond),
4377 boolean_true_node);
4378 gimple_seq_add_stmt (ilist, g);
4379 }
4380
4381 tree y1 = create_tmp_var (ptype);
4382 gimplify_assign (y1, y, ilist);
4383 tree i2 = NULL_TREE, y2 = NULL_TREE;
4384 tree body2 = NULL_TREE, end2 = NULL_TREE;
4385 tree y3 = NULL_TREE, y4 = NULL_TREE;
4386 if (task_reduction_needs_orig_p)
4387 {
4388 y3 = create_tmp_var (ptype);
4389 tree ref;
4390 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION)
4391 ref = build4 (ARRAY_REF, ptr_type_node, tskred_avar,
4392 size_int (task_reduction_cnt_full
4393 + task_reduction_cntorig - 1),
4394 NULL_TREE, NULL_TREE);
4395 else
4396 {
4397 unsigned int idx = *ctx->task_reduction_map->get (c);
4398 ref = task_reduction_read (ilist, tskred_temp, ptype,
4399 7 + 3 * idx);
4400 }
4401 gimplify_assign (y3, ref, ilist);
4402 }
4403 else if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) || is_simd)
4404 {
4405 if (pass != 3)
4406 {
4407 y2 = create_tmp_var (ptype);
4408 gimplify_assign (y2, y, ilist);
4409 }
4410 if (is_simd || OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c))
4411 {
4412 tree ref = build_outer_var_ref (var, ctx);
4413 /* For ref build_outer_var_ref already performs this. */
4414 if (TREE_CODE (d) == INDIRECT_REF)
4415 gcc_assert (omp_is_reference (var));
4416 else if (TREE_CODE (d) == ADDR_EXPR)
4417 ref = build_fold_addr_expr (ref);
4418 else if (omp_is_reference (var))
4419 ref = build_fold_addr_expr (ref);
4420 ref = fold_convert_loc (clause_loc, ptype, ref);
4421 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c)
4422 && OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c))
4423 {
4424 y3 = create_tmp_var (ptype);
4425 gimplify_assign (y3, unshare_expr (ref), ilist);
4426 }
4427 if (is_simd)
4428 {
4429 y4 = create_tmp_var (ptype);
4430 gimplify_assign (y4, ref, dlist);
4431 }
4432 }
4433 }
4434 tree i = create_tmp_var (TREE_TYPE (v));
4435 gimplify_assign (i, build_int_cst (TREE_TYPE (v), 0), ilist);
4436 tree body = create_artificial_label (UNKNOWN_LOCATION);
4437 gimple_seq_add_stmt (ilist, gimple_build_label (body));
4438 if (y2)
4439 {
4440 i2 = create_tmp_var (TREE_TYPE (v));
4441 gimplify_assign (i2, build_int_cst (TREE_TYPE (v), 0), dlist);
4442 body2 = create_artificial_label (UNKNOWN_LOCATION);
4443 end2 = create_artificial_label (UNKNOWN_LOCATION);
4444 gimple_seq_add_stmt (dlist, gimple_build_label (body2));
4445 }
4446 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
4447 {
4448 tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
4449 tree decl_placeholder
4450 = OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c);
4451 SET_DECL_VALUE_EXPR (decl_placeholder,
4452 build_simple_mem_ref (y1));
4453 DECL_HAS_VALUE_EXPR_P (decl_placeholder) = 1;
4454 SET_DECL_VALUE_EXPR (placeholder,
4455 y3 ? build_simple_mem_ref (y3)
4456 : error_mark_node);
4457 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
4458 x = lang_hooks.decls.omp_clause_default_ctor
4459 (c, build_simple_mem_ref (y1),
4460 y3 ? build_simple_mem_ref (y3) : NULL_TREE);
4461 if (x)
4462 gimplify_and_add (x, ilist);
4463 if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c))
4464 {
4465 gimple_seq tseq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c);
4466 lower_omp (&tseq, ctx);
4467 gimple_seq_add_seq (ilist, tseq);
4468 }
4469 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL;
4470 if (is_simd)
4471 {
4472 SET_DECL_VALUE_EXPR (decl_placeholder,
4473 build_simple_mem_ref (y2));
4474 SET_DECL_VALUE_EXPR (placeholder,
4475 build_simple_mem_ref (y4));
4476 gimple_seq tseq = OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c);
4477 lower_omp (&tseq, ctx);
4478 gimple_seq_add_seq (dlist, tseq);
4479 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
4480 }
4481 DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
4482 DECL_HAS_VALUE_EXPR_P (decl_placeholder) = 0;
4483 if (y2)
4484 {
4485 x = lang_hooks.decls.omp_clause_dtor
4486 (c, build_simple_mem_ref (y2));
4487 if (x)
4488 {
4489 gimple_seq tseq = NULL;
4490 dtor = x;
4491 gimplify_stmt (&dtor, &tseq);
4492 gimple_seq_add_seq (dlist, tseq);
4493 }
4494 }
4495 }
4496 else
4497 {
4498 x = omp_reduction_init (c, TREE_TYPE (type));
4499 enum tree_code code = OMP_CLAUSE_REDUCTION_CODE (c);
4500
4501 /* reduction(-:var) sums up the partial results, so it
4502 acts identically to reduction(+:var). */
4503 if (code == MINUS_EXPR)
4504 code = PLUS_EXPR;
4505
4506 gimplify_assign (build_simple_mem_ref (y1), x, ilist);
4507 if (is_simd)
4508 {
4509 x = build2 (code, TREE_TYPE (type),
4510 build_simple_mem_ref (y4),
4511 build_simple_mem_ref (y2));
4512 gimplify_assign (build_simple_mem_ref (y4), x, dlist);
4513 }
4514 }
4515 gimple *g
4516 = gimple_build_assign (y1, POINTER_PLUS_EXPR, y1,
4517 TYPE_SIZE_UNIT (TREE_TYPE (type)));
4518 gimple_seq_add_stmt (ilist, g);
4519 if (y3)
4520 {
4521 g = gimple_build_assign (y3, POINTER_PLUS_EXPR, y3,
4522 TYPE_SIZE_UNIT (TREE_TYPE (type)));
4523 gimple_seq_add_stmt (ilist, g);
4524 }
4525 g = gimple_build_assign (i, PLUS_EXPR, i,
4526 build_int_cst (TREE_TYPE (i), 1));
4527 gimple_seq_add_stmt (ilist, g);
4528 g = gimple_build_cond (LE_EXPR, i, v, body, end);
4529 gimple_seq_add_stmt (ilist, g);
4530 gimple_seq_add_stmt (ilist, gimple_build_label (end));
4531 if (y2)
4532 {
4533 g = gimple_build_assign (y2, POINTER_PLUS_EXPR, y2,
4534 TYPE_SIZE_UNIT (TREE_TYPE (type)));
4535 gimple_seq_add_stmt (dlist, g);
4536 if (y4)
4537 {
4538 g = gimple_build_assign
4539 (y4, POINTER_PLUS_EXPR, y4,
4540 TYPE_SIZE_UNIT (TREE_TYPE (type)));
4541 gimple_seq_add_stmt (dlist, g);
4542 }
4543 g = gimple_build_assign (i2, PLUS_EXPR, i2,
4544 build_int_cst (TREE_TYPE (i2), 1));
4545 gimple_seq_add_stmt (dlist, g);
4546 g = gimple_build_cond (LE_EXPR, i2, v, body2, end2);
4547 gimple_seq_add_stmt (dlist, g);
4548 gimple_seq_add_stmt (dlist, gimple_build_label (end2));
4549 }
4550 continue;
4551 }
4552 else if (pass == 2)
4553 {
4554 if (is_global_var (maybe_lookup_decl_in_outer_ctx (var, ctx)))
4555 x = var;
4556 else
4557 {
4558 bool by_ref = use_pointer_for_field (var, ctx);
4559 x = build_receiver_ref (var, by_ref, ctx);
4560 }
4561 if (!omp_is_reference (var))
4562 x = build_fold_addr_expr (x);
4563 x = fold_convert (ptr_type_node, x);
4564 unsigned cnt = task_reduction_cnt - 1;
4565 if (!task_reduction_needs_orig_p)
4566 cnt += task_reduction_cntorig_full - task_reduction_cntorig;
4567 else
4568 cnt = task_reduction_cntorig - 1;
4569 tree r = build4 (ARRAY_REF, ptr_type_node, tskred_avar,
4570 size_int (cnt), NULL_TREE, NULL_TREE);
4571 gimplify_assign (r, x, ilist);
4572 continue;
4573 }
4574 else if (pass == 3)
4575 {
4576 tree type = TREE_TYPE (new_var);
4577 if (!omp_is_reference (var))
4578 type = build_pointer_type (type);
4579 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION)
4580 {
4581 unsigned cnt = task_reduction_cnt - 1;
4582 if (!task_reduction_needs_orig_p)
4583 cnt += (task_reduction_cntorig_full
4584 - task_reduction_cntorig);
4585 else
4586 cnt = task_reduction_cntorig - 1;
4587 x = build4 (ARRAY_REF, ptr_type_node, tskred_avar,
4588 size_int (cnt), NULL_TREE, NULL_TREE);
4589 }
4590 else
4591 {
4592 unsigned int idx = *ctx->task_reduction_map->get (c);
4593 tree off;
4594 if (ctx->task_reductions[1 + idx])
4595 off = fold_convert (sizetype,
4596 ctx->task_reductions[1 + idx]);
4597 else
4598 off = task_reduction_read (ilist, tskred_temp, sizetype,
4599 7 + 3 * idx + 1);
4600 x = fold_build2 (POINTER_PLUS_EXPR, ptr_type_node,
4601 tskred_base, off);
4602 }
4603 x = fold_convert (type, x);
4604 tree t;
4605 if (omp_is_reference (var))
4606 {
4607 gimplify_assign (new_var, x, ilist);
4608 t = new_var;
4609 new_var = build_simple_mem_ref (new_var);
4610 }
4611 else
4612 {
4613 t = create_tmp_var (type);
4614 gimplify_assign (t, x, ilist);
4615 SET_DECL_VALUE_EXPR (new_var, build_simple_mem_ref (t));
4616 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
4617 }
4618 t = fold_convert (build_pointer_type (boolean_type_node), t);
4619 t = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (t), t,
4620 TYPE_SIZE_UNIT (TREE_TYPE (type)));
4621 cond = create_tmp_var (TREE_TYPE (t));
4622 gimplify_assign (cond, t, ilist);
4623 }
4624 else if (is_variable_sized (var))
4625 {
4626 /* For variable sized types, we need to allocate the
4627 actual storage here. Call alloca and store the
4628 result in the pointer decl that we created elsewhere. */
4629 if (pass == 0)
4630 continue;
4631
4632 if (c_kind != OMP_CLAUSE_FIRSTPRIVATE || !is_task_ctx (ctx))
4633 {
4634 gcall *stmt;
4635 tree tmp, atmp;
4636
4637 ptr = DECL_VALUE_EXPR (new_var);
4638 gcc_assert (TREE_CODE (ptr) == INDIRECT_REF);
4639 ptr = TREE_OPERAND (ptr, 0);
4640 gcc_assert (DECL_P (ptr));
4641 x = TYPE_SIZE_UNIT (TREE_TYPE (new_var));
4642
4643 /* void *tmp = __builtin_alloca */
4644 atmp = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
4645 stmt = gimple_build_call (atmp, 2, x,
4646 size_int (DECL_ALIGN (var)));
4647 tmp = create_tmp_var_raw (ptr_type_node);
4648 gimple_add_tmp_var (tmp);
4649 gimple_call_set_lhs (stmt, tmp);
4650
4651 gimple_seq_add_stmt (ilist, stmt);
4652
4653 x = fold_convert_loc (clause_loc, TREE_TYPE (ptr), tmp);
4654 gimplify_assign (ptr, x, ilist);
4655 }
4656 }
4657 else if (omp_is_reference (var)
4658 && (c_kind != OMP_CLAUSE_FIRSTPRIVATE
4659 || !OMP_CLAUSE_FIRSTPRIVATE_NO_REFERENCE (c)))
4660 {
4661 /* For references that are being privatized for Fortran,
4662 allocate new backing storage for the new pointer
4663 variable. This allows us to avoid changing all the
4664 code that expects a pointer to something that expects
4665 a direct variable. */
4666 if (pass == 0)
4667 continue;
4668
4669 x = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (new_var)));
4670 if (c_kind == OMP_CLAUSE_FIRSTPRIVATE && is_task_ctx (ctx))
4671 {
4672 x = build_receiver_ref (var, false, ctx);
4673 x = build_fold_addr_expr_loc (clause_loc, x);
4674 }
4675 else if (TREE_CONSTANT (x))
4676 {
4677 /* For reduction in SIMD loop, defer adding the
4678 initialization of the reference, because if we decide
4679 to use SIMD array for it, the initialization could cause
4680 expansion ICE. Ditto for other privatization clauses. */
4681 if (is_simd)
4682 x = NULL_TREE;
4683 else
4684 {
4685 x = create_tmp_var_raw (TREE_TYPE (TREE_TYPE (new_var)),
4686 get_name (var));
4687 gimple_add_tmp_var (x);
4688 TREE_ADDRESSABLE (x) = 1;
4689 x = build_fold_addr_expr_loc (clause_loc, x);
4690 }
4691 }
4692 else
4693 {
4694 tree atmp
4695 = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
4696 tree rtype = TREE_TYPE (TREE_TYPE (new_var));
4697 tree al = size_int (TYPE_ALIGN (rtype));
4698 x = build_call_expr_loc (clause_loc, atmp, 2, x, al);
4699 }
4700
4701 if (x)
4702 {
4703 x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x);
4704 gimplify_assign (new_var, x, ilist);
4705 }
4706
4707 new_var = build_simple_mem_ref_loc (clause_loc, new_var);
4708 }
4709 else if ((c_kind == OMP_CLAUSE_REDUCTION
4710 || c_kind == OMP_CLAUSE_IN_REDUCTION)
4711 && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
4712 {
4713 if (pass == 0)
4714 continue;
4715 }
4716 else if (pass != 0)
4717 continue;
4718
4719 switch (OMP_CLAUSE_CODE (c))
4720 {
4721 case OMP_CLAUSE_SHARED:
4722 /* Ignore shared directives in teams construct inside
4723 target construct. */
4724 if (gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS
4725 && !is_host_teams_ctx (ctx))
4726 continue;
4727 /* Shared global vars are just accessed directly. */
4728 if (is_global_var (new_var))
4729 break;
4730 /* For taskloop firstprivate/lastprivate, represented
4731 as firstprivate and shared clause on the task, new_var
4732 is the firstprivate var. */
4733 if (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
4734 break;
4735 /* Set up the DECL_VALUE_EXPR for shared variables now. This
4736 needs to be delayed until after fixup_child_record_type so
4737 that we get the correct type during the dereference. */
4738 by_ref = use_pointer_for_field (var, ctx);
4739 x = build_receiver_ref (var, by_ref, ctx);
4740 SET_DECL_VALUE_EXPR (new_var, x);
4741 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
4742
4743 /* ??? If VAR is not passed by reference, and the variable
4744 hasn't been initialized yet, then we'll get a warning for
4745 the store into the omp_data_s structure. Ideally, we'd be
4746 able to notice this and not store anything at all, but
4747 we're generating code too early. Suppress the warning. */
4748 if (!by_ref)
4749 TREE_NO_WARNING (var) = 1;
4750 break;
4751
4752 case OMP_CLAUSE__CONDTEMP_:
4753 if (is_parallel_ctx (ctx))
4754 {
4755 x = build_receiver_ref (var, false, ctx);
4756 SET_DECL_VALUE_EXPR (new_var, x);
4757 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
4758 }
4759 else if (is_simd && !OMP_CLAUSE__CONDTEMP__ITER (c))
4760 {
4761 x = build_zero_cst (TREE_TYPE (var));
4762 goto do_private;
4763 }
4764 break;
4765
4766 case OMP_CLAUSE_LASTPRIVATE:
4767 if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
4768 break;
4769 /* FALLTHRU */
4770
4771 case OMP_CLAUSE_PRIVATE:
4772 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_PRIVATE)
4773 x = build_outer_var_ref (var, ctx);
4774 else if (OMP_CLAUSE_PRIVATE_OUTER_REF (c))
4775 {
4776 if (is_task_ctx (ctx))
4777 x = build_receiver_ref (var, false, ctx);
4778 else
4779 x = build_outer_var_ref (var, ctx, OMP_CLAUSE_PRIVATE);
4780 }
4781 else
4782 x = NULL;
4783 do_private:
4784 tree nx;
4785 nx = lang_hooks.decls.omp_clause_default_ctor
4786 (c, unshare_expr (new_var), x);
4787 if (is_simd)
4788 {
4789 tree y = lang_hooks.decls.omp_clause_dtor (c, new_var);
4790 if ((TREE_ADDRESSABLE (new_var) || nx || y
4791 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
4792 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE__CONDTEMP_
4793 || omp_is_reference (var))
4794 && lower_rec_simd_input_clauses (new_var, ctx, &sctx,
4795 ivar, lvar))
4796 {
4797 if (omp_is_reference (var))
4798 {
4799 gcc_assert (TREE_CODE (new_var) == MEM_REF);
4800 tree new_vard = TREE_OPERAND (new_var, 0);
4801 gcc_assert (DECL_P (new_vard));
4802 SET_DECL_VALUE_EXPR (new_vard,
4803 build_fold_addr_expr (lvar));
4804 DECL_HAS_VALUE_EXPR_P (new_vard) = 1;
4805 }
4806
4807 if (nx)
4808 x = lang_hooks.decls.omp_clause_default_ctor
4809 (c, unshare_expr (ivar), x);
4810 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE__CONDTEMP_)
4811 {
4812 x = build2 (MODIFY_EXPR, TREE_TYPE (ivar),
4813 unshare_expr (ivar), x);
4814 nx = x;
4815 }
4816 if (nx && x)
4817 gimplify_and_add (x, &llist[0]);
4818 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
4819 && OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c))
4820 {
4821 tree v = new_var;
4822 if (!DECL_P (v))
4823 {
4824 gcc_assert (TREE_CODE (v) == MEM_REF);
4825 v = TREE_OPERAND (v, 0);
4826 gcc_assert (DECL_P (v));
4827 }
4828 v = *ctx->lastprivate_conditional_map->get (v);
4829 tree t = create_tmp_var (TREE_TYPE (v));
4830 tree z = build_zero_cst (TREE_TYPE (v));
4831 tree orig_v
4832 = build_outer_var_ref (var, ctx,
4833 OMP_CLAUSE_LASTPRIVATE);
4834 gimple_seq_add_stmt (dlist,
4835 gimple_build_assign (t, z));
4836 gcc_assert (DECL_HAS_VALUE_EXPR_P (v));
4837 tree civar = DECL_VALUE_EXPR (v);
4838 gcc_assert (TREE_CODE (civar) == ARRAY_REF);
4839 civar = unshare_expr (civar);
4840 TREE_OPERAND (civar, 1) = sctx.idx;
4841 x = build2 (MODIFY_EXPR, TREE_TYPE (t), t,
4842 unshare_expr (civar));
4843 x = build2 (COMPOUND_EXPR, TREE_TYPE (orig_v), x,
4844 build2 (MODIFY_EXPR, TREE_TYPE (orig_v),
4845 orig_v, unshare_expr (ivar)));
4846 tree cond = build2 (LT_EXPR, boolean_type_node, t,
4847 civar);
4848 x = build3 (COND_EXPR, void_type_node, cond, x,
4849 void_node);
4850 gimple_seq tseq = NULL;
4851 gimplify_and_add (x, &tseq);
4852 if (ctx->outer)
4853 lower_omp (&tseq, ctx->outer);
4854 gimple_seq_add_seq (&llist[1], tseq);
4855 }
4856 if (y)
4857 {
4858 y = lang_hooks.decls.omp_clause_dtor (c, ivar);
4859 if (y)
4860 {
4861 gimple_seq tseq = NULL;
4862
4863 dtor = y;
4864 gimplify_stmt (&dtor, &tseq);
4865 gimple_seq_add_seq (&llist[1], tseq);
4866 }
4867 }
4868 break;
4869 }
4870 if (omp_is_reference (var))
4871 {
4872 gcc_assert (TREE_CODE (new_var) == MEM_REF);
4873 tree new_vard = TREE_OPERAND (new_var, 0);
4874 gcc_assert (DECL_P (new_vard));
4875 tree type = TREE_TYPE (TREE_TYPE (new_vard));
4876 x = TYPE_SIZE_UNIT (type);
4877 if (TREE_CONSTANT (x))
4878 {
4879 x = create_tmp_var_raw (type, get_name (var));
4880 gimple_add_tmp_var (x);
4881 TREE_ADDRESSABLE (x) = 1;
4882 x = build_fold_addr_expr_loc (clause_loc, x);
4883 x = fold_convert_loc (clause_loc,
4884 TREE_TYPE (new_vard), x);
4885 gimplify_assign (new_vard, x, ilist);
4886 }
4887 }
4888 }
4889 if (nx)
4890 gimplify_and_add (nx, ilist);
4891 /* FALLTHRU */
4892
4893 do_dtor:
4894 x = lang_hooks.decls.omp_clause_dtor (c, new_var);
4895 if (x)
4896 {
4897 gimple_seq tseq = NULL;
4898
4899 dtor = x;
4900 gimplify_stmt (&dtor, &tseq);
4901 gimple_seq_add_seq (dlist, tseq);
4902 }
4903 break;
4904
4905 case OMP_CLAUSE_LINEAR:
4906 if (!OMP_CLAUSE_LINEAR_NO_COPYIN (c))
4907 goto do_firstprivate;
4908 if (OMP_CLAUSE_LINEAR_NO_COPYOUT (c))
4909 x = NULL;
4910 else
4911 x = build_outer_var_ref (var, ctx);
4912 goto do_private;
4913
4914 case OMP_CLAUSE_FIRSTPRIVATE:
4915 if (is_task_ctx (ctx))
4916 {
4917 if ((omp_is_reference (var)
4918 && !OMP_CLAUSE_FIRSTPRIVATE_NO_REFERENCE (c))
4919 || is_variable_sized (var))
4920 goto do_dtor;
4921 else if (is_global_var (maybe_lookup_decl_in_outer_ctx (var,
4922 ctx))
4923 || use_pointer_for_field (var, NULL))
4924 {
4925 x = build_receiver_ref (var, false, ctx);
4926 SET_DECL_VALUE_EXPR (new_var, x);
4927 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
4928 goto do_dtor;
4929 }
4930 }
4931 if (OMP_CLAUSE_FIRSTPRIVATE_NO_REFERENCE (c)
4932 && omp_is_reference (var))
4933 {
4934 x = build_outer_var_ref (var, ctx);
4935 gcc_assert (TREE_CODE (x) == MEM_REF
4936 && integer_zerop (TREE_OPERAND (x, 1)));
4937 x = TREE_OPERAND (x, 0);
4938 x = lang_hooks.decls.omp_clause_copy_ctor
4939 (c, unshare_expr (new_var), x);
4940 gimplify_and_add (x, ilist);
4941 goto do_dtor;
4942 }
4943 do_firstprivate:
4944 x = build_outer_var_ref (var, ctx);
4945 if (is_simd)
4946 {
4947 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
4948 && gimple_omp_for_combined_into_p (ctx->stmt))
4949 {
4950 tree t = OMP_CLAUSE_LINEAR_STEP (c);
4951 tree stept = TREE_TYPE (t);
4952 tree ct = omp_find_clause (clauses,
4953 OMP_CLAUSE__LOOPTEMP_);
4954 gcc_assert (ct);
4955 tree l = OMP_CLAUSE_DECL (ct);
4956 tree n1 = fd->loop.n1;
4957 tree step = fd->loop.step;
4958 tree itype = TREE_TYPE (l);
4959 if (POINTER_TYPE_P (itype))
4960 itype = signed_type_for (itype);
4961 l = fold_build2 (MINUS_EXPR, itype, l, n1);
4962 if (TYPE_UNSIGNED (itype)
4963 && fd->loop.cond_code == GT_EXPR)
4964 l = fold_build2 (TRUNC_DIV_EXPR, itype,
4965 fold_build1 (NEGATE_EXPR, itype, l),
4966 fold_build1 (NEGATE_EXPR,
4967 itype, step));
4968 else
4969 l = fold_build2 (TRUNC_DIV_EXPR, itype, l, step);
4970 t = fold_build2 (MULT_EXPR, stept,
4971 fold_convert (stept, l), t);
4972
4973 if (OMP_CLAUSE_LINEAR_ARRAY (c))
4974 {
4975 if (omp_is_reference (var))
4976 {
4977 gcc_assert (TREE_CODE (new_var) == MEM_REF);
4978 tree new_vard = TREE_OPERAND (new_var, 0);
4979 gcc_assert (DECL_P (new_vard));
4980 tree type = TREE_TYPE (TREE_TYPE (new_vard));
4981 nx = TYPE_SIZE_UNIT (type);
4982 if (TREE_CONSTANT (nx))
4983 {
4984 nx = create_tmp_var_raw (type,
4985 get_name (var));
4986 gimple_add_tmp_var (nx);
4987 TREE_ADDRESSABLE (nx) = 1;
4988 nx = build_fold_addr_expr_loc (clause_loc,
4989 nx);
4990 nx = fold_convert_loc (clause_loc,
4991 TREE_TYPE (new_vard),
4992 nx);
4993 gimplify_assign (new_vard, nx, ilist);
4994 }
4995 }
4996
4997 x = lang_hooks.decls.omp_clause_linear_ctor
4998 (c, new_var, x, t);
4999 gimplify_and_add (x, ilist);
5000 goto do_dtor;
5001 }
5002
5003 if (POINTER_TYPE_P (TREE_TYPE (x)))
5004 x = fold_build2 (POINTER_PLUS_EXPR,
5005 TREE_TYPE (x), x, t);
5006 else
5007 x = fold_build2 (PLUS_EXPR, TREE_TYPE (x), x, t);
5008 }
5009
5010 if ((OMP_CLAUSE_CODE (c) != OMP_CLAUSE_LINEAR
5011 || TREE_ADDRESSABLE (new_var)
5012 || omp_is_reference (var))
5013 && lower_rec_simd_input_clauses (new_var, ctx, &sctx,
5014 ivar, lvar))
5015 {
5016 if (omp_is_reference (var))
5017 {
5018 gcc_assert (TREE_CODE (new_var) == MEM_REF);
5019 tree new_vard = TREE_OPERAND (new_var, 0);
5020 gcc_assert (DECL_P (new_vard));
5021 SET_DECL_VALUE_EXPR (new_vard,
5022 build_fold_addr_expr (lvar));
5023 DECL_HAS_VALUE_EXPR_P (new_vard) = 1;
5024 }
5025 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR)
5026 {
5027 tree iv = create_tmp_var (TREE_TYPE (new_var));
5028 x = lang_hooks.decls.omp_clause_copy_ctor (c, iv, x);
5029 gimplify_and_add (x, ilist);
5030 gimple_stmt_iterator gsi
5031 = gsi_start_1 (gimple_omp_body_ptr (ctx->stmt));
5032 gassign *g
5033 = gimple_build_assign (unshare_expr (lvar), iv);
5034 gsi_insert_before_without_update (&gsi, g,
5035 GSI_SAME_STMT);
5036 tree t = OMP_CLAUSE_LINEAR_STEP (c);
5037 enum tree_code code = PLUS_EXPR;
5038 if (POINTER_TYPE_P (TREE_TYPE (new_var)))
5039 code = POINTER_PLUS_EXPR;
5040 g = gimple_build_assign (iv, code, iv, t);
5041 gsi_insert_before_without_update (&gsi, g,
5042 GSI_SAME_STMT);
5043 break;
5044 }
5045 x = lang_hooks.decls.omp_clause_copy_ctor
5046 (c, unshare_expr (ivar), x);
5047 gimplify_and_add (x, &llist[0]);
5048 x = lang_hooks.decls.omp_clause_dtor (c, ivar);
5049 if (x)
5050 {
5051 gimple_seq tseq = NULL;
5052
5053 dtor = x;
5054 gimplify_stmt (&dtor, &tseq);
5055 gimple_seq_add_seq (&llist[1], tseq);
5056 }
5057 break;
5058 }
5059 if (omp_is_reference (var))
5060 {
5061 gcc_assert (TREE_CODE (new_var) == MEM_REF);
5062 tree new_vard = TREE_OPERAND (new_var, 0);
5063 gcc_assert (DECL_P (new_vard));
5064 tree type = TREE_TYPE (TREE_TYPE (new_vard));
5065 nx = TYPE_SIZE_UNIT (type);
5066 if (TREE_CONSTANT (nx))
5067 {
5068 nx = create_tmp_var_raw (type, get_name (var));
5069 gimple_add_tmp_var (nx);
5070 TREE_ADDRESSABLE (nx) = 1;
5071 nx = build_fold_addr_expr_loc (clause_loc, nx);
5072 nx = fold_convert_loc (clause_loc,
5073 TREE_TYPE (new_vard), nx);
5074 gimplify_assign (new_vard, nx, ilist);
5075 }
5076 }
5077 }
5078 x = lang_hooks.decls.omp_clause_copy_ctor
5079 (c, unshare_expr (new_var), x);
5080 gimplify_and_add (x, ilist);
5081 goto do_dtor;
5082
5083 case OMP_CLAUSE__LOOPTEMP_:
5084 case OMP_CLAUSE__REDUCTEMP_:
5085 gcc_assert (is_taskreg_ctx (ctx));
5086 x = build_outer_var_ref (var, ctx);
5087 x = build2 (MODIFY_EXPR, TREE_TYPE (new_var), new_var, x);
5088 gimplify_and_add (x, ilist);
5089 break;
5090
5091 case OMP_CLAUSE_COPYIN:
5092 by_ref = use_pointer_for_field (var, NULL);
5093 x = build_receiver_ref (var, by_ref, ctx);
5094 x = lang_hooks.decls.omp_clause_assign_op (c, new_var, x);
5095 append_to_statement_list (x, &copyin_seq);
5096 copyin_by_ref |= by_ref;
5097 break;
5098
5099 case OMP_CLAUSE_REDUCTION:
5100 case OMP_CLAUSE_IN_REDUCTION:
5101 /* OpenACC reductions are initialized using the
5102 GOACC_REDUCTION internal function. */
5103 if (is_gimple_omp_oacc (ctx->stmt))
5104 break;
5105 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
5106 {
5107 tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
5108 gimple *tseq;
5109 tree ptype = TREE_TYPE (placeholder);
5110 if (cond)
5111 {
5112 x = error_mark_node;
5113 if (OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c)
5114 && !task_reduction_needs_orig_p)
5115 x = var;
5116 else if (OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c))
5117 {
5118 tree pptype = build_pointer_type (ptype);
5119 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION)
5120 x = build4 (ARRAY_REF, ptr_type_node, tskred_avar,
5121 size_int (task_reduction_cnt_full
5122 + task_reduction_cntorig - 1),
5123 NULL_TREE, NULL_TREE);
5124 else
5125 {
5126 unsigned int idx
5127 = *ctx->task_reduction_map->get (c);
5128 x = task_reduction_read (ilist, tskred_temp,
5129 pptype, 7 + 3 * idx);
5130 }
5131 x = fold_convert (pptype, x);
5132 x = build_simple_mem_ref (x);
5133 }
5134 }
5135 else
5136 {
5137 x = build_outer_var_ref (var, ctx);
5138
5139 if (omp_is_reference (var)
5140 && !useless_type_conversion_p (ptype, TREE_TYPE (x)))
5141 x = build_fold_addr_expr_loc (clause_loc, x);
5142 }
5143 SET_DECL_VALUE_EXPR (placeholder, x);
5144 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
5145 tree new_vard = new_var;
5146 if (omp_is_reference (var))
5147 {
5148 gcc_assert (TREE_CODE (new_var) == MEM_REF);
5149 new_vard = TREE_OPERAND (new_var, 0);
5150 gcc_assert (DECL_P (new_vard));
5151 }
5152 if (is_simd
5153 && lower_rec_simd_input_clauses (new_var, ctx, &sctx,
5154 ivar, lvar))
5155 {
5156 if (new_vard == new_var)
5157 {
5158 gcc_assert (DECL_VALUE_EXPR (new_var) == lvar);
5159 SET_DECL_VALUE_EXPR (new_var, ivar);
5160 }
5161 else
5162 {
5163 SET_DECL_VALUE_EXPR (new_vard,
5164 build_fold_addr_expr (ivar));
5165 DECL_HAS_VALUE_EXPR_P (new_vard) = 1;
5166 }
5167 x = lang_hooks.decls.omp_clause_default_ctor
5168 (c, unshare_expr (ivar),
5169 build_outer_var_ref (var, ctx));
5170 if (x)
5171 gimplify_and_add (x, &llist[0]);
5172 if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c))
5173 {
5174 tseq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c);
5175 lower_omp (&tseq, ctx);
5176 gimple_seq_add_seq (&llist[0], tseq);
5177 }
5178 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL;
5179 tseq = OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c);
5180 lower_omp (&tseq, ctx);
5181 gimple_seq_add_seq (&llist[1], tseq);
5182 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
5183 DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
5184 if (new_vard == new_var)
5185 SET_DECL_VALUE_EXPR (new_var, lvar);
5186 else
5187 SET_DECL_VALUE_EXPR (new_vard,
5188 build_fold_addr_expr (lvar));
5189 x = lang_hooks.decls.omp_clause_dtor (c, ivar);
5190 if (x)
5191 {
5192 tseq = NULL;
5193 dtor = x;
5194 gimplify_stmt (&dtor, &tseq);
5195 gimple_seq_add_seq (&llist[1], tseq);
5196 }
5197 break;
5198 }
5199 /* If this is a reference to constant size reduction var
5200 with placeholder, we haven't emitted the initializer
5201 for it because it is undesirable if SIMD arrays are used.
5202 But if they aren't used, we need to emit the deferred
5203 initialization now. */
5204 else if (omp_is_reference (var) && is_simd)
5205 handle_simd_reference (clause_loc, new_vard, ilist);
5206
5207 tree lab2 = NULL_TREE;
5208 if (cond)
5209 {
5210 gimple *g;
5211 if (!is_parallel_ctx (ctx))
5212 {
5213 tree condv = create_tmp_var (boolean_type_node);
5214 tree m = build_simple_mem_ref (cond);
5215 g = gimple_build_assign (condv, m);
5216 gimple_seq_add_stmt (ilist, g);
5217 tree lab1
5218 = create_artificial_label (UNKNOWN_LOCATION);
5219 lab2 = create_artificial_label (UNKNOWN_LOCATION);
5220 g = gimple_build_cond (NE_EXPR, condv,
5221 boolean_false_node,
5222 lab2, lab1);
5223 gimple_seq_add_stmt (ilist, g);
5224 gimple_seq_add_stmt (ilist,
5225 gimple_build_label (lab1));
5226 }
5227 g = gimple_build_assign (build_simple_mem_ref (cond),
5228 boolean_true_node);
5229 gimple_seq_add_stmt (ilist, g);
5230 }
5231 x = lang_hooks.decls.omp_clause_default_ctor
5232 (c, unshare_expr (new_var),
5233 cond ? NULL_TREE
5234 : build_outer_var_ref (var, ctx));
5235 if (x)
5236 gimplify_and_add (x, ilist);
5237 if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c))
5238 {
5239 tseq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c);
5240 lower_omp (&tseq, ctx);
5241 gimple_seq_add_seq (ilist, tseq);
5242 }
5243 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL;
5244 if (is_simd)
5245 {
5246 tseq = OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c);
5247 lower_omp (&tseq, ctx);
5248 gimple_seq_add_seq (dlist, tseq);
5249 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
5250 }
5251 DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
5252 if (cond)
5253 {
5254 if (lab2)
5255 gimple_seq_add_stmt (ilist, gimple_build_label (lab2));
5256 break;
5257 }
5258 goto do_dtor;
5259 }
5260 else
5261 {
5262 x = omp_reduction_init (c, TREE_TYPE (new_var));
5263 gcc_assert (TREE_CODE (TREE_TYPE (new_var)) != ARRAY_TYPE);
5264 enum tree_code code = OMP_CLAUSE_REDUCTION_CODE (c);
5265
5266 if (cond)
5267 {
5268 gimple *g;
5269 tree lab2 = NULL_TREE;
5270 /* GOMP_taskgroup_reduction_register memsets the whole
5271 array to zero. If the initializer is zero, we don't
5272 need to initialize it again, just mark it as ever
5273 used unconditionally, i.e. cond = true. */
5274 if (initializer_zerop (x))
5275 {
5276 g = gimple_build_assign (build_simple_mem_ref (cond),
5277 boolean_true_node);
5278 gimple_seq_add_stmt (ilist, g);
5279 break;
5280 }
5281
5282 /* Otherwise, emit
5283 if (!cond) { cond = true; new_var = x; } */
5284 if (!is_parallel_ctx (ctx))
5285 {
5286 tree condv = create_tmp_var (boolean_type_node);
5287 tree m = build_simple_mem_ref (cond);
5288 g = gimple_build_assign (condv, m);
5289 gimple_seq_add_stmt (ilist, g);
5290 tree lab1
5291 = create_artificial_label (UNKNOWN_LOCATION);
5292 lab2 = create_artificial_label (UNKNOWN_LOCATION);
5293 g = gimple_build_cond (NE_EXPR, condv,
5294 boolean_false_node,
5295 lab2, lab1);
5296 gimple_seq_add_stmt (ilist, g);
5297 gimple_seq_add_stmt (ilist,
5298 gimple_build_label (lab1));
5299 }
5300 g = gimple_build_assign (build_simple_mem_ref (cond),
5301 boolean_true_node);
5302 gimple_seq_add_stmt (ilist, g);
5303 gimplify_assign (new_var, x, ilist);
5304 if (lab2)
5305 gimple_seq_add_stmt (ilist, gimple_build_label (lab2));
5306 break;
5307 }
5308
5309 /* reduction(-:var) sums up the partial results, so it
5310 acts identically to reduction(+:var). */
5311 if (code == MINUS_EXPR)
5312 code = PLUS_EXPR;
5313
5314 tree new_vard = new_var;
5315 if (is_simd && omp_is_reference (var))
5316 {
5317 gcc_assert (TREE_CODE (new_var) == MEM_REF);
5318 new_vard = TREE_OPERAND (new_var, 0);
5319 gcc_assert (DECL_P (new_vard));
5320 }
5321 if (is_simd
5322 && lower_rec_simd_input_clauses (new_var, ctx, &sctx,
5323 ivar, lvar))
5324 {
5325 tree ref = build_outer_var_ref (var, ctx);
5326
5327 gimplify_assign (unshare_expr (ivar), x, &llist[0]);
5328
5329 if (sctx.is_simt)
5330 {
5331 if (!simt_lane)
5332 simt_lane = create_tmp_var (unsigned_type_node);
5333 x = build_call_expr_internal_loc
5334 (UNKNOWN_LOCATION, IFN_GOMP_SIMT_XCHG_BFLY,
5335 TREE_TYPE (ivar), 2, ivar, simt_lane);
5336 x = build2 (code, TREE_TYPE (ivar), ivar, x);
5337 gimplify_assign (ivar, x, &llist[2]);
5338 }
5339 x = build2 (code, TREE_TYPE (ref), ref, ivar);
5340 ref = build_outer_var_ref (var, ctx);
5341 gimplify_assign (ref, x, &llist[1]);
5342
5343 if (new_vard != new_var)
5344 {
5345 SET_DECL_VALUE_EXPR (new_vard,
5346 build_fold_addr_expr (lvar));
5347 DECL_HAS_VALUE_EXPR_P (new_vard) = 1;
5348 }
5349 }
5350 else
5351 {
5352 if (omp_is_reference (var) && is_simd)
5353 handle_simd_reference (clause_loc, new_vard, ilist);
5354 gimplify_assign (new_var, x, ilist);
5355 if (is_simd)
5356 {
5357 tree ref = build_outer_var_ref (var, ctx);
5358
5359 x = build2 (code, TREE_TYPE (ref), ref, new_var);
5360 ref = build_outer_var_ref (var, ctx);
5361 gimplify_assign (ref, x, dlist);
5362 }
5363 }
5364 }
5365 break;
5366
5367 default:
5368 gcc_unreachable ();
5369 }
5370 }
5371 }
5372 if (tskred_avar)
5373 {
5374 tree clobber = build_constructor (TREE_TYPE (tskred_avar), NULL);
5375 TREE_THIS_VOLATILE (clobber) = 1;
5376 gimple_seq_add_stmt (ilist, gimple_build_assign (tskred_avar, clobber));
5377 }
5378
5379 if (known_eq (sctx.max_vf, 1U))
5380 {
5381 sctx.is_simt = false;
5382 if (ctx->lastprivate_conditional_map)
5383 {
5384 if (gimple_omp_for_combined_into_p (ctx->stmt))
5385 {
5386 /* Signal to lower_omp_1 that it should use parent context. */
5387 ctx->combined_into_simd_safelen0 = true;
5388 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
5389 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
5390 && OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c))
5391 {
5392 tree o = lookup_decl (OMP_CLAUSE_DECL (c), ctx);
5393 tree *v
5394 = ctx->lastprivate_conditional_map->get (o);
5395 tree po = lookup_decl (OMP_CLAUSE_DECL (c), ctx->outer);
5396 tree *pv
5397 = ctx->outer->lastprivate_conditional_map->get (po);
5398 *v = *pv;
5399 }
5400 }
5401 else
5402 {
5403 /* When not vectorized, treat lastprivate(conditional:) like
5404 normal lastprivate, as there will be just one simd lane
5405 writing the privatized variable. */
5406 delete ctx->lastprivate_conditional_map;
5407 ctx->lastprivate_conditional_map = NULL;
5408 }
5409 }
5410 }
5411
5412 if (nonconst_simd_if)
5413 {
5414 if (sctx.lane == NULL_TREE)
5415 {
5416 sctx.idx = create_tmp_var (unsigned_type_node);
5417 sctx.lane = create_tmp_var (unsigned_type_node);
5418 }
5419 /* FIXME: For now. */
5420 sctx.is_simt = false;
5421 }
5422
5423 if (sctx.lane || sctx.is_simt)
5424 {
5425 uid = create_tmp_var (ptr_type_node, "simduid");
5426 /* Don't want uninit warnings on simduid, it is always uninitialized,
5427 but we use it not for the value, but for the DECL_UID only. */
5428 TREE_NO_WARNING (uid) = 1;
5429 c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__SIMDUID_);
5430 OMP_CLAUSE__SIMDUID__DECL (c) = uid;
5431 OMP_CLAUSE_CHAIN (c) = gimple_omp_for_clauses (ctx->stmt);
5432 gimple_omp_for_set_clauses (ctx->stmt, c);
5433 }
5434 /* Emit calls denoting privatized variables and initializing a pointer to
5435 structure that holds private variables as fields after ompdevlow pass. */
5436 if (sctx.is_simt)
5437 {
5438 sctx.simt_eargs[0] = uid;
5439 gimple *g
5440 = gimple_build_call_internal_vec (IFN_GOMP_SIMT_ENTER, sctx.simt_eargs);
5441 gimple_call_set_lhs (g, uid);
5442 gimple_seq_add_stmt (ilist, g);
5443 sctx.simt_eargs.release ();
5444
5445 simtrec = create_tmp_var (ptr_type_node, ".omp_simt");
5446 g = gimple_build_call_internal (IFN_GOMP_SIMT_ENTER_ALLOC, 1, uid);
5447 gimple_call_set_lhs (g, simtrec);
5448 gimple_seq_add_stmt (ilist, g);
5449 }
5450 if (sctx.lane)
5451 {
5452 gimple *g = gimple_build_call_internal (IFN_GOMP_SIMD_LANE,
5453 1 + (nonconst_simd_if != NULL),
5454 uid, nonconst_simd_if);
5455 gimple_call_set_lhs (g, sctx.lane);
5456 gimple_stmt_iterator gsi = gsi_start_1 (gimple_omp_body_ptr (ctx->stmt));
5457 gsi_insert_before_without_update (&gsi, g, GSI_SAME_STMT);
5458 g = gimple_build_assign (sctx.lane, INTEGER_CST,
5459 build_int_cst (unsigned_type_node, 0));
5460 gimple_seq_add_stmt (ilist, g);
5461 /* Emit reductions across SIMT lanes in log_2(simt_vf) steps. */
5462 if (llist[2])
5463 {
5464 tree simt_vf = create_tmp_var (unsigned_type_node);
5465 g = gimple_build_call_internal (IFN_GOMP_SIMT_VF, 0);
5466 gimple_call_set_lhs (g, simt_vf);
5467 gimple_seq_add_stmt (dlist, g);
5468
5469 tree t = build_int_cst (unsigned_type_node, 1);
5470 g = gimple_build_assign (simt_lane, INTEGER_CST, t);
5471 gimple_seq_add_stmt (dlist, g);
5472
5473 t = build_int_cst (unsigned_type_node, 0);
5474 g = gimple_build_assign (sctx.idx, INTEGER_CST, t);
5475 gimple_seq_add_stmt (dlist, g);
5476
5477 tree body = create_artificial_label (UNKNOWN_LOCATION);
5478 tree header = create_artificial_label (UNKNOWN_LOCATION);
5479 tree end = create_artificial_label (UNKNOWN_LOCATION);
5480 gimple_seq_add_stmt (dlist, gimple_build_goto (header));
5481 gimple_seq_add_stmt (dlist, gimple_build_label (body));
5482
5483 gimple_seq_add_seq (dlist, llist[2]);
5484
5485 g = gimple_build_assign (simt_lane, LSHIFT_EXPR, simt_lane, integer_one_node);
5486 gimple_seq_add_stmt (dlist, g);
5487
5488 gimple_seq_add_stmt (dlist, gimple_build_label (header));
5489 g = gimple_build_cond (LT_EXPR, simt_lane, simt_vf, body, end);
5490 gimple_seq_add_stmt (dlist, g);
5491
5492 gimple_seq_add_stmt (dlist, gimple_build_label (end));
5493 }
5494 for (int i = 0; i < 2; i++)
5495 if (llist[i])
5496 {
5497 tree vf = create_tmp_var (unsigned_type_node);
5498 g = gimple_build_call_internal (IFN_GOMP_SIMD_VF, 1, uid);
5499 gimple_call_set_lhs (g, vf);
5500 gimple_seq *seq = i == 0 ? ilist : dlist;
5501 gimple_seq_add_stmt (seq, g);
5502 tree t = build_int_cst (unsigned_type_node, 0);
5503 g = gimple_build_assign (sctx.idx, INTEGER_CST, t);
5504 gimple_seq_add_stmt (seq, g);
5505 tree body = create_artificial_label (UNKNOWN_LOCATION);
5506 tree header = create_artificial_label (UNKNOWN_LOCATION);
5507 tree end = create_artificial_label (UNKNOWN_LOCATION);
5508 gimple_seq_add_stmt (seq, gimple_build_goto (header));
5509 gimple_seq_add_stmt (seq, gimple_build_label (body));
5510 gimple_seq_add_seq (seq, llist[i]);
5511 t = build_int_cst (unsigned_type_node, 1);
5512 g = gimple_build_assign (sctx.idx, PLUS_EXPR, sctx.idx, t);
5513 gimple_seq_add_stmt (seq, g);
5514 gimple_seq_add_stmt (seq, gimple_build_label (header));
5515 g = gimple_build_cond (LT_EXPR, sctx.idx, vf, body, end);
5516 gimple_seq_add_stmt (seq, g);
5517 gimple_seq_add_stmt (seq, gimple_build_label (end));
5518 }
5519 }
5520 if (sctx.is_simt)
5521 {
5522 gimple_seq_add_seq (dlist, sctx.simt_dlist);
5523 gimple *g
5524 = gimple_build_call_internal (IFN_GOMP_SIMT_EXIT, 1, simtrec);
5525 gimple_seq_add_stmt (dlist, g);
5526 }
5527
5528 /* The copyin sequence is not to be executed by the main thread, since
5529 that would result in self-copies. Perhaps not visible to scalars,
5530 but it certainly is to C++ operator=. */
5531 if (copyin_seq)
5532 {
5533 x = build_call_expr (builtin_decl_explicit (BUILT_IN_OMP_GET_THREAD_NUM),
5534 0);
5535 x = build2 (NE_EXPR, boolean_type_node, x,
5536 build_int_cst (TREE_TYPE (x), 0));
5537 x = build3 (COND_EXPR, void_type_node, x, copyin_seq, NULL);
5538 gimplify_and_add (x, ilist);
5539 }
5540
5541 /* If any copyin variable is passed by reference, we must ensure the
5542 master thread doesn't modify it before it is copied over in all
5543 threads. Similarly for variables in both firstprivate and
5544 lastprivate clauses we need to ensure the lastprivate copying
5545 happens after firstprivate copying in all threads. And similarly
5546 for UDRs if initializer expression refers to omp_orig. */
5547 if (copyin_by_ref || lastprivate_firstprivate || reduction_omp_orig_ref)
5548 {
5549 /* Don't add any barrier for #pragma omp simd or
5550 #pragma omp distribute. */
5551 if (!is_task_ctx (ctx)
5552 && (gimple_code (ctx->stmt) != GIMPLE_OMP_FOR
5553 || gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_FOR))
5554 gimple_seq_add_stmt (ilist, omp_build_barrier (NULL_TREE));
5555 }
5556
5557 /* If max_vf is non-zero, then we can use only a vectorization factor
5558 up to the max_vf we chose. So stick it into the safelen clause. */
5559 if (maybe_ne (sctx.max_vf, 0U))
5560 {
5561 tree c = omp_find_clause (gimple_omp_for_clauses (ctx->stmt),
5562 OMP_CLAUSE_SAFELEN);
5563 poly_uint64 safe_len;
5564 if (c == NULL_TREE
5565 || (poly_int_tree_p (OMP_CLAUSE_SAFELEN_EXPR (c), &safe_len)
5566 && maybe_gt (safe_len, sctx.max_vf)))
5567 {
5568 c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE_SAFELEN);
5569 OMP_CLAUSE_SAFELEN_EXPR (c) = build_int_cst (integer_type_node,
5570 sctx.max_vf);
5571 OMP_CLAUSE_CHAIN (c) = gimple_omp_for_clauses (ctx->stmt);
5572 gimple_omp_for_set_clauses (ctx->stmt, c);
5573 }
5574 }
5575 }
5576
/* Create temporary variables for lastprivate(conditional:) implementation
   in context CTX with CLAUSES.  For each lastprivate(conditional:) clause
   an iteration-counter temporary is created, artificial
   OMP_CLAUSE__CONDTEMP_ clauses are linked into *CLAUSES, and the mapping
   from the privatized decl to its counter variable is recorded in
   ctx->lastprivate_conditional_map.  */

static void
lower_lastprivate_conditional_clauses (tree *clauses, omp_context *ctx)
{
  /* Type of the counter used to remember in which iteration the
     conditional store to the lastprivate variable last fired.  */
  tree iter_type = NULL_TREE;
  /* Shared storage holding one counter per conditional clause
     (non-simd constructs only).  */
  tree cond_ptr = NULL_TREE;
  /* Variable holding the current iteration number.  */
  tree iter_var = NULL_TREE;
  bool is_simd = (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
		  && gimple_omp_for_kind (ctx->stmt) & GF_OMP_FOR_SIMD);
  tree next = *clauses;
  for (tree c = *clauses; c; c = OMP_CLAUSE_CHAIN (c))
    if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
	&& OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c))
      {
	if (is_simd)
	  {
	    /* For simd constructs a _condtemp_ clause is expected to
	       already exist for each conditional lastprivate; NEXT walks
	       forward so each conditional clause pairs with the next
	       unclaimed _condtemp_.  */
	    tree cc = omp_find_clause (next, OMP_CLAUSE__CONDTEMP_);
	    gcc_assert (cc);
	    if (iter_type == NULL_TREE)
	      {
		/* First conditional clause seen: create the iteration
		   counter variable and prepend it as a _condtemp_
		   clause flagged with _ITER, and allocate the map.  */
		iter_type = TREE_TYPE (OMP_CLAUSE_DECL (cc));
		iter_var = create_tmp_var_raw (iter_type);
		DECL_CONTEXT (iter_var) = current_function_decl;
		DECL_SEEN_IN_BIND_EXPR_P (iter_var) = 1;
		DECL_CHAIN (iter_var) = ctx->block_vars;
		ctx->block_vars = iter_var;
		tree c3
		  = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__CONDTEMP_);
		OMP_CLAUSE__CONDTEMP__ITER (c3) = 1;
		OMP_CLAUSE_DECL (c3) = iter_var;
		OMP_CLAUSE_CHAIN (c3) = *clauses;
		*clauses = c3;
		ctx->lastprivate_conditional_map = new hash_map<tree, tree>;
	      }
	    next = OMP_CLAUSE_CHAIN (cc);
	    /* Map the privatized decl to the _condtemp_ decl in CTX.  */
	    tree o = lookup_decl (OMP_CLAUSE_DECL (c), ctx);
	    tree v = lookup_decl (OMP_CLAUSE_DECL (cc), ctx);
	    ctx->lastprivate_conditional_map->put (o, v);
	    continue;
	  }
	if (iter_type == NULL)
	  {
	    /* First conditional clause on a non-simd construct: derive
	       the counter type from the loop iterator type (unsigned
	       variant), or use plain unsigned for sections.  */
	    if (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR)
	      {
		struct omp_for_data fd;
		omp_extract_for_data (as_a <gomp_for *> (ctx->stmt), &fd,
				      NULL);
		iter_type = unsigned_type_for (fd.iter_type);
	      }
	    else if (gimple_code (ctx->stmt) == GIMPLE_OMP_SECTIONS)
	      iter_type = unsigned_type_node;
	    tree c2 = omp_find_clause (*clauses, OMP_CLAUSE__CONDTEMP_);
	    if (c2)
	      {
		/* Reuse an existing _condtemp_ clause, redirecting its
		   decl to the version from the outer context.  */
		cond_ptr
		  = lookup_decl_in_outer_ctx (OMP_CLAUSE_DECL (c2), ctx);
		OMP_CLAUSE_DECL (c2) = cond_ptr;
	      }
	    else
	      {
		/* No _condtemp_ yet: create the pointer to the shared
		   counter storage and prepend it as a new clause.  */
		cond_ptr = create_tmp_var_raw (build_pointer_type (iter_type));
		DECL_CONTEXT (cond_ptr) = current_function_decl;
		DECL_SEEN_IN_BIND_EXPR_P (cond_ptr) = 1;
		DECL_CHAIN (cond_ptr) = ctx->block_vars;
		ctx->block_vars = cond_ptr;
		c2 = build_omp_clause (UNKNOWN_LOCATION,
				       OMP_CLAUSE__CONDTEMP_);
		OMP_CLAUSE_DECL (c2) = cond_ptr;
		OMP_CLAUSE_CHAIN (c2) = *clauses;
		*clauses = c2;
	      }
	    /* Create the iteration counter and chain it as a second
	       _condtemp_ clause (with the _ITER flag) right after C2.  */
	    iter_var = create_tmp_var_raw (iter_type);
	    DECL_CONTEXT (iter_var) = current_function_decl;
	    DECL_SEEN_IN_BIND_EXPR_P (iter_var) = 1;
	    DECL_CHAIN (iter_var) = ctx->block_vars;
	    ctx->block_vars = iter_var;
	    tree c3
	      = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__CONDTEMP_);
	    OMP_CLAUSE__CONDTEMP__ITER (c3) = 1;
	    OMP_CLAUSE_DECL (c3) = iter_var;
	    OMP_CLAUSE_CHAIN (c3) = OMP_CLAUSE_CHAIN (c2);
	    OMP_CLAUSE_CHAIN (c2) = c3;
	    ctx->lastprivate_conditional_map = new hash_map<tree, tree>;
	  }
	/* Per-clause counter variable, mapped from the privatized decl.  */
	tree v = create_tmp_var_raw (iter_type);
	DECL_CONTEXT (v) = current_function_decl;
	DECL_SEEN_IN_BIND_EXPR_P (v) = 1;
	DECL_CHAIN (v) = ctx->block_vars;
	ctx->block_vars = v;
	tree o = lookup_decl (OMP_CLAUSE_DECL (c), ctx);
	ctx->lastprivate_conditional_map->put (o, v);
      }
}
5672
5673
/* Generate code to implement the LASTPRIVATE clauses.  This is used for
   both parallel and workshare constructs.  PREDICATE may be NULL if it's
   always true.  BODY_P is the sequence to insert early initialization
   if needed, STMT_LIST is where the non-conditional lastprivate handling
   goes into and CSTMT_LIST is a sequence that needs to be run in a critical
   section.  */

static void
lower_lastprivate_clauses (tree clauses, tree predicate, gimple_seq *body_p,
			   gimple_seq *stmt_list, gimple_seq *cstmt_list,
			   omp_context *ctx)
{
  tree x, c, label = NULL, orig_clauses = clauses;
  bool par_clauses = false;
  tree simduid = NULL, lastlane = NULL, simtcond = NULL, simtlast = NULL;
  /* Running offset into the shared buffer of lastprivate(conditional:)
     counters addressed through cond_ptr below; bumped per clause.  */
  unsigned HOST_WIDE_INT conditional_off = 0;

  /* Early exit if there are no lastprivate or linear clauses.  */
  for (; clauses ; clauses = OMP_CLAUSE_CHAIN (clauses))
    if (OMP_CLAUSE_CODE (clauses) == OMP_CLAUSE_LASTPRIVATE
	|| (OMP_CLAUSE_CODE (clauses) == OMP_CLAUSE_LINEAR
	    && !OMP_CLAUSE_LINEAR_NO_COPYOUT (clauses)))
      break;
  if (clauses == NULL)
    {
      /* If this was a workshare clause, see if it had been combined
	 with its parallel.  In that case, look for the clauses on the
	 parallel statement itself.  */
      if (is_parallel_ctx (ctx))
	return;

      ctx = ctx->outer;
      if (ctx == NULL || !is_parallel_ctx (ctx))
	return;

      clauses = omp_find_clause (gimple_omp_parallel_clauses (ctx->stmt),
				 OMP_CLAUSE_LASTPRIVATE);
      if (clauses == NULL)
	return;
      par_clauses = true;
    }

  /* For simd loops, note whether SIMT lowering applies (_simt_ clause)
     and fetch the simduid decl used to identify "omp simd array"
     accesses further below.  */
  bool maybe_simt = false;
  if (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
      && gimple_omp_for_kind (ctx->stmt) & GF_OMP_FOR_SIMD)
    {
      maybe_simt = omp_find_clause (orig_clauses, OMP_CLAUSE__SIMT_);
      simduid = omp_find_clause (orig_clauses, OMP_CLAUSE__SIMDUID_);
      if (simduid)
	simduid = OMP_CLAUSE__SIMDUID__DECL (simduid);
    }

  if (predicate)
    {
      /* Guard the copy-out code: emit a conditional branch that skips
	 to LABEL (placed at the very end) when PREDICATE is false.
	 For SIMT, first vote across lanes via GOMP_SIMT_VOTE_ANY so the
	 guarded region is entered if any lane's predicate holds.  */
      gcond *stmt;
      tree label_true, arm1, arm2;
      enum tree_code pred_code = TREE_CODE (predicate);

      label = create_artificial_label (UNKNOWN_LOCATION);
      label_true = create_artificial_label (UNKNOWN_LOCATION);
      if (TREE_CODE_CLASS (pred_code) == tcc_comparison)
	{
	  /* Comparison predicate: gimplify both operands separately.  */
	  arm1 = TREE_OPERAND (predicate, 0);
	  arm2 = TREE_OPERAND (predicate, 1);
	  gimplify_expr (&arm1, stmt_list, NULL, is_gimple_val, fb_rvalue);
	  gimplify_expr (&arm2, stmt_list, NULL, is_gimple_val, fb_rvalue);
	}
      else
	{
	  /* Otherwise treat PREDICATE as a boolean value and test
	     it against false.  */
	  arm1 = predicate;
	  gimplify_expr (&arm1, stmt_list, NULL, is_gimple_val, fb_rvalue);
	  arm2 = boolean_false_node;
	  pred_code = NE_EXPR;
	}
      if (maybe_simt)
	{
	  c = build2 (pred_code, boolean_type_node, arm1, arm2);
	  c = fold_convert (integer_type_node, c);
	  simtcond = create_tmp_var (integer_type_node);
	  gimplify_assign (simtcond, c, stmt_list);
	  gcall *g = gimple_build_call_internal (IFN_GOMP_SIMT_VOTE_ANY,
						 1, simtcond);
	  c = create_tmp_var (integer_type_node);
	  gimple_call_set_lhs (g, c);
	  gimple_seq_add_stmt (stmt_list, g);
	  stmt = gimple_build_cond (NE_EXPR, c, integer_zero_node,
				    label_true, label);
	}
      else
	stmt = gimple_build_cond (pred_code, arm1, arm2, label_true, label);
      gimple_seq_add_stmt (stmt_list, stmt);
      gimple_seq_add_stmt (stmt_list, gimple_build_label (label_true));
    }

  tree cond_ptr = NULL_TREE;
  for (c = clauses; c ;)
    {
      tree var, new_var;
      location_t clause_loc = OMP_CLAUSE_LOCATION (c);
      /* Where this clause's copy-out statements go; redirected to
	 CSTMT_LIST for lastprivate(conditional:) clauses.  */
      gimple_seq *this_stmt_list = stmt_list;
      tree lab2 = NULL_TREE;

      if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
	  && OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c)
	  && ctx->lastprivate_conditional_map
	  && !ctx->combined_into_simd_safelen0)
	{
	  gcc_assert (body_p);
	  /* For simd loops (simduid set) the conditional handling is
	     not emitted here; skip straight to the next clause.  */
	  if (simduid)
	    goto next;
	  if (cond_ptr == NULL_TREE)
	    {
	      cond_ptr = omp_find_clause (orig_clauses, OMP_CLAUSE__CONDTEMP_);
	      cond_ptr = OMP_CLAUSE_DECL (cond_ptr);
	    }
	  /* Zero the per-thread counter V early (BODY_P), then in the
	     critical section compare V against the counter stored in the
	     shared slot MEM and, when larger, overwrite it; the actual
	     copy-out below is emitted between LAB1 and LAB2 so it only
	     runs when this thread's counter won.  */
	  tree type = TREE_TYPE (TREE_TYPE (cond_ptr));
	  tree o = lookup_decl (OMP_CLAUSE_DECL (c), ctx);
	  tree v = *ctx->lastprivate_conditional_map->get (o);
	  gimplify_assign (v, build_zero_cst (type), body_p);
	  this_stmt_list = cstmt_list;
	  tree mem;
	  if (POINTER_TYPE_P (TREE_TYPE (cond_ptr)))
	    {
	      /* cond_ptr is a pointer: address the slot by byte offset.  */
	      mem = build2 (MEM_REF, type, cond_ptr,
			    build_int_cst (TREE_TYPE (cond_ptr),
					   conditional_off));
	      conditional_off += tree_to_uhwi (TYPE_SIZE_UNIT (type));
	    }
	  else
	    /* cond_ptr is an array: index the slot directly.  */
	    mem = build4 (ARRAY_REF, type, cond_ptr,
			  size_int (conditional_off++), NULL_TREE, NULL_TREE);
	  /* MEM2 is an unshared copy used for the store below, since
	     MEM itself is gimplified for the comparison.  */
	  tree mem2 = copy_node (mem);
	  gimple_seq seq = NULL;
	  mem = force_gimple_operand (mem, &seq, true, NULL_TREE);
	  gimple_seq_add_seq (this_stmt_list, seq);
	  tree lab1 = create_artificial_label (UNKNOWN_LOCATION);
	  lab2 = create_artificial_label (UNKNOWN_LOCATION);
	  gimple *g = gimple_build_cond (GT_EXPR, v, mem, lab1, lab2);
	  gimple_seq_add_stmt (this_stmt_list, g);
	  gimple_seq_add_stmt (this_stmt_list, gimple_build_label (lab1));
	  gimplify_assign (mem2, v, this_stmt_list);
	}

      if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
	  || (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
	      && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c)))
	{
	  var = OMP_CLAUSE_DECL (c);
	  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
	      && OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c)
	      && is_taskloop_ctx (ctx))
	    {
	      /* For firstprivate+lastprivate on a taskloop, the private
		 copy lives in the enclosing task context.  */
	      gcc_checking_assert (ctx->outer && is_task_ctx (ctx->outer));
	      new_var = lookup_decl (var, ctx->outer);
	    }
	  else
	    {
	      new_var = lookup_decl (var, ctx);
	      /* Avoid uninitialized warnings for lastprivate and
		 for linear iterators.  */
	      if (predicate
		  && (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
		      || OMP_CLAUSE_LINEAR_NO_COPYIN (c)))
		TREE_NO_WARNING (new_var) = 1;
	    }

	  if (!maybe_simt && simduid && DECL_HAS_VALUE_EXPR_P (new_var))
	    {
	      /* The privatized var was spread over an "omp simd array";
		 read back the element of the lane reported by
		 GOMP_SIMD_LAST_LANE (computed once and reused).  */
	      tree val = DECL_VALUE_EXPR (new_var);
	      if (TREE_CODE (val) == ARRAY_REF
		  && VAR_P (TREE_OPERAND (val, 0))
		  && lookup_attribute ("omp simd array",
				       DECL_ATTRIBUTES (TREE_OPERAND (val,
								      0))))
		{
		  if (lastlane == NULL)
		    {
		      lastlane = create_tmp_var (unsigned_type_node);
		      gcall *g
			= gimple_build_call_internal (IFN_GOMP_SIMD_LAST_LANE,
						      2, simduid,
						      TREE_OPERAND (val, 1));
		      gimple_call_set_lhs (g, lastlane);
		      gimple_seq_add_stmt (this_stmt_list, g);
		    }
		  new_var = build4 (ARRAY_REF, TREE_TYPE (val),
				    TREE_OPERAND (val, 0), lastlane,
				    NULL_TREE, NULL_TREE);
		}
	    }
	  else if (maybe_simt)
	    {
	      /* SIMT: fetch the value from the lane returned by
		 GOMP_SIMT_LAST_LANE via GOMP_SIMT_XCHG_IDX.  */
	      tree val = (DECL_HAS_VALUE_EXPR_P (new_var)
			  ? DECL_VALUE_EXPR (new_var)
			  : new_var);
	      if (simtlast == NULL)
		{
		  simtlast = create_tmp_var (unsigned_type_node);
		  gcall *g = gimple_build_call_internal
		    (IFN_GOMP_SIMT_LAST_LANE, 1, simtcond);
		  gimple_call_set_lhs (g, simtlast);
		  gimple_seq_add_stmt (this_stmt_list, g);
		}
	      x = build_call_expr_internal_loc
		(UNKNOWN_LOCATION, IFN_GOMP_SIMT_XCHG_IDX,
		 TREE_TYPE (val), 2, val, simtlast);
	      new_var = unshare_expr (new_var);
	      gimplify_assign (new_var, x, this_stmt_list);
	      new_var = unshare_expr (new_var);
	    }

	  /* Emit any deferred statement sequence attached to the
	     clause (lowered in this context) before the copy-out.  */
	  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
	      && OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c))
	    {
	      lower_omp (&OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c), ctx);
	      gimple_seq_add_seq (this_stmt_list,
				  OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c));
	      OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c) = NULL;
	    }
	  else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
		   && OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c))
	    {
	      lower_omp (&OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c), ctx);
	      gimple_seq_add_seq (this_stmt_list,
				  OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c));
	      OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c) = NULL;
	    }

	  /* Determine the outer (shared) reference X to copy into;
	     for a taskloop IV that resolves to a global var, use the
	     global directly.  */
	  x = NULL_TREE;
	  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
	      && OMP_CLAUSE_LASTPRIVATE_TASKLOOP_IV (c))
	    {
	      gcc_checking_assert (is_taskloop_ctx (ctx));
	      tree ovar = maybe_lookup_decl_in_outer_ctx (var,
							  ctx->outer->outer);
	      if (is_global_var (ovar))
		x = ovar;
	    }
	  if (!x)
	    x = build_outer_var_ref (var, ctx, OMP_CLAUSE_LASTPRIVATE);
	  if (omp_is_reference (var))
	    new_var = build_simple_mem_ref_loc (clause_loc, new_var);
	  /* The copy-out itself: outer var = private copy.  */
	  x = lang_hooks.decls.omp_clause_assign_op (c, x, new_var);
	  gimplify_and_add (x, this_stmt_list);

	  /* Close the conditional-lastprivate guard opened above.  */
	  if (lab2)
	    gimple_seq_add_stmt (this_stmt_list, gimple_build_label (lab2));
	}

     next:
      c = OMP_CLAUSE_CHAIN (c);
      if (c == NULL && !par_clauses)
	{
	  /* If this was a workshare clause, see if it had been combined
	     with its parallel.  In that case, continue looking for the
	     clauses also on the parallel statement itself.  */
	  if (is_parallel_ctx (ctx))
	    break;

	  ctx = ctx->outer;
	  if (ctx == NULL || !is_parallel_ctx (ctx))
	    break;

	  c = omp_find_clause (gimple_omp_parallel_clauses (ctx->stmt),
			       OMP_CLAUSE_LASTPRIVATE);
	  par_clauses = true;
	}
    }

  /* Target of the PREDICATE guard branch emitted above.  */
  if (label)
    gimple_seq_add_stmt (stmt_list, gimple_build_label (label));
}
5946
5947 /* Lower the OpenACC reductions of CLAUSES for compute axis LEVEL
5948 (which might be a placeholder). INNER is true if this is an inner
5949 axis of a multi-axis loop. FORK and JOIN are (optional) fork and
5950 join markers. Generate the before-loop forking sequence in
5951 FORK_SEQ and the after-loop joining sequence to JOIN_SEQ. The
5952 general form of these sequences is
5953
5954 GOACC_REDUCTION_SETUP
5955 GOACC_FORK
5956 GOACC_REDUCTION_INIT
5957 ...
5958 GOACC_REDUCTION_FINI
5959 GOACC_JOIN
5960 GOACC_REDUCTION_TEARDOWN. */
5961
5962 static void
5963 lower_oacc_reductions (location_t loc, tree clauses, tree level, bool inner,
5964 gcall *fork, gcall *join, gimple_seq *fork_seq,
5965 gimple_seq *join_seq, omp_context *ctx)
5966 {
5967 gimple_seq before_fork = NULL;
5968 gimple_seq after_fork = NULL;
5969 gimple_seq before_join = NULL;
5970 gimple_seq after_join = NULL;
5971 tree init_code = NULL_TREE, fini_code = NULL_TREE,
5972 setup_code = NULL_TREE, teardown_code = NULL_TREE;
5973 unsigned offset = 0;
5974
5975 for (tree c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
5976 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION)
5977 {
5978 tree orig = OMP_CLAUSE_DECL (c);
5979 tree var = maybe_lookup_decl (orig, ctx);
5980 tree ref_to_res = NULL_TREE;
5981 tree incoming, outgoing, v1, v2, v3;
5982 bool is_private = false;
5983
5984 enum tree_code rcode = OMP_CLAUSE_REDUCTION_CODE (c);
5985 if (rcode == MINUS_EXPR)
5986 rcode = PLUS_EXPR;
5987 else if (rcode == TRUTH_ANDIF_EXPR)
5988 rcode = BIT_AND_EXPR;
5989 else if (rcode == TRUTH_ORIF_EXPR)
5990 rcode = BIT_IOR_EXPR;
5991 tree op = build_int_cst (unsigned_type_node, rcode);
5992
5993 if (!var)
5994 var = orig;
5995
5996 incoming = outgoing = var;
5997
5998 if (!inner)
5999 {
6000 /* See if an outer construct also reduces this variable. */
6001 omp_context *outer = ctx;
6002
6003 while (omp_context *probe = outer->outer)
6004 {
6005 enum gimple_code type = gimple_code (probe->stmt);
6006 tree cls;
6007
6008 switch (type)
6009 {
6010 case GIMPLE_OMP_FOR:
6011 cls = gimple_omp_for_clauses (probe->stmt);
6012 break;
6013
6014 case GIMPLE_OMP_TARGET:
6015 if (gimple_omp_target_kind (probe->stmt)
6016 != GF_OMP_TARGET_KIND_OACC_PARALLEL)
6017 goto do_lookup;
6018
6019 cls = gimple_omp_target_clauses (probe->stmt);
6020 break;
6021
6022 default:
6023 goto do_lookup;
6024 }
6025
6026 outer = probe;
6027 for (; cls; cls = OMP_CLAUSE_CHAIN (cls))
6028 if (OMP_CLAUSE_CODE (cls) == OMP_CLAUSE_REDUCTION
6029 && orig == OMP_CLAUSE_DECL (cls))
6030 {
6031 incoming = outgoing = lookup_decl (orig, probe);
6032 goto has_outer_reduction;
6033 }
6034 else if ((OMP_CLAUSE_CODE (cls) == OMP_CLAUSE_FIRSTPRIVATE
6035 || OMP_CLAUSE_CODE (cls) == OMP_CLAUSE_PRIVATE)
6036 && orig == OMP_CLAUSE_DECL (cls))
6037 {
6038 is_private = true;
6039 goto do_lookup;
6040 }
6041 }
6042
6043 do_lookup:
6044 /* This is the outermost construct with this reduction,
6045 see if there's a mapping for it. */
6046 if (gimple_code (outer->stmt) == GIMPLE_OMP_TARGET
6047 && maybe_lookup_field (orig, outer) && !is_private)
6048 {
6049 ref_to_res = build_receiver_ref (orig, false, outer);
6050 if (omp_is_reference (orig))
6051 ref_to_res = build_simple_mem_ref (ref_to_res);
6052
6053 tree type = TREE_TYPE (var);
6054 if (POINTER_TYPE_P (type))
6055 type = TREE_TYPE (type);
6056
6057 outgoing = var;
6058 incoming = omp_reduction_init_op (loc, rcode, type);
6059 }
6060 else
6061 {
6062 /* Try to look at enclosing contexts for reduction var,
6063 use original if no mapping found. */
6064 tree t = NULL_TREE;
6065 omp_context *c = ctx->outer;
6066 while (c && !t)
6067 {
6068 t = maybe_lookup_decl (orig, c);
6069 c = c->outer;
6070 }
6071 incoming = outgoing = (t ? t : orig);
6072 }
6073
6074 has_outer_reduction:;
6075 }
6076
6077 if (!ref_to_res)
6078 ref_to_res = integer_zero_node;
6079
6080 if (omp_is_reference (orig))
6081 {
6082 tree type = TREE_TYPE (var);
6083 const char *id = IDENTIFIER_POINTER (DECL_NAME (var));
6084
6085 if (!inner)
6086 {
6087 tree x = create_tmp_var (TREE_TYPE (type), id);
6088 gimplify_assign (var, build_fold_addr_expr (x), fork_seq);
6089 }
6090
6091 v1 = create_tmp_var (type, id);
6092 v2 = create_tmp_var (type, id);
6093 v3 = create_tmp_var (type, id);
6094
6095 gimplify_assign (v1, var, fork_seq);
6096 gimplify_assign (v2, var, fork_seq);
6097 gimplify_assign (v3, var, fork_seq);
6098
6099 var = build_simple_mem_ref (var);
6100 v1 = build_simple_mem_ref (v1);
6101 v2 = build_simple_mem_ref (v2);
6102 v3 = build_simple_mem_ref (v3);
6103 outgoing = build_simple_mem_ref (outgoing);
6104
6105 if (!TREE_CONSTANT (incoming))
6106 incoming = build_simple_mem_ref (incoming);
6107 }
6108 else
6109 v1 = v2 = v3 = var;
6110
6111 /* Determine position in reduction buffer, which may be used
6112 by target. The parser has ensured that this is not a
6113 variable-sized type. */
6114 fixed_size_mode mode
6115 = as_a <fixed_size_mode> (TYPE_MODE (TREE_TYPE (var)));
6116 unsigned align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
6117 offset = (offset + align - 1) & ~(align - 1);
6118 tree off = build_int_cst (sizetype, offset);
6119 offset += GET_MODE_SIZE (mode);
6120
6121 if (!init_code)
6122 {
6123 init_code = build_int_cst (integer_type_node,
6124 IFN_GOACC_REDUCTION_INIT);
6125 fini_code = build_int_cst (integer_type_node,
6126 IFN_GOACC_REDUCTION_FINI);
6127 setup_code = build_int_cst (integer_type_node,
6128 IFN_GOACC_REDUCTION_SETUP);
6129 teardown_code = build_int_cst (integer_type_node,
6130 IFN_GOACC_REDUCTION_TEARDOWN);
6131 }
6132
6133 tree setup_call
6134 = build_call_expr_internal_loc (loc, IFN_GOACC_REDUCTION,
6135 TREE_TYPE (var), 6, setup_code,
6136 unshare_expr (ref_to_res),
6137 incoming, level, op, off);
6138 tree init_call
6139 = build_call_expr_internal_loc (loc, IFN_GOACC_REDUCTION,
6140 TREE_TYPE (var), 6, init_code,
6141 unshare_expr (ref_to_res),
6142 v1, level, op, off);
6143 tree fini_call
6144 = build_call_expr_internal_loc (loc, IFN_GOACC_REDUCTION,
6145 TREE_TYPE (var), 6, fini_code,
6146 unshare_expr (ref_to_res),
6147 v2, level, op, off);
6148 tree teardown_call
6149 = build_call_expr_internal_loc (loc, IFN_GOACC_REDUCTION,
6150 TREE_TYPE (var), 6, teardown_code,
6151 ref_to_res, v3, level, op, off);
6152
6153 gimplify_assign (v1, setup_call, &before_fork);
6154 gimplify_assign (v2, init_call, &after_fork);
6155 gimplify_assign (v3, fini_call, &before_join);
6156 gimplify_assign (outgoing, teardown_call, &after_join);
6157 }
6158
6159 /* Now stitch things together. */
6160 gimple_seq_add_seq (fork_seq, before_fork);
6161 if (fork)
6162 gimple_seq_add_stmt (fork_seq, fork);
6163 gimple_seq_add_seq (fork_seq, after_fork);
6164
6165 gimple_seq_add_seq (join_seq, before_join);
6166 if (join)
6167 gimple_seq_add_stmt (join_seq, join);
6168 gimple_seq_add_seq (join_seq, after_join);
6169 }
6170
/* Generate code to implement the REDUCTION clauses, append it
   to STMT_SEQP.  CLIST if non-NULL is a pointer to a sequence
   that should be emitted also inside of the critical section,
   in that case clear *CLIST afterwards, otherwise leave it as is
   and let the caller emit it itself.  */

static void
lower_reduction_clauses (tree clauses, gimple_seq *stmt_seqp,
			 gimple_seq *clist, omp_context *ctx)
{
  /* Merge statements accumulated here and later wrapped between
     GOMP_atomic_start/GOMP_atomic_end calls (unless the single-clause
     OMP_ATOMIC shortcut below is taken).  */
  gimple_seq sub_seq = NULL;
  gimple *stmt;
  tree x, c;
  /* Number of applicable reduction clauses seen; -1 means an array
     reduction or UDR forces the lock-based path.  */
  int count = 0;

  /* OpenACC loop reductions are handled elsewhere.  */
  if (is_gimple_omp_oacc (ctx->stmt))
    return;

  /* SIMD reductions are handled in lower_rec_input_clauses.  */
  if (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
      && gimple_omp_for_kind (ctx->stmt) & GF_OMP_FOR_SIMD)
    return;

  /* First see if there is exactly one reduction clause.  Use OMP_ATOMIC
     update in that case, otherwise use a lock.  */
  for (c = clauses; c && count < 2; c = OMP_CLAUSE_CHAIN (c))
    if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
	&& !OMP_CLAUSE_REDUCTION_TASK (c))
      {
	if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c)
	    || TREE_CODE (OMP_CLAUSE_DECL (c)) == MEM_REF)
	  {
	    /* Never use OMP_ATOMIC for array reductions or UDRs.  */
	    count = -1;
	    break;
	  }
	count++;
      }

  if (count == 0)
    return;

  /* Second pass: for each (non-task) reduction clause emit the merge of
     the thread-private copy into the shared variable.  */
  for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
    {
      tree var, ref, new_var, orig_var;
      enum tree_code code;
      location_t clause_loc = OMP_CLAUSE_LOCATION (c);

      if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION
	  || OMP_CLAUSE_REDUCTION_TASK (c))
	continue;

      enum omp_clause_code ccode = OMP_CLAUSE_REDUCTION;
      orig_var = var = OMP_CLAUSE_DECL (c);
      /* An array-section reduction is represented as a MEM_REF; strip
	 the wrapping to reach the underlying base decl.  */
      if (TREE_CODE (var) == MEM_REF)
	{
	  var = TREE_OPERAND (var, 0);
	  if (TREE_CODE (var) == POINTER_PLUS_EXPR)
	    var = TREE_OPERAND (var, 0);
	  if (TREE_CODE (var) == ADDR_EXPR)
	    var = TREE_OPERAND (var, 0);
	  else
	    {
	      /* If this is a pointer or referenced based array
		 section, the var could be private in the outer
		 context e.g. on orphaned loop construct.  Pretend this
		 is private variable's outer reference.  */
	      ccode = OMP_CLAUSE_PRIVATE;
	      if (TREE_CODE (var) == INDIRECT_REF)
		var = TREE_OPERAND (var, 0);
	    }
	  orig_var = var;
	  if (is_variable_sized (var))
	    {
	      /* Variable-sized bases are accessed through their
		 DECL_VALUE_EXPR indirection; use the pointer decl.  */
	      gcc_assert (DECL_HAS_VALUE_EXPR_P (var));
	      var = DECL_VALUE_EXPR (var);
	      gcc_assert (TREE_CODE (var) == INDIRECT_REF);
	      var = TREE_OPERAND (var, 0);
	      gcc_assert (DECL_P (var));
	    }
	}
      new_var = lookup_decl (var, ctx);
      if (var == OMP_CLAUSE_DECL (c) && omp_is_reference (var))
	new_var = build_simple_mem_ref_loc (clause_loc, new_var);
      ref = build_outer_var_ref (var, ctx, ccode);
      code = OMP_CLAUSE_REDUCTION_CODE (c);

      /* reduction(-:var) sums up the partial results, so it acts
	 identically to reduction(+:var).  */
      if (code == MINUS_EXPR)
	code = PLUS_EXPR;

      if (count == 1)
	{
	  /* Exactly one scalar reduction: merge with a relaxed
	     OMP_ATOMIC update instead of taking the global lock.  */
	  tree addr = build_fold_addr_expr_loc (clause_loc, ref);

	  addr = save_expr (addr);
	  ref = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (addr)), addr);
	  x = fold_build2_loc (clause_loc, code, TREE_TYPE (ref), ref, new_var);
	  x = build2 (OMP_ATOMIC, void_type_node, addr, x);
	  OMP_ATOMIC_MEMORY_ORDER (x) = OMP_MEMORY_ORDER_RELAXED;
	  gimplify_and_add (x, stmt_seqp);
	  return;
	}
      else if (TREE_CODE (OMP_CLAUSE_DECL (c)) == MEM_REF)
	{
	  /* Array-section reduction: emit an element-wise merge loop
	     walking both the private copy and the outer variable.  */
	  tree d = OMP_CLAUSE_DECL (c);
	  tree type = TREE_TYPE (d);
	  tree v = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
	  tree i = create_tmp_var (TREE_TYPE (v));
	  tree ptype = build_pointer_type (TREE_TYPE (type));
	  tree bias = TREE_OPERAND (d, 1);
	  d = TREE_OPERAND (d, 0);
	  if (TREE_CODE (d) == POINTER_PLUS_EXPR)
	    {
	      /* Fold the section's start offset into BIAS, looked up in
		 this context or, failing that, an outer one.  */
	      tree b = TREE_OPERAND (d, 1);
	      b = maybe_lookup_decl (b, ctx);
	      if (b == NULL)
		{
		  b = TREE_OPERAND (d, 1);
		  b = maybe_lookup_decl_in_outer_ctx (b, ctx);
		}
	      if (integer_zerop (bias))
		bias = b;
	      else
		{
		  bias = fold_convert_loc (clause_loc, TREE_TYPE (b), bias);
		  bias = fold_build2_loc (clause_loc, PLUS_EXPR,
					  TREE_TYPE (b), b, bias);
		}
	      d = TREE_OPERAND (d, 0);
	    }
	  /* For ref build_outer_var_ref already performs this, so
	     only new_var needs a dereference.  */
	  if (TREE_CODE (d) == INDIRECT_REF)
	    {
	      new_var = build_simple_mem_ref_loc (clause_loc, new_var);
	      gcc_assert (omp_is_reference (var) && var == orig_var);
	    }
	  else if (TREE_CODE (d) == ADDR_EXPR)
	    {
	      if (orig_var == var)
		{
		  new_var = build_fold_addr_expr (new_var);
		  ref = build_fold_addr_expr (ref);
		}
	    }
	  else
	    {
	      gcc_assert (orig_var == var);
	      if (omp_is_reference (var))
		ref = build_fold_addr_expr (ref);
	    }
	  /* The loop bound V may itself be a decl remapped in this or
	     an outer context; gimplify it into a usable rvalue.  */
	  if (DECL_P (v))
	    {
	      tree t = maybe_lookup_decl (v, ctx);
	      if (t)
		v = t;
	      else
		v = maybe_lookup_decl_in_outer_ctx (v, ctx);
	      gimplify_expr (&v, stmt_seqp, NULL, is_gimple_val, fb_rvalue);
	    }
	  if (!integer_zerop (bias))
	    {
	      bias = fold_convert_loc (clause_loc, sizetype, bias);
	      new_var = fold_build2_loc (clause_loc, POINTER_PLUS_EXPR,
					 TREE_TYPE (new_var), new_var,
					 unshare_expr (bias));
	      ref = fold_build2_loc (clause_loc, POINTER_PLUS_EXPR,
				     TREE_TYPE (ref), ref, bias);
	    }
	  /* Materialize both walking pointers into temporaries so the
	     loop below can advance them.  */
	  new_var = fold_convert_loc (clause_loc, ptype, new_var);
	  ref = fold_convert_loc (clause_loc, ptype, ref);
	  tree m = create_tmp_var (ptype);
	  gimplify_assign (m, new_var, stmt_seqp);
	  new_var = m;
	  m = create_tmp_var (ptype);
	  gimplify_assign (m, ref, stmt_seqp);
	  ref = m;
	  gimplify_assign (i, build_int_cst (TREE_TYPE (v), 0), stmt_seqp);
	  tree body = create_artificial_label (UNKNOWN_LOCATION);
	  tree end = create_artificial_label (UNKNOWN_LOCATION);
	  gimple_seq_add_stmt (&sub_seq, gimple_build_label (body));
	  tree priv = build_simple_mem_ref_loc (clause_loc, new_var);
	  tree out = build_simple_mem_ref_loc (clause_loc, ref);
	  if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
	    {
	      /* User-defined reduction: bind the OMP_IN/OMP_OUT
		 placeholders to the current elements and splice in the
		 lowered combiner; the clause's merge sequence and
		 placeholders are consumed here.  */
	      tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
	      tree decl_placeholder
		= OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c);
	      SET_DECL_VALUE_EXPR (placeholder, out);
	      DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
	      SET_DECL_VALUE_EXPR (decl_placeholder, priv);
	      DECL_HAS_VALUE_EXPR_P (decl_placeholder) = 1;
	      lower_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c), ctx);
	      gimple_seq_add_seq (&sub_seq,
				  OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c));
	      OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
	      OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) = NULL;
	      OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c) = NULL;
	    }
	  else
	    {
	      /* Built-in operator: out = out <code> priv.  */
	      x = build2 (code, TREE_TYPE (out), out, priv);
	      out = unshare_expr (out);
	      gimplify_assign (out, x, &sub_seq);
	    }
	  /* Advance both pointers and the index, loop while i <= v.  */
	  gimple *g = gimple_build_assign (new_var, POINTER_PLUS_EXPR, new_var,
					   TYPE_SIZE_UNIT (TREE_TYPE (type)));
	  gimple_seq_add_stmt (&sub_seq, g);
	  g = gimple_build_assign (ref, POINTER_PLUS_EXPR, ref,
				   TYPE_SIZE_UNIT (TREE_TYPE (type)));
	  gimple_seq_add_stmt (&sub_seq, g);
	  g = gimple_build_assign (i, PLUS_EXPR, i,
				   build_int_cst (TREE_TYPE (i), 1));
	  gimple_seq_add_stmt (&sub_seq, g);
	  g = gimple_build_cond (LE_EXPR, i, v, body, end);
	  gimple_seq_add_stmt (&sub_seq, g);
	  gimple_seq_add_stmt (&sub_seq, gimple_build_label (end));
	}
      else if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
	{
	  /* Scalar user-defined reduction.  */
	  tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);

	  if (omp_is_reference (var)
	      && !useless_type_conversion_p (TREE_TYPE (placeholder),
					     TREE_TYPE (ref)))
	    ref = build_fold_addr_expr_loc (clause_loc, ref);
	  SET_DECL_VALUE_EXPR (placeholder, ref);
	  DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
	  lower_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c), ctx);
	  gimple_seq_add_seq (&sub_seq, OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c));
	  OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
	  OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) = NULL;
	}
      else
	{
	  /* Scalar built-in operator: outer = outer <code> private.  */
	  x = build2 (code, TREE_TYPE (ref), ref, new_var);
	  ref = build_outer_var_ref (var, ctx);
	  gimplify_assign (ref, x, &sub_seq);
	}
    }

  /* Wrap the accumulated merges (and CLIST, if supplied) inside the
     global GOMP atomic lock.  */
  stmt = gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_ATOMIC_START),
			    0);
  gimple_seq_add_stmt (stmt_seqp, stmt);

  gimple_seq_add_seq (stmt_seqp, sub_seq);

  if (clist)
    {
      gimple_seq_add_seq (stmt_seqp, *clist);
      *clist = NULL;
    }

  stmt = gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_ATOMIC_END),
			    0);
  gimple_seq_add_stmt (stmt_seqp, stmt);
}
6431
6432
/* Generate code to implement the COPYPRIVATE clauses.  The broadcasting
   thread stores its value (or its address, when passed by reference) into
   the sender record via SLIST; every thread then copies it back out of the
   receiver record via RLIST.  */

static void
lower_copyprivate_clauses (tree clauses, gimple_seq *slist, gimple_seq *rlist,
			   omp_context *ctx)
{
  tree c;

  for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
    {
      tree var, new_var, ref, x;
      bool by_ref;
      location_t clause_loc = OMP_CLAUSE_LOCATION (c);

      if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_COPYPRIVATE)
	continue;

      var = OMP_CLAUSE_DECL (c);
      by_ref = use_pointer_for_field (var, NULL);

      /* Sender side: store the value (or its address if BY_REF) into
	 the communication record.  */
      ref = build_sender_ref (var, ctx);
      x = new_var = lookup_decl_in_outer_ctx (var, ctx);
      if (by_ref)
	{
	  x = build_fold_addr_expr_loc (clause_loc, new_var);
	  x = fold_convert_loc (clause_loc, TREE_TYPE (ref), x);
	}
      gimplify_assign (ref, x, slist);

      /* Receiver side: fetch the field back and, if BY_REF, dereference
	 the stored pointer to reach the broadcast value.  */
      ref = build_receiver_ref (var, false, ctx);
      if (by_ref)
	{
	  ref = fold_convert_loc (clause_loc,
				  build_pointer_type (TREE_TYPE (new_var)),
				  ref);
	  ref = build_fold_indirect_ref_loc (clause_loc, ref);
	}
      /* C++ reference-typed vars need an extra dereference on both
	 sides before the assignment.  */
      if (omp_is_reference (var))
	{
	  ref = fold_convert_loc (clause_loc, TREE_TYPE (new_var), ref);
	  ref = build_simple_mem_ref_loc (clause_loc, ref);
	  new_var = build_simple_mem_ref_loc (clause_loc, new_var);
	}
      /* Let the frontend build the (possibly non-trivial) assignment.  */
      x = lang_hooks.decls.omp_clause_assign_op (c, new_var, ref);
      gimplify_and_add (x, rlist);
    }
}
6480
6481
/* Generate code to implement the clauses, FIRSTPRIVATE, COPYIN, LASTPRIVATE,
   and REDUCTION from the sender (aka parent) side.  Incoming copies are
   appended to ILIST (executed before the region), outgoing copies to OLIST
   (executed after it).  */

static void
lower_send_clauses (tree clauses, gimple_seq *ilist, gimple_seq *olist,
		    omp_context *ctx)
{
  tree c, t;
  int ignored_looptemp = 0;
  bool is_taskloop = false;

  /* For taskloop, ignore first two _looptemp_ clauses, those are initialized
     by GOMP_taskloop.  */
  if (is_task_ctx (ctx) && gimple_omp_task_taskloop_p (ctx->stmt))
    {
      ignored_looptemp = 2;
      is_taskloop = true;
    }

  for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
    {
      tree val, ref, x, var;
      bool by_ref, do_in = false, do_out = false;
      location_t clause_loc = OMP_CLAUSE_LOCATION (c);

      /* Filter to the clause kinds that need sender-side data movement;
	 everything else is skipped.  */
      switch (OMP_CLAUSE_CODE (c))
	{
	case OMP_CLAUSE_PRIVATE:
	  if (OMP_CLAUSE_PRIVATE_OUTER_REF (c))
	    break;
	  continue;
	case OMP_CLAUSE_FIRSTPRIVATE:
	case OMP_CLAUSE_COPYIN:
	case OMP_CLAUSE_LASTPRIVATE:
	case OMP_CLAUSE_IN_REDUCTION:
	case OMP_CLAUSE__REDUCTEMP_:
	  break;
	case OMP_CLAUSE_REDUCTION:
	  if (is_task_ctx (ctx) || OMP_CLAUSE_REDUCTION_TASK (c))
	    continue;
	  break;
	case OMP_CLAUSE_SHARED:
	  if (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
	    break;
	  continue;
	case OMP_CLAUSE__LOOPTEMP_:
	  if (ignored_looptemp)
	    {
	      ignored_looptemp--;
	      continue;
	    }
	  break;
	default:
	  continue;
	}

      val = OMP_CLAUSE_DECL (c);
      /* For array-section (in_)reductions, strip the MEM_REF wrapping
	 down to the underlying base decl.  */
      if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
	   || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_IN_REDUCTION)
	  && TREE_CODE (val) == MEM_REF)
	{
	  val = TREE_OPERAND (val, 0);
	  if (TREE_CODE (val) == POINTER_PLUS_EXPR)
	    val = TREE_OPERAND (val, 0);
	  if (TREE_CODE (val) == INDIRECT_REF
	      || TREE_CODE (val) == ADDR_EXPR)
	    val = TREE_OPERAND (val, 0);
	  if (is_variable_sized (val))
	    continue;
	}

      /* For OMP_CLAUSE_SHARED_FIRSTPRIVATE, look beyond the
	 outer taskloop region.  */
      omp_context *ctx_for_o = ctx;
      if (is_taskloop
	  && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED
	  && OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
	ctx_for_o = ctx->outer;

      var = lookup_decl_in_outer_ctx (val, ctx_for_o);

      /* Global variables are visible in the region without marshalling,
	 except for COPYIN and certain pointer-typed task cases.  */
      if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_COPYIN
	  && is_global_var (var)
	  && (val == OMP_CLAUSE_DECL (c)
	      || !is_task_ctx (ctx)
	      || (TREE_CODE (TREE_TYPE (val)) != POINTER_TYPE
		  && (TREE_CODE (TREE_TYPE (val)) != REFERENCE_TYPE
		      || (TREE_CODE (TREE_TYPE (TREE_TYPE (val)))
			  != POINTER_TYPE)))))
	continue;

      /* Member accesses go through a dummy var; substitute the real
	 base object into its DECL_VALUE_EXPR.  */
      t = omp_member_access_dummy_var (var);
      if (t)
	{
	  var = DECL_VALUE_EXPR (var);
	  tree o = maybe_lookup_decl_in_outer_ctx (t, ctx_for_o);
	  if (o != t)
	    var = unshare_and_remap (var, t, o);
	  else
	    var = unshare_expr (var);
	}

      if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED)
	{
	  /* Handle taskloop firstprivate/lastprivate, where the
	     lastprivate on GIMPLE_OMP_TASK is represented as
	     OMP_CLAUSE_SHARED_FIRSTPRIVATE.  */
	  tree f = lookup_sfield ((splay_tree_key) &DECL_UID (val), ctx);
	  x = omp_build_component_ref (ctx->sender_decl, f);
	  if (use_pointer_for_field (val, ctx))
	    var = build_fold_addr_expr (var);
	  gimplify_assign (x, var, ilist);
	  DECL_ABSTRACT_ORIGIN (f) = NULL;
	  continue;
	}

      if (((OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION
	    && OMP_CLAUSE_CODE (c) != OMP_CLAUSE_IN_REDUCTION)
	   || val == OMP_CLAUSE_DECL (c))
	  && is_variable_sized (val))
	continue;
      by_ref = use_pointer_for_field (val, NULL);

      /* Decide per clause kind whether data flows into the region
	 (DO_IN), out of it (DO_OUT), or both.  */
      switch (OMP_CLAUSE_CODE (c))
	{
	case OMP_CLAUSE_FIRSTPRIVATE:
	  if (OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (c)
	      && !by_ref
	      && is_task_ctx (ctx))
	    TREE_NO_WARNING (var) = 1;
	  do_in = true;
	  break;

	case OMP_CLAUSE_PRIVATE:
	case OMP_CLAUSE_COPYIN:
	case OMP_CLAUSE__LOOPTEMP_:
	case OMP_CLAUSE__REDUCTEMP_:
	  do_in = true;
	  break;

	case OMP_CLAUSE_LASTPRIVATE:
	  if (by_ref || omp_is_reference (val))
	    {
	      if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
		continue;
	      do_in = true;
	    }
	  else
	    {
	      do_out = true;
	      if (lang_hooks.decls.omp_private_outer_ref (val))
		do_in = true;
	    }
	  break;

	case OMP_CLAUSE_REDUCTION:
	case OMP_CLAUSE_IN_REDUCTION:
	  do_in = true;
	  if (val == OMP_CLAUSE_DECL (c))
	    {
	      if (is_task_ctx (ctx))
		by_ref = use_pointer_for_field (val, ctx);
	      else
		do_out = !(by_ref || omp_is_reference (val));
	    }
	  else
	    by_ref = TREE_CODE (TREE_TYPE (val)) == ARRAY_TYPE;
	  break;

	default:
	  gcc_unreachable ();
	}

      if (do_in)
	{
	  ref = build_sender_ref (val, ctx);
	  x = by_ref ? build_fold_addr_expr_loc (clause_loc, var) : var;
	  gimplify_assign (ref, x, ilist);
	  if (is_task_ctx (ctx))
	    DECL_ABSTRACT_ORIGIN (TREE_OPERAND (ref, 1)) = NULL;
	}

      if (do_out)
	{
	  ref = build_sender_ref (val, ctx);
	  gimplify_assign (var, ref, olist);
	}
    }
}
6671
/* Generate code to implement SHARED from the sender (aka parent)
   side.  This is trickier, since GIMPLE_OMP_PARALLEL_CLAUSES doesn't
   list things that got automatically shared.  Instead, walk the fields
   of the communication record type and marshal each shared variable
   in via ILIST and (when needed) back out via OLIST.  */

static void
lower_send_shared_vars (gimple_seq *ilist, gimple_seq *olist, omp_context *ctx)
{
  tree var, ovar, nvar, t, f, x, record_type;

  if (ctx->record_type == NULL)
    return;

  record_type = ctx->srecord_type ? ctx->srecord_type : ctx->record_type;
  for (f = TYPE_FIELDS (record_type); f ; f = DECL_CHAIN (f))
    {
      /* Each field's DECL_ABSTRACT_ORIGIN points back at the original
	 shared variable it was created for.  */
      ovar = DECL_ABSTRACT_ORIGIN (f);
      if (!ovar || TREE_CODE (ovar) == FIELD_DECL)
	continue;

      nvar = maybe_lookup_decl (ovar, ctx);
      if (!nvar || !DECL_HAS_VALUE_EXPR_P (nvar))
	continue;

      /* If CTX is a nested parallel directive.  Find the immediately
	 enclosing parallel or workshare construct that contains a
	 mapping for OVAR.  */
      var = lookup_decl_in_outer_ctx (ovar, ctx);

      /* Member accesses go through a dummy var; substitute the real
	 base object into its DECL_VALUE_EXPR.  */
      t = omp_member_access_dummy_var (var);
      if (t)
	{
	  var = DECL_VALUE_EXPR (var);
	  tree o = maybe_lookup_decl_in_outer_ctx (t, ctx);
	  if (o != t)
	    var = unshare_and_remap (var, t, o);
	  else
	    var = unshare_expr (var);
	}

      if (use_pointer_for_field (ovar, ctx))
	{
	  x = build_sender_ref (ovar, ctx);
	  if (TREE_CODE (TREE_TYPE (f)) == ARRAY_TYPE
	      && TREE_TYPE (f) == TREE_TYPE (ovar))
	    {
	      gcc_assert (is_parallel_ctx (ctx)
			  && DECL_ARTIFICIAL (ovar));
	      /* _condtemp_ clause.  */
	      var = build_constructor (TREE_TYPE (x), NULL);
	    }
	  else
	    var = build_fold_addr_expr (var);
	  gimplify_assign (x, var, ilist);
	}
      else
	{
	  /* Passed by value: copy in before the region and copy back
	     out afterwards (unless writing back would be invalid).  */
	  x = build_sender_ref (ovar, ctx);
	  gimplify_assign (x, var, ilist);

	  if (!TREE_READONLY (var)
	      /* We don't need to receive a new reference to a result
	         or parm decl.  In fact we may not store to it as we will
		 invalidate any pending RSO and generate wrong gimple
		 during inlining.  */
	      && !((TREE_CODE (var) == RESULT_DECL
		    || TREE_CODE (var) == PARM_DECL)
		   && DECL_BY_REFERENCE (var)))
	    {
	      x = build_sender_ref (ovar, ctx);
	      gimplify_assign (var, x, olist);
	    }
	}
    }
}
6746
/* Emit an OpenACC head marker call, encapsulating the partitioning and
   other information that must be processed by the target compiler.
   Return the maximum number of dimensions the associated loop might
   be partitioned over.  */

static unsigned
lower_oacc_head_mark (location_t loc, tree ddvar, tree clauses,
		      gimple_seq *seq, omp_context *ctx)
{
  unsigned levels = 0;
  /* Bitmask of OLF_* flags describing the loop's partitioning.  */
  unsigned tag = 0;
  tree gang_static = NULL_TREE;
  auto_vec<tree, 5> args;

  args.quick_push (build_int_cst
		   (integer_type_node, IFN_UNIQUE_OACC_HEAD_MARK));
  args.quick_push (ddvar);
  /* Accumulate the partitioning flags and the number of requested
     partitioning levels from the loop's clauses.  */
  for (tree c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
    {
      switch (OMP_CLAUSE_CODE (c))
	{
	case OMP_CLAUSE_GANG:
	  tag |= OLF_DIM_GANG;
	  gang_static = OMP_CLAUSE_GANG_STATIC_EXPR (c);
	  /* static:* is represented by -1, and we can ignore it, as
	     scheduling is always static.  */
	  if (gang_static && integer_minus_onep (gang_static))
	    gang_static = NULL_TREE;
	  levels++;
	  break;

	case OMP_CLAUSE_WORKER:
	  tag |= OLF_DIM_WORKER;
	  levels++;
	  break;

	case OMP_CLAUSE_VECTOR:
	  tag |= OLF_DIM_VECTOR;
	  levels++;
	  break;

	case OMP_CLAUSE_SEQ:
	  tag |= OLF_SEQ;
	  break;

	case OMP_CLAUSE_AUTO:
	  tag |= OLF_AUTO;
	  break;

	case OMP_CLAUSE_INDEPENDENT:
	  tag |= OLF_INDEPENDENT;
	  break;

	case OMP_CLAUSE_TILE:
	  tag |= OLF_TILE;
	  break;

	default:
	  continue;
	}
    }

  if (gang_static)
    {
      if (DECL_P (gang_static))
	gang_static = build_outer_var_ref (gang_static, ctx);
      tag |= OLF_GANG_STATIC;
    }

  /* In a parallel region, loops are implicitly INDEPENDENT.  */
  omp_context *tgt = enclosing_target_ctx (ctx);
  if (!tgt || is_oacc_parallel (tgt))
    tag |= OLF_INDEPENDENT;

  if (tag & OLF_TILE)
    /* Tiling could use all 3 levels.  */
    levels = 3;
  else
    {
      /* A loop lacking SEQ, GANG, WORKER and/or VECTOR could be AUTO.
	 Ensure at least one level, or 2 for possible auto
	 partitioning */
      bool maybe_auto = !(tag & (((GOMP_DIM_MASK (GOMP_DIM_MAX) - 1)
				  << OLF_DIM_BASE) | OLF_SEQ));

      if (levels < 1u + maybe_auto)
	levels = 1u + maybe_auto;
    }

  args.quick_push (build_int_cst (integer_type_node, levels));
  args.quick_push (build_int_cst (integer_type_node, tag));
  if (gang_static)
    args.quick_push (gang_static);

  /* Emit the IFN_UNIQUE head-mark call, chained through DDVAR to keep
     it ordered with the other markers.  */
  gcall *call = gimple_build_call_internal_vec (IFN_UNIQUE, args);
  gimple_set_location (call, loc);
  gimple_set_lhs (call, ddvar);
  gimple_seq_add_stmt (seq, call);

  return levels;
}
6848
6849 /* Emit an OpenACC lopp head or tail marker to SEQ. LEVEL is the
6850 partitioning level of the enclosed region. */
6851
6852 static void
6853 lower_oacc_loop_marker (location_t loc, tree ddvar, bool head,
6854 tree tofollow, gimple_seq *seq)
6855 {
6856 int marker_kind = (head ? IFN_UNIQUE_OACC_HEAD_MARK
6857 : IFN_UNIQUE_OACC_TAIL_MARK);
6858 tree marker = build_int_cst (integer_type_node, marker_kind);
6859 int nargs = 2 + (tofollow != NULL_TREE);
6860 gcall *call = gimple_build_call_internal (IFN_UNIQUE, nargs,
6861 marker, ddvar, tofollow);
6862 gimple_set_location (call, loc);
6863 gimple_set_lhs (call, ddvar);
6864 gimple_seq_add_stmt (seq, call);
6865 }
6866
/* Generate the before and after OpenACC loop sequences.  CLAUSES are
   the loop clauses, from which we extract reductions.  Initialize
   HEAD and TAIL.  */

static void
lower_oacc_head_tail (location_t loc, tree clauses,
		      gimple_seq *head, gimple_seq *tail, omp_context *ctx)
{
  bool inner = false;
  /* Artificial data-dependence variable that chains all the marker and
     fork/join calls so they cannot be reordered.  */
  tree ddvar = create_tmp_var (integer_type_node, ".data_dep");
  gimple_seq_add_stmt (head, gimple_build_assign (ddvar, integer_zero_node));

  unsigned count = lower_oacc_head_mark (loc, ddvar, clauses, head, ctx);
  tree fork_kind = build_int_cst (unsigned_type_node, IFN_UNIQUE_OACC_FORK);
  tree join_kind = build_int_cst (unsigned_type_node, IFN_UNIQUE_OACC_JOIN);

  gcc_assert (count);
  /* Emit one nested fork/join pair per partitioning level; each
     iteration appends to HEAD and prepends to TAIL so the pairs nest
     properly around the loop body.  */
  for (unsigned done = 1; count; count--, done++)
    {
      gimple_seq fork_seq = NULL;
      gimple_seq join_seq = NULL;

      /* -1 is a placeholder; the real partitioning dimension is filled
	 in by the oacc_device_lower pass.  */
      tree place = build_int_cst (integer_type_node, -1);
      gcall *fork = gimple_build_call_internal (IFN_UNIQUE, 3,
						fork_kind, ddvar, place);
      gimple_set_location (fork, loc);
      gimple_set_lhs (fork, ddvar);

      gcall *join = gimple_build_call_internal (IFN_UNIQUE, 3,
						join_kind, ddvar, place);
      gimple_set_location (join, loc);
      gimple_set_lhs (join, ddvar);

      /* Mark the beginning of this level sequence.  */
      if (inner)
	lower_oacc_loop_marker (loc, ddvar, true,
				build_int_cst (integer_type_node, count),
				&fork_seq);
      lower_oacc_loop_marker (loc, ddvar, false,
			      build_int_cst (integer_type_node, done),
			      &join_seq);

      /* Place this level's reduction setup/init around the fork and
	 fini/teardown around the join.  */
      lower_oacc_reductions (loc, clauses, place, inner,
			     fork, join, &fork_seq, &join_seq, ctx);

      /* Append this level to head.  */
      gimple_seq_add_seq (head, fork_seq);
      /* Prepend it to tail.  */
      gimple_seq_add_seq (&join_seq, *tail);
      *tail = join_seq;

      inner = true;
    }

  /* Mark the end of the sequence.  */
  lower_oacc_loop_marker (loc, ddvar, true, NULL_TREE, head);
  lower_oacc_loop_marker (loc, ddvar, false, NULL_TREE, tail);
}
6925
6926 /* If exceptions are enabled, wrap the statements in BODY in a MUST_NOT_THROW
6927 catch handler and return it. This prevents programs from violating the
6928 structured block semantics with throws. */
6929
6930 static gimple_seq
6931 maybe_catch_exception (gimple_seq body)
6932 {
6933 gimple *g;
6934 tree decl;
6935
6936 if (!flag_exceptions)
6937 return body;
6938
6939 if (lang_hooks.eh_protect_cleanup_actions != NULL)
6940 decl = lang_hooks.eh_protect_cleanup_actions ();
6941 else
6942 decl = builtin_decl_explicit (BUILT_IN_TRAP);
6943
6944 g = gimple_build_eh_must_not_throw (decl);
6945 g = gimple_build_try (body, gimple_seq_alloc_with_stmt (g),
6946 GIMPLE_TRY_CATCH);
6947
6948 return gimple_seq_alloc_with_stmt (g);
6949 }
6950
6951 \f
6952 /* Routines to lower OMP directives into OMP-GIMPLE. */
6953
/* If ctx is a worksharing context inside of a cancellable parallel
   region and it isn't nowait, add lhs to its GIMPLE_OMP_RETURN
   and conditional branch to parallel's cancel_label to handle
   cancellation in the implicit barrier.  */

static void
maybe_add_implicit_barrier_cancel (omp_context *ctx, gimple *omp_return,
				   gimple_seq *body)
{
  gcc_assert (gimple_code (omp_return) == GIMPLE_OMP_RETURN);
  /* nowait regions have no implicit barrier, so nothing to cancel.  */
  if (gimple_omp_return_nowait_p (omp_return))
    return;
  /* Walk outwards; taskgroups are transparent, any other non-parallel
     construct stops the search.  */
  for (omp_context *outer = ctx->outer; outer; outer = outer->outer)
    if (gimple_code (outer->stmt) == GIMPLE_OMP_PARALLEL
	&& outer->cancellable)
      {
	tree fndecl = builtin_decl_explicit (BUILT_IN_GOMP_CANCEL);
	tree c_bool_type = TREE_TYPE (TREE_TYPE (fndecl));
	/* LHS receives the barrier's cancellation status.  */
	tree lhs = create_tmp_var (c_bool_type);
	gimple_omp_return_set_lhs (omp_return, lhs);
	tree fallthru_label = create_artificial_label (UNKNOWN_LOCATION);
	/* if (lhs != false) goto cancel_label; else fall through.  */
	gimple *g = gimple_build_cond (NE_EXPR, lhs,
				       fold_convert (c_bool_type,
						     boolean_false_node),
				       outer->cancel_label, fallthru_label);
	gimple_seq_add_stmt (body, g);
	gimple_seq_add_stmt (body, gimple_build_label (fallthru_label));
      }
    else if (gimple_code (outer->stmt) != GIMPLE_OMP_TASKGROUP)
      return;
}
6985
6986 /* Find the first task_reduction or reduction clause or return NULL
6987 if there are none. */
6988
6989 static inline tree
6990 omp_task_reductions_find_first (tree clauses, enum tree_code code,
6991 enum omp_clause_code ccode)
6992 {
6993 while (1)
6994 {
6995 clauses = omp_find_clause (clauses, ccode);
6996 if (clauses == NULL_TREE)
6997 return NULL_TREE;
6998 if (ccode != OMP_CLAUSE_REDUCTION
6999 || code == OMP_TASKLOOP
7000 || OMP_CLAUSE_REDUCTION_TASK (clauses))
7001 return clauses;
7002 clauses = OMP_CLAUSE_CHAIN (clauses);
7003 }
7004 }
7005
7006 static void lower_omp_task_reductions (omp_context *, enum tree_code, tree,
7007 gimple_seq *, gimple_seq *);
7008
/* Lower the OpenMP sections directive in the current statement in GSI_P.
   CTX is the enclosing OMP context for the current statement.  */

static void
lower_omp_sections (gimple_stmt_iterator *gsi_p, omp_context *ctx)
{
  tree block, control;
  gimple_stmt_iterator tgsi;
  gomp_sections *stmt;
  gimple *t;
  gbind *new_stmt, *bind;
  gimple_seq ilist, dlist, olist, tred_dlist = NULL, clist = NULL, new_body;

  stmt = as_a <gomp_sections *> (gsi_stmt (*gsi_p));

  push_gimplify_context ();

  dlist = NULL;
  ilist = NULL;

  /* If there are reduction clauses with the task modifier, prepend an
     artificial _REDUCTEMP_ clause holding a pointer-sized temporary and
     emit the runtime register/unregister sequences into ILIST and
     TRED_DLIST respectively.  */
  tree rclauses
    = omp_task_reductions_find_first (gimple_omp_sections_clauses (stmt),
				      OMP_SECTIONS, OMP_CLAUSE_REDUCTION);
  tree rtmp = NULL_TREE;
  if (rclauses)
    {
      tree type = build_pointer_type (pointer_sized_int_node);
      tree temp = create_tmp_var (type);
      tree c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__REDUCTEMP_);
      OMP_CLAUSE_DECL (c) = temp;
      OMP_CLAUSE_CHAIN (c) = gimple_omp_sections_clauses (stmt);
      gimple_omp_sections_set_clauses (stmt, c);
      lower_omp_task_reductions (ctx, OMP_SECTIONS,
				 gimple_omp_sections_clauses (stmt),
				 &ilist, &tred_dlist);
      rclauses = c;
      rtmp = make_ssa_name (type);
      gimple_seq_add_stmt (&ilist, gimple_build_assign (rtmp, temp));
    }

  tree *clauses_ptr = gimple_omp_sections_clauses_ptr (stmt);
  lower_lastprivate_conditional_clauses (clauses_ptr, ctx);

  lower_rec_input_clauses (gimple_omp_sections_clauses (stmt),
			   &ilist, &dlist, ctx, NULL);

  /* Control variable the expansion pass keys the section dispatch on.  */
  control = create_tmp_var (unsigned_type_node, ".section");
  gimple_omp_sections_set_control (stmt, control);

  /* Lower each GIMPLE_OMP_SECTION body in turn and splice the lowered
     statements right after the section statement itself.  */
  new_body = gimple_omp_body (stmt);
  gimple_omp_set_body (stmt, NULL);
  tgsi = gsi_start (new_body);
  for (; !gsi_end_p (tgsi); gsi_next (&tgsi))
    {
      omp_context *sctx;
      gimple *sec_start;

      sec_start = gsi_stmt (tgsi);
      sctx = maybe_lookup_ctx (sec_start);
      gcc_assert (sctx);

      lower_omp (gimple_omp_body_ptr (sec_start), sctx);
      gsi_insert_seq_after (&tgsi, gimple_omp_body (sec_start),
			    GSI_CONTINUE_LINKING);
      gimple_omp_set_body (sec_start, NULL);

      if (gsi_one_before_end_p (tgsi))
	{
	  /* Lastprivate handling is attached only to the last section.  */
	  gimple_seq l = NULL;
	  lower_lastprivate_clauses (gimple_omp_sections_clauses (stmt), NULL,
				     &ilist, &l, &clist, ctx);
	  gsi_insert_seq_after (&tgsi, l, GSI_CONTINUE_LINKING);
	  gimple_omp_section_set_last (sec_start);
	}

      gsi_insert_after (&tgsi, gimple_build_omp_return (false),
			GSI_CONTINUE_LINKING);
    }

  block = make_node (BLOCK);
  bind = gimple_build_bind (NULL, new_body, block);

  olist = NULL;
  lower_reduction_clauses (gimple_omp_sections_clauses (stmt), &olist,
			   &clist, ctx);
  if (clist)
    {
      /* Combined reduction stores must run under the runtime's global
	 atomic lock: bracket them with GOMP_atomic_start/end.  */
      tree fndecl = builtin_decl_explicit (BUILT_IN_GOMP_ATOMIC_START);
      gcall *g = gimple_build_call (fndecl, 0);
      gimple_seq_add_stmt (&olist, g);
      gimple_seq_add_seq (&olist, clist);
      fndecl = builtin_decl_explicit (BUILT_IN_GOMP_ATOMIC_END);
      g = gimple_build_call (fndecl, 0);
      gimple_seq_add_stmt (&olist, g);
    }

  block = make_node (BLOCK);
  new_stmt = gimple_build_bind (NULL, NULL, block);
  gsi_replace (gsi_p, new_stmt, true);

  pop_gimplify_context (new_stmt);
  gimple_bind_append_vars (new_stmt, ctx->block_vars);
  BLOCK_VARS (block) = gimple_bind_vars (bind);
  if (BLOCK_VARS (block))
    TREE_USED (block) = 1;

  /* Assemble the replacement body: copy-in code, the sections statement,
     the dispatch switch, the lowered section bodies, the continue
     statement, reductions, destructors, and the closing OMP return.  */
  new_body = NULL;
  gimple_seq_add_seq (&new_body, ilist);
  gimple_seq_add_stmt (&new_body, stmt);
  gimple_seq_add_stmt (&new_body, gimple_build_omp_sections_switch ());
  gimple_seq_add_stmt (&new_body, bind);

  t = gimple_build_omp_continue (control, control);
  gimple_seq_add_stmt (&new_body, t);

  gimple_seq_add_seq (&new_body, olist);
  if (ctx->cancellable)
    gimple_seq_add_stmt (&new_body, gimple_build_label (ctx->cancel_label));
  gimple_seq_add_seq (&new_body, dlist);

  new_body = maybe_catch_exception (new_body);

  bool nowait = omp_find_clause (gimple_omp_sections_clauses (stmt),
				 OMP_CLAUSE_NOWAIT) != NULL_TREE;
  t = gimple_build_omp_return (nowait);
  gimple_seq_add_stmt (&new_body, t);
  gimple_seq_add_seq (&new_body, tred_dlist);
  maybe_add_implicit_barrier_cancel (ctx, t, &new_body);

  /* Point the _REDUCTEMP_ clause at the SSA copy of the temporary now
     that the register sequence has been emitted.  */
  if (rclauses)
    OMP_CLAUSE_DECL (rclauses) = rtmp;

  gimple_bind_set_body (new_stmt, new_body);
}
7143
7144
7145 /* A subroutine of lower_omp_single. Expand the simple form of
7146 a GIMPLE_OMP_SINGLE, without a copyprivate clause:
7147
7148 if (GOMP_single_start ())
7149 BODY;
7150 [ GOMP_barrier (); ] -> unless 'nowait' is present.
7151
7152 FIXME. It may be better to delay expanding the logic of this until
7153 pass_expand_omp. The expanded logic may make the job more difficult
7154 to a synchronization analysis pass. */
7155
7156 static void
7157 lower_omp_single_simple (gomp_single *single_stmt, gimple_seq *pre_p)
7158 {
7159 location_t loc = gimple_location (single_stmt);
7160 tree tlabel = create_artificial_label (loc);
7161 tree flabel = create_artificial_label (loc);
7162 gimple *call, *cond;
7163 tree lhs, decl;
7164
7165 decl = builtin_decl_explicit (BUILT_IN_GOMP_SINGLE_START);
7166 lhs = create_tmp_var (TREE_TYPE (TREE_TYPE (decl)));
7167 call = gimple_build_call (decl, 0);
7168 gimple_call_set_lhs (call, lhs);
7169 gimple_seq_add_stmt (pre_p, call);
7170
7171 cond = gimple_build_cond (EQ_EXPR, lhs,
7172 fold_convert_loc (loc, TREE_TYPE (lhs),
7173 boolean_true_node),
7174 tlabel, flabel);
7175 gimple_seq_add_stmt (pre_p, cond);
7176 gimple_seq_add_stmt (pre_p, gimple_build_label (tlabel));
7177 gimple_seq_add_seq (pre_p, gimple_omp_body (single_stmt));
7178 gimple_seq_add_stmt (pre_p, gimple_build_label (flabel));
7179 }
7180
7181
7182 /* A subroutine of lower_omp_single. Expand the simple form of
7183 a GIMPLE_OMP_SINGLE, with a copyprivate clause:
7184
7185 #pragma omp single copyprivate (a, b, c)
7186
7187 Create a new structure to hold copies of 'a', 'b' and 'c' and emit:
7188
7189 {
7190 if ((copyout_p = GOMP_single_copy_start ()) == NULL)
7191 {
7192 BODY;
7193 copyout.a = a;
7194 copyout.b = b;
7195 copyout.c = c;
7196 GOMP_single_copy_end (&copyout);
7197 }
7198 else
7199 {
7200 a = copyout_p->a;
7201 b = copyout_p->b;
7202 c = copyout_p->c;
7203 }
7204 GOMP_barrier ();
7205 }
7206
7207 FIXME. It may be better to delay expanding the logic of this until
7208 pass_expand_omp. The expanded logic may make the job more difficult
7209 to a synchronization analysis pass. */
7210
7211 static void
7212 lower_omp_single_copy (gomp_single *single_stmt, gimple_seq *pre_p,
7213 omp_context *ctx)
7214 {
7215 tree ptr_type, t, l0, l1, l2, bfn_decl;
7216 gimple_seq copyin_seq;
7217 location_t loc = gimple_location (single_stmt);
7218
7219 ctx->sender_decl = create_tmp_var (ctx->record_type, ".omp_copy_o");
7220
7221 ptr_type = build_pointer_type (ctx->record_type);
7222 ctx->receiver_decl = create_tmp_var (ptr_type, ".omp_copy_i");
7223
7224 l0 = create_artificial_label (loc);
7225 l1 = create_artificial_label (loc);
7226 l2 = create_artificial_label (loc);
7227
7228 bfn_decl = builtin_decl_explicit (BUILT_IN_GOMP_SINGLE_COPY_START);
7229 t = build_call_expr_loc (loc, bfn_decl, 0);
7230 t = fold_convert_loc (loc, ptr_type, t);
7231 gimplify_assign (ctx->receiver_decl, t, pre_p);
7232
7233 t = build2 (EQ_EXPR, boolean_type_node, ctx->receiver_decl,
7234 build_int_cst (ptr_type, 0));
7235 t = build3 (COND_EXPR, void_type_node, t,
7236 build_and_jump (&l0), build_and_jump (&l1));
7237 gimplify_and_add (t, pre_p);
7238
7239 gimple_seq_add_stmt (pre_p, gimple_build_label (l0));
7240
7241 gimple_seq_add_seq (pre_p, gimple_omp_body (single_stmt));
7242
7243 copyin_seq = NULL;
7244 lower_copyprivate_clauses (gimple_omp_single_clauses (single_stmt), pre_p,
7245 &copyin_seq, ctx);
7246
7247 t = build_fold_addr_expr_loc (loc, ctx->sender_decl);
7248 bfn_decl = builtin_decl_explicit (BUILT_IN_GOMP_SINGLE_COPY_END);
7249 t = build_call_expr_loc (loc, bfn_decl, 1, t);
7250 gimplify_and_add (t, pre_p);
7251
7252 t = build_and_jump (&l2);
7253 gimplify_and_add (t, pre_p);
7254
7255 gimple_seq_add_stmt (pre_p, gimple_build_label (l1));
7256
7257 gimple_seq_add_seq (pre_p, copyin_seq);
7258
7259 gimple_seq_add_stmt (pre_p, gimple_build_label (l2));
7260 }
7261
7262
/* Expand code for an OpenMP single directive.  Dispatches to the
   copyprivate or the simple expansion depending on whether scanning
   recorded a copyout record type for this context.  */

static void
lower_omp_single (gimple_stmt_iterator *gsi_p, omp_context *ctx)
{
  tree block;
  gomp_single *single_stmt = as_a <gomp_single *> (gsi_stmt (*gsi_p));
  gbind *bind;
  gimple_seq bind_body, bind_body_tail = NULL, dlist;

  push_gimplify_context ();

  block = make_node (BLOCK);
  bind = gimple_build_bind (NULL, NULL, block);
  gsi_replace (gsi_p, bind, true);
  bind_body = NULL;
  dlist = NULL;
  lower_rec_input_clauses (gimple_omp_single_clauses (single_stmt),
			   &bind_body, &dlist, ctx, NULL);
  lower_omp (gimple_omp_body_ptr (single_stmt), ctx);

  gimple_seq_add_stmt (&bind_body, single_stmt);

  /* A non-NULL record_type means a copyprivate clause was present.  */
  if (ctx->record_type)
    lower_omp_single_copy (single_stmt, &bind_body, ctx);
  else
    lower_omp_single_simple (single_stmt, &bind_body);

  gimple_omp_set_body (single_stmt, NULL);

  gimple_seq_add_seq (&bind_body, dlist);

  bind_body = maybe_catch_exception (bind_body);

  bool nowait = omp_find_clause (gimple_omp_single_clauses (single_stmt),
				 OMP_CLAUSE_NOWAIT) != NULL_TREE;
  gimple *g = gimple_build_omp_return (nowait);
  gimple_seq_add_stmt (&bind_body_tail, g);
  maybe_add_implicit_barrier_cancel (ctx, g, &bind_body_tail);
  if (ctx->record_type)
    {
      gimple_stmt_iterator gsi = gsi_start (bind_body_tail);
      /* Clobber the copyout structure once it is dead so its stack slot
	 can be reused.  */
      tree clobber = build_constructor (ctx->record_type, NULL);
      TREE_THIS_VOLATILE (clobber) = 1;
      gsi_insert_after (&gsi, gimple_build_assign (ctx->sender_decl,
						   clobber), GSI_SAME_STMT);
    }
  gimple_seq_add_seq (&bind_body, bind_body_tail);
  gimple_bind_set_body (bind, bind_body);

  pop_gimplify_context (bind);

  gimple_bind_append_vars (bind, ctx->block_vars);
  BLOCK_VARS (block) = ctx->block_vars;
  if (BLOCK_VARS (block))
    TREE_USED (block) = 1;
}
7320
7321
7322 /* Expand code for an OpenMP master directive. */
7323
7324 static void
7325 lower_omp_master (gimple_stmt_iterator *gsi_p, omp_context *ctx)
7326 {
7327 tree block, lab = NULL, x, bfn_decl;
7328 gimple *stmt = gsi_stmt (*gsi_p);
7329 gbind *bind;
7330 location_t loc = gimple_location (stmt);
7331 gimple_seq tseq;
7332
7333 push_gimplify_context ();
7334
7335 block = make_node (BLOCK);
7336 bind = gimple_build_bind (NULL, NULL, block);
7337 gsi_replace (gsi_p, bind, true);
7338 gimple_bind_add_stmt (bind, stmt);
7339
7340 bfn_decl = builtin_decl_explicit (BUILT_IN_OMP_GET_THREAD_NUM);
7341 x = build_call_expr_loc (loc, bfn_decl, 0);
7342 x = build2 (EQ_EXPR, boolean_type_node, x, integer_zero_node);
7343 x = build3 (COND_EXPR, void_type_node, x, NULL, build_and_jump (&lab));
7344 tseq = NULL;
7345 gimplify_and_add (x, &tseq);
7346 gimple_bind_add_seq (bind, tseq);
7347
7348 lower_omp (gimple_omp_body_ptr (stmt), ctx);
7349 gimple_omp_set_body (stmt, maybe_catch_exception (gimple_omp_body (stmt)));
7350 gimple_bind_add_seq (bind, gimple_omp_body (stmt));
7351 gimple_omp_set_body (stmt, NULL);
7352
7353 gimple_bind_add_stmt (bind, gimple_build_label (lab));
7354
7355 gimple_bind_add_stmt (bind, gimple_build_omp_return (true));
7356
7357 pop_gimplify_context (bind);
7358
7359 gimple_bind_append_vars (bind, ctx->block_vars);
7360 BLOCK_VARS (block) = ctx->block_vars;
7361 }
7362
7363 /* Helper function for lower_omp_task_reductions. For a specific PASS
7364 find out the current clause it should be processed, or return false
7365 if all have been processed already. */
7366
7367 static inline bool
7368 omp_task_reduction_iterate (int pass, enum tree_code code,
7369 enum omp_clause_code ccode, tree *c, tree *decl,
7370 tree *type, tree *next)
7371 {
7372 for (; *c; *c = omp_find_clause (OMP_CLAUSE_CHAIN (*c), ccode))
7373 {
7374 if (ccode == OMP_CLAUSE_REDUCTION
7375 && code != OMP_TASKLOOP
7376 && !OMP_CLAUSE_REDUCTION_TASK (*c))
7377 continue;
7378 *decl = OMP_CLAUSE_DECL (*c);
7379 *type = TREE_TYPE (*decl);
7380 if (TREE_CODE (*decl) == MEM_REF)
7381 {
7382 if (pass != 1)
7383 continue;
7384 }
7385 else
7386 {
7387 if (omp_is_reference (*decl))
7388 *type = TREE_TYPE (*type);
7389 if (pass != (!TREE_CONSTANT (TYPE_SIZE_UNIT (*type))))
7390 continue;
7391 }
7392 *next = omp_find_clause (OMP_CLAUSE_CHAIN (*c), ccode);
7393 return true;
7394 }
7395 *decl = NULL_TREE;
7396 *type = NULL_TREE;
7397 *next = NULL_TREE;
7398 return false;
7399 }
7400
/* Lower task_reduction and reduction clauses (the latter unless CODE is
   OMP_TASKGROUP only with task modifier).  Register mapping of those in
   START sequence and reducing them and unregister them in the END sequence.  */

static void
lower_omp_task_reductions (omp_context *ctx, enum tree_code code, tree clauses,
			   gimple_seq *start, gimple_seq *end)
{
  enum omp_clause_code ccode
    = (code == OMP_TASKGROUP
       ? OMP_CLAUSE_TASK_REDUCTION : OMP_CLAUSE_REDUCTION);
  tree cancellable = NULL_TREE;
  clauses = omp_task_reductions_find_first (clauses, code, ccode);
  if (clauses == NULL_TREE)
    return;
  /* For worksharing constructs nested (possibly through taskgroups) in a
     cancellable parallel, extra bookkeeping is needed; CANCELLABLE is a
     sentinel here and is replaced by the _REDUCTEMP_ decl below.  */
  if (code == OMP_FOR || code == OMP_SECTIONS)
    {
      for (omp_context *outer = ctx->outer; outer; outer = outer->outer)
	if (gimple_code (outer->stmt) == GIMPLE_OMP_PARALLEL
	    && outer->cancellable)
	  {
	    cancellable = error_mark_node;
	    break;
	  }
	else if (gimple_code (outer->stmt) != GIMPLE_OMP_TASKGROUP)
	  break;
    }
  /* Phase 1: lay out the per-thread record type - optionally a pointer +
     int pair for cancellation, then for each reduction a value field
     followed by a bool "initialized" flag field.  */
  tree record_type = lang_hooks.types.make_type (RECORD_TYPE);
  tree *last = &TYPE_FIELDS (record_type);
  unsigned cnt = 0;
  if (cancellable)
    {
      tree field = build_decl (UNKNOWN_LOCATION, FIELD_DECL, NULL_TREE,
			       ptr_type_node);
      tree ifield = build_decl (UNKNOWN_LOCATION, FIELD_DECL, NULL_TREE,
				integer_type_node);
      *last = field;
      DECL_CHAIN (field) = ifield;
      last = &DECL_CHAIN (ifield);
      DECL_CONTEXT (field) = record_type;
      if (TYPE_ALIGN (record_type) < DECL_ALIGN (field))
	SET_TYPE_ALIGN (record_type, DECL_ALIGN (field));
      DECL_CONTEXT (ifield) = record_type;
      if (TYPE_ALIGN (record_type) < DECL_ALIGN (ifield))
	SET_TYPE_ALIGN (record_type, DECL_ALIGN (ifield));
    }
  for (int pass = 0; pass < 2; pass++)
    {
      tree decl, type, next;
      for (tree c = clauses;
	   omp_task_reduction_iterate (pass, code, ccode,
				       &c, &decl, &type, &next); c = next)
	{
	  ++cnt;
	  tree new_type = type;
	  if (ctx->outer)
	    new_type = remap_type (type, &ctx->outer->cb);
	  tree field
	    = build_decl (OMP_CLAUSE_LOCATION (c), FIELD_DECL,
			  DECL_P (decl) ? DECL_NAME (decl) : NULL_TREE,
			  new_type);
	  if (DECL_P (decl) && type == TREE_TYPE (decl))
	    {
	      SET_DECL_ALIGN (field, DECL_ALIGN (decl));
	      DECL_USER_ALIGN (field) = DECL_USER_ALIGN (decl);
	      TREE_THIS_VOLATILE (field) = TREE_THIS_VOLATILE (decl);
	    }
	  else
	    SET_DECL_ALIGN (field, TYPE_ALIGN (type));
	  DECL_CONTEXT (field) = record_type;
	  if (TYPE_ALIGN (record_type) < DECL_ALIGN (field))
	    SET_TYPE_ALIGN (record_type, DECL_ALIGN (field));
	  *last = field;
	  last = &DECL_CHAIN (field);
	  tree bfield
	    = build_decl (OMP_CLAUSE_LOCATION (c), FIELD_DECL, NULL_TREE,
			  boolean_type_node);
	  DECL_CONTEXT (bfield) = record_type;
	  if (TYPE_ALIGN (record_type) < DECL_ALIGN (bfield))
	    SET_TYPE_ALIGN (record_type, DECL_ALIGN (bfield));
	  *last = bfield;
	  last = &DECL_CHAIN (bfield);
	}
    }
  *last = NULL_TREE;
  layout_type (record_type);

  /* Build up an array which registers with the runtime all the reductions
     and deregisters them at the end.  Format documented in libgomp/task.c.  */
  tree atype = build_array_type_nelts (pointer_sized_int_node, 7 + cnt * 3);
  tree avar = create_tmp_var_raw (atype);
  gimple_add_tmp_var (avar);
  TREE_ADDRESSABLE (avar) = 1;
  /* [0]: number of reductions.  */
  tree r = build4 (ARRAY_REF, pointer_sized_int_node, avar, size_zero_node,
		   NULL_TREE, NULL_TREE);
  tree t = build_int_cst (pointer_sized_int_node, cnt);
  gimple_seq_add_stmt (start, gimple_build_assign (r, t));
  gimple_seq seq = NULL;
  /* [1]: per-thread chunk size, rounded up to a cache line.  */
  tree sz = fold_convert (pointer_sized_int_node,
			  TYPE_SIZE_UNIT (record_type));
  int cachesz = 64;
  sz = fold_build2 (PLUS_EXPR, pointer_sized_int_node, sz,
		    build_int_cst (pointer_sized_int_node, cachesz - 1));
  sz = fold_build2 (BIT_AND_EXPR, pointer_sized_int_node, sz,
		    build_int_cst (pointer_sized_int_node, ~(cachesz - 1)));
  ctx->task_reductions.create (1 + cnt);
  ctx->task_reduction_map = new hash_map<tree, unsigned>;
  ctx->task_reductions.quick_push (TREE_CODE (sz) == INTEGER_CST
				   ? sz : NULL_TREE);
  sz = force_gimple_operand (sz, &seq, true, NULL_TREE);
  gimple_seq_add_seq (start, seq);
  r = build4 (ARRAY_REF, pointer_sized_int_node, avar, size_one_node,
	      NULL_TREE, NULL_TREE);
  gimple_seq_add_stmt (start, gimple_build_assign (r, sz));
  /* [2]: required alignment (at least a cache line).  */
  r = build4 (ARRAY_REF, pointer_sized_int_node, avar, size_int (2),
	      NULL_TREE, NULL_TREE);
  t = build_int_cst (pointer_sized_int_node,
		     MAX (TYPE_ALIGN_UNIT (record_type), (unsigned) cachesz));
  gimple_seq_add_stmt (start, gimple_build_assign (r, t));
  /* [3] and [4]: slots filled in by the runtime.  */
  r = build4 (ARRAY_REF, pointer_sized_int_node, avar, size_int (3),
	      NULL_TREE, NULL_TREE);
  t = build_int_cst (pointer_sized_int_node, -1);
  gimple_seq_add_stmt (start, gimple_build_assign (r, t));
  r = build4 (ARRAY_REF, pointer_sized_int_node, avar, size_int (4),
	      NULL_TREE, NULL_TREE);
  t = build_int_cst (pointer_sized_int_node, 0);
  gimple_seq_add_stmt (start, gimple_build_assign (r, t));

  /* In end, build a loop that iterates from 0 to < omp_get_num_threads ()
     and for each task reduction checks a bool right after the private variable
     within that thread's chunk; if the bool is clear, it hasn't been
     initialized and thus isn't going to be reduced nor destructed, otherwise
     reduce and destruct it.  */
  tree idx = create_tmp_var (size_type_node);
  gimple_seq_add_stmt (end, gimple_build_assign (idx, size_zero_node));
  tree num_thr_sz = create_tmp_var (size_type_node);
  tree lab1 = create_artificial_label (UNKNOWN_LOCATION);
  tree lab2 = create_artificial_label (UNKNOWN_LOCATION);
  tree lab3 = NULL_TREE;
  gimple *g;
  if (code == OMP_FOR || code == OMP_SECTIONS)
    {
      /* For worksharing constructs, only perform it in the master thread,
	 with the exception of cancelled implicit barriers - then only handle
	 the current thread.  */
      tree lab4 = create_artificial_label (UNKNOWN_LOCATION);
      t = builtin_decl_explicit (BUILT_IN_OMP_GET_THREAD_NUM);
      tree thr_num = create_tmp_var (integer_type_node);
      g = gimple_build_call (t, 0);
      gimple_call_set_lhs (g, thr_num);
      gimple_seq_add_stmt (end, g);
      if (cancellable)
	{
	  tree c;
	  tree lab5 = create_artificial_label (UNKNOWN_LOCATION);
	  tree lab6 = create_artificial_label (UNKNOWN_LOCATION);
	  lab3 = create_artificial_label (UNKNOWN_LOCATION);
	  if (code == OMP_FOR)
	    c = gimple_omp_for_clauses (ctx->stmt);
	  else /* if (code == OMP_SECTIONS) */
	    c = gimple_omp_sections_clauses (ctx->stmt);
	  /* From here on CANCELLABLE is the _REDUCTEMP_ decl, non-zero at
	     runtime when the parallel has been cancelled.  */
	  c = OMP_CLAUSE_DECL (omp_find_clause (c, OMP_CLAUSE__REDUCTEMP_));
	  cancellable = c;
	  g = gimple_build_cond (NE_EXPR, c, build_zero_cst (TREE_TYPE (c)),
				 lab5, lab6);
	  gimple_seq_add_stmt (end, g);
	  gimple_seq_add_stmt (end, gimple_build_label (lab5));
	  /* Cancelled: iterate over just this thread's chunk.  */
	  g = gimple_build_assign (idx, NOP_EXPR, thr_num);
	  gimple_seq_add_stmt (end, g);
	  g = gimple_build_assign (num_thr_sz, PLUS_EXPR, idx,
				   build_one_cst (TREE_TYPE (idx)));
	  gimple_seq_add_stmt (end, g);
	  gimple_seq_add_stmt (end, gimple_build_goto (lab3));
	  gimple_seq_add_stmt (end, gimple_build_label (lab6));
	}
      g = gimple_build_cond (NE_EXPR, thr_num, integer_zero_node, lab2, lab4);
      gimple_seq_add_stmt (end, g);
      gimple_seq_add_stmt (end, gimple_build_label (lab4));
    }
  if (code != OMP_PARALLEL)
    {
      t = builtin_decl_explicit (BUILT_IN_OMP_GET_NUM_THREADS);
      tree num_thr = create_tmp_var (integer_type_node);
      g = gimple_build_call (t, 0);
      gimple_call_set_lhs (g, num_thr);
      gimple_seq_add_stmt (end, g);
      g = gimple_build_assign (num_thr_sz, NOP_EXPR, num_thr);
      gimple_seq_add_stmt (end, g);
      if (cancellable)
	gimple_seq_add_stmt (end, gimple_build_label (lab3));
    }
  else
    {
      /* For parallel, the thread count was stashed in the _REDUCTEMP_
	 clause's decl.  */
      tree c = omp_find_clause (gimple_omp_parallel_clauses (ctx->stmt),
				OMP_CLAUSE__REDUCTEMP_);
      t = fold_convert (pointer_sized_int_node, OMP_CLAUSE_DECL (c));
      t = fold_convert (size_type_node, t);
      gimplify_assign (num_thr_sz, t, end);
    }
  t = build4 (ARRAY_REF, pointer_sized_int_node, avar, size_int (2),
	      NULL_TREE, NULL_TREE);
  tree data = create_tmp_var (pointer_sized_int_node);
  gimple_seq_add_stmt (end, gimple_build_assign (data, t));
  gimple_seq_add_stmt (end, gimple_build_label (lab1));
  tree ptr;
  if (TREE_CODE (TYPE_SIZE_UNIT (record_type)) == INTEGER_CST)
    ptr = create_tmp_var (build_pointer_type (record_type));
  else
    ptr = create_tmp_var (ptr_type_node);
  gimple_seq_add_stmt (end, gimple_build_assign (ptr, NOP_EXPR, data));

  /* Phase 2: for each reduction emit the address/offset registration into
     START and the per-thread merge + destructor code into END.  */
  tree field = TYPE_FIELDS (record_type);
  cnt = 0;
  if (cancellable)
    field = DECL_CHAIN (DECL_CHAIN (field));
  for (int pass = 0; pass < 2; pass++)
    {
      tree decl, type, next;
      for (tree c = clauses;
	   omp_task_reduction_iterate (pass, code, ccode,
				       &c, &decl, &type, &next); c = next)
	{
	  tree var = decl, ref;
	  if (TREE_CODE (decl) == MEM_REF)
	    {
	      /* Array section: peel the MEM_REF down to the underlying
		 decl and rebuild the address in the outer context.  */
	      var = TREE_OPERAND (var, 0);
	      if (TREE_CODE (var) == POINTER_PLUS_EXPR)
		var = TREE_OPERAND (var, 0);
	      tree v = var;
	      if (TREE_CODE (var) == ADDR_EXPR)
		var = TREE_OPERAND (var, 0);
	      else if (TREE_CODE (var) == INDIRECT_REF)
		var = TREE_OPERAND (var, 0);
	      tree orig_var = var;
	      if (is_variable_sized (var))
		{
		  gcc_assert (DECL_HAS_VALUE_EXPR_P (var));
		  var = DECL_VALUE_EXPR (var);
		  gcc_assert (TREE_CODE (var) == INDIRECT_REF);
		  var = TREE_OPERAND (var, 0);
		  gcc_assert (DECL_P (var));
		}
	      t = ref = maybe_lookup_decl_in_outer_ctx (var, ctx);
	      if (orig_var != var)
		gcc_assert (TREE_CODE (v) == ADDR_EXPR);
	      else if (TREE_CODE (v) == ADDR_EXPR)
		t = build_fold_addr_expr (t);
	      else if (TREE_CODE (v) == INDIRECT_REF)
		t = build_fold_indirect_ref (t);
	      if (TREE_CODE (TREE_OPERAND (decl, 0)) == POINTER_PLUS_EXPR)
		{
		  tree b = TREE_OPERAND (TREE_OPERAND (decl, 0), 1);
		  b = maybe_lookup_decl_in_outer_ctx (b, ctx);
		  t = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (t), t, b);
		}
	      if (!integer_zerop (TREE_OPERAND (decl, 1)))
		t = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (t), t,
				 fold_convert (size_type_node,
					       TREE_OPERAND (decl, 1)));
	    }
	  else
	    {
	      t = ref = maybe_lookup_decl_in_outer_ctx (var, ctx);
	      if (!omp_is_reference (decl))
		t = build_fold_addr_expr (t);
	    }
	  /* [7 + cnt * 3]: address of the original variable.  */
	  t = fold_convert (pointer_sized_int_node, t);
	  seq = NULL;
	  t = force_gimple_operand (t, &seq, true, NULL_TREE);
	  gimple_seq_add_seq (start, seq);
	  r = build4 (ARRAY_REF, pointer_sized_int_node, avar,
		      size_int (7 + cnt * 3), NULL_TREE, NULL_TREE);
	  gimple_seq_add_stmt (start, gimple_build_assign (r, t));
	  /* [7 + cnt * 3 + 1]: offset of the field within the record.  */
	  t = unshare_expr (byte_position (field));
	  t = fold_convert (pointer_sized_int_node, t);
	  ctx->task_reduction_map->put (c, cnt);
	  ctx->task_reductions.quick_push (TREE_CODE (t) == INTEGER_CST
					   ? t : NULL_TREE);
	  seq = NULL;
	  t = force_gimple_operand (t, &seq, true, NULL_TREE);
	  gimple_seq_add_seq (start, seq);
	  r = build4 (ARRAY_REF, pointer_sized_int_node, avar,
		      size_int (7 + cnt * 3 + 1), NULL_TREE, NULL_TREE);
	  gimple_seq_add_stmt (start, gimple_build_assign (r, t));

	  tree bfield = DECL_CHAIN (field);
	  tree cond;
	  if (code == OMP_PARALLEL || code == OMP_FOR || code == OMP_SECTIONS)
	    /* In parallel or worksharing all threads unconditionally
	       initialize all their task reduction private variables.  */
	    cond = boolean_true_node;
	  else if (TREE_TYPE (ptr) == ptr_type_node)
	    {
	      cond = build2 (POINTER_PLUS_EXPR, ptr_type_node, ptr,
			     unshare_expr (byte_position (bfield)));
	      seq = NULL;
	      cond = force_gimple_operand (cond, &seq, true, NULL_TREE);
	      gimple_seq_add_seq (end, seq);
	      tree pbool = build_pointer_type (TREE_TYPE (bfield));
	      cond = build2 (MEM_REF, TREE_TYPE (bfield), cond,
			     build_int_cst (pbool, 0));
	    }
	  else
	    cond = build3 (COMPONENT_REF, TREE_TYPE (bfield),
			   build_simple_mem_ref (ptr), bfield, NULL_TREE);
	  tree lab3 = create_artificial_label (UNKNOWN_LOCATION);
	  tree lab4 = create_artificial_label (UNKNOWN_LOCATION);
	  tree condv = create_tmp_var (boolean_type_node);
	  gimple_seq_add_stmt (end, gimple_build_assign (condv, cond));
	  g = gimple_build_cond (NE_EXPR, condv, boolean_false_node,
				 lab3, lab4);
	  gimple_seq_add_stmt (end, g);
	  gimple_seq_add_stmt (end, gimple_build_label (lab3));
	  if (cancellable && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) == NULL_TREE)
	    {
	      /* If this reduction doesn't need destruction and parallel
		 has been cancelled, there is nothing to do for this
		 reduction, so jump around the merge operation.  */
	      tree lab5 = create_artificial_label (UNKNOWN_LOCATION);
	      g = gimple_build_cond (NE_EXPR, cancellable,
				     build_zero_cst (TREE_TYPE (cancellable)),
				     lab4, lab5);
	      gimple_seq_add_stmt (end, g);
	      gimple_seq_add_stmt (end, gimple_build_label (lab5));
	    }

	  tree new_var;
	  if (TREE_TYPE (ptr) == ptr_type_node)
	    {
	      new_var = build2 (POINTER_PLUS_EXPR, ptr_type_node, ptr,
				unshare_expr (byte_position (field)));
	      seq = NULL;
	      new_var = force_gimple_operand (new_var, &seq, true, NULL_TREE);
	      gimple_seq_add_seq (end, seq);
	      tree pbool = build_pointer_type (TREE_TYPE (field));
	      new_var = build2 (MEM_REF, TREE_TYPE (field), new_var,
				build_int_cst (pbool, 0));
	    }
	  else
	    new_var = build3 (COMPONENT_REF, TREE_TYPE (field),
			      build_simple_mem_ref (ptr), field, NULL_TREE);

	  enum tree_code rcode = OMP_CLAUSE_REDUCTION_CODE (c);
	  if (TREE_CODE (decl) != MEM_REF && omp_is_reference (decl))
	    ref = build_simple_mem_ref (ref);
	  /* reduction(-:var) sums up the partial results, so it acts
	     identically to reduction(+:var).  */
	  if (rcode == MINUS_EXPR)
	    rcode = PLUS_EXPR;
	  if (TREE_CODE (decl) == MEM_REF)
	    {
	      /* Array section: loop element-by-element over both the
		 private copy and the original, merging each pair.  */
	      tree type = TREE_TYPE (new_var);
	      tree v = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
	      tree i = create_tmp_var (TREE_TYPE (v));
	      tree ptype = build_pointer_type (TREE_TYPE (type));
	      if (DECL_P (v))
		{
		  v = maybe_lookup_decl_in_outer_ctx (v, ctx);
		  tree vv = create_tmp_var (TREE_TYPE (v));
		  gimplify_assign (vv, v, start);
		  v = vv;
		}
	      ref = build4 (ARRAY_REF, pointer_sized_int_node, avar,
			    size_int (7 + cnt * 3), NULL_TREE, NULL_TREE);
	      new_var = build_fold_addr_expr (new_var);
	      new_var = fold_convert (ptype, new_var);
	      ref = fold_convert (ptype, ref);
	      tree m = create_tmp_var (ptype);
	      gimplify_assign (m, new_var, end);
	      new_var = m;
	      m = create_tmp_var (ptype);
	      gimplify_assign (m, ref, end);
	      ref = m;
	      gimplify_assign (i, build_int_cst (TREE_TYPE (v), 0), end);
	      tree body = create_artificial_label (UNKNOWN_LOCATION);
	      tree endl = create_artificial_label (UNKNOWN_LOCATION);
	      gimple_seq_add_stmt (end, gimple_build_label (body));
	      tree priv = build_simple_mem_ref (new_var);
	      tree out = build_simple_mem_ref (ref);
	      if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
		{
		  /* User-defined reduction: run the combiner via the
		     placeholders, then the destructor if any.  */
		  tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
		  tree decl_placeholder
		    = OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c);
		  tree lab6 = NULL_TREE;
		  if (cancellable)
		    {
		      /* If this reduction needs destruction and parallel
			 has been cancelled, jump around the merge operation
			 to the destruction.  */
		      tree lab5 = create_artificial_label (UNKNOWN_LOCATION);
		      lab6 = create_artificial_label (UNKNOWN_LOCATION);
		      tree zero = build_zero_cst (TREE_TYPE (cancellable));
		      g = gimple_build_cond (NE_EXPR, cancellable, zero,
					     lab6, lab5);
		      gimple_seq_add_stmt (end, g);
		      gimple_seq_add_stmt (end, gimple_build_label (lab5));
		    }
		  SET_DECL_VALUE_EXPR (placeholder, out);
		  DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
		  SET_DECL_VALUE_EXPR (decl_placeholder, priv);
		  DECL_HAS_VALUE_EXPR_P (decl_placeholder) = 1;
		  lower_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c), ctx);
		  gimple_seq_add_seq (end,
				      OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c));
		  OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
		  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_TASK_REDUCTION)
		    {
		      OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) = NULL;
		      OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c) = NULL;
		    }
		  if (cancellable)
		    gimple_seq_add_stmt (end, gimple_build_label (lab6));
		  tree x = lang_hooks.decls.omp_clause_dtor (c, priv);
		  if (x)
		    {
		      gimple_seq tseq = NULL;
		      gimplify_stmt (&x, &tseq);
		      gimple_seq_add_seq (end, tseq);
		    }
		}
	      else
		{
		  tree x = build2 (rcode, TREE_TYPE (out), out, priv);
		  out = unshare_expr (out);
		  gimplify_assign (out, x, end);
		}
	      /* Advance both pointers and the index; loop while i <= max.  */
	      gimple *g
		= gimple_build_assign (new_var, POINTER_PLUS_EXPR, new_var,
				       TYPE_SIZE_UNIT (TREE_TYPE (type)));
	      gimple_seq_add_stmt (end, g);
	      g = gimple_build_assign (ref, POINTER_PLUS_EXPR, ref,
				       TYPE_SIZE_UNIT (TREE_TYPE (type)));
	      gimple_seq_add_stmt (end, g);
	      g = gimple_build_assign (i, PLUS_EXPR, i,
				       build_int_cst (TREE_TYPE (i), 1));
	      gimple_seq_add_stmt (end, g);
	      g = gimple_build_cond (LE_EXPR, i, v, body, endl);
	      gimple_seq_add_stmt (end, g);
	      gimple_seq_add_stmt (end, gimple_build_label (endl));
	    }
	  else if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
	    {
	      /* Scalar user-defined reduction.  */
	      tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
	      tree oldv = NULL_TREE;
	      tree lab6 = NULL_TREE;
	      if (cancellable)
		{
		  /* If this reduction needs destruction and parallel
		     has been cancelled, jump around the merge operation
		     to the destruction.  */
		  tree lab5 = create_artificial_label (UNKNOWN_LOCATION);
		  lab6 = create_artificial_label (UNKNOWN_LOCATION);
		  tree zero = build_zero_cst (TREE_TYPE (cancellable));
		  g = gimple_build_cond (NE_EXPR, cancellable, zero,
					 lab6, lab5);
		  gimple_seq_add_stmt (end, g);
		  gimple_seq_add_stmt (end, gimple_build_label (lab5));
		}
	      if (omp_is_reference (decl)
		  && !useless_type_conversion_p (TREE_TYPE (placeholder),
						 TREE_TYPE (ref)))
		ref = build_fold_addr_expr_loc (OMP_CLAUSE_LOCATION (c), ref);
	      ref = build_fold_addr_expr_loc (OMP_CLAUSE_LOCATION (c), ref);
	      tree refv = create_tmp_var (TREE_TYPE (ref));
	      gimplify_assign (refv, ref, end);
	      ref = build_simple_mem_ref_loc (OMP_CLAUSE_LOCATION (c), refv);
	      SET_DECL_VALUE_EXPR (placeholder, ref);
	      DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
	      /* Temporarily redirect DECL's local copy at the record field
		 while lowering the merge sequence; restore afterwards.  */
	      tree d = maybe_lookup_decl (decl, ctx);
	      gcc_assert (d);
	      if (DECL_HAS_VALUE_EXPR_P (d))
		oldv = DECL_VALUE_EXPR (d);
	      if (omp_is_reference (var))
		{
		  tree v = fold_convert (TREE_TYPE (d),
					 build_fold_addr_expr (new_var));
		  SET_DECL_VALUE_EXPR (d, v);
		}
	      else
		SET_DECL_VALUE_EXPR (d, new_var);
	      DECL_HAS_VALUE_EXPR_P (d) = 1;
	      lower_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c), ctx);
	      if (oldv)
		SET_DECL_VALUE_EXPR (d, oldv);
	      else
		{
		  SET_DECL_VALUE_EXPR (d, NULL_TREE);
		  DECL_HAS_VALUE_EXPR_P (d) = 0;
		}
	      gimple_seq_add_seq (end, OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c));
	      OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
	      if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_TASK_REDUCTION)
		OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) = NULL;
	      if (cancellable)
		gimple_seq_add_stmt (end, gimple_build_label (lab6));
	      tree x = lang_hooks.decls.omp_clause_dtor (c, new_var);
	      if (x)
		{
		  gimple_seq tseq = NULL;
		  gimplify_stmt (&x, &tseq);
		  gimple_seq_add_seq (end, tseq);
		}
	    }
	  else
	    {
	      /* Plain scalar reduction: ref = ref <rcode> new_var.  */
	      tree x = build2 (rcode, TREE_TYPE (ref), ref, new_var);
	      ref = unshare_expr (ref);
	      gimplify_assign (ref, x, end);
	    }
	  gimple_seq_add_stmt (end, gimple_build_label (lab4));
	  ++cnt;
	  field = DECL_CHAIN (bfield);
	}
    }

  /* Phase 3: register the descriptor array with the runtime (or stash
     its address in the _REDUCTEMP_ clause for the expansion pass).  */
  if (code == OMP_TASKGROUP)
    {
      t = builtin_decl_explicit (BUILT_IN_GOMP_TASKGROUP_REDUCTION_REGISTER);
      g = gimple_build_call (t, 1, build_fold_addr_expr (avar));
      gimple_seq_add_stmt (start, g);
    }
  else
    {
      tree c;
      if (code == OMP_FOR)
	c = gimple_omp_for_clauses (ctx->stmt);
      else if (code == OMP_SECTIONS)
	c = gimple_omp_sections_clauses (ctx->stmt);
      else
	c = gimple_omp_taskreg_clauses (ctx->stmt);
      c = omp_find_clause (c, OMP_CLAUSE__REDUCTEMP_);
      t = fold_convert (TREE_TYPE (OMP_CLAUSE_DECL (c)),
			build_fold_addr_expr (avar));
      gimplify_assign (OMP_CLAUSE_DECL (c), t, start);
    }

  /* Close the per-thread loop and unregister the reductions.  */
  gimple_seq_add_stmt (end, gimple_build_assign (data, PLUS_EXPR, data, sz));
  gimple_seq_add_stmt (end, gimple_build_assign (idx, PLUS_EXPR, idx,
						 size_one_node));
  g = gimple_build_cond (NE_EXPR, idx, num_thr_sz, lab1, lab2);
  gimple_seq_add_stmt (end, g);
  gimple_seq_add_stmt (end, gimple_build_label (lab2));
  if (code == OMP_FOR || code == OMP_SECTIONS)
    {
      enum built_in_function bfn
	= BUILT_IN_GOMP_WORKSHARE_TASK_REDUCTION_UNREGISTER;
      t = builtin_decl_explicit (bfn);
      tree c_bool_type = TREE_VALUE (TYPE_ARG_TYPES (TREE_TYPE (t)));
      tree arg;
      if (cancellable)
	{
	  arg = create_tmp_var (c_bool_type);
	  gimple_seq_add_stmt (end, gimple_build_assign (arg, NOP_EXPR,
							 cancellable));
	}
      else
	arg = build_int_cst (c_bool_type, 0);
      g = gimple_build_call (t, 1, arg);
    }
  else
    {
      t = builtin_decl_explicit (BUILT_IN_GOMP_TASKGROUP_REDUCTION_UNREGISTER);
      g = gimple_build_call (t, 1, build_fold_addr_expr (avar));
    }
  gimple_seq_add_stmt (end, g);
  /* Clobber the descriptor array so its stack slot can be reused.  */
  t = build_constructor (atype, NULL);
  TREE_THIS_VOLATILE (t) = 1;
  gimple_seq_add_stmt (end, gimple_build_assign (avar, t));
}
7971
/* Expand code for an OpenMP taskgroup directive.  Replaces the
   GIMPLE_OMP_TASKGROUP at *GSI_P with a GIMPLE_BIND that calls
   GOMP_taskgroup_start, registers any task reductions, contains the
   lowered body, and ends with an OMP return marker followed by the
   deferred reduction teardown sequence.  CTX is the statement's
   lowering context.  */

static void
lower_omp_taskgroup (gimple_stmt_iterator *gsi_p, omp_context *ctx)
{
  gimple *stmt = gsi_stmt (*gsi_p);
  gcall *x;
  gbind *bind;
  /* Teardown code produced by lower_omp_task_reductions, emitted after
     the OMP return marker below.  */
  gimple_seq dseq = NULL;
  tree block = make_node (BLOCK);

  /* Replace the taskgroup stmt with a fresh bind and re-add the stmt
     inside it, so everything we build ends up in the bind's body.  */
  bind = gimple_build_bind (NULL, NULL, block);
  gsi_replace (gsi_p, bind, true);
  gimple_bind_add_stmt (bind, stmt);

  push_gimplify_context ();

  x = gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_TASKGROUP_START),
			 0);
  gimple_bind_add_stmt (bind, x);

  /* Registration code goes into the bind body now; the matching
     unregister/merge code is collected into DSEQ.  */
  lower_omp_task_reductions (ctx, OMP_TASKGROUP,
			     gimple_omp_taskgroup_clauses (stmt),
			     gimple_bind_body_ptr (bind), &dseq);

  lower_omp (gimple_omp_body_ptr (stmt), ctx);
  gimple_bind_add_seq (bind, gimple_omp_body (stmt));
  gimple_omp_set_body (stmt, NULL);

  gimple_bind_add_stmt (bind, gimple_build_omp_return (true));
  gimple_bind_add_seq (bind, dseq);

  pop_gimplify_context (bind);

  gimple_bind_append_vars (bind, ctx->block_vars);
  BLOCK_VARS (block) = ctx->block_vars;
}
8009
8010
/* Fold the OMP_ORDERED_CLAUSES for the OMP_ORDERED in STMT if possible.
   First merges adjacent `ordered depend(sink:...)' constructs into one,
   then folds all sink dependence vectors into a single canonical vector
   (see the long comment below for the algorithm).  ORD_STMT is the
   ordered statement being lowered; CTX its lowering context, whose outer
   context must be the enclosing GIMPLE_OMP_FOR.  */

static void
lower_omp_ordered_clauses (gimple_stmt_iterator *gsi_p, gomp_ordered *ord_stmt,
			   omp_context *ctx)
{
  struct omp_for_data fd;
  /* Only meaningful directly inside an OMP for; otherwise nothing to do.  */
  if (!ctx->outer || gimple_code (ctx->outer->stmt) != GIMPLE_OMP_FOR)
    return;

  unsigned int len = gimple_omp_for_collapse (ctx->outer->stmt);
  struct omp_for_data_loop *loops = XALLOCAVEC (struct omp_for_data_loop, len);
  omp_extract_for_data (as_a <gomp_for *> (ctx->outer->stmt), &fd, loops);
  if (!fd.ordered)
    return;

  tree *list_p = gimple_omp_ordered_clauses_ptr (ord_stmt);
  tree c = gimple_omp_ordered_clauses (ord_stmt);
  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND
      && OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SINK)
    {
      /* Merge depend clauses from multiple adjacent
	 #pragma omp ordered depend(sink:...) constructs
	 into one #pragma omp ordered depend(sink:...), so that
	 we can optimize them together.  */
      gimple_stmt_iterator gsi = *gsi_p;
      gsi_next (&gsi);
      while (!gsi_end_p (gsi))
	{
	  gimple *stmt = gsi_stmt (gsi);
	  /* Debug stmts and nops between the ordered constructs don't
	     break adjacency.  */
	  if (is_gimple_debug (stmt)
	      || gimple_code (stmt) == GIMPLE_NOP)
	    {
	      gsi_next (&gsi);
	      continue;
	    }
	  if (gimple_code (stmt) != GIMPLE_OMP_ORDERED)
	    break;
	  gomp_ordered *ord_stmt2 = as_a <gomp_ordered *> (stmt);
	  c = gimple_omp_ordered_clauses (ord_stmt2);
	  if (c == NULL_TREE
	      || OMP_CLAUSE_CODE (c) != OMP_CLAUSE_DEPEND
	      || OMP_CLAUSE_DEPEND_KIND (c) != OMP_CLAUSE_DEPEND_SINK)
	    break;
	  /* Splice the next construct's clauses onto the end of ours and
	     delete the now-redundant ordered stmt.  */
	  while (*list_p)
	    list_p = &OMP_CLAUSE_CHAIN (*list_p);
	  *list_p = c;
	  gsi_remove (&gsi, true);
	}
    }

  /* Canonicalize sink dependence clauses into one folded clause if
     possible.

     The basic algorithm is to create a sink vector whose first
     element is the GCD of all the first elements, and whose remaining
     elements are the minimum of the subsequent columns.

     We ignore dependence vectors whose first element is zero because
     such dependencies are known to be executed by the same thread.

     We take into account the direction of the loop, so a minimum
     becomes a maximum if the loop is iterating forwards.  We also
     ignore sink clauses where the loop direction is unknown, or where
     the offsets are clearly invalid because they are not a multiple
     of the loop increment.

     For example:

	#pragma omp for ordered(2)
	for (i=0; i < N; ++i)
	  for (j=0; j < M; ++j)
	    {
	      #pragma omp ordered \
		depend(sink:i-8,j-2) \
		depend(sink:i,j-1) \	// Completely ignored because i+0.
		depend(sink:i-4,j-3) \
		depend(sink:i-6,j-4)
	      #pragma omp ordered depend(source)
	    }

     Folded clause is:

	depend(sink:-gcd(8,4,6),-min(2,3,4))
	  -or-
	depend(sink:-2,-2)
  */

  /* FIXME: Computing GCD's where the first element is zero is
     non-trivial in the presence of collapsed loops.  Do this later.  */
  if (fd.collapse > 1)
    return;

  wide_int *folded_deps = XALLOCAVEC (wide_int, 2 * len - 1);

  /* wide_int is not a POD so it must be default-constructed.  */
  for (unsigned i = 0; i != 2 * len - 1; ++i)
    new (static_cast<void*>(folded_deps + i)) wide_int ();

  tree folded_dep = NULL_TREE;
  /* TRUE if the first dimension's offset is negative.  */
  bool neg_offset_p = false;

  list_p = gimple_omp_ordered_clauses_ptr (ord_stmt);
  unsigned int i;
  while ((c = *list_p) != NULL)
    {
      bool remove = false;

      gcc_assert (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND);
      if (OMP_CLAUSE_DEPEND_KIND (c) != OMP_CLAUSE_DEPEND_SINK)
	goto next_ordered_clause;

      tree vec;
      for (vec = OMP_CLAUSE_DECL (c), i = 0;
	   vec && TREE_CODE (vec) == TREE_LIST;
	   vec = TREE_CHAIN (vec), ++i)
	{
	  gcc_assert (i < len);

	  /* omp_extract_for_data has canonicalized the condition.  */
	  gcc_assert (fd.loops[i].cond_code == LT_EXPR
		      || fd.loops[i].cond_code == GT_EXPR);
	  bool forward = fd.loops[i].cond_code == LT_EXPR;
	  bool maybe_lexically_later = true;

	  /* While the committee makes up its mind, bail if we have any
	     non-constant steps.  */
	  if (TREE_CODE (fd.loops[i].step) != INTEGER_CST)
	    goto lower_omp_ordered_ret;

	  tree itype = TREE_TYPE (TREE_VALUE (vec));
	  if (POINTER_TYPE_P (itype))
	    itype = sizetype;
	  wide_int offset = wide_int::from (wi::to_wide (TREE_PURPOSE (vec)),
					    TYPE_PRECISION (itype),
					    TYPE_SIGN (itype));

	  /* Ignore invalid offsets that are not multiples of the step.  */
	  if (!wi::multiple_of_p (wi::abs (offset),
				  wi::abs (wi::to_wide (fd.loops[i].step)),
				  UNSIGNED))
	    {
	      warning_at (OMP_CLAUSE_LOCATION (c), 0,
			  "ignoring sink clause with offset that is not "
			  "a multiple of the loop step");
	      remove = true;
	      goto next_ordered_clause;
	    }

	  /* Calculate the first dimension.  The first dimension of
	     the folded dependency vector is the GCD of the first
	     elements, while ignoring any first elements whose offset
	     is 0.  */
	  if (i == 0)
	    {
	      /* Ignore dependence vectors whose first dimension is 0.  */
	      if (offset == 0)
		{
		  remove = true;
		  goto next_ordered_clause;
		}
	      else
		{
		  if (!TYPE_UNSIGNED (itype) && (forward ^ wi::neg_p (offset)))
		    {
		      error_at (OMP_CLAUSE_LOCATION (c),
				"first offset must be in opposite direction "
				"of loop iterations");
		      goto lower_omp_ordered_ret;
		    }
		  /* Work with magnitudes; remember the sign so it can be
		     restored after folding (see neg_offset_p below).  */
		  if (forward)
		    offset = -offset;
		  neg_offset_p = forward;
		  /* Initialize the first time around.  */
		  if (folded_dep == NULL_TREE)
		    {
		      folded_dep = c;
		      folded_deps[0] = offset;
		    }
		  else
		    folded_deps[0] = wi::gcd (folded_deps[0],
					      offset, UNSIGNED);
		}
	    }
	  /* Calculate minimum for the remaining dimensions.  */
	  else
	    {
	      /* folded_deps[len..2*len-2] holds the current clause's
		 vector; folded_deps[1..len-1] holds the folded minimum.  */
	      folded_deps[len + i - 1] = offset;
	      if (folded_dep == c)
		folded_deps[i] = offset;
	      else if (maybe_lexically_later
		       && !wi::eq_p (folded_deps[i], offset))
		{
		  if (forward ^ wi::gts_p (folded_deps[i], offset))
		    {
		      unsigned int j;
		      folded_dep = c;
		      for (j = 1; j <= i; j++)
			folded_deps[j] = folded_deps[len + j - 1];
		    }
		  else
		    maybe_lexically_later = false;
		}
	    }
	}
      gcc_assert (i == len);

      /* A fully-processed sink clause has been folded in; drop it.  */
      remove = true;

    next_ordered_clause:
      if (remove)
	*list_p = OMP_CLAUSE_CHAIN (c);
      else
	list_p = &OMP_CLAUSE_CHAIN (c);
    }

  if (folded_dep)
    {
      if (neg_offset_p)
	folded_deps[0] = -folded_deps[0];

      tree itype = TREE_TYPE (TREE_VALUE (OMP_CLAUSE_DECL (folded_dep)));
      if (POINTER_TYPE_P (itype))
	itype = sizetype;

      /* Reuse FOLDED_DEP as the single surviving sink clause, with the
	 folded first-dimension offset written back into it.  */
      TREE_PURPOSE (OMP_CLAUSE_DECL (folded_dep))
	= wide_int_to_tree (itype, folded_deps[0]);
      OMP_CLAUSE_CHAIN (folded_dep) = gimple_omp_ordered_clauses (ord_stmt);
      *gimple_omp_ordered_clauses_ptr (ord_stmt) = folded_dep;
    }

 lower_omp_ordered_ret:

  /* Ordered without clauses is #pragma omp threads, while we want
     a nop instead if we remove all clauses.  */
  if (gimple_omp_ordered_clauses (ord_stmt) == NULL_TREE)
    gsi_replace (gsi_p, gimple_build_nop (), true);
}
8250
8251
/* Expand code for an OpenMP ordered directive.  Wraps the lowered body
   in GOMP_ordered_start/GOMP_ordered_end calls (or the internal
   GOMP_SIMD_ORDERED_{START,END} functions for simd), and for possibly
   SIMT-offloaded regions additionally serializes the body across SIMT
   lanes via a per-lane predicate loop.  Ordered with depend clauses is
   left alone here (handled at expansion time).  */

static void
lower_omp_ordered (gimple_stmt_iterator *gsi_p, omp_context *ctx)
{
  tree block;
  gimple *stmt = gsi_stmt (*gsi_p), *g;
  gomp_ordered *ord_stmt = as_a <gomp_ordered *> (stmt);
  gcall *x;
  gbind *bind;
  bool simd = omp_find_clause (gimple_omp_ordered_clauses (ord_stmt),
			       OMP_CLAUSE_SIMD);
  /* FIXME: this should check presence of OMP_CLAUSE__SIMT_ on the enclosing
     loop.  */
  bool maybe_simt
    = simd && omp_maybe_offloaded_ctx (ctx) && omp_max_simt_vf () > 1;
  bool threads = omp_find_clause (gimple_omp_ordered_clauses (ord_stmt),
				  OMP_CLAUSE_THREADS);

  if (omp_find_clause (gimple_omp_ordered_clauses (ord_stmt),
		       OMP_CLAUSE_DEPEND))
    {
      /* FIXME: This is needs to be moved to the expansion to verify various
	 conditions only testable on cfg with dominators computed, and also
	 all the depend clauses to be merged still might need to be available
	 for the runtime checks.  */
      if (0)
	lower_omp_ordered_clauses (gsi_p, ord_stmt, ctx);
      return;
    }

  push_gimplify_context ();

  block = make_node (BLOCK);
  bind = gimple_build_bind (NULL, NULL, block);
  gsi_replace (gsi_p, bind, true);
  gimple_bind_add_stmt (bind, stmt);

  if (simd)
    {
      x = gimple_build_call_internal (IFN_GOMP_SIMD_ORDERED_START, 1,
				      build_int_cst (NULL_TREE, threads));
      cfun->has_simduid_loops = true;
    }
  else
    x = gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_ORDERED_START),
			   0);
  gimple_bind_add_stmt (bind, x);

  tree counter = NULL_TREE, test = NULL_TREE, body = NULL_TREE;
  if (maybe_simt)
    {
      /* For SIMT execution, run the body one lane at a time: COUNTER is
	 this thread's lane number; the body executes only when the
	 ORDERED_PRED predicate selects this lane, then the loop below
	 (at label TEST) counts down until every lane has had a turn.  */
      counter = create_tmp_var (integer_type_node);
      g = gimple_build_call_internal (IFN_GOMP_SIMT_LANE, 0);
      gimple_call_set_lhs (g, counter);
      gimple_bind_add_stmt (bind, g);

      body = create_artificial_label (UNKNOWN_LOCATION);
      test = create_artificial_label (UNKNOWN_LOCATION);
      gimple_bind_add_stmt (bind, gimple_build_label (body));

      tree simt_pred = create_tmp_var (integer_type_node);
      g = gimple_build_call_internal (IFN_GOMP_SIMT_ORDERED_PRED, 1, counter);
      gimple_call_set_lhs (g, simt_pred);
      gimple_bind_add_stmt (bind, g);

      tree t = create_artificial_label (UNKNOWN_LOCATION);
      g = gimple_build_cond (EQ_EXPR, simt_pred, integer_zero_node, t, test);
      gimple_bind_add_stmt (bind, g);

      gimple_bind_add_stmt (bind, gimple_build_label (t));
    }
  lower_omp (gimple_omp_body_ptr (stmt), ctx);
  gimple_omp_set_body (stmt, maybe_catch_exception (gimple_omp_body (stmt)));
  gimple_bind_add_seq (bind, gimple_omp_body (stmt));
  gimple_omp_set_body (stmt, NULL);

  if (maybe_simt)
    {
      /* Loop back to BODY while any lane still has a nonnegative
	 counter, i.e. until all lanes have executed the body.  */
      gimple_bind_add_stmt (bind, gimple_build_label (test));
      g = gimple_build_assign (counter, MINUS_EXPR, counter, integer_one_node);
      gimple_bind_add_stmt (bind, g);

      tree c = build2 (GE_EXPR, boolean_type_node, counter, integer_zero_node);
      tree nonneg = create_tmp_var (integer_type_node);
      gimple_seq tseq = NULL;
      gimplify_assign (nonneg, fold_convert (integer_type_node, c), &tseq);
      gimple_bind_add_seq (bind, tseq);

      g = gimple_build_call_internal (IFN_GOMP_SIMT_VOTE_ANY, 1, nonneg);
      gimple_call_set_lhs (g, nonneg);
      gimple_bind_add_stmt (bind, g);

      tree end = create_artificial_label (UNKNOWN_LOCATION);
      g = gimple_build_cond (NE_EXPR, nonneg, integer_zero_node, body, end);
      gimple_bind_add_stmt (bind, g);

      gimple_bind_add_stmt (bind, gimple_build_label (end));
    }
  if (simd)
    x = gimple_build_call_internal (IFN_GOMP_SIMD_ORDERED_END, 1,
				    build_int_cst (NULL_TREE, threads));
  else
    x = gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_ORDERED_END),
			   0);
  gimple_bind_add_stmt (bind, x);

  gimple_bind_add_stmt (bind, gimple_build_omp_return (true));

  pop_gimplify_context (bind);

  gimple_bind_append_vars (bind, ctx->block_vars);
  BLOCK_VARS (block) = gimple_bind_vars (bind);
}
8366
8367
/* Gimplify a GIMPLE_OMP_CRITICAL statement.  This is a relatively simple
   substitution of a couple of function calls.  But in the NAMED case,
   requires that languages coordinate a symbol name.  It is therefore
   best put here in common code.  */

/* Maps critical-section names to their lazily-created public mutex
   variables (.gomp_critical_user_<name>), shared across the whole
   compilation (GC-rooted via GTY).  */
static GTY(()) hash_map<tree, tree> *critical_name_mutexes;

static void
lower_omp_critical (gimple_stmt_iterator *gsi_p, omp_context *ctx)
{
  tree block;
  tree name, lock, unlock;
  gomp_critical *stmt = as_a <gomp_critical *> (gsi_stmt (*gsi_p));
  gbind *bind;
  location_t loc = gimple_location (stmt);
  gimple_seq tbody;

  name = gimple_omp_critical_name (stmt);
  if (name)
    {
      tree decl;

      if (!critical_name_mutexes)
	critical_name_mutexes = hash_map<tree, tree>::create_ggc (10);

      tree *n = critical_name_mutexes->get (name);
      if (n == NULL)
	{
	  char *new_str;

	  decl = create_tmp_var_raw (ptr_type_node);

	  /* TREE_PUBLIC + DECL_COMMON: the mutex symbol must be mergeable
	     across translation units that use the same critical name.  */
	  new_str = ACONCAT ((".gomp_critical_user_",
			      IDENTIFIER_POINTER (name), NULL));
	  DECL_NAME (decl) = get_identifier (new_str);
	  TREE_PUBLIC (decl) = 1;
	  TREE_STATIC (decl) = 1;
	  DECL_COMMON (decl) = 1;
	  DECL_ARTIFICIAL (decl) = 1;
	  DECL_IGNORED_P (decl) = 1;

	  varpool_node::finalize_decl (decl);

	  critical_name_mutexes->put (name, decl);
	}
      else
	decl = *n;

      /* If '#pragma omp critical' is inside offloaded region or
	 inside function marked as offloadable, the symbol must be
	 marked as offloadable too.  */
      omp_context *octx;
      if (cgraph_node::get (current_function_decl)->offloadable)
	varpool_node::get_create (decl)->offloadable = 1;
      else
	for (octx = ctx->outer; octx; octx = octx->outer)
	  if (is_gimple_omp_offloaded (octx->stmt))
	    {
	      varpool_node::get_create (decl)->offloadable = 1;
	      break;
	    }

      lock = builtin_decl_explicit (BUILT_IN_GOMP_CRITICAL_NAME_START);
      lock = build_call_expr_loc (loc, lock, 1,
				  build_fold_addr_expr_loc (loc, decl));

      unlock = builtin_decl_explicit (BUILT_IN_GOMP_CRITICAL_NAME_END);
      unlock = build_call_expr_loc (loc, unlock, 1,
				    build_fold_addr_expr_loc (loc, decl));
    }
  else
    {
      /* Unnamed critical sections all share one global runtime lock.  */
      lock = builtin_decl_explicit (BUILT_IN_GOMP_CRITICAL_START);
      lock = build_call_expr_loc (loc, lock, 0);

      unlock = builtin_decl_explicit (BUILT_IN_GOMP_CRITICAL_END);
      unlock = build_call_expr_loc (loc, unlock, 0);
    }

  push_gimplify_context ();

  block = make_node (BLOCK);
  bind = gimple_build_bind (NULL, NULL, block);
  gsi_replace (gsi_p, bind, true);
  gimple_bind_add_stmt (bind, stmt);

  tbody = gimple_bind_body (bind);
  gimplify_and_add (lock, &tbody);
  gimple_bind_set_body (bind, tbody);

  lower_omp (gimple_omp_body_ptr (stmt), ctx);
  gimple_omp_set_body (stmt, maybe_catch_exception (gimple_omp_body (stmt)));
  gimple_bind_add_seq (bind, gimple_omp_body (stmt));
  gimple_omp_set_body (stmt, NULL);

  tbody = gimple_bind_body (bind);
  gimplify_and_add (unlock, &tbody);
  gimple_bind_set_body (bind, tbody);

  gimple_bind_add_stmt (bind, gimple_build_omp_return (true));

  pop_gimplify_context (bind);
  gimple_bind_append_vars (bind, ctx->block_vars);
  BLOCK_VARS (block) = gimple_bind_vars (bind);
}
8473
/* A subroutine of lower_omp_for.  Generate code to emit the predicate
   for a lastprivate clause.  Given a loop control predicate of (V
   cond N2), we gate the clause on (!(V cond N2)).  The lowered form
   is appended to *DLIST, iterator initialization is appended to
   *BODY_P.  *CLIST is for lastprivate(conditional:) code that needs
   to be emitted in a critical section.  */

static void
lower_omp_for_lastprivate (struct omp_for_data *fd, gimple_seq *body_p,
			   gimple_seq *dlist, gimple_seq *clist,
			   struct omp_context *ctx)
{
  tree clauses, cond, vinit;
  enum tree_code cond_code;
  gimple_seq stmts;

  /* Invert the loop condition: the lastprivate copy-out runs when the
     loop predicate no longer holds.  */
  cond_code = fd->loop.cond_code;
  cond_code = cond_code == LT_EXPR ? GE_EXPR : LE_EXPR;

  /* When possible, use a strict equality expression.  This can let VRP
     type optimizations deduce the value and remove a copy.  */
  if (tree_fits_shwi_p (fd->loop.step))
    {
      HOST_WIDE_INT step = tree_to_shwi (fd->loop.step);
      if (step == 1 || step == -1)
	cond_code = EQ_EXPR;
    }

  if (gimple_omp_for_kind (fd->for_stmt) == GF_OMP_FOR_KIND_GRID_LOOP
      || gimple_omp_for_grid_phony (fd->for_stmt))
    cond = omp_grid_lastprivate_predicate (fd);
  else
    {
      tree n2 = fd->loop.n2;
      if (fd->collapse > 1
	  && TREE_CODE (n2) != INTEGER_CST
	  && gimple_omp_for_combined_into_p (fd->for_stmt))
	{
	  /* For combined constructs the real upper bound lives in a
	     _looptemp_ of the enclosing taskreg construct (or in the
	     outer for's own data); find it so the predicate compares
	     against the correct final value.  */
	  struct omp_context *taskreg_ctx = NULL;
	  if (gimple_code (ctx->outer->stmt) == GIMPLE_OMP_FOR)
	    {
	      gomp_for *gfor = as_a <gomp_for *> (ctx->outer->stmt);
	      if (gimple_omp_for_kind (gfor) == GF_OMP_FOR_KIND_FOR
		  || gimple_omp_for_kind (gfor) == GF_OMP_FOR_KIND_DISTRIBUTE)
		{
		  if (gimple_omp_for_combined_into_p (gfor))
		    {
		      gcc_assert (ctx->outer->outer
				  && is_parallel_ctx (ctx->outer->outer));
		      taskreg_ctx = ctx->outer->outer;
		    }
		  else
		    {
		      struct omp_for_data outer_fd;
		      omp_extract_for_data (gfor, &outer_fd, NULL);
		      n2 = fold_convert (TREE_TYPE (n2), outer_fd.loop.n2);
		    }
		}
	      else if (gimple_omp_for_kind (gfor) == GF_OMP_FOR_KIND_TASKLOOP)
		taskreg_ctx = ctx->outer->outer;
	    }
	  else if (is_taskreg_ctx (ctx->outer))
	    taskreg_ctx = ctx->outer;
	  if (taskreg_ctx)
	    {
	      int i;
	      tree taskreg_clauses
		= gimple_omp_taskreg_clauses (taskreg_ctx->stmt);
	      tree innerc = omp_find_clause (taskreg_clauses,
					     OMP_CLAUSE__LOOPTEMP_);
	      gcc_assert (innerc);
	      /* Skip over the first fd->collapse + 1 _looptemp_ clauses
		 (istart/iend plus per-dimension counts) to reach the
		 one holding the combined upper bound, if present.  */
	      for (i = 0; i < fd->collapse; i++)
		{
		  innerc = omp_find_clause (OMP_CLAUSE_CHAIN (innerc),
					    OMP_CLAUSE__LOOPTEMP_);
		  gcc_assert (innerc);
		}
	      innerc = omp_find_clause (OMP_CLAUSE_CHAIN (innerc),
					OMP_CLAUSE__LOOPTEMP_);
	      if (innerc)
		n2 = fold_convert (TREE_TYPE (n2),
				   lookup_decl (OMP_CLAUSE_DECL (innerc),
						taskreg_ctx));
	    }
	}
      cond = build2 (cond_code, boolean_type_node, fd->loop.v, n2);
    }

  clauses = gimple_omp_for_clauses (fd->for_stmt);
  stmts = NULL;
  lower_lastprivate_clauses (clauses, cond, body_p, &stmts, clist, ctx);
  if (!gimple_seq_empty_p (stmts))
    {
      gimple_seq_add_seq (&stmts, *dlist);
      *dlist = stmts;

      /* Optimize: v = 0; is usually cheaper than v = some_other_constant.  */
      vinit = fd->loop.n1;
      if (cond_code == EQ_EXPR
	  && tree_fits_shwi_p (fd->loop.n2)
	  && ! integer_zerop (fd->loop.n2))
	vinit = build_int_cst (TREE_TYPE (fd->loop.v), 0);
      else
	vinit = unshare_expr (vinit);

      /* Initialize the iterator variable, so that threads that don't execute
	 any iterations don't execute the lastprivate clauses by accident.  */
      gimplify_assign (fd->loop.v, vinit, body_p);
    }
}
8584
8585
/* Lower code for an OMP loop directive.  Replaces the GIMPLE_OMP_FOR at
   *GSI_P with a GIMPLE_BIND containing, in order: reduction temp setup,
   input-clause code, the lowered pre-body and header expressions, the
   loop itself with its lowered body, an OMP continue marker, reduction
   and lastprivate copy-out code, and finally the OMP return marker.  */

static void
lower_omp_for (gimple_stmt_iterator *gsi_p, omp_context *ctx)
{
  tree *rhs_p, block;
  struct omp_for_data fd, *fdp = NULL;
  gomp_for *stmt = as_a <gomp_for *> (gsi_stmt (*gsi_p));
  gbind *new_stmt;
  gimple_seq omp_for_body, body, dlist, tred_ilist = NULL, tred_dlist = NULL;
  gimple_seq cnt_list = NULL, clist = NULL;
  gimple_seq oacc_head = NULL, oacc_tail = NULL;
  size_t i;

  push_gimplify_context ();

  lower_omp (gimple_omp_for_pre_body_ptr (stmt), ctx);

  block = make_node (BLOCK);
  new_stmt = gimple_build_bind (NULL, NULL, block);
  /* Replace at gsi right away, so that 'stmt' is no member
     of a sequence anymore as we're going to add to a different
     one below.  */
  gsi_replace (gsi_p, new_stmt, true);

  /* Move declaration of temporaries in the loop body before we make
     it go away.  */
  omp_for_body = gimple_omp_body (stmt);
  if (!gimple_seq_empty_p (omp_for_body)
      && gimple_code (gimple_seq_first_stmt (omp_for_body)) == GIMPLE_BIND)
    {
      gbind *inner_bind
	= as_a <gbind *> (gimple_seq_first_stmt (omp_for_body));
      tree vars = gimple_bind_vars (inner_bind);
      gimple_bind_append_vars (new_stmt, vars);
      /* bind_vars/BLOCK_VARS are being moved to new_stmt/block, don't
	 keep them on the inner_bind and it's block.  */
      gimple_bind_set_vars (inner_bind, NULL_TREE);
      if (gimple_bind_block (inner_bind))
	BLOCK_VARS (gimple_bind_block (inner_bind)) = NULL_TREE;
    }

  if (gimple_omp_for_combined_into_p (stmt))
    {
      omp_extract_for_data (stmt, &fd, NULL);
      fdp = &fd;

      /* We need two temporaries with fd.loop.v type (istart/iend)
	 and then (fd.collapse - 1) temporaries with the same
	 type for count2 ... countN-1 vars if not constant.  */
      size_t count = 2;
      tree type = fd.iter_type;
      if (fd.collapse > 1
	  && TREE_CODE (fd.loop.n2) != INTEGER_CST)
	count += fd.collapse - 1;
      bool taskreg_for
	= (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_FOR
	   || gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_TASKLOOP);
      tree outerc = NULL, *pc = gimple_omp_for_clauses_ptr (stmt);
      tree simtc = NULL;
      tree clauses = *pc;
      if (taskreg_for)
	outerc
	  = omp_find_clause (gimple_omp_taskreg_clauses (ctx->outer->stmt),
			     OMP_CLAUSE__LOOPTEMP_);
      if (ctx->simt_stmt)
	simtc = omp_find_clause (gimple_omp_for_clauses (ctx->simt_stmt),
				 OMP_CLAUSE__LOOPTEMP_);
      /* Prepend COUNT _looptemp_ clauses to the loop's clause list,
	 reusing the enclosing taskreg's decls where required.  */
      for (i = 0; i < count; i++)
	{
	  tree temp;
	  if (taskreg_for)
	    {
	      gcc_assert (outerc);
	      temp = lookup_decl (OMP_CLAUSE_DECL (outerc), ctx->outer);
	      outerc = omp_find_clause (OMP_CLAUSE_CHAIN (outerc),
					OMP_CLAUSE__LOOPTEMP_);
	    }
	  else
	    {
	      /* If there are 2 adjacent SIMD stmts, one with _simt_
		 clause, another without, make sure they have the same
		 decls in _looptemp_ clauses, because the outer stmt
		 they are combined into will look up just one inner_stmt.  */
	      if (ctx->simt_stmt)
		temp = OMP_CLAUSE_DECL (simtc);
	      else
		temp = create_tmp_var (type);
	      insert_decl_map (&ctx->outer->cb, temp, temp);
	    }
	  *pc = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__LOOPTEMP_);
	  OMP_CLAUSE_DECL (*pc) = temp;
	  pc = &OMP_CLAUSE_CHAIN (*pc);
	  if (ctx->simt_stmt)
	    simtc = omp_find_clause (OMP_CLAUSE_CHAIN (simtc),
				     OMP_CLAUSE__LOOPTEMP_);
	}
      *pc = clauses;
    }

  /* The pre-body and input clauses go before the lowered GIMPLE_OMP_FOR.  */
  dlist = NULL;
  body = NULL;
  tree rclauses
    = omp_task_reductions_find_first (gimple_omp_for_clauses (stmt), OMP_FOR,
				      OMP_CLAUSE_REDUCTION);
  tree rtmp = NULL_TREE;
  if (rclauses)
    {
      /* Task reductions: add a _reductemp_ clause holding a pointer to
	 the reduction bookkeeping array, and emit register/unregister
	 code into tred_ilist/tred_dlist.  */
      tree type = build_pointer_type (pointer_sized_int_node);
      tree temp = create_tmp_var (type);
      tree c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__REDUCTEMP_);
      OMP_CLAUSE_DECL (c) = temp;
      OMP_CLAUSE_CHAIN (c) = gimple_omp_for_clauses (stmt);
      gimple_omp_for_set_clauses (stmt, c);
      lower_omp_task_reductions (ctx, OMP_FOR,
				 gimple_omp_for_clauses (stmt),
				 &tred_ilist, &tred_dlist);
      rclauses = c;
      rtmp = make_ssa_name (type);
      gimple_seq_add_stmt (&body, gimple_build_assign (rtmp, temp));
    }

  lower_lastprivate_conditional_clauses (gimple_omp_for_clauses_ptr (stmt),
					 ctx);

  lower_rec_input_clauses (gimple_omp_for_clauses (stmt), &body, &dlist, ctx,
			   fdp);
  gimple_seq_add_seq (rclauses ? &tred_ilist : &body,
		      gimple_omp_for_pre_body (stmt));

  lower_omp (gimple_omp_body_ptr (stmt), ctx);

  /* Lower the header expressions.  At this point, we can assume that
     the header is of the form:

	#pragma omp for (V = VAL1; V {<|>|<=|>=} VAL2; V = V [+-] VAL3)

     We just need to make sure that VAL1, VAL2 and VAL3 are lowered
     using the .omp_data_s mapping, if needed.  */
  for (i = 0; i < gimple_omp_for_collapse (stmt); i++)
    {
      rhs_p = gimple_omp_for_initial_ptr (stmt, i);
      if (!is_gimple_min_invariant (*rhs_p))
	*rhs_p = get_formal_tmp_var (*rhs_p, &cnt_list);
      else if (TREE_CODE (*rhs_p) == ADDR_EXPR)
	recompute_tree_invariant_for_addr_expr (*rhs_p);

      rhs_p = gimple_omp_for_final_ptr (stmt, i);
      if (!is_gimple_min_invariant (*rhs_p))
	*rhs_p = get_formal_tmp_var (*rhs_p, &cnt_list);
      else if (TREE_CODE (*rhs_p) == ADDR_EXPR)
	recompute_tree_invariant_for_addr_expr (*rhs_p);

      rhs_p = &TREE_OPERAND (gimple_omp_for_incr (stmt, i), 1);
      if (!is_gimple_min_invariant (*rhs_p))
	*rhs_p = get_formal_tmp_var (*rhs_p, &cnt_list);
    }
  if (rclauses)
    gimple_seq_add_seq (&tred_ilist, cnt_list);
  else
    gimple_seq_add_seq (&body, cnt_list);

  /* Once lowered, extract the bounds and clauses.  */
  omp_extract_for_data (stmt, &fd, NULL);

  if (is_gimple_omp_oacc (ctx->stmt)
      && !ctx_in_oacc_kernels_region (ctx))
    lower_oacc_head_tail (gimple_location (stmt),
			  gimple_omp_for_clauses (stmt),
			  &oacc_head, &oacc_tail, ctx);

  /* Add OpenACC partitioning and reduction markers just before the loop.  */
  if (oacc_head)
    gimple_seq_add_seq (&body, oacc_head);

  lower_omp_for_lastprivate (&fd, &body, &dlist, &clist, ctx);

  if (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_FOR)
    for (tree c = gimple_omp_for_clauses (stmt); c; c = OMP_CLAUSE_CHAIN (c))
      if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
	  && !OMP_CLAUSE_LINEAR_NO_COPYIN (c))
	{
	  OMP_CLAUSE_DECL (c) = lookup_decl (OMP_CLAUSE_DECL (c), ctx);
	  if (DECL_P (OMP_CLAUSE_LINEAR_STEP (c)))
	    OMP_CLAUSE_LINEAR_STEP (c)
	      = maybe_lookup_decl_in_outer_ctx (OMP_CLAUSE_LINEAR_STEP (c),
						ctx);
	}

  bool phony_loop = (gimple_omp_for_kind (stmt) != GF_OMP_FOR_KIND_GRID_LOOP
		     && gimple_omp_for_grid_phony (stmt));
  if (!phony_loop)
    gimple_seq_add_stmt (&body, stmt);
  gimple_seq_add_seq (&body, gimple_omp_body (stmt));

  if (!phony_loop)
    gimple_seq_add_stmt (&body, gimple_build_omp_continue (fd.loop.v,
							   fd.loop.v));

  /* After the loop, add exit clauses.  */
  lower_reduction_clauses (gimple_omp_for_clauses (stmt), &body, &clist, ctx);

  if (clist)
    {
      /* lastprivate(conditional:) copy-out must run under the runtime's
	 atomic lock.  */
      tree fndecl = builtin_decl_explicit (BUILT_IN_GOMP_ATOMIC_START);
      gcall *g = gimple_build_call (fndecl, 0);
      gimple_seq_add_stmt (&body, g);
      gimple_seq_add_seq (&body, clist);
      fndecl = builtin_decl_explicit (BUILT_IN_GOMP_ATOMIC_END);
      g = gimple_build_call (fndecl, 0);
      gimple_seq_add_stmt (&body, g);
    }

  if (ctx->cancellable)
    gimple_seq_add_stmt (&body, gimple_build_label (ctx->cancel_label));

  gimple_seq_add_seq (&body, dlist);

  if (rclauses)
    {
      gimple_seq_add_seq (&tred_ilist, body);
      body = tred_ilist;
    }

  body = maybe_catch_exception (body);

  if (!phony_loop)
    {
      /* Region exit marker goes at the end of the loop body.  */
      gimple *g = gimple_build_omp_return (fd.have_nowait);
      gimple_seq_add_stmt (&body, g);

      gimple_seq_add_seq (&body, tred_dlist);

      maybe_add_implicit_barrier_cancel (ctx, g, &body);

      if (rclauses)
	OMP_CLAUSE_DECL (rclauses) = rtmp;
    }

  /* Add OpenACC joining and reduction markers just after the loop.  */
  if (oacc_tail)
    gimple_seq_add_seq (&body, oacc_tail);

  pop_gimplify_context (new_stmt);

  gimple_bind_append_vars (new_stmt, ctx->block_vars);
  maybe_remove_omp_member_access_dummy_vars (new_stmt);
  BLOCK_VARS (block) = gimple_bind_vars (new_stmt);
  if (BLOCK_VARS (block))
    TREE_USED (block) = 1;

  gimple_bind_set_body (new_stmt, body);
  gimple_omp_set_body (stmt, NULL);
  gimple_omp_for_set_pre_body (stmt, NULL);
}
8843
8844 /* Callback for walk_stmts. Check if the current statement only contains
8845 GIMPLE_OMP_FOR or GIMPLE_OMP_SECTIONS. */
8846
8847 static tree
8848 check_combined_parallel (gimple_stmt_iterator *gsi_p,
8849 bool *handled_ops_p,
8850 struct walk_stmt_info *wi)
8851 {
8852 int *info = (int *) wi->info;
8853 gimple *stmt = gsi_stmt (*gsi_p);
8854
8855 *handled_ops_p = true;
8856 switch (gimple_code (stmt))
8857 {
8858 WALK_SUBSTMTS;
8859
8860 case GIMPLE_DEBUG:
8861 break;
8862 case GIMPLE_OMP_FOR:
8863 case GIMPLE_OMP_SECTIONS:
8864 *info = *info == 0 ? 1 : -1;
8865 break;
8866 default:
8867 *info = -1;
8868 break;
8869 }
8870 return NULL;
8871 }
8872
/* Context used while building a task copy function (create_task_copyfn).  */
struct omp_taskcopy_context
{
  /* This field must be at the beginning, as we do "inheritance": Some
     callback functions for tree-inline.c (e.g., omp_copy_decl)
     receive a copy_body_data pointer that is up-casted to an
     omp_context pointer.  */
  copy_body_data cb;
  /* The OMP context of the task whose copy function is being built.  */
  omp_context *ctx;
};
8882
8883 static tree
8884 task_copyfn_copy_decl (tree var, copy_body_data *cb)
8885 {
8886 struct omp_taskcopy_context *tcctx = (struct omp_taskcopy_context *) cb;
8887
8888 if (splay_tree_lookup (tcctx->ctx->sfield_map, (splay_tree_key) var))
8889 return create_tmp_var (TREE_TYPE (var));
8890
8891 return var;
8892 }
8893
/* Build a remapped copy of record type ORIG_TYPE for use inside a task
   copy function, remapping each field's type, size and offset through
   TCCTX->cb.  Every old field is entered into the decl_map so later
   COMPONENT_REF remapping finds the new fields.  Returns the new,
   laid-out RECORD_TYPE.  */

static tree
task_copyfn_remap_type (struct omp_taskcopy_context *tcctx, tree orig_type)
{
  tree name, new_fields = NULL, type, f;

  type = lang_hooks.types.make_type (RECORD_TYPE);
  name = DECL_NAME (TYPE_NAME (orig_type));
  name = build_decl (gimple_location (tcctx->ctx->stmt),
		     TYPE_DECL, name, type);
  TYPE_NAME (type) = name;

  for (f = TYPE_FIELDS (orig_type); f ; f = TREE_CHAIN (f))
    {
      /* Copy each field, retargeting it at the new record and remapping
	 any trees its type/size/offset reference.  Fields are prepended,
	 so the list is reversed below to restore the original order.  */
      tree new_f = copy_node (f);
      DECL_CONTEXT (new_f) = type;
      TREE_TYPE (new_f) = remap_type (TREE_TYPE (f), &tcctx->cb);
      TREE_CHAIN (new_f) = new_fields;
      walk_tree (&DECL_SIZE (new_f), copy_tree_body_r, &tcctx->cb, NULL);
      walk_tree (&DECL_SIZE_UNIT (new_f), copy_tree_body_r, &tcctx->cb, NULL);
      walk_tree (&DECL_FIELD_OFFSET (new_f), copy_tree_body_r,
		 &tcctx->cb, NULL);
      new_fields = new_f;
      tcctx->cb.decl_map->put (f, new_f);
    }
  TYPE_FIELDS (type) = nreverse (new_fields);
  layout_type (type);
  return type;
}
8922
/* Create task copyfn: fill in the body of the function that copies the
   task's firstprivate/shared data from the sender record (*SARG) into the
   destination record (*ARG).  TASK_STMT is the GIMPLE_OMP_TASK and CTX its
   lowering context.  If either record type contains variably modified
   types, both are first remapped via task_copyfn_remap_type.  */

static void
create_task_copyfn (gomp_task *task_stmt, omp_context *ctx)
{
  struct function *child_cfun;
  tree child_fn, t, c, src, dst, f, sf, arg, sarg, decl;
  tree record_type, srecord_type, bind, list;
  bool record_needs_remap = false, srecord_needs_remap = false;
  splay_tree_node n;
  struct omp_taskcopy_context tcctx;
  location_t loc = gimple_location (task_stmt);
  /* Number of _looptemp_ clauses seen so far; the first two are special
     (see the _LOOPTEMP_ case below).  */
  size_t looptempno = 0;

  child_fn = gimple_omp_task_copy_fn (task_stmt);
  child_cfun = DECL_STRUCT_FUNCTION (child_fn);
  gcc_assert (child_cfun->cfg == NULL);
  DECL_SAVED_TREE (child_fn) = alloc_stmt_list ();

  /* Reset DECL_CONTEXT on function arguments.  */
  for (t = DECL_ARGUMENTS (child_fn); t; t = DECL_CHAIN (t))
    DECL_CONTEXT (t) = child_fn;

  /* Populate the function.  */
  push_gimplify_context ();
  push_cfun (child_cfun);

  bind = build3 (BIND_EXPR, void_type_node, NULL, NULL, NULL);
  TREE_SIDE_EFFECTS (bind) = 1;
  list = NULL;
  DECL_SAVED_TREE (child_fn) = bind;
  DECL_SOURCE_LOCATION (child_fn) = gimple_location (task_stmt);

  /* Remap src and dst argument types if needed.  A remap is needed when
     any field has a variably modified type relative to the source fn.  */
  record_type = ctx->record_type;
  srecord_type = ctx->srecord_type;
  for (f = TYPE_FIELDS (record_type); f ; f = DECL_CHAIN (f))
    if (variably_modified_type_p (TREE_TYPE (f), ctx->cb.src_fn))
      {
	record_needs_remap = true;
	break;
      }
  for (f = TYPE_FIELDS (srecord_type); f ; f = DECL_CHAIN (f))
    if (variably_modified_type_p (TREE_TYPE (f), ctx->cb.src_fn))
      {
	srecord_needs_remap = true;
	break;
      }

  if (record_needs_remap || srecord_needs_remap)
    {
      memset (&tcctx, '\0', sizeof (tcctx));
      tcctx.cb.src_fn = ctx->cb.src_fn;
      tcctx.cb.dst_fn = child_fn;
      tcctx.cb.src_node = cgraph_node::get (tcctx.cb.src_fn);
      gcc_checking_assert (tcctx.cb.src_node);
      tcctx.cb.dst_node = tcctx.cb.src_node;
      tcctx.cb.src_cfun = ctx->cb.src_cfun;
      tcctx.cb.copy_decl = task_copyfn_copy_decl;
      tcctx.cb.eh_lp_nr = 0;
      tcctx.cb.transform_call_graph_edges = CB_CGE_MOVE;
      tcctx.cb.decl_map = new hash_map<tree, tree>;
      tcctx.ctx = ctx;

      if (record_needs_remap)
	record_type = task_copyfn_remap_type (&tcctx, record_type);
      if (srecord_needs_remap)
	srecord_type = task_copyfn_remap_type (&tcctx, srecord_type);
    }
  else
    /* decl_map == NULL is used below as "no remapping happened".  */
    tcctx.cb.decl_map = NULL;

  /* Retype the two pointer arguments (dst record, then sender record)
     using the possibly remapped record types.  */
  arg = DECL_ARGUMENTS (child_fn);
  TREE_TYPE (arg) = build_pointer_type (record_type);
  sarg = DECL_CHAIN (arg);
  TREE_TYPE (sarg) = build_pointer_type (srecord_type);

  /* First pass: initialize temporaries used in record_type and srecord_type
     sizes and field offsets.  */
  if (tcctx.cb.decl_map)
    for (c = gimple_omp_task_clauses (task_stmt); c; c = OMP_CLAUSE_CHAIN (c))
      if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
	{
	  tree *p;

	  decl = OMP_CLAUSE_DECL (c);
	  p = tcctx.cb.decl_map->get (decl);
	  if (p == NULL)
	    continue;
	  n = splay_tree_lookup (ctx->sfield_map, (splay_tree_key) decl);
	  sf = (tree) n->value;
	  sf = *tcctx.cb.decl_map->get (sf);
	  src = build_simple_mem_ref_loc (loc, sarg);
	  src = omp_build_component_ref (src, sf);
	  t = build2 (MODIFY_EXPR, TREE_TYPE (*p), *p, src);
	  append_to_statement_list (t, &list);
	}

  /* Second pass: copy shared var pointers and copy construct non-VLA
     firstprivate vars.  */
  for (c = gimple_omp_task_clauses (task_stmt); c; c = OMP_CLAUSE_CHAIN (c))
    switch (OMP_CLAUSE_CODE (c))
      {
	splay_tree_key key;
      case OMP_CLAUSE_SHARED:
	decl = OMP_CLAUSE_DECL (c);
	key = (splay_tree_key) decl;
	/* SHARED_FIRSTPRIVATE clauses are keyed by &DECL_UID instead of
	   the decl itself.  */
	if (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
	  key = (splay_tree_key) &DECL_UID (decl);
	n = splay_tree_lookup (ctx->field_map, key);
	if (n == NULL)
	  break;
	f = (tree) n->value;
	if (tcctx.cb.decl_map)
	  f = *tcctx.cb.decl_map->get (f);
	n = splay_tree_lookup (ctx->sfield_map, key);
	sf = (tree) n->value;
	if (tcctx.cb.decl_map)
	  sf = *tcctx.cb.decl_map->get (sf);
	src = build_simple_mem_ref_loc (loc, sarg);
	src = omp_build_component_ref (src, sf);
	dst = build_simple_mem_ref_loc (loc, arg);
	dst = omp_build_component_ref (dst, f);
	t = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
	append_to_statement_list (t, &list);
	break;
      case OMP_CLAUSE_REDUCTION:
      case OMP_CLAUSE_IN_REDUCTION:
	decl = OMP_CLAUSE_DECL (c);
	/* Strip the address computation wrappers off an array-section
	   reduction decl to get at the underlying base decl.  */
	if (TREE_CODE (decl) == MEM_REF)
	  {
	    decl = TREE_OPERAND (decl, 0);
	    if (TREE_CODE (decl) == POINTER_PLUS_EXPR)
	      decl = TREE_OPERAND (decl, 0);
	    if (TREE_CODE (decl) == INDIRECT_REF
		|| TREE_CODE (decl) == ADDR_EXPR)
	      decl = TREE_OPERAND (decl, 0);
	  }
	key = (splay_tree_key) decl;
	n = splay_tree_lookup (ctx->field_map, key);
	if (n == NULL)
	  break;
	f = (tree) n->value;
	if (tcctx.cb.decl_map)
	  f = *tcctx.cb.decl_map->get (f);
	n = splay_tree_lookup (ctx->sfield_map, key);
	sf = (tree) n->value;
	if (tcctx.cb.decl_map)
	  sf = *tcctx.cb.decl_map->get (sf);
	src = build_simple_mem_ref_loc (loc, sarg);
	src = omp_build_component_ref (src, sf);
	if (decl != OMP_CLAUSE_DECL (c)
	    && TREE_CODE (TREE_TYPE (decl)) == REFERENCE_TYPE
	    && TREE_CODE (TREE_TYPE (TREE_TYPE (decl))) == POINTER_TYPE)
	  src = build_simple_mem_ref_loc (loc, src);
	dst = build_simple_mem_ref_loc (loc, arg);
	dst = omp_build_component_ref (dst, f);
	t = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
	append_to_statement_list (t, &list);
	break;
      case OMP_CLAUSE__LOOPTEMP_:
	/* Fields for first two _looptemp_ clauses are initialized by
	   GOMP_taskloop*, the rest are handled like firstprivate.  */
	if (looptempno < 2)
	  {
	    looptempno++;
	    break;
	  }
	/* FALLTHRU */
      case OMP_CLAUSE__REDUCTEMP_:
      case OMP_CLAUSE_FIRSTPRIVATE:
	decl = OMP_CLAUSE_DECL (c);
	/* VLA firstprivates are deferred to the last pass below.  */
	if (is_variable_sized (decl))
	  break;
	n = splay_tree_lookup (ctx->field_map, (splay_tree_key) decl);
	if (n == NULL)
	  break;
	f = (tree) n->value;
	if (tcctx.cb.decl_map)
	  f = *tcctx.cb.decl_map->get (f);
	n = splay_tree_lookup (ctx->sfield_map, (splay_tree_key) decl);
	if (n != NULL)
	  {
	    sf = (tree) n->value;
	    if (tcctx.cb.decl_map)
	      sf = *tcctx.cb.decl_map->get (sf);
	    src = build_simple_mem_ref_loc (loc, sarg);
	    src = omp_build_component_ref (src, sf);
	    if (use_pointer_for_field (decl, NULL) || omp_is_reference (decl))
	      src = build_simple_mem_ref_loc (loc, src);
	  }
	else
	  src = decl;
	dst = build_simple_mem_ref_loc (loc, arg);
	dst = omp_build_component_ref (dst, f);
	/* Firstprivate uses the language's copy constructor hook; the
	   internal temp clauses are plain assignments.  */
	if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_FIRSTPRIVATE)
	  t = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
	else
	  t = lang_hooks.decls.omp_clause_copy_ctor (c, dst, src);
	append_to_statement_list (t, &list);
	break;
      case OMP_CLAUSE_PRIVATE:
	if (! OMP_CLAUSE_PRIVATE_OUTER_REF (c))
	  break;
	decl = OMP_CLAUSE_DECL (c);
	n = splay_tree_lookup (ctx->field_map, (splay_tree_key) decl);
	f = (tree) n->value;
	if (tcctx.cb.decl_map)
	  f = *tcctx.cb.decl_map->get (f);
	n = splay_tree_lookup (ctx->sfield_map, (splay_tree_key) decl);
	if (n != NULL)
	  {
	    sf = (tree) n->value;
	    if (tcctx.cb.decl_map)
	      sf = *tcctx.cb.decl_map->get (sf);
	    src = build_simple_mem_ref_loc (loc, sarg);
	    src = omp_build_component_ref (src, sf);
	    if (use_pointer_for_field (decl, NULL))
	      src = build_simple_mem_ref_loc (loc, src);
	  }
	else
	  src = decl;
	dst = build_simple_mem_ref_loc (loc, arg);
	dst = omp_build_component_ref (dst, f);
	t = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
	append_to_statement_list (t, &list);
	break;
      default:
	break;
      }

  /* Last pass: handle VLA firstprivates.  */
  if (tcctx.cb.decl_map)
    for (c = gimple_omp_task_clauses (task_stmt); c; c = OMP_CLAUSE_CHAIN (c))
      if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
	{
	  tree ind, ptr, df;

	  decl = OMP_CLAUSE_DECL (c);
	  if (!is_variable_sized (decl))
	    continue;
	  n = splay_tree_lookup (ctx->field_map, (splay_tree_key) decl);
	  if (n == NULL)
	    continue;
	  f = (tree) n->value;
	  f = *tcctx.cb.decl_map->get (f);
	  /* A VLA decl has been rewritten as *ptr; work through its
	     DECL_VALUE_EXPR to find that pointer.  */
	  gcc_assert (DECL_HAS_VALUE_EXPR_P (decl));
	  ind = DECL_VALUE_EXPR (decl);
	  gcc_assert (TREE_CODE (ind) == INDIRECT_REF);
	  gcc_assert (DECL_P (TREE_OPERAND (ind, 0)));
	  n = splay_tree_lookup (ctx->sfield_map,
				 (splay_tree_key) TREE_OPERAND (ind, 0));
	  sf = (tree) n->value;
	  sf = *tcctx.cb.decl_map->get (sf);
	  src = build_simple_mem_ref_loc (loc, sarg);
	  src = omp_build_component_ref (src, sf);
	  src = build_simple_mem_ref_loc (loc, src);
	  dst = build_simple_mem_ref_loc (loc, arg);
	  dst = omp_build_component_ref (dst, f);
	  t = lang_hooks.decls.omp_clause_copy_ctor (c, dst, src);
	  append_to_statement_list (t, &list);
	  /* Also point the destination record's pointer field at the
	     freshly copied data.  */
	  n = splay_tree_lookup (ctx->field_map,
				 (splay_tree_key) TREE_OPERAND (ind, 0));
	  df = (tree) n->value;
	  df = *tcctx.cb.decl_map->get (df);
	  ptr = build_simple_mem_ref_loc (loc, arg);
	  ptr = omp_build_component_ref (ptr, df);
	  t = build2 (MODIFY_EXPR, TREE_TYPE (ptr), ptr,
		      build_fold_addr_expr_loc (loc, dst));
	  append_to_statement_list (t, &list);
	}

  t = build1 (RETURN_EXPR, void_type_node, NULL);
  append_to_statement_list (t, &list);

  if (tcctx.cb.decl_map)
    delete tcctx.cb.decl_map;
  pop_gimplify_context (NULL);
  BIND_EXPR_BODY (bind) = list;
  pop_cfun ();
}
9204
/* Lower the OMP_CLAUSE_DEPEND clauses found in *PCLAUSES into a local
   array of dependence addresses in the format the runtime expects.
   Initialization of the array is appended to *ISEQ; a clobber of it is
   appended to *OSEQ.  A single OMP_CLAUSE_DEPEND_LAST clause whose decl
   is the address of the array is prepended to *PCLAUSES.  */

static void
lower_depend_clauses (tree *pclauses, gimple_seq *iseq, gimple_seq *oseq)
{
  tree c, clauses;
  gimple *g;
  /* cnt[0] = out/inout, cnt[1] = mutexinoutset, cnt[2] = in,
     cnt[3] = depobj.  IDX is the number of header slots before the
     first address: 2 normally, 5 in the extended format.  */
  size_t cnt[4] = { 0, 0, 0, 0 }, idx = 2, i;

  clauses = omp_find_clause (*pclauses, OMP_CLAUSE_DEPEND);
  gcc_assert (clauses);
  for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
    if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND)
      switch (OMP_CLAUSE_DEPEND_KIND (c))
	{
	case OMP_CLAUSE_DEPEND_LAST:
	  /* Lowering already done at gimplification.  */
	  return;
	case OMP_CLAUSE_DEPEND_IN:
	  cnt[2]++;
	  break;
	case OMP_CLAUSE_DEPEND_OUT:
	case OMP_CLAUSE_DEPEND_INOUT:
	  cnt[0]++;
	  break;
	case OMP_CLAUSE_DEPEND_MUTEXINOUTSET:
	  cnt[1]++;
	  break;
	case OMP_CLAUSE_DEPEND_DEPOBJ:
	  cnt[3]++;
	  break;
	case OMP_CLAUSE_DEPEND_SOURCE:
	case OMP_CLAUSE_DEPEND_SINK:
	  /* FALLTHRU */
	default:
	  gcc_unreachable ();
	}
  /* mutexinoutset or depobj clauses require the 5-slot header format.  */
  if (cnt[1] || cnt[3])
    idx = 5;
  size_t total = cnt[0] + cnt[1] + cnt[2] + cnt[3];
  tree type = build_array_type_nelts (ptr_type_node, total + idx);
  tree array = create_tmp_var (type);
  TREE_ADDRESSABLE (array) = 1;
  tree r = build4 (ARRAY_REF, ptr_type_node, array, size_int (0), NULL_TREE,
		   NULL_TREE);
  if (idx == 5)
    {
      /* Extended format: slot 0 is 0 to flag the new layout, slot 1 is
	 the total count.  */
      g = gimple_build_assign (r, build_int_cst (ptr_type_node, 0));
      gimple_seq_add_stmt (iseq, g);
      r = build4 (ARRAY_REF, ptr_type_node, array, size_int (1), NULL_TREE,
		  NULL_TREE);
    }
  g = gimple_build_assign (r, build_int_cst (ptr_type_node, total));
  gimple_seq_add_stmt (iseq, g);
  /* Store the per-kind counts: one slot in the legacy format, three in
     the extended one.  */
  for (i = 0; i < (idx == 5 ? 3 : 1); i++)
    {
      r = build4 (ARRAY_REF, ptr_type_node, array,
		  size_int (i + 1 + (idx == 5)), NULL_TREE, NULL_TREE);
      g = gimple_build_assign (r, build_int_cst (ptr_type_node, cnt[i]));
      gimple_seq_add_stmt (iseq, g);
    }
  /* Emit the addresses grouped by kind, in cnt[] order.  */
  for (i = 0; i < 4; i++)
    {
      if (cnt[i] == 0)
	continue;
      for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
	if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_DEPEND)
	  continue;
	else
	  {
	    switch (OMP_CLAUSE_DEPEND_KIND (c))
	      {
	      case OMP_CLAUSE_DEPEND_IN:
		if (i != 2)
		  continue;
		break;
	      case OMP_CLAUSE_DEPEND_OUT:
	      case OMP_CLAUSE_DEPEND_INOUT:
		if (i != 0)
		  continue;
		break;
	      case OMP_CLAUSE_DEPEND_MUTEXINOUTSET:
		if (i != 1)
		  continue;
		break;
	      case OMP_CLAUSE_DEPEND_DEPOBJ:
		if (i != 3)
		  continue;
		break;
	      default:
		gcc_unreachable ();
	      }
	    tree t = OMP_CLAUSE_DECL (c);
	    t = fold_convert (ptr_type_node, t);
	    gimplify_expr (&t, iseq, NULL, is_gimple_val, fb_rvalue);
	    r = build4 (ARRAY_REF, ptr_type_node, array, size_int (idx++),
			NULL_TREE, NULL_TREE);
	    g = gimple_build_assign (r, t);
	    gimple_seq_add_stmt (iseq, g);
	  }
    }
  /* Replace the depend clauses with a single DEPEND_LAST clause carrying
     the array's address.  */
  c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE_DEPEND);
  OMP_CLAUSE_DEPEND_KIND (c) = OMP_CLAUSE_DEPEND_LAST;
  OMP_CLAUSE_DECL (c) = build_fold_addr_expr (array);
  OMP_CLAUSE_CHAIN (c) = *pclauses;
  *pclauses = c;
  /* Clobber the array after the construct so its stack slot can be
     reused.  */
  tree clobber = build_constructor (type, NULL);
  TREE_THIS_VOLATILE (clobber) = 1;
  g = gimple_build_assign (array, clobber);
  gimple_seq_add_stmt (oseq, g);
}
9314
/* Lower the OpenMP parallel or task directive in the current statement
   in GSI_P.  CTX holds context information for the directive.  */

static void
lower_omp_taskreg (gimple_stmt_iterator *gsi_p, omp_context *ctx)
{
  tree clauses;
  tree child_fn, t;
  gimple *stmt = gsi_stmt (*gsi_p);
  gbind *par_bind, *bind, *dep_bind = NULL;
  gimple_seq par_body;
  location_t loc = gimple_location (stmt);

  clauses = gimple_omp_taskreg_clauses (stmt);
  /* A task with the taskwait flag has no body to lower.  */
  if (gimple_code (stmt) == GIMPLE_OMP_TASK
      && gimple_omp_task_taskwait_p (stmt))
    {
      par_bind = NULL;
      par_body = NULL;
    }
  else
    {
      par_bind
	= as_a <gbind *> (gimple_seq_first_stmt (gimple_omp_body (stmt)));
      par_body = gimple_bind_body (par_bind);
    }
  child_fn = ctx->cb.dst_fn;
  /* Detect a parallel whose body is exactly one worksharing construct
     and mark it combined (ws_num == 1 from check_combined_parallel).  */
  if (gimple_code (stmt) == GIMPLE_OMP_PARALLEL
      && !gimple_omp_parallel_combined_p (stmt))
    {
      struct walk_stmt_info wi;
      int ws_num = 0;

      memset (&wi, 0, sizeof (wi));
      wi.info = &ws_num;
      wi.val_only = true;
      walk_gimple_seq (par_body, check_combined_parallel, NULL, &wi);
      if (ws_num == 1)
	gimple_omp_parallel_set_combined_p (stmt, true);
    }
  /* Lower depend clauses on a task into the runtime dependence array;
     the result is wrapped in DEP_BIND around everything else.  */
  gimple_seq dep_ilist = NULL;
  gimple_seq dep_olist = NULL;
  if (gimple_code (stmt) == GIMPLE_OMP_TASK
      && omp_find_clause (clauses, OMP_CLAUSE_DEPEND))
    {
      push_gimplify_context ();
      dep_bind = gimple_build_bind (NULL, NULL, make_node (BLOCK));
      lower_depend_clauses (gimple_omp_task_clauses_ptr (stmt),
			    &dep_ilist, &dep_olist);
    }

  /* For a taskwait-style task only the depend handling applies.  */
  if (gimple_code (stmt) == GIMPLE_OMP_TASK
      && gimple_omp_task_taskwait_p (stmt))
    {
      if (dep_bind)
	{
	  gsi_replace (gsi_p, dep_bind, true);
	  gimple_bind_add_seq (dep_bind, dep_ilist);
	  gimple_bind_add_stmt (dep_bind, stmt);
	  gimple_bind_add_seq (dep_bind, dep_olist);
	  pop_gimplify_context (dep_bind);
	}
      return;
    }

  if (ctx->srecord_type)
    create_task_copyfn (as_a <gomp_task *> (stmt), ctx);

  /* Task reductions on taskloop tasks / _REDUCTEMP_ on parallels need
     setup/teardown code outside the construct, hence inside DEP_BIND.  */
  gimple_seq tskred_ilist = NULL;
  gimple_seq tskred_olist = NULL;
  if ((is_task_ctx (ctx)
       && gimple_omp_task_taskloop_p (ctx->stmt)
       && omp_find_clause (gimple_omp_task_clauses (ctx->stmt),
			   OMP_CLAUSE_REDUCTION))
      || (is_parallel_ctx (ctx)
	  && omp_find_clause (gimple_omp_parallel_clauses (stmt),
			      OMP_CLAUSE__REDUCTEMP_)))
    {
      if (dep_bind == NULL)
	{
	  push_gimplify_context ();
	  dep_bind = gimple_build_bind (NULL, NULL, make_node (BLOCK));
	}
      lower_omp_task_reductions (ctx, is_task_ctx (ctx) ? OMP_TASKLOOP
							: OMP_PARALLEL,
				 gimple_omp_taskreg_clauses (ctx->stmt),
				 &tskred_ilist, &tskred_olist);
    }

  push_gimplify_context ();

  gimple_seq par_olist = NULL;
  gimple_seq par_ilist = NULL;
  gimple_seq par_rlist = NULL;
  /* Gridified (phony) parallels keep their body inline instead of being
     outlined into CHILD_FN.  */
  bool phony_construct = gimple_code (stmt) == GIMPLE_OMP_PARALLEL
    && gimple_omp_parallel_grid_phony (as_a <gomp_parallel *> (stmt));
  if (phony_construct && ctx->record_type)
    {
      gcc_checking_assert (!ctx->receiver_decl);
      ctx->receiver_decl = create_tmp_var
	(build_reference_type (ctx->record_type), ".omp_rec");
    }
  lower_rec_input_clauses (clauses, &par_ilist, &par_olist, ctx, NULL);
  lower_omp (&par_body, ctx);
  if (gimple_code (stmt) == GIMPLE_OMP_PARALLEL)
    lower_reduction_clauses (clauses, &par_rlist, NULL, ctx);

  /* Declare all the variables created by mapping and the variables
     declared in the scope of the parallel body.  */
  record_vars_into (ctx->block_vars, child_fn);
  maybe_remove_omp_member_access_dummy_vars (par_bind);
  record_vars_into (gimple_bind_vars (par_bind), child_fn);

  if (ctx->record_type)
    {
      ctx->sender_decl
	= create_tmp_var (ctx->srecord_type ? ctx->srecord_type
			  : ctx->record_type, ".omp_data_o");
      DECL_NAMELESS (ctx->sender_decl) = 1;
      TREE_ADDRESSABLE (ctx->sender_decl) = 1;
      gimple_omp_taskreg_set_data_arg (stmt, ctx->sender_decl);
    }

  gimple_seq olist = NULL;
  gimple_seq ilist = NULL;
  lower_send_clauses (clauses, &ilist, &olist, ctx);
  lower_send_shared_vars (&ilist, &olist, ctx);

  if (ctx->record_type)
    {
      /* Clobber the sender record after the construct so its stack slot
	 can be reused.  */
      tree clobber = build_constructor (TREE_TYPE (ctx->sender_decl), NULL);
      TREE_THIS_VOLATILE (clobber) = 1;
      gimple_seq_add_stmt (&olist, gimple_build_assign (ctx->sender_decl,
							clobber));
    }

  /* Once all the expansions are done, sequence all the different
     fragments inside gimple_omp_body.  */

  gimple_seq new_body = NULL;

  if (ctx->record_type)
    {
      t = build_fold_addr_expr_loc (loc, ctx->sender_decl);
      /* fixup_child_record_type might have changed receiver_decl's type.  */
      t = fold_convert_loc (loc, TREE_TYPE (ctx->receiver_decl), t);
      gimple_seq_add_stmt (&new_body,
			   gimple_build_assign (ctx->receiver_decl, t));
    }

  gimple_seq_add_seq (&new_body, par_ilist);
  gimple_seq_add_seq (&new_body, par_body);
  gimple_seq_add_seq (&new_body, par_rlist);
  if (ctx->cancellable)
    gimple_seq_add_stmt (&new_body, gimple_build_label (ctx->cancel_label));
  gimple_seq_add_seq (&new_body, par_olist);
  new_body = maybe_catch_exception (new_body);
  if (gimple_code (stmt) == GIMPLE_OMP_TASK)
    gimple_seq_add_stmt (&new_body,
			 gimple_build_omp_continue (integer_zero_node,
						    integer_zero_node));
  if (!phony_construct)
    {
      gimple_seq_add_stmt (&new_body, gimple_build_omp_return (false));
      gimple_omp_set_body (stmt, new_body);
    }

  if (dep_bind && gimple_bind_block (par_bind) == NULL_TREE)
    bind = gimple_build_bind (NULL, NULL, make_node (BLOCK));
  else
    bind = gimple_build_bind (NULL, NULL, gimple_bind_block (par_bind));
  gsi_replace (gsi_p, dep_bind ? dep_bind : bind, true);
  gimple_bind_add_seq (bind, ilist);
  /* A phony construct inlines its body; a real one keeps the (now
     rewritten) directive statement.  */
  if (!phony_construct)
    gimple_bind_add_stmt (bind, stmt);
  else
    gimple_bind_add_seq (bind, new_body);
  gimple_bind_add_seq (bind, olist);

  pop_gimplify_context (NULL);

  if (dep_bind)
    {
      /* Wrap everything: depend setup, task-reduction setup, the
	 construct itself, then the teardown sequences.  */
      gimple_bind_add_seq (dep_bind, dep_ilist);
      gimple_bind_add_seq (dep_bind, tskred_ilist);
      gimple_bind_add_stmt (dep_bind, bind);
      gimple_bind_add_seq (dep_bind, tskred_olist);
      gimple_bind_add_seq (dep_bind, dep_olist);
      pop_gimplify_context (dep_bind);
    }
}
9506
9507 /* Lower the GIMPLE_OMP_TARGET in the current statement
9508 in GSI_P. CTX holds context information for the directive. */
9509
9510 static void
9511 lower_omp_target (gimple_stmt_iterator *gsi_p, omp_context *ctx)
9512 {
9513 tree clauses;
9514 tree child_fn, t, c;
9515 gomp_target *stmt = as_a <gomp_target *> (gsi_stmt (*gsi_p));
9516 gbind *tgt_bind, *bind, *dep_bind = NULL;
9517 gimple_seq tgt_body, olist, ilist, fplist, new_body;
9518 location_t loc = gimple_location (stmt);
9519 bool offloaded, data_region;
9520 unsigned int map_cnt = 0;
9521
9522 offloaded = is_gimple_omp_offloaded (stmt);
9523 switch (gimple_omp_target_kind (stmt))
9524 {
9525 case GF_OMP_TARGET_KIND_REGION:
9526 case GF_OMP_TARGET_KIND_UPDATE:
9527 case GF_OMP_TARGET_KIND_ENTER_DATA:
9528 case GF_OMP_TARGET_KIND_EXIT_DATA:
9529 case GF_OMP_TARGET_KIND_OACC_PARALLEL:
9530 case GF_OMP_TARGET_KIND_OACC_KERNELS:
9531 case GF_OMP_TARGET_KIND_OACC_UPDATE:
9532 case GF_OMP_TARGET_KIND_OACC_ENTER_EXIT_DATA:
9533 case GF_OMP_TARGET_KIND_OACC_DECLARE:
9534 data_region = false;
9535 break;
9536 case GF_OMP_TARGET_KIND_DATA:
9537 case GF_OMP_TARGET_KIND_OACC_DATA:
9538 case GF_OMP_TARGET_KIND_OACC_HOST_DATA:
9539 data_region = true;
9540 break;
9541 default:
9542 gcc_unreachable ();
9543 }
9544
9545 clauses = gimple_omp_target_clauses (stmt);
9546
9547 gimple_seq dep_ilist = NULL;
9548 gimple_seq dep_olist = NULL;
9549 if (omp_find_clause (clauses, OMP_CLAUSE_DEPEND))
9550 {
9551 push_gimplify_context ();
9552 dep_bind = gimple_build_bind (NULL, NULL, make_node (BLOCK));
9553 lower_depend_clauses (gimple_omp_target_clauses_ptr (stmt),
9554 &dep_ilist, &dep_olist);
9555 }
9556
9557 tgt_bind = NULL;
9558 tgt_body = NULL;
9559 if (offloaded)
9560 {
9561 tgt_bind = gimple_seq_first_stmt_as_a_bind (gimple_omp_body (stmt));
9562 tgt_body = gimple_bind_body (tgt_bind);
9563 }
9564 else if (data_region)
9565 tgt_body = gimple_omp_body (stmt);
9566 child_fn = ctx->cb.dst_fn;
9567
9568 push_gimplify_context ();
9569 fplist = NULL;
9570
9571 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
9572 switch (OMP_CLAUSE_CODE (c))
9573 {
9574 tree var, x;
9575
9576 default:
9577 break;
9578 case OMP_CLAUSE_MAP:
9579 #if CHECKING_P
9580 /* First check what we're prepared to handle in the following. */
9581 switch (OMP_CLAUSE_MAP_KIND (c))
9582 {
9583 case GOMP_MAP_ALLOC:
9584 case GOMP_MAP_TO:
9585 case GOMP_MAP_FROM:
9586 case GOMP_MAP_TOFROM:
9587 case GOMP_MAP_POINTER:
9588 case GOMP_MAP_TO_PSET:
9589 case GOMP_MAP_DELETE:
9590 case GOMP_MAP_RELEASE:
9591 case GOMP_MAP_ALWAYS_TO:
9592 case GOMP_MAP_ALWAYS_FROM:
9593 case GOMP_MAP_ALWAYS_TOFROM:
9594 case GOMP_MAP_FIRSTPRIVATE_POINTER:
9595 case GOMP_MAP_FIRSTPRIVATE_REFERENCE:
9596 case GOMP_MAP_STRUCT:
9597 case GOMP_MAP_ALWAYS_POINTER:
9598 break;
9599 case GOMP_MAP_FORCE_ALLOC:
9600 case GOMP_MAP_FORCE_TO:
9601 case GOMP_MAP_FORCE_FROM:
9602 case GOMP_MAP_FORCE_TOFROM:
9603 case GOMP_MAP_FORCE_PRESENT:
9604 case GOMP_MAP_FORCE_DEVICEPTR:
9605 case GOMP_MAP_DEVICE_RESIDENT:
9606 case GOMP_MAP_LINK:
9607 gcc_assert (is_gimple_omp_oacc (stmt));
9608 break;
9609 default:
9610 gcc_unreachable ();
9611 }
9612 #endif
9613 /* FALLTHRU */
9614 case OMP_CLAUSE_TO:
9615 case OMP_CLAUSE_FROM:
9616 oacc_firstprivate:
9617 var = OMP_CLAUSE_DECL (c);
9618 if (!DECL_P (var))
9619 {
9620 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_MAP
9621 || (!OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c)
9622 && (OMP_CLAUSE_MAP_KIND (c)
9623 != GOMP_MAP_FIRSTPRIVATE_POINTER)))
9624 map_cnt++;
9625 continue;
9626 }
9627
9628 if (DECL_SIZE (var)
9629 && TREE_CODE (DECL_SIZE (var)) != INTEGER_CST)
9630 {
9631 tree var2 = DECL_VALUE_EXPR (var);
9632 gcc_assert (TREE_CODE (var2) == INDIRECT_REF);
9633 var2 = TREE_OPERAND (var2, 0);
9634 gcc_assert (DECL_P (var2));
9635 var = var2;
9636 }
9637
9638 if (offloaded
9639 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
9640 && (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER
9641 || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_REFERENCE))
9642 {
9643 if (TREE_CODE (TREE_TYPE (var)) == ARRAY_TYPE)
9644 {
9645 if (is_global_var (maybe_lookup_decl_in_outer_ctx (var, ctx))
9646 && varpool_node::get_create (var)->offloadable)
9647 continue;
9648
9649 tree type = build_pointer_type (TREE_TYPE (var));
9650 tree new_var = lookup_decl (var, ctx);
9651 x = create_tmp_var_raw (type, get_name (new_var));
9652 gimple_add_tmp_var (x);
9653 x = build_simple_mem_ref (x);
9654 SET_DECL_VALUE_EXPR (new_var, x);
9655 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
9656 }
9657 continue;
9658 }
9659
9660 if (!maybe_lookup_field (var, ctx))
9661 continue;
9662
9663 /* Don't remap oacc parallel reduction variables, because the
9664 intermediate result must be local to each gang. */
9665 if (offloaded && !(OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
9666 && OMP_CLAUSE_MAP_IN_REDUCTION (c)))
9667 {
9668 x = build_receiver_ref (var, true, ctx);
9669 tree new_var = lookup_decl (var, ctx);
9670
9671 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
9672 && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_POINTER
9673 && !OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c)
9674 && TREE_CODE (TREE_TYPE (var)) == ARRAY_TYPE)
9675 x = build_simple_mem_ref (x);
9676 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
9677 {
9678 gcc_assert (is_gimple_omp_oacc (ctx->stmt));
9679 if (omp_is_reference (new_var)
9680 && TREE_CODE (TREE_TYPE (new_var)) != POINTER_TYPE)
9681 {
9682 /* Create a local object to hold the instance
9683 value. */
9684 tree type = TREE_TYPE (TREE_TYPE (new_var));
9685 const char *id = IDENTIFIER_POINTER (DECL_NAME (new_var));
9686 tree inst = create_tmp_var (type, id);
9687 gimplify_assign (inst, fold_indirect_ref (x), &fplist);
9688 x = build_fold_addr_expr (inst);
9689 }
9690 gimplify_assign (new_var, x, &fplist);
9691 }
9692 else if (DECL_P (new_var))
9693 {
9694 SET_DECL_VALUE_EXPR (new_var, x);
9695 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
9696 }
9697 else
9698 gcc_unreachable ();
9699 }
9700 map_cnt++;
9701 break;
9702
9703 case OMP_CLAUSE_FIRSTPRIVATE:
9704 if (is_oacc_parallel (ctx))
9705 goto oacc_firstprivate;
9706 map_cnt++;
9707 var = OMP_CLAUSE_DECL (c);
9708 if (!omp_is_reference (var)
9709 && !is_gimple_reg_type (TREE_TYPE (var)))
9710 {
9711 tree new_var = lookup_decl (var, ctx);
9712 if (is_variable_sized (var))
9713 {
9714 tree pvar = DECL_VALUE_EXPR (var);
9715 gcc_assert (TREE_CODE (pvar) == INDIRECT_REF);
9716 pvar = TREE_OPERAND (pvar, 0);
9717 gcc_assert (DECL_P (pvar));
9718 tree new_pvar = lookup_decl (pvar, ctx);
9719 x = build_fold_indirect_ref (new_pvar);
9720 TREE_THIS_NOTRAP (x) = 1;
9721 }
9722 else
9723 x = build_receiver_ref (var, true, ctx);
9724 SET_DECL_VALUE_EXPR (new_var, x);
9725 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
9726 }
9727 break;
9728
9729 case OMP_CLAUSE_PRIVATE:
9730 if (is_gimple_omp_oacc (ctx->stmt))
9731 break;
9732 var = OMP_CLAUSE_DECL (c);
9733 if (is_variable_sized (var))
9734 {
9735 tree new_var = lookup_decl (var, ctx);
9736 tree pvar = DECL_VALUE_EXPR (var);
9737 gcc_assert (TREE_CODE (pvar) == INDIRECT_REF);
9738 pvar = TREE_OPERAND (pvar, 0);
9739 gcc_assert (DECL_P (pvar));
9740 tree new_pvar = lookup_decl (pvar, ctx);
9741 x = build_fold_indirect_ref (new_pvar);
9742 TREE_THIS_NOTRAP (x) = 1;
9743 SET_DECL_VALUE_EXPR (new_var, x);
9744 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
9745 }
9746 break;
9747
9748 case OMP_CLAUSE_USE_DEVICE_PTR:
9749 case OMP_CLAUSE_IS_DEVICE_PTR:
9750 var = OMP_CLAUSE_DECL (c);
9751 map_cnt++;
9752 if (is_variable_sized (var))
9753 {
9754 tree new_var = lookup_decl (var, ctx);
9755 tree pvar = DECL_VALUE_EXPR (var);
9756 gcc_assert (TREE_CODE (pvar) == INDIRECT_REF);
9757 pvar = TREE_OPERAND (pvar, 0);
9758 gcc_assert (DECL_P (pvar));
9759 tree new_pvar = lookup_decl (pvar, ctx);
9760 x = build_fold_indirect_ref (new_pvar);
9761 TREE_THIS_NOTRAP (x) = 1;
9762 SET_DECL_VALUE_EXPR (new_var, x);
9763 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
9764 }
9765 else if (TREE_CODE (TREE_TYPE (var)) == ARRAY_TYPE)
9766 {
9767 tree new_var = lookup_decl (var, ctx);
9768 tree type = build_pointer_type (TREE_TYPE (var));
9769 x = create_tmp_var_raw (type, get_name (new_var));
9770 gimple_add_tmp_var (x);
9771 x = build_simple_mem_ref (x);
9772 SET_DECL_VALUE_EXPR (new_var, x);
9773 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
9774 }
9775 else
9776 {
9777 tree new_var = lookup_decl (var, ctx);
9778 x = create_tmp_var_raw (TREE_TYPE (new_var), get_name (new_var));
9779 gimple_add_tmp_var (x);
9780 SET_DECL_VALUE_EXPR (new_var, x);
9781 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
9782 }
9783 break;
9784 }
9785
9786 if (offloaded)
9787 {
9788 target_nesting_level++;
9789 lower_omp (&tgt_body, ctx);
9790 target_nesting_level--;
9791 }
9792 else if (data_region)
9793 lower_omp (&tgt_body, ctx);
9794
9795 if (offloaded)
9796 {
9797 /* Declare all the variables created by mapping and the variables
9798 declared in the scope of the target body. */
9799 record_vars_into (ctx->block_vars, child_fn);
9800 maybe_remove_omp_member_access_dummy_vars (tgt_bind);
9801 record_vars_into (gimple_bind_vars (tgt_bind), child_fn);
9802 }
9803
9804 olist = NULL;
9805 ilist = NULL;
9806 if (ctx->record_type)
9807 {
9808 ctx->sender_decl
9809 = create_tmp_var (ctx->record_type, ".omp_data_arr");
9810 DECL_NAMELESS (ctx->sender_decl) = 1;
9811 TREE_ADDRESSABLE (ctx->sender_decl) = 1;
9812 t = make_tree_vec (3);
9813 TREE_VEC_ELT (t, 0) = ctx->sender_decl;
9814 TREE_VEC_ELT (t, 1)
9815 = create_tmp_var (build_array_type_nelts (size_type_node, map_cnt),
9816 ".omp_data_sizes");
9817 DECL_NAMELESS (TREE_VEC_ELT (t, 1)) = 1;
9818 TREE_ADDRESSABLE (TREE_VEC_ELT (t, 1)) = 1;
9819 TREE_STATIC (TREE_VEC_ELT (t, 1)) = 1;
9820 tree tkind_type = short_unsigned_type_node;
9821 int talign_shift = 8;
9822 TREE_VEC_ELT (t, 2)
9823 = create_tmp_var (build_array_type_nelts (tkind_type, map_cnt),
9824 ".omp_data_kinds");
9825 DECL_NAMELESS (TREE_VEC_ELT (t, 2)) = 1;
9826 TREE_ADDRESSABLE (TREE_VEC_ELT (t, 2)) = 1;
9827 TREE_STATIC (TREE_VEC_ELT (t, 2)) = 1;
9828 gimple_omp_target_set_data_arg (stmt, t);
9829
9830 vec<constructor_elt, va_gc> *vsize;
9831 vec<constructor_elt, va_gc> *vkind;
9832 vec_alloc (vsize, map_cnt);
9833 vec_alloc (vkind, map_cnt);
9834 unsigned int map_idx = 0;
9835
9836 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
9837 switch (OMP_CLAUSE_CODE (c))
9838 {
9839 tree ovar, nc, s, purpose, var, x, type;
9840 unsigned int talign;
9841
9842 default:
9843 break;
9844
9845 case OMP_CLAUSE_MAP:
9846 case OMP_CLAUSE_TO:
9847 case OMP_CLAUSE_FROM:
9848 oacc_firstprivate_map:
9849 nc = c;
9850 ovar = OMP_CLAUSE_DECL (c);
9851 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
9852 && (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER
9853 || (OMP_CLAUSE_MAP_KIND (c)
9854 == GOMP_MAP_FIRSTPRIVATE_REFERENCE)))
9855 break;
9856 if (!DECL_P (ovar))
9857 {
9858 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
9859 && OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c))
9860 {
9861 gcc_checking_assert (OMP_CLAUSE_DECL (OMP_CLAUSE_CHAIN (c))
9862 == get_base_address (ovar));
9863 nc = OMP_CLAUSE_CHAIN (c);
9864 ovar = OMP_CLAUSE_DECL (nc);
9865 }
9866 else
9867 {
9868 tree x = build_sender_ref (ovar, ctx);
9869 tree v
9870 = build_fold_addr_expr_with_type (ovar, ptr_type_node);
9871 gimplify_assign (x, v, &ilist);
9872 nc = NULL_TREE;
9873 }
9874 }
9875 else
9876 {
9877 if (DECL_SIZE (ovar)
9878 && TREE_CODE (DECL_SIZE (ovar)) != INTEGER_CST)
9879 {
9880 tree ovar2 = DECL_VALUE_EXPR (ovar);
9881 gcc_assert (TREE_CODE (ovar2) == INDIRECT_REF);
9882 ovar2 = TREE_OPERAND (ovar2, 0);
9883 gcc_assert (DECL_P (ovar2));
9884 ovar = ovar2;
9885 }
9886 if (!maybe_lookup_field (ovar, ctx))
9887 continue;
9888 }
9889
9890 talign = TYPE_ALIGN_UNIT (TREE_TYPE (ovar));
9891 if (DECL_P (ovar) && DECL_ALIGN_UNIT (ovar) > talign)
9892 talign = DECL_ALIGN_UNIT (ovar);
9893 if (nc)
9894 {
9895 var = lookup_decl_in_outer_ctx (ovar, ctx);
9896 x = build_sender_ref (ovar, ctx);
9897
9898 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
9899 && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_POINTER
9900 && !OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c)
9901 && TREE_CODE (TREE_TYPE (ovar)) == ARRAY_TYPE)
9902 {
9903 gcc_assert (offloaded);
9904 tree avar
9905 = create_tmp_var (TREE_TYPE (TREE_TYPE (x)));
9906 mark_addressable (avar);
9907 gimplify_assign (avar, build_fold_addr_expr (var), &ilist);
9908 talign = DECL_ALIGN_UNIT (avar);
9909 avar = build_fold_addr_expr (avar);
9910 gimplify_assign (x, avar, &ilist);
9911 }
9912 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
9913 {
9914 gcc_assert (is_gimple_omp_oacc (ctx->stmt));
9915 if (!omp_is_reference (var))
9916 {
9917 if (is_gimple_reg (var)
9918 && OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (c))
9919 TREE_NO_WARNING (var) = 1;
9920 var = build_fold_addr_expr (var);
9921 }
9922 else
9923 talign = TYPE_ALIGN_UNIT (TREE_TYPE (TREE_TYPE (ovar)));
9924 gimplify_assign (x, var, &ilist);
9925 }
9926 else if (is_gimple_reg (var))
9927 {
9928 gcc_assert (offloaded);
9929 tree avar = create_tmp_var (TREE_TYPE (var));
9930 mark_addressable (avar);
9931 enum gomp_map_kind map_kind = OMP_CLAUSE_MAP_KIND (c);
9932 if (GOMP_MAP_COPY_TO_P (map_kind)
9933 || map_kind == GOMP_MAP_POINTER
9934 || map_kind == GOMP_MAP_TO_PSET
9935 || map_kind == GOMP_MAP_FORCE_DEVICEPTR)
9936 {
9937 /* If we need to initialize a temporary
9938 with VAR because it is not addressable, and
9939 the variable hasn't been initialized yet, then
9940 we'll get a warning for the store to avar.
9941 Don't warn in that case, the mapping might
9942 be implicit. */
9943 TREE_NO_WARNING (var) = 1;
9944 gimplify_assign (avar, var, &ilist);
9945 }
9946 avar = build_fold_addr_expr (avar);
9947 gimplify_assign (x, avar, &ilist);
9948 if ((GOMP_MAP_COPY_FROM_P (map_kind)
9949 || map_kind == GOMP_MAP_FORCE_DEVICEPTR)
9950 && !TYPE_READONLY (TREE_TYPE (var)))
9951 {
9952 x = unshare_expr (x);
9953 x = build_simple_mem_ref (x);
9954 gimplify_assign (var, x, &olist);
9955 }
9956 }
9957 else
9958 {
9959 var = build_fold_addr_expr (var);
9960 gimplify_assign (x, var, &ilist);
9961 }
9962 }
9963 s = NULL_TREE;
9964 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
9965 {
9966 gcc_checking_assert (is_gimple_omp_oacc (ctx->stmt));
9967 s = TREE_TYPE (ovar);
9968 if (TREE_CODE (s) == REFERENCE_TYPE)
9969 s = TREE_TYPE (s);
9970 s = TYPE_SIZE_UNIT (s);
9971 }
9972 else
9973 s = OMP_CLAUSE_SIZE (c);
9974 if (s == NULL_TREE)
9975 s = TYPE_SIZE_UNIT (TREE_TYPE (ovar));
9976 s = fold_convert (size_type_node, s);
9977 purpose = size_int (map_idx++);
9978 CONSTRUCTOR_APPEND_ELT (vsize, purpose, s);
9979 if (TREE_CODE (s) != INTEGER_CST)
9980 TREE_STATIC (TREE_VEC_ELT (t, 1)) = 0;
9981
9982 unsigned HOST_WIDE_INT tkind, tkind_zero;
9983 switch (OMP_CLAUSE_CODE (c))
9984 {
9985 case OMP_CLAUSE_MAP:
9986 tkind = OMP_CLAUSE_MAP_KIND (c);
9987 tkind_zero = tkind;
9988 if (OMP_CLAUSE_MAP_MAYBE_ZERO_LENGTH_ARRAY_SECTION (c))
9989 switch (tkind)
9990 {
9991 case GOMP_MAP_ALLOC:
9992 case GOMP_MAP_TO:
9993 case GOMP_MAP_FROM:
9994 case GOMP_MAP_TOFROM:
9995 case GOMP_MAP_ALWAYS_TO:
9996 case GOMP_MAP_ALWAYS_FROM:
9997 case GOMP_MAP_ALWAYS_TOFROM:
9998 case GOMP_MAP_RELEASE:
9999 case GOMP_MAP_FORCE_TO:
10000 case GOMP_MAP_FORCE_FROM:
10001 case GOMP_MAP_FORCE_TOFROM:
10002 case GOMP_MAP_FORCE_PRESENT:
10003 tkind_zero = GOMP_MAP_ZERO_LEN_ARRAY_SECTION;
10004 break;
10005 case GOMP_MAP_DELETE:
10006 tkind_zero = GOMP_MAP_DELETE_ZERO_LEN_ARRAY_SECTION;
10007 default:
10008 break;
10009 }
10010 if (tkind_zero != tkind)
10011 {
10012 if (integer_zerop (s))
10013 tkind = tkind_zero;
10014 else if (integer_nonzerop (s))
10015 tkind_zero = tkind;
10016 }
10017 break;
10018 case OMP_CLAUSE_FIRSTPRIVATE:
10019 gcc_checking_assert (is_gimple_omp_oacc (ctx->stmt));
10020 tkind = GOMP_MAP_TO;
10021 tkind_zero = tkind;
10022 break;
10023 case OMP_CLAUSE_TO:
10024 tkind = GOMP_MAP_TO;
10025 tkind_zero = tkind;
10026 break;
10027 case OMP_CLAUSE_FROM:
10028 tkind = GOMP_MAP_FROM;
10029 tkind_zero = tkind;
10030 break;
10031 default:
10032 gcc_unreachable ();
10033 }
10034 gcc_checking_assert (tkind
10035 < (HOST_WIDE_INT_C (1U) << talign_shift));
10036 gcc_checking_assert (tkind_zero
10037 < (HOST_WIDE_INT_C (1U) << talign_shift));
10038 talign = ceil_log2 (talign);
10039 tkind |= talign << talign_shift;
10040 tkind_zero |= talign << talign_shift;
10041 gcc_checking_assert (tkind
10042 <= tree_to_uhwi (TYPE_MAX_VALUE (tkind_type)));
10043 gcc_checking_assert (tkind_zero
10044 <= tree_to_uhwi (TYPE_MAX_VALUE (tkind_type)));
10045 if (tkind == tkind_zero)
10046 x = build_int_cstu (tkind_type, tkind);
10047 else
10048 {
10049 TREE_STATIC (TREE_VEC_ELT (t, 2)) = 0;
10050 x = build3 (COND_EXPR, tkind_type,
10051 fold_build2 (EQ_EXPR, boolean_type_node,
10052 unshare_expr (s), size_zero_node),
10053 build_int_cstu (tkind_type, tkind_zero),
10054 build_int_cstu (tkind_type, tkind));
10055 }
10056 CONSTRUCTOR_APPEND_ELT (vkind, purpose, x);
10057 if (nc && nc != c)
10058 c = nc;
10059 break;
10060
10061 case OMP_CLAUSE_FIRSTPRIVATE:
10062 if (is_oacc_parallel (ctx))
10063 goto oacc_firstprivate_map;
10064 ovar = OMP_CLAUSE_DECL (c);
10065 if (omp_is_reference (ovar))
10066 talign = TYPE_ALIGN_UNIT (TREE_TYPE (TREE_TYPE (ovar)));
10067 else
10068 talign = DECL_ALIGN_UNIT (ovar);
10069 var = lookup_decl_in_outer_ctx (ovar, ctx);
10070 x = build_sender_ref (ovar, ctx);
10071 tkind = GOMP_MAP_FIRSTPRIVATE;
10072 type = TREE_TYPE (ovar);
10073 if (omp_is_reference (ovar))
10074 type = TREE_TYPE (type);
10075 if ((INTEGRAL_TYPE_P (type)
10076 && TYPE_PRECISION (type) <= POINTER_SIZE)
10077 || TREE_CODE (type) == POINTER_TYPE)
10078 {
10079 tkind = GOMP_MAP_FIRSTPRIVATE_INT;
10080 tree t = var;
10081 if (omp_is_reference (var))
10082 t = build_simple_mem_ref (var);
10083 else if (OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (c))
10084 TREE_NO_WARNING (var) = 1;
10085 if (TREE_CODE (type) != POINTER_TYPE)
10086 t = fold_convert (pointer_sized_int_node, t);
10087 t = fold_convert (TREE_TYPE (x), t);
10088 gimplify_assign (x, t, &ilist);
10089 }
10090 else if (omp_is_reference (var))
10091 gimplify_assign (x, var, &ilist);
10092 else if (is_gimple_reg (var))
10093 {
10094 tree avar = create_tmp_var (TREE_TYPE (var));
10095 mark_addressable (avar);
10096 if (OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (c))
10097 TREE_NO_WARNING (var) = 1;
10098 gimplify_assign (avar, var, &ilist);
10099 avar = build_fold_addr_expr (avar);
10100 gimplify_assign (x, avar, &ilist);
10101 }
10102 else
10103 {
10104 var = build_fold_addr_expr (var);
10105 gimplify_assign (x, var, &ilist);
10106 }
10107 if (tkind == GOMP_MAP_FIRSTPRIVATE_INT)
10108 s = size_int (0);
10109 else if (omp_is_reference (ovar))
10110 s = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ovar)));
10111 else
10112 s = TYPE_SIZE_UNIT (TREE_TYPE (ovar));
10113 s = fold_convert (size_type_node, s);
10114 purpose = size_int (map_idx++);
10115 CONSTRUCTOR_APPEND_ELT (vsize, purpose, s);
10116 if (TREE_CODE (s) != INTEGER_CST)
10117 TREE_STATIC (TREE_VEC_ELT (t, 1)) = 0;
10118
10119 gcc_checking_assert (tkind
10120 < (HOST_WIDE_INT_C (1U) << talign_shift));
10121 talign = ceil_log2 (talign);
10122 tkind |= talign << talign_shift;
10123 gcc_checking_assert (tkind
10124 <= tree_to_uhwi (TYPE_MAX_VALUE (tkind_type)));
10125 CONSTRUCTOR_APPEND_ELT (vkind, purpose,
10126 build_int_cstu (tkind_type, tkind));
10127 break;
10128
10129 case OMP_CLAUSE_USE_DEVICE_PTR:
10130 case OMP_CLAUSE_IS_DEVICE_PTR:
10131 ovar = OMP_CLAUSE_DECL (c);
10132 var = lookup_decl_in_outer_ctx (ovar, ctx);
10133 x = build_sender_ref (ovar, ctx);
10134 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_USE_DEVICE_PTR)
10135 tkind = GOMP_MAP_USE_DEVICE_PTR;
10136 else
10137 tkind = GOMP_MAP_FIRSTPRIVATE_INT;
10138 type = TREE_TYPE (ovar);
10139 if (TREE_CODE (type) == ARRAY_TYPE)
10140 var = build_fold_addr_expr (var);
10141 else
10142 {
10143 if (omp_is_reference (ovar))
10144 {
10145 type = TREE_TYPE (type);
10146 if (TREE_CODE (type) != ARRAY_TYPE)
10147 var = build_simple_mem_ref (var);
10148 var = fold_convert (TREE_TYPE (x), var);
10149 }
10150 }
10151 gimplify_assign (x, var, &ilist);
10152 s = size_int (0);
10153 purpose = size_int (map_idx++);
10154 CONSTRUCTOR_APPEND_ELT (vsize, purpose, s);
10155 gcc_checking_assert (tkind
10156 < (HOST_WIDE_INT_C (1U) << talign_shift));
10157 gcc_checking_assert (tkind
10158 <= tree_to_uhwi (TYPE_MAX_VALUE (tkind_type)));
10159 CONSTRUCTOR_APPEND_ELT (vkind, purpose,
10160 build_int_cstu (tkind_type, tkind));
10161 break;
10162 }
10163
10164 gcc_assert (map_idx == map_cnt);
10165
10166 DECL_INITIAL (TREE_VEC_ELT (t, 1))
10167 = build_constructor (TREE_TYPE (TREE_VEC_ELT (t, 1)), vsize);
10168 DECL_INITIAL (TREE_VEC_ELT (t, 2))
10169 = build_constructor (TREE_TYPE (TREE_VEC_ELT (t, 2)), vkind);
10170 for (int i = 1; i <= 2; i++)
10171 if (!TREE_STATIC (TREE_VEC_ELT (t, i)))
10172 {
10173 gimple_seq initlist = NULL;
10174 force_gimple_operand (build1 (DECL_EXPR, void_type_node,
10175 TREE_VEC_ELT (t, i)),
10176 &initlist, true, NULL_TREE);
10177 gimple_seq_add_seq (&ilist, initlist);
10178
10179 tree clobber = build_constructor (TREE_TYPE (TREE_VEC_ELT (t, i)),
10180 NULL);
10181 TREE_THIS_VOLATILE (clobber) = 1;
10182 gimple_seq_add_stmt (&olist,
10183 gimple_build_assign (TREE_VEC_ELT (t, i),
10184 clobber));
10185 }
10186
10187 tree clobber = build_constructor (ctx->record_type, NULL);
10188 TREE_THIS_VOLATILE (clobber) = 1;
10189 gimple_seq_add_stmt (&olist, gimple_build_assign (ctx->sender_decl,
10190 clobber));
10191 }
10192
10193 /* Once all the expansions are done, sequence all the different
10194 fragments inside gimple_omp_body. */
10195
10196 new_body = NULL;
10197
10198 if (offloaded
10199 && ctx->record_type)
10200 {
10201 t = build_fold_addr_expr_loc (loc, ctx->sender_decl);
10202 /* fixup_child_record_type might have changed receiver_decl's type. */
10203 t = fold_convert_loc (loc, TREE_TYPE (ctx->receiver_decl), t);
10204 gimple_seq_add_stmt (&new_body,
10205 gimple_build_assign (ctx->receiver_decl, t));
10206 }
10207 gimple_seq_add_seq (&new_body, fplist);
10208
10209 if (offloaded || data_region)
10210 {
10211 tree prev = NULL_TREE;
10212 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
10213 switch (OMP_CLAUSE_CODE (c))
10214 {
10215 tree var, x;
10216 default:
10217 break;
10218 case OMP_CLAUSE_FIRSTPRIVATE:
10219 if (is_gimple_omp_oacc (ctx->stmt))
10220 break;
10221 var = OMP_CLAUSE_DECL (c);
10222 if (omp_is_reference (var)
10223 || is_gimple_reg_type (TREE_TYPE (var)))
10224 {
10225 tree new_var = lookup_decl (var, ctx);
10226 tree type;
10227 type = TREE_TYPE (var);
10228 if (omp_is_reference (var))
10229 type = TREE_TYPE (type);
10230 if ((INTEGRAL_TYPE_P (type)
10231 && TYPE_PRECISION (type) <= POINTER_SIZE)
10232 || TREE_CODE (type) == POINTER_TYPE)
10233 {
10234 x = build_receiver_ref (var, false, ctx);
10235 if (TREE_CODE (type) != POINTER_TYPE)
10236 x = fold_convert (pointer_sized_int_node, x);
10237 x = fold_convert (type, x);
10238 gimplify_expr (&x, &new_body, NULL, is_gimple_val,
10239 fb_rvalue);
10240 if (omp_is_reference (var))
10241 {
10242 tree v = create_tmp_var_raw (type, get_name (var));
10243 gimple_add_tmp_var (v);
10244 TREE_ADDRESSABLE (v) = 1;
10245 gimple_seq_add_stmt (&new_body,
10246 gimple_build_assign (v, x));
10247 x = build_fold_addr_expr (v);
10248 }
10249 gimple_seq_add_stmt (&new_body,
10250 gimple_build_assign (new_var, x));
10251 }
10252 else
10253 {
10254 x = build_receiver_ref (var, !omp_is_reference (var), ctx);
10255 gimplify_expr (&x, &new_body, NULL, is_gimple_val,
10256 fb_rvalue);
10257 gimple_seq_add_stmt (&new_body,
10258 gimple_build_assign (new_var, x));
10259 }
10260 }
10261 else if (is_variable_sized (var))
10262 {
10263 tree pvar = DECL_VALUE_EXPR (var);
10264 gcc_assert (TREE_CODE (pvar) == INDIRECT_REF);
10265 pvar = TREE_OPERAND (pvar, 0);
10266 gcc_assert (DECL_P (pvar));
10267 tree new_var = lookup_decl (pvar, ctx);
10268 x = build_receiver_ref (var, false, ctx);
10269 gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
10270 gimple_seq_add_stmt (&new_body,
10271 gimple_build_assign (new_var, x));
10272 }
10273 break;
10274 case OMP_CLAUSE_PRIVATE:
10275 if (is_gimple_omp_oacc (ctx->stmt))
10276 break;
10277 var = OMP_CLAUSE_DECL (c);
10278 if (omp_is_reference (var))
10279 {
10280 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
10281 tree new_var = lookup_decl (var, ctx);
10282 x = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (new_var)));
10283 if (TREE_CONSTANT (x))
10284 {
10285 x = create_tmp_var_raw (TREE_TYPE (TREE_TYPE (new_var)),
10286 get_name (var));
10287 gimple_add_tmp_var (x);
10288 TREE_ADDRESSABLE (x) = 1;
10289 x = build_fold_addr_expr_loc (clause_loc, x);
10290 }
10291 else
10292 break;
10293
10294 x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x);
10295 gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
10296 gimple_seq_add_stmt (&new_body,
10297 gimple_build_assign (new_var, x));
10298 }
10299 break;
10300 case OMP_CLAUSE_USE_DEVICE_PTR:
10301 case OMP_CLAUSE_IS_DEVICE_PTR:
10302 var = OMP_CLAUSE_DECL (c);
10303 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_USE_DEVICE_PTR)
10304 x = build_sender_ref (var, ctx);
10305 else
10306 x = build_receiver_ref (var, false, ctx);
10307 if (is_variable_sized (var))
10308 {
10309 tree pvar = DECL_VALUE_EXPR (var);
10310 gcc_assert (TREE_CODE (pvar) == INDIRECT_REF);
10311 pvar = TREE_OPERAND (pvar, 0);
10312 gcc_assert (DECL_P (pvar));
10313 tree new_var = lookup_decl (pvar, ctx);
10314 gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
10315 gimple_seq_add_stmt (&new_body,
10316 gimple_build_assign (new_var, x));
10317 }
10318 else if (TREE_CODE (TREE_TYPE (var)) == ARRAY_TYPE)
10319 {
10320 tree new_var = lookup_decl (var, ctx);
10321 new_var = DECL_VALUE_EXPR (new_var);
10322 gcc_assert (TREE_CODE (new_var) == MEM_REF);
10323 new_var = TREE_OPERAND (new_var, 0);
10324 gcc_assert (DECL_P (new_var));
10325 gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
10326 gimple_seq_add_stmt (&new_body,
10327 gimple_build_assign (new_var, x));
10328 }
10329 else
10330 {
10331 tree type = TREE_TYPE (var);
10332 tree new_var = lookup_decl (var, ctx);
10333 if (omp_is_reference (var))
10334 {
10335 type = TREE_TYPE (type);
10336 if (TREE_CODE (type) != ARRAY_TYPE)
10337 {
10338 tree v = create_tmp_var_raw (type, get_name (var));
10339 gimple_add_tmp_var (v);
10340 TREE_ADDRESSABLE (v) = 1;
10341 x = fold_convert (type, x);
10342 gimplify_expr (&x, &new_body, NULL, is_gimple_val,
10343 fb_rvalue);
10344 gimple_seq_add_stmt (&new_body,
10345 gimple_build_assign (v, x));
10346 x = build_fold_addr_expr (v);
10347 }
10348 }
10349 new_var = DECL_VALUE_EXPR (new_var);
10350 x = fold_convert (TREE_TYPE (new_var), x);
10351 gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
10352 gimple_seq_add_stmt (&new_body,
10353 gimple_build_assign (new_var, x));
10354 }
10355 break;
10356 }
10357 /* Handle GOMP_MAP_FIRSTPRIVATE_{POINTER,REFERENCE} in second pass,
10358 so that firstprivate vars holding OMP_CLAUSE_SIZE if needed
10359 are already handled. Similarly OMP_CLAUSE_PRIVATE for VLAs
10360 or references to VLAs. */
10361 for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
10362 switch (OMP_CLAUSE_CODE (c))
10363 {
10364 tree var;
10365 default:
10366 break;
10367 case OMP_CLAUSE_MAP:
10368 if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER
10369 || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_REFERENCE)
10370 {
10371 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
10372 poly_int64 offset = 0;
10373 gcc_assert (prev);
10374 var = OMP_CLAUSE_DECL (c);
10375 if (DECL_P (var)
10376 && TREE_CODE (TREE_TYPE (var)) == ARRAY_TYPE
10377 && is_global_var (maybe_lookup_decl_in_outer_ctx (var,
10378 ctx))
10379 && varpool_node::get_create (var)->offloadable)
10380 break;
10381 if (TREE_CODE (var) == INDIRECT_REF
10382 && TREE_CODE (TREE_OPERAND (var, 0)) == COMPONENT_REF)
10383 var = TREE_OPERAND (var, 0);
10384 if (TREE_CODE (var) == COMPONENT_REF)
10385 {
10386 var = get_addr_base_and_unit_offset (var, &offset);
10387 gcc_assert (var != NULL_TREE && DECL_P (var));
10388 }
10389 else if (DECL_SIZE (var)
10390 && TREE_CODE (DECL_SIZE (var)) != INTEGER_CST)
10391 {
10392 tree var2 = DECL_VALUE_EXPR (var);
10393 gcc_assert (TREE_CODE (var2) == INDIRECT_REF);
10394 var2 = TREE_OPERAND (var2, 0);
10395 gcc_assert (DECL_P (var2));
10396 var = var2;
10397 }
10398 tree new_var = lookup_decl (var, ctx), x;
10399 tree type = TREE_TYPE (new_var);
10400 bool is_ref;
10401 if (TREE_CODE (OMP_CLAUSE_DECL (c)) == INDIRECT_REF
10402 && (TREE_CODE (TREE_OPERAND (OMP_CLAUSE_DECL (c), 0))
10403 == COMPONENT_REF))
10404 {
10405 type = TREE_TYPE (TREE_OPERAND (OMP_CLAUSE_DECL (c), 0));
10406 is_ref = true;
10407 new_var = build2 (MEM_REF, type,
10408 build_fold_addr_expr (new_var),
10409 build_int_cst (build_pointer_type (type),
10410 offset));
10411 }
10412 else if (TREE_CODE (OMP_CLAUSE_DECL (c)) == COMPONENT_REF)
10413 {
10414 type = TREE_TYPE (OMP_CLAUSE_DECL (c));
10415 is_ref = TREE_CODE (type) == REFERENCE_TYPE;
10416 new_var = build2 (MEM_REF, type,
10417 build_fold_addr_expr (new_var),
10418 build_int_cst (build_pointer_type (type),
10419 offset));
10420 }
10421 else
10422 is_ref = omp_is_reference (var);
10423 if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_REFERENCE)
10424 is_ref = false;
10425 bool ref_to_array = false;
10426 if (is_ref)
10427 {
10428 type = TREE_TYPE (type);
10429 if (TREE_CODE (type) == ARRAY_TYPE)
10430 {
10431 type = build_pointer_type (type);
10432 ref_to_array = true;
10433 }
10434 }
10435 else if (TREE_CODE (type) == ARRAY_TYPE)
10436 {
10437 tree decl2 = DECL_VALUE_EXPR (new_var);
10438 gcc_assert (TREE_CODE (decl2) == MEM_REF);
10439 decl2 = TREE_OPERAND (decl2, 0);
10440 gcc_assert (DECL_P (decl2));
10441 new_var = decl2;
10442 type = TREE_TYPE (new_var);
10443 }
10444 x = build_receiver_ref (OMP_CLAUSE_DECL (prev), false, ctx);
10445 x = fold_convert_loc (clause_loc, type, x);
10446 if (!integer_zerop (OMP_CLAUSE_SIZE (c)))
10447 {
10448 tree bias = OMP_CLAUSE_SIZE (c);
10449 if (DECL_P (bias))
10450 bias = lookup_decl (bias, ctx);
10451 bias = fold_convert_loc (clause_loc, sizetype, bias);
10452 bias = fold_build1_loc (clause_loc, NEGATE_EXPR, sizetype,
10453 bias);
10454 x = fold_build2_loc (clause_loc, POINTER_PLUS_EXPR,
10455 TREE_TYPE (x), x, bias);
10456 }
10457 if (ref_to_array)
10458 x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x);
10459 gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
10460 if (is_ref && !ref_to_array)
10461 {
10462 tree t = create_tmp_var_raw (type, get_name (var));
10463 gimple_add_tmp_var (t);
10464 TREE_ADDRESSABLE (t) = 1;
10465 gimple_seq_add_stmt (&new_body,
10466 gimple_build_assign (t, x));
10467 x = build_fold_addr_expr_loc (clause_loc, t);
10468 }
10469 gimple_seq_add_stmt (&new_body,
10470 gimple_build_assign (new_var, x));
10471 prev = NULL_TREE;
10472 }
10473 else if (OMP_CLAUSE_CHAIN (c)
10474 && OMP_CLAUSE_CODE (OMP_CLAUSE_CHAIN (c))
10475 == OMP_CLAUSE_MAP
10476 && (OMP_CLAUSE_MAP_KIND (OMP_CLAUSE_CHAIN (c))
10477 == GOMP_MAP_FIRSTPRIVATE_POINTER
10478 || (OMP_CLAUSE_MAP_KIND (OMP_CLAUSE_CHAIN (c))
10479 == GOMP_MAP_FIRSTPRIVATE_REFERENCE)))
10480 prev = c;
10481 break;
10482 case OMP_CLAUSE_PRIVATE:
10483 var = OMP_CLAUSE_DECL (c);
10484 if (is_variable_sized (var))
10485 {
10486 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
10487 tree new_var = lookup_decl (var, ctx);
10488 tree pvar = DECL_VALUE_EXPR (var);
10489 gcc_assert (TREE_CODE (pvar) == INDIRECT_REF);
10490 pvar = TREE_OPERAND (pvar, 0);
10491 gcc_assert (DECL_P (pvar));
10492 tree new_pvar = lookup_decl (pvar, ctx);
10493 tree atmp = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
10494 tree al = size_int (DECL_ALIGN (var));
10495 tree x = TYPE_SIZE_UNIT (TREE_TYPE (new_var));
10496 x = build_call_expr_loc (clause_loc, atmp, 2, x, al);
10497 x = fold_convert_loc (clause_loc, TREE_TYPE (new_pvar), x);
10498 gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
10499 gimple_seq_add_stmt (&new_body,
10500 gimple_build_assign (new_pvar, x));
10501 }
10502 else if (omp_is_reference (var) && !is_gimple_omp_oacc (ctx->stmt))
10503 {
10504 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
10505 tree new_var = lookup_decl (var, ctx);
10506 tree x = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (new_var)));
10507 if (TREE_CONSTANT (x))
10508 break;
10509 else
10510 {
10511 tree atmp
10512 = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
10513 tree rtype = TREE_TYPE (TREE_TYPE (new_var));
10514 tree al = size_int (TYPE_ALIGN (rtype));
10515 x = build_call_expr_loc (clause_loc, atmp, 2, x, al);
10516 }
10517
10518 x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x);
10519 gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
10520 gimple_seq_add_stmt (&new_body,
10521 gimple_build_assign (new_var, x));
10522 }
10523 break;
10524 }
10525
10526 gimple_seq fork_seq = NULL;
10527 gimple_seq join_seq = NULL;
10528
10529 if (is_oacc_parallel (ctx))
10530 {
10531 /* If there are reductions on the offloaded region itself, treat
10532 them as a dummy GANG loop. */
10533 tree level = build_int_cst (integer_type_node, GOMP_DIM_GANG);
10534
10535 lower_oacc_reductions (gimple_location (ctx->stmt), clauses, level,
10536 false, NULL, NULL, &fork_seq, &join_seq, ctx);
10537 }
10538
10539 gimple_seq_add_seq (&new_body, fork_seq);
10540 gimple_seq_add_seq (&new_body, tgt_body);
10541 gimple_seq_add_seq (&new_body, join_seq);
10542
10543 if (offloaded)
10544 new_body = maybe_catch_exception (new_body);
10545
10546 gimple_seq_add_stmt (&new_body, gimple_build_omp_return (false));
10547 gimple_omp_set_body (stmt, new_body);
10548 }
10549
10550 bind = gimple_build_bind (NULL, NULL,
10551 tgt_bind ? gimple_bind_block (tgt_bind)
10552 : NULL_TREE);
10553 gsi_replace (gsi_p, dep_bind ? dep_bind : bind, true);
10554 gimple_bind_add_seq (bind, ilist);
10555 gimple_bind_add_stmt (bind, stmt);
10556 gimple_bind_add_seq (bind, olist);
10557
10558 pop_gimplify_context (NULL);
10559
10560 if (dep_bind)
10561 {
10562 gimple_bind_add_seq (dep_bind, dep_ilist);
10563 gimple_bind_add_stmt (dep_bind, bind);
10564 gimple_bind_add_seq (dep_bind, dep_olist);
10565 pop_gimplify_context (dep_bind);
10566 }
10567 }
10568
/* Expand code for an OpenMP teams directive.  */

static void
lower_omp_teams (gimple_stmt_iterator *gsi_p, omp_context *ctx)
{
  gomp_teams *teams_stmt = as_a <gomp_teams *> (gsi_stmt (*gsi_p));
  push_gimplify_context ();

  /* Replace the teams statement in the IL with a GIMPLE_BIND that will
     hold the whole lowered construct; BIND_BODY collects the setup code,
     the (possibly re-added) teams statement and the lowered body.  */
  tree block = make_node (BLOCK);
  gbind *bind = gimple_build_bind (NULL, NULL, block);
  gsi_replace (gsi_p, bind, true);
  gimple_seq bind_body = NULL;
  gimple_seq dlist = NULL;   /* Destructor/cleanup code for privatized vars.  */
  gimple_seq olist = NULL;   /* Reduction combiner code.  */

  /* Evaluate the num_teams clause expression into a gimple value; an
     absent clause becomes the constant 0 (let the runtime choose).  */
  tree num_teams = omp_find_clause (gimple_omp_teams_clauses (teams_stmt),
				    OMP_CLAUSE_NUM_TEAMS);
  if (num_teams == NULL_TREE)
    num_teams = build_int_cst (unsigned_type_node, 0);
  else
    {
      num_teams = OMP_CLAUSE_NUM_TEAMS_EXPR (num_teams);
      num_teams = fold_convert (unsigned_type_node, num_teams);
      gimplify_expr (&num_teams, &bind_body, NULL, is_gimple_val, fb_rvalue);
    }
  /* Likewise for the thread_limit clause; 0 again means no limit
     requested.  */
  tree thread_limit = omp_find_clause (gimple_omp_teams_clauses (teams_stmt),
				       OMP_CLAUSE_THREAD_LIMIT);
  if (thread_limit == NULL_TREE)
    thread_limit = build_int_cst (unsigned_type_node, 0);
  else
    {
      thread_limit = OMP_CLAUSE_THREAD_LIMIT_EXPR (thread_limit);
      thread_limit = fold_convert (unsigned_type_node, thread_limit);
      gimplify_expr (&thread_limit, &bind_body, NULL, is_gimple_val,
		     fb_rvalue);
    }

  /* Lower data-sharing clauses and the construct body itself.  */
  lower_rec_input_clauses (gimple_omp_teams_clauses (teams_stmt),
			   &bind_body, &dlist, ctx, NULL);
  lower_omp (gimple_omp_body_ptr (teams_stmt), ctx);
  lower_reduction_clauses (gimple_omp_teams_clauses (teams_stmt), &olist,
			   NULL, ctx);
  /* For a "phony" teams construct generated by gridification there is no
     runtime launch; otherwise emit the GOMP_teams (num_teams,
     thread_limit) runtime call right after the teams statement.  */
  if (!gimple_omp_teams_grid_phony (teams_stmt))
    {
      gimple_seq_add_stmt (&bind_body, teams_stmt);
      location_t loc = gimple_location (teams_stmt);
      tree decl = builtin_decl_explicit (BUILT_IN_GOMP_TEAMS);
      gimple *call = gimple_build_call (decl, 2, num_teams, thread_limit);
      gimple_set_location (call, loc);
      gimple_seq_add_stmt (&bind_body, call);
    }

  /* Assemble the final sequence: body, reduction combiners, cleanups,
     and (unless phony) the closing OMP_RETURN.  */
  gimple_seq_add_seq (&bind_body, gimple_omp_body (teams_stmt));
  gimple_omp_set_body (teams_stmt, NULL);
  gimple_seq_add_seq (&bind_body, olist);
  gimple_seq_add_seq (&bind_body, dlist);
  if (!gimple_omp_teams_grid_phony (teams_stmt))
    gimple_seq_add_stmt (&bind_body, gimple_build_omp_return (true));
  gimple_bind_set_body (bind, bind_body);

  pop_gimplify_context (bind);

  gimple_bind_append_vars (bind, ctx->block_vars);
  BLOCK_VARS (block) = ctx->block_vars;
  if (BLOCK_VARS (block))
    TREE_USED (block) = 1;
}
10636
10637 /* Expand code within an artificial GIMPLE_OMP_GRID_BODY OMP construct. */
10638
10639 static void
10640 lower_omp_grid_body (gimple_stmt_iterator *gsi_p, omp_context *ctx)
10641 {
10642 gimple *stmt = gsi_stmt (*gsi_p);
10643 lower_omp (gimple_omp_body_ptr (stmt), ctx);
10644 gimple_seq_add_stmt (gimple_omp_body_ptr (stmt),
10645 gimple_build_omp_return (false));
10646 }
10647
10648
10649 /* Callback for lower_omp_1. Return non-NULL if *tp needs to be
10650 regimplified. If DATA is non-NULL, lower_omp_1 is outside
10651 of OMP context, but with task_shared_vars set. */
10652
10653 static tree
10654 lower_omp_regimplify_p (tree *tp, int *walk_subtrees,
10655 void *data)
10656 {
10657 tree t = *tp;
10658
10659 /* Any variable with DECL_VALUE_EXPR needs to be regimplified. */
10660 if (VAR_P (t) && data == NULL && DECL_HAS_VALUE_EXPR_P (t))
10661 return t;
10662
10663 if (task_shared_vars
10664 && DECL_P (t)
10665 && bitmap_bit_p (task_shared_vars, DECL_UID (t)))
10666 return t;
10667
10668 /* If a global variable has been privatized, TREE_CONSTANT on
10669 ADDR_EXPR might be wrong. */
10670 if (data == NULL && TREE_CODE (t) == ADDR_EXPR)
10671 recompute_tree_invariant_for_addr_expr (t);
10672
10673 *walk_subtrees = !IS_TYPE_OR_DECL_P (t);
10674 return NULL_TREE;
10675 }
10676
/* Data to be communicated between lower_omp_regimplify_operands and
   lower_omp_regimplify_operands_p.  */

struct lower_omp_regimplify_operands_data
{
  /* OMP context the statement being regimplified belongs to.  */
  omp_context *ctx;
  /* Pairs of <original DECL_VALUE_EXPR, decl> pushed by the walk so the
     caller can restore the value exprs afterwards.  */
  vec<tree> *decls;
};
10685
10686 /* Helper function for lower_omp_regimplify_operands. Find
10687 omp_member_access_dummy_var vars and adjust temporarily their
10688 DECL_VALUE_EXPRs if needed. */
10689
10690 static tree
10691 lower_omp_regimplify_operands_p (tree *tp, int *walk_subtrees,
10692 void *data)
10693 {
10694 tree t = omp_member_access_dummy_var (*tp);
10695 if (t)
10696 {
10697 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
10698 lower_omp_regimplify_operands_data *ldata
10699 = (lower_omp_regimplify_operands_data *) wi->info;
10700 tree o = maybe_lookup_decl (t, ldata->ctx);
10701 if (o != t)
10702 {
10703 ldata->decls->safe_push (DECL_VALUE_EXPR (*tp));
10704 ldata->decls->safe_push (*tp);
10705 tree v = unshare_and_remap (DECL_VALUE_EXPR (*tp), t, o);
10706 SET_DECL_VALUE_EXPR (*tp, v);
10707 }
10708 }
10709 *walk_subtrees = !IS_TYPE_OR_DECL_P (*tp);
10710 return NULL_TREE;
10711 }
10712
10713 /* Wrapper around gimple_regimplify_operands that adjusts DECL_VALUE_EXPRs
10714 of omp_member_access_dummy_var vars during regimplification. */
10715
10716 static void
10717 lower_omp_regimplify_operands (omp_context *ctx, gimple *stmt,
10718 gimple_stmt_iterator *gsi_p)
10719 {
10720 auto_vec<tree, 10> decls;
10721 if (ctx)
10722 {
10723 struct walk_stmt_info wi;
10724 memset (&wi, '\0', sizeof (wi));
10725 struct lower_omp_regimplify_operands_data data;
10726 data.ctx = ctx;
10727 data.decls = &decls;
10728 wi.info = &data;
10729 walk_gimple_op (stmt, lower_omp_regimplify_operands_p, &wi);
10730 }
10731 gimple_regimplify_operands (stmt, gsi_p);
10732 while (!decls.is_empty ())
10733 {
10734 tree t = decls.pop ();
10735 tree v = decls.pop ();
10736 SET_DECL_VALUE_EXPR (t, v);
10737 }
10738 }
10739
10740 static void
10741 lower_omp_1 (gimple_stmt_iterator *gsi_p, omp_context *ctx)
10742 {
10743 gimple *stmt = gsi_stmt (*gsi_p);
10744 struct walk_stmt_info wi;
10745 gcall *call_stmt;
10746
10747 if (gimple_has_location (stmt))
10748 input_location = gimple_location (stmt);
10749
10750 if (task_shared_vars)
10751 memset (&wi, '\0', sizeof (wi));
10752
10753 /* If we have issued syntax errors, avoid doing any heavy lifting.
10754 Just replace the OMP directives with a NOP to avoid
10755 confusing RTL expansion. */
10756 if (seen_error () && is_gimple_omp (stmt))
10757 {
10758 gsi_replace (gsi_p, gimple_build_nop (), true);
10759 return;
10760 }
10761
10762 switch (gimple_code (stmt))
10763 {
10764 case GIMPLE_COND:
10765 {
10766 gcond *cond_stmt = as_a <gcond *> (stmt);
10767 if ((ctx || task_shared_vars)
10768 && (walk_tree (gimple_cond_lhs_ptr (cond_stmt),
10769 lower_omp_regimplify_p,
10770 ctx ? NULL : &wi, NULL)
10771 || walk_tree (gimple_cond_rhs_ptr (cond_stmt),
10772 lower_omp_regimplify_p,
10773 ctx ? NULL : &wi, NULL)))
10774 lower_omp_regimplify_operands (ctx, cond_stmt, gsi_p);
10775 }
10776 break;
10777 case GIMPLE_CATCH:
10778 lower_omp (gimple_catch_handler_ptr (as_a <gcatch *> (stmt)), ctx);
10779 break;
10780 case GIMPLE_EH_FILTER:
10781 lower_omp (gimple_eh_filter_failure_ptr (stmt), ctx);
10782 break;
10783 case GIMPLE_TRY:
10784 lower_omp (gimple_try_eval_ptr (stmt), ctx);
10785 lower_omp (gimple_try_cleanup_ptr (stmt), ctx);
10786 break;
10787 case GIMPLE_TRANSACTION:
10788 lower_omp (gimple_transaction_body_ptr (as_a <gtransaction *> (stmt)),
10789 ctx);
10790 break;
10791 case GIMPLE_BIND:
10792 lower_omp (gimple_bind_body_ptr (as_a <gbind *> (stmt)), ctx);
10793 maybe_remove_omp_member_access_dummy_vars (as_a <gbind *> (stmt));
10794 break;
10795 case GIMPLE_OMP_PARALLEL:
10796 case GIMPLE_OMP_TASK:
10797 ctx = maybe_lookup_ctx (stmt);
10798 gcc_assert (ctx);
10799 if (ctx->cancellable)
10800 ctx->cancel_label = create_artificial_label (UNKNOWN_LOCATION);
10801 lower_omp_taskreg (gsi_p, ctx);
10802 break;
10803 case GIMPLE_OMP_FOR:
10804 ctx = maybe_lookup_ctx (stmt);
10805 gcc_assert (ctx);
10806 if (ctx->cancellable)
10807 ctx->cancel_label = create_artificial_label (UNKNOWN_LOCATION);
10808 lower_omp_for (gsi_p, ctx);
10809 break;
10810 case GIMPLE_OMP_SECTIONS:
10811 ctx = maybe_lookup_ctx (stmt);
10812 gcc_assert (ctx);
10813 if (ctx->cancellable)
10814 ctx->cancel_label = create_artificial_label (UNKNOWN_LOCATION);
10815 lower_omp_sections (gsi_p, ctx);
10816 break;
10817 case GIMPLE_OMP_SINGLE:
10818 ctx = maybe_lookup_ctx (stmt);
10819 gcc_assert (ctx);
10820 lower_omp_single (gsi_p, ctx);
10821 break;
10822 case GIMPLE_OMP_MASTER:
10823 ctx = maybe_lookup_ctx (stmt);
10824 gcc_assert (ctx);
10825 lower_omp_master (gsi_p, ctx);
10826 break;
10827 case GIMPLE_OMP_TASKGROUP:
10828 ctx = maybe_lookup_ctx (stmt);
10829 gcc_assert (ctx);
10830 lower_omp_taskgroup (gsi_p, ctx);
10831 break;
10832 case GIMPLE_OMP_ORDERED:
10833 ctx = maybe_lookup_ctx (stmt);
10834 gcc_assert (ctx);
10835 lower_omp_ordered (gsi_p, ctx);
10836 break;
10837 case GIMPLE_OMP_CRITICAL:
10838 ctx = maybe_lookup_ctx (stmt);
10839 gcc_assert (ctx);
10840 lower_omp_critical (gsi_p, ctx);
10841 break;
10842 case GIMPLE_OMP_ATOMIC_LOAD:
10843 if ((ctx || task_shared_vars)
10844 && walk_tree (gimple_omp_atomic_load_rhs_ptr (
10845 as_a <gomp_atomic_load *> (stmt)),
10846 lower_omp_regimplify_p, ctx ? NULL : &wi, NULL))
10847 lower_omp_regimplify_operands (ctx, stmt, gsi_p);
10848 break;
10849 case GIMPLE_OMP_TARGET:
10850 ctx = maybe_lookup_ctx (stmt);
10851 gcc_assert (ctx);
10852 lower_omp_target (gsi_p, ctx);
10853 break;
10854 case GIMPLE_OMP_TEAMS:
10855 ctx = maybe_lookup_ctx (stmt);
10856 gcc_assert (ctx);
10857 if (gimple_omp_teams_host (as_a <gomp_teams *> (stmt)))
10858 lower_omp_taskreg (gsi_p, ctx);
10859 else
10860 lower_omp_teams (gsi_p, ctx);
10861 break;
10862 case GIMPLE_OMP_GRID_BODY:
10863 ctx = maybe_lookup_ctx (stmt);
10864 gcc_assert (ctx);
10865 lower_omp_grid_body (gsi_p, ctx);
10866 break;
10867 case GIMPLE_CALL:
10868 tree fndecl;
10869 call_stmt = as_a <gcall *> (stmt);
10870 fndecl = gimple_call_fndecl (call_stmt);
10871 if (fndecl
10872 && fndecl_built_in_p (fndecl, BUILT_IN_NORMAL))
10873 switch (DECL_FUNCTION_CODE (fndecl))
10874 {
10875 case BUILT_IN_GOMP_BARRIER:
10876 if (ctx == NULL)
10877 break;
10878 /* FALLTHRU */
10879 case BUILT_IN_GOMP_CANCEL:
10880 case BUILT_IN_GOMP_CANCELLATION_POINT:
10881 omp_context *cctx;
10882 cctx = ctx;
10883 if (gimple_code (cctx->stmt) == GIMPLE_OMP_SECTION)
10884 cctx = cctx->outer;
10885 gcc_assert (gimple_call_lhs (call_stmt) == NULL_TREE);
10886 if (!cctx->cancellable)
10887 {
10888 if (DECL_FUNCTION_CODE (fndecl)
10889 == BUILT_IN_GOMP_CANCELLATION_POINT)
10890 {
10891 stmt = gimple_build_nop ();
10892 gsi_replace (gsi_p, stmt, false);
10893 }
10894 break;
10895 }
10896 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_GOMP_BARRIER)
10897 {
10898 fndecl = builtin_decl_explicit (BUILT_IN_GOMP_BARRIER_CANCEL);
10899 gimple_call_set_fndecl (call_stmt, fndecl);
10900 gimple_call_set_fntype (call_stmt, TREE_TYPE (fndecl));
10901 }
10902 tree lhs;
10903 lhs = create_tmp_var (TREE_TYPE (TREE_TYPE (fndecl)));
10904 gimple_call_set_lhs (call_stmt, lhs);
10905 tree fallthru_label;
10906 fallthru_label = create_artificial_label (UNKNOWN_LOCATION);
10907 gimple *g;
10908 g = gimple_build_label (fallthru_label);
10909 gsi_insert_after (gsi_p, g, GSI_SAME_STMT);
10910 g = gimple_build_cond (NE_EXPR, lhs,
10911 fold_convert (TREE_TYPE (lhs),
10912 boolean_false_node),
10913 cctx->cancel_label, fallthru_label);
10914 gsi_insert_after (gsi_p, g, GSI_SAME_STMT);
10915 break;
10916 default:
10917 break;
10918 }
10919 goto regimplify;
10920
10921 case GIMPLE_ASSIGN:
10922 for (omp_context *up = ctx; up; up = up->outer)
10923 {
10924 if (gimple_code (up->stmt) == GIMPLE_OMP_ORDERED
10925 || gimple_code (up->stmt) == GIMPLE_OMP_CRITICAL
10926 || gimple_code (up->stmt) == GIMPLE_OMP_TASKGROUP
10927 || gimple_code (up->stmt) == GIMPLE_OMP_SECTION
10928 || (gimple_code (up->stmt) == GIMPLE_OMP_TARGET
10929 && (gimple_omp_target_kind (up->stmt)
10930 == GF_OMP_TARGET_KIND_DATA)))
10931 continue;
10932 else if (!up->lastprivate_conditional_map)
10933 break;
10934 tree lhs = get_base_address (gimple_assign_lhs (stmt));
10935 if (TREE_CODE (lhs) == MEM_REF
10936 && DECL_P (TREE_OPERAND (lhs, 0))
10937 && TREE_CODE (TREE_TYPE (TREE_OPERAND (lhs,
10938 0))) == REFERENCE_TYPE)
10939 lhs = TREE_OPERAND (lhs, 0);
10940 if (DECL_P (lhs))
10941 if (tree *v = up->lastprivate_conditional_map->get (lhs))
10942 {
10943 tree clauses;
10944 if (up->combined_into_simd_safelen0)
10945 up = up->outer;
10946 if (gimple_code (up->stmt) == GIMPLE_OMP_FOR)
10947 clauses = gimple_omp_for_clauses (up->stmt);
10948 else
10949 clauses = gimple_omp_sections_clauses (up->stmt);
10950 tree c = omp_find_clause (clauses, OMP_CLAUSE__CONDTEMP_);
10951 if (!OMP_CLAUSE__CONDTEMP__ITER (c))
10952 c = omp_find_clause (OMP_CLAUSE_CHAIN (c),
10953 OMP_CLAUSE__CONDTEMP_);
10954 gcc_assert (OMP_CLAUSE__CONDTEMP__ITER (c));
10955 gimple *g = gimple_build_assign (*v, OMP_CLAUSE_DECL (c));
10956 gsi_insert_after (gsi_p, g, GSI_SAME_STMT);
10957 }
10958 }
10959 /* FALLTHRU */
10960
10961 default:
10962 regimplify:
10963 if ((ctx || task_shared_vars)
10964 && walk_gimple_op (stmt, lower_omp_regimplify_p,
10965 ctx ? NULL : &wi))
10966 {
10967 /* Just remove clobbers, this should happen only if we have
10968 "privatized" local addressable variables in SIMD regions,
10969 the clobber isn't needed in that case and gimplifying address
10970 of the ARRAY_REF into a pointer and creating MEM_REF based
10971 clobber would create worse code than we get with the clobber
10972 dropped. */
10973 if (gimple_clobber_p (stmt))
10974 {
10975 gsi_replace (gsi_p, gimple_build_nop (), true);
10976 break;
10977 }
10978 lower_omp_regimplify_operands (ctx, stmt, gsi_p);
10979 }
10980 break;
10981 }
10982 }
10983
10984 static void
10985 lower_omp (gimple_seq *body, omp_context *ctx)
10986 {
10987 location_t saved_location = input_location;
10988 gimple_stmt_iterator gsi;
10989 for (gsi = gsi_start (*body); !gsi_end_p (gsi); gsi_next (&gsi))
10990 lower_omp_1 (&gsi, ctx);
10991 /* During gimplification, we haven't folded statments inside offloading
10992 or taskreg regions (gimplify.c:maybe_fold_stmt); do that now. */
10993 if (target_nesting_level || taskreg_nesting_level)
10994 for (gsi = gsi_start (*body); !gsi_end_p (gsi); gsi_next (&gsi))
10995 fold_stmt (&gsi);
10996 input_location = saved_location;
10997 }
10998
/* Main entry point.  Scan the whole function body for OMP constructs,
   then lower them to explicit runtime calls and outlined bodies.
   Returns 0 (no TODO flags).  */

static unsigned int
execute_lower_omp (void)
{
  gimple_seq body;
  int i;
  omp_context *ctx;

  /* This pass always runs, to provide PROP_gimple_lomp.
     But often, there is nothing to do.  */
  if (flag_openacc == 0 && flag_openmp == 0
      && flag_openmp_simd == 0)
    return 0;

  /* Map from OMP statements to their omp_context, populated by scan_omp
     and consumed by the lower_omp_* routines.  */
  all_contexts = splay_tree_new (splay_tree_compare_pointers, 0,
				 delete_omp_context);

  body = gimple_body (current_function_decl);

  /* If HSA offloading was requested, reshape suitable target regions
     into grid form before scanning.  */
  if (hsa_gen_requested_p ())
    omp_grid_gridify_all_targets (&body);

  scan_omp (&body, NULL);
  gcc_assert (taskreg_nesting_level == 0);
  /* Finish the record types of parallel/task regions only after the whole
     function has been scanned, since nested regions add fields.  */
  FOR_EACH_VEC_ELT (taskreg_contexts, i, ctx)
    finish_taskreg_scan (ctx);
  taskreg_contexts.release ();

  if (all_contexts->root)
    {
      /* Regimplifying references to task-shared variables may create new
	 temporaries, which needs an active gimplify context.  */
      if (task_shared_vars)
	push_gimplify_context ();
      lower_omp (&body, NULL);
      if (task_shared_vars)
	pop_gimplify_context (NULL);
    }

  if (all_contexts)
    {
      splay_tree_delete (all_contexts);
      all_contexts = NULL;
    }
  BITMAP_FREE (task_shared_vars);

  /* If current function is a method, remove artificial dummy VAR_DECL created
     for non-static data member privatization, they aren't needed for
     debuginfo nor anything else, have been already replaced everywhere in the
     IL and cause problems with LTO.  */
  if (DECL_ARGUMENTS (current_function_decl)
      && DECL_ARTIFICIAL (DECL_ARGUMENTS (current_function_decl))
      && (TREE_CODE (TREE_TYPE (DECL_ARGUMENTS (current_function_decl)))
	  == POINTER_TYPE))
    remove_member_access_dummy_vars (DECL_INITIAL (current_function_decl));
  return 0;
}
11055
namespace {

/* Pass metadata for "omplower": requires any GIMPLE and provides the
   lowered-OMP properties, including the device-lowered one.  */
const pass_data pass_data_lower_omp =
{
  GIMPLE_PASS, /* type */
  "omplower", /* name */
  OPTGROUP_OMP, /* optinfo_flags */
  TV_NONE, /* tv_id */
  PROP_gimple_any, /* properties_required */
  PROP_gimple_lomp | PROP_gimple_lomp_dev, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

/* The OMP lowering pass.  It has no gate: it must always run so that the
   PROP_gimple_lomp properties are provided (execute_lower_omp itself
   returns early when there is nothing to do).  */
class pass_lower_omp : public gimple_opt_pass
{
public:
  pass_lower_omp (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_lower_omp, ctxt)
  {}

  /* opt_pass methods: */
  virtual unsigned int execute (function *) { return execute_lower_omp (); }

}; // class pass_lower_omp

} // anon namespace
11084
/* Create an instance of the "omplower" pass for the pass manager.  */

gimple_opt_pass *
make_pass_lower_omp (gcc::context *ctxt)
{
  return new pass_lower_omp (ctxt);
}
11090 \f
/* The following is a utility to diagnose structured block violations.
   It is not part of the "omplower" pass, as that's invoked too late.  It
   should be invoked by the respective front ends after gimplification.  */

/* Map from each LABEL_DECL in the function body to the innermost OMP
   construct containing it (a gimple stmt, or NULL outside any construct);
   built by diagnose_sb_1, queried by diagnose_sb_2.  */
static splay_tree all_labels;
11096
/* Check for mismatched contexts and generate an error if needed.  Return
   true if an error is detected.  BRANCH_CTX and LABEL_CTX are the
   innermost OMP constructs (or NULL) enclosing the branch at *GSI_P and
   its destination label, respectively; on error the branch is replaced
   by a GIMPLE_NOP so later passes do not see the invalid jump.  */

static bool
diagnose_sb_0 (gimple_stmt_iterator *gsi_p,
	       gimple *branch_ctx, gimple *label_ctx)
{
  gcc_checking_assert (!branch_ctx || is_gimple_omp (branch_ctx));
  gcc_checking_assert (!label_ctx || is_gimple_omp (label_ctx));

  /* Same (possibly NULL) context on both ends: the branch is fine.  */
  if (label_ctx == branch_ctx)
    return false;

  const char* kind = NULL;

  if (flag_openacc)
    {
      if ((branch_ctx && is_gimple_omp_oacc (branch_ctx))
	  || (label_ctx && is_gimple_omp_oacc (label_ctx)))
	{
	  gcc_checking_assert (kind == NULL);
	  kind = "OpenACC";
	}
    }
  if (kind == NULL)
    {
      gcc_checking_assert (flag_openmp || flag_openmp_simd);
      kind = "OpenMP";
    }

  /* Previously we kept track of the label's entire context in diagnose_sb_[12]
     so we could traverse it and issue a correct "exit" or "enter" error
     message upon a structured block violation.

     We built the context by building a list with tree_cons'ing, but there is
     no easy counterpart in gimple tuples.  It seems like far too much work
     for issuing exit/enter error messages.  If someone really misses the
     distinct error message... patches welcome.  */

#if 0
  /* Try to avoid confusing the user by producing an error message
     with correct "exit" or "enter" verbiage.  We prefer "exit"
     unless we can show that LABEL_CTX is nested within BRANCH_CTX.  */
  if (branch_ctx == NULL)
    exit_p = false;
  else
    {
      while (label_ctx)
	{
	  if (TREE_VALUE (label_ctx) == branch_ctx)
	    {
	      exit_p = false;
	      break;
	    }
	  label_ctx = TREE_CHAIN (label_ctx);
	}
    }

  if (exit_p)
    error ("invalid exit from %s structured block", kind);
  else
    error ("invalid entry to %s structured block", kind);
#endif

  /* If it's obvious we have an invalid entry, be specific about the error.  */
  if (branch_ctx == NULL)
    error ("invalid entry to %s structured block", kind);
  else
    {
      /* Otherwise, be vague and lazy, but efficient.  */
      error ("invalid branch to/from %s structured block", kind);
    }

  gsi_replace (gsi_p, gimple_build_nop (), false);
  return true;
}
11173
11174 /* Pass 1: Create a minimal tree of structured blocks, and record
11175 where each label is found. */
11176
11177 static tree
11178 diagnose_sb_1 (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
11179 struct walk_stmt_info *wi)
11180 {
11181 gimple *context = (gimple *) wi->info;
11182 gimple *inner_context;
11183 gimple *stmt = gsi_stmt (*gsi_p);
11184
11185 *handled_ops_p = true;
11186
11187 switch (gimple_code (stmt))
11188 {
11189 WALK_SUBSTMTS;
11190
11191 case GIMPLE_OMP_PARALLEL:
11192 case GIMPLE_OMP_TASK:
11193 case GIMPLE_OMP_SECTIONS:
11194 case GIMPLE_OMP_SINGLE:
11195 case GIMPLE_OMP_SECTION:
11196 case GIMPLE_OMP_MASTER:
11197 case GIMPLE_OMP_ORDERED:
11198 case GIMPLE_OMP_CRITICAL:
11199 case GIMPLE_OMP_TARGET:
11200 case GIMPLE_OMP_TEAMS:
11201 case GIMPLE_OMP_TASKGROUP:
11202 /* The minimal context here is just the current OMP construct. */
11203 inner_context = stmt;
11204 wi->info = inner_context;
11205 walk_gimple_seq (gimple_omp_body (stmt), diagnose_sb_1, NULL, wi);
11206 wi->info = context;
11207 break;
11208
11209 case GIMPLE_OMP_FOR:
11210 inner_context = stmt;
11211 wi->info = inner_context;
11212 /* gimple_omp_for_{index,initial,final} are all DECLs; no need to
11213 walk them. */
11214 walk_gimple_seq (gimple_omp_for_pre_body (stmt),
11215 diagnose_sb_1, NULL, wi);
11216 walk_gimple_seq (gimple_omp_body (stmt), diagnose_sb_1, NULL, wi);
11217 wi->info = context;
11218 break;
11219
11220 case GIMPLE_LABEL:
11221 splay_tree_insert (all_labels,
11222 (splay_tree_key) gimple_label_label (
11223 as_a <glabel *> (stmt)),
11224 (splay_tree_value) context);
11225 break;
11226
11227 default:
11228 break;
11229 }
11230
11231 return NULL_TREE;
11232 }
11233
11234 /* Pass 2: Check each branch and see if its context differs from that of
11235 the destination label's context. */
11236
11237 static tree
11238 diagnose_sb_2 (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
11239 struct walk_stmt_info *wi)
11240 {
11241 gimple *context = (gimple *) wi->info;
11242 splay_tree_node n;
11243 gimple *stmt = gsi_stmt (*gsi_p);
11244
11245 *handled_ops_p = true;
11246
11247 switch (gimple_code (stmt))
11248 {
11249 WALK_SUBSTMTS;
11250
11251 case GIMPLE_OMP_PARALLEL:
11252 case GIMPLE_OMP_TASK:
11253 case GIMPLE_OMP_SECTIONS:
11254 case GIMPLE_OMP_SINGLE:
11255 case GIMPLE_OMP_SECTION:
11256 case GIMPLE_OMP_MASTER:
11257 case GIMPLE_OMP_ORDERED:
11258 case GIMPLE_OMP_CRITICAL:
11259 case GIMPLE_OMP_TARGET:
11260 case GIMPLE_OMP_TEAMS:
11261 case GIMPLE_OMP_TASKGROUP:
11262 wi->info = stmt;
11263 walk_gimple_seq_mod (gimple_omp_body_ptr (stmt), diagnose_sb_2, NULL, wi);
11264 wi->info = context;
11265 break;
11266
11267 case GIMPLE_OMP_FOR:
11268 wi->info = stmt;
11269 /* gimple_omp_for_{index,initial,final} are all DECLs; no need to
11270 walk them. */
11271 walk_gimple_seq_mod (gimple_omp_for_pre_body_ptr (stmt),
11272 diagnose_sb_2, NULL, wi);
11273 walk_gimple_seq_mod (gimple_omp_body_ptr (stmt), diagnose_sb_2, NULL, wi);
11274 wi->info = context;
11275 break;
11276
11277 case GIMPLE_COND:
11278 {
11279 gcond *cond_stmt = as_a <gcond *> (stmt);
11280 tree lab = gimple_cond_true_label (cond_stmt);
11281 if (lab)
11282 {
11283 n = splay_tree_lookup (all_labels,
11284 (splay_tree_key) lab);
11285 diagnose_sb_0 (gsi_p, context,
11286 n ? (gimple *) n->value : NULL);
11287 }
11288 lab = gimple_cond_false_label (cond_stmt);
11289 if (lab)
11290 {
11291 n = splay_tree_lookup (all_labels,
11292 (splay_tree_key) lab);
11293 diagnose_sb_0 (gsi_p, context,
11294 n ? (gimple *) n->value : NULL);
11295 }
11296 }
11297 break;
11298
11299 case GIMPLE_GOTO:
11300 {
11301 tree lab = gimple_goto_dest (stmt);
11302 if (TREE_CODE (lab) != LABEL_DECL)
11303 break;
11304
11305 n = splay_tree_lookup (all_labels, (splay_tree_key) lab);
11306 diagnose_sb_0 (gsi_p, context, n ? (gimple *) n->value : NULL);
11307 }
11308 break;
11309
11310 case GIMPLE_SWITCH:
11311 {
11312 gswitch *switch_stmt = as_a <gswitch *> (stmt);
11313 unsigned int i;
11314 for (i = 0; i < gimple_switch_num_labels (switch_stmt); ++i)
11315 {
11316 tree lab = CASE_LABEL (gimple_switch_label (switch_stmt, i));
11317 n = splay_tree_lookup (all_labels, (splay_tree_key) lab);
11318 if (n && diagnose_sb_0 (gsi_p, context, (gimple *) n->value))
11319 break;
11320 }
11321 }
11322 break;
11323
11324 case GIMPLE_RETURN:
11325 diagnose_sb_0 (gsi_p, context, NULL);
11326 break;
11327
11328 default:
11329 break;
11330 }
11331
11332 return NULL_TREE;
11333 }
11334
11335 static unsigned int
11336 diagnose_omp_structured_block_errors (void)
11337 {
11338 struct walk_stmt_info wi;
11339 gimple_seq body = gimple_body (current_function_decl);
11340
11341 all_labels = splay_tree_new (splay_tree_compare_pointers, 0, 0);
11342
11343 memset (&wi, 0, sizeof (wi));
11344 walk_gimple_seq (body, diagnose_sb_1, NULL, &wi);
11345
11346 memset (&wi, 0, sizeof (wi));
11347 wi.want_locations = true;
11348 walk_gimple_seq_mod (&body, diagnose_sb_2, NULL, &wi);
11349
11350 gimple_set_body (current_function_decl, body);
11351
11352 splay_tree_delete (all_labels);
11353 all_labels = NULL;
11354
11355 return 0;
11356 }
11357
namespace {

/* Pass metadata for "*diagnose_omp_blocks" (the leading '*' keeps it out
   of -fdump-passes listings); runs on any GIMPLE, provides nothing.  */
const pass_data pass_data_diagnose_omp_blocks =
{
  GIMPLE_PASS, /* type */
  "*diagnose_omp_blocks", /* name */
  OPTGROUP_OMP, /* optinfo_flags */
  TV_NONE, /* tv_id */
  PROP_gimple_any, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

/* Diagnostic-only pass: gated on any of the OpenACC/OpenMP flags, since
   there is nothing to check otherwise.  */
class pass_diagnose_omp_blocks : public gimple_opt_pass
{
public:
  pass_diagnose_omp_blocks (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_diagnose_omp_blocks, ctxt)
  {}

  /* opt_pass methods: */
  virtual bool gate (function *)
  {
    return flag_openacc || flag_openmp || flag_openmp_simd;
  }
  virtual unsigned int execute (function *)
  {
    return diagnose_omp_structured_block_errors ();
  }

}; // class pass_diagnose_omp_blocks

} // anon namespace
11393
/* Create an instance of the "*diagnose_omp_blocks" pass for the pass
   manager.  */

gimple_opt_pass *
make_pass_diagnose_omp_blocks (gcc::context *ctxt)
{
  return new pass_diagnose_omp_blocks (ctxt);
}
11399 \f
11400
11401 #include "gt-omp-low.h"