/* Lowering pass for OMP directives.  Converts OMP directives into explicit
   calls to the runtime library (libgomp), data marshalling to implement data
   sharing and copying clauses, offloading to accelerators, and more.

   Contributed by Diego Novillo <dnovillo@redhat.com>

   Copyright (C) 2005-2019 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "target.h"
#include "tree.h"
#include "gimple.h"
#include "tree-pass.h"
#include "ssa.h"
#include "cgraph.h"
#include "pretty-print.h"
#include "diagnostic-core.h"
#include "fold-const.h"
#include "stor-layout.h"
#include "internal-fn.h"
#include "gimple-fold.h"
#include "gimplify.h"
#include "gimple-iterator.h"
#include "gimplify-me.h"
#include "gimple-walk.h"
#include "tree-iterator.h"
#include "tree-inline.h"
#include "langhooks.h"
#include "tree-dfa.h"
#include "tree-ssa.h"
#include "splay-tree.h"
#include "omp-general.h"
#include "omp-low.h"
#include "omp-grid.h"
#include "gimple-low.h"
#include "symbol-summary.h"
#include "tree-nested.h"
#include "context.h"
#include "gomp-constants.h"
#include "gimple-pretty-print.h"
#include "hsa-common.h"
#include "stringpool.h"
#include "attribs.h"

/* Lowering of OMP parallel and workshare constructs proceeds in two
   phases.  The first phase scans the function looking for OMP statements
   and then for variables that must be replaced to satisfy data sharing
   clauses.  The second phase expands code for the constructs, as well as
   re-gimplifying things when variables have been replaced with complex
   expressions.

   Final code generation is done by pass_expand_omp.  The flowgraph is
   scanned for regions which are then moved to a new function, to be
   invoked by the thread library, or offloaded.  */
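
/* Illustrative sketch (not part of the pass): for an input such as

     void foo (int n, int *a)
     {
     #pragma omp parallel shared(a) firstprivate(n)
       for (int i = 0; i < n; i++)
         a[i] = i;
     }

   the region body is outlined into a child function taking a single
   pointer to a marshalling record, conceptually:

     struct .omp_data_s { int n; int *a; };

     void foo._omp_fn.0 (struct .omp_data_s *.omp_data_i)
     {
       for (int i = 0; i < .omp_data_i->n; i++)
         .omp_data_i->a[i] = i;
     }

   and the construct itself is replaced by a runtime call along the
   lines of GOMP_parallel (foo._omp_fn.0, &.omp_data_o, 0, 0).  The
   exact record layout and call arguments are decided between this
   file and pass_expand_omp; the names above are only illustrative.  */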

/* Context structure.  Used to store information about each parallel
   directive in the code.  */

struct omp_context
{
  /* This field must be at the beginning, as we do "inheritance": Some
     callback functions for tree-inline.c (e.g., omp_copy_decl)
     receive a copy_body_data pointer that is upcast to an
     omp_context pointer.  */
  copy_body_data cb;

  /* The tree of contexts corresponding to the encountered constructs.  */
  struct omp_context *outer;
  gimple *stmt;

  /* Map variables to fields in a structure that allows communication
     between sending and receiving threads.  */
  splay_tree field_map;
  tree record_type;
  tree sender_decl;
  tree receiver_decl;

  /* These are used just by task contexts, if the task firstprivate fn is
     needed.  srecord_type is used to communicate from the thread that
     encountered the task construct to the task firstprivate fn;
     record_type is allocated by GOMP_task, initialized by the task
     firstprivate fn, and passed to the task body fn.  */
  splay_tree sfield_map;
  tree srecord_type;

  /* A chain of variables to add to the top-level block surrounding the
     construct.  In the case of a parallel, this is in the child function.  */
  tree block_vars;

  /* Label to which GOMP_cancel{,lation_point} and explicit and implicit
     barriers should jump during the omplower pass.  */
  tree cancel_label;

  /* The sibling GIMPLE_OMP_FOR simd with _simt_ clause or NULL
     otherwise.  */
  gimple *simt_stmt;

  /* For task reductions registered in this context, a vector containing
     the length of the private copies block (if constant, otherwise NULL)
     and then offsets (if constant, otherwise NULL) for each entry.  */
  vec<tree> task_reductions;

  /* A hash map from the reduction clauses to the registered array
     elts.  */
  hash_map<tree, unsigned> *task_reduction_map;

  /* And a hash map from the lastprivate(conditional:) variables to their
     corresponding tracking loop iteration variables.  */
  hash_map<tree, tree> *lastprivate_conditional_map;

  /* Nesting depth of this context.  Used to beautify error messages
     about invalid gotos.  The outermost ctx is depth 1, with depth 0
     being reserved for the main body of the function.  */
  int depth;

  /* True if this parallel directive is nested within another.  */
  bool is_nested;

  /* True if this construct can be cancelled.  */
  bool cancellable;

  /* True if lower_omp_1 should look up lastprivate conditional in parent
     context.  */
  bool combined_into_simd_safelen0;

  /* True if there is a nested scan context with an inclusive clause.  */
  bool scan_inclusive;
};

static splay_tree all_contexts;
static int taskreg_nesting_level;
static int target_nesting_level;
static bitmap task_shared_vars;
static vec<omp_context *> taskreg_contexts;

static void scan_omp (gimple_seq *, omp_context *);
static tree scan_omp_1_op (tree *, int *, void *);

#define WALK_SUBSTMTS  \
    case GIMPLE_BIND: \
    case GIMPLE_TRY: \
    case GIMPLE_CATCH: \
    case GIMPLE_EH_FILTER: \
    case GIMPLE_TRANSACTION: \
      /* The sub-statements for these should be walked.  */ \
      *handled_ops_p = false; \
      break;

/* Return true if CTX corresponds to an oacc parallel region.  */

static bool
is_oacc_parallel (omp_context *ctx)
{
  enum gimple_code outer_type = gimple_code (ctx->stmt);
  return ((outer_type == GIMPLE_OMP_TARGET)
          && (gimple_omp_target_kind (ctx->stmt)
              == GF_OMP_TARGET_KIND_OACC_PARALLEL));
}

/* Return true if CTX corresponds to an oacc kernels region.  */

static bool
is_oacc_kernels (omp_context *ctx)
{
  enum gimple_code outer_type = gimple_code (ctx->stmt);
  return ((outer_type == GIMPLE_OMP_TARGET)
          && (gimple_omp_target_kind (ctx->stmt)
              == GF_OMP_TARGET_KIND_OACC_KERNELS));
}

/* If DECL is the artificial dummy VAR_DECL created for non-static
   data member privatization, return the underlying "this" parameter,
   otherwise return NULL.  */

tree
omp_member_access_dummy_var (tree decl)
{
  if (!VAR_P (decl)
      || !DECL_ARTIFICIAL (decl)
      || !DECL_IGNORED_P (decl)
      || !DECL_HAS_VALUE_EXPR_P (decl)
      || !lang_hooks.decls.omp_disregard_value_expr (decl, false))
    return NULL_TREE;

  tree v = DECL_VALUE_EXPR (decl);
  if (TREE_CODE (v) != COMPONENT_REF)
    return NULL_TREE;

  while (1)
    switch (TREE_CODE (v))
      {
      case COMPONENT_REF:
      case MEM_REF:
      case INDIRECT_REF:
      CASE_CONVERT:
      case POINTER_PLUS_EXPR:
        v = TREE_OPERAND (v, 0);
        continue;
      case PARM_DECL:
        if (DECL_CONTEXT (v) == current_function_decl
            && DECL_ARTIFICIAL (v)
            && TREE_CODE (TREE_TYPE (v)) == POINTER_TYPE)
          return v;
        return NULL_TREE;
      default:
        return NULL_TREE;
      }
}
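
/* Example (C++, illustrative): when a non-static data member M is
   privatized inside a member function, the front end creates an
   artificial VAR_DECL whose DECL_VALUE_EXPR is a COMPONENT_REF such
   as ((T *) this)->m.  The loop above peels COMPONENT_REFs, MEM_REFs,
   conversions and pointer arithmetic until it reaches the artificial
   "this" PARM_DECL, which is what gets returned.  */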

/* Helper for unshare_and_remap, called through walk_tree.  */

static tree
unshare_and_remap_1 (tree *tp, int *walk_subtrees, void *data)
{
  tree *pair = (tree *) data;
  if (*tp == pair[0])
    {
      *tp = unshare_expr (pair[1]);
      *walk_subtrees = 0;
    }
  else if (IS_TYPE_OR_DECL_P (*tp))
    *walk_subtrees = 0;
  return NULL_TREE;
}

/* Return unshare_expr (X) with all occurrences of FROM
   replaced with TO.  */

static tree
unshare_and_remap (tree x, tree from, tree to)
{
  tree pair[2] = { from, to };
  x = unshare_expr (x);
  walk_tree (&x, unshare_and_remap_1, pair, NULL);
  return x;
}
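
/* E.g. (illustrative) unshare_and_remap (this->m, this, o) yields
   o->m on a freshly unshared copy of the tree; build_outer_var_ref
   below uses this to rewrite a member-access dummy var's
   DECL_VALUE_EXPR against the outer context's remapping of "this".  */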

/* Convenience function for calling scan_omp_1_op on tree operands.  */

static inline tree
scan_omp_op (tree *tp, omp_context *ctx)
{
  struct walk_stmt_info wi;

  memset (&wi, 0, sizeof (wi));
  wi.info = ctx;
  wi.want_locations = true;

  return walk_tree (tp, scan_omp_1_op, &wi, NULL);
}

static void lower_omp (gimple_seq *, omp_context *);
static tree lookup_decl_in_outer_ctx (tree, omp_context *);
static tree maybe_lookup_decl_in_outer_ctx (tree, omp_context *);

/* Return true if CTX is for an omp parallel.  */

static inline bool
is_parallel_ctx (omp_context *ctx)
{
  return gimple_code (ctx->stmt) == GIMPLE_OMP_PARALLEL;
}


/* Return true if CTX is for an omp task.  */

static inline bool
is_task_ctx (omp_context *ctx)
{
  return gimple_code (ctx->stmt) == GIMPLE_OMP_TASK;
}


/* Return true if CTX is for an omp taskloop.  */

static inline bool
is_taskloop_ctx (omp_context *ctx)
{
  return gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
         && gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_TASKLOOP;
}


/* Return true if CTX is for a host omp teams.  */

static inline bool
is_host_teams_ctx (omp_context *ctx)
{
  return gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS
         && gimple_omp_teams_host (as_a <gomp_teams *> (ctx->stmt));
}

/* Return true if CTX is for an omp parallel or omp task or host omp teams
   (the last one is strictly not a task region in OpenMP speak, but we
   need to treat it similarly).  */

static inline bool
is_taskreg_ctx (omp_context *ctx)
{
  return is_parallel_ctx (ctx) || is_task_ctx (ctx) || is_host_teams_ctx (ctx);
}

/* Return true if EXPR is variable sized.  */

static inline bool
is_variable_sized (const_tree expr)
{
  return !TREE_CONSTANT (TYPE_SIZE_UNIT (TREE_TYPE (expr)));
}

/* Look up variables.  The "maybe" form allows the variable not to have
   been entered; otherwise we assert that it must have been.  */

static inline tree
lookup_decl (tree var, omp_context *ctx)
{
  tree *n = ctx->cb.decl_map->get (var);
  return *n;
}

static inline tree
maybe_lookup_decl (const_tree var, omp_context *ctx)
{
  tree *n = ctx->cb.decl_map->get (const_cast<tree> (var));
  return n ? *n : NULL_TREE;
}

static inline tree
lookup_field (tree var, omp_context *ctx)
{
  splay_tree_node n;
  n = splay_tree_lookup (ctx->field_map, (splay_tree_key) var);
  return (tree) n->value;
}

static inline tree
lookup_sfield (splay_tree_key key, omp_context *ctx)
{
  splay_tree_node n;
  n = splay_tree_lookup (ctx->sfield_map
                         ? ctx->sfield_map : ctx->field_map, key);
  return (tree) n->value;
}

static inline tree
lookup_sfield (tree var, omp_context *ctx)
{
  return lookup_sfield ((splay_tree_key) var, ctx);
}

static inline tree
maybe_lookup_field (splay_tree_key key, omp_context *ctx)
{
  splay_tree_node n;
  n = splay_tree_lookup (ctx->field_map, key);
  return n ? (tree) n->value : NULL_TREE;
}

static inline tree
maybe_lookup_field (tree var, omp_context *ctx)
{
  return maybe_lookup_field ((splay_tree_key) var, ctx);
}

/* Return true if DECL should be copied by pointer.  SHARED_CTX is
   the parallel context if DECL is to be shared.  */

static bool
use_pointer_for_field (tree decl, omp_context *shared_ctx)
{
  if (AGGREGATE_TYPE_P (TREE_TYPE (decl))
      || TYPE_ATOMIC (TREE_TYPE (decl)))
    return true;

  /* We can only use copy-in/copy-out semantics for shared variables
     when we know the value is not accessible from an outer scope.  */
  if (shared_ctx)
    {
      gcc_assert (!is_gimple_omp_oacc (shared_ctx->stmt));

      /* ??? Trivially accessible from anywhere.  But why would we even
         be passing an address in this case?  Should we simply assert
         this to be false, or should we have a cleanup pass that removes
         these from the list of mappings?  */
      if (is_global_var (maybe_lookup_decl_in_outer_ctx (decl, shared_ctx)))
        return true;

      /* For variables with DECL_HAS_VALUE_EXPR_P set, we cannot tell
         without analyzing the expression whether or not its location
         is accessible to anyone else.  In the case of nested parallel
         regions it certainly may be.  */
      if (TREE_CODE (decl) != RESULT_DECL && DECL_HAS_VALUE_EXPR_P (decl))
        return true;

      /* Do not use copy-in/copy-out for variables that have their
         address taken.  */
      if (TREE_ADDRESSABLE (decl))
        return true;

      /* lower_send_shared_vars only uses copy-in, but not copy-out,
         for these.  */
      if (TREE_READONLY (decl)
          || ((TREE_CODE (decl) == RESULT_DECL
               || TREE_CODE (decl) == PARM_DECL)
              && DECL_BY_REFERENCE (decl)))
        return false;

      /* Disallow copy-in/out in nested parallel if
         decl is shared in outer parallel, otherwise
         each thread could store the shared variable
         in its own copy-in location, making the
         variable no longer really shared.  */
      if (shared_ctx->is_nested)
        {
          omp_context *up;

          for (up = shared_ctx->outer; up; up = up->outer)
            if (is_taskreg_ctx (up) && maybe_lookup_decl (decl, up))
              break;

          if (up)
            {
              tree c;

              for (c = gimple_omp_taskreg_clauses (up->stmt);
                   c; c = OMP_CLAUSE_CHAIN (c))
                if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED
                    && OMP_CLAUSE_DECL (c) == decl)
                  break;

              if (c)
                goto maybe_mark_addressable_and_ret;
            }
        }

      /* For tasks avoid using copy-in/out.  As tasks can be deferred
         or executed in a different thread, the task need not have
         terminated by the time GOMP_task returns.  */
      if (is_task_ctx (shared_ctx))
        {
          tree outer;
        maybe_mark_addressable_and_ret:
          outer = maybe_lookup_decl_in_outer_ctx (decl, shared_ctx);
          if (is_gimple_reg (outer) && !omp_member_access_dummy_var (outer))
            {
              /* Taking address of OUTER in lower_send_shared_vars
                 might need regimplification of everything that uses the
                 variable.  */
              if (!task_shared_vars)
                task_shared_vars = BITMAP_ALLOC (NULL);
              bitmap_set_bit (task_shared_vars, DECL_UID (outer));
              TREE_ADDRESSABLE (outer) = 1;
            }
          return true;
        }
    }

  return false;
}
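
/* Illustrative consequences of the rules above: a plain local scalar
   shared on a parallel gets copy-in/copy-out (the record holds the
   value), while an addressable variable, an aggregate, or anything
   shared on a task is passed by pointer; in the task case the outer
   variable is additionally forced addressable, since the task may
   still be running after GOMP_task returns.  */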

/* Construct a new automatic decl similar to VAR.  */

static tree
omp_copy_decl_2 (tree var, tree name, tree type, omp_context *ctx)
{
  tree copy = copy_var_decl (var, name, type);

  DECL_CONTEXT (copy) = current_function_decl;
  DECL_CHAIN (copy) = ctx->block_vars;
  /* If VAR is listed in task_shared_vars, it means it wasn't
     originally addressable and is addressable only because a task
     needs to take its address.  But we don't need to take the address
     of privatized copies of that var.  */
  if (TREE_ADDRESSABLE (var)
      && task_shared_vars
      && bitmap_bit_p (task_shared_vars, DECL_UID (var)))
    TREE_ADDRESSABLE (copy) = 0;
  ctx->block_vars = copy;

  return copy;
}

static tree
omp_copy_decl_1 (tree var, omp_context *ctx)
{
  return omp_copy_decl_2 (var, DECL_NAME (var), TREE_TYPE (var), ctx);
}

/* Build COMPONENT_REF and set TREE_THIS_VOLATILE and TREE_READONLY on it
   as appropriate.  */

static tree
omp_build_component_ref (tree obj, tree field)
{
  tree ret = build3 (COMPONENT_REF, TREE_TYPE (field), obj, field, NULL);
  if (TREE_THIS_VOLATILE (field))
    TREE_THIS_VOLATILE (ret) |= 1;
  if (TREE_READONLY (field))
    TREE_READONLY (ret) |= 1;
  return ret;
}

/* Build tree nodes to access the field for VAR on the receiver side.  */

static tree
build_receiver_ref (tree var, bool by_ref, omp_context *ctx)
{
  tree x, field = lookup_field (var, ctx);

  /* If the receiver record type was remapped in the child function,
     remap the field into the new record type.  */
  x = maybe_lookup_field (field, ctx);
  if (x != NULL)
    field = x;

  x = build_simple_mem_ref (ctx->receiver_decl);
  TREE_THIS_NOTRAP (x) = 1;
  x = omp_build_component_ref (x, field);
  if (by_ref)
    {
      x = build_simple_mem_ref (x);
      TREE_THIS_NOTRAP (x) = 1;
    }

  return x;
}
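
/* The generated reference is (*.omp_data_i).var, or *(*.omp_data_i).var
   when the value was sent by pointer (BY_REF).  The MEM_REFs are marked
   TREE_THIS_NOTRAP because the record pointer handed to the child
   function is known to be valid.  */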

/* Build tree nodes to access VAR in the scope outer to CTX.  In the case
   of a parallel, this is a component reference; for workshare constructs
   this is some variable.  */

static tree
build_outer_var_ref (tree var, omp_context *ctx,
                     enum omp_clause_code code = OMP_CLAUSE_ERROR)
{
  tree x;
  omp_context *outer = ctx->outer;
  while (outer && gimple_code (outer->stmt) == GIMPLE_OMP_TASKGROUP)
    outer = outer->outer;

  if (is_global_var (maybe_lookup_decl_in_outer_ctx (var, ctx)))
    x = var;
  else if (is_variable_sized (var))
    {
      x = TREE_OPERAND (DECL_VALUE_EXPR (var), 0);
      x = build_outer_var_ref (x, ctx, code);
      x = build_simple_mem_ref (x);
    }
  else if (is_taskreg_ctx (ctx))
    {
      bool by_ref = use_pointer_for_field (var, NULL);
      x = build_receiver_ref (var, by_ref, ctx);
    }
  else if ((gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
            && gimple_omp_for_kind (ctx->stmt) & GF_OMP_FOR_SIMD)
           || (code == OMP_CLAUSE_PRIVATE
               && (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
                   || gimple_code (ctx->stmt) == GIMPLE_OMP_SECTIONS
                   || gimple_code (ctx->stmt) == GIMPLE_OMP_SINGLE)))
    {
      /* #pragma omp simd isn't a worksharing construct, and can reference
         even private vars in its linear etc. clauses.
         Similarly for OMP_CLAUSE_PRIVATE with outer ref, that can refer
         to private vars in all worksharing constructs.  */
      x = NULL_TREE;
      if (outer && is_taskreg_ctx (outer))
        x = lookup_decl (var, outer);
      else if (outer)
        x = maybe_lookup_decl_in_outer_ctx (var, ctx);
      if (x == NULL_TREE)
        x = var;
    }
  else if (code == OMP_CLAUSE_LASTPRIVATE && is_taskloop_ctx (ctx))
    {
      gcc_assert (outer);
      splay_tree_node n
        = splay_tree_lookup (outer->field_map,
                             (splay_tree_key) &DECL_UID (var));
      if (n == NULL)
        {
          if (is_global_var (maybe_lookup_decl_in_outer_ctx (var, outer)))
            x = var;
          else
            x = lookup_decl (var, outer);
        }
      else
        {
          tree field = (tree) n->value;
          /* If the receiver record type was remapped in the child function,
             remap the field into the new record type.  */
          x = maybe_lookup_field (field, outer);
          if (x != NULL)
            field = x;

          x = build_simple_mem_ref (outer->receiver_decl);
          x = omp_build_component_ref (x, field);
          if (use_pointer_for_field (var, outer))
            x = build_simple_mem_ref (x);
        }
    }
  else if (outer)
    {
      if (gimple_code (outer->stmt) == GIMPLE_OMP_GRID_BODY)
        {
          outer = outer->outer;
          gcc_assert (outer
                      && gimple_code (outer->stmt) != GIMPLE_OMP_GRID_BODY);
        }
      x = lookup_decl (var, outer);
    }
  else if (omp_is_reference (var))
    /* This can happen with orphaned constructs.  If var is a reference,
       it is possibly shared and as such valid.  */
    x = var;
  else if (omp_member_access_dummy_var (var))
    x = var;
  else
    gcc_unreachable ();

  if (x == var)
    {
      tree t = omp_member_access_dummy_var (var);
      if (t)
        {
          x = DECL_VALUE_EXPR (var);
          tree o = maybe_lookup_decl_in_outer_ctx (t, ctx);
          if (o != t)
            x = unshare_and_remap (x, t, o);
          else
            x = unshare_expr (x);
        }
    }

  if (omp_is_reference (var))
    x = build_simple_mem_ref (x);

  return x;
}
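
/* For example (illustrative), a firstprivate V on a parallel must read
   V from the scope outside the region: CTX is a taskreg context, so the
   result is the receiver-side component reference built above (by
   pointer if use_pointer_for_field says so).  For the same clause on a
   worksharing construct nested inside that parallel, V is instead
   looked up in the enclosing context's remapping, and globals are
   returned unchanged.  */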

/* Build tree nodes to access the field for VAR on the sender side.  */

static tree
build_sender_ref (splay_tree_key key, omp_context *ctx)
{
  tree field = lookup_sfield (key, ctx);
  return omp_build_component_ref (ctx->sender_decl, field);
}

static tree
build_sender_ref (tree var, omp_context *ctx)
{
  return build_sender_ref ((splay_tree_key) var, ctx);
}

/* Add a new field for VAR inside the structure CTX->SENDER_DECL.  */

static void
install_var_field (tree var, bool by_ref, int mask, omp_context *ctx)
{
  tree field, type, sfield = NULL_TREE;
  splay_tree_key key = (splay_tree_key) var;

  if ((mask & 8) != 0)
    {
      key = (splay_tree_key) &DECL_UID (var);
      gcc_checking_assert (key != (splay_tree_key) var);
    }
  gcc_assert ((mask & 1) == 0
              || !splay_tree_lookup (ctx->field_map, key));
  gcc_assert ((mask & 2) == 0 || !ctx->sfield_map
              || !splay_tree_lookup (ctx->sfield_map, key));
  gcc_assert ((mask & 3) == 3
              || !is_gimple_omp_oacc (ctx->stmt));

  type = TREE_TYPE (var);
  /* Prevent redeclaring the var in the split-off function with a restrict
     pointer type.  Note that we only clear type itself; restrict qualifiers
     in the pointed-to type will be ignored by points-to analysis.  */
  if (POINTER_TYPE_P (type)
      && TYPE_RESTRICT (type))
    type = build_qualified_type (type, TYPE_QUALS (type) & ~TYPE_QUAL_RESTRICT);

  if (mask & 4)
    {
      gcc_assert (TREE_CODE (type) == ARRAY_TYPE);
      type = build_pointer_type (build_pointer_type (type));
    }
  else if (by_ref)
    type = build_pointer_type (type);
  else if ((mask & 3) == 1 && omp_is_reference (var))
    type = TREE_TYPE (type);

  field = build_decl (DECL_SOURCE_LOCATION (var),
                      FIELD_DECL, DECL_NAME (var), type);

  /* Remember what variable this field was created for.  This does have a
     side effect of making dwarf2out ignore this member, so for helpful
     debugging we clear it later in delete_omp_context.  */
  DECL_ABSTRACT_ORIGIN (field) = var;
  if (type == TREE_TYPE (var))
    {
      SET_DECL_ALIGN (field, DECL_ALIGN (var));
      DECL_USER_ALIGN (field) = DECL_USER_ALIGN (var);
      TREE_THIS_VOLATILE (field) = TREE_THIS_VOLATILE (var);
    }
  else
    SET_DECL_ALIGN (field, TYPE_ALIGN (type));

  if ((mask & 3) == 3)
    {
      insert_field_into_struct (ctx->record_type, field);
      if (ctx->srecord_type)
        {
          sfield = build_decl (DECL_SOURCE_LOCATION (var),
                               FIELD_DECL, DECL_NAME (var), type);
          DECL_ABSTRACT_ORIGIN (sfield) = var;
          SET_DECL_ALIGN (sfield, DECL_ALIGN (field));
          DECL_USER_ALIGN (sfield) = DECL_USER_ALIGN (field);
          TREE_THIS_VOLATILE (sfield) = TREE_THIS_VOLATILE (field);
          insert_field_into_struct (ctx->srecord_type, sfield);
        }
    }
  else
    {
      if (ctx->srecord_type == NULL_TREE)
        {
          tree t;

          ctx->srecord_type = lang_hooks.types.make_type (RECORD_TYPE);
          ctx->sfield_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
          for (t = TYPE_FIELDS (ctx->record_type); t; t = TREE_CHAIN (t))
            {
              sfield = build_decl (DECL_SOURCE_LOCATION (t),
                                   FIELD_DECL, DECL_NAME (t), TREE_TYPE (t));
              DECL_ABSTRACT_ORIGIN (sfield) = DECL_ABSTRACT_ORIGIN (t);
              insert_field_into_struct (ctx->srecord_type, sfield);
              splay_tree_insert (ctx->sfield_map,
                                 (splay_tree_key) DECL_ABSTRACT_ORIGIN (t),
                                 (splay_tree_value) sfield);
            }
        }
      sfield = field;
      insert_field_into_struct ((mask & 1) ? ctx->record_type
                                : ctx->srecord_type, field);
    }

  if (mask & 1)
    splay_tree_insert (ctx->field_map, key, (splay_tree_value) field);
  if ((mask & 2) && ctx->sfield_map)
    splay_tree_insert (ctx->sfield_map, key, (splay_tree_value) sfield);
}
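
/* MASK is a bit set: 1 installs the field in CTX->FIELD_MAP (receiver
   side), 2 in CTX->SFIELD_MAP (sender side, used by task constructs),
   so 3 means both; 4 wraps an array type in a double pointer; 8 keys
   the maps by &DECL_UID (VAR) instead of VAR itself, for when the same
   decl needs a second, distinct entry (e.g. taskloop lastprivate,
   looked up that way in build_outer_var_ref).  E.g. the mask 11 used
   for SHARED_FIRSTPRIVATE below is 8|2|1, and the mask 7 used for
   pointer-mapped arrays is 4|3.  */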

static tree
install_var_local (tree var, omp_context *ctx)
{
  tree new_var = omp_copy_decl_1 (var, ctx);
  insert_decl_map (&ctx->cb, var, new_var);
  return new_var;
}

/* Adjust the replacement for DECL in CTX for the new context.  This means
   copying the DECL_VALUE_EXPR, and fixing up the type.  */

static void
fixup_remapped_decl (tree decl, omp_context *ctx, bool private_debug)
{
  tree new_decl, size;

  new_decl = lookup_decl (decl, ctx);

  TREE_TYPE (new_decl) = remap_type (TREE_TYPE (decl), &ctx->cb);

  if ((!TREE_CONSTANT (DECL_SIZE (new_decl)) || private_debug)
      && DECL_HAS_VALUE_EXPR_P (decl))
    {
      tree ve = DECL_VALUE_EXPR (decl);
      walk_tree (&ve, copy_tree_body_r, &ctx->cb, NULL);
      SET_DECL_VALUE_EXPR (new_decl, ve);
      DECL_HAS_VALUE_EXPR_P (new_decl) = 1;
    }

  if (!TREE_CONSTANT (DECL_SIZE (new_decl)))
    {
      size = remap_decl (DECL_SIZE (decl), &ctx->cb);
      if (size == error_mark_node)
        size = TYPE_SIZE (TREE_TYPE (new_decl));
      DECL_SIZE (new_decl) = size;

      size = remap_decl (DECL_SIZE_UNIT (decl), &ctx->cb);
      if (size == error_mark_node)
        size = TYPE_SIZE_UNIT (TREE_TYPE (new_decl));
      DECL_SIZE_UNIT (new_decl) = size;
    }
}

/* The callback for remap_decl.  Search all containing contexts for a
   mapping of the variable; this avoids having to duplicate the splay
   tree ahead of time.  We know a mapping doesn't already exist in the
   given context.  Create new mappings to implement default semantics.  */

static tree
omp_copy_decl (tree var, copy_body_data *cb)
{
  omp_context *ctx = (omp_context *) cb;
  tree new_var;

  if (TREE_CODE (var) == LABEL_DECL)
    {
      if (FORCED_LABEL (var) || DECL_NONLOCAL (var))
        return var;
      new_var = create_artificial_label (DECL_SOURCE_LOCATION (var));
      DECL_CONTEXT (new_var) = current_function_decl;
      insert_decl_map (&ctx->cb, var, new_var);
      return new_var;
    }

  while (!is_taskreg_ctx (ctx))
    {
      ctx = ctx->outer;
      if (ctx == NULL)
        return var;
      new_var = maybe_lookup_decl (var, ctx);
      if (new_var)
        return new_var;
    }

  if (is_global_var (var) || decl_function_context (var) != ctx->cb.src_fn)
    return var;

  return error_mark_node;
}

/* Create a new context, with OUTER_CTX being the surrounding context.  */

static omp_context *
new_omp_context (gimple *stmt, omp_context *outer_ctx)
{
  omp_context *ctx = XCNEW (omp_context);

  splay_tree_insert (all_contexts, (splay_tree_key) stmt,
                     (splay_tree_value) ctx);
  ctx->stmt = stmt;

  if (outer_ctx)
    {
      ctx->outer = outer_ctx;
      ctx->cb = outer_ctx->cb;
      ctx->cb.block = NULL;
      ctx->depth = outer_ctx->depth + 1;
    }
  else
    {
      ctx->cb.src_fn = current_function_decl;
      ctx->cb.dst_fn = current_function_decl;
      ctx->cb.src_node = cgraph_node::get (current_function_decl);
      gcc_checking_assert (ctx->cb.src_node);
      ctx->cb.dst_node = ctx->cb.src_node;
      ctx->cb.src_cfun = cfun;
      ctx->cb.copy_decl = omp_copy_decl;
      ctx->cb.eh_lp_nr = 0;
      ctx->cb.transform_call_graph_edges = CB_CGE_MOVE;
      ctx->cb.adjust_array_error_bounds = true;
      ctx->cb.dont_remap_vla_if_no_change = true;
      ctx->depth = 1;
    }

  ctx->cb.decl_map = new hash_map<tree, tree>;

  return ctx;
}

static gimple_seq maybe_catch_exception (gimple_seq);

/* Finalize task copyfn.  */

static void
finalize_task_copyfn (gomp_task *task_stmt)
{
  struct function *child_cfun;
  tree child_fn;
  gimple_seq seq = NULL, new_seq;
  gbind *bind;

  child_fn = gimple_omp_task_copy_fn (task_stmt);
  if (child_fn == NULL_TREE)
    return;

  child_cfun = DECL_STRUCT_FUNCTION (child_fn);
  DECL_STRUCT_FUNCTION (child_fn)->curr_properties = cfun->curr_properties;

  push_cfun (child_cfun);
  bind = gimplify_body (child_fn, false);
  gimple_seq_add_stmt (&seq, bind);
  new_seq = maybe_catch_exception (seq);
  if (new_seq != seq)
    {
      bind = gimple_build_bind (NULL, new_seq, NULL);
      seq = NULL;
      gimple_seq_add_stmt (&seq, bind);
    }
  gimple_set_body (child_fn, seq);
  pop_cfun ();

  /* Inform the callgraph about the new function.  */
  cgraph_node *node = cgraph_node::get_create (child_fn);
  node->parallelized_function = 1;
  cgraph_node::add_new_function (child_fn, false);
}

/* Destroy an omp_context data structure.  Called through the splay tree
   value delete callback.  */

static void
delete_omp_context (splay_tree_value value)
{
  omp_context *ctx = (omp_context *) value;

  delete ctx->cb.decl_map;

  if (ctx->field_map)
    splay_tree_delete (ctx->field_map);
  if (ctx->sfield_map)
    splay_tree_delete (ctx->sfield_map);

  /* We hijacked DECL_ABSTRACT_ORIGIN earlier.  We need to clear it, as
     leaving it set produces corrupt debug information.  */
  if (ctx->record_type)
    {
      tree t;
      for (t = TYPE_FIELDS (ctx->record_type); t; t = DECL_CHAIN (t))
        DECL_ABSTRACT_ORIGIN (t) = NULL;
    }
  if (ctx->srecord_type)
    {
      tree t;
      for (t = TYPE_FIELDS (ctx->srecord_type); t; t = DECL_CHAIN (t))
        DECL_ABSTRACT_ORIGIN (t) = NULL;
    }

  if (is_task_ctx (ctx))
    finalize_task_copyfn (as_a <gomp_task *> (ctx->stmt));

  if (ctx->task_reduction_map)
    {
      ctx->task_reductions.release ();
      delete ctx->task_reduction_map;
    }

  delete ctx->lastprivate_conditional_map;

  XDELETE (ctx);
}

/* Fix up RECEIVER_DECL with a type that has been remapped to the child
   context.  */

static void
fixup_child_record_type (omp_context *ctx)
{
  tree f, type = ctx->record_type;

  if (!ctx->receiver_decl)
    return;
  /* ??? It isn't sufficient to just call remap_type here, because
     variably_modified_type_p doesn't work the way we expect for
     record types.  Testing each field for whether it needs remapping
     and creating a new record by hand works, however.  */
  for (f = TYPE_FIELDS (type); f; f = DECL_CHAIN (f))
    if (variably_modified_type_p (TREE_TYPE (f), ctx->cb.src_fn))
      break;
  if (f)
    {
      tree name, new_fields = NULL;

      type = lang_hooks.types.make_type (RECORD_TYPE);
      name = DECL_NAME (TYPE_NAME (ctx->record_type));
      name = build_decl (DECL_SOURCE_LOCATION (ctx->receiver_decl),
                         TYPE_DECL, name, type);
      TYPE_NAME (type) = name;

      for (f = TYPE_FIELDS (ctx->record_type); f; f = DECL_CHAIN (f))
        {
          tree new_f = copy_node (f);
          DECL_CONTEXT (new_f) = type;
          TREE_TYPE (new_f) = remap_type (TREE_TYPE (f), &ctx->cb);
          DECL_CHAIN (new_f) = new_fields;
          walk_tree (&DECL_SIZE (new_f), copy_tree_body_r, &ctx->cb, NULL);
          walk_tree (&DECL_SIZE_UNIT (new_f), copy_tree_body_r,
                     &ctx->cb, NULL);
          walk_tree (&DECL_FIELD_OFFSET (new_f), copy_tree_body_r,
                     &ctx->cb, NULL);
          new_fields = new_f;

          /* Arrange to be able to look up the receiver field
             given the sender field.  */
          splay_tree_insert (ctx->field_map, (splay_tree_key) f,
                             (splay_tree_value) new_f);
        }
      TYPE_FIELDS (type) = nreverse (new_fields);
      layout_type (type);
    }

  /* In a target region we never modify any of the pointers in *.omp_data_i,
     so attempt to help the optimizers.  */
  if (is_gimple_omp_offloaded (ctx->stmt))
    type = build_qualified_type (type, TYPE_QUAL_CONST);

  TREE_TYPE (ctx->receiver_decl)
    = build_qualified_type (build_reference_type (type), TYPE_QUAL_RESTRICT);
}
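
/* This matters when some field's type is variably modified, e.g.
   (illustrative) a pointer-to-VLA type such as int (*)[n] captured
   from the parent: the record is rebuilt field by field with sizes
   and offsets remapped into the child function, and the receiver
   decl becomes a restrict-qualified (and, for offloaded regions,
   const-qualified) reference to the remapped record type.  */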

/* Instantiate decls as necessary in CTX to satisfy the data sharing
   specified by CLAUSES.  */

static void
scan_sharing_clauses (tree clauses, omp_context *ctx)
{
  tree c, decl;
  bool scan_array_reductions = false;

  for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
    {
      bool by_ref;

      switch (OMP_CLAUSE_CODE (c))
        {
        case OMP_CLAUSE_PRIVATE:
          decl = OMP_CLAUSE_DECL (c);
          if (OMP_CLAUSE_PRIVATE_OUTER_REF (c))
            goto do_private;
          else if (!is_variable_sized (decl))
            install_var_local (decl, ctx);
          break;

        case OMP_CLAUSE_SHARED:
          decl = OMP_CLAUSE_DECL (c);
          /* Ignore shared directives in a teams construct inside
             a target construct.  */
          if (gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS
              && !is_host_teams_ctx (ctx))
            {
              /* Global variables don't need to be copied,
                 the receiver side will use them directly.  */
              tree odecl = maybe_lookup_decl_in_outer_ctx (decl, ctx);
              if (is_global_var (odecl))
                break;
              insert_decl_map (&ctx->cb, decl, odecl);
              break;
            }
          gcc_assert (is_taskreg_ctx (ctx));
          gcc_assert (!COMPLETE_TYPE_P (TREE_TYPE (decl))
                      || !is_variable_sized (decl));
          /* Global variables don't need to be copied,
             the receiver side will use them directly.  */
          if (is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx)))
            break;
          if (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
            {
              use_pointer_for_field (decl, ctx);
              break;
            }
          by_ref = use_pointer_for_field (decl, NULL);
          if ((!TREE_READONLY (decl) && !OMP_CLAUSE_SHARED_READONLY (c))
              || TREE_ADDRESSABLE (decl)
              || by_ref
              || omp_is_reference (decl))
            {
              by_ref = use_pointer_for_field (decl, ctx);
              install_var_field (decl, by_ref, 3, ctx);
              install_var_local (decl, ctx);
              break;
            }
          /* We don't need to copy const scalar vars back.  */
          OMP_CLAUSE_SET_CODE (c, OMP_CLAUSE_FIRSTPRIVATE);
          goto do_private;

        case OMP_CLAUSE_REDUCTION:
        case OMP_CLAUSE_IN_REDUCTION:
          decl = OMP_CLAUSE_DECL (c);
          if (TREE_CODE (decl) == MEM_REF)
            {
              tree t = TREE_OPERAND (decl, 0);
              if (TREE_CODE (t) == POINTER_PLUS_EXPR)
                t = TREE_OPERAND (t, 0);
              if (TREE_CODE (t) == INDIRECT_REF
                  || TREE_CODE (t) == ADDR_EXPR)
                t = TREE_OPERAND (t, 0);
              install_var_local (t, ctx);
              if (is_taskreg_ctx (ctx)
                  && (!is_global_var (maybe_lookup_decl_in_outer_ctx (t, ctx))
                      || (is_task_ctx (ctx)
                          && (TREE_CODE (TREE_TYPE (t)) == POINTER_TYPE
                              || (TREE_CODE (TREE_TYPE (t)) == REFERENCE_TYPE
                                  && (TREE_CODE (TREE_TYPE (TREE_TYPE (t)))
                                      == POINTER_TYPE)))))
                  && !is_variable_sized (t)
                  && (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION
                      || (!OMP_CLAUSE_REDUCTION_TASK (c)
                          && !is_task_ctx (ctx))))
                {
                  by_ref = use_pointer_for_field (t, NULL);
                  if (is_task_ctx (ctx)
                      && TREE_CODE (TREE_TYPE (t)) == REFERENCE_TYPE
                      && TREE_CODE (TREE_TYPE (TREE_TYPE (t))) == POINTER_TYPE)
                    {
                      install_var_field (t, false, 1, ctx);
                      install_var_field (t, by_ref, 2, ctx);
                    }
                  else
                    install_var_field (t, by_ref, 3, ctx);
                }
              break;
            }
          if (is_task_ctx (ctx)
              || (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
                  && OMP_CLAUSE_REDUCTION_TASK (c)
                  && is_parallel_ctx (ctx)))
            {
              /* Global variables don't need to be copied,
                 the receiver side will use them directly.  */
              if (!is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx)))
                {
                  by_ref = use_pointer_for_field (decl, ctx);
                  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_IN_REDUCTION)
                    install_var_field (decl, by_ref, 3, ctx);
                }
              install_var_local (decl, ctx);
              break;
            }
          if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
              && OMP_CLAUSE_REDUCTION_TASK (c))
            {
              install_var_local (decl, ctx);
              break;
            }
          goto do_private;

        case OMP_CLAUSE_LASTPRIVATE:
          /* Let the corresponding firstprivate clause create
             the variable.  */
          if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
            break;
          /* FALLTHRU */

        case OMP_CLAUSE_FIRSTPRIVATE:
        case OMP_CLAUSE_LINEAR:
          decl = OMP_CLAUSE_DECL (c);
        do_private:
          if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE
               || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_IS_DEVICE_PTR)
              && is_gimple_omp_offloaded (ctx->stmt))
            {
              if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
                install_var_field (decl, !omp_is_reference (decl), 3, ctx);
              else if (TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
                install_var_field (decl, true, 3, ctx);
              else
                install_var_field (decl, false, 3, ctx);
            }
          if (is_variable_sized (decl))
            {
              if (is_task_ctx (ctx))
                install_var_field (decl, false, 1, ctx);
              break;
            }
          else if (is_taskreg_ctx (ctx))
            {
              bool global
                = is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx));
              by_ref = use_pointer_for_field (decl, NULL);

              if (is_task_ctx (ctx)
                  && (global || by_ref || omp_is_reference (decl)))
                {
                  install_var_field (decl, false, 1, ctx);
                  if (!global)
                    install_var_field (decl, by_ref, 2, ctx);
                }
              else if (!global)
                install_var_field (decl, by_ref, 3, ctx);
            }
          install_var_local (decl, ctx);
          break;

        case OMP_CLAUSE_USE_DEVICE_PTR:
          decl = OMP_CLAUSE_DECL (c);
          if (TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
            install_var_field (decl, true, 3, ctx);
          else
            install_var_field (decl, false, 3, ctx);
          if (DECL_SIZE (decl)
              && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
            {
              tree decl2 = DECL_VALUE_EXPR (decl);
              gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
              decl2 = TREE_OPERAND (decl2, 0);
              gcc_assert (DECL_P (decl2));
              install_var_local (decl2, ctx);
            }
          install_var_local (decl, ctx);
          break;

        case OMP_CLAUSE_IS_DEVICE_PTR:
          decl = OMP_CLAUSE_DECL (c);
          goto do_private;

        case OMP_CLAUSE__LOOPTEMP_:
        case OMP_CLAUSE__REDUCTEMP_:
          gcc_assert (is_taskreg_ctx (ctx));
          decl = OMP_CLAUSE_DECL (c);
          install_var_field (decl, false, 3, ctx);
          install_var_local (decl, ctx);
          break;

        case OMP_CLAUSE_COPYPRIVATE:
        case OMP_CLAUSE_COPYIN:
          decl = OMP_CLAUSE_DECL (c);
          by_ref = use_pointer_for_field (decl, NULL);
          install_var_field (decl, by_ref, 3, ctx);
          break;

        case OMP_CLAUSE_FINAL:
        case OMP_CLAUSE_IF:
        case OMP_CLAUSE_NUM_THREADS:
        case OMP_CLAUSE_NUM_TEAMS:
        case OMP_CLAUSE_THREAD_LIMIT:
        case OMP_CLAUSE_DEVICE:
        case OMP_CLAUSE_SCHEDULE:
        case OMP_CLAUSE_DIST_SCHEDULE:
        case OMP_CLAUSE_DEPEND:
        case OMP_CLAUSE_PRIORITY:
        case OMP_CLAUSE_GRAINSIZE:
        case OMP_CLAUSE_NUM_TASKS:
        case OMP_CLAUSE_NUM_GANGS:
        case OMP_CLAUSE_NUM_WORKERS:
        case OMP_CLAUSE_VECTOR_LENGTH:
          if (ctx->outer)
            scan_omp_op (&OMP_CLAUSE_OPERAND (c, 0), ctx->outer);
          break;

        case OMP_CLAUSE_TO:
        case OMP_CLAUSE_FROM:
        case OMP_CLAUSE_MAP:
          if (ctx->outer)
            scan_omp_op (&OMP_CLAUSE_SIZE (c), ctx->outer);
          decl = OMP_CLAUSE_DECL (c);
          /* Global variables with "omp declare target" attribute
             don't need to be copied, the receiver side will use them
             directly.  However, global variables with "omp declare target link"
             attribute need to be copied.  Or when the ALWAYS modifier is used.  */
          if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
              && DECL_P (decl)
              && ((OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_FIRSTPRIVATE_POINTER
                   && (OMP_CLAUSE_MAP_KIND (c)
                       != GOMP_MAP_FIRSTPRIVATE_REFERENCE))
                  || TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
              && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_ALWAYS_TO
              && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_ALWAYS_FROM
              && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_ALWAYS_TOFROM
              && is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx))
              && varpool_node::get_create (decl)->offloadable
              && !lookup_attribute ("omp declare target link",
                                    DECL_ATTRIBUTES (decl)))
            break;
          if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
              && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_POINTER)
            {
              /* Ignore GOMP_MAP_POINTER kind for arrays in regions that are
                 not offloaded; there is nothing to map for those.  */
              if (!is_gimple_omp_offloaded (ctx->stmt)
                  && !POINTER_TYPE_P (TREE_TYPE (decl))
                  && !OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c))
                break;
            }
          if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
              && (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER
                  || (OMP_CLAUSE_MAP_KIND (c)
                      == GOMP_MAP_FIRSTPRIVATE_REFERENCE)))
            {
              if (TREE_CODE (decl) == COMPONENT_REF
                  || (TREE_CODE (decl) == INDIRECT_REF
                      && TREE_CODE (TREE_OPERAND (decl, 0)) == COMPONENT_REF
                      && (TREE_CODE (TREE_TYPE (TREE_OPERAND (decl, 0)))
                          == REFERENCE_TYPE)))
                break;
              if (DECL_SIZE (decl)
                  && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
                {
                  tree decl2 = DECL_VALUE_EXPR (decl);
                  gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
                  decl2 = TREE_OPERAND (decl2, 0);
                  gcc_assert (DECL_P (decl2));
                  install_var_local (decl2, ctx);
                }
              install_var_local (decl, ctx);
              break;
            }
          if (DECL_P (decl))
            {
              if (DECL_SIZE (decl)
                  && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
                {
                  tree decl2 = DECL_VALUE_EXPR (decl);
                  gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
                  decl2 = TREE_OPERAND (decl2, 0);
                  gcc_assert (DECL_P (decl2));
                  install_var_field (decl2, true, 3, ctx);
                  install_var_local (decl2, ctx);
                  install_var_local (decl, ctx);
                }
              else
                {
                  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
                      && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_POINTER
                      && !OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c)
                      && TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
                    install_var_field (decl, true, 7, ctx);
                  else
                    install_var_field (decl, true, 3, ctx);
                  if (is_gimple_omp_offloaded (ctx->stmt)
                      && !OMP_CLAUSE_MAP_IN_REDUCTION (c))
                    install_var_local (decl, ctx);
                }
            }
          else
            {
              tree base = get_base_address (decl);
              tree nc = OMP_CLAUSE_CHAIN (c);
              if (DECL_P (base)
                  && nc != NULL_TREE
                  && OMP_CLAUSE_CODE (nc) == OMP_CLAUSE_MAP
                  && OMP_CLAUSE_DECL (nc) == base
                  && OMP_CLAUSE_MAP_KIND (nc) == GOMP_MAP_POINTER
                  && integer_zerop (OMP_CLAUSE_SIZE (nc)))
                {
                  OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c) = 1;
                  OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (nc) = 1;
                }
              else
                {
                  if (ctx->outer)
                    {
                      scan_omp_op (&OMP_CLAUSE_DECL (c), ctx->outer);
                      decl = OMP_CLAUSE_DECL (c);
                    }
                  gcc_assert (!splay_tree_lookup (ctx->field_map,
                                                  (splay_tree_key) decl));
                  tree field
                    = build_decl (OMP_CLAUSE_LOCATION (c),
                                  FIELD_DECL, NULL_TREE, ptr_type_node);
                  SET_DECL_ALIGN (field, TYPE_ALIGN (ptr_type_node));
                  insert_field_into_struct (ctx->record_type, field);
                  splay_tree_insert (ctx->field_map, (splay_tree_key) decl,
                                     (splay_tree_value) field);
                }
            }
          break;

        case OMP_CLAUSE__GRIDDIM_:
          if (ctx->outer)
            {
              scan_omp_op (&OMP_CLAUSE__GRIDDIM__SIZE (c), ctx->outer);
              scan_omp_op (&OMP_CLAUSE__GRIDDIM__GROUP (c), ctx->outer);
            }
          break;

        case OMP_CLAUSE_NOWAIT:
        case OMP_CLAUSE_ORDERED:
        case OMP_CLAUSE_COLLAPSE:
        case OMP_CLAUSE_UNTIED:
        case OMP_CLAUSE_MERGEABLE:
        case OMP_CLAUSE_PROC_BIND:
        case OMP_CLAUSE_SAFELEN:
        case OMP_CLAUSE_SIMDLEN:
        case OMP_CLAUSE_THREADS:
        case OMP_CLAUSE_SIMD:
        case OMP_CLAUSE_NOGROUP:
        case OMP_CLAUSE_DEFAULTMAP:
        case OMP_CLAUSE_ASYNC:
        case OMP_CLAUSE_WAIT:
        case OMP_CLAUSE_GANG:
        case OMP_CLAUSE_WORKER:
        case OMP_CLAUSE_VECTOR:
        case OMP_CLAUSE_INDEPENDENT:
        case OMP_CLAUSE_AUTO:
        case OMP_CLAUSE_SEQ:
        case OMP_CLAUSE_TILE:
        case OMP_CLAUSE__SIMT_:
        case OMP_CLAUSE_DEFAULT:
        case OMP_CLAUSE_NONTEMPORAL:
        case OMP_CLAUSE_IF_PRESENT:
        case OMP_CLAUSE_FINALIZE:
        case OMP_CLAUSE_TASK_REDUCTION:
          break;

        case OMP_CLAUSE_ALIGNED:
          decl = OMP_CLAUSE_DECL (c);
          if (is_global_var (decl)
              && TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
            install_var_local (decl, ctx);
          break;

        case OMP_CLAUSE__CONDTEMP_:
          decl = OMP_CLAUSE_DECL (c);
          if (is_parallel_ctx (ctx))
            {
              install_var_field (decl, false, 3, ctx);
              install_var_local (decl, ctx);
            }
          else if (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
                   && (gimple_omp_for_kind (ctx->stmt) & GF_OMP_FOR_SIMD)
                   && !OMP_CLAUSE__CONDTEMP__ITER (c))
            install_var_local (decl, ctx);
          break;

        case OMP_CLAUSE__CACHE_:
        default:
          gcc_unreachable ();
        }
    }

  for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
    {
      switch (OMP_CLAUSE_CODE (c))
        {
        case OMP_CLAUSE_LASTPRIVATE:
          /* Let the corresponding firstprivate clause create
             the variable.  */
          if (OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c))
            scan_array_reductions = true;
          if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
            break;
          /* FALLTHRU */

        case OMP_CLAUSE_FIRSTPRIVATE:
        case OMP_CLAUSE_PRIVATE:
        case OMP_CLAUSE_LINEAR:
        case OMP_CLAUSE_IS_DEVICE_PTR:
          decl = OMP_CLAUSE_DECL (c);
          if (is_variable_sized (decl))
            {
              if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE
                   || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_IS_DEVICE_PTR)
                  && is_gimple_omp_offloaded (ctx->stmt))
                {
                  tree decl2 = DECL_VALUE_EXPR (decl);
                  gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
                  decl2 = TREE_OPERAND (decl2, 0);
                  gcc_assert (DECL_P (decl2));
                  install_var_local (decl2, ctx);
                  fixup_remapped_decl (decl2, ctx, false);
                }
              install_var_local (decl, ctx);
            }
          fixup_remapped_decl (decl, ctx,
                               OMP_CLAUSE_CODE (c) == OMP_CLAUSE_PRIVATE
                               && OMP_CLAUSE_PRIVATE_DEBUG (c));
          if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
              && OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c))
            scan_array_reductions = true;
          break;

        case OMP_CLAUSE_REDUCTION:
        case OMP_CLAUSE_IN_REDUCTION:
          decl = OMP_CLAUSE_DECL (c);
          if (TREE_CODE (decl) != MEM_REF)
            {
              if (is_variable_sized (decl))
                install_var_local (decl, ctx);
              fixup_remapped_decl (decl, ctx, false);
            }
          if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
            scan_array_reductions = true;
          break;

        case OMP_CLAUSE_TASK_REDUCTION:
          if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
            scan_array_reductions = true;
          break;

        case OMP_CLAUSE_SHARED:
          /* Ignore shared directives in a teams construct inside
             a target construct.  */
          if (gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS
              && !is_host_teams_ctx (ctx))
            break;
          decl = OMP_CLAUSE_DECL (c);
          if (is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx)))
            break;
          if (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
            {
              if (is_global_var (maybe_lookup_decl_in_outer_ctx (decl,
                                                                 ctx->outer)))
                break;
              bool by_ref = use_pointer_for_field (decl, ctx);
              install_var_field (decl, by_ref, 11, ctx);
              break;
            }
          fixup_remapped_decl (decl, ctx, false);
          break;

        case OMP_CLAUSE_MAP:
          if (!is_gimple_omp_offloaded (ctx->stmt))
            break;
          decl = OMP_CLAUSE_DECL (c);
          if (DECL_P (decl)
              && ((OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_FIRSTPRIVATE_POINTER
                   && (OMP_CLAUSE_MAP_KIND (c)
                       != GOMP_MAP_FIRSTPRIVATE_REFERENCE))
                  || TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
              && is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx))
              && varpool_node::get_create (decl)->offloadable)
            break;
          if (DECL_P (decl))
            {
              if ((OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_POINTER
                   || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER)
                  && TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE
                  && !COMPLETE_TYPE_P (TREE_TYPE (decl)))
                {
                  tree new_decl = lookup_decl (decl, ctx);
                  TREE_TYPE (new_decl)
                    = remap_type (TREE_TYPE (decl), &ctx->cb);
                }
              else if (DECL_SIZE (decl)
                       && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
                {
                  tree decl2 = DECL_VALUE_EXPR (decl);
                  gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
                  decl2 = TREE_OPERAND (decl2, 0);
                  gcc_assert (DECL_P (decl2));
                  fixup_remapped_decl (decl2, ctx, false);
                  fixup_remapped_decl (decl, ctx, true);
                }
              else
                fixup_remapped_decl (decl, ctx, false);
            }
          break;

        case OMP_CLAUSE_COPYPRIVATE:
        case OMP_CLAUSE_COPYIN:
        case OMP_CLAUSE_DEFAULT:
        case OMP_CLAUSE_IF:
        case OMP_CLAUSE_NUM_THREADS:
        case OMP_CLAUSE_NUM_TEAMS:
        case OMP_CLAUSE_THREAD_LIMIT:
        case OMP_CLAUSE_DEVICE:
        case OMP_CLAUSE_SCHEDULE:
        case OMP_CLAUSE_DIST_SCHEDULE:
        case OMP_CLAUSE_NOWAIT:
        case OMP_CLAUSE_ORDERED:
        case OMP_CLAUSE_COLLAPSE:
        case OMP_CLAUSE_UNTIED:
        case OMP_CLAUSE_FINAL:
        case OMP_CLAUSE_MERGEABLE:
        case OMP_CLAUSE_PROC_BIND:
        case OMP_CLAUSE_SAFELEN:
        case OMP_CLAUSE_SIMDLEN:
        case OMP_CLAUSE_ALIGNED:
        case OMP_CLAUSE_DEPEND:
        case OMP_CLAUSE__LOOPTEMP_:
        case OMP_CLAUSE__REDUCTEMP_:
        case OMP_CLAUSE_TO:
        case OMP_CLAUSE_FROM:
        case OMP_CLAUSE_PRIORITY:
        case OMP_CLAUSE_GRAINSIZE:
        case OMP_CLAUSE_NUM_TASKS:
        case OMP_CLAUSE_THREADS:
        case OMP_CLAUSE_SIMD:
        case OMP_CLAUSE_NOGROUP:
        case OMP_CLAUSE_DEFAULTMAP:
        case OMP_CLAUSE_USE_DEVICE_PTR:
        case OMP_CLAUSE_NONTEMPORAL:
        case OMP_CLAUSE_ASYNC:
        case OMP_CLAUSE_WAIT:
        case OMP_CLAUSE_NUM_GANGS:
        case OMP_CLAUSE_NUM_WORKERS:
        case OMP_CLAUSE_VECTOR_LENGTH:
        case OMP_CLAUSE_GANG:
        case OMP_CLAUSE_WORKER:
        case OMP_CLAUSE_VECTOR:
        case OMP_CLAUSE_INDEPENDENT:
        case OMP_CLAUSE_AUTO:
        case OMP_CLAUSE_SEQ:
        case OMP_CLAUSE_TILE:
        case OMP_CLAUSE__GRIDDIM_:
        case OMP_CLAUSE__SIMT_:
        case OMP_CLAUSE_IF_PRESENT:
        case OMP_CLAUSE_FINALIZE:
        case OMP_CLAUSE__CONDTEMP_:
          break;

        case OMP_CLAUSE__CACHE_:
        default:
          gcc_unreachable ();
        }
    }

  gcc_checking_assert (!scan_array_reductions
                       || !is_gimple_omp_oacc (ctx->stmt));
  if (scan_array_reductions)
    {
      for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
        if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
             || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_IN_REDUCTION
             || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_TASK_REDUCTION)
            && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
          {
            scan_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c), ctx);
            scan_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c), ctx);
          }
        else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
                 && OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c))
          scan_omp (&OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c), ctx);
        else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
                 && OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c))
          scan_omp (&OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c), ctx);
    }
}
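
/* Tying the first loop above together (illustrative): for

     #pragma omp task firstprivate(x) shared(s)

   X gets a by-value field and a privatized local copy, while S is
   forced addressable and gets a pointer field (the task may outlive
   the code that encountered it, so copy-in/out is not an option).
   The second loop then adjusts the types and DECL_VALUE_EXPRs of the
   remapped decls once every variable has been seen.  */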

/* Create a new name for the omp child function.  Returns an identifier.  */

static tree
create_omp_child_function_name (bool task_copy)
{
  return clone_function_name_numbered (current_function_decl,
                                       task_copy ? "_omp_cpyfn" : "_omp_fn");
}
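
/* E.g. regions outlined from foo() get names like "foo._omp_fn.0",
   and task copy functions "foo._omp_cpyfn.<N>"; the trailing number
   comes from the counter inside clone_function_name_numbered.  */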

/* Return true if CTX may belong to offloaded code: either if current function
   is offloaded, or any enclosing context corresponds to a target region.  */

static bool
omp_maybe_offloaded_ctx (omp_context *ctx)
{
  if (cgraph_node::get (current_function_decl)->offloadable)
    return true;
  for (; ctx; ctx = ctx->outer)
    if (is_gimple_omp_offloaded (ctx->stmt))
      return true;
  return false;
}

/* Build a decl for the omp child function.  It won't contain a body
   yet, just the bare decl.  */

static void
create_omp_child_function (omp_context *ctx, bool task_copy)
{
  tree decl, type, name, t;

  name = create_omp_child_function_name (task_copy);
  if (task_copy)
    type = build_function_type_list (void_type_node, ptr_type_node,
                                     ptr_type_node, NULL_TREE);
  else
    type = build_function_type_list (void_type_node, ptr_type_node, NULL_TREE);

  decl = build_decl (gimple_location (ctx->stmt), FUNCTION_DECL, name, type);

  gcc_checking_assert (!is_gimple_omp_oacc (ctx->stmt)
                       || !task_copy);
  if (!task_copy)
    ctx->cb.dst_fn = decl;
  else
    gimple_omp_task_set_copy_fn (ctx->stmt, decl);

  TREE_STATIC (decl) = 1;
  TREE_USED (decl) = 1;
  DECL_ARTIFICIAL (decl) = 1;
  DECL_IGNORED_P (decl) = 0;
  TREE_PUBLIC (decl) = 0;
  DECL_UNINLINABLE (decl) = 1;
  DECL_EXTERNAL (decl) = 0;
  DECL_CONTEXT (decl) = NULL_TREE;
  DECL_INITIAL (decl) = make_node (BLOCK);
  BLOCK_SUPERCONTEXT (DECL_INITIAL (decl)) = decl;
  DECL_ATTRIBUTES (decl) = DECL_ATTRIBUTES (current_function_decl);
  /* Remove omp declare simd attribute from the new attributes.  */
  if (tree a = lookup_attribute ("omp declare simd", DECL_ATTRIBUTES (decl)))
    {
      while (tree a2 = lookup_attribute ("omp declare simd", TREE_CHAIN (a)))
        a = a2;
      a = TREE_CHAIN (a);
      for (tree *p = &DECL_ATTRIBUTES (decl); *p != a;)
        if (is_attribute_p ("omp declare simd", get_attribute_name (*p)))
          *p = TREE_CHAIN (*p);
        else
          {
            tree chain = TREE_CHAIN (*p);
            *p = copy_node (*p);
            p = &TREE_CHAIN (*p);
            *p = chain;
          }
    }
  DECL_FUNCTION_SPECIFIC_OPTIMIZATION (decl)
    = DECL_FUNCTION_SPECIFIC_OPTIMIZATION (current_function_decl);
  DECL_FUNCTION_SPECIFIC_TARGET (decl)
    = DECL_FUNCTION_SPECIFIC_TARGET (current_function_decl);
  DECL_FUNCTION_VERSIONED (decl)
    = DECL_FUNCTION_VERSIONED (current_function_decl);

  if (omp_maybe_offloaded_ctx (ctx))
    {
      cgraph_node::get_create (decl)->offloadable = 1;
      if (ENABLE_OFFLOADING)
        g->have_offload = true;
    }

  if (cgraph_node::get_create (decl)->offloadable
      && !lookup_attribute ("omp declare target",
                            DECL_ATTRIBUTES (current_function_decl)))
    {
      const char *target_attr = (is_gimple_omp_offloaded (ctx->stmt)
                                 ? "omp target entrypoint"
                                 : "omp declare target");
      DECL_ATTRIBUTES (decl)
        = tree_cons (get_identifier (target_attr),
                     NULL_TREE, DECL_ATTRIBUTES (decl));
    }

  t = build_decl (DECL_SOURCE_LOCATION (decl),
                  RESULT_DECL, NULL_TREE, void_type_node);
  DECL_ARTIFICIAL (t) = 1;
  DECL_IGNORED_P (t) = 1;
  DECL_CONTEXT (t) = decl;
  DECL_RESULT (decl) = t;

  tree data_name = get_identifier (".omp_data_i");
  t = build_decl (DECL_SOURCE_LOCATION (decl), PARM_DECL, data_name,
                  ptr_type_node);
  DECL_ARTIFICIAL (t) = 1;
  DECL_NAMELESS (t) = 1;
  DECL_ARG_TYPE (t) = ptr_type_node;
  DECL_CONTEXT (t) = current_function_decl;
  TREE_USED (t) = 1;
  TREE_READONLY (t) = 1;
  DECL_ARGUMENTS (decl) = t;
  if (!task_copy)
    ctx->receiver_decl = t;
  else
    {
      t = build_decl (DECL_SOURCE_LOCATION (decl),
                      PARM_DECL, get_identifier (".omp_data_o"),
                      ptr_type_node);
      DECL_ARTIFICIAL (t) = 1;
      DECL_NAMELESS (t) = 1;
      DECL_ARG_TYPE (t) = ptr_type_node;
      DECL_CONTEXT (t) = current_function_decl;
      TREE_USED (t) = 1;
      TREE_ADDRESSABLE (t) = 1;
      DECL_CHAIN (t) = DECL_ARGUMENTS (decl);
      DECL_ARGUMENTS (decl) = t;
    }

  /* Allocate memory for the function structure.  The call to
     allocate_struct_function clobbers CFUN, so we need to restore
     it afterward.  */
  push_struct_function (decl);
  cfun->function_end_locus = gimple_location (ctx->stmt);
  init_tree_ssa (cfun);
  pop_cfun ();
}
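
/* The resulting decl is, in effect (illustrative):

     static void foo._omp_fn.0 (void *.omp_data_i);

   for a region body, or for a task copy function

     static void foo._omp_cpyfn.1 (void *.omp_data_o, void *.omp_data_i);

   with .omp_data_i becoming CTX->RECEIVER_DECL in the non-copy case;
   fixup_child_record_type later narrows the receiver's type from
   void * to a pointer to the marshalling record.  */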
1784
1785 /* Callback for walk_gimple_seq. Check if combined parallel
1786 contains gimple_omp_for_combined_into_p OMP_FOR. */
1787
1788 tree
1789 omp_find_combined_for (gimple_stmt_iterator *gsi_p,
1790 bool *handled_ops_p,
1791 struct walk_stmt_info *wi)
1792 {
1793 gimple *stmt = gsi_stmt (*gsi_p);
1794
1795 *handled_ops_p = true;
1796 switch (gimple_code (stmt))
1797 {
1798 WALK_SUBSTMTS;
1799
1800 case GIMPLE_OMP_FOR:
1801 if (gimple_omp_for_combined_into_p (stmt)
1802 && gimple_omp_for_kind (stmt)
1803 == *(const enum gf_mask *) (wi->info))
1804 {
1805 wi->info = stmt;
1806 return integer_zero_node;
1807 }
1808 break;
1809 default:
1810 break;
1811 }
1812 return NULL;
1813 }
1814
1815 /* Add _LOOPTEMP_/_REDUCTEMP_ clauses on OpenMP parallel or task. */
1816
1817 static void
1818 add_taskreg_looptemp_clauses (enum gf_mask msk, gimple *stmt,
1819 omp_context *outer_ctx)
1820 {
1821 struct walk_stmt_info wi;
1822
1823 memset (&wi, 0, sizeof (wi));
1824 wi.val_only = true;
1825 wi.info = (void *) &msk;
1826 walk_gimple_seq (gimple_omp_body (stmt), omp_find_combined_for, NULL, &wi);
1827 if (wi.info != (void *) &msk)
1828 {
1829 gomp_for *for_stmt = as_a <gomp_for *> ((gimple *) wi.info);
1830 struct omp_for_data fd;
1831 omp_extract_for_data (for_stmt, &fd, NULL);
1832 /* We need two temporaries with fd.loop.v type (istart/iend)
1833 and then, if the iteration count is not constant, (fd.collapse - 1)
1834 temporaries with the same type for the count2 ... countN-1 vars. */
1835 size_t count = 2, i;
1836 tree type = fd.iter_type;
1837 if (fd.collapse > 1
1838 && TREE_CODE (fd.loop.n2) != INTEGER_CST)
1839 {
1840 count += fd.collapse - 1;
1841 /* If there are lastprivate clauses on the inner
1842 GIMPLE_OMP_FOR, add one more temporary for the total number
1843 of iterations (product of count1 ... countN-1). */
1844 if (omp_find_clause (gimple_omp_for_clauses (for_stmt),
1845 OMP_CLAUSE_LASTPRIVATE))
1846 count++;
1847 else if (msk == GF_OMP_FOR_KIND_FOR
1848 && omp_find_clause (gimple_omp_parallel_clauses (stmt),
1849 OMP_CLAUSE_LASTPRIVATE))
1850 count++;
1851 }
1852 for (i = 0; i < count; i++)
1853 {
1854 tree temp = create_tmp_var (type);
1855 tree c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__LOOPTEMP_);
1856 insert_decl_map (&outer_ctx->cb, temp, temp);
1857 OMP_CLAUSE_DECL (c) = temp;
1858 OMP_CLAUSE_CHAIN (c) = gimple_omp_taskreg_clauses (stmt);
1859 gimple_omp_taskreg_set_clauses (stmt, c);
1860 }
1861 }
1862 if (msk == GF_OMP_FOR_KIND_TASKLOOP
1863 && omp_find_clause (gimple_omp_task_clauses (stmt),
1864 OMP_CLAUSE_REDUCTION))
1865 {
1866 tree type = build_pointer_type (pointer_sized_int_node);
1867 tree temp = create_tmp_var (type);
1868 tree c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__REDUCTEMP_);
1869 insert_decl_map (&outer_ctx->cb, temp, temp);
1870 OMP_CLAUSE_DECL (c) = temp;
1871 OMP_CLAUSE_CHAIN (c) = gimple_omp_task_clauses (stmt);
1872 gimple_omp_task_set_clauses (stmt, c);
1873 }
1874 }
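/* A worked example (illustrative only): for

     #pragma omp parallel for collapse(2) lastprivate(x)

   with a non-constant total iteration count, the code above creates
   2 (istart/iend) + 1 (count2) + 1 (total count for lastprivate)
   = 4 _LOOPTEMP_ clauses on the parallel.  */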
1875
1876 /* Scan an OpenMP parallel directive. */
1877
1878 static void
1879 scan_omp_parallel (gimple_stmt_iterator *gsi, omp_context *outer_ctx)
1880 {
1881 omp_context *ctx;
1882 tree name;
1883 gomp_parallel *stmt = as_a <gomp_parallel *> (gsi_stmt (*gsi));
1884
1885 /* Ignore parallel directives with empty bodies, unless there
1886 are copyin clauses. */
1887 if (optimize > 0
1888 && empty_body_p (gimple_omp_body (stmt))
1889 && omp_find_clause (gimple_omp_parallel_clauses (stmt),
1890 OMP_CLAUSE_COPYIN) == NULL)
1891 {
1892 gsi_replace (gsi, gimple_build_nop (), false);
1893 return;
1894 }
1895
1896 if (gimple_omp_parallel_combined_p (stmt))
1897 add_taskreg_looptemp_clauses (GF_OMP_FOR_KIND_FOR, stmt, outer_ctx);
1898 for (tree c = omp_find_clause (gimple_omp_parallel_clauses (stmt),
1899 OMP_CLAUSE_REDUCTION);
1900 c; c = omp_find_clause (OMP_CLAUSE_CHAIN (c), OMP_CLAUSE_REDUCTION))
1901 if (OMP_CLAUSE_REDUCTION_TASK (c))
1902 {
1903 tree type = build_pointer_type (pointer_sized_int_node);
1904 tree temp = create_tmp_var (type);
1905 tree c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__REDUCTEMP_);
1906 if (outer_ctx)
1907 insert_decl_map (&outer_ctx->cb, temp, temp);
1908 OMP_CLAUSE_DECL (c) = temp;
1909 OMP_CLAUSE_CHAIN (c) = gimple_omp_parallel_clauses (stmt);
1910 gimple_omp_parallel_set_clauses (stmt, c);
1911 break;
1912 }
1913 else if (OMP_CLAUSE_CHAIN (c) == NULL_TREE)
1914 break;
1915
1916 ctx = new_omp_context (stmt, outer_ctx);
1917 taskreg_contexts.safe_push (ctx);
1918 if (taskreg_nesting_level > 1)
1919 ctx->is_nested = true;
1920 ctx->field_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
1921 ctx->record_type = lang_hooks.types.make_type (RECORD_TYPE);
1922 name = create_tmp_var_name (".omp_data_s");
1923 name = build_decl (gimple_location (stmt),
1924 TYPE_DECL, name, ctx->record_type);
1925 DECL_ARTIFICIAL (name) = 1;
1926 DECL_NAMELESS (name) = 1;
1927 TYPE_NAME (ctx->record_type) = name;
1928 TYPE_ARTIFICIAL (ctx->record_type) = 1;
1929 if (!gimple_omp_parallel_grid_phony (stmt))
1930 {
1931 create_omp_child_function (ctx, false);
1932 gimple_omp_parallel_set_child_fn (stmt, ctx->cb.dst_fn);
1933 }
1934
1935 scan_sharing_clauses (gimple_omp_parallel_clauses (stmt), ctx);
1936 scan_omp (gimple_omp_body_ptr (stmt), ctx);
1937
1938 if (TYPE_FIELDS (ctx->record_type) == NULL)
1939 ctx->record_type = ctx->receiver_decl = NULL;
1940 }
1941
1942 /* Scan an OpenMP task directive. */
1943
1944 static void
1945 scan_omp_task (gimple_stmt_iterator *gsi, omp_context *outer_ctx)
1946 {
1947 omp_context *ctx;
1948 tree name, t;
1949 gomp_task *stmt = as_a <gomp_task *> (gsi_stmt (*gsi));
1950
1951 /* Ignore task directives with empty bodies, unless they have a
1952 depend clause. */
1953 if (optimize > 0
1954 && gimple_omp_body (stmt)
1955 && empty_body_p (gimple_omp_body (stmt))
1956 && !omp_find_clause (gimple_omp_task_clauses (stmt), OMP_CLAUSE_DEPEND))
1957 {
1958 gsi_replace (gsi, gimple_build_nop (), false);
1959 return;
1960 }
1961
1962 if (gimple_omp_task_taskloop_p (stmt))
1963 add_taskreg_looptemp_clauses (GF_OMP_FOR_KIND_TASKLOOP, stmt, outer_ctx);
1964
1965 ctx = new_omp_context (stmt, outer_ctx);
1966
1967 if (gimple_omp_task_taskwait_p (stmt))
1968 {
1969 scan_sharing_clauses (gimple_omp_task_clauses (stmt), ctx);
1970 return;
1971 }
1972
1973 taskreg_contexts.safe_push (ctx);
1974 if (taskreg_nesting_level > 1)
1975 ctx->is_nested = true;
1976 ctx->field_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
1977 ctx->record_type = lang_hooks.types.make_type (RECORD_TYPE);
1978 name = create_tmp_var_name (".omp_data_s");
1979 name = build_decl (gimple_location (stmt),
1980 TYPE_DECL, name, ctx->record_type);
1981 DECL_ARTIFICIAL (name) = 1;
1982 DECL_NAMELESS (name) = 1;
1983 TYPE_NAME (ctx->record_type) = name;
1984 TYPE_ARTIFICIAL (ctx->record_type) = 1;
1985 create_omp_child_function (ctx, false);
1986 gimple_omp_task_set_child_fn (stmt, ctx->cb.dst_fn);
1987
1988 scan_sharing_clauses (gimple_omp_task_clauses (stmt), ctx);
1989
1990 if (ctx->srecord_type)
1991 {
1992 name = create_tmp_var_name (".omp_data_a");
1993 name = build_decl (gimple_location (stmt),
1994 TYPE_DECL, name, ctx->srecord_type);
1995 DECL_ARTIFICIAL (name) = 1;
1996 DECL_NAMELESS (name) = 1;
1997 TYPE_NAME (ctx->srecord_type) = name;
1998 TYPE_ARTIFICIAL (ctx->srecord_type) = 1;
1999 create_omp_child_function (ctx, true);
2000 }
2001
2002 scan_omp (gimple_omp_body_ptr (stmt), ctx);
2003
2004 if (TYPE_FIELDS (ctx->record_type) == NULL)
2005 {
2006 ctx->record_type = ctx->receiver_decl = NULL;
2007 t = build_int_cst (long_integer_type_node, 0);
2008 gimple_omp_task_set_arg_size (stmt, t);
2009 t = build_int_cst (long_integer_type_node, 1);
2010 gimple_omp_task_set_arg_align (stmt, t);
2011 }
2012 }
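/* E.g. (a sketch): with optimization enabled, an empty task such as

     #pragma omp task
     ;

   is replaced by a GIMPLE_NOP above, and a task whose record type ends
   up with no fields gets arg_size 0 and arg_align 1, so the runtime
   does not allocate or copy any data block for it.  */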
2013
2014 /* Helper function for finish_taskreg_scan, called through walk_tree.
2015 If maybe_lookup_decl_in_outer_ctx returns a different tree for
2016 some variable, replace it in the expression. */
2017
2018 static tree
2019 finish_taskreg_remap (tree *tp, int *walk_subtrees, void *data)
2020 {
2021 if (VAR_P (*tp))
2022 {
2023 omp_context *ctx = (omp_context *) data;
2024 tree t = maybe_lookup_decl_in_outer_ctx (*tp, ctx);
2025 if (t != *tp)
2026 {
2027 if (DECL_HAS_VALUE_EXPR_P (t))
2028 t = unshare_expr (DECL_VALUE_EXPR (t));
2029 *tp = t;
2030 }
2031 *walk_subtrees = 0;
2032 }
2033 else if (IS_TYPE_OR_DECL_P (*tp))
2034 *walk_subtrees = 0;
2035 return NULL_TREE;
2036 }
2037
2038 /* If any decls have been made addressable during scan_omp,
2039 adjust their fields if needed, and lay out the record types
2040 of parallel/task constructs. */
2041
2042 static void
2043 finish_taskreg_scan (omp_context *ctx)
2044 {
2045 if (ctx->record_type == NULL_TREE)
2046 return;
2047
2048 /* If any task_shared_vars were needed, check all
2049 OMP_CLAUSE_SHARED clauses on GIMPLE_OMP_{PARALLEL,TASK,TEAMS}
2050 statements to see whether use_pointer_for_field has changed
2051 because of that. If it has, update the field types now. */
2052 if (task_shared_vars)
2053 {
2054 tree c;
2055
2056 for (c = gimple_omp_taskreg_clauses (ctx->stmt);
2057 c; c = OMP_CLAUSE_CHAIN (c))
2058 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED
2059 && !OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
2060 {
2061 tree decl = OMP_CLAUSE_DECL (c);
2062
2063 /* Global variables don't need to be copied,
2064 the receiver side will use them directly. */
2065 if (is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx)))
2066 continue;
2067 if (!bitmap_bit_p (task_shared_vars, DECL_UID (decl))
2068 || !use_pointer_for_field (decl, ctx))
2069 continue;
2070 tree field = lookup_field (decl, ctx);
2071 if (TREE_CODE (TREE_TYPE (field)) == POINTER_TYPE
2072 && TREE_TYPE (TREE_TYPE (field)) == TREE_TYPE (decl))
2073 continue;
2074 TREE_TYPE (field) = build_pointer_type (TREE_TYPE (decl));
2075 TREE_THIS_VOLATILE (field) = 0;
2076 DECL_USER_ALIGN (field) = 0;
2077 SET_DECL_ALIGN (field, TYPE_ALIGN (TREE_TYPE (field)));
2078 if (TYPE_ALIGN (ctx->record_type) < DECL_ALIGN (field))
2079 SET_TYPE_ALIGN (ctx->record_type, DECL_ALIGN (field));
2080 if (ctx->srecord_type)
2081 {
2082 tree sfield = lookup_sfield (decl, ctx);
2083 TREE_TYPE (sfield) = TREE_TYPE (field);
2084 TREE_THIS_VOLATILE (sfield) = 0;
2085 DECL_USER_ALIGN (sfield) = 0;
2086 SET_DECL_ALIGN (sfield, DECL_ALIGN (field));
2087 if (TYPE_ALIGN (ctx->srecord_type) < DECL_ALIGN (sfield))
2088 SET_TYPE_ALIGN (ctx->srecord_type, DECL_ALIGN (sfield));
2089 }
2090 }
2091 }
2092
2093 if (gimple_code (ctx->stmt) == GIMPLE_OMP_PARALLEL)
2094 {
2095 tree clauses = gimple_omp_parallel_clauses (ctx->stmt);
2096 tree c = omp_find_clause (clauses, OMP_CLAUSE__REDUCTEMP_);
2097 if (c)
2098 {
2099 /* Move the _reductemp_ clause first. GOMP_parallel_reductions
2100 expects to find it at the start of data. */
2101 tree f = lookup_field (OMP_CLAUSE_DECL (c), ctx);
2102 tree *p = &TYPE_FIELDS (ctx->record_type);
2103 while (*p)
2104 if (*p == f)
2105 {
2106 *p = DECL_CHAIN (*p);
2107 break;
2108 }
2109 else
2110 p = &DECL_CHAIN (*p);
2111 DECL_CHAIN (f) = TYPE_FIELDS (ctx->record_type);
2112 TYPE_FIELDS (ctx->record_type) = f;
2113 }
2114 layout_type (ctx->record_type);
2115 fixup_child_record_type (ctx);
2116 }
2117 else if (gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS)
2118 {
2119 layout_type (ctx->record_type);
2120 fixup_child_record_type (ctx);
2121 }
2122 else
2123 {
2124 location_t loc = gimple_location (ctx->stmt);
2125 tree *p, vla_fields = NULL_TREE, *q = &vla_fields;
2126 /* Move VLA fields to the end. */
2127 p = &TYPE_FIELDS (ctx->record_type);
2128 while (*p)
2129 if (!TYPE_SIZE_UNIT (TREE_TYPE (*p))
2130 || ! TREE_CONSTANT (TYPE_SIZE_UNIT (TREE_TYPE (*p))))
2131 {
2132 *q = *p;
2133 *p = TREE_CHAIN (*p);
2134 TREE_CHAIN (*q) = NULL_TREE;
2135 q = &TREE_CHAIN (*q);
2136 }
2137 else
2138 p = &DECL_CHAIN (*p);
2139 *p = vla_fields;
2140 if (gimple_omp_task_taskloop_p (ctx->stmt))
2141 {
2142 /* Move the fields corresponding to the first and second _looptemp_
2143 clauses first. These are filled in by GOMP_taskloop
2144 and thus need to be at specific positions. */
2145 tree clauses = gimple_omp_task_clauses (ctx->stmt);
2146 tree c1 = omp_find_clause (clauses, OMP_CLAUSE__LOOPTEMP_);
2147 tree c2 = omp_find_clause (OMP_CLAUSE_CHAIN (c1),
2148 OMP_CLAUSE__LOOPTEMP_);
2149 tree c3 = omp_find_clause (clauses, OMP_CLAUSE__REDUCTEMP_);
2150 tree f1 = lookup_field (OMP_CLAUSE_DECL (c1), ctx);
2151 tree f2 = lookup_field (OMP_CLAUSE_DECL (c2), ctx);
2152 tree f3 = c3 ? lookup_field (OMP_CLAUSE_DECL (c3), ctx) : NULL_TREE;
2153 p = &TYPE_FIELDS (ctx->record_type);
2154 while (*p)
2155 if (*p == f1 || *p == f2 || *p == f3)
2156 *p = DECL_CHAIN (*p);
2157 else
2158 p = &DECL_CHAIN (*p);
2159 DECL_CHAIN (f1) = f2;
2160 if (c3)
2161 {
2162 DECL_CHAIN (f2) = f3;
2163 DECL_CHAIN (f3) = TYPE_FIELDS (ctx->record_type);
2164 }
2165 else
2166 DECL_CHAIN (f2) = TYPE_FIELDS (ctx->record_type);
2167 TYPE_FIELDS (ctx->record_type) = f1;
2168 if (ctx->srecord_type)
2169 {
2170 f1 = lookup_sfield (OMP_CLAUSE_DECL (c1), ctx);
2171 f2 = lookup_sfield (OMP_CLAUSE_DECL (c2), ctx);
2172 if (c3)
2173 f3 = lookup_sfield (OMP_CLAUSE_DECL (c3), ctx);
2174 p = &TYPE_FIELDS (ctx->srecord_type);
2175 while (*p)
2176 if (*p == f1 || *p == f2 || *p == f3)
2177 *p = DECL_CHAIN (*p);
2178 else
2179 p = &DECL_CHAIN (*p);
2180 DECL_CHAIN (f1) = f2;
2181 DECL_CHAIN (f2) = TYPE_FIELDS (ctx->srecord_type);
2182 if (c3)
2183 {
2184 DECL_CHAIN (f2) = f3;
2185 DECL_CHAIN (f3) = TYPE_FIELDS (ctx->srecord_type);
2186 }
2187 else
2188 DECL_CHAIN (f2) = TYPE_FIELDS (ctx->srecord_type);
2189 TYPE_FIELDS (ctx->srecord_type) = f1;
2190 }
2191 }
2192 layout_type (ctx->record_type);
2193 fixup_child_record_type (ctx);
2194 if (ctx->srecord_type)
2195 layout_type (ctx->srecord_type);
2196 tree t = fold_convert_loc (loc, long_integer_type_node,
2197 TYPE_SIZE_UNIT (ctx->record_type));
2198 if (TREE_CODE (t) != INTEGER_CST)
2199 {
2200 t = unshare_expr (t);
2201 walk_tree (&t, finish_taskreg_remap, ctx, NULL);
2202 }
2203 gimple_omp_task_set_arg_size (ctx->stmt, t);
2204 t = build_int_cst (long_integer_type_node,
2205 TYPE_ALIGN_UNIT (ctx->record_type));
2206 gimple_omp_task_set_arg_align (ctx->stmt, t);
2207 }
2208 }
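/* Resulting layout for the taskloop case above (an illustrative
   sketch, field names invented):

     struct .omp_data_s { start; end; [reductemp;] ...; VLA fields };

   GOMP_taskloop fills in the leading _looptemp_ (and _reductemp_)
   fields, so their positions are part of the interface with libgomp,
   while variable-sized fields must come last so that everything else
   keeps a constant offset.  */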
2209
2210 /* Find the enclosing offload context. */
2211
2212 static omp_context *
2213 enclosing_target_ctx (omp_context *ctx)
2214 {
2215 for (; ctx; ctx = ctx->outer)
2216 if (gimple_code (ctx->stmt) == GIMPLE_OMP_TARGET)
2217 break;
2218
2219 return ctx;
2220 }
2221
2222 /* Return true if CTX is part of an OpenACC kernels region. */
2223
2224 static bool
2225 ctx_in_oacc_kernels_region (omp_context *ctx)
2226 {
2227 for (; ctx != NULL; ctx = ctx->outer)
2228 {
2229 gimple *stmt = ctx->stmt;
2230 if (gimple_code (stmt) == GIMPLE_OMP_TARGET
2231 && gimple_omp_target_kind (stmt) == GF_OMP_TARGET_KIND_OACC_KERNELS)
2232 return true;
2233 }
2234
2235 return false;
2236 }
2237
2238 /* Check the parallelism clauses inside a kernels region.
2239 Until kernels handling moves to use the same loop indirection
2240 scheme as parallel, we need to do this checking early. */
2241
2242 static unsigned
2243 check_oacc_kernel_gwv (gomp_for *stmt, omp_context *ctx)
2244 {
2245 bool checking = true;
2246 unsigned outer_mask = 0;
2247 unsigned this_mask = 0;
2248 bool has_seq = false, has_auto = false;
2249
2250 if (ctx->outer)
2251 outer_mask = check_oacc_kernel_gwv (NULL, ctx->outer);
2252 if (!stmt)
2253 {
2254 checking = false;
2255 if (gimple_code (ctx->stmt) != GIMPLE_OMP_FOR)
2256 return outer_mask;
2257 stmt = as_a <gomp_for *> (ctx->stmt);
2258 }
2259
2260 for (tree c = gimple_omp_for_clauses (stmt); c; c = OMP_CLAUSE_CHAIN (c))
2261 {
2262 switch (OMP_CLAUSE_CODE (c))
2263 {
2264 case OMP_CLAUSE_GANG:
2265 this_mask |= GOMP_DIM_MASK (GOMP_DIM_GANG);
2266 break;
2267 case OMP_CLAUSE_WORKER:
2268 this_mask |= GOMP_DIM_MASK (GOMP_DIM_WORKER);
2269 break;
2270 case OMP_CLAUSE_VECTOR:
2271 this_mask |= GOMP_DIM_MASK (GOMP_DIM_VECTOR);
2272 break;
2273 case OMP_CLAUSE_SEQ:
2274 has_seq = true;
2275 break;
2276 case OMP_CLAUSE_AUTO:
2277 has_auto = true;
2278 break;
2279 default:
2280 break;
2281 }
2282 }
2283
2284 if (checking)
2285 {
2286 if (has_seq && (this_mask || has_auto))
2287 error_at (gimple_location (stmt), "%<seq%> overrides other"
2288 " OpenACC loop specifiers");
2289 else if (has_auto && this_mask)
2290 error_at (gimple_location (stmt), "%<auto%> conflicts with other"
2291 " OpenACC loop specifiers");
2292
2293 if (this_mask & outer_mask)
2294 error_at (gimple_location (stmt), "inner loop uses same"
2295 " OpenACC parallelism as containing loop");
2296 }
2297
2298 return outer_mask | this_mask;
2299 }
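/* For example (a sketch): inside an OpenACC kernels region,

     #pragma acc loop gang
     for (...)
       #pragma acc loop gang
       for (...)

   is diagnosed above, because the inner loop's GOMP_DIM_GANG bit is
   already set in OUTER_MASK.  */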
2300
2301 /* Scan a GIMPLE_OMP_FOR. */
2302
2303 static omp_context *
2304 scan_omp_for (gomp_for *stmt, omp_context *outer_ctx)
2305 {
2306 omp_context *ctx;
2307 size_t i;
2308 tree clauses = gimple_omp_for_clauses (stmt);
2309
2310 ctx = new_omp_context (stmt, outer_ctx);
2311
2312 if (is_gimple_omp_oacc (stmt))
2313 {
2314 omp_context *tgt = enclosing_target_ctx (outer_ctx);
2315
2316 if (!tgt || is_oacc_parallel (tgt))
2317 for (tree c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
2318 {
2319 char const *check = NULL;
2320
2321 switch (OMP_CLAUSE_CODE (c))
2322 {
2323 case OMP_CLAUSE_GANG:
2324 check = "gang";
2325 break;
2326
2327 case OMP_CLAUSE_WORKER:
2328 check = "worker";
2329 break;
2330
2331 case OMP_CLAUSE_VECTOR:
2332 check = "vector";
2333 break;
2334
2335 default:
2336 break;
2337 }
2338
2339 if (check && OMP_CLAUSE_OPERAND (c, 0))
2340 error_at (gimple_location (stmt),
2341 "argument not permitted on %qs clause in"
2342 " OpenACC %<parallel%>", check);
2343 }
2344
2345 if (tgt && is_oacc_kernels (tgt))
2346 {
2347 /* Strip out reductions, as they are not handled yet. */
2348 tree *prev_ptr = &clauses;
2349
2350 while (tree probe = *prev_ptr)
2351 {
2352 tree *next_ptr = &OMP_CLAUSE_CHAIN (probe);
2353
2354 if (OMP_CLAUSE_CODE (probe) == OMP_CLAUSE_REDUCTION)
2355 *prev_ptr = *next_ptr;
2356 else
2357 prev_ptr = next_ptr;
2358 }
2359
2360 gimple_omp_for_set_clauses (stmt, clauses);
2361 check_oacc_kernel_gwv (stmt, ctx);
2362 }
2363 }
2364
2365 scan_sharing_clauses (clauses, ctx);
2366
2367 scan_omp (gimple_omp_for_pre_body_ptr (stmt), ctx);
2368 for (i = 0; i < gimple_omp_for_collapse (stmt); i++)
2369 {
2370 scan_omp_op (gimple_omp_for_index_ptr (stmt, i), ctx);
2371 scan_omp_op (gimple_omp_for_initial_ptr (stmt, i), ctx);
2372 scan_omp_op (gimple_omp_for_final_ptr (stmt, i), ctx);
2373 scan_omp_op (gimple_omp_for_incr_ptr (stmt, i), ctx);
2374 }
2375 scan_omp (gimple_omp_body_ptr (stmt), ctx);
2376 return ctx;
2377 }
2378
2379 /* Duplicate #pragma omp simd, creating one copy for SIMT and another for SIMD. */
2380
2381 static void
2382 scan_omp_simd (gimple_stmt_iterator *gsi, gomp_for *stmt,
2383 omp_context *outer_ctx)
2384 {
2385 gbind *bind = gimple_build_bind (NULL, NULL, NULL);
2386 gsi_replace (gsi, bind, false);
2387 gimple_seq seq = NULL;
2388 gimple *g = gimple_build_call_internal (IFN_GOMP_USE_SIMT, 0);
2389 tree cond = create_tmp_var_raw (integer_type_node);
2390 DECL_CONTEXT (cond) = current_function_decl;
2391 DECL_SEEN_IN_BIND_EXPR_P (cond) = 1;
2392 gimple_bind_set_vars (bind, cond);
2393 gimple_call_set_lhs (g, cond);
2394 gimple_seq_add_stmt (&seq, g);
2395 tree lab1 = create_artificial_label (UNKNOWN_LOCATION);
2396 tree lab2 = create_artificial_label (UNKNOWN_LOCATION);
2397 tree lab3 = create_artificial_label (UNKNOWN_LOCATION);
2398 g = gimple_build_cond (NE_EXPR, cond, integer_zero_node, lab1, lab2);
2399 gimple_seq_add_stmt (&seq, g);
2400 g = gimple_build_label (lab1);
2401 gimple_seq_add_stmt (&seq, g);
2402 gimple_seq new_seq = copy_gimple_seq_and_replace_locals (stmt);
2403 gomp_for *new_stmt = as_a <gomp_for *> (new_seq);
2404 tree clause = build_omp_clause (gimple_location (stmt), OMP_CLAUSE__SIMT_);
2405 OMP_CLAUSE_CHAIN (clause) = gimple_omp_for_clauses (new_stmt);
2406 gimple_omp_for_set_clauses (new_stmt, clause);
2407 gimple_seq_add_stmt (&seq, new_stmt);
2408 g = gimple_build_goto (lab3);
2409 gimple_seq_add_stmt (&seq, g);
2410 g = gimple_build_label (lab2);
2411 gimple_seq_add_stmt (&seq, g);
2412 gimple_seq_add_stmt (&seq, stmt);
2413 g = gimple_build_label (lab3);
2414 gimple_seq_add_stmt (&seq, g);
2415 gimple_bind_set_body (bind, seq);
2416 update_stmt (bind);
2417 scan_omp_for (new_stmt, outer_ctx);
2418 scan_omp_for (stmt, outer_ctx)->simt_stmt = new_stmt;
2419 }
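/* The bind built above has roughly this shape (a GIMPLE sketch):

     cond = IFN_GOMP_USE_SIMT ();
     if (cond != 0) goto lab1; else goto lab2;
     lab1: <copy of the loop with an added _simt_ clause>
           goto lab3;
     lab2: <original simd loop>
     lab3:

   so the SIMT variant is selected at run time on SIMT offload targets
   and the plain SIMD variant is used otherwise.  */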
2420
2421 /* Scan an OpenMP sections directive. */
2422
2423 static void
2424 scan_omp_sections (gomp_sections *stmt, omp_context *outer_ctx)
2425 {
2426 omp_context *ctx;
2427
2428 ctx = new_omp_context (stmt, outer_ctx);
2429 scan_sharing_clauses (gimple_omp_sections_clauses (stmt), ctx);
2430 scan_omp (gimple_omp_body_ptr (stmt), ctx);
2431 }
2432
2433 /* Scan an OpenMP single directive. */
2434
2435 static void
2436 scan_omp_single (gomp_single *stmt, omp_context *outer_ctx)
2437 {
2438 omp_context *ctx;
2439 tree name;
2440
2441 ctx = new_omp_context (stmt, outer_ctx);
2442 ctx->field_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
2443 ctx->record_type = lang_hooks.types.make_type (RECORD_TYPE);
2444 name = create_tmp_var_name (".omp_copy_s");
2445 name = build_decl (gimple_location (stmt),
2446 TYPE_DECL, name, ctx->record_type);
2447 TYPE_NAME (ctx->record_type) = name;
2448
2449 scan_sharing_clauses (gimple_omp_single_clauses (stmt), ctx);
2450 scan_omp (gimple_omp_body_ptr (stmt), ctx);
2451
2452 if (TYPE_FIELDS (ctx->record_type) == NULL)
2453 ctx->record_type = NULL;
2454 else
2455 layout_type (ctx->record_type);
2456 }
2457
2458 /* Scan a GIMPLE_OMP_TARGET. */
2459
2460 static void
2461 scan_omp_target (gomp_target *stmt, omp_context *outer_ctx)
2462 {
2463 omp_context *ctx;
2464 tree name;
2465 bool offloaded = is_gimple_omp_offloaded (stmt);
2466 tree clauses = gimple_omp_target_clauses (stmt);
2467
2468 ctx = new_omp_context (stmt, outer_ctx);
2469 ctx->field_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
2470 ctx->record_type = lang_hooks.types.make_type (RECORD_TYPE);
2471 name = create_tmp_var_name (".omp_data_t");
2472 name = build_decl (gimple_location (stmt),
2473 TYPE_DECL, name, ctx->record_type);
2474 DECL_ARTIFICIAL (name) = 1;
2475 DECL_NAMELESS (name) = 1;
2476 TYPE_NAME (ctx->record_type) = name;
2477 TYPE_ARTIFICIAL (ctx->record_type) = 1;
2478
2479 if (offloaded)
2480 {
2481 create_omp_child_function (ctx, false);
2482 gimple_omp_target_set_child_fn (stmt, ctx->cb.dst_fn);
2483 }
2484
2485 scan_sharing_clauses (clauses, ctx);
2486 scan_omp (gimple_omp_body_ptr (stmt), ctx);
2487
2488 if (TYPE_FIELDS (ctx->record_type) == NULL)
2489 ctx->record_type = ctx->receiver_decl = NULL;
2490 else
2491 {
2492 TYPE_FIELDS (ctx->record_type)
2493 = nreverse (TYPE_FIELDS (ctx->record_type));
2494 if (flag_checking)
2495 {
2496 unsigned int align = DECL_ALIGN (TYPE_FIELDS (ctx->record_type));
2497 for (tree field = TYPE_FIELDS (ctx->record_type);
2498 field;
2499 field = DECL_CHAIN (field))
2500 gcc_assert (DECL_ALIGN (field) == align);
2501 }
2502 layout_type (ctx->record_type);
2503 if (offloaded)
2504 fixup_child_record_type (ctx);
2505 }
2506 }
2507
2508 /* Scan an OpenMP teams directive. */
2509
2510 static void
2511 scan_omp_teams (gomp_teams *stmt, omp_context *outer_ctx)
2512 {
2513 omp_context *ctx = new_omp_context (stmt, outer_ctx);
2514
2515 if (!gimple_omp_teams_host (stmt))
2516 {
2517 scan_sharing_clauses (gimple_omp_teams_clauses (stmt), ctx);
2518 scan_omp (gimple_omp_body_ptr (stmt), ctx);
2519 return;
2520 }
2521 taskreg_contexts.safe_push (ctx);
2522 gcc_assert (taskreg_nesting_level == 1);
2523 ctx->field_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
2524 ctx->record_type = lang_hooks.types.make_type (RECORD_TYPE);
2525 tree name = create_tmp_var_name (".omp_data_s");
2526 name = build_decl (gimple_location (stmt),
2527 TYPE_DECL, name, ctx->record_type);
2528 DECL_ARTIFICIAL (name) = 1;
2529 DECL_NAMELESS (name) = 1;
2530 TYPE_NAME (ctx->record_type) = name;
2531 TYPE_ARTIFICIAL (ctx->record_type) = 1;
2532 create_omp_child_function (ctx, false);
2533 gimple_omp_teams_set_child_fn (stmt, ctx->cb.dst_fn);
2534
2535 scan_sharing_clauses (gimple_omp_teams_clauses (stmt), ctx);
2536 scan_omp (gimple_omp_body_ptr (stmt), ctx);
2537
2538 if (TYPE_FIELDS (ctx->record_type) == NULL)
2539 ctx->record_type = ctx->receiver_decl = NULL;
2540 }
2541
2542 /* Check nesting restrictions. */
2543 static bool
2544 check_omp_nesting_restrictions (gimple *stmt, omp_context *ctx)
2545 {
2546 tree c;
2547
2548 if (ctx && gimple_code (ctx->stmt) == GIMPLE_OMP_GRID_BODY)
2549 /* GRID_BODY is an artificial construct; nesting rules will be checked in
2550 the original copy of its contents. */
2551 return true;
2552
2553 /* No nesting of non-OpenACC STMT (that is, an OpenMP one, or a GOMP builtin)
2554 inside an OpenACC CTX. */
2555 if (!(is_gimple_omp (stmt)
2556 && is_gimple_omp_oacc (stmt))
2557 /* Except for atomic codes that we share with OpenMP. */
2558 && !(gimple_code (stmt) == GIMPLE_OMP_ATOMIC_LOAD
2559 || gimple_code (stmt) == GIMPLE_OMP_ATOMIC_STORE))
2560 {
2561 if (oacc_get_fn_attrib (cfun->decl) != NULL)
2562 {
2563 error_at (gimple_location (stmt),
2564 "non-OpenACC construct inside of OpenACC routine");
2565 return false;
2566 }
2567 else
2568 for (omp_context *octx = ctx; octx != NULL; octx = octx->outer)
2569 if (is_gimple_omp (octx->stmt)
2570 && is_gimple_omp_oacc (octx->stmt))
2571 {
2572 error_at (gimple_location (stmt),
2573 "non-OpenACC construct inside of OpenACC region");
2574 return false;
2575 }
2576 }
2577
2578 if (ctx != NULL)
2579 {
2580 if (gimple_code (ctx->stmt) == GIMPLE_OMP_SCAN
2581 && ctx->outer
2582 && gimple_code (ctx->outer->stmt) == GIMPLE_OMP_FOR)
2583 ctx = ctx->outer;
2584 if (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
2585 && gimple_omp_for_kind (ctx->stmt) & GF_OMP_FOR_SIMD)
2586 {
2587 c = NULL_TREE;
2588 if (gimple_code (stmt) == GIMPLE_OMP_ORDERED)
2589 {
2590 c = gimple_omp_ordered_clauses (as_a <gomp_ordered *> (stmt));
2591 if (omp_find_clause (c, OMP_CLAUSE_SIMD))
2592 {
2593 if (omp_find_clause (c, OMP_CLAUSE_THREADS)
2594 && (ctx->outer == NULL
2595 || !gimple_omp_for_combined_into_p (ctx->stmt)
2596 || gimple_code (ctx->outer->stmt) != GIMPLE_OMP_FOR
2597 || (gimple_omp_for_kind (ctx->outer->stmt)
2598 != GF_OMP_FOR_KIND_FOR)
2599 || !gimple_omp_for_combined_p (ctx->outer->stmt)))
2600 {
2601 error_at (gimple_location (stmt),
2602 "%<ordered simd threads%> must be closely "
2603 "nested inside of %<for simd%> region");
2604 return false;
2605 }
2606 return true;
2607 }
2608 }
2609 else if (gimple_code (stmt) == GIMPLE_OMP_ATOMIC_LOAD
2610 || gimple_code (stmt) == GIMPLE_OMP_ATOMIC_STORE
2611 || gimple_code (stmt) == GIMPLE_OMP_SCAN)
2612 return true;
2613 error_at (gimple_location (stmt),
2614 "OpenMP constructs other than %<#pragma omp ordered simd%>"
2615 " or %<#pragma omp atomic%> may not be nested inside"
2616 " %<simd%> region");
2617 return false;
2618 }
2619 else if (gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS)
2620 {
2621 if ((gimple_code (stmt) != GIMPLE_OMP_FOR
2622 || ((gimple_omp_for_kind (stmt) != GF_OMP_FOR_KIND_DISTRIBUTE)
2623 && (gimple_omp_for_kind (stmt) != GF_OMP_FOR_KIND_GRID_LOOP)))
2624 && gimple_code (stmt) != GIMPLE_OMP_PARALLEL)
2625 {
2626 error_at (gimple_location (stmt),
2627 "only %<distribute%> or %<parallel%> regions are "
2628 "allowed to be strictly nested inside %<teams%> "
2629 "region");
2630 return false;
2631 }
2632 }
2633 }
2634 switch (gimple_code (stmt))
2635 {
2636 case GIMPLE_OMP_FOR:
2637 if (gimple_omp_for_kind (stmt) & GF_OMP_FOR_SIMD)
2638 return true;
2639 if (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_DISTRIBUTE)
2640 {
2641 if (ctx != NULL && gimple_code (ctx->stmt) != GIMPLE_OMP_TEAMS)
2642 {
2643 error_at (gimple_location (stmt),
2644 "%<distribute%> region must be strictly nested "
2645 "inside %<teams%> construct");
2646 return false;
2647 }
2648 return true;
2649 }
2650 /* We split a taskloop into a task with a nested taskloop inside it. */
2651 if (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_TASKLOOP)
2652 return true;
2653 if (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_OACC_LOOP)
2654 {
2655 bool ok = false;
2656
2657 if (ctx)
2658 switch (gimple_code (ctx->stmt))
2659 {
2660 case GIMPLE_OMP_FOR:
2661 ok = (gimple_omp_for_kind (ctx->stmt)
2662 == GF_OMP_FOR_KIND_OACC_LOOP);
2663 break;
2664
2665 case GIMPLE_OMP_TARGET:
2666 switch (gimple_omp_target_kind (ctx->stmt))
2667 {
2668 case GF_OMP_TARGET_KIND_OACC_PARALLEL:
2669 case GF_OMP_TARGET_KIND_OACC_KERNELS:
2670 ok = true;
2671 break;
2672
2673 default:
2674 break;
2675 }
2676
2677 default:
2678 break;
2679 }
2680 else if (oacc_get_fn_attrib (current_function_decl))
2681 ok = true;
2682 if (!ok)
2683 {
2684 error_at (gimple_location (stmt),
2685 "OpenACC loop directive must be associated with"
2686 " an OpenACC compute region");
2687 return false;
2688 }
2689 }
2690 /* FALLTHRU */
2691 case GIMPLE_CALL:
2692 if (is_gimple_call (stmt)
2693 && (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
2694 == BUILT_IN_GOMP_CANCEL
2695 || DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
2696 == BUILT_IN_GOMP_CANCELLATION_POINT))
2697 {
2698 const char *bad = NULL;
2699 const char *kind = NULL;
2700 const char *construct
2701 = (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
2702 == BUILT_IN_GOMP_CANCEL)
2703 ? "#pragma omp cancel"
2704 : "#pragma omp cancellation point";
2705 if (ctx == NULL)
2706 {
2707 error_at (gimple_location (stmt), "orphaned %qs construct",
2708 construct);
2709 return false;
2710 }
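/* The first call argument selects which construct is cancelled; the
   values checked below are intended to match the GOMP_CANCEL_* kinds
   from gomp-constants.h (1 parallel, 2 for, 4 sections,
   8 taskgroup).  */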
2711 switch (tree_fits_shwi_p (gimple_call_arg (stmt, 0))
2712 ? tree_to_shwi (gimple_call_arg (stmt, 0))
2713 : 0)
2714 {
2715 case 1:
2716 if (gimple_code (ctx->stmt) != GIMPLE_OMP_PARALLEL)
2717 bad = "#pragma omp parallel";
2718 else if (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
2719 == BUILT_IN_GOMP_CANCEL
2720 && !integer_zerop (gimple_call_arg (stmt, 1)))
2721 ctx->cancellable = true;
2722 kind = "parallel";
2723 break;
2724 case 2:
2725 if (gimple_code (ctx->stmt) != GIMPLE_OMP_FOR
2726 || gimple_omp_for_kind (ctx->stmt) != GF_OMP_FOR_KIND_FOR)
2727 bad = "#pragma omp for";
2728 else if (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
2729 == BUILT_IN_GOMP_CANCEL
2730 && !integer_zerop (gimple_call_arg (stmt, 1)))
2731 {
2732 ctx->cancellable = true;
2733 if (omp_find_clause (gimple_omp_for_clauses (ctx->stmt),
2734 OMP_CLAUSE_NOWAIT))
2735 warning_at (gimple_location (stmt), 0,
2736 "%<#pragma omp cancel for%> inside "
2737 "%<nowait%> for construct");
2738 if (omp_find_clause (gimple_omp_for_clauses (ctx->stmt),
2739 OMP_CLAUSE_ORDERED))
2740 warning_at (gimple_location (stmt), 0,
2741 "%<#pragma omp cancel for%> inside "
2742 "%<ordered%> for construct");
2743 }
2744 kind = "for";
2745 break;
2746 case 4:
2747 if (gimple_code (ctx->stmt) != GIMPLE_OMP_SECTIONS
2748 && gimple_code (ctx->stmt) != GIMPLE_OMP_SECTION)
2749 bad = "#pragma omp sections";
2750 else if (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
2751 == BUILT_IN_GOMP_CANCEL
2752 && !integer_zerop (gimple_call_arg (stmt, 1)))
2753 {
2754 if (gimple_code (ctx->stmt) == GIMPLE_OMP_SECTIONS)
2755 {
2756 ctx->cancellable = true;
2757 if (omp_find_clause (gimple_omp_sections_clauses
2758 (ctx->stmt),
2759 OMP_CLAUSE_NOWAIT))
2760 warning_at (gimple_location (stmt), 0,
2761 "%<#pragma omp cancel sections%> inside "
2762 "%<nowait%> sections construct");
2763 }
2764 else
2765 {
2766 gcc_assert (ctx->outer
2767 && gimple_code (ctx->outer->stmt)
2768 == GIMPLE_OMP_SECTIONS);
2769 ctx->outer->cancellable = true;
2770 if (omp_find_clause (gimple_omp_sections_clauses
2771 (ctx->outer->stmt),
2772 OMP_CLAUSE_NOWAIT))
2773 warning_at (gimple_location (stmt), 0,
2774 "%<#pragma omp cancel sections%> inside "
2775 "%<nowait%> sections construct");
2776 }
2777 }
2778 kind = "sections";
2779 break;
2780 case 8:
2781 if (!is_task_ctx (ctx)
2782 && (!is_taskloop_ctx (ctx)
2783 || ctx->outer == NULL
2784 || !is_task_ctx (ctx->outer)))
2785 bad = "#pragma omp task";
2786 else
2787 {
2788 for (omp_context *octx = ctx->outer;
2789 octx; octx = octx->outer)
2790 {
2791 switch (gimple_code (octx->stmt))
2792 {
2793 case GIMPLE_OMP_TASKGROUP:
2794 break;
2795 case GIMPLE_OMP_TARGET:
2796 if (gimple_omp_target_kind (octx->stmt)
2797 != GF_OMP_TARGET_KIND_REGION)
2798 continue;
2799 /* FALLTHRU */
2800 case GIMPLE_OMP_PARALLEL:
2801 case GIMPLE_OMP_TEAMS:
2802 error_at (gimple_location (stmt),
2803 "%<%s taskgroup%> construct not closely "
2804 "nested inside of %<taskgroup%> region",
2805 construct);
2806 return false;
2807 case GIMPLE_OMP_TASK:
2808 if (gimple_omp_task_taskloop_p (octx->stmt)
2809 && octx->outer
2810 && is_taskloop_ctx (octx->outer))
2811 {
2812 tree clauses
2813 = gimple_omp_for_clauses (octx->outer->stmt);
2814 if (!omp_find_clause (clauses, OMP_CLAUSE_NOGROUP))
2815 break;
2816 }
2817 continue;
2818 default:
2819 continue;
2820 }
2821 break;
2822 }
2823 ctx->cancellable = true;
2824 }
2825 kind = "taskgroup";
2826 break;
2827 default:
2828 error_at (gimple_location (stmt), "invalid arguments");
2829 return false;
2830 }
2831 if (bad)
2832 {
2833 error_at (gimple_location (stmt),
2834 "%<%s %s%> construct not closely nested inside of %qs",
2835 construct, kind, bad);
2836 return false;
2837 }
2838 }
2839 /* FALLTHRU */
2840 case GIMPLE_OMP_SECTIONS:
2841 case GIMPLE_OMP_SINGLE:
2842 for (; ctx != NULL; ctx = ctx->outer)
2843 switch (gimple_code (ctx->stmt))
2844 {
2845 case GIMPLE_OMP_FOR:
2846 if (gimple_omp_for_kind (ctx->stmt) != GF_OMP_FOR_KIND_FOR
2847 && gimple_omp_for_kind (ctx->stmt) != GF_OMP_FOR_KIND_TASKLOOP)
2848 break;
2849 /* FALLTHRU */
2850 case GIMPLE_OMP_SECTIONS:
2851 case GIMPLE_OMP_SINGLE:
2852 case GIMPLE_OMP_ORDERED:
2853 case GIMPLE_OMP_MASTER:
2854 case GIMPLE_OMP_TASK:
2855 case GIMPLE_OMP_CRITICAL:
2856 if (is_gimple_call (stmt))
2857 {
2858 if (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
2859 != BUILT_IN_GOMP_BARRIER)
2860 return true;
2861 error_at (gimple_location (stmt),
2862 "barrier region may not be closely nested inside "
2863 "of work-sharing, %<critical%>, %<ordered%>, "
2864 "%<master%>, explicit %<task%> or %<taskloop%> "
2865 "region");
2866 return false;
2867 }
2868 error_at (gimple_location (stmt),
2869 "work-sharing region may not be closely nested inside "
2870 "of work-sharing, %<critical%>, %<ordered%>, "
2871 "%<master%>, explicit %<task%> or %<taskloop%> region");
2872 return false;
2873 case GIMPLE_OMP_PARALLEL:
2874 case GIMPLE_OMP_TEAMS:
2875 return true;
2876 case GIMPLE_OMP_TARGET:
2877 if (gimple_omp_target_kind (ctx->stmt)
2878 == GF_OMP_TARGET_KIND_REGION)
2879 return true;
2880 break;
2881 default:
2882 break;
2883 }
2884 break;
2885 case GIMPLE_OMP_MASTER:
2886 for (; ctx != NULL; ctx = ctx->outer)
2887 switch (gimple_code (ctx->stmt))
2888 {
2889 case GIMPLE_OMP_FOR:
2890 if (gimple_omp_for_kind (ctx->stmt) != GF_OMP_FOR_KIND_FOR
2891 && gimple_omp_for_kind (ctx->stmt) != GF_OMP_FOR_KIND_TASKLOOP)
2892 break;
2893 /* FALLTHRU */
2894 case GIMPLE_OMP_SECTIONS:
2895 case GIMPLE_OMP_SINGLE:
2896 case GIMPLE_OMP_TASK:
2897 error_at (gimple_location (stmt),
2898 "%<master%> region may not be closely nested inside "
2899 "of work-sharing, explicit %<task%> or %<taskloop%> "
2900 "region");
2901 return false;
2902 case GIMPLE_OMP_PARALLEL:
2903 case GIMPLE_OMP_TEAMS:
2904 return true;
2905 case GIMPLE_OMP_TARGET:
2906 if (gimple_omp_target_kind (ctx->stmt)
2907 == GF_OMP_TARGET_KIND_REGION)
2908 return true;
2909 break;
2910 default:
2911 break;
2912 }
2913 break;
2914 case GIMPLE_OMP_TASK:
2915 for (c = gimple_omp_task_clauses (stmt); c; c = OMP_CLAUSE_CHAIN (c))
2916 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND
2917 && (OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SOURCE
2918 || OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SINK))
2919 {
2920 enum omp_clause_depend_kind kind = OMP_CLAUSE_DEPEND_KIND (c);
2921 error_at (OMP_CLAUSE_LOCATION (c),
2922 "%<depend(%s)%> is only allowed in %<omp ordered%>",
2923 kind == OMP_CLAUSE_DEPEND_SOURCE ? "source" : "sink");
2924 return false;
2925 }
2926 break;
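/* E.g. (a sketch): #pragma omp task depend(source) is rejected here;
   depend(source) and depend(sink : vec) are only meaningful on
   #pragma omp ordered inside a loop with an ordered(N) clause.  */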
2927 case GIMPLE_OMP_ORDERED:
2928 for (c = gimple_omp_ordered_clauses (as_a <gomp_ordered *> (stmt));
2929 c; c = OMP_CLAUSE_CHAIN (c))
2930 {
2931 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_DEPEND)
2932 {
2933 gcc_assert (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_THREADS
2934 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SIMD);
2935 continue;
2936 }
2937 enum omp_clause_depend_kind kind = OMP_CLAUSE_DEPEND_KIND (c);
2938 if (kind == OMP_CLAUSE_DEPEND_SOURCE
2939 || kind == OMP_CLAUSE_DEPEND_SINK)
2940 {
2941 tree oclause;
2942 /* Look for containing ordered(N) loop. */
2943 if (ctx == NULL
2944 || gimple_code (ctx->stmt) != GIMPLE_OMP_FOR
2945 || (oclause
2946 = omp_find_clause (gimple_omp_for_clauses (ctx->stmt),
2947 OMP_CLAUSE_ORDERED)) == NULL_TREE)
2948 {
2949 error_at (OMP_CLAUSE_LOCATION (c),
2950 "%<ordered%> construct with %<depend%> clause "
2951 "must be closely nested inside an %<ordered%> "
2952 "loop");
2953 return false;
2954 }
2955 else if (OMP_CLAUSE_ORDERED_EXPR (oclause) == NULL_TREE)
2956 {
2957 error_at (OMP_CLAUSE_LOCATION (c),
2958 "%<ordered%> construct with %<depend%> clause "
2959 "must be closely nested inside a loop with "
2960 "%<ordered%> clause with a parameter");
2961 return false;
2962 }
2963 }
2964 else
2965 {
2966 error_at (OMP_CLAUSE_LOCATION (c),
2967 "invalid depend kind in omp %<ordered%> %<depend%>");
2968 return false;
2969 }
2970 }
2971 c = gimple_omp_ordered_clauses (as_a <gomp_ordered *> (stmt));
2972 if (omp_find_clause (c, OMP_CLAUSE_SIMD))
2973 {
2974 /* An ordered simd must be closely nested inside a simd region,
2975 and a simd region must not contain constructs other than
2976 ordered simd; therefore an ordered simd is either orphaned,
2977 or ctx->stmt must be a simd. The latter case has already been
2978 handled earlier. */
2979 if (ctx != NULL)
2980 {
2981 error_at (gimple_location (stmt),
2982 "%<ordered%> %<simd%> must be closely nested inside "
2983 "%<simd%> region");
2984 return false;
2985 }
2986 }
2987 for (; ctx != NULL; ctx = ctx->outer)
2988 switch (gimple_code (ctx->stmt))
2989 {
2990 case GIMPLE_OMP_CRITICAL:
2991 case GIMPLE_OMP_TASK:
2992 case GIMPLE_OMP_ORDERED:
2993 ordered_in_taskloop:
2994 error_at (gimple_location (stmt),
2995 "%<ordered%> region may not be closely nested inside "
2996 "of %<critical%>, %<ordered%>, explicit %<task%> or "
2997 "%<taskloop%> region");
2998 return false;
2999 case GIMPLE_OMP_FOR:
3000 if (gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_TASKLOOP)
3001 goto ordered_in_taskloop;
3002 tree o;
3003 o = omp_find_clause (gimple_omp_for_clauses (ctx->stmt),
3004 OMP_CLAUSE_ORDERED);
3005 if (o == NULL)
3006 {
3007 error_at (gimple_location (stmt),
3008 "%<ordered%> region must be closely nested inside "
3009 "a loop region with an %<ordered%> clause");
3010 return false;
3011 }
3012 if (OMP_CLAUSE_ORDERED_EXPR (o) != NULL_TREE
3013 && omp_find_clause (c, OMP_CLAUSE_DEPEND) == NULL_TREE)
3014 {
3015 error_at (gimple_location (stmt),
3016 "%<ordered%> region without %<depend%> clause may "
3017 "not be closely nested inside a loop region with "
3018 "an %<ordered%> clause with a parameter");
3019 return false;
3020 }
3021 return true;
3022 case GIMPLE_OMP_TARGET:
3023 if (gimple_omp_target_kind (ctx->stmt)
3024 != GF_OMP_TARGET_KIND_REGION)
3025 break;
3026 /* FALLTHRU */
3027 case GIMPLE_OMP_PARALLEL:
3028 case GIMPLE_OMP_TEAMS:
3029 error_at (gimple_location (stmt),
3030 "%<ordered%> region must be closely nested inside "
3031 "a loop region with an %<ordered%> clause");
3032 return false;
3033 default:
3034 break;
3035 }
3036 break;
3037 case GIMPLE_OMP_CRITICAL:
3038 {
3039 tree this_stmt_name
3040 = gimple_omp_critical_name (as_a <gomp_critical *> (stmt));
3041 for (; ctx != NULL; ctx = ctx->outer)
3042 if (gomp_critical *other_crit
3043 = dyn_cast <gomp_critical *> (ctx->stmt))
3044 if (this_stmt_name == gimple_omp_critical_name (other_crit))
3045 {
3046 error_at (gimple_location (stmt),
3047 "%<critical%> region may not be nested inside "
3048 "a %<critical%> region with the same name");
3049 return false;
3050 }
3051 }
3052 break;
3053 case GIMPLE_OMP_TEAMS:
3054 if (ctx == NULL)
3055 break;
3056 else if (gimple_code (ctx->stmt) != GIMPLE_OMP_TARGET
3057 || (gimple_omp_target_kind (ctx->stmt)
3058 != GF_OMP_TARGET_KIND_REGION))
3059 {
3060 /* A teams construct can appear either strictly nested inside of
3061 a target construct with no intervening stmts, or can be encountered
3062 only by an initial task (so it must not appear inside any OpenMP
3063 construct). */
3064 error_at (gimple_location (stmt),
3065 "%<teams%> construct must be closely nested inside of "
3066 "%<target%> construct or not nested in any OpenMP "
3067 "construct");
3068 return false;
3069 }
3070 break;
3071 case GIMPLE_OMP_TARGET:
3072 for (c = gimple_omp_target_clauses (stmt); c; c = OMP_CLAUSE_CHAIN (c))
3073 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND
3074 && (OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SOURCE
3075 || OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SINK))
3076 {
3077 enum omp_clause_depend_kind kind = OMP_CLAUSE_DEPEND_KIND (c);
3078 error_at (OMP_CLAUSE_LOCATION (c),
3079 "%<depend(%s)%> is only allowed in %<omp ordered%>",
3080 kind == OMP_CLAUSE_DEPEND_SOURCE ? "source" : "sink");
3081 return false;
3082 }
3083 if (is_gimple_omp_offloaded (stmt)
3084 && oacc_get_fn_attrib (cfun->decl) != NULL)
3085 {
3086 error_at (gimple_location (stmt),
3087 "OpenACC region inside of OpenACC routine, nested "
3088 "parallelism not supported yet");
3089 return false;
3090 }
3091 for (; ctx != NULL; ctx = ctx->outer)
3092 {
3093 if (gimple_code (ctx->stmt) != GIMPLE_OMP_TARGET)
3094 {
3095 if (is_gimple_omp (stmt)
3096 && is_gimple_omp_oacc (stmt)
3097 && is_gimple_omp (ctx->stmt))
3098 {
3099 error_at (gimple_location (stmt),
3100 "OpenACC construct inside of non-OpenACC region");
3101 return false;
3102 }
3103 continue;
3104 }
3105
3106 const char *stmt_name, *ctx_stmt_name;
3107 switch (gimple_omp_target_kind (stmt))
3108 {
3109 case GF_OMP_TARGET_KIND_REGION: stmt_name = "target"; break;
3110 case GF_OMP_TARGET_KIND_DATA: stmt_name = "target data"; break;
3111 case GF_OMP_TARGET_KIND_UPDATE: stmt_name = "target update"; break;
3112 case GF_OMP_TARGET_KIND_ENTER_DATA:
3113 stmt_name = "target enter data"; break;
3114 case GF_OMP_TARGET_KIND_EXIT_DATA:
3115 stmt_name = "target exit data"; break;
3116 case GF_OMP_TARGET_KIND_OACC_PARALLEL: stmt_name = "parallel"; break;
3117 case GF_OMP_TARGET_KIND_OACC_KERNELS: stmt_name = "kernels"; break;
3118 case GF_OMP_TARGET_KIND_OACC_DATA: stmt_name = "data"; break;
3119 case GF_OMP_TARGET_KIND_OACC_UPDATE: stmt_name = "update"; break;
3120 case GF_OMP_TARGET_KIND_OACC_ENTER_EXIT_DATA:
3121 stmt_name = "enter/exit data"; break;
3122 case GF_OMP_TARGET_KIND_OACC_DECLARE: stmt_name = "declare"; break;
3123 case GF_OMP_TARGET_KIND_OACC_HOST_DATA: stmt_name = "host_data";
3124 break;
3125 default: gcc_unreachable ();
3126 }
3127 switch (gimple_omp_target_kind (ctx->stmt))
3128 {
3129 case GF_OMP_TARGET_KIND_REGION: ctx_stmt_name = "target"; break;
3130 case GF_OMP_TARGET_KIND_DATA: ctx_stmt_name = "target data"; break;
3131 case GF_OMP_TARGET_KIND_OACC_PARALLEL:
3132 ctx_stmt_name = "parallel"; break;
3133 case GF_OMP_TARGET_KIND_OACC_KERNELS:
3134 ctx_stmt_name = "kernels"; break;
3135 case GF_OMP_TARGET_KIND_OACC_DATA: ctx_stmt_name = "data"; break;
3136 case GF_OMP_TARGET_KIND_OACC_HOST_DATA:
3137 ctx_stmt_name = "host_data"; break;
3138 default: gcc_unreachable ();
3139 }
3140
3141 /* OpenACC/OpenMP mismatch? */
3142 if (is_gimple_omp_oacc (stmt)
3143 != is_gimple_omp_oacc (ctx->stmt))
3144 {
3145 error_at (gimple_location (stmt),
3146 "%s %qs construct inside of %s %qs region",
3147 (is_gimple_omp_oacc (stmt)
3148 ? "OpenACC" : "OpenMP"), stmt_name,
3149 (is_gimple_omp_oacc (ctx->stmt)
3150 ? "OpenACC" : "OpenMP"), ctx_stmt_name);
3151 return false;
3152 }
3153 if (is_gimple_omp_offloaded (ctx->stmt))
3154 {
3155 /* No GIMPLE_OMP_TARGET inside offloaded OpenACC CTX. */
3156 if (is_gimple_omp_oacc (ctx->stmt))
3157 {
3158 error_at (gimple_location (stmt),
3159 "%qs construct inside of %qs region",
3160 stmt_name, ctx_stmt_name);
3161 return false;
3162 }
3163 else
3164 {
3165 warning_at (gimple_location (stmt), 0,
3166 "%qs construct inside of %qs region",
3167 stmt_name, ctx_stmt_name);
3168 }
3169 }
3170 }
3171 break;
3172 default:
3173 break;
3174 }
3175 return true;
3176 }
3177
3178
3179 /* Helper function for scan_omp.
3180
3181 Callback for walk_tree, and for the operand walk in walk_gimple_stmt,
3182 used to scan for OMP directives in TP. */
3183
3184 static tree
3185 scan_omp_1_op (tree *tp, int *walk_subtrees, void *data)
3186 {
3187 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
3188 omp_context *ctx = (omp_context *) wi->info;
3189 tree t = *tp;
3190
3191 switch (TREE_CODE (t))
3192 {
3193 case VAR_DECL:
3194 case PARM_DECL:
3195 case LABEL_DECL:
3196 case RESULT_DECL:
3197 if (ctx)
3198 {
3199 tree repl = remap_decl (t, &ctx->cb);
3200 gcc_checking_assert (TREE_CODE (repl) != ERROR_MARK);
3201 *tp = repl;
3202 }
3203 break;
3204
3205 default:
3206 if (ctx && TYPE_P (t))
3207 *tp = remap_type (t, &ctx->cb);
3208 else if (!DECL_P (t))
3209 {
3210 *walk_subtrees = 1;
3211 if (ctx)
3212 {
3213 tree tem = remap_type (TREE_TYPE (t), &ctx->cb);
3214 if (tem != TREE_TYPE (t))
3215 {
3216 if (TREE_CODE (t) == INTEGER_CST)
3217 *tp = wide_int_to_tree (tem, wi::to_wide (t));
3218 else
3219 TREE_TYPE (t) = tem;
3220 }
3221 }
3222 }
3223 break;
3224 }
3225
3226 return NULL_TREE;
3227 }
3228
3229 /* Return true if FNDECL is a setjmp or a longjmp. */
3230
3231 static bool
3232 setjmp_or_longjmp_p (const_tree fndecl)
3233 {
3234 if (fndecl_built_in_p (fndecl, BUILT_IN_SETJMP)
3235 || fndecl_built_in_p (fndecl, BUILT_IN_LONGJMP))
3236 return true;
3237
3238 tree declname = DECL_NAME (fndecl);
3239 if (!declname)
3240 return false;
3241 const char *name = IDENTIFIER_POINTER (declname);
3242 return !strcmp (name, "setjmp") || !strcmp (name, "longjmp");
3243 }
3244
3245
3246 /* Helper function for scan_omp.
3247
3248 Callback for walk_gimple_stmt used to scan for OMP directives in
3249 the current statement in GSI. */
3250
3251 static tree
3252 scan_omp_1_stmt (gimple_stmt_iterator *gsi, bool *handled_ops_p,
3253 struct walk_stmt_info *wi)
3254 {
3255 gimple *stmt = gsi_stmt (*gsi);
3256 omp_context *ctx = (omp_context *) wi->info;
3257
3258 if (gimple_has_location (stmt))
3259 input_location = gimple_location (stmt);
3260
3261 /* Check the nesting restrictions. */
3262 bool remove = false;
3263 if (is_gimple_omp (stmt))
3264 remove = !check_omp_nesting_restrictions (stmt, ctx);
3265 else if (is_gimple_call (stmt))
3266 {
3267 tree fndecl = gimple_call_fndecl (stmt);
3268 if (fndecl)
3269 {
3270 if (setjmp_or_longjmp_p (fndecl)
3271 && ctx
3272 && gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
3273 && gimple_omp_for_kind (ctx->stmt) & GF_OMP_FOR_SIMD)
3274 {
3275 remove = true;
3276 error_at (gimple_location (stmt),
3277 "setjmp/longjmp inside simd construct");
3278 }
3279 else if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
3280 switch (DECL_FUNCTION_CODE (fndecl))
3281 {
3282 case BUILT_IN_GOMP_BARRIER:
3283 case BUILT_IN_GOMP_CANCEL:
3284 case BUILT_IN_GOMP_CANCELLATION_POINT:
3285 case BUILT_IN_GOMP_TASKYIELD:
3286 case BUILT_IN_GOMP_TASKWAIT:
3287 case BUILT_IN_GOMP_TASKGROUP_START:
3288 case BUILT_IN_GOMP_TASKGROUP_END:
3289 remove = !check_omp_nesting_restrictions (stmt, ctx);
3290 break;
3291 default:
3292 break;
3293 }
3294 }
3295 }
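/* E.g. (illustrative): a call to setjmp in the body of a
   #pragma omp simd loop is diagnosed above and replaced by a
   GIMPLE_NOP below, since such unstructured control flow is not
   supported inside a simd region.  */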
3296 if (remove)
3297 {
3298 stmt = gimple_build_nop ();
3299 gsi_replace (gsi, stmt, false);
3300 }
3301
3302 *handled_ops_p = true;
3303
3304 switch (gimple_code (stmt))
3305 {
3306 case GIMPLE_OMP_PARALLEL:
3307 taskreg_nesting_level++;
3308 scan_omp_parallel (gsi, ctx);
3309 taskreg_nesting_level--;
3310 break;
3311
3312 case GIMPLE_OMP_TASK:
3313 taskreg_nesting_level++;
3314 scan_omp_task (gsi, ctx);
3315 taskreg_nesting_level--;
3316 break;
3317
3318 case GIMPLE_OMP_FOR:
3319 if (((gimple_omp_for_kind (as_a <gomp_for *> (stmt))
3320 & GF_OMP_FOR_KIND_MASK) == GF_OMP_FOR_KIND_SIMD)
3321 && omp_maybe_offloaded_ctx (ctx)
3322 && omp_max_simt_vf ())
3323 scan_omp_simd (gsi, as_a <gomp_for *> (stmt), ctx);
3324 else
3325 scan_omp_for (as_a <gomp_for *> (stmt), ctx);
3326 break;
3327
3328 case GIMPLE_OMP_SECTIONS:
3329 scan_omp_sections (as_a <gomp_sections *> (stmt), ctx);
3330 break;
3331
3332 case GIMPLE_OMP_SINGLE:
3333 scan_omp_single (as_a <gomp_single *> (stmt), ctx);
3334 break;
3335
3336 case GIMPLE_OMP_SCAN:
3337 if (tree clauses = gimple_omp_scan_clauses (as_a <gomp_scan *> (stmt)))
3338 if (OMP_CLAUSE_CODE (clauses) == OMP_CLAUSE_INCLUSIVE)
3339 ctx->scan_inclusive = true;
3340 /* FALLTHRU */
3341 case GIMPLE_OMP_SECTION:
3342 case GIMPLE_OMP_MASTER:
3343 case GIMPLE_OMP_ORDERED:
3344 case GIMPLE_OMP_CRITICAL:
3345 case GIMPLE_OMP_GRID_BODY:
3346 ctx = new_omp_context (stmt, ctx);
3347 scan_omp (gimple_omp_body_ptr (stmt), ctx);
3348 break;
3349
3350 case GIMPLE_OMP_TASKGROUP:
3351 ctx = new_omp_context (stmt, ctx);
3352 scan_sharing_clauses (gimple_omp_taskgroup_clauses (stmt), ctx);
3353 scan_omp (gimple_omp_body_ptr (stmt), ctx);
3354 break;
3355
3356 case GIMPLE_OMP_TARGET:
3357 scan_omp_target (as_a <gomp_target *> (stmt), ctx);
3358 break;
3359
3360 case GIMPLE_OMP_TEAMS:
3361 if (gimple_omp_teams_host (as_a <gomp_teams *> (stmt)))
3362 {
3363 taskreg_nesting_level++;
3364 scan_omp_teams (as_a <gomp_teams *> (stmt), ctx);
3365 taskreg_nesting_level--;
3366 }
3367 else
3368 scan_omp_teams (as_a <gomp_teams *> (stmt), ctx);
3369 break;
3370
3371 case GIMPLE_BIND:
3372 {
3373 tree var;
3374
3375 *handled_ops_p = false;
3376 if (ctx)
3377 for (var = gimple_bind_vars (as_a <gbind *> (stmt));
3378 var ;
3379 var = DECL_CHAIN (var))
3380 insert_decl_map (&ctx->cb, var, var);
3381 }
3382 break;
3383 default:
3384 *handled_ops_p = false;
3385 break;
3386 }
3387
3388 return NULL_TREE;
3389 }
3390
3391
3392 /* Scan all the statements starting at the current statement. CTX
3393 contains context information about the OMP directives and
3394 clauses found during the scan. */
3395
3396 static void
3397 scan_omp (gimple_seq *body_p, omp_context *ctx)
3398 {
3399 location_t saved_location;
3400 struct walk_stmt_info wi;
3401
3402 memset (&wi, 0, sizeof (wi));
3403 wi.info = ctx;
3404 wi.want_locations = true;
3405
3406 saved_location = input_location;
3407 walk_gimple_seq_mod (body_p, scan_omp_1_stmt, scan_omp_1_op, &wi);
3408 input_location = saved_location;
3409 }
3410 \f
3411 /* Re-gimplification and code generation routines. */
3412
3413 /* Remove omp_member_access_dummy_var variables from gimple_bind_vars
3414 of BIND if in a method. */
3415
3416 static void
3417 maybe_remove_omp_member_access_dummy_vars (gbind *bind)
3418 {
3419 if (DECL_ARGUMENTS (current_function_decl)
3420 && DECL_ARTIFICIAL (DECL_ARGUMENTS (current_function_decl))
3421 && (TREE_CODE (TREE_TYPE (DECL_ARGUMENTS (current_function_decl)))
3422 == POINTER_TYPE))
3423 {
3424 tree vars = gimple_bind_vars (bind);
3425 for (tree *pvar = &vars; *pvar; )
3426 if (omp_member_access_dummy_var (*pvar))
3427 *pvar = DECL_CHAIN (*pvar);
3428 else
3429 pvar = &DECL_CHAIN (*pvar);
3430 gimple_bind_set_vars (bind, vars);
3431 }
3432 }
3433
3434 /* Remove omp_member_access_dummy_var variables from BLOCK_VARS of
3435 BLOCK and its subblocks. */
3436
3437 static void
3438 remove_member_access_dummy_vars (tree block)
3439 {
3440 for (tree *pvar = &BLOCK_VARS (block); *pvar; )
3441 if (omp_member_access_dummy_var (*pvar))
3442 *pvar = DECL_CHAIN (*pvar);
3443 else
3444 pvar = &DECL_CHAIN (*pvar);
3445
3446 for (block = BLOCK_SUBBLOCKS (block); block; block = BLOCK_CHAIN (block))
3447 remove_member_access_dummy_vars (block);
3448 }
3449
3450 /* If a context was created for STMT when it was scanned, return it. */
3451
3452 static omp_context *
3453 maybe_lookup_ctx (gimple *stmt)
3454 {
3455 splay_tree_node n;
3456 n = splay_tree_lookup (all_contexts, (splay_tree_key) stmt);
3457 return n ? (omp_context *) n->value : NULL;
3458 }
3459
3460
3461 /* Find the mapping for DECL in CTX or the immediately enclosing
3462 context that has a mapping for DECL.
3463
3464 If CTX is a nested parallel directive, we may have to use the decl
3465 mappings created in CTX's parent context. Suppose that we have the
3466 following parallel nesting (variable UIDs shown for clarity):
3467
3468 iD.1562 = 0;
3469 #omp parallel shared(iD.1562) -> outer parallel
3470 iD.1562 = iD.1562 + 1;
3471
3472 #omp parallel shared (iD.1562) -> inner parallel
3473 iD.1562 = iD.1562 - 1;
3474
3475 Each parallel structure will create a distinct .omp_data_s structure
3476 for copying iD.1562 in/out of the directive:
3477
3478 outer parallel .omp_data_s.1.i -> iD.1562
3479 inner parallel .omp_data_s.2.i -> iD.1562
3480
3481 A shared variable mapping will produce a copy-out operation before
3482 the parallel directive and a copy-in operation after it. So, in
3483 this case we would have:
3484
3485 iD.1562 = 0;
3486 .omp_data_o.1.i = iD.1562;
3487 #omp parallel shared(iD.1562) -> outer parallel
3488 .omp_data_i.1 = &.omp_data_o.1
3489 .omp_data_i.1->i = .omp_data_i.1->i + 1;
3490
3491 .omp_data_o.2.i = iD.1562; -> **
3492 #omp parallel shared(iD.1562) -> inner parallel
3493 .omp_data_i.2 = &.omp_data_o.2
3494 .omp_data_i.2->i = .omp_data_i.2->i - 1;
3495
3496
3497 ** This is a problem. The symbol iD.1562 cannot be referenced
3498 inside the body of the outer parallel region. But since we are
3499 emitting this copy operation while expanding the inner parallel
3500 directive, we need to access the CTX structure of the outer
3501 parallel directive to get the correct mapping:
3502
3503 .omp_data_o.2.i = .omp_data_i.1->i
3504
3505 Since there may be other workshare or parallel directives enclosing
3506 the parallel directive, it may be necessary to walk up the context
3507 parent chain. This is not a problem in general because nested
3508 parallelism happens only rarely. */
3509
3510 static tree
3511 lookup_decl_in_outer_ctx (tree decl, omp_context *ctx)
3512 {
3513 tree t;
3514 omp_context *up;
3515
3516 for (up = ctx->outer, t = NULL; up && t == NULL; up = up->outer)
3517 t = maybe_lookup_decl (decl, up);
3518
3519 gcc_assert (!ctx->is_nested || t || is_global_var (decl));
3520
3521 return t ? t : decl;
3522 }
3523
3524
3525 /* Similar to lookup_decl_in_outer_ctx, but return DECL if not found
3526 in outer contexts. */
3527
3528 static tree
3529 maybe_lookup_decl_in_outer_ctx (tree decl, omp_context *ctx)
3530 {
3531 tree t = NULL;
3532 omp_context *up;
3533
3534 for (up = ctx->outer, t = NULL; up && t == NULL; up = up->outer)
3535 t = maybe_lookup_decl (decl, up);
3536
3537 return t ? t : decl;
3538 }
3539
3540
3541 /* Construct the initialization value for reduction operation OP. */
3542
3543 tree
3544 omp_reduction_init_op (location_t loc, enum tree_code op, tree type)
3545 {
3546 switch (op)
3547 {
3548 case PLUS_EXPR:
3549 case MINUS_EXPR:
3550 case BIT_IOR_EXPR:
3551 case BIT_XOR_EXPR:
3552 case TRUTH_OR_EXPR:
3553 case TRUTH_ORIF_EXPR:
3554 case TRUTH_XOR_EXPR:
3555 case NE_EXPR:
3556 return build_zero_cst (type);
3557
3558 case MULT_EXPR:
3559 case TRUTH_AND_EXPR:
3560 case TRUTH_ANDIF_EXPR:
3561 case EQ_EXPR:
3562 return fold_convert_loc (loc, type, integer_one_node);
3563
3564 case BIT_AND_EXPR:
3565 return fold_convert_loc (loc, type, integer_minus_one_node);
3566
3567 case MAX_EXPR:
3568 if (SCALAR_FLOAT_TYPE_P (type))
3569 {
3570 REAL_VALUE_TYPE max, min;
3571 if (HONOR_INFINITIES (type))
3572 {
3573 real_inf (&max);
3574 real_arithmetic (&min, NEGATE_EXPR, &max, NULL);
3575 }
3576 else
3577 real_maxval (&min, 1, TYPE_MODE (type));
3578 return build_real (type, min);
3579 }
3580 else if (POINTER_TYPE_P (type))
3581 {
3582 wide_int min
3583 = wi::min_value (TYPE_PRECISION (type), TYPE_SIGN (type));
3584 return wide_int_to_tree (type, min);
3585 }
3586 else
3587 {
3588 gcc_assert (INTEGRAL_TYPE_P (type));
3589 return TYPE_MIN_VALUE (type);
3590 }
3591
3592 case MIN_EXPR:
3593 if (SCALAR_FLOAT_TYPE_P (type))
3594 {
3595 REAL_VALUE_TYPE max;
3596 if (HONOR_INFINITIES (type))
3597 real_inf (&max);
3598 else
3599 real_maxval (&max, 0, TYPE_MODE (type));
3600 return build_real (type, max);
3601 }
3602 else if (POINTER_TYPE_P (type))
3603 {
3604 wide_int max
3605 = wi::max_value (TYPE_PRECISION (type), TYPE_SIGN (type));
3606 return wide_int_to_tree (type, max);
3607 }
3608 else
3609 {
3610 gcc_assert (INTEGRAL_TYPE_P (type));
3611 return TYPE_MAX_VALUE (type);
3612 }
3613
3614 default:
3615 gcc_unreachable ();
3616 }
3617 }
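/* Examples of the initializers chosen above (illustrative):

     reduction(+:x)   ->  x = 0
     reduction(*:x)   ->  x = 1
     reduction(&:x)   ->  x = ~0
     reduction(max:x) ->  x = minimum value of the type
                           (-inf for floats honoring infinities)
     reduction(min:x) ->  x = maximum value of the type.  */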
3618
3619 /* Construct the initialization value for reduction CLAUSE. */
3620
3621 tree
3622 omp_reduction_init (tree clause, tree type)
3623 {
3624 return omp_reduction_init_op (OMP_CLAUSE_LOCATION (clause),
3625 OMP_CLAUSE_REDUCTION_CODE (clause), type);
3626 }
3627
3628 /* Return alignment to be assumed for var in CLAUSE, which should be
3629 OMP_CLAUSE_ALIGNED. */
3630
3631 static tree
3632 omp_clause_aligned_alignment (tree clause)
3633 {
3634 if (OMP_CLAUSE_ALIGNED_ALIGNMENT (clause))
3635 return OMP_CLAUSE_ALIGNED_ALIGNMENT (clause);
3636
3637 /* Otherwise return the implementation-defined alignment. */
3638 unsigned int al = 1;
3639 opt_scalar_mode mode_iter;
3640 auto_vector_sizes sizes;
3641 targetm.vectorize.autovectorize_vector_sizes (&sizes, true);
3642 poly_uint64 vs = 0;
3643 for (unsigned int i = 0; i < sizes.length (); ++i)
3644 vs = ordered_max (vs, sizes[i]);
3645 static enum mode_class classes[]
3646 = { MODE_INT, MODE_VECTOR_INT, MODE_FLOAT, MODE_VECTOR_FLOAT };
3647 for (int i = 0; i < 4; i += 2)
3648 /* The for loop above dictates that we only walk through scalar classes. */
3649 FOR_EACH_MODE_IN_CLASS (mode_iter, classes[i])
3650 {
3651 scalar_mode mode = mode_iter.require ();
3652 machine_mode vmode = targetm.vectorize.preferred_simd_mode (mode);
3653 if (GET_MODE_CLASS (vmode) != classes[i + 1])
3654 continue;
3655 while (maybe_ne (vs, 0U)
3656 && known_lt (GET_MODE_SIZE (vmode), vs)
3657 && GET_MODE_2XWIDER_MODE (vmode).exists ())
3658 vmode = GET_MODE_2XWIDER_MODE (vmode).require ();
3659
3660 tree type = lang_hooks.types.type_for_mode (mode, 1);
3661 if (type == NULL_TREE || TYPE_MODE (type) != mode)
3662 continue;
3663 poly_uint64 nelts = exact_div (GET_MODE_SIZE (vmode),
3664 GET_MODE_SIZE (mode));
3665 type = build_vector_type (type, nelts);
3666 if (TYPE_MODE (type) != vmode)
3667 continue;
3668 if (TYPE_ALIGN_UNIT (type) > al)
3669 al = TYPE_ALIGN_UNIT (type);
3670 }
3671 return build_int_cst (integer_type_node, al);
3672 }
3673
3674
3675 /* This structure is part of the interface between lower_rec_simd_input_clauses
3676 and lower_rec_input_clauses. */
3677
3678 struct omplow_simd_context {
3679 omplow_simd_context () { memset (this, 0, sizeof (*this)); }
3680 tree idx;
3681 tree lane;
3682 tree lastlane;
3683 vec<tree, va_heap> simt_eargs;
3684 gimple_seq simt_dlist;
3685 poly_uint64_pod max_vf;
3686 bool is_simt;
3687 };
3688
3689 /* Helper function of lower_rec_input_clauses, used for #pragma omp simd
3690 privatization. */
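/* Schematically, in the non-SIMT case with max_vf > 1, the privatized
   VAR is backed by an "omp simd array" of max_vf elements:

     vartype D.simdarr[max_vf];
     ivar = D.simdarr[sctx->idx];   (the per-iteration element)
     lvar = D.simdarr[sctx->lane];  (the element of the last lane)

   and NEW_VAR's DECL_VALUE_EXPR is redirected to LVAR.  D.simdarr is a
   sketch name; the actual temporary is anonymous.  */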
3691
3692 static bool
3693 lower_rec_simd_input_clauses (tree new_var, omp_context *ctx,
3694 omplow_simd_context *sctx, tree &ivar,
3695 tree &lvar, tree *rvar = NULL,
3696 tree *rvar2 = NULL)
3697 {
3698 if (known_eq (sctx->max_vf, 0U))
3699 {
3700 sctx->max_vf = sctx->is_simt ? omp_max_simt_vf () : omp_max_vf ();
3701 if (maybe_gt (sctx->max_vf, 1U))
3702 {
3703 tree c = omp_find_clause (gimple_omp_for_clauses (ctx->stmt),
3704 OMP_CLAUSE_SAFELEN);
3705 if (c)
3706 {
3707 poly_uint64 safe_len;
3708 if (!poly_int_tree_p (OMP_CLAUSE_SAFELEN_EXPR (c), &safe_len)
3709 || maybe_lt (safe_len, 1U))
3710 sctx->max_vf = 1;
3711 else
3712 sctx->max_vf = lower_bound (sctx->max_vf, safe_len);
3713 }
3714 }
3715 if (maybe_gt (sctx->max_vf, 1U))
3716 {
3717 sctx->idx = create_tmp_var (unsigned_type_node);
3718 sctx->lane = create_tmp_var (unsigned_type_node);
3719 }
3720 }
3721 if (known_eq (sctx->max_vf, 1U))
3722 return false;
3723
3724 if (sctx->is_simt)
3725 {
3726 if (is_gimple_reg (new_var))
3727 {
3728 ivar = lvar = new_var;
3729 return true;
3730 }
3731 tree type = TREE_TYPE (new_var), ptype = build_pointer_type (type);
3732 ivar = lvar = create_tmp_var (type);
3733 TREE_ADDRESSABLE (ivar) = 1;
3734 DECL_ATTRIBUTES (ivar) = tree_cons (get_identifier ("omp simt private"),
3735 NULL, DECL_ATTRIBUTES (ivar));
3736 sctx->simt_eargs.safe_push (build1 (ADDR_EXPR, ptype, ivar));
3737 tree clobber = build_constructor (type, NULL);
3738 TREE_THIS_VOLATILE (clobber) = 1;
3739 gimple *g = gimple_build_assign (ivar, clobber);
3740 gimple_seq_add_stmt (&sctx->simt_dlist, g);
3741 }
3742 else
3743 {
3744 tree atype = build_array_type_nelts (TREE_TYPE (new_var), sctx->max_vf);
3745 tree avar = create_tmp_var_raw (atype);
3746 if (TREE_ADDRESSABLE (new_var))
3747 TREE_ADDRESSABLE (avar) = 1;
3748 DECL_ATTRIBUTES (avar)
3749 = tree_cons (get_identifier ("omp simd array"), NULL,
3750 DECL_ATTRIBUTES (avar));
3751 gimple_add_tmp_var (avar);
3752 tree iavar = avar;
3753 if (rvar)
3754 {
3755 /* For inscan reductions, create another array temporary,
3756 which will hold the reduced value. */
3757 iavar = create_tmp_var_raw (atype);
3758 if (TREE_ADDRESSABLE (new_var))
3759 TREE_ADDRESSABLE (iavar) = 1;
3760 DECL_ATTRIBUTES (iavar)
3761 = tree_cons (get_identifier ("omp simd array"), NULL,
3762 tree_cons (get_identifier ("omp simd inscan"), NULL,
3763 DECL_ATTRIBUTES (iavar)));
3764 gimple_add_tmp_var (iavar);
3765 ctx->cb.decl_map->put (avar, iavar);
3766 if (sctx->lastlane == NULL_TREE)
3767 sctx->lastlane = create_tmp_var (unsigned_type_node);
3768 *rvar = build4 (ARRAY_REF, TREE_TYPE (new_var), iavar,
3769 sctx->lastlane, NULL_TREE, NULL_TREE);
3770 TREE_THIS_NOTRAP (*rvar) = 1;
3771
3772 if (!ctx->scan_inclusive)
3773 {
3774 /* And for exclusive scan yet another one, which will
3775 hold the value during the scan phase. */
3776 tree savar = create_tmp_var_raw (atype);
3777 if (TREE_ADDRESSABLE (new_var))
3778 TREE_ADDRESSABLE (savar) = 1;
3779 DECL_ATTRIBUTES (savar)
3780 = tree_cons (get_identifier ("omp simd array"), NULL,
3781 tree_cons (get_identifier ("omp simd inscan "
3782 "exclusive"), NULL,
3783 DECL_ATTRIBUTES (savar)));
3784 gimple_add_tmp_var (savar);
3785 ctx->cb.decl_map->put (iavar, savar);
3786 *rvar2 = build4 (ARRAY_REF, TREE_TYPE (new_var), savar,
3787 sctx->idx, NULL_TREE, NULL_TREE);
3788 TREE_THIS_NOTRAP (*rvar2) = 1;
3789 }
3790 }
3791 ivar = build4 (ARRAY_REF, TREE_TYPE (new_var), iavar, sctx->idx,
3792 NULL_TREE, NULL_TREE);
3793 lvar = build4 (ARRAY_REF, TREE_TYPE (new_var), avar, sctx->lane,
3794 NULL_TREE, NULL_TREE);
3795 TREE_THIS_NOTRAP (ivar) = 1;
3796 TREE_THIS_NOTRAP (lvar) = 1;
3797 }
3798 if (DECL_P (new_var))
3799 {
3800 SET_DECL_VALUE_EXPR (new_var, lvar);
3801 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
3802 }
3803 return true;
3804 }
3805
3806 /* Helper function of lower_rec_input_clauses. For a reference
3807 in a simd reduction, add an underlying variable that it will reference. */
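/* In effect, for a referenced type of constant size this emits

     vartype D.tmp;        (a fresh addressable temporary)
     new_vard = &D.tmp;

   so that the reference has private backing storage.  D.tmp is a sketch
   name for the anonymous temporary.  */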
3808
3809 static void
3810 handle_simd_reference (location_t loc, tree new_vard, gimple_seq *ilist)
3811 {
3812 tree z = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (new_vard)));
3813 if (TREE_CONSTANT (z))
3814 {
3815 z = create_tmp_var_raw (TREE_TYPE (TREE_TYPE (new_vard)),
3816 get_name (new_vard));
3817 gimple_add_tmp_var (z);
3818 TREE_ADDRESSABLE (z) = 1;
3819 z = build_fold_addr_expr_loc (loc, z);
3820 gimplify_assign (new_vard, z, ilist);
3821 }
3822 }
3823
3824 /* Helper function for lower_rec_input_clauses. Emit code into the ILIST
3825 sequence to compute (type) (tskred_temp[idx]). */
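/* Roughly, the emitted GIMPLE is

     tmp = MEM[(uintptr *) tskred_temp + idx * sizeof (uintptr)];
     v = (type) tmp;       (only when a conversion is needed)

   with V returned.  */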
3826
3827 static tree
3828 task_reduction_read (gimple_seq *ilist, tree tskred_temp, tree type,
3829 unsigned idx)
3830 {
3831 unsigned HOST_WIDE_INT sz
3832 = tree_to_uhwi (TYPE_SIZE_UNIT (pointer_sized_int_node));
3833 tree r = build2 (MEM_REF, pointer_sized_int_node,
3834 tskred_temp, build_int_cst (TREE_TYPE (tskred_temp),
3835 idx * sz));
3836 tree v = create_tmp_var (pointer_sized_int_node);
3837 gimple *g = gimple_build_assign (v, r);
3838 gimple_seq_add_stmt (ilist, g);
3839 if (!useless_type_conversion_p (type, pointer_sized_int_node))
3840 {
3841 v = create_tmp_var (type);
3842 g = gimple_build_assign (v, NOP_EXPR, gimple_assign_lhs (g));
3843 gimple_seq_add_stmt (ilist, g);
3844 }
3845 return v;
3846 }
3847
3848 /* Generate code to implement the input clauses, FIRSTPRIVATE and COPYIN,
3849 from the receiver (aka child) side and initializers for REFERENCE_TYPE
3850 private variables. Initialization statements go in ILIST, while calls
3851 to destructors go in DLIST. */
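/* For instance, for #pragma omp parallel firstprivate(a) the child side
   receives A through the .omp_data_i block, and the code emitted into
   ILIST is roughly

     a' = .omp_data_i->a;

   (or a call to the copy constructor for C++ class types).  */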
3852
3853 static void
3854 lower_rec_input_clauses (tree clauses, gimple_seq *ilist, gimple_seq *dlist,
3855 omp_context *ctx, struct omp_for_data *fd)
3856 {
3857 tree c, dtor, copyin_seq, x, ptr;
3858 bool copyin_by_ref = false;
3859 bool lastprivate_firstprivate = false;
3860 bool reduction_omp_orig_ref = false;
3861 int pass;
3862 bool is_simd = (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
3863 && gimple_omp_for_kind (ctx->stmt) & GF_OMP_FOR_SIMD);
3864 omplow_simd_context sctx = omplow_simd_context ();
3865 tree simt_lane = NULL_TREE, simtrec = NULL_TREE;
3866 tree ivar = NULL_TREE, lvar = NULL_TREE, uid = NULL_TREE;
3867 gimple_seq llist[4] = { };
3868 tree nonconst_simd_if = NULL_TREE;
3869
3870 copyin_seq = NULL;
3871 sctx.is_simt = is_simd && omp_find_clause (clauses, OMP_CLAUSE__SIMT_);
3872
3873 /* Set max_vf=1 (which will later enforce safelen=1) in simd loops
3874 with data sharing clauses referencing variable sized vars. That
3875 is unnecessarily hard to support and very unlikely to result in
3876 vectorized code anyway. */
3877 if (is_simd)
3878 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
3879 switch (OMP_CLAUSE_CODE (c))
3880 {
3881 case OMP_CLAUSE_LINEAR:
3882 if (OMP_CLAUSE_LINEAR_ARRAY (c))
3883 sctx.max_vf = 1;
3884 /* FALLTHRU */
3885 case OMP_CLAUSE_PRIVATE:
3886 case OMP_CLAUSE_FIRSTPRIVATE:
3887 case OMP_CLAUSE_LASTPRIVATE:
3888 if (is_variable_sized (OMP_CLAUSE_DECL (c)))
3889 sctx.max_vf = 1;
3890 else if (omp_is_reference (OMP_CLAUSE_DECL (c)))
3891 {
3892 tree rtype = TREE_TYPE (TREE_TYPE (OMP_CLAUSE_DECL (c)));
3893 if (!TREE_CONSTANT (TYPE_SIZE_UNIT (rtype)))
3894 sctx.max_vf = 1;
3895 }
3896 break;
3897 case OMP_CLAUSE_REDUCTION:
3898 case OMP_CLAUSE_IN_REDUCTION:
3899 if (TREE_CODE (OMP_CLAUSE_DECL (c)) == MEM_REF
3900 || is_variable_sized (OMP_CLAUSE_DECL (c)))
3901 sctx.max_vf = 1;
3902 else if (omp_is_reference (OMP_CLAUSE_DECL (c)))
3903 {
3904 tree rtype = TREE_TYPE (TREE_TYPE (OMP_CLAUSE_DECL (c)));
3905 if (!TREE_CONSTANT (TYPE_SIZE_UNIT (rtype)))
3906 sctx.max_vf = 1;
3907 }
3908 break;
3909 case OMP_CLAUSE_IF:
3910 if (integer_zerop (OMP_CLAUSE_IF_EXPR (c)))
3911 sctx.max_vf = 1;
3912 else if (TREE_CODE (OMP_CLAUSE_IF_EXPR (c)) != INTEGER_CST)
3913 nonconst_simd_if = OMP_CLAUSE_IF_EXPR (c);
3914 break;
3915 case OMP_CLAUSE_SIMDLEN:
3916 if (integer_onep (OMP_CLAUSE_SIMDLEN_EXPR (c)))
3917 sctx.max_vf = 1;
3918 break;
3919 case OMP_CLAUSE__CONDTEMP_:
3920 /* FIXME: lastprivate(conditional:) not handled for SIMT yet. */
3921 if (sctx.is_simt)
3922 sctx.max_vf = 1;
3923 break;
3924 default:
3925 continue;
3926 }
3927
3928 /* Add a placeholder for simduid. */
3929 if (sctx.is_simt && maybe_ne (sctx.max_vf, 1U))
3930 sctx.simt_eargs.safe_push (NULL_TREE);
3931
3932 unsigned task_reduction_cnt = 0;
3933 unsigned task_reduction_cntorig = 0;
3934 unsigned task_reduction_cnt_full = 0;
3935 unsigned task_reduction_cntorig_full = 0;
3936 unsigned task_reduction_other_cnt = 0;
3937 tree tskred_atype = NULL_TREE, tskred_avar = NULL_TREE;
3938 tree tskred_base = NULL_TREE, tskred_temp = NULL_TREE;
3939 /* Do all the fixed sized types in the first pass, and the variable sized
3940 types in the second pass. This makes sure that the scalar arguments to
3941 the variable sized types are processed before we use them in the
3942 variable sized operations. For task reductions we use 4 passes: in the
3943 first two we ignore them, in the third we gather arguments for the
3944 GOMP_task_reduction_remap call, and in the last pass we actually handle
3945 the task reductions. */
3946 for (pass = 0; pass < ((task_reduction_cnt || task_reduction_other_cnt)
3947 ? 4 : 2); ++pass)
3948 {
3949 if (pass == 2 && task_reduction_cnt)
3950 {
3951 tskred_atype
3952 = build_array_type_nelts (ptr_type_node, task_reduction_cnt
3953 + task_reduction_cntorig);
3954 tskred_avar = create_tmp_var_raw (tskred_atype);
3955 gimple_add_tmp_var (tskred_avar);
3956 TREE_ADDRESSABLE (tskred_avar) = 1;
3957 task_reduction_cnt_full = task_reduction_cnt;
3958 task_reduction_cntorig_full = task_reduction_cntorig;
3959 }
3960 else if (pass == 3 && task_reduction_cnt)
3961 {
3962 x = builtin_decl_explicit (BUILT_IN_GOMP_TASK_REDUCTION_REMAP);
3963 gimple *g
3964 = gimple_build_call (x, 3, size_int (task_reduction_cnt),
3965 size_int (task_reduction_cntorig),
3966 build_fold_addr_expr (tskred_avar));
3967 gimple_seq_add_stmt (ilist, g);
3968 }
3969 if (pass == 3 && task_reduction_other_cnt)
3970 {
3971 /* For reduction clauses, build
3972 tskred_base = (void *) tskred_temp[2]
3973 + omp_get_thread_num () * tskred_temp[1]
3974 or if tskred_temp[1] is known to be constant, that constant
3975 directly. This is the start of the private reduction copy block
3976 for the current thread. */
3977 tree v = create_tmp_var (integer_type_node);
3978 x = builtin_decl_explicit (BUILT_IN_OMP_GET_THREAD_NUM);
3979 gimple *g = gimple_build_call (x, 0);
3980 gimple_call_set_lhs (g, v);
3981 gimple_seq_add_stmt (ilist, g);
3982 c = omp_find_clause (clauses, OMP_CLAUSE__REDUCTEMP_);
3983 tskred_temp = OMP_CLAUSE_DECL (c);
3984 if (is_taskreg_ctx (ctx))
3985 tskred_temp = lookup_decl (tskred_temp, ctx);
3986 tree v2 = create_tmp_var (sizetype);
3987 g = gimple_build_assign (v2, NOP_EXPR, v);
3988 gimple_seq_add_stmt (ilist, g);
3989 if (ctx->task_reductions[0])
3990 v = fold_convert (sizetype, ctx->task_reductions[0]);
3991 else
3992 v = task_reduction_read (ilist, tskred_temp, sizetype, 1);
3993 tree v3 = create_tmp_var (sizetype);
3994 g = gimple_build_assign (v3, MULT_EXPR, v2, v);
3995 gimple_seq_add_stmt (ilist, g);
3996 v = task_reduction_read (ilist, tskred_temp, ptr_type_node, 2);
3997 tskred_base = create_tmp_var (ptr_type_node);
3998 g = gimple_build_assign (tskred_base, POINTER_PLUS_EXPR, v, v3);
3999 gimple_seq_add_stmt (ilist, g);
4000 }
4001 task_reduction_cnt = 0;
4002 task_reduction_cntorig = 0;
4003 task_reduction_other_cnt = 0;
4004 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
4005 {
4006 enum omp_clause_code c_kind = OMP_CLAUSE_CODE (c);
4007 tree var, new_var;
4008 bool by_ref;
4009 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
4010 bool task_reduction_p = false;
4011 bool task_reduction_needs_orig_p = false;
4012 tree cond = NULL_TREE;
4013
4014 switch (c_kind)
4015 {
4016 case OMP_CLAUSE_PRIVATE:
4017 if (OMP_CLAUSE_PRIVATE_DEBUG (c))
4018 continue;
4019 break;
4020 case OMP_CLAUSE_SHARED:
4021 /* Ignore shared directives in teams construct inside
4022 of target construct. */
4023 if (gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS
4024 && !is_host_teams_ctx (ctx))
4025 continue;
4026 if (maybe_lookup_decl (OMP_CLAUSE_DECL (c), ctx) == NULL)
4027 {
4028 gcc_assert (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c)
4029 || is_global_var (OMP_CLAUSE_DECL (c)));
4030 continue;
4031 }
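/* FALLTHRU */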
4032 case OMP_CLAUSE_FIRSTPRIVATE:
4033 case OMP_CLAUSE_COPYIN:
4034 break;
4035 case OMP_CLAUSE_LINEAR:
4036 if (!OMP_CLAUSE_LINEAR_NO_COPYIN (c)
4037 && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c))
4038 lastprivate_firstprivate = true;
4039 break;
4040 case OMP_CLAUSE_REDUCTION:
4041 case OMP_CLAUSE_IN_REDUCTION:
4042 if (is_task_ctx (ctx) || OMP_CLAUSE_REDUCTION_TASK (c))
4043 {
4044 task_reduction_p = true;
4045 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION)
4046 {
4047 task_reduction_other_cnt++;
4048 if (pass == 2)
4049 continue;
4050 }
4051 else
4052 task_reduction_cnt++;
4053 if (OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c))
4054 {
4055 var = OMP_CLAUSE_DECL (c);
4056 /* If var is a global variable that isn't privatized
4057 in outer contexts, we don't need to look up the
4058 original address, it is always the address of the
4059 global variable itself. */
4060 if (!DECL_P (var)
4061 || omp_is_reference (var)
4062 || !is_global_var
4063 (maybe_lookup_decl_in_outer_ctx (var, ctx)))
4064 {
4065 task_reduction_needs_orig_p = true;
4066 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION)
4067 task_reduction_cntorig++;
4068 }
4069 }
4070 }
4071 else if (OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c))
4072 reduction_omp_orig_ref = true;
4073 break;
4074 case OMP_CLAUSE__REDUCTEMP_:
4075 if (!is_taskreg_ctx (ctx))
4076 continue;
4077 /* FALLTHRU */
4078 case OMP_CLAUSE__LOOPTEMP_:
4079 /* Handle _looptemp_/_reductemp_ clauses only on
4080 parallel/task. */
4081 if (fd)
4082 continue;
4083 break;
4084 case OMP_CLAUSE_LASTPRIVATE:
4085 if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
4086 {
4087 lastprivate_firstprivate = true;
4088 if (pass != 0 || is_taskloop_ctx (ctx))
4089 continue;
4090 }
4091 /* Even without corresponding firstprivate, if
4092 decl is Fortran allocatable, it needs outer var
4093 reference. */
4094 else if (pass == 0
4095 && lang_hooks.decls.omp_private_outer_ref
4096 (OMP_CLAUSE_DECL (c)))
4097 lastprivate_firstprivate = true;
4098 break;
4099 case OMP_CLAUSE_ALIGNED:
4100 if (pass != 1)
4101 continue;
4102 var = OMP_CLAUSE_DECL (c);
4103 if (TREE_CODE (TREE_TYPE (var)) == POINTER_TYPE
4104 && !is_global_var (var))
4105 {
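/* Emit roughly: new_var = (T *) __builtin_assume_aligned (new_var, alignment); */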
4106 new_var = maybe_lookup_decl (var, ctx);
4107 if (new_var == NULL_TREE)
4108 new_var = maybe_lookup_decl_in_outer_ctx (var, ctx);
4109 x = builtin_decl_explicit (BUILT_IN_ASSUME_ALIGNED);
4110 tree alarg = omp_clause_aligned_alignment (c);
4111 alarg = fold_convert_loc (clause_loc, size_type_node, alarg);
4112 x = build_call_expr_loc (clause_loc, x, 2, new_var, alarg);
4113 x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x);
4114 x = build2 (MODIFY_EXPR, TREE_TYPE (new_var), new_var, x);
4115 gimplify_and_add (x, ilist);
4116 }
4117 else if (TREE_CODE (TREE_TYPE (var)) == ARRAY_TYPE
4118 && is_global_var (var))
4119 {
4120 tree ptype = build_pointer_type (TREE_TYPE (var)), t, t2;
4121 new_var = lookup_decl (var, ctx);
4122 t = maybe_lookup_decl_in_outer_ctx (var, ctx);
4123 t = build_fold_addr_expr_loc (clause_loc, t);
4124 t2 = builtin_decl_explicit (BUILT_IN_ASSUME_ALIGNED);
4125 tree alarg = omp_clause_aligned_alignment (c);
4126 alarg = fold_convert_loc (clause_loc, size_type_node, alarg);
4127 t = build_call_expr_loc (clause_loc, t2, 2, t, alarg);
4128 t = fold_convert_loc (clause_loc, ptype, t);
4129 x = create_tmp_var (ptype);
4130 t = build2 (MODIFY_EXPR, ptype, x, t);
4131 gimplify_and_add (t, ilist);
4132 t = build_simple_mem_ref_loc (clause_loc, x);
4133 SET_DECL_VALUE_EXPR (new_var, t);
4134 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
4135 }
4136 continue;
4137 case OMP_CLAUSE__CONDTEMP_:
4138 if (is_parallel_ctx (ctx)
4139 || (is_simd && !OMP_CLAUSE__CONDTEMP__ITER (c)))
4140 break;
4141 continue;
4142 default:
4143 continue;
4144 }
4145
4146 if (task_reduction_p != (pass >= 2))
4147 continue;
4148
4149 new_var = var = OMP_CLAUSE_DECL (c);
4150 if ((c_kind == OMP_CLAUSE_REDUCTION
4151 || c_kind == OMP_CLAUSE_IN_REDUCTION)
4152 && TREE_CODE (var) == MEM_REF)
4153 {
4154 var = TREE_OPERAND (var, 0);
4155 if (TREE_CODE (var) == POINTER_PLUS_EXPR)
4156 var = TREE_OPERAND (var, 0);
4157 if (TREE_CODE (var) == INDIRECT_REF
4158 || TREE_CODE (var) == ADDR_EXPR)
4159 var = TREE_OPERAND (var, 0);
4160 if (is_variable_sized (var))
4161 {
4162 gcc_assert (DECL_HAS_VALUE_EXPR_P (var));
4163 var = DECL_VALUE_EXPR (var);
4164 gcc_assert (TREE_CODE (var) == INDIRECT_REF);
4165 var = TREE_OPERAND (var, 0);
4166 gcc_assert (DECL_P (var));
4167 }
4168 new_var = var;
4169 }
4170 if (c_kind != OMP_CLAUSE_COPYIN)
4171 new_var = lookup_decl (var, ctx);
4172
4173 if (c_kind == OMP_CLAUSE_SHARED || c_kind == OMP_CLAUSE_COPYIN)
4174 {
4175 if (pass != 0)
4176 continue;
4177 }
4178 /* C/C++ array section reductions. */
4179 else if ((c_kind == OMP_CLAUSE_REDUCTION
4180 || c_kind == OMP_CLAUSE_IN_REDUCTION)
4181 && var != OMP_CLAUSE_DECL (c))
4182 {
4183 if (pass == 0)
4184 continue;
4185
4186 tree bias = TREE_OPERAND (OMP_CLAUSE_DECL (c), 1);
4187 tree orig_var = TREE_OPERAND (OMP_CLAUSE_DECL (c), 0);
4188
4189 if (TREE_CODE (orig_var) == POINTER_PLUS_EXPR)
4190 {
4191 tree b = TREE_OPERAND (orig_var, 1);
4192 b = maybe_lookup_decl (b, ctx);
4193 if (b == NULL)
4194 {
4195 b = TREE_OPERAND (orig_var, 1);
4196 b = maybe_lookup_decl_in_outer_ctx (b, ctx);
4197 }
4198 if (integer_zerop (bias))
4199 bias = b;
4200 else
4201 {
4202 bias = fold_convert_loc (clause_loc,
4203 TREE_TYPE (b), bias);
4204 bias = fold_build2_loc (clause_loc, PLUS_EXPR,
4205 TREE_TYPE (b), b, bias);
4206 }
4207 orig_var = TREE_OPERAND (orig_var, 0);
4208 }
4209 if (pass == 2)
4210 {
4211 tree out = maybe_lookup_decl_in_outer_ctx (var, ctx);
4212 if (is_global_var (out)
4213 && TREE_CODE (TREE_TYPE (out)) != POINTER_TYPE
4214 && (TREE_CODE (TREE_TYPE (out)) != REFERENCE_TYPE
4215 || (TREE_CODE (TREE_TYPE (TREE_TYPE (out)))
4216 != POINTER_TYPE)))
4217 x = var;
4218 else
4219 {
4220 bool by_ref = use_pointer_for_field (var, NULL);
4221 x = build_receiver_ref (var, by_ref, ctx);
4222 if (TREE_CODE (TREE_TYPE (var)) == REFERENCE_TYPE
4223 && (TREE_CODE (TREE_TYPE (TREE_TYPE (var)))
4224 == POINTER_TYPE))
4225 x = build_fold_addr_expr (x);
4226 }
4227 if (TREE_CODE (orig_var) == INDIRECT_REF)
4228 x = build_simple_mem_ref (x);
4229 else if (TREE_CODE (orig_var) == ADDR_EXPR)
4230 {
4231 if (var == TREE_OPERAND (orig_var, 0))
4232 x = build_fold_addr_expr (x);
4233 }
4234 bias = fold_convert (sizetype, bias);
4235 x = fold_convert (ptr_type_node, x);
4236 x = fold_build2_loc (clause_loc, POINTER_PLUS_EXPR,
4237 TREE_TYPE (x), x, bias);
4238 unsigned cnt = task_reduction_cnt - 1;
4239 if (!task_reduction_needs_orig_p)
4240 cnt += (task_reduction_cntorig_full
4241 - task_reduction_cntorig);
4242 else
4243 cnt = task_reduction_cntorig - 1;
4244 tree r = build4 (ARRAY_REF, ptr_type_node, tskred_avar,
4245 size_int (cnt), NULL_TREE, NULL_TREE);
4246 gimplify_assign (r, x, ilist);
4247 continue;
4248 }
4249
4250 if (TREE_CODE (orig_var) == INDIRECT_REF
4251 || TREE_CODE (orig_var) == ADDR_EXPR)
4252 orig_var = TREE_OPERAND (orig_var, 0);
4253 tree d = OMP_CLAUSE_DECL (c);
4254 tree type = TREE_TYPE (d);
4255 gcc_assert (TREE_CODE (type) == ARRAY_TYPE);
4256 tree v = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
4257 const char *name = get_name (orig_var);
4258 if (pass == 3)
4259 {
4260 tree xv = create_tmp_var (ptr_type_node);
4261 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION)
4262 {
4263 unsigned cnt = task_reduction_cnt - 1;
4264 if (!task_reduction_needs_orig_p)
4265 cnt += (task_reduction_cntorig_full
4266 - task_reduction_cntorig);
4267 else
4268 cnt = task_reduction_cntorig - 1;
4269 x = build4 (ARRAY_REF, ptr_type_node, tskred_avar,
4270 size_int (cnt), NULL_TREE, NULL_TREE);
4271
4272 gimple *g = gimple_build_assign (xv, x);
4273 gimple_seq_add_stmt (ilist, g);
4274 }
4275 else
4276 {
4277 unsigned int idx = *ctx->task_reduction_map->get (c);
4278 tree off;
4279 if (ctx->task_reductions[1 + idx])
4280 off = fold_convert (sizetype,
4281 ctx->task_reductions[1 + idx]);
4282 else
4283 off = task_reduction_read (ilist, tskred_temp, sizetype,
4284 7 + 3 * idx + 1);
4285 gimple *g = gimple_build_assign (xv, POINTER_PLUS_EXPR,
4286 tskred_base, off);
4287 gimple_seq_add_stmt (ilist, g);
4288 }
4289 x = fold_convert (build_pointer_type (boolean_type_node),
4290 xv);
4291 if (TREE_CONSTANT (v))
4292 x = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (x), x,
4293 TYPE_SIZE_UNIT (type));
4294 else
4295 {
4296 tree t = maybe_lookup_decl (v, ctx);
4297 if (t)
4298 v = t;
4299 else
4300 v = maybe_lookup_decl_in_outer_ctx (v, ctx);
4301 gimplify_expr (&v, ilist, NULL, is_gimple_val,
4302 fb_rvalue);
4303 t = fold_build2_loc (clause_loc, PLUS_EXPR,
4304 TREE_TYPE (v), v,
4305 build_int_cst (TREE_TYPE (v), 1));
4306 t = fold_build2_loc (clause_loc, MULT_EXPR,
4307 TREE_TYPE (v), t,
4308 TYPE_SIZE_UNIT (TREE_TYPE (type)));
4309 x = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (x), x, t);
4310 }
4311 cond = create_tmp_var (TREE_TYPE (x));
4312 gimplify_assign (cond, x, ilist);
4313 x = xv;
4314 }
4315 else if (TREE_CONSTANT (v))
4316 {
4317 x = create_tmp_var_raw (type, name);
4318 gimple_add_tmp_var (x);
4319 TREE_ADDRESSABLE (x) = 1;
4320 x = build_fold_addr_expr_loc (clause_loc, x);
4321 }
4322 else
4323 {
4324 tree atmp
4325 = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
4326 tree t = maybe_lookup_decl (v, ctx);
4327 if (t)
4328 v = t;
4329 else
4330 v = maybe_lookup_decl_in_outer_ctx (v, ctx);
4331 gimplify_expr (&v, ilist, NULL, is_gimple_val, fb_rvalue);
4332 t = fold_build2_loc (clause_loc, PLUS_EXPR,
4333 TREE_TYPE (v), v,
4334 build_int_cst (TREE_TYPE (v), 1));
4335 t = fold_build2_loc (clause_loc, MULT_EXPR,
4336 TREE_TYPE (v), t,
4337 TYPE_SIZE_UNIT (TREE_TYPE (type)));
4338 tree al = size_int (TYPE_ALIGN (TREE_TYPE (type)));
4339 x = build_call_expr_loc (clause_loc, atmp, 2, t, al);
4340 }
4341
4342 tree ptype = build_pointer_type (TREE_TYPE (type));
4343 x = fold_convert_loc (clause_loc, ptype, x);
4344 tree y = create_tmp_var (ptype, name);
4345 gimplify_assign (y, x, ilist);
4346 x = y;
4347 tree yb = y;
4348
4349 if (!integer_zerop (bias))
4350 {
4351 bias = fold_convert_loc (clause_loc, pointer_sized_int_node,
4352 bias);
4353 yb = fold_convert_loc (clause_loc, pointer_sized_int_node,
4354 x);
4355 yb = fold_build2_loc (clause_loc, MINUS_EXPR,
4356 pointer_sized_int_node, yb, bias);
4357 x = fold_convert_loc (clause_loc, TREE_TYPE (x), yb);
4358 yb = create_tmp_var (ptype, name);
4359 gimplify_assign (yb, x, ilist);
4360 x = yb;
4361 }
4362
4363 d = TREE_OPERAND (d, 0);
4364 if (TREE_CODE (d) == POINTER_PLUS_EXPR)
4365 d = TREE_OPERAND (d, 0);
4366 if (TREE_CODE (d) == ADDR_EXPR)
4367 {
4368 if (orig_var != var)
4369 {
4370 gcc_assert (is_variable_sized (orig_var));
4371 x = fold_convert_loc (clause_loc, TREE_TYPE (new_var),
4372 x);
4373 gimplify_assign (new_var, x, ilist);
4374 tree new_orig_var = lookup_decl (orig_var, ctx);
4375 tree t = build_fold_indirect_ref (new_var);
4376 DECL_IGNORED_P (new_var) = 0;
4377 TREE_THIS_NOTRAP (t) = 1;
4378 SET_DECL_VALUE_EXPR (new_orig_var, t);
4379 DECL_HAS_VALUE_EXPR_P (new_orig_var) = 1;
4380 }
4381 else
4382 {
4383 x = build2 (MEM_REF, TREE_TYPE (new_var), x,
4384 build_int_cst (ptype, 0));
4385 SET_DECL_VALUE_EXPR (new_var, x);
4386 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
4387 }
4388 }
4389 else
4390 {
4391 gcc_assert (orig_var == var);
4392 if (TREE_CODE (d) == INDIRECT_REF)
4393 {
4394 x = create_tmp_var (ptype, name);
4395 TREE_ADDRESSABLE (x) = 1;
4396 gimplify_assign (x, yb, ilist);
4397 x = build_fold_addr_expr_loc (clause_loc, x);
4398 }
4399 x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x);
4400 gimplify_assign (new_var, x, ilist);
4401 }
4402 /* GOMP_taskgroup_reduction_register memsets the whole
4403 array to zero. If the initializer is zero, we don't
4404 need to initialize it again, just mark it as ever
4405 used unconditionally, i.e. cond = true. */
4406 if (cond
4407 && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) == NULL_TREE
4408 && initializer_zerop (omp_reduction_init (c,
4409 TREE_TYPE (type))))
4410 {
4411 gimple *g = gimple_build_assign (build_simple_mem_ref (cond),
4412 boolean_true_node);
4413 gimple_seq_add_stmt (ilist, g);
4414 continue;
4415 }
4416 tree end = create_artificial_label (UNKNOWN_LOCATION);
4417 if (cond)
4418 {
4419 gimple *g;
4420 if (!is_parallel_ctx (ctx))
4421 {
4422 tree condv = create_tmp_var (boolean_type_node);
4423 g = gimple_build_assign (condv,
4424 build_simple_mem_ref (cond));
4425 gimple_seq_add_stmt (ilist, g);
4426 tree lab1 = create_artificial_label (UNKNOWN_LOCATION);
4427 g = gimple_build_cond (NE_EXPR, condv,
4428 boolean_false_node, end, lab1);
4429 gimple_seq_add_stmt (ilist, g);
4430 gimple_seq_add_stmt (ilist, gimple_build_label (lab1));
4431 }
4432 g = gimple_build_assign (build_simple_mem_ref (cond),
4433 boolean_true_node);
4434 gimple_seq_add_stmt (ilist, g);
4435 }
4436
4437 tree y1 = create_tmp_var (ptype);
4438 gimplify_assign (y1, y, ilist);
4439 tree i2 = NULL_TREE, y2 = NULL_TREE;
4440 tree body2 = NULL_TREE, end2 = NULL_TREE;
4441 tree y3 = NULL_TREE, y4 = NULL_TREE;
4442 if (task_reduction_needs_orig_p)
4443 {
4444 y3 = create_tmp_var (ptype);
4445 tree ref;
4446 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION)
4447 ref = build4 (ARRAY_REF, ptr_type_node, tskred_avar,
4448 size_int (task_reduction_cnt_full
4449 + task_reduction_cntorig - 1),
4450 NULL_TREE, NULL_TREE);
4451 else
4452 {
4453 unsigned int idx = *ctx->task_reduction_map->get (c);
4454 ref = task_reduction_read (ilist, tskred_temp, ptype,
4455 7 + 3 * idx);
4456 }
4457 gimplify_assign (y3, ref, ilist);
4458 }
4459 else if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) || is_simd)
4460 {
4461 if (pass != 3)
4462 {
4463 y2 = create_tmp_var (ptype);
4464 gimplify_assign (y2, y, ilist);
4465 }
4466 if (is_simd || OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c))
4467 {
4468 tree ref = build_outer_var_ref (var, ctx);
4469 /* For references, build_outer_var_ref already performs this. */
4470 if (TREE_CODE (d) == INDIRECT_REF)
4471 gcc_assert (omp_is_reference (var));
4472 else if (TREE_CODE (d) == ADDR_EXPR)
4473 ref = build_fold_addr_expr (ref);
4474 else if (omp_is_reference (var))
4475 ref = build_fold_addr_expr (ref);
4476 ref = fold_convert_loc (clause_loc, ptype, ref);
4477 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c)
4478 && OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c))
4479 {
4480 y3 = create_tmp_var (ptype);
4481 gimplify_assign (y3, unshare_expr (ref), ilist);
4482 }
4483 if (is_simd)
4484 {
4485 y4 = create_tmp_var (ptype);
4486 gimplify_assign (y4, ref, dlist);
4487 }
4488 }
4489 }
4490 tree i = create_tmp_var (TREE_TYPE (v));
4491 gimplify_assign (i, build_int_cst (TREE_TYPE (v), 0), ilist);
4492 tree body = create_artificial_label (UNKNOWN_LOCATION);
4493 gimple_seq_add_stmt (ilist, gimple_build_label (body));
4494 if (y2)
4495 {
4496 i2 = create_tmp_var (TREE_TYPE (v));
4497 gimplify_assign (i2, build_int_cst (TREE_TYPE (v), 0), dlist);
4498 body2 = create_artificial_label (UNKNOWN_LOCATION);
4499 end2 = create_artificial_label (UNKNOWN_LOCATION);
4500 gimple_seq_add_stmt (dlist, gimple_build_label (body2));
4501 }
4502 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
4503 {
4504 tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
4505 tree decl_placeholder
4506 = OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c);
4507 SET_DECL_VALUE_EXPR (decl_placeholder,
4508 build_simple_mem_ref (y1));
4509 DECL_HAS_VALUE_EXPR_P (decl_placeholder) = 1;
4510 SET_DECL_VALUE_EXPR (placeholder,
4511 y3 ? build_simple_mem_ref (y3)
4512 : error_mark_node);
4513 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
4514 x = lang_hooks.decls.omp_clause_default_ctor
4515 (c, build_simple_mem_ref (y1),
4516 y3 ? build_simple_mem_ref (y3) : NULL_TREE);
4517 if (x)
4518 gimplify_and_add (x, ilist);
4519 if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c))
4520 {
4521 gimple_seq tseq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c);
4522 lower_omp (&tseq, ctx);
4523 gimple_seq_add_seq (ilist, tseq);
4524 }
4525 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL;
4526 if (is_simd)
4527 {
4528 SET_DECL_VALUE_EXPR (decl_placeholder,
4529 build_simple_mem_ref (y2));
4530 SET_DECL_VALUE_EXPR (placeholder,
4531 build_simple_mem_ref (y4));
4532 gimple_seq tseq = OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c);
4533 lower_omp (&tseq, ctx);
4534 gimple_seq_add_seq (dlist, tseq);
4535 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
4536 }
4537 DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
4538 DECL_HAS_VALUE_EXPR_P (decl_placeholder) = 0;
4539 if (y2)
4540 {
4541 x = lang_hooks.decls.omp_clause_dtor
4542 (c, build_simple_mem_ref (y2));
4543 if (x)
4544 {
4545 gimple_seq tseq = NULL;
4546 dtor = x;
4547 gimplify_stmt (&dtor, &tseq);
4548 gimple_seq_add_seq (dlist, tseq);
4549 }
4550 }
4551 }
4552 else
4553 {
4554 x = omp_reduction_init (c, TREE_TYPE (type));
4555 enum tree_code code = OMP_CLAUSE_REDUCTION_CODE (c);
4556
4557 /* reduction(-:var) sums up the partial results, so it
4558 acts identically to reduction(+:var). */
4559 if (code == MINUS_EXPR)
4560 code = PLUS_EXPR;
4561
4562 gimplify_assign (build_simple_mem_ref (y1), x, ilist);
4563 if (is_simd)
4564 {
4565 x = build2 (code, TREE_TYPE (type),
4566 build_simple_mem_ref (y4),
4567 build_simple_mem_ref (y2));
4568 gimplify_assign (build_simple_mem_ref (y4), x, dlist);
4569 }
4570 }
4571 gimple *g
4572 = gimple_build_assign (y1, POINTER_PLUS_EXPR, y1,
4573 TYPE_SIZE_UNIT (TREE_TYPE (type)));
4574 gimple_seq_add_stmt (ilist, g);
4575 if (y3)
4576 {
4577 g = gimple_build_assign (y3, POINTER_PLUS_EXPR, y3,
4578 TYPE_SIZE_UNIT (TREE_TYPE (type)));
4579 gimple_seq_add_stmt (ilist, g);
4580 }
4581 g = gimple_build_assign (i, PLUS_EXPR, i,
4582 build_int_cst (TREE_TYPE (i), 1));
4583 gimple_seq_add_stmt (ilist, g);
4584 g = gimple_build_cond (LE_EXPR, i, v, body, end);
4585 gimple_seq_add_stmt (ilist, g);
4586 gimple_seq_add_stmt (ilist, gimple_build_label (end));
4587 if (y2)
4588 {
4589 g = gimple_build_assign (y2, POINTER_PLUS_EXPR, y2,
4590 TYPE_SIZE_UNIT (TREE_TYPE (type)));
4591 gimple_seq_add_stmt (dlist, g);
4592 if (y4)
4593 {
4594 g = gimple_build_assign
4595 (y4, POINTER_PLUS_EXPR, y4,
4596 TYPE_SIZE_UNIT (TREE_TYPE (type)));
4597 gimple_seq_add_stmt (dlist, g);
4598 }
4599 g = gimple_build_assign (i2, PLUS_EXPR, i2,
4600 build_int_cst (TREE_TYPE (i2), 1));
4601 gimple_seq_add_stmt (dlist, g);
4602 g = gimple_build_cond (LE_EXPR, i2, v, body2, end2);
4603 gimple_seq_add_stmt (dlist, g);
4604 gimple_seq_add_stmt (dlist, gimple_build_label (end2));
4605 }
4606 continue;
4607 }
4608 else if (pass == 2)
4609 {
4610 if (is_global_var (maybe_lookup_decl_in_outer_ctx (var, ctx)))
4611 x = var;
4612 else
4613 {
4614 bool by_ref = use_pointer_for_field (var, ctx);
4615 x = build_receiver_ref (var, by_ref, ctx);
4616 }
4617 if (!omp_is_reference (var))
4618 x = build_fold_addr_expr (x);
4619 x = fold_convert (ptr_type_node, x);
4620 unsigned cnt = task_reduction_cnt - 1;
4621 if (!task_reduction_needs_orig_p)
4622 cnt += task_reduction_cntorig_full - task_reduction_cntorig;
4623 else
4624 cnt = task_reduction_cntorig - 1;
4625 tree r = build4 (ARRAY_REF, ptr_type_node, tskred_avar,
4626 size_int (cnt), NULL_TREE, NULL_TREE);
4627 gimplify_assign (r, x, ilist);
4628 continue;
4629 }
4630 else if (pass == 3)
4631 {
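/* Pass 3 for a scalar task reduction: make NEW_VAR refer to this
thread's slot in the reduction block, found either through the
remapped tskred_avar array or at tskred_base + offset, and set COND
to the address of the flag just past the data that records whether
the private copy has ever been used.  */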
4632 tree type = TREE_TYPE (new_var);
4633 if (!omp_is_reference (var))
4634 type = build_pointer_type (type);
4635 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION)
4636 {
4637 unsigned cnt = task_reduction_cnt - 1;
4638 if (!task_reduction_needs_orig_p)
4639 cnt += (task_reduction_cntorig_full
4640 - task_reduction_cntorig);
4641 else
4642 cnt = task_reduction_cntorig - 1;
4643 x = build4 (ARRAY_REF, ptr_type_node, tskred_avar,
4644 size_int (cnt), NULL_TREE, NULL_TREE);
4645 }
4646 else
4647 {
4648 unsigned int idx = *ctx->task_reduction_map->get (c);
4649 tree off;
4650 if (ctx->task_reductions[1 + idx])
4651 off = fold_convert (sizetype,
4652 ctx->task_reductions[1 + idx]);
4653 else
4654 off = task_reduction_read (ilist, tskred_temp, sizetype,
4655 7 + 3 * idx + 1);
4656 x = fold_build2 (POINTER_PLUS_EXPR, ptr_type_node,
4657 tskred_base, off);
4658 }
4659 x = fold_convert (type, x);
4660 tree t;
4661 if (omp_is_reference (var))
4662 {
4663 gimplify_assign (new_var, x, ilist);
4664 t = new_var;
4665 new_var = build_simple_mem_ref (new_var);
4666 }
4667 else
4668 {
4669 t = create_tmp_var (type);
4670 gimplify_assign (t, x, ilist);
4671 SET_DECL_VALUE_EXPR (new_var, build_simple_mem_ref (t));
4672 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
4673 }
4674 t = fold_convert (build_pointer_type (boolean_type_node), t);
4675 t = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (t), t,
4676 TYPE_SIZE_UNIT (TREE_TYPE (type)));
4677 cond = create_tmp_var (TREE_TYPE (t));
4678 gimplify_assign (cond, t, ilist);
4679 }
4680 else if (is_variable_sized (var))
4681 {
4682 /* For variable sized types, we need to allocate the
4683 actual storage here. Call alloca and store the
4684 result in the pointer decl that we created elsewhere. */
4685 if (pass == 0)
4686 continue;
4687
4688 if (c_kind != OMP_CLAUSE_FIRSTPRIVATE || !is_task_ctx (ctx))
4689 {
4690 gcall *stmt;
4691 tree tmp, atmp;
4692
4693 ptr = DECL_VALUE_EXPR (new_var);
4694 gcc_assert (TREE_CODE (ptr) == INDIRECT_REF);
4695 ptr = TREE_OPERAND (ptr, 0);
4696 gcc_assert (DECL_P (ptr));
4697 x = TYPE_SIZE_UNIT (TREE_TYPE (new_var));
4698
4699 /* void *tmp = __builtin_alloca_with_align (size, DECL_ALIGN (var)) */
4700 atmp = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
4701 stmt = gimple_build_call (atmp, 2, x,
4702 size_int (DECL_ALIGN (var)));
4703 tmp = create_tmp_var_raw (ptr_type_node);
4704 gimple_add_tmp_var (tmp);
4705 gimple_call_set_lhs (stmt, tmp);
4706
4707 gimple_seq_add_stmt (ilist, stmt);
4708
4709 x = fold_convert_loc (clause_loc, TREE_TYPE (ptr), tmp);
4710 gimplify_assign (ptr, x, ilist);
4711 }
4712 }
4713 else if (omp_is_reference (var)
4714 && (c_kind != OMP_CLAUSE_FIRSTPRIVATE
4715 || !OMP_CLAUSE_FIRSTPRIVATE_NO_REFERENCE (c)))
4716 {
4717 /* For references that are being privatized for Fortran,
4718 allocate new backing storage for the new pointer
4719 variable. This avoids having to change all the code that
4720 expects a pointer into code that expects a direct
4721 variable. */
4722 if (pass == 0)
4723 continue;
4724
4725 x = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (new_var)));
4726 if (c_kind == OMP_CLAUSE_FIRSTPRIVATE && is_task_ctx (ctx))
4727 {
4728 x = build_receiver_ref (var, false, ctx);
4729 x = build_fold_addr_expr_loc (clause_loc, x);
4730 }
4731 else if (TREE_CONSTANT (x))
4732 {
4733 /* For a reduction in a SIMD loop, defer adding the
4734 initialization of the reference, because if we decide
4735 to use a SIMD array for it, the initialization could cause
4736 an expansion ICE. Ditto for other privatization clauses. */
4737 if (is_simd)
4738 x = NULL_TREE;
4739 else
4740 {
4741 x = create_tmp_var_raw (TREE_TYPE (TREE_TYPE (new_var)),
4742 get_name (var));
4743 gimple_add_tmp_var (x);
4744 TREE_ADDRESSABLE (x) = 1;
4745 x = build_fold_addr_expr_loc (clause_loc, x);
4746 }
4747 }
4748 else
4749 {
4750 tree atmp
4751 = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
4752 tree rtype = TREE_TYPE (TREE_TYPE (new_var));
4753 tree al = size_int (TYPE_ALIGN (rtype));
4754 x = build_call_expr_loc (clause_loc, atmp, 2, x, al);
4755 }
4756
4757 if (x)
4758 {
4759 x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x);
4760 gimplify_assign (new_var, x, ilist);
4761 }
4762
4763 new_var = build_simple_mem_ref_loc (clause_loc, new_var);
4764 }
4765 else if ((c_kind == OMP_CLAUSE_REDUCTION
4766 || c_kind == OMP_CLAUSE_IN_REDUCTION)
4767 && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
4768 {
4769 if (pass == 0)
4770 continue;
4771 }
4772 else if (pass != 0)
4773 continue;
4774
4775 switch (OMP_CLAUSE_CODE (c))
4776 {
4777 case OMP_CLAUSE_SHARED:
4778 /* Ignore shared directives in teams construct inside
4779 target construct. */
4780 if (gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS
4781 && !is_host_teams_ctx (ctx))
4782 continue;
4783 /* Shared global vars are just accessed directly. */
4784 if (is_global_var (new_var))
4785 break;
4786 /* For taskloop firstprivate/lastprivate, represented
4787 as firstprivate and shared clause on the task, new_var
4788 is the firstprivate var. */
4789 if (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
4790 break;
4791 /* Set up the DECL_VALUE_EXPR for shared variables now. This
4792 needs to be delayed until after fixup_child_record_type so
4793 that we get the correct type during the dereference. */
4794 by_ref = use_pointer_for_field (var, ctx);
4795 x = build_receiver_ref (var, by_ref, ctx);
4796 SET_DECL_VALUE_EXPR (new_var, x);
4797 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
4798
4799 /* ??? If VAR is not passed by reference, and the variable
4800 hasn't been initialized yet, then we'll get a warning for
4801 the store into the omp_data_s structure. Ideally, we'd be
4802 able to notice this and not store anything at all, but
4803 we're generating code too early. Suppress the warning. */
4804 if (!by_ref)
4805 TREE_NO_WARNING (var) = 1;
4806 break;
4807
4808 case OMP_CLAUSE__CONDTEMP_:
4809 if (is_parallel_ctx (ctx))
4810 {
4811 x = build_receiver_ref (var, false, ctx);
4812 SET_DECL_VALUE_EXPR (new_var, x);
4813 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
4814 }
4815 else if (is_simd && !OMP_CLAUSE__CONDTEMP__ITER (c))
4816 {
4817 x = build_zero_cst (TREE_TYPE (var));
4818 goto do_private;
4819 }
4820 break;
4821
4822 case OMP_CLAUSE_LASTPRIVATE:
4823 if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
4824 break;
4825 /* FALLTHRU */
4826
4827 case OMP_CLAUSE_PRIVATE:
4828 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_PRIVATE)
4829 x = build_outer_var_ref (var, ctx);
4830 else if (OMP_CLAUSE_PRIVATE_OUTER_REF (c))
4831 {
4832 if (is_task_ctx (ctx))
4833 x = build_receiver_ref (var, false, ctx);
4834 else
4835 x = build_outer_var_ref (var, ctx, OMP_CLAUSE_PRIVATE);
4836 }
4837 else
4838 x = NULL;
4839 do_private:
4840 tree nx;
4841 nx = lang_hooks.decls.omp_clause_default_ctor
4842 (c, unshare_expr (new_var), x);
4843 if (is_simd)
4844 {
4845 tree y = lang_hooks.decls.omp_clause_dtor (c, new_var);
4846 if ((TREE_ADDRESSABLE (new_var) || nx || y
4847 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
4848 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE__CONDTEMP_
4849 || omp_is_reference (var))
4850 && lower_rec_simd_input_clauses (new_var, ctx, &sctx,
4851 ivar, lvar))
4852 {
4853 if (omp_is_reference (var))
4854 {
4855 gcc_assert (TREE_CODE (new_var) == MEM_REF);
4856 tree new_vard = TREE_OPERAND (new_var, 0);
4857 gcc_assert (DECL_P (new_vard));
4858 SET_DECL_VALUE_EXPR (new_vard,
4859 build_fold_addr_expr (lvar));
4860 DECL_HAS_VALUE_EXPR_P (new_vard) = 1;
4861 }
4862
4863 if (nx)
4864 x = lang_hooks.decls.omp_clause_default_ctor
4865 (c, unshare_expr (ivar), x);
4866 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE__CONDTEMP_)
4867 {
4868 x = build2 (MODIFY_EXPR, TREE_TYPE (ivar),
4869 unshare_expr (ivar), x);
4870 nx = x;
4871 }
4872 if (nx && x)
4873 gimplify_and_add (x, &llist[0]);
4874 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
4875 && OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c))
4876 {
4877 tree v = new_var;
4878 if (!DECL_P (v))
4879 {
4880 gcc_assert (TREE_CODE (v) == MEM_REF);
4881 v = TREE_OPERAND (v, 0);
4882 gcc_assert (DECL_P (v));
4883 }
4884 v = *ctx->lastprivate_conditional_map->get (v);
4885 tree t = create_tmp_var (TREE_TYPE (v));
4886 tree z = build_zero_cst (TREE_TYPE (v));
4887 tree orig_v
4888 = build_outer_var_ref (var, ctx,
4889 OMP_CLAUSE_LASTPRIVATE);
4890 gimple_seq_add_stmt (dlist,
4891 gimple_build_assign (t, z));
4892 gcc_assert (DECL_HAS_VALUE_EXPR_P (v));
4893 tree civar = DECL_VALUE_EXPR (v);
4894 gcc_assert (TREE_CODE (civar) == ARRAY_REF);
4895 civar = unshare_expr (civar);
4896 TREE_OPERAND (civar, 1) = sctx.idx;
4897 x = build2 (MODIFY_EXPR, TREE_TYPE (t), t,
4898 unshare_expr (civar));
4899 x = build2 (COMPOUND_EXPR, TREE_TYPE (orig_v), x,
4900 build2 (MODIFY_EXPR, TREE_TYPE (orig_v),
4901 orig_v, unshare_expr (ivar)));
4902 tree cond = build2 (LT_EXPR, boolean_type_node, t,
4903 civar);
4904 x = build3 (COND_EXPR, void_type_node, cond, x,
4905 void_node);
4906 gimple_seq tseq = NULL;
4907 gimplify_and_add (x, &tseq);
4908 if (ctx->outer)
4909 lower_omp (&tseq, ctx->outer);
4910 gimple_seq_add_seq (&llist[1], tseq);
4911 }
4912 if (y)
4913 {
4914 y = lang_hooks.decls.omp_clause_dtor (c, ivar);
4915 if (y)
4916 {
4917 gimple_seq tseq = NULL;
4918
4919 dtor = y;
4920 gimplify_stmt (&dtor, &tseq);
4921 gimple_seq_add_seq (&llist[1], tseq);
4922 }
4923 }
4924 break;
4925 }
4926 if (omp_is_reference (var))
4927 {
4928 gcc_assert (TREE_CODE (new_var) == MEM_REF);
4929 tree new_vard = TREE_OPERAND (new_var, 0);
4930 gcc_assert (DECL_P (new_vard));
4931 tree type = TREE_TYPE (TREE_TYPE (new_vard));
4932 x = TYPE_SIZE_UNIT (type);
4933 if (TREE_CONSTANT (x))
4934 {
4935 x = create_tmp_var_raw (type, get_name (var));
4936 gimple_add_tmp_var (x);
4937 TREE_ADDRESSABLE (x) = 1;
4938 x = build_fold_addr_expr_loc (clause_loc, x);
4939 x = fold_convert_loc (clause_loc,
4940 TREE_TYPE (new_vard), x);
4941 gimplify_assign (new_vard, x, ilist);
4942 }
4943 }
4944 }
4945 if (nx)
4946 gimplify_and_add (nx, ilist);
4947 /* FALLTHRU */
4948
4949 do_dtor:
4950 x = lang_hooks.decls.omp_clause_dtor (c, new_var);
4951 if (x)
4952 {
4953 gimple_seq tseq = NULL;
4954
4955 dtor = x;
4956 gimplify_stmt (&dtor, &tseq);
4957 gimple_seq_add_seq (dlist, tseq);
4958 }
4959 break;
4960
4961 case OMP_CLAUSE_LINEAR:
4962 if (!OMP_CLAUSE_LINEAR_NO_COPYIN (c))
4963 goto do_firstprivate;
4964 if (OMP_CLAUSE_LINEAR_NO_COPYOUT (c))
4965 x = NULL;
4966 else
4967 x = build_outer_var_ref (var, ctx);
4968 goto do_private;
4969
4970 case OMP_CLAUSE_FIRSTPRIVATE:
4971 if (is_task_ctx (ctx))
4972 {
4973 if ((omp_is_reference (var)
4974 && !OMP_CLAUSE_FIRSTPRIVATE_NO_REFERENCE (c))
4975 || is_variable_sized (var))
4976 goto do_dtor;
4977 else if (is_global_var (maybe_lookup_decl_in_outer_ctx (var,
4978 ctx))
4979 || use_pointer_for_field (var, NULL))
4980 {
4981 x = build_receiver_ref (var, false, ctx);
4982 SET_DECL_VALUE_EXPR (new_var, x);
4983 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
4984 goto do_dtor;
4985 }
4986 }
4987 if (OMP_CLAUSE_FIRSTPRIVATE_NO_REFERENCE (c)
4988 && omp_is_reference (var))
4989 {
4990 x = build_outer_var_ref (var, ctx);
4991 gcc_assert (TREE_CODE (x) == MEM_REF
4992 && integer_zerop (TREE_OPERAND (x, 1)));
4993 x = TREE_OPERAND (x, 0);
4994 x = lang_hooks.decls.omp_clause_copy_ctor
4995 (c, unshare_expr (new_var), x);
4996 gimplify_and_add (x, ilist);
4997 goto do_dtor;
4998 }
4999 do_firstprivate:
5000 x = build_outer_var_ref (var, ctx);
5001 if (is_simd)
5002 {
5003 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
5004 && gimple_omp_for_combined_into_p (ctx->stmt))
5005 {
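/* Compute how many iterations of the enclosing combined loop precede
this chunk, l = (looptemp - n1) / step (negated for a downward
unsigned loop), and advance the linear variable by l linear steps. */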
5006 tree t = OMP_CLAUSE_LINEAR_STEP (c);
5007 tree stept = TREE_TYPE (t);
5008 tree ct = omp_find_clause (clauses,
5009 OMP_CLAUSE__LOOPTEMP_);
5010 gcc_assert (ct);
5011 tree l = OMP_CLAUSE_DECL (ct);
5012 tree n1 = fd->loop.n1;
5013 tree step = fd->loop.step;
5014 tree itype = TREE_TYPE (l);
5015 if (POINTER_TYPE_P (itype))
5016 itype = signed_type_for (itype);
5017 l = fold_build2 (MINUS_EXPR, itype, l, n1);
5018 if (TYPE_UNSIGNED (itype)
5019 && fd->loop.cond_code == GT_EXPR)
5020 l = fold_build2 (TRUNC_DIV_EXPR, itype,
5021 fold_build1 (NEGATE_EXPR, itype, l),
5022 fold_build1 (NEGATE_EXPR,
5023 itype, step));
5024 else
5025 l = fold_build2 (TRUNC_DIV_EXPR, itype, l, step);
5026 t = fold_build2 (MULT_EXPR, stept,
5027 fold_convert (stept, l), t);
5028
5029 if (OMP_CLAUSE_LINEAR_ARRAY (c))
5030 {
5031 if (omp_is_reference (var))
5032 {
5033 gcc_assert (TREE_CODE (new_var) == MEM_REF);
5034 tree new_vard = TREE_OPERAND (new_var, 0);
5035 gcc_assert (DECL_P (new_vard));
5036 tree type = TREE_TYPE (TREE_TYPE (new_vard));
5037 nx = TYPE_SIZE_UNIT (type);
5038 if (TREE_CONSTANT (nx))
5039 {
5040 nx = create_tmp_var_raw (type,
5041 get_name (var));
5042 gimple_add_tmp_var (nx);
5043 TREE_ADDRESSABLE (nx) = 1;
5044 nx = build_fold_addr_expr_loc (clause_loc,
5045 nx);
5046 nx = fold_convert_loc (clause_loc,
5047 TREE_TYPE (new_vard),
5048 nx);
5049 gimplify_assign (new_vard, nx, ilist);
5050 }
5051 }
5052
5053 x = lang_hooks.decls.omp_clause_linear_ctor
5054 (c, new_var, x, t);
5055 gimplify_and_add (x, ilist);
5056 goto do_dtor;
5057 }
5058
5059 if (POINTER_TYPE_P (TREE_TYPE (x)))
5060 x = fold_build2 (POINTER_PLUS_EXPR,
5061 TREE_TYPE (x), x, t);
5062 else
5063 x = fold_build2 (PLUS_EXPR, TREE_TYPE (x), x, t);
5064 }
5065
5066 if ((OMP_CLAUSE_CODE (c) != OMP_CLAUSE_LINEAR
5067 || TREE_ADDRESSABLE (new_var)
5068 || omp_is_reference (var))
5069 && lower_rec_simd_input_clauses (new_var, ctx, &sctx,
5070 ivar, lvar))
5071 {
5072 if (omp_is_reference (var))
5073 {
5074 gcc_assert (TREE_CODE (new_var) == MEM_REF);
5075 tree new_vard = TREE_OPERAND (new_var, 0);
5076 gcc_assert (DECL_P (new_vard));
5077 SET_DECL_VALUE_EXPR (new_vard,
5078 build_fold_addr_expr (lvar));
5079 DECL_HAS_VALUE_EXPR_P (new_vard) = 1;
5080 }
5081 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR)
5082 {
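/* A sketch of what this builds for a linear var in a SIMD lane:

     iv = init;            (copy-constructed from the start value)

   and at the top of the loop body:

     lane-element = iv;
     iv += linear-step;    (POINTER_PLUS_EXPR for pointers)  */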
5083 tree iv = create_tmp_var (TREE_TYPE (new_var));
5084 x = lang_hooks.decls.omp_clause_copy_ctor (c, iv, x);
5085 gimplify_and_add (x, ilist);
5086 gimple_stmt_iterator gsi
5087 = gsi_start_1 (gimple_omp_body_ptr (ctx->stmt));
5088 gassign *g
5089 = gimple_build_assign (unshare_expr (lvar), iv);
5090 gsi_insert_before_without_update (&gsi, g,
5091 GSI_SAME_STMT);
5092 tree t = OMP_CLAUSE_LINEAR_STEP (c);
5093 enum tree_code code = PLUS_EXPR;
5094 if (POINTER_TYPE_P (TREE_TYPE (new_var)))
5095 code = POINTER_PLUS_EXPR;
5096 g = gimple_build_assign (iv, code, iv, t);
5097 gsi_insert_before_without_update (&gsi, g,
5098 GSI_SAME_STMT);
5099 break;
5100 }
5101 x = lang_hooks.decls.omp_clause_copy_ctor
5102 (c, unshare_expr (ivar), x);
5103 gimplify_and_add (x, &llist[0]);
5104 x = lang_hooks.decls.omp_clause_dtor (c, ivar);
5105 if (x)
5106 {
5107 gimple_seq tseq = NULL;
5108
5109 dtor = x;
5110 gimplify_stmt (&dtor, &tseq);
5111 gimple_seq_add_seq (&llist[1], tseq);
5112 }
5113 break;
5114 }
5115 if (omp_is_reference (var))
5116 {
5117 gcc_assert (TREE_CODE (new_var) == MEM_REF);
5118 tree new_vard = TREE_OPERAND (new_var, 0);
5119 gcc_assert (DECL_P (new_vard));
5120 tree type = TREE_TYPE (TREE_TYPE (new_vard));
5121 nx = TYPE_SIZE_UNIT (type);
5122 if (TREE_CONSTANT (nx))
5123 {
5124 nx = create_tmp_var_raw (type, get_name (var));
5125 gimple_add_tmp_var (nx);
5126 TREE_ADDRESSABLE (nx) = 1;
5127 nx = build_fold_addr_expr_loc (clause_loc, nx);
5128 nx = fold_convert_loc (clause_loc,
5129 TREE_TYPE (new_vard), nx);
5130 gimplify_assign (new_vard, nx, ilist);
5131 }
5132 }
5133 }
5134 x = lang_hooks.decls.omp_clause_copy_ctor
5135 (c, unshare_expr (new_var), x);
5136 gimplify_and_add (x, ilist);
5137 goto do_dtor;
5138
5139 case OMP_CLAUSE__LOOPTEMP_:
5140 case OMP_CLAUSE__REDUCTEMP_:
5141 gcc_assert (is_taskreg_ctx (ctx));
5142 x = build_outer_var_ref (var, ctx);
5143 x = build2 (MODIFY_EXPR, TREE_TYPE (new_var), new_var, x);
5144 gimplify_and_add (x, ilist);
5145 break;
5146
5147 case OMP_CLAUSE_COPYIN:
5148 by_ref = use_pointer_for_field (var, NULL);
5149 x = build_receiver_ref (var, by_ref, ctx);
5150 x = lang_hooks.decls.omp_clause_assign_op (c, new_var, x);
5151 append_to_statement_list (x, &copyin_seq);
5152 copyin_by_ref |= by_ref;
5153 break;
5154
5155 case OMP_CLAUSE_REDUCTION:
5156 case OMP_CLAUSE_IN_REDUCTION:
5157 /* OpenACC reductions are initialized using the
5158 GOACC_REDUCTION internal function. */
5159 if (is_gimple_omp_oacc (ctx->stmt))
5160 break;
5161 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
5162 {
5163 tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
5164 gimple *tseq;
5165 tree ptype = TREE_TYPE (placeholder);
5166 if (cond)
5167 {
5168 x = error_mark_node;
5169 if (OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c)
5170 && !task_reduction_needs_orig_p)
5171 x = var;
5172 else if (OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c))
5173 {
5174 tree pptype = build_pointer_type (ptype);
5175 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION)
5176 x = build4 (ARRAY_REF, ptr_type_node, tskred_avar,
5177 size_int (task_reduction_cnt_full
5178 + task_reduction_cntorig - 1),
5179 NULL_TREE, NULL_TREE);
5180 else
5181 {
5182 unsigned int idx
5183 = *ctx->task_reduction_map->get (c);
5184 x = task_reduction_read (ilist, tskred_temp,
5185 pptype, 7 + 3 * idx);
5186 }
5187 x = fold_convert (pptype, x);
5188 x = build_simple_mem_ref (x);
5189 }
5190 }
5191 else
5192 {
5193 x = build_outer_var_ref (var, ctx);
5194
5195 if (omp_is_reference (var)
5196 && !useless_type_conversion_p (ptype, TREE_TYPE (x)))
5197 x = build_fold_addr_expr_loc (clause_loc, x);
5198 }
5199 SET_DECL_VALUE_EXPR (placeholder, x);
5200 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
5201 tree new_vard = new_var;
5202 if (omp_is_reference (var))
5203 {
5204 gcc_assert (TREE_CODE (new_var) == MEM_REF);
5205 new_vard = TREE_OPERAND (new_var, 0);
5206 gcc_assert (DECL_P (new_vard));
5207 }
5208 tree rvar = NULL_TREE, *rvarp = NULL, rvar2 = NULL_TREE;
5209 if (is_simd
5210 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
5211 && OMP_CLAUSE_REDUCTION_INSCAN (c))
5212 rvarp = &rvar;
5213 if (is_simd
5214 && lower_rec_simd_input_clauses (new_var, ctx, &sctx,
5215 ivar, lvar, rvarp,
5216 &rvar2))
5217 {
5218 if (new_vard == new_var)
5219 {
5220 gcc_assert (DECL_VALUE_EXPR (new_var) == lvar);
5221 SET_DECL_VALUE_EXPR (new_var, ivar);
5222 }
5223 else
5224 {
5225 SET_DECL_VALUE_EXPR (new_vard,
5226 build_fold_addr_expr (ivar));
5227 DECL_HAS_VALUE_EXPR_P (new_vard) = 1;
5228 }
5229 x = lang_hooks.decls.omp_clause_default_ctor
5230 (c, unshare_expr (ivar),
5231 build_outer_var_ref (var, ctx));
5232 if (rvarp)
5233 {
5234 if (x)
5235 {
5236 gimplify_and_add (x, &llist[0]);
5237
5238 tree ivar2 = unshare_expr (lvar);
5239 TREE_OPERAND (ivar2, 1) = sctx.idx;
5240 x = lang_hooks.decls.omp_clause_default_ctor
5241 (c, ivar2, build_outer_var_ref (var, ctx));
5242 gimplify_and_add (x, &llist[0]);
5243
5244 if (rvar2)
5245 {
5246 x = lang_hooks.decls.omp_clause_default_ctor
5247 (c, unshare_expr (rvar2),
5248 build_outer_var_ref (var, ctx));
5249 gimplify_and_add (x, &llist[0]);
5250 }
5251
5252 /* For types that need construction, add another
5253 private var which will be default constructed
5254 and optionally initialized with
5255 OMP_CLAUSE_REDUCTION_GIMPLE_INIT; in the
5256 loop we want to assign this value instead of
5257 constructing and destructing it in each
5258 iteration. */
5259 tree nv = create_tmp_var_raw (TREE_TYPE (ivar));
5260 gimple_add_tmp_var (nv);
5261 ctx->cb.decl_map->put (TREE_OPERAND (rvar2
5262 ? rvar2
5263 : ivar, 0),
5264 nv);
5265 x = lang_hooks.decls.omp_clause_default_ctor
5266 (c, nv, build_outer_var_ref (var, ctx));
5267 gimplify_and_add (x, ilist);
5268
5269 if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c))
5270 {
5271 tseq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c);
5272 x = DECL_VALUE_EXPR (new_vard);
5273 tree vexpr = nv;
5274 if (new_vard != new_var)
5275 vexpr = build_fold_addr_expr (nv);
5276 SET_DECL_VALUE_EXPR (new_vard, vexpr);
5277 lower_omp (&tseq, ctx);
5278 SET_DECL_VALUE_EXPR (new_vard, x);
5279 gimple_seq_add_seq (ilist, tseq);
5280 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL;
5281 }
5282
5283 x = lang_hooks.decls.omp_clause_dtor (c, nv);
5284 if (x)
5285 {
5286 tseq = NULL;
5287 dtor = x;
5288 gimplify_stmt (&dtor, &tseq);
5289 gimple_seq_add_seq (dlist, tseq);
5290 }
5291 }
5292
5293 tree ref = build_outer_var_ref (var, ctx);
5294 x = unshare_expr (ivar);
5295 x = lang_hooks.decls.omp_clause_assign_op (c, x,
5296 ref);
5297 gimplify_and_add (x, &llist[0]);
5298
5299 ref = build_outer_var_ref (var, ctx);
5300 x = lang_hooks.decls.omp_clause_assign_op (c, ref,
5301 rvar);
5302 gimplify_and_add (x, &llist[3]);
5303
5304 DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
5305 if (new_vard == new_var)
5306 SET_DECL_VALUE_EXPR (new_var, lvar);
5307 else
5308 SET_DECL_VALUE_EXPR (new_vard,
5309 build_fold_addr_expr (lvar));
5310
5311 x = lang_hooks.decls.omp_clause_dtor (c, ivar);
5312 if (x)
5313 {
5314 tseq = NULL;
5315 dtor = x;
5316 gimplify_stmt (&dtor, &tseq);
5317 gimple_seq_add_seq (&llist[1], tseq);
5318 }
5319
5320 tree ivar2 = unshare_expr (lvar);
5321 TREE_OPERAND (ivar2, 1) = sctx.idx;
5322 x = lang_hooks.decls.omp_clause_dtor (c, ivar2);
5323 if (x)
5324 {
5325 tseq = NULL;
5326 dtor = x;
5327 gimplify_stmt (&dtor, &tseq);
5328 gimple_seq_add_seq (&llist[1], tseq);
5329 }
5330
5331 if (rvar2)
5332 {
5333 x = lang_hooks.decls.omp_clause_dtor (c, rvar2);
5334 if (x)
5335 {
5336 tseq = NULL;
5337 dtor = x;
5338 gimplify_stmt (&dtor, &tseq);
5339 gimple_seq_add_seq (&llist[1], tseq);
5340 }
5341 }
5342 break;
5343 }
5344 if (x)
5345 gimplify_and_add (x, &llist[0]);
5346 if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c))
5347 {
5348 tseq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c);
5349 lower_omp (&tseq, ctx);
5350 gimple_seq_add_seq (&llist[0], tseq);
5351 }
5352 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL;
5353 tseq = OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c);
5354 lower_omp (&tseq, ctx);
5355 gimple_seq_add_seq (&llist[1], tseq);
5356 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
5357 DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
5358 if (new_vard == new_var)
5359 SET_DECL_VALUE_EXPR (new_var, lvar);
5360 else
5361 SET_DECL_VALUE_EXPR (new_vard,
5362 build_fold_addr_expr (lvar));
5363 x = lang_hooks.decls.omp_clause_dtor (c, ivar);
5364 if (x)
5365 {
5366 tseq = NULL;
5367 dtor = x;
5368 gimplify_stmt (&dtor, &tseq);
5369 gimple_seq_add_seq (&llist[1], tseq);
5370 }
5371 break;
5372 }
5373 /* If this is a reference to a constant-size reduction var
5374 with a placeholder, we haven't emitted the initializer
5375 for it because it is undesirable if SIMD arrays are used.
5376 But if they aren't used, we need to emit the deferred
5377 initialization now. */
5378 else if (omp_is_reference (var) && is_simd)
5379 handle_simd_reference (clause_loc, new_vard, ilist);
5380
5381 tree lab2 = NULL_TREE;
5382 if (cond)
5383 {
5384 gimple *g;
5385 if (!is_parallel_ctx (ctx))
5386 {
5387 tree condv = create_tmp_var (boolean_type_node);
5388 tree m = build_simple_mem_ref (cond);
5389 g = gimple_build_assign (condv, m);
5390 gimple_seq_add_stmt (ilist, g);
5391 tree lab1
5392 = create_artificial_label (UNKNOWN_LOCATION);
5393 lab2 = create_artificial_label (UNKNOWN_LOCATION);
5394 g = gimple_build_cond (NE_EXPR, condv,
5395 boolean_false_node,
5396 lab2, lab1);
5397 gimple_seq_add_stmt (ilist, g);
5398 gimple_seq_add_stmt (ilist,
5399 gimple_build_label (lab1));
5400 }
5401 g = gimple_build_assign (build_simple_mem_ref (cond),
5402 boolean_true_node);
5403 gimple_seq_add_stmt (ilist, g);
5404 }
5405 x = lang_hooks.decls.omp_clause_default_ctor
5406 (c, unshare_expr (new_var),
5407 cond ? NULL_TREE
5408 : build_outer_var_ref (var, ctx));
5409 if (x)
5410 gimplify_and_add (x, ilist);
5411
5412 if (rvarp)
5413 {
5414 if (x)
5415 {
5416 tree nv = create_tmp_var_raw (TREE_TYPE (new_var));
5417 gimple_add_tmp_var (nv);
5418 ctx->cb.decl_map->put (new_vard, nv);
5419 x = lang_hooks.decls.omp_clause_default_ctor
5420 (c, nv, build_outer_var_ref (var, ctx));
5421 gimplify_and_add (x, ilist);
5422 if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c))
5423 {
5424 tseq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c);
5425 tree vexpr = nv;
5426 if (new_vard != new_var)
5427 vexpr = build_fold_addr_expr (nv);
5428 SET_DECL_VALUE_EXPR (new_vard, vexpr);
5429 DECL_HAS_VALUE_EXPR_P (new_vard) = 1;
5430 lower_omp (&tseq, ctx);
5431 SET_DECL_VALUE_EXPR (new_vard, NULL_TREE);
5432 DECL_HAS_VALUE_EXPR_P (new_vard) = 0;
5433 gimple_seq_add_seq (ilist, tseq);
5434 }
5435 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL;
5436 if (!ctx->scan_inclusive)
5437 {
5438 tree nv2
5439 = create_tmp_var_raw (TREE_TYPE (new_var));
5440 gimple_add_tmp_var (nv2);
5441 ctx->cb.decl_map->put (nv, nv2);
5442 x = lang_hooks.decls.omp_clause_default_ctor
5443 (c, nv2, build_outer_var_ref (var, ctx));
5444 gimplify_and_add (x, ilist);
5445 x = lang_hooks.decls.omp_clause_dtor (c, nv2);
5446 if (x)
5447 {
5448 tseq = NULL;
5449 dtor = x;
5450 gimplify_stmt (&dtor, &tseq);
5451 gimple_seq_add_seq (dlist, tseq);
5452 }
5453 }
5454 x = lang_hooks.decls.omp_clause_dtor (c, nv);
5455 if (x)
5456 {
5457 tseq = NULL;
5458 dtor = x;
5459 gimplify_stmt (&dtor, &tseq);
5460 gimple_seq_add_seq (dlist, tseq);
5461 }
5462 }
5463 else if (!ctx->scan_inclusive
5464 && TREE_ADDRESSABLE (TREE_TYPE (new_var)))
5465 {
5466 tree nv2 = create_tmp_var_raw (TREE_TYPE (new_var));
5467 gimple_add_tmp_var (nv2);
5468 ctx->cb.decl_map->put (new_vard, nv2);
5469 x = lang_hooks.decls.omp_clause_dtor (c, nv2);
5470 if (x)
5471 {
5472 tseq = NULL;
5473 dtor = x;
5474 gimplify_stmt (&dtor, &tseq);
5475 gimple_seq_add_seq (dlist, tseq);
5476 }
5477 }
5478 DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
5479 goto do_dtor;
5480 }
5481
5482 if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c))
5483 {
5484 tseq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c);
5485 lower_omp (&tseq, ctx);
5486 gimple_seq_add_seq (ilist, tseq);
5487 }
5488 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL;
5489 if (is_simd)
5490 {
5491 tseq = OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c);
5492 lower_omp (&tseq, ctx);
5493 gimple_seq_add_seq (dlist, tseq);
5494 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
5495 }
5496 DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
5497 if (cond)
5498 {
5499 if (lab2)
5500 gimple_seq_add_stmt (ilist, gimple_build_label (lab2));
5501 break;
5502 }
5503 goto do_dtor;
5504 }
5505 else
5506 {
5507 x = omp_reduction_init (c, TREE_TYPE (new_var));
5508 gcc_assert (TREE_CODE (TREE_TYPE (new_var)) != ARRAY_TYPE);
5509 enum tree_code code = OMP_CLAUSE_REDUCTION_CODE (c);
5510
5511 if (cond)
5512 {
5513 gimple *g;
5514 tree lab2 = NULL_TREE;
5515 /* GOMP_taskgroup_reduction_register memsets the whole
5516 array to zero. If the initializer is zero, we don't
5517 need to initialize it again, just mark it as ever
5518 used unconditionally, i.e. cond = true. */
5519 if (initializer_zerop (x))
5520 {
5521 g = gimple_build_assign (build_simple_mem_ref (cond),
5522 boolean_true_node);
5523 gimple_seq_add_stmt (ilist, g);
5524 break;
5525 }
5526
5527 /* Otherwise, emit
5528 if (!cond) { cond = true; new_var = x; } */
5529 if (!is_parallel_ctx (ctx))
5530 {
5531 tree condv = create_tmp_var (boolean_type_node);
5532 tree m = build_simple_mem_ref (cond);
5533 g = gimple_build_assign (condv, m);
5534 gimple_seq_add_stmt (ilist, g);
5535 tree lab1
5536 = create_artificial_label (UNKNOWN_LOCATION);
5537 lab2 = create_artificial_label (UNKNOWN_LOCATION);
5538 g = gimple_build_cond (NE_EXPR, condv,
5539 boolean_false_node,
5540 lab2, lab1);
5541 gimple_seq_add_stmt (ilist, g);
5542 gimple_seq_add_stmt (ilist,
5543 gimple_build_label (lab1));
5544 }
5545 g = gimple_build_assign (build_simple_mem_ref (cond),
5546 boolean_true_node);
5547 gimple_seq_add_stmt (ilist, g);
5548 gimplify_assign (new_var, x, ilist);
5549 if (lab2)
5550 gimple_seq_add_stmt (ilist, gimple_build_label (lab2));
5551 break;
5552 }
5553
5554 /* reduction(-:var) sums up the partial results, so it
5555 acts identically to reduction(+:var). */
5556 if (code == MINUS_EXPR)
5557 code = PLUS_EXPR;
5558
5559 tree new_vard = new_var;
5560 if (is_simd && omp_is_reference (var))
5561 {
5562 gcc_assert (TREE_CODE (new_var) == MEM_REF);
5563 new_vard = TREE_OPERAND (new_var, 0);
5564 gcc_assert (DECL_P (new_vard));
5565 }
5566 tree rvar = NULL_TREE, *rvarp = NULL, rvar2 = NULL_TREE;
5567 if (is_simd
5568 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
5569 && OMP_CLAUSE_REDUCTION_INSCAN (c))
5570 rvarp = &rvar;
5571 if (is_simd
5572 && lower_rec_simd_input_clauses (new_var, ctx, &sctx,
5573 ivar, lvar, rvarp,
5574 &rvar2))
5575 {
5576 if (new_vard != new_var)
5577 {
5578 SET_DECL_VALUE_EXPR (new_vard,
5579 build_fold_addr_expr (lvar));
5580 DECL_HAS_VALUE_EXPR_P (new_vard) = 1;
5581 }
5582
5583 tree ref = build_outer_var_ref (var, ctx);
5584
5585 if (rvarp)
5586 {
5587 gimplify_assign (ivar, ref, &llist[0]);
5588 ref = build_outer_var_ref (var, ctx);
5589 gimplify_assign (ref, rvar, &llist[3]);
5590 break;
5591 }
5592
5593 gimplify_assign (unshare_expr (ivar), x, &llist[0]);
5594
5595 if (sctx.is_simt)
5596 {
5597 if (!simt_lane)
5598 simt_lane = create_tmp_var (unsigned_type_node);
5599 x = build_call_expr_internal_loc
5600 (UNKNOWN_LOCATION, IFN_GOMP_SIMT_XCHG_BFLY,
5601 TREE_TYPE (ivar), 2, ivar, simt_lane);
5602 x = build2 (code, TREE_TYPE (ivar), ivar, x);
5603 gimplify_assign (ivar, x, &llist[2]);
5604 }
5605 x = build2 (code, TREE_TYPE (ref), ref, ivar);
5606 ref = build_outer_var_ref (var, ctx);
5607 gimplify_assign (ref, x, &llist[1]);
5608
5609 }
5610 else
5611 {
5612 if (omp_is_reference (var) && is_simd)
5613 handle_simd_reference (clause_loc, new_vard, ilist);
5614 if (rvarp)
5615 break;
5616 gimplify_assign (new_var, x, ilist);
5617 if (is_simd)
5618 {
5619 tree ref = build_outer_var_ref (var, ctx);
5620
5621 x = build2 (code, TREE_TYPE (ref), ref, new_var);
5622 ref = build_outer_var_ref (var, ctx);
5623 gimplify_assign (ref, x, dlist);
5624 }
5625 }
5626 }
5627 break;
5628
5629 default:
5630 gcc_unreachable ();
5631 }
5632 }
5633 }
5634 if (tskred_avar)
5635 {
5636 tree clobber = build_constructor (TREE_TYPE (tskred_avar), NULL);
5637 TREE_THIS_VOLATILE (clobber) = 1;
5638 gimple_seq_add_stmt (ilist, gimple_build_assign (tskred_avar, clobber));
5639 }
5640
5641 if (known_eq (sctx.max_vf, 1U))
5642 {
5643 sctx.is_simt = false;
5644 if (ctx->lastprivate_conditional_map)
5645 {
5646 if (gimple_omp_for_combined_into_p (ctx->stmt))
5647 {
5648 /* Signal to lower_omp_1 that it should use the parent context. */
5649 ctx->combined_into_simd_safelen0 = true;
5650 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
5651 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
5652 && OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c))
5653 {
5654 tree o = lookup_decl (OMP_CLAUSE_DECL (c), ctx);
5655 tree *v
5656 = ctx->lastprivate_conditional_map->get (o);
5657 tree po = lookup_decl (OMP_CLAUSE_DECL (c), ctx->outer);
5658 tree *pv
5659 = ctx->outer->lastprivate_conditional_map->get (po);
5660 *v = *pv;
5661 }
5662 }
5663 else
5664 {
5665 /* When not vectorized, treat lastprivate(conditional:) like
5666 normal lastprivate, as there will be just one simd lane
5667 writing the privatized variable. */
5668 delete ctx->lastprivate_conditional_map;
5669 ctx->lastprivate_conditional_map = NULL;
5670 }
5671 }
5672 }
5673
5674 if (nonconst_simd_if)
5675 {
5676 if (sctx.lane == NULL_TREE)
5677 {
5678 sctx.idx = create_tmp_var (unsigned_type_node);
5679 sctx.lane = create_tmp_var (unsigned_type_node);
5680 }
5681 /* FIXME: For now. */
5682 sctx.is_simt = false;
5683 }
5684
5685 if (sctx.lane || sctx.is_simt)
5686 {
5687 uid = create_tmp_var (ptr_type_node, "simduid");
5688 /* We don't want uninit warnings on simduid; it is always uninitialized,
5689 since we use it only for its DECL_UID, never for its value. */
5690 TREE_NO_WARNING (uid) = 1;
5691 c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__SIMDUID_);
5692 OMP_CLAUSE__SIMDUID__DECL (c) = uid;
5693 OMP_CLAUSE_CHAIN (c) = gimple_omp_for_clauses (ctx->stmt);
5694 gimple_omp_for_set_clauses (ctx->stmt, c);
5695 }
5696 /* Emit calls denoting privatized variables and initializing a pointer to
5697 a structure that holds private variables as fields after the ompdevlow pass. */
5698 if (sctx.is_simt)
5699 {
5700 sctx.simt_eargs[0] = uid;
5701 gimple *g
5702 = gimple_build_call_internal_vec (IFN_GOMP_SIMT_ENTER, sctx.simt_eargs);
5703 gimple_call_set_lhs (g, uid);
5704 gimple_seq_add_stmt (ilist, g);
5705 sctx.simt_eargs.release ();
5706
5707 simtrec = create_tmp_var (ptr_type_node, ".omp_simt");
5708 g = gimple_build_call_internal (IFN_GOMP_SIMT_ENTER_ALLOC, 1, uid);
5709 gimple_call_set_lhs (g, simtrec);
5710 gimple_seq_add_stmt (ilist, g);
5711 }
5712 if (sctx.lane)
5713 {
5714 gimple *g = gimple_build_call_internal (IFN_GOMP_SIMD_LANE,
5715 2 + (nonconst_simd_if != NULL),
5716 uid, integer_zero_node,
5717 nonconst_simd_if);
5718 gimple_call_set_lhs (g, sctx.lane);
5719 gimple_stmt_iterator gsi = gsi_start_1 (gimple_omp_body_ptr (ctx->stmt));
5720 gsi_insert_before_without_update (&gsi, g, GSI_SAME_STMT);
5721 g = gimple_build_assign (sctx.lane, INTEGER_CST,
5722 build_int_cst (unsigned_type_node, 0));
5723 gimple_seq_add_stmt (ilist, g);
5724 if (sctx.lastlane)
5725 {
5726 g = gimple_build_call_internal (IFN_GOMP_SIMD_LAST_LANE,
5727 2, uid, sctx.lane);
5728 gimple_call_set_lhs (g, sctx.lastlane);
5729 gimple_seq_add_stmt (dlist, g);
5730 gimple_seq_add_seq (dlist, llist[3]);
5731 }
5732 /* Emit reductions across SIMT lanes in log_2(simt_vf) steps. */
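/* An illustrative sketch of the loop built below (label and variable
   names are for exposition only).  Each step combines a lane's partial
   result with that of the lane simt_lane apart, doubling simt_lane
   until it reaches the SIMT width, hence log2(simt_vf) steps:

     simt_vf = .GOMP_SIMT_VF ();
     simt_lane = 1;
     goto header;
   body:
     <llist[2]: x = x OP .GOMP_SIMT_XCHG_BFLY (x, simt_lane)>
     simt_lane = simt_lane << 1;
   header:
     if (simt_lane < simt_vf) goto body; else goto end;
   end:
 */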
5733 if (llist[2])
5734 {
5735 tree simt_vf = create_tmp_var (unsigned_type_node);
5736 g = gimple_build_call_internal (IFN_GOMP_SIMT_VF, 0);
5737 gimple_call_set_lhs (g, simt_vf);
5738 gimple_seq_add_stmt (dlist, g);
5739
5740 tree t = build_int_cst (unsigned_type_node, 1);
5741 g = gimple_build_assign (simt_lane, INTEGER_CST, t);
5742 gimple_seq_add_stmt (dlist, g);
5743
5744 t = build_int_cst (unsigned_type_node, 0);
5745 g = gimple_build_assign (sctx.idx, INTEGER_CST, t);
5746 gimple_seq_add_stmt (dlist, g);
5747
5748 tree body = create_artificial_label (UNKNOWN_LOCATION);
5749 tree header = create_artificial_label (UNKNOWN_LOCATION);
5750 tree end = create_artificial_label (UNKNOWN_LOCATION);
5751 gimple_seq_add_stmt (dlist, gimple_build_goto (header));
5752 gimple_seq_add_stmt (dlist, gimple_build_label (body));
5753
5754 gimple_seq_add_seq (dlist, llist[2]);
5755
5756 g = gimple_build_assign (simt_lane, LSHIFT_EXPR, simt_lane, integer_one_node);
5757 gimple_seq_add_stmt (dlist, g);
5758
5759 gimple_seq_add_stmt (dlist, gimple_build_label (header));
5760 g = gimple_build_cond (LT_EXPR, simt_lane, simt_vf, body, end);
5761 gimple_seq_add_stmt (dlist, g);
5762
5763 gimple_seq_add_stmt (dlist, gimple_build_label (end));
5764 }
5765 for (int i = 0; i < 2; i++)
5766 if (llist[i])
5767 {
5768 tree vf = create_tmp_var (unsigned_type_node);
5769 g = gimple_build_call_internal (IFN_GOMP_SIMD_VF, 1, uid);
5770 gimple_call_set_lhs (g, vf);
5771 gimple_seq *seq = i == 0 ? ilist : dlist;
5772 gimple_seq_add_stmt (seq, g);
5773 tree t = build_int_cst (unsigned_type_node, 0);
5774 g = gimple_build_assign (sctx.idx, INTEGER_CST, t);
5775 gimple_seq_add_stmt (seq, g);
5776 tree body = create_artificial_label (UNKNOWN_LOCATION);
5777 tree header = create_artificial_label (UNKNOWN_LOCATION);
5778 tree end = create_artificial_label (UNKNOWN_LOCATION);
5779 gimple_seq_add_stmt (seq, gimple_build_goto (header));
5780 gimple_seq_add_stmt (seq, gimple_build_label (body));
5781 gimple_seq_add_seq (seq, llist[i]);
5782 t = build_int_cst (unsigned_type_node, 1);
5783 g = gimple_build_assign (sctx.idx, PLUS_EXPR, sctx.idx, t);
5784 gimple_seq_add_stmt (seq, g);
5785 gimple_seq_add_stmt (seq, gimple_build_label (header));
5786 g = gimple_build_cond (LT_EXPR, sctx.idx, vf, body, end);
5787 gimple_seq_add_stmt (seq, g);
5788 gimple_seq_add_stmt (seq, gimple_build_label (end));
5789 }
5790 }
5791 if (sctx.is_simt)
5792 {
5793 gimple_seq_add_seq (dlist, sctx.simt_dlist);
5794 gimple *g
5795 = gimple_build_call_internal (IFN_GOMP_SIMT_EXIT, 1, simtrec);
5796 gimple_seq_add_stmt (dlist, g);
5797 }
5798
5799 /* The copyin sequence is not to be executed by the main thread, since
5800 that would result in self-copies. Such a self-copy may be invisible
5801 for scalars, but it certainly is visible to C++ operator=. */
5802 if (copyin_seq)
5803 {
5804 x = build_call_expr (builtin_decl_explicit (BUILT_IN_OMP_GET_THREAD_NUM),
5805 0);
5806 x = build2 (NE_EXPR, boolean_type_node, x,
5807 build_int_cst (TREE_TYPE (x), 0));
5808 x = build3 (COND_EXPR, void_type_node, x, copyin_seq, NULL);
5809 gimplify_and_add (x, ilist);
5810 }
5811
5812 /* If any copyin variable is passed by reference, we must ensure the
5813 master thread doesn't modify it before it is copied over in all
5814 threads. Similarly for variables in both firstprivate and
5815 lastprivate clauses we need to ensure the lastprivate copying
5816 happens after firstprivate copying in all threads. And similarly
5817 for UDRs if initializer expression refers to omp_orig. */
5818 if (copyin_by_ref || lastprivate_firstprivate || reduction_omp_orig_ref)
5819 {
5820 /* Don't add any barrier for #pragma omp simd or
5821 #pragma omp distribute. */
5822 if (!is_task_ctx (ctx)
5823 && (gimple_code (ctx->stmt) != GIMPLE_OMP_FOR
5824 || gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_FOR))
5825 gimple_seq_add_stmt (ilist, omp_build_barrier (NULL_TREE));
5826 }
5827
5828 /* If max_vf is non-zero, then we can use only a vectorization factor
5829 up to the max_vf we chose. So stick it into the safelen clause. */
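/* For example (an illustrative case, not from the sources): if the
   user wrote

     #pragma omp simd safelen(32)

   but the privatization code above settled on max_vf == 16, a tighter
   safelen(16) clause is prepended below, and the vectorizer will find
   that one first.  */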
5830 if (maybe_ne (sctx.max_vf, 0U))
5831 {
5832 tree c = omp_find_clause (gimple_omp_for_clauses (ctx->stmt),
5833 OMP_CLAUSE_SAFELEN);
5834 poly_uint64 safe_len;
5835 if (c == NULL_TREE
5836 || (poly_int_tree_p (OMP_CLAUSE_SAFELEN_EXPR (c), &safe_len)
5837 && maybe_gt (safe_len, sctx.max_vf)))
5838 {
5839 c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE_SAFELEN);
5840 OMP_CLAUSE_SAFELEN_EXPR (c) = build_int_cst (integer_type_node,
5841 sctx.max_vf);
5842 OMP_CLAUSE_CHAIN (c) = gimple_omp_for_clauses (ctx->stmt);
5843 gimple_omp_for_set_clauses (ctx->stmt, c);
5844 }
5845 }
5846 }
5847
5848 /* Create temporary variables for lastprivate(conditional:) implementation
5849 in context CTX with CLAUSES. */
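/* An illustrative source-level example (for exposition only):

     #pragma omp for lastprivate (conditional: x)
     for (i = 0; i < n; i++)
       if (a[i])
         x = a[i];

   Each thread records, in a per-clause iteration temporary created
   here, when it last assigned to its private x, so that the final
   value can later be taken from the thread with the highest such
   iteration rather than simply from whoever ran the last iteration.  */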
5850
5851 static void
5852 lower_lastprivate_conditional_clauses (tree *clauses, omp_context *ctx)
5853 {
5854 tree iter_type = NULL_TREE;
5855 tree cond_ptr = NULL_TREE;
5856 tree iter_var = NULL_TREE;
5857 bool is_simd = (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
5858 && gimple_omp_for_kind (ctx->stmt) & GF_OMP_FOR_SIMD);
5859 tree next = *clauses;
5860 for (tree c = *clauses; c; c = OMP_CLAUSE_CHAIN (c))
5861 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
5862 && OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c))
5863 {
5864 if (is_simd)
5865 {
5866 tree cc = omp_find_clause (next, OMP_CLAUSE__CONDTEMP_);
5867 gcc_assert (cc);
5868 if (iter_type == NULL_TREE)
5869 {
5870 iter_type = TREE_TYPE (OMP_CLAUSE_DECL (cc));
5871 iter_var = create_tmp_var_raw (iter_type);
5872 DECL_CONTEXT (iter_var) = current_function_decl;
5873 DECL_SEEN_IN_BIND_EXPR_P (iter_var) = 1;
5874 DECL_CHAIN (iter_var) = ctx->block_vars;
5875 ctx->block_vars = iter_var;
5876 tree c3
5877 = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__CONDTEMP_);
5878 OMP_CLAUSE__CONDTEMP__ITER (c3) = 1;
5879 OMP_CLAUSE_DECL (c3) = iter_var;
5880 OMP_CLAUSE_CHAIN (c3) = *clauses;
5881 *clauses = c3;
5882 ctx->lastprivate_conditional_map = new hash_map<tree, tree>;
5883 }
5884 next = OMP_CLAUSE_CHAIN (cc);
5885 tree o = lookup_decl (OMP_CLAUSE_DECL (c), ctx);
5886 tree v = lookup_decl (OMP_CLAUSE_DECL (cc), ctx);
5887 ctx->lastprivate_conditional_map->put (o, v);
5888 continue;
5889 }
5890 if (iter_type == NULL)
5891 {
5892 if (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR)
5893 {
5894 struct omp_for_data fd;
5895 omp_extract_for_data (as_a <gomp_for *> (ctx->stmt), &fd,
5896 NULL);
5897 iter_type = unsigned_type_for (fd.iter_type);
5898 }
5899 else if (gimple_code (ctx->stmt) == GIMPLE_OMP_SECTIONS)
5900 iter_type = unsigned_type_node;
5901 tree c2 = omp_find_clause (*clauses, OMP_CLAUSE__CONDTEMP_);
5902 if (c2)
5903 {
5904 cond_ptr
5905 = lookup_decl_in_outer_ctx (OMP_CLAUSE_DECL (c2), ctx);
5906 OMP_CLAUSE_DECL (c2) = cond_ptr;
5907 }
5908 else
5909 {
5910 cond_ptr = create_tmp_var_raw (build_pointer_type (iter_type));
5911 DECL_CONTEXT (cond_ptr) = current_function_decl;
5912 DECL_SEEN_IN_BIND_EXPR_P (cond_ptr) = 1;
5913 DECL_CHAIN (cond_ptr) = ctx->block_vars;
5914 ctx->block_vars = cond_ptr;
5915 c2 = build_omp_clause (UNKNOWN_LOCATION,
5916 OMP_CLAUSE__CONDTEMP_);
5917 OMP_CLAUSE_DECL (c2) = cond_ptr;
5918 OMP_CLAUSE_CHAIN (c2) = *clauses;
5919 *clauses = c2;
5920 }
5921 iter_var = create_tmp_var_raw (iter_type);
5922 DECL_CONTEXT (iter_var) = current_function_decl;
5923 DECL_SEEN_IN_BIND_EXPR_P (iter_var) = 1;
5924 DECL_CHAIN (iter_var) = ctx->block_vars;
5925 ctx->block_vars = iter_var;
5926 tree c3
5927 = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__CONDTEMP_);
5928 OMP_CLAUSE__CONDTEMP__ITER (c3) = 1;
5929 OMP_CLAUSE_DECL (c3) = iter_var;
5930 OMP_CLAUSE_CHAIN (c3) = OMP_CLAUSE_CHAIN (c2);
5931 OMP_CLAUSE_CHAIN (c2) = c3;
5932 ctx->lastprivate_conditional_map = new hash_map<tree, tree>;
5933 }
5934 tree v = create_tmp_var_raw (iter_type);
5935 DECL_CONTEXT (v) = current_function_decl;
5936 DECL_SEEN_IN_BIND_EXPR_P (v) = 1;
5937 DECL_CHAIN (v) = ctx->block_vars;
5938 ctx->block_vars = v;
5939 tree o = lookup_decl (OMP_CLAUSE_DECL (c), ctx);
5940 ctx->lastprivate_conditional_map->put (o, v);
5941 }
5942 }
5943
5944
5945 /* Generate code to implement the LASTPRIVATE clauses. This is used for
5946 both parallel and workshare constructs. PREDICATE may be NULL if it's
5947 always true. BODY_P is the sequence to insert early initialization
5948 if needed, STMT_LIST is where the non-conditional lastprivate handling
5949 goes into and CSTMT_LIST is a sequence that needs to be run in a critical
5950 section. */
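/* A rough sketch of the emitted shape for a non-conditional
   lastprivate guarded by PREDICATE (illustrative, not literal):

     if (PREDICATE) goto lab_true; else goto lab;
   lab_true:
     <lastprivate GIMPLE_SEQ, if any>
     orig_var = private_var;
   lab:
 */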
5951
5952 static void
5953 lower_lastprivate_clauses (tree clauses, tree predicate, gimple_seq *body_p,
5954 gimple_seq *stmt_list, gimple_seq *cstmt_list,
5955 omp_context *ctx)
5956 {
5957 tree x, c, label = NULL, orig_clauses = clauses;
5958 bool par_clauses = false;
5959 tree simduid = NULL, lastlane = NULL, simtcond = NULL, simtlast = NULL;
5960 unsigned HOST_WIDE_INT conditional_off = 0;
5961
5962 /* Early exit if there are no lastprivate or linear clauses. */
5963 for (; clauses ; clauses = OMP_CLAUSE_CHAIN (clauses))
5964 if (OMP_CLAUSE_CODE (clauses) == OMP_CLAUSE_LASTPRIVATE
5965 || (OMP_CLAUSE_CODE (clauses) == OMP_CLAUSE_LINEAR
5966 && !OMP_CLAUSE_LINEAR_NO_COPYOUT (clauses)))
5967 break;
5968 if (clauses == NULL)
5969 {
5970 /* If this was a workshare clause, see if it had been combined
5971 with its parallel. In that case, look for the clauses on the
5972 parallel statement itself. */
5973 if (is_parallel_ctx (ctx))
5974 return;
5975
5976 ctx = ctx->outer;
5977 if (ctx == NULL || !is_parallel_ctx (ctx))
5978 return;
5979
5980 clauses = omp_find_clause (gimple_omp_parallel_clauses (ctx->stmt),
5981 OMP_CLAUSE_LASTPRIVATE);
5982 if (clauses == NULL)
5983 return;
5984 par_clauses = true;
5985 }
5986
5987 bool maybe_simt = false;
5988 if (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
5989 && gimple_omp_for_kind (ctx->stmt) & GF_OMP_FOR_SIMD)
5990 {
5991 maybe_simt = omp_find_clause (orig_clauses, OMP_CLAUSE__SIMT_);
5992 simduid = omp_find_clause (orig_clauses, OMP_CLAUSE__SIMDUID_);
5993 if (simduid)
5994 simduid = OMP_CLAUSE__SIMDUID__DECL (simduid);
5995 }
5996
5997 if (predicate)
5998 {
5999 gcond *stmt;
6000 tree label_true, arm1, arm2;
6001 enum tree_code pred_code = TREE_CODE (predicate);
6002
6003 label = create_artificial_label (UNKNOWN_LOCATION);
6004 label_true = create_artificial_label (UNKNOWN_LOCATION);
6005 if (TREE_CODE_CLASS (pred_code) == tcc_comparison)
6006 {
6007 arm1 = TREE_OPERAND (predicate, 0);
6008 arm2 = TREE_OPERAND (predicate, 1);
6009 gimplify_expr (&arm1, stmt_list, NULL, is_gimple_val, fb_rvalue);
6010 gimplify_expr (&arm2, stmt_list, NULL, is_gimple_val, fb_rvalue);
6011 }
6012 else
6013 {
6014 arm1 = predicate;
6015 gimplify_expr (&arm1, stmt_list, NULL, is_gimple_val, fb_rvalue);
6016 arm2 = boolean_false_node;
6017 pred_code = NE_EXPR;
6018 }
6019 if (maybe_simt)
6020 {
6021 c = build2 (pred_code, boolean_type_node, arm1, arm2);
6022 c = fold_convert (integer_type_node, c);
6023 simtcond = create_tmp_var (integer_type_node);
6024 gimplify_assign (simtcond, c, stmt_list);
6025 gcall *g = gimple_build_call_internal (IFN_GOMP_SIMT_VOTE_ANY,
6026 1, simtcond);
6027 c = create_tmp_var (integer_type_node);
6028 gimple_call_set_lhs (g, c);
6029 gimple_seq_add_stmt (stmt_list, g);
6030 stmt = gimple_build_cond (NE_EXPR, c, integer_zero_node,
6031 label_true, label);
6032 }
6033 else
6034 stmt = gimple_build_cond (pred_code, arm1, arm2, label_true, label);
6035 gimple_seq_add_stmt (stmt_list, stmt);
6036 gimple_seq_add_stmt (stmt_list, gimple_build_label (label_true));
6037 }
6038
6039 tree cond_ptr = NULL_TREE;
6040 for (c = clauses; c ;)
6041 {
6042 tree var, new_var;
6043 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
6044 gimple_seq *this_stmt_list = stmt_list;
6045 tree lab2 = NULL_TREE;
6046
6047 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
6048 && OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c)
6049 && ctx->lastprivate_conditional_map
6050 && !ctx->combined_into_simd_safelen0)
6051 {
6052 gcc_assert (body_p);
6053 if (simduid)
6054 goto next;
6055 if (cond_ptr == NULL_TREE)
6056 {
6057 cond_ptr = omp_find_clause (orig_clauses, OMP_CLAUSE__CONDTEMP_);
6058 cond_ptr = OMP_CLAUSE_DECL (cond_ptr);
6059 }
6060 tree type = TREE_TYPE (TREE_TYPE (cond_ptr));
6061 tree o = lookup_decl (OMP_CLAUSE_DECL (c), ctx);
6062 tree v = *ctx->lastprivate_conditional_map->get (o);
6063 gimplify_assign (v, build_zero_cst (type), body_p);
6064 this_stmt_list = cstmt_list;
6065 tree mem;
6066 if (POINTER_TYPE_P (TREE_TYPE (cond_ptr)))
6067 {
6068 mem = build2 (MEM_REF, type, cond_ptr,
6069 build_int_cst (TREE_TYPE (cond_ptr),
6070 conditional_off));
6071 conditional_off += tree_to_uhwi (TYPE_SIZE_UNIT (type));
6072 }
6073 else
6074 mem = build4 (ARRAY_REF, type, cond_ptr,
6075 size_int (conditional_off++), NULL_TREE, NULL_TREE);
6076 tree mem2 = copy_node (mem);
6077 gimple_seq seq = NULL;
6078 mem = force_gimple_operand (mem, &seq, true, NULL_TREE);
6079 gimple_seq_add_seq (this_stmt_list, seq);
6080 tree lab1 = create_artificial_label (UNKNOWN_LOCATION);
6081 lab2 = create_artificial_label (UNKNOWN_LOCATION);
6082 gimple *g = gimple_build_cond (GT_EXPR, v, mem, lab1, lab2);
6083 gimple_seq_add_stmt (this_stmt_list, g);
6084 gimple_seq_add_stmt (this_stmt_list, gimple_build_label (lab1));
6085 gimplify_assign (mem2, v, this_stmt_list);
6086 }
6087
6088 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
6089 || (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
6090 && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c)))
6091 {
6092 var = OMP_CLAUSE_DECL (c);
6093 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
6094 && OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c)
6095 && is_taskloop_ctx (ctx))
6096 {
6097 gcc_checking_assert (ctx->outer && is_task_ctx (ctx->outer));
6098 new_var = lookup_decl (var, ctx->outer);
6099 }
6100 else
6101 {
6102 new_var = lookup_decl (var, ctx);
6103 /* Avoid uninitialized warnings for lastprivate and
6104 for linear iterators. */
6105 if (predicate
6106 && (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
6107 || OMP_CLAUSE_LINEAR_NO_COPYIN (c)))
6108 TREE_NO_WARNING (new_var) = 1;
6109 }
6110
6111 if (!maybe_simt && simduid && DECL_HAS_VALUE_EXPR_P (new_var))
6112 {
6113 tree val = DECL_VALUE_EXPR (new_var);
6114 if (TREE_CODE (val) == ARRAY_REF
6115 && VAR_P (TREE_OPERAND (val, 0))
6116 && lookup_attribute ("omp simd array",
6117 DECL_ATTRIBUTES (TREE_OPERAND (val,
6118 0))))
6119 {
6120 if (lastlane == NULL)
6121 {
6122 lastlane = create_tmp_var (unsigned_type_node);
6123 gcall *g
6124 = gimple_build_call_internal (IFN_GOMP_SIMD_LAST_LANE,
6125 2, simduid,
6126 TREE_OPERAND (val, 1));
6127 gimple_call_set_lhs (g, lastlane);
6128 gimple_seq_add_stmt (this_stmt_list, g);
6129 }
6130 new_var = build4 (ARRAY_REF, TREE_TYPE (val),
6131 TREE_OPERAND (val, 0), lastlane,
6132 NULL_TREE, NULL_TREE);
6133 TREE_THIS_NOTRAP (new_var) = 1;
6134 }
6135 }
6136 else if (maybe_simt)
6137 {
6138 tree val = (DECL_HAS_VALUE_EXPR_P (new_var)
6139 ? DECL_VALUE_EXPR (new_var)
6140 : new_var);
6141 if (simtlast == NULL)
6142 {
6143 simtlast = create_tmp_var (unsigned_type_node);
6144 gcall *g = gimple_build_call_internal
6145 (IFN_GOMP_SIMT_LAST_LANE, 1, simtcond);
6146 gimple_call_set_lhs (g, simtlast);
6147 gimple_seq_add_stmt (this_stmt_list, g);
6148 }
6149 x = build_call_expr_internal_loc
6150 (UNKNOWN_LOCATION, IFN_GOMP_SIMT_XCHG_IDX,
6151 TREE_TYPE (val), 2, val, simtlast);
6152 new_var = unshare_expr (new_var);
6153 gimplify_assign (new_var, x, this_stmt_list);
6154 new_var = unshare_expr (new_var);
6155 }
6156
6157 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
6158 && OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c))
6159 {
6160 lower_omp (&OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c), ctx);
6161 gimple_seq_add_seq (this_stmt_list,
6162 OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c));
6163 OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c) = NULL;
6164 }
6165 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
6166 && OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c))
6167 {
6168 lower_omp (&OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c), ctx);
6169 gimple_seq_add_seq (this_stmt_list,
6170 OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c));
6171 OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c) = NULL;
6172 }
6173
6174 x = NULL_TREE;
6175 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
6176 && OMP_CLAUSE_LASTPRIVATE_TASKLOOP_IV (c))
6177 {
6178 gcc_checking_assert (is_taskloop_ctx (ctx));
6179 tree ovar = maybe_lookup_decl_in_outer_ctx (var,
6180 ctx->outer->outer);
6181 if (is_global_var (ovar))
6182 x = ovar;
6183 }
6184 if (!x)
6185 x = build_outer_var_ref (var, ctx, OMP_CLAUSE_LASTPRIVATE);
6186 if (omp_is_reference (var))
6187 new_var = build_simple_mem_ref_loc (clause_loc, new_var);
6188 x = lang_hooks.decls.omp_clause_assign_op (c, x, new_var);
6189 gimplify_and_add (x, this_stmt_list);
6190
6191 if (lab2)
6192 gimple_seq_add_stmt (this_stmt_list, gimple_build_label (lab2));
6193 }
6194
6195 next:
6196 c = OMP_CLAUSE_CHAIN (c);
6197 if (c == NULL && !par_clauses)
6198 {
6199 /* If this was a workshare clause, see if it had been combined
6200 with its parallel. In that case, continue looking for the
6201 clauses also on the parallel statement itself. */
6202 if (is_parallel_ctx (ctx))
6203 break;
6204
6205 ctx = ctx->outer;
6206 if (ctx == NULL || !is_parallel_ctx (ctx))
6207 break;
6208
6209 c = omp_find_clause (gimple_omp_parallel_clauses (ctx->stmt),
6210 OMP_CLAUSE_LASTPRIVATE);
6211 par_clauses = true;
6212 }
6213 }
6214
6215 if (label)
6216 gimple_seq_add_stmt (stmt_list, gimple_build_label (label));
6217 }
6218
6219 /* Lower the OpenACC reductions of CLAUSES for compute axis LEVEL
6220 (which might be a placeholder). INNER is true if this is an inner
6221 axis of a multi-axis loop. FORK and JOIN are (optional) fork and
6222 join markers. Generate the before-loop forking sequence in
6223 FORK_SEQ and the after-loop joining sequence to JOIN_SEQ. The
6224 general form of these sequences is
6225
6226 GOACC_REDUCTION_SETUP
6227 GOACC_FORK
6228 GOACC_REDUCTION_INIT
6229 ...
6230 GOACC_REDUCTION_FINI
6231 GOACC_JOIN
6232 GOACC_REDUCTION_TEARDOWN. */
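/* Illustratively (names are for exposition only), each of the steps
   above becomes an IFN_GOACC_REDUCTION internal call of the shape

     v = .GOACC_REDUCTION (code, ref_to_res, var, level, op, offset);

   e.g. for "reduction (+:sum)" the INIT step is such a call with
   op == PLUS_EXPR; the target compiler later expands each call
   according to the compute axis it applies to.  */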
6233
6234 static void
6235 lower_oacc_reductions (location_t loc, tree clauses, tree level, bool inner,
6236 gcall *fork, gcall *join, gimple_seq *fork_seq,
6237 gimple_seq *join_seq, omp_context *ctx)
6238 {
6239 gimple_seq before_fork = NULL;
6240 gimple_seq after_fork = NULL;
6241 gimple_seq before_join = NULL;
6242 gimple_seq after_join = NULL;
6243 tree init_code = NULL_TREE, fini_code = NULL_TREE,
6244 setup_code = NULL_TREE, teardown_code = NULL_TREE;
6245 unsigned offset = 0;
6246
6247 for (tree c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
6248 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION)
6249 {
6250 tree orig = OMP_CLAUSE_DECL (c);
6251 tree var = maybe_lookup_decl (orig, ctx);
6252 tree ref_to_res = NULL_TREE;
6253 tree incoming, outgoing, v1, v2, v3;
6254 bool is_private = false;
6255
6256 enum tree_code rcode = OMP_CLAUSE_REDUCTION_CODE (c);
6257 if (rcode == MINUS_EXPR)
6258 rcode = PLUS_EXPR;
6259 else if (rcode == TRUTH_ANDIF_EXPR)
6260 rcode = BIT_AND_EXPR;
6261 else if (rcode == TRUTH_ORIF_EXPR)
6262 rcode = BIT_IOR_EXPR;
6263 tree op = build_int_cst (unsigned_type_node, rcode);
6264
6265 if (!var)
6266 var = orig;
6267
6268 incoming = outgoing = var;
6269
6270 if (!inner)
6271 {
6272 /* See if an outer construct also reduces this variable. */
6273 omp_context *outer = ctx;
6274
6275 while (omp_context *probe = outer->outer)
6276 {
6277 enum gimple_code type = gimple_code (probe->stmt);
6278 tree cls;
6279
6280 switch (type)
6281 {
6282 case GIMPLE_OMP_FOR:
6283 cls = gimple_omp_for_clauses (probe->stmt);
6284 break;
6285
6286 case GIMPLE_OMP_TARGET:
6287 if (gimple_omp_target_kind (probe->stmt)
6288 != GF_OMP_TARGET_KIND_OACC_PARALLEL)
6289 goto do_lookup;
6290
6291 cls = gimple_omp_target_clauses (probe->stmt);
6292 break;
6293
6294 default:
6295 goto do_lookup;
6296 }
6297
6298 outer = probe;
6299 for (; cls; cls = OMP_CLAUSE_CHAIN (cls))
6300 if (OMP_CLAUSE_CODE (cls) == OMP_CLAUSE_REDUCTION
6301 && orig == OMP_CLAUSE_DECL (cls))
6302 {
6303 incoming = outgoing = lookup_decl (orig, probe);
6304 goto has_outer_reduction;
6305 }
6306 else if ((OMP_CLAUSE_CODE (cls) == OMP_CLAUSE_FIRSTPRIVATE
6307 || OMP_CLAUSE_CODE (cls) == OMP_CLAUSE_PRIVATE)
6308 && orig == OMP_CLAUSE_DECL (cls))
6309 {
6310 is_private = true;
6311 goto do_lookup;
6312 }
6313 }
6314
6315 do_lookup:
6316 /* This is the outermost construct with this reduction,
6317 see if there's a mapping for it. */
6318 if (gimple_code (outer->stmt) == GIMPLE_OMP_TARGET
6319 && maybe_lookup_field (orig, outer) && !is_private)
6320 {
6321 ref_to_res = build_receiver_ref (orig, false, outer);
6322 if (omp_is_reference (orig))
6323 ref_to_res = build_simple_mem_ref (ref_to_res);
6324
6325 tree type = TREE_TYPE (var);
6326 if (POINTER_TYPE_P (type))
6327 type = TREE_TYPE (type);
6328
6329 outgoing = var;
6330 incoming = omp_reduction_init_op (loc, rcode, type);
6331 }
6332 else
6333 {
6334 /* Try to look up the reduction var in enclosing contexts;
6335 use the original if no mapping is found. */
6336 tree t = NULL_TREE;
6337 omp_context *c = ctx->outer;
6338 while (c && !t)
6339 {
6340 t = maybe_lookup_decl (orig, c);
6341 c = c->outer;
6342 }
6343 incoming = outgoing = (t ? t : orig);
6344 }
6345
6346 has_outer_reduction:;
6347 }
6348
6349 if (!ref_to_res)
6350 ref_to_res = integer_zero_node;
6351
6352 if (omp_is_reference (orig))
6353 {
6354 tree type = TREE_TYPE (var);
6355 const char *id = IDENTIFIER_POINTER (DECL_NAME (var));
6356
6357 if (!inner)
6358 {
6359 tree x = create_tmp_var (TREE_TYPE (type), id);
6360 gimplify_assign (var, build_fold_addr_expr (x), fork_seq);
6361 }
6362
6363 v1 = create_tmp_var (type, id);
6364 v2 = create_tmp_var (type, id);
6365 v3 = create_tmp_var (type, id);
6366
6367 gimplify_assign (v1, var, fork_seq);
6368 gimplify_assign (v2, var, fork_seq);
6369 gimplify_assign (v3, var, fork_seq);
6370
6371 var = build_simple_mem_ref (var);
6372 v1 = build_simple_mem_ref (v1);
6373 v2 = build_simple_mem_ref (v2);
6374 v3 = build_simple_mem_ref (v3);
6375 outgoing = build_simple_mem_ref (outgoing);
6376
6377 if (!TREE_CONSTANT (incoming))
6378 incoming = build_simple_mem_ref (incoming);
6379 }
6380 else
6381 v1 = v2 = v3 = var;
6382
6383 /* Determine the position in the reduction buffer, which may be
6384 used by the target. The parser has ensured that this is not a
6385 variable-sized type. */
6386 fixed_size_mode mode
6387 = as_a <fixed_size_mode> (TYPE_MODE (TREE_TYPE (var)));
6388 unsigned align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
6389 offset = (offset + align - 1) & ~(align - 1);
6390 tree off = build_int_cst (sizetype, offset);
6391 offset += GET_MODE_SIZE (mode);
6392
6393 if (!init_code)
6394 {
6395 init_code = build_int_cst (integer_type_node,
6396 IFN_GOACC_REDUCTION_INIT);
6397 fini_code = build_int_cst (integer_type_node,
6398 IFN_GOACC_REDUCTION_FINI);
6399 setup_code = build_int_cst (integer_type_node,
6400 IFN_GOACC_REDUCTION_SETUP);
6401 teardown_code = build_int_cst (integer_type_node,
6402 IFN_GOACC_REDUCTION_TEARDOWN);
6403 }
6404
6405 tree setup_call
6406 = build_call_expr_internal_loc (loc, IFN_GOACC_REDUCTION,
6407 TREE_TYPE (var), 6, setup_code,
6408 unshare_expr (ref_to_res),
6409 incoming, level, op, off);
6410 tree init_call
6411 = build_call_expr_internal_loc (loc, IFN_GOACC_REDUCTION,
6412 TREE_TYPE (var), 6, init_code,
6413 unshare_expr (ref_to_res),
6414 v1, level, op, off);
6415 tree fini_call
6416 = build_call_expr_internal_loc (loc, IFN_GOACC_REDUCTION,
6417 TREE_TYPE (var), 6, fini_code,
6418 unshare_expr (ref_to_res),
6419 v2, level, op, off);
6420 tree teardown_call
6421 = build_call_expr_internal_loc (loc, IFN_GOACC_REDUCTION,
6422 TREE_TYPE (var), 6, teardown_code,
6423 ref_to_res, v3, level, op, off);
6424
6425 gimplify_assign (v1, setup_call, &before_fork);
6426 gimplify_assign (v2, init_call, &after_fork);
6427 gimplify_assign (v3, fini_call, &before_join);
6428 gimplify_assign (outgoing, teardown_call, &after_join);
6429 }
6430
6431 /* Now stitch things together. */
6432 gimple_seq_add_seq (fork_seq, before_fork);
6433 if (fork)
6434 gimple_seq_add_stmt (fork_seq, fork);
6435 gimple_seq_add_seq (fork_seq, after_fork);
6436
6437 gimple_seq_add_seq (join_seq, before_join);
6438 if (join)
6439 gimple_seq_add_stmt (join_seq, join);
6440 gimple_seq_add_seq (join_seq, after_join);
6441 }
6442
6443 /* Generate code to implement the REDUCTION clauses, append it
6444 to STMT_SEQP. CLIST if non-NULL is a pointer to a sequence
6445 that should be emitted also inside of the critical section,
6446 in that case clear *CLIST afterwards, otherwise leave it as is
6447 and let the caller emit it itself. */
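/* Illustratively, for a single scalar "reduction (+:s)" this emits the
   equivalent of a relaxed atomic update of the shared copy,

     #pragma omp atomic
     s_orig = s_orig + s_priv;

   while with multiple clauses, array sections or UDRs the merges are
   instead wrapped in GOMP_atomic_start ()/GOMP_atomic_end () (a
   sketch, not the literal output).  */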
6448
6449 static void
6450 lower_reduction_clauses (tree clauses, gimple_seq *stmt_seqp,
6451 gimple_seq *clist, omp_context *ctx)
6452 {
6453 gimple_seq sub_seq = NULL;
6454 gimple *stmt;
6455 tree x, c;
6456 int count = 0;
6457
6458 /* OpenACC loop reductions are handled elsewhere. */
6459 if (is_gimple_omp_oacc (ctx->stmt))
6460 return;
6461
6462 /* SIMD reductions are handled in lower_rec_input_clauses. */
6463 if (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
6464 && gimple_omp_for_kind (ctx->stmt) & GF_OMP_FOR_SIMD)
6465 return;
6466
6467 /* First see if there is exactly one reduction clause. Use OMP_ATOMIC
6468 update in that case, otherwise use a lock. */
6469 for (c = clauses; c && count < 2; c = OMP_CLAUSE_CHAIN (c))
6470 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
6471 && !OMP_CLAUSE_REDUCTION_TASK (c))
6472 {
6473 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c)
6474 || TREE_CODE (OMP_CLAUSE_DECL (c)) == MEM_REF)
6475 {
6476 /* Never use OMP_ATOMIC for array reductions or UDRs. */
6477 count = -1;
6478 break;
6479 }
6480 count++;
6481 }
6482
6483 if (count == 0)
6484 return;
6485
6486 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
6487 {
6488 tree var, ref, new_var, orig_var;
6489 enum tree_code code;
6490 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
6491
6492 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION
6493 || OMP_CLAUSE_REDUCTION_TASK (c))
6494 continue;
6495
6496 enum omp_clause_code ccode = OMP_CLAUSE_REDUCTION;
6497 orig_var = var = OMP_CLAUSE_DECL (c);
6498 if (TREE_CODE (var) == MEM_REF)
6499 {
6500 var = TREE_OPERAND (var, 0);
6501 if (TREE_CODE (var) == POINTER_PLUS_EXPR)
6502 var = TREE_OPERAND (var, 0);
6503 if (TREE_CODE (var) == ADDR_EXPR)
6504 var = TREE_OPERAND (var, 0);
6505 else
6506 {
6507 /* If this is a pointer- or reference-based array
6508 section, the var could be private in the outer
6509 context, e.g. on an orphaned loop construct. Pretend
6510 this is the private variable's outer reference. */
6511 ccode = OMP_CLAUSE_PRIVATE;
6512 if (TREE_CODE (var) == INDIRECT_REF)
6513 var = TREE_OPERAND (var, 0);
6514 }
6515 orig_var = var;
6516 if (is_variable_sized (var))
6517 {
6518 gcc_assert (DECL_HAS_VALUE_EXPR_P (var));
6519 var = DECL_VALUE_EXPR (var);
6520 gcc_assert (TREE_CODE (var) == INDIRECT_REF);
6521 var = TREE_OPERAND (var, 0);
6522 gcc_assert (DECL_P (var));
6523 }
6524 }
6525 new_var = lookup_decl (var, ctx);
6526 if (var == OMP_CLAUSE_DECL (c) && omp_is_reference (var))
6527 new_var = build_simple_mem_ref_loc (clause_loc, new_var);
6528 ref = build_outer_var_ref (var, ctx, ccode);
6529 code = OMP_CLAUSE_REDUCTION_CODE (c);
6530
6531 /* reduction(-:var) sums up the partial results, so it acts
6532 identically to reduction(+:var). */
6533 if (code == MINUS_EXPR)
6534 code = PLUS_EXPR;
6535
6536 if (count == 1)
6537 {
6538 tree addr = build_fold_addr_expr_loc (clause_loc, ref);
6539
6540 addr = save_expr (addr);
6541 ref = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (addr)), addr);
6542 x = fold_build2_loc (clause_loc, code, TREE_TYPE (ref), ref, new_var);
6543 x = build2 (OMP_ATOMIC, void_type_node, addr, x);
6544 OMP_ATOMIC_MEMORY_ORDER (x) = OMP_MEMORY_ORDER_RELAXED;
6545 gimplify_and_add (x, stmt_seqp);
6546 return;
6547 }
6548 else if (TREE_CODE (OMP_CLAUSE_DECL (c)) == MEM_REF)
6549 {
6550 tree d = OMP_CLAUSE_DECL (c);
6551 tree type = TREE_TYPE (d);
6552 tree v = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
6553 tree i = create_tmp_var (TREE_TYPE (v));
6554 tree ptype = build_pointer_type (TREE_TYPE (type));
6555 tree bias = TREE_OPERAND (d, 1);
6556 d = TREE_OPERAND (d, 0);
6557 if (TREE_CODE (d) == POINTER_PLUS_EXPR)
6558 {
6559 tree b = TREE_OPERAND (d, 1);
6560 b = maybe_lookup_decl (b, ctx);
6561 if (b == NULL)
6562 {
6563 b = TREE_OPERAND (d, 1);
6564 b = maybe_lookup_decl_in_outer_ctx (b, ctx);
6565 }
6566 if (integer_zerop (bias))
6567 bias = b;
6568 else
6569 {
6570 bias = fold_convert_loc (clause_loc, TREE_TYPE (b), bias);
6571 bias = fold_build2_loc (clause_loc, PLUS_EXPR,
6572 TREE_TYPE (b), b, bias);
6573 }
6574 d = TREE_OPERAND (d, 0);
6575 }
6576 /* For ref, build_outer_var_ref already performs this dereference,
6577 so only new_var needs one. */
6578 if (TREE_CODE (d) == INDIRECT_REF)
6579 {
6580 new_var = build_simple_mem_ref_loc (clause_loc, new_var);
6581 gcc_assert (omp_is_reference (var) && var == orig_var);
6582 }
6583 else if (TREE_CODE (d) == ADDR_EXPR)
6584 {
6585 if (orig_var == var)
6586 {
6587 new_var = build_fold_addr_expr (new_var);
6588 ref = build_fold_addr_expr (ref);
6589 }
6590 }
6591 else
6592 {
6593 gcc_assert (orig_var == var);
6594 if (omp_is_reference (var))
6595 ref = build_fold_addr_expr (ref);
6596 }
6597 if (DECL_P (v))
6598 {
6599 tree t = maybe_lookup_decl (v, ctx);
6600 if (t)
6601 v = t;
6602 else
6603 v = maybe_lookup_decl_in_outer_ctx (v, ctx);
6604 gimplify_expr (&v, stmt_seqp, NULL, is_gimple_val, fb_rvalue);
6605 }
6606 if (!integer_zerop (bias))
6607 {
6608 bias = fold_convert_loc (clause_loc, sizetype, bias);
6609 new_var = fold_build2_loc (clause_loc, POINTER_PLUS_EXPR,
6610 TREE_TYPE (new_var), new_var,
6611 unshare_expr (bias));
6612 ref = fold_build2_loc (clause_loc, POINTER_PLUS_EXPR,
6613 TREE_TYPE (ref), ref, bias);
6614 }
6615 new_var = fold_convert_loc (clause_loc, ptype, new_var);
6616 ref = fold_convert_loc (clause_loc, ptype, ref);
6617 tree m = create_tmp_var (ptype);
6618 gimplify_assign (m, new_var, stmt_seqp);
6619 new_var = m;
6620 m = create_tmp_var (ptype);
6621 gimplify_assign (m, ref, stmt_seqp);
6622 ref = m;
6623 gimplify_assign (i, build_int_cst (TREE_TYPE (v), 0), stmt_seqp);
6624 tree body = create_artificial_label (UNKNOWN_LOCATION);
6625 tree end = create_artificial_label (UNKNOWN_LOCATION);
6626 gimple_seq_add_stmt (&sub_seq, gimple_build_label (body));
6627 tree priv = build_simple_mem_ref_loc (clause_loc, new_var);
6628 tree out = build_simple_mem_ref_loc (clause_loc, ref);
6629 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
6630 {
6631 tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
6632 tree decl_placeholder
6633 = OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c);
6634 SET_DECL_VALUE_EXPR (placeholder, out);
6635 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
6636 SET_DECL_VALUE_EXPR (decl_placeholder, priv);
6637 DECL_HAS_VALUE_EXPR_P (decl_placeholder) = 1;
6638 lower_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c), ctx);
6639 gimple_seq_add_seq (&sub_seq,
6640 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c));
6641 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
6642 OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) = NULL;
6643 OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c) = NULL;
6644 }
6645 else
6646 {
6647 x = build2 (code, TREE_TYPE (out), out, priv);
6648 out = unshare_expr (out);
6649 gimplify_assign (out, x, &sub_seq);
6650 }
6651 gimple *g = gimple_build_assign (new_var, POINTER_PLUS_EXPR, new_var,
6652 TYPE_SIZE_UNIT (TREE_TYPE (type)));
6653 gimple_seq_add_stmt (&sub_seq, g);
6654 g = gimple_build_assign (ref, POINTER_PLUS_EXPR, ref,
6655 TYPE_SIZE_UNIT (TREE_TYPE (type)));
6656 gimple_seq_add_stmt (&sub_seq, g);
6657 g = gimple_build_assign (i, PLUS_EXPR, i,
6658 build_int_cst (TREE_TYPE (i), 1));
6659 gimple_seq_add_stmt (&sub_seq, g);
6660 g = gimple_build_cond (LE_EXPR, i, v, body, end);
6661 gimple_seq_add_stmt (&sub_seq, g);
6662 gimple_seq_add_stmt (&sub_seq, gimple_build_label (end));
6663 }
6664 else if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
6665 {
6666 tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
6667
6668 if (omp_is_reference (var)
6669 && !useless_type_conversion_p (TREE_TYPE (placeholder),
6670 TREE_TYPE (ref)))
6671 ref = build_fold_addr_expr_loc (clause_loc, ref);
6672 SET_DECL_VALUE_EXPR (placeholder, ref);
6673 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
6674 lower_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c), ctx);
6675 gimple_seq_add_seq (&sub_seq, OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c));
6676 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
6677 OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) = NULL;
6678 }
6679 else
6680 {
6681 x = build2 (code, TREE_TYPE (ref), ref, new_var);
6682 ref = build_outer_var_ref (var, ctx);
6683 gimplify_assign (ref, x, &sub_seq);
6684 }
6685 }
6686
6687 stmt = gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_ATOMIC_START),
6688 0);
6689 gimple_seq_add_stmt (stmt_seqp, stmt);
6690
6691 gimple_seq_add_seq (stmt_seqp, sub_seq);
6692
6693 if (clist)
6694 {
6695 gimple_seq_add_seq (stmt_seqp, *clist);
6696 *clist = NULL;
6697 }
6698
6699 stmt = gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_ATOMIC_END),
6700 0);
6701 gimple_seq_add_stmt (stmt_seqp, stmt);
6702 }
6703
6704
6705 /* Generate code to implement the COPYPRIVATE clauses. */
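/* A sketch of the scheme, with .omp_copy_s naming the communication
   record purely for exposition.  The thread that executed the single
   region stores each variable (or its address, if passed by
   reference) via SLIST:

     .omp_copy_s.x = x;

   and every other thread then copies the value back out via RLIST:

     x = .omp_copy_s.x;

   with an extra dereference on both sides in the by-reference case.  */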
6706
6707 static void
6708 lower_copyprivate_clauses (tree clauses, gimple_seq *slist, gimple_seq *rlist,
6709 omp_context *ctx)
6710 {
6711 tree c;
6712
6713 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
6714 {
6715 tree var, new_var, ref, x;
6716 bool by_ref;
6717 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
6718
6719 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_COPYPRIVATE)
6720 continue;
6721
6722 var = OMP_CLAUSE_DECL (c);
6723 by_ref = use_pointer_for_field (var, NULL);
6724
6725 ref = build_sender_ref (var, ctx);
6726 x = new_var = lookup_decl_in_outer_ctx (var, ctx);
6727 if (by_ref)
6728 {
6729 x = build_fold_addr_expr_loc (clause_loc, new_var);
6730 x = fold_convert_loc (clause_loc, TREE_TYPE (ref), x);
6731 }
6732 gimplify_assign (ref, x, slist);
6733
6734 ref = build_receiver_ref (var, false, ctx);
6735 if (by_ref)
6736 {
6737 ref = fold_convert_loc (clause_loc,
6738 build_pointer_type (TREE_TYPE (new_var)),
6739 ref);
6740 ref = build_fold_indirect_ref_loc (clause_loc, ref);
6741 }
6742 if (omp_is_reference (var))
6743 {
6744 ref = fold_convert_loc (clause_loc, TREE_TYPE (new_var), ref);
6745 ref = build_simple_mem_ref_loc (clause_loc, ref);
6746 new_var = build_simple_mem_ref_loc (clause_loc, new_var);
6747 }
6748 x = lang_hooks.decls.omp_clause_assign_op (c, new_var, ref);
6749 gimplify_and_add (x, rlist);
6750 }
6751 }
6752
6753
6754 /* Generate code to implement the clauses, FIRSTPRIVATE, COPYIN, LASTPRIVATE,
6755 and REDUCTION from the sender (aka parent) side. */
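/* For instance, a sketch with .omp_data_o naming the sender record
   for exposition: a firstprivate scalar is marshalled into ILIST as

     .omp_data_o.x = x;

   (or the address &x when passed by reference), while a non-reference
   lastprivate additionally gets the reverse copy in OLIST:

     x = .omp_data_o.x;
 */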
6756
6757 static void
6758 lower_send_clauses (tree clauses, gimple_seq *ilist, gimple_seq *olist,
6759 omp_context *ctx)
6760 {
6761 tree c, t;
6762 int ignored_looptemp = 0;
6763 bool is_taskloop = false;
6764
6765 /* For taskloop, ignore the first two _looptemp_ clauses; those are
6766 initialized by GOMP_taskloop. */
6767 if (is_task_ctx (ctx) && gimple_omp_task_taskloop_p (ctx->stmt))
6768 {
6769 ignored_looptemp = 2;
6770 is_taskloop = true;
6771 }
6772
6773 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
6774 {
6775 tree val, ref, x, var;
6776 bool by_ref, do_in = false, do_out = false;
6777 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
6778
6779 switch (OMP_CLAUSE_CODE (c))
6780 {
6781 case OMP_CLAUSE_PRIVATE:
6782 if (OMP_CLAUSE_PRIVATE_OUTER_REF (c))
6783 break;
6784 continue;
6785 case OMP_CLAUSE_FIRSTPRIVATE:
6786 case OMP_CLAUSE_COPYIN:
6787 case OMP_CLAUSE_LASTPRIVATE:
6788 case OMP_CLAUSE_IN_REDUCTION:
6789 case OMP_CLAUSE__REDUCTEMP_:
6790 break;
6791 case OMP_CLAUSE_REDUCTION:
6792 if (is_task_ctx (ctx) || OMP_CLAUSE_REDUCTION_TASK (c))
6793 continue;
6794 break;
6795 case OMP_CLAUSE_SHARED:
6796 if (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
6797 break;
6798 continue;
6799 case OMP_CLAUSE__LOOPTEMP_:
6800 if (ignored_looptemp)
6801 {
6802 ignored_looptemp--;
6803 continue;
6804 }
6805 break;
6806 default:
6807 continue;
6808 }
6809
6810 val = OMP_CLAUSE_DECL (c);
6811 if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
6812 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_IN_REDUCTION)
6813 && TREE_CODE (val) == MEM_REF)
6814 {
6815 val = TREE_OPERAND (val, 0);
6816 if (TREE_CODE (val) == POINTER_PLUS_EXPR)
6817 val = TREE_OPERAND (val, 0);
6818 if (TREE_CODE (val) == INDIRECT_REF
6819 || TREE_CODE (val) == ADDR_EXPR)
6820 val = TREE_OPERAND (val, 0);
6821 if (is_variable_sized (val))
6822 continue;
6823 }
6824
6825 /* For OMP_CLAUSE_SHARED_FIRSTPRIVATE, look beyond the
6826 outer taskloop region. */
6827 omp_context *ctx_for_o = ctx;
6828 if (is_taskloop
6829 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED
6830 && OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
6831 ctx_for_o = ctx->outer;
6832
6833 var = lookup_decl_in_outer_ctx (val, ctx_for_o);
6834
6835 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_COPYIN
6836 && is_global_var (var)
6837 && (val == OMP_CLAUSE_DECL (c)
6838 || !is_task_ctx (ctx)
6839 || (TREE_CODE (TREE_TYPE (val)) != POINTER_TYPE
6840 && (TREE_CODE (TREE_TYPE (val)) != REFERENCE_TYPE
6841 || (TREE_CODE (TREE_TYPE (TREE_TYPE (val)))
6842 != POINTER_TYPE)))))
6843 continue;
6844
6845 t = omp_member_access_dummy_var (var);
6846 if (t)
6847 {
6848 var = DECL_VALUE_EXPR (var);
6849 tree o = maybe_lookup_decl_in_outer_ctx (t, ctx_for_o);
6850 if (o != t)
6851 var = unshare_and_remap (var, t, o);
6852 else
6853 var = unshare_expr (var);
6854 }
6855
6856 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED)
6857 {
6858 /* Handle taskloop firstprivate/lastprivate, where the
6859 lastprivate on GIMPLE_OMP_TASK is represented as
6860 OMP_CLAUSE_SHARED_FIRSTPRIVATE. */
6861 tree f = lookup_sfield ((splay_tree_key) &DECL_UID (val), ctx);
6862 x = omp_build_component_ref (ctx->sender_decl, f);
6863 if (use_pointer_for_field (val, ctx))
6864 var = build_fold_addr_expr (var);
6865 gimplify_assign (x, var, ilist);
6866 DECL_ABSTRACT_ORIGIN (f) = NULL;
6867 continue;
6868 }
6869
6870 if (((OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION
6871 && OMP_CLAUSE_CODE (c) != OMP_CLAUSE_IN_REDUCTION)
6872 || val == OMP_CLAUSE_DECL (c))
6873 && is_variable_sized (val))
6874 continue;
6875 by_ref = use_pointer_for_field (val, NULL);
6876
6877 switch (OMP_CLAUSE_CODE (c))
6878 {
6879 case OMP_CLAUSE_FIRSTPRIVATE:
6880 if (OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (c)
6881 && !by_ref
6882 && is_task_ctx (ctx))
6883 TREE_NO_WARNING (var) = 1;
6884 do_in = true;
6885 break;
6886
6887 case OMP_CLAUSE_PRIVATE:
6888 case OMP_CLAUSE_COPYIN:
6889 case OMP_CLAUSE__LOOPTEMP_:
6890 case OMP_CLAUSE__REDUCTEMP_:
6891 do_in = true;
6892 break;
6893
6894 case OMP_CLAUSE_LASTPRIVATE:
6895 if (by_ref || omp_is_reference (val))
6896 {
6897 if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
6898 continue;
6899 do_in = true;
6900 }
6901 else
6902 {
6903 do_out = true;
6904 if (lang_hooks.decls.omp_private_outer_ref (val))
6905 do_in = true;
6906 }
6907 break;
6908
6909 case OMP_CLAUSE_REDUCTION:
6910 case OMP_CLAUSE_IN_REDUCTION:
6911 do_in = true;
6912 if (val == OMP_CLAUSE_DECL (c))
6913 {
6914 if (is_task_ctx (ctx))
6915 by_ref = use_pointer_for_field (val, ctx);
6916 else
6917 do_out = !(by_ref || omp_is_reference (val));
6918 }
6919 else
6920 by_ref = TREE_CODE (TREE_TYPE (val)) == ARRAY_TYPE;
6921 break;
6922
6923 default:
6924 gcc_unreachable ();
6925 }
6926
6927 if (do_in)
6928 {
6929 ref = build_sender_ref (val, ctx);
6930 x = by_ref ? build_fold_addr_expr_loc (clause_loc, var) : var;
6931 gimplify_assign (ref, x, ilist);
6932 if (is_task_ctx (ctx))
6933 DECL_ABSTRACT_ORIGIN (TREE_OPERAND (ref, 1)) = NULL;
6934 }
6935
6936 if (do_out)
6937 {
6938 ref = build_sender_ref (val, ctx);
6939 gimplify_assign (var, ref, olist);
6940 }
6941 }
6942 }
6943
6944 /* Generate code to implement SHARED from the sender (aka parent)
6945 side. This is trickier, since GIMPLE_OMP_PARALLEL_CLAUSES doesn't
6946 list things that got automatically shared. */
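/* Illustratively, the loop below walks the fields of the
   communication record instead of a clause list; for each implicitly
   shared variable V it emits into ILIST roughly

     .omp_data_o.v = v;

   (or &v when use_pointer_for_field), plus the reverse copy into
   OLIST when V may have been modified by the region (a sketch, not
   the literal output).  */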
6947
6948 static void
6949 lower_send_shared_vars (gimple_seq *ilist, gimple_seq *olist, omp_context *ctx)
6950 {
6951 tree var, ovar, nvar, t, f, x, record_type;
6952
6953 if (ctx->record_type == NULL)
6954 return;
6955
6956 record_type = ctx->srecord_type ? ctx->srecord_type : ctx->record_type;
6957 for (f = TYPE_FIELDS (record_type); f ; f = DECL_CHAIN (f))
6958 {
6959 ovar = DECL_ABSTRACT_ORIGIN (f);
6960 if (!ovar || TREE_CODE (ovar) == FIELD_DECL)
6961 continue;
6962
6963 nvar = maybe_lookup_decl (ovar, ctx);
6964 if (!nvar || !DECL_HAS_VALUE_EXPR_P (nvar))
6965 continue;
6966
6967 /* If CTX is a nested parallel directive, find the immediately
6968 enclosing parallel or workshare construct that contains a
6969 mapping for OVAR. */
6970 var = lookup_decl_in_outer_ctx (ovar, ctx);
6971
6972 t = omp_member_access_dummy_var (var);
6973 if (t)
6974 {
6975 var = DECL_VALUE_EXPR (var);
6976 tree o = maybe_lookup_decl_in_outer_ctx (t, ctx);
6977 if (o != t)
6978 var = unshare_and_remap (var, t, o);
6979 else
6980 var = unshare_expr (var);
6981 }
6982
6983 if (use_pointer_for_field (ovar, ctx))
6984 {
6985 x = build_sender_ref (ovar, ctx);
6986 if (TREE_CODE (TREE_TYPE (f)) == ARRAY_TYPE
6987 && TREE_TYPE (f) == TREE_TYPE (ovar))
6988 {
6989 gcc_assert (is_parallel_ctx (ctx)
6990 && DECL_ARTIFICIAL (ovar));
6991 /* _condtemp_ clause. */
6992 var = build_constructor (TREE_TYPE (x), NULL);
6993 }
6994 else
6995 var = build_fold_addr_expr (var);
6996 gimplify_assign (x, var, ilist);
6997 }
6998 else
6999 {
7000 x = build_sender_ref (ovar, ctx);
7001 gimplify_assign (x, var, ilist);
7002
7003 if (!TREE_READONLY (var)
7004 /* We don't need to receive a new reference to a result
7005 or parm decl. In fact we may not store to it as we will
7006 invalidate any pending RSO and generate wrong gimple
7007 during inlining. */
7008 && !((TREE_CODE (var) == RESULT_DECL
7009 || TREE_CODE (var) == PARM_DECL)
7010 && DECL_BY_REFERENCE (var)))
7011 {
7012 x = build_sender_ref (ovar, ctx);
7013 gimplify_assign (var, x, olist);
7014 }
7015 }
7016 }
7017 }
7018
7019 /* Emit an OpenACC head marker call, encapsulating the partitioning
7020 other information that must be processed by the target compiler.
7021 Return the maximum number of dimensions the associated loop might
7022 be partitioned over. */
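/* The marker is a single IFN_UNIQUE internal call, rendered
   illustratively as

     ddvar = .UNIQUE (OACC_HEAD_MARK, ddvar, levels, tag [, gang_static]);

   where tag packs the OLF_* partitioning bits computed from the
   clauses below.  */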
7023
7024 static unsigned
7025 lower_oacc_head_mark (location_t loc, tree ddvar, tree clauses,
7026 gimple_seq *seq, omp_context *ctx)
7027 {
7028 unsigned levels = 0;
7029 unsigned tag = 0;
7030 tree gang_static = NULL_TREE;
7031 auto_vec<tree, 5> args;
7032
7033 args.quick_push (build_int_cst
7034 (integer_type_node, IFN_UNIQUE_OACC_HEAD_MARK));
7035 args.quick_push (ddvar);
7036 for (tree c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
7037 {
7038 switch (OMP_CLAUSE_CODE (c))
7039 {
7040 case OMP_CLAUSE_GANG:
7041 tag |= OLF_DIM_GANG;
7042 gang_static = OMP_CLAUSE_GANG_STATIC_EXPR (c);
7043 /* static:* is represented by -1, and we can ignore it, as
7044 scheduling is always static. */
7045 if (gang_static && integer_minus_onep (gang_static))
7046 gang_static = NULL_TREE;
7047 levels++;
7048 break;
7049
7050 case OMP_CLAUSE_WORKER:
7051 tag |= OLF_DIM_WORKER;
7052 levels++;
7053 break;
7054
7055 case OMP_CLAUSE_VECTOR:
7056 tag |= OLF_DIM_VECTOR;
7057 levels++;
7058 break;
7059
7060 case OMP_CLAUSE_SEQ:
7061 tag |= OLF_SEQ;
7062 break;
7063
7064 case OMP_CLAUSE_AUTO:
7065 tag |= OLF_AUTO;
7066 break;
7067
7068 case OMP_CLAUSE_INDEPENDENT:
7069 tag |= OLF_INDEPENDENT;
7070 break;
7071
7072 case OMP_CLAUSE_TILE:
7073 tag |= OLF_TILE;
7074 break;
7075
7076 default:
7077 continue;
7078 }
7079 }
7080
7081 if (gang_static)
7082 {
7083 if (DECL_P (gang_static))
7084 gang_static = build_outer_var_ref (gang_static, ctx);
7085 tag |= OLF_GANG_STATIC;
7086 }
7087
7088 /* In a parallel region, loops are implicitly INDEPENDENT. */
7089 omp_context *tgt = enclosing_target_ctx (ctx);
7090 if (!tgt || is_oacc_parallel (tgt))
7091 tag |= OLF_INDEPENDENT;
7092
7093 if (tag & OLF_TILE)
7094 /* Tiling could use all 3 levels. */
7095 levels = 3;
7096 else
7097 {
7098 /* A loop lacking SEQ, GANG, WORKER and/or VECTOR could be AUTO.
7099 Ensure at least one level, or two for possible auto
7100 partitioning. */
7101 bool maybe_auto = !(tag & (((GOMP_DIM_MASK (GOMP_DIM_MAX) - 1)
7102 << OLF_DIM_BASE) | OLF_SEQ));
7103
7104 if (levels < 1u + maybe_auto)
7105 levels = 1u + maybe_auto;
7106 }
7107
7108 args.quick_push (build_int_cst (integer_type_node, levels));
7109 args.quick_push (build_int_cst (integer_type_node, tag));
7110 if (gang_static)
7111 args.quick_push (gang_static);
7112
7113 gcall *call = gimple_build_call_internal_vec (IFN_UNIQUE, args);
7114 gimple_set_location (call, loc);
7115 gimple_set_lhs (call, ddvar);
7116 gimple_seq_add_stmt (seq, call);
7117
7118 return levels;
7119 }
7120
7121 /* Emit an OpenACC loop head or tail marker to SEQ. TOFOLLOW, when
7122 non-NULL, is the partitioning level of the enclosed region. */
7123
7124 static void
7125 lower_oacc_loop_marker (location_t loc, tree ddvar, bool head,
7126 tree tofollow, gimple_seq *seq)
7127 {
7128 int marker_kind = (head ? IFN_UNIQUE_OACC_HEAD_MARK
7129 : IFN_UNIQUE_OACC_TAIL_MARK);
7130 tree marker = build_int_cst (integer_type_node, marker_kind);
7131 int nargs = 2 + (tofollow != NULL_TREE);
7132 gcall *call = gimple_build_call_internal (IFN_UNIQUE, nargs,
7133 marker, ddvar, tofollow);
7134 gimple_set_location (call, loc);
7135 gimple_set_lhs (call, ddvar);
7136 gimple_seq_add_stmt (seq, call);
7137 }
7138
7139 /* Generate the before and after OpenACC loop sequences. CLAUSES are
7140 the loop clauses, from which we extract reductions. Initialize
7141 HEAD and TAIL. */
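/* For a loop partitioned over two levels the result nests roughly as
   follows (a sketch; the setup/init/fini/teardown steps come from
   lower_oacc_reductions, the marks from lower_oacc_loop_marker):

     HEAD:  mark  setup1 FORK init1  mark  setup2 FORK init2  mark
     TAIL:  mark  fini2 JOIN teardown2  mark  fini1 JOIN teardown1  mark

   with level 1 the outer axis and level 2 the inner one.  */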
7142
7143 static void
7144 lower_oacc_head_tail (location_t loc, tree clauses,
7145 gimple_seq *head, gimple_seq *tail, omp_context *ctx)
7146 {
7147 bool inner = false;
7148 tree ddvar = create_tmp_var (integer_type_node, ".data_dep");
7149 gimple_seq_add_stmt (head, gimple_build_assign (ddvar, integer_zero_node));
7150
7151 unsigned count = lower_oacc_head_mark (loc, ddvar, clauses, head, ctx);
7152 tree fork_kind = build_int_cst (unsigned_type_node, IFN_UNIQUE_OACC_FORK);
7153 tree join_kind = build_int_cst (unsigned_type_node, IFN_UNIQUE_OACC_JOIN);
7154
7155 gcc_assert (count);
7156 for (unsigned done = 1; count; count--, done++)
7157 {
7158 gimple_seq fork_seq = NULL;
7159 gimple_seq join_seq = NULL;
7160
7161 tree place = build_int_cst (integer_type_node, -1);
7162 gcall *fork = gimple_build_call_internal (IFN_UNIQUE, 3,
7163 fork_kind, ddvar, place);
7164 gimple_set_location (fork, loc);
7165 gimple_set_lhs (fork, ddvar);
7166
7167 gcall *join = gimple_build_call_internal (IFN_UNIQUE, 3,
7168 join_kind, ddvar, place);
7169 gimple_set_location (join, loc);
7170 gimple_set_lhs (join, ddvar);
7171
7172 /* Mark the beginning of this level sequence. */
7173 if (inner)
7174 lower_oacc_loop_marker (loc, ddvar, true,
7175 build_int_cst (integer_type_node, count),
7176 &fork_seq);
7177 lower_oacc_loop_marker (loc, ddvar, false,
7178 build_int_cst (integer_type_node, done),
7179 &join_seq);
7180
7181 lower_oacc_reductions (loc, clauses, place, inner,
7182 fork, join, &fork_seq, &join_seq, ctx);
7183
7184 /* Append this level to head. */
7185 gimple_seq_add_seq (head, fork_seq);
7186 /* Prepend it to tail. */
7187 gimple_seq_add_seq (&join_seq, *tail);
7188 *tail = join_seq;
7189
7190 inner = true;
7191 }
7192
7193 /* Mark the end of the sequence. */
7194 lower_oacc_loop_marker (loc, ddvar, true, NULL_TREE, head);
7195 lower_oacc_loop_marker (loc, ddvar, false, NULL_TREE, tail);
7196 }
7197
7198 /* If exceptions are enabled, wrap the statements in BODY in a MUST_NOT_THROW
7199 catch handler and return it. This prevents programs from violating the
7200 structured block semantics with throws. */
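/* Roughly, BODY becomes the GIMPLE equivalent of

       try { BODY } catch { <eh_protect_cleanup_actions or __builtin_trap> }

   with the handler wrapped in GIMPLE_EH_MUST_NOT_THROW, so an exception
   escaping BODY terminates rather than unwinds.  */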
7201
7202 static gimple_seq
7203 maybe_catch_exception (gimple_seq body)
7204 {
7205 gimple *g;
7206 tree decl;
7207
7208 if (!flag_exceptions)
7209 return body;
7210
7211 if (lang_hooks.eh_protect_cleanup_actions != NULL)
7212 decl = lang_hooks.eh_protect_cleanup_actions ();
7213 else
7214 decl = builtin_decl_explicit (BUILT_IN_TRAP);
7215
7216 g = gimple_build_eh_must_not_throw (decl);
7217 g = gimple_build_try (body, gimple_seq_alloc_with_stmt (g),
7218 GIMPLE_TRY_CATCH);
7219
7220 return gimple_seq_alloc_with_stmt (g);
7221 }
7222
7223 \f
7224 /* Routines to lower OMP directives into OMP-GIMPLE. */
7225
7226 /* If CTX is a worksharing context inside a cancellable parallel
7227 region and the construct isn't nowait, add an LHS to its GIMPLE_OMP_RETURN
7228 and a conditional branch to the parallel's cancel_label to handle
7229 cancellation in the implicit barrier. */
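/* A sketch of the tail emitted below; LHS presumably receives the
   cancelled-or-not result once the barrier is expanded:

       <GIMPLE_OMP_RETURN, lhs = LHS>
       if (LHS != 0) goto <parallel's cancel_label>;
     fallthru:  */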
7230
7231 static void
7232 maybe_add_implicit_barrier_cancel (omp_context *ctx, gimple *omp_return,
7233 gimple_seq *body)
7234 {
7235 gcc_assert (gimple_code (omp_return) == GIMPLE_OMP_RETURN);
7236 if (gimple_omp_return_nowait_p (omp_return))
7237 return;
7238 for (omp_context *outer = ctx->outer; outer; outer = outer->outer)
7239 if (gimple_code (outer->stmt) == GIMPLE_OMP_PARALLEL
7240 && outer->cancellable)
7241 {
7242 tree fndecl = builtin_decl_explicit (BUILT_IN_GOMP_CANCEL);
7243 tree c_bool_type = TREE_TYPE (TREE_TYPE (fndecl));
7244 tree lhs = create_tmp_var (c_bool_type);
7245 gimple_omp_return_set_lhs (omp_return, lhs);
7246 tree fallthru_label = create_artificial_label (UNKNOWN_LOCATION);
7247 gimple *g = gimple_build_cond (NE_EXPR, lhs,
7248 fold_convert (c_bool_type,
7249 boolean_false_node),
7250 outer->cancel_label, fallthru_label);
7251 gimple_seq_add_stmt (body, g);
7252 gimple_seq_add_stmt (body, gimple_build_label (fallthru_label));
7253 }
7254 else if (gimple_code (outer->stmt) != GIMPLE_OMP_TASKGROUP)
7255 return;
7256 }
7257
7258 /* Find the first task_reduction or reduction clause or return NULL
7259 if there are none. */
7260
7261 static inline tree
7262 omp_task_reductions_find_first (tree clauses, enum tree_code code,
7263 enum omp_clause_code ccode)
7264 {
7265 while (1)
7266 {
7267 clauses = omp_find_clause (clauses, ccode);
7268 if (clauses == NULL_TREE)
7269 return NULL_TREE;
7270 if (ccode != OMP_CLAUSE_REDUCTION
7271 || code == OMP_TASKLOOP
7272 || OMP_CLAUSE_REDUCTION_TASK (clauses))
7273 return clauses;
7274 clauses = OMP_CLAUSE_CHAIN (clauses);
7275 }
7276 }
7277
7278 static void lower_omp_task_reductions (omp_context *, enum tree_code, tree,
7279 gimple_seq *, gimple_seq *);
7280
7281 /* Lower the OpenMP sections directive in the current statement in GSI_P.
7282 CTX is the enclosing OMP context for the current statement. */
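/* The replacement emitted below has approximately this shape:

       <ilist: privatization/reduction setup>
       GIMPLE_OMP_SECTIONS <clauses, control var>
       GIMPLE_OMP_SECTIONS_SWITCH
       bind { <each lowered section body, ended by GIMPLE_OMP_RETURN> }
       GIMPLE_OMP_CONTINUE <control, control>
       <olist: reductions>  <dlist: destructors>
       GIMPLE_OMP_RETURN [nowait]

   which pass_expand_omp later turns into a switch driven by the control
   variable.  */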
7283
7284 static void
7285 lower_omp_sections (gimple_stmt_iterator *gsi_p, omp_context *ctx)
7286 {
7287 tree block, control;
7288 gimple_stmt_iterator tgsi;
7289 gomp_sections *stmt;
7290 gimple *t;
7291 gbind *new_stmt, *bind;
7292 gimple_seq ilist, dlist, olist, tred_dlist = NULL, clist = NULL, new_body;
7293
7294 stmt = as_a <gomp_sections *> (gsi_stmt (*gsi_p));
7295
7296 push_gimplify_context ();
7297
7298 dlist = NULL;
7299 ilist = NULL;
7300
7301 tree rclauses
7302 = omp_task_reductions_find_first (gimple_omp_sections_clauses (stmt),
7303 OMP_SECTIONS, OMP_CLAUSE_REDUCTION);
7304 tree rtmp = NULL_TREE;
7305 if (rclauses)
7306 {
7307 tree type = build_pointer_type (pointer_sized_int_node);
7308 tree temp = create_tmp_var (type);
7309 tree c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__REDUCTEMP_);
7310 OMP_CLAUSE_DECL (c) = temp;
7311 OMP_CLAUSE_CHAIN (c) = gimple_omp_sections_clauses (stmt);
7312 gimple_omp_sections_set_clauses (stmt, c);
7313 lower_omp_task_reductions (ctx, OMP_SECTIONS,
7314 gimple_omp_sections_clauses (stmt),
7315 &ilist, &tred_dlist);
7316 rclauses = c;
7317 rtmp = make_ssa_name (type);
7318 gimple_seq_add_stmt (&ilist, gimple_build_assign (rtmp, temp));
7319 }
7320
7321 tree *clauses_ptr = gimple_omp_sections_clauses_ptr (stmt);
7322 lower_lastprivate_conditional_clauses (clauses_ptr, ctx);
7323
7324 lower_rec_input_clauses (gimple_omp_sections_clauses (stmt),
7325 &ilist, &dlist, ctx, NULL);
7326
7327 control = create_tmp_var (unsigned_type_node, ".section");
7328 gimple_omp_sections_set_control (stmt, control);
7329
7330 new_body = gimple_omp_body (stmt);
7331 gimple_omp_set_body (stmt, NULL);
7332 tgsi = gsi_start (new_body);
7333 for (; !gsi_end_p (tgsi); gsi_next (&tgsi))
7334 {
7335 omp_context *sctx;
7336 gimple *sec_start;
7337
7338 sec_start = gsi_stmt (tgsi);
7339 sctx = maybe_lookup_ctx (sec_start);
7340 gcc_assert (sctx);
7341
7342 lower_omp (gimple_omp_body_ptr (sec_start), sctx);
7343 gsi_insert_seq_after (&tgsi, gimple_omp_body (sec_start),
7344 GSI_CONTINUE_LINKING);
7345 gimple_omp_set_body (sec_start, NULL);
7346
7347 if (gsi_one_before_end_p (tgsi))
7348 {
7349 gimple_seq l = NULL;
7350 lower_lastprivate_clauses (gimple_omp_sections_clauses (stmt), NULL,
7351 &ilist, &l, &clist, ctx);
7352 gsi_insert_seq_after (&tgsi, l, GSI_CONTINUE_LINKING);
7353 gimple_omp_section_set_last (sec_start);
7354 }
7355
7356 gsi_insert_after (&tgsi, gimple_build_omp_return (false),
7357 GSI_CONTINUE_LINKING);
7358 }
7359
7360 block = make_node (BLOCK);
7361 bind = gimple_build_bind (NULL, new_body, block);
7362
7363 olist = NULL;
7364 lower_reduction_clauses (gimple_omp_sections_clauses (stmt), &olist,
7365 &clist, ctx);
7366 if (clist)
7367 {
7368 tree fndecl = builtin_decl_explicit (BUILT_IN_GOMP_ATOMIC_START);
7369 gcall *g = gimple_build_call (fndecl, 0);
7370 gimple_seq_add_stmt (&olist, g);
7371 gimple_seq_add_seq (&olist, clist);
7372 fndecl = builtin_decl_explicit (BUILT_IN_GOMP_ATOMIC_END);
7373 g = gimple_build_call (fndecl, 0);
7374 gimple_seq_add_stmt (&olist, g);
7375 }
7376
7377 block = make_node (BLOCK);
7378 new_stmt = gimple_build_bind (NULL, NULL, block);
7379 gsi_replace (gsi_p, new_stmt, true);
7380
7381 pop_gimplify_context (new_stmt);
7382 gimple_bind_append_vars (new_stmt, ctx->block_vars);
7383 BLOCK_VARS (block) = gimple_bind_vars (bind);
7384 if (BLOCK_VARS (block))
7385 TREE_USED (block) = 1;
7386
7387 new_body = NULL;
7388 gimple_seq_add_seq (&new_body, ilist);
7389 gimple_seq_add_stmt (&new_body, stmt);
7390 gimple_seq_add_stmt (&new_body, gimple_build_omp_sections_switch ());
7391 gimple_seq_add_stmt (&new_body, bind);
7392
7393 t = gimple_build_omp_continue (control, control);
7394 gimple_seq_add_stmt (&new_body, t);
7395
7396 gimple_seq_add_seq (&new_body, olist);
7397 if (ctx->cancellable)
7398 gimple_seq_add_stmt (&new_body, gimple_build_label (ctx->cancel_label));
7399 gimple_seq_add_seq (&new_body, dlist);
7400
7401 new_body = maybe_catch_exception (new_body);
7402
7403 bool nowait = omp_find_clause (gimple_omp_sections_clauses (stmt),
7404 OMP_CLAUSE_NOWAIT) != NULL_TREE;
7405 t = gimple_build_omp_return (nowait);
7406 gimple_seq_add_stmt (&new_body, t);
7407 gimple_seq_add_seq (&new_body, tred_dlist);
7408 maybe_add_implicit_barrier_cancel (ctx, t, &new_body);
7409
7410 if (rclauses)
7411 OMP_CLAUSE_DECL (rclauses) = rtmp;
7412
7413 gimple_bind_set_body (new_stmt, new_body);
7414 }
7415
7416
7417 /* A subroutine of lower_omp_single. Expand the simple form of
7418 a GIMPLE_OMP_SINGLE, without a copyprivate clause:
7419
7420 if (GOMP_single_start ())
7421 BODY;
7422 [ GOMP_barrier (); ] -> unless 'nowait' is present.
7423
7424 FIXME. It may be better to delay expanding the logic of this until
7425 pass_expand_omp. The expanded logic may make the job more difficult
7426 for a synchronization analysis pass. */
7427
7428 static void
7429 lower_omp_single_simple (gomp_single *single_stmt, gimple_seq *pre_p)
7430 {
7431 location_t loc = gimple_location (single_stmt);
7432 tree tlabel = create_artificial_label (loc);
7433 tree flabel = create_artificial_label (loc);
7434 gimple *call, *cond;
7435 tree lhs, decl;
7436
7437 decl = builtin_decl_explicit (BUILT_IN_GOMP_SINGLE_START);
7438 lhs = create_tmp_var (TREE_TYPE (TREE_TYPE (decl)));
7439 call = gimple_build_call (decl, 0);
7440 gimple_call_set_lhs (call, lhs);
7441 gimple_seq_add_stmt (pre_p, call);
7442
7443 cond = gimple_build_cond (EQ_EXPR, lhs,
7444 fold_convert_loc (loc, TREE_TYPE (lhs),
7445 boolean_true_node),
7446 tlabel, flabel);
7447 gimple_seq_add_stmt (pre_p, cond);
7448 gimple_seq_add_stmt (pre_p, gimple_build_label (tlabel));
7449 gimple_seq_add_seq (pre_p, gimple_omp_body (single_stmt));
7450 gimple_seq_add_stmt (pre_p, gimple_build_label (flabel));
7451 }
7452
7453
7454 /* A subroutine of lower_omp_single. Expand the simple form of
7455 a GIMPLE_OMP_SINGLE, with a copyprivate clause:
7456
7457 #pragma omp single copyprivate (a, b, c)
7458
7459 Create a new structure to hold copies of 'a', 'b' and 'c' and emit:
7460
7461 {
7462 if ((copyout_p = GOMP_single_copy_start ()) == NULL)
7463 {
7464 BODY;
7465 copyout.a = a;
7466 copyout.b = b;
7467 copyout.c = c;
7468 GOMP_single_copy_end (&copyout);
7469 }
7470 else
7471 {
7472 a = copyout_p->a;
7473 b = copyout_p->b;
7474 c = copyout_p->c;
7475 }
7476 GOMP_barrier ();
7477 }
7478
7479 FIXME. It may be better to delay expanding the logic of this until
7480 pass_expand_omp. The expanded logic may make the job more difficult
7481 for a synchronization analysis pass. */
7482
7483 static void
7484 lower_omp_single_copy (gomp_single *single_stmt, gimple_seq *pre_p,
7485 omp_context *ctx)
7486 {
7487 tree ptr_type, t, l0, l1, l2, bfn_decl;
7488 gimple_seq copyin_seq;
7489 location_t loc = gimple_location (single_stmt);
7490
7491 ctx->sender_decl = create_tmp_var (ctx->record_type, ".omp_copy_o");
7492
7493 ptr_type = build_pointer_type (ctx->record_type);
7494 ctx->receiver_decl = create_tmp_var (ptr_type, ".omp_copy_i");
7495
7496 l0 = create_artificial_label (loc);
7497 l1 = create_artificial_label (loc);
7498 l2 = create_artificial_label (loc);
7499
7500 bfn_decl = builtin_decl_explicit (BUILT_IN_GOMP_SINGLE_COPY_START);
7501 t = build_call_expr_loc (loc, bfn_decl, 0);
7502 t = fold_convert_loc (loc, ptr_type, t);
7503 gimplify_assign (ctx->receiver_decl, t, pre_p);
7504
7505 t = build2 (EQ_EXPR, boolean_type_node, ctx->receiver_decl,
7506 build_int_cst (ptr_type, 0));
7507 t = build3 (COND_EXPR, void_type_node, t,
7508 build_and_jump (&l0), build_and_jump (&l1));
7509 gimplify_and_add (t, pre_p);
7510
7511 gimple_seq_add_stmt (pre_p, gimple_build_label (l0));
7512
7513 gimple_seq_add_seq (pre_p, gimple_omp_body (single_stmt));
7514
7515 copyin_seq = NULL;
7516 lower_copyprivate_clauses (gimple_omp_single_clauses (single_stmt), pre_p,
7517 &copyin_seq, ctx);
7518
7519 t = build_fold_addr_expr_loc (loc, ctx->sender_decl);
7520 bfn_decl = builtin_decl_explicit (BUILT_IN_GOMP_SINGLE_COPY_END);
7521 t = build_call_expr_loc (loc, bfn_decl, 1, t);
7522 gimplify_and_add (t, pre_p);
7523
7524 t = build_and_jump (&l2);
7525 gimplify_and_add (t, pre_p);
7526
7527 gimple_seq_add_stmt (pre_p, gimple_build_label (l1));
7528
7529 gimple_seq_add_seq (pre_p, copyin_seq);
7530
7531 gimple_seq_add_stmt (pre_p, gimple_build_label (l2));
7532 }
7533
7534
7535 /* Expand code for an OpenMP single directive. */
7536
7537 static void
7538 lower_omp_single (gimple_stmt_iterator *gsi_p, omp_context *ctx)
7539 {
7540 tree block;
7541 gomp_single *single_stmt = as_a <gomp_single *> (gsi_stmt (*gsi_p));
7542 gbind *bind;
7543 gimple_seq bind_body, bind_body_tail = NULL, dlist;
7544
7545 push_gimplify_context ();
7546
7547 block = make_node (BLOCK);
7548 bind = gimple_build_bind (NULL, NULL, block);
7549 gsi_replace (gsi_p, bind, true);
7550 bind_body = NULL;
7551 dlist = NULL;
7552 lower_rec_input_clauses (gimple_omp_single_clauses (single_stmt),
7553 &bind_body, &dlist, ctx, NULL);
7554 lower_omp (gimple_omp_body_ptr (single_stmt), ctx);
7555
7556 gimple_seq_add_stmt (&bind_body, single_stmt);
7557
7558 if (ctx->record_type)
7559 lower_omp_single_copy (single_stmt, &bind_body, ctx);
7560 else
7561 lower_omp_single_simple (single_stmt, &bind_body);
7562
7563 gimple_omp_set_body (single_stmt, NULL);
7564
7565 gimple_seq_add_seq (&bind_body, dlist);
7566
7567 bind_body = maybe_catch_exception (bind_body);
7568
7569 bool nowait = omp_find_clause (gimple_omp_single_clauses (single_stmt),
7570 OMP_CLAUSE_NOWAIT) != NULL_TREE;
7571 gimple *g = gimple_build_omp_return (nowait);
7572 gimple_seq_add_stmt (&bind_body_tail, g);
7573 maybe_add_implicit_barrier_cancel (ctx, g, &bind_body_tail);
7574 if (ctx->record_type)
7575 {
7576 gimple_stmt_iterator gsi = gsi_start (bind_body_tail);
7577 tree clobber = build_constructor (ctx->record_type, NULL);
7578 TREE_THIS_VOLATILE (clobber) = 1;
7579 gsi_insert_after (&gsi, gimple_build_assign (ctx->sender_decl,
7580 clobber), GSI_SAME_STMT);
7581 }
7582 gimple_seq_add_seq (&bind_body, bind_body_tail);
7583 gimple_bind_set_body (bind, bind_body);
7584
7585 pop_gimplify_context (bind);
7586
7587 gimple_bind_append_vars (bind, ctx->block_vars);
7588 BLOCK_VARS (block) = ctx->block_vars;
7589 if (BLOCK_VARS (block))
7590 TREE_USED (block) = 1;
7591 }
7592
7593
7594 /* Expand code for an OpenMP master directive. */
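/* A minimal sketch of the expansion, in C terms:

       if (omp_get_thread_num () != 0) goto lab;
       BODY;
     lab:

   followed by a nowait GIMPLE_OMP_RETURN.  */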
7595
7596 static void
7597 lower_omp_master (gimple_stmt_iterator *gsi_p, omp_context *ctx)
7598 {
7599 tree block, lab = NULL, x, bfn_decl;
7600 gimple *stmt = gsi_stmt (*gsi_p);
7601 gbind *bind;
7602 location_t loc = gimple_location (stmt);
7603 gimple_seq tseq;
7604
7605 push_gimplify_context ();
7606
7607 block = make_node (BLOCK);
7608 bind = gimple_build_bind (NULL, NULL, block);
7609 gsi_replace (gsi_p, bind, true);
7610 gimple_bind_add_stmt (bind, stmt);
7611
7612 bfn_decl = builtin_decl_explicit (BUILT_IN_OMP_GET_THREAD_NUM);
7613 x = build_call_expr_loc (loc, bfn_decl, 0);
7614 x = build2 (EQ_EXPR, boolean_type_node, x, integer_zero_node);
7615 x = build3 (COND_EXPR, void_type_node, x, NULL, build_and_jump (&lab));
7616 tseq = NULL;
7617 gimplify_and_add (x, &tseq);
7618 gimple_bind_add_seq (bind, tseq);
7619
7620 lower_omp (gimple_omp_body_ptr (stmt), ctx);
7621 gimple_omp_set_body (stmt, maybe_catch_exception (gimple_omp_body (stmt)));
7622 gimple_bind_add_seq (bind, gimple_omp_body (stmt));
7623 gimple_omp_set_body (stmt, NULL);
7624
7625 gimple_bind_add_stmt (bind, gimple_build_label (lab));
7626
7627 gimple_bind_add_stmt (bind, gimple_build_omp_return (true));
7628
7629 pop_gimplify_context (bind);
7630
7631 gimple_bind_append_vars (bind, ctx->block_vars);
7632 BLOCK_VARS (block) = ctx->block_vars;
7633 }
7634
7635 /* Helper function for lower_omp_task_reductions. For a specific PASS
7636 (0: constant-sized types, 1: the rest), find the next clause that should
7637 be processed in it, or return false if all have been processed already. */
7638
7639 static inline bool
7640 omp_task_reduction_iterate (int pass, enum tree_code code,
7641 enum omp_clause_code ccode, tree *c, tree *decl,
7642 tree *type, tree *next)
7643 {
7644 for (; *c; *c = omp_find_clause (OMP_CLAUSE_CHAIN (*c), ccode))
7645 {
7646 if (ccode == OMP_CLAUSE_REDUCTION
7647 && code != OMP_TASKLOOP
7648 && !OMP_CLAUSE_REDUCTION_TASK (*c))
7649 continue;
7650 *decl = OMP_CLAUSE_DECL (*c);
7651 *type = TREE_TYPE (*decl);
7652 if (TREE_CODE (*decl) == MEM_REF)
7653 {
7654 if (pass != 1)
7655 continue;
7656 }
7657 else
7658 {
7659 if (omp_is_reference (*decl))
7660 *type = TREE_TYPE (*type);
7661 if (pass != (!TREE_CONSTANT (TYPE_SIZE_UNIT (*type))))
7662 continue;
7663 }
7664 *next = omp_find_clause (OMP_CLAUSE_CHAIN (*c), ccode);
7665 return true;
7666 }
7667 *decl = NULL_TREE;
7668 *type = NULL_TREE;
7669 *next = NULL_TREE;
7670 return false;
7671 }
7672
7673 /* Lower task_reduction clauses and, unless CODE is OMP_TASKGROUP,
7674 reduction clauses with the task modifier. Register their mapping in the
7675 START sequence; reduce and unregister them in the END sequence. */
7676
7677 static void
7678 lower_omp_task_reductions (omp_context *ctx, enum tree_code code, tree clauses,
7679 gimple_seq *start, gimple_seq *end)
7680 {
7681 enum omp_clause_code ccode
7682 = (code == OMP_TASKGROUP
7683 ? OMP_CLAUSE_TASK_REDUCTION : OMP_CLAUSE_REDUCTION);
7684 tree cancellable = NULL_TREE;
7685 clauses = omp_task_reductions_find_first (clauses, code, ccode);
7686 if (clauses == NULL_TREE)
7687 return;
7688 if (code == OMP_FOR || code == OMP_SECTIONS)
7689 {
7690 for (omp_context *outer = ctx->outer; outer; outer = outer->outer)
7691 if (gimple_code (outer->stmt) == GIMPLE_OMP_PARALLEL
7692 && outer->cancellable)
7693 {
7694 cancellable = error_mark_node;
7695 break;
7696 }
7697 else if (gimple_code (outer->stmt) != GIMPLE_OMP_TASKGROUP)
7698 break;
7699 }
7700 tree record_type = lang_hooks.types.make_type (RECORD_TYPE);
7701 tree *last = &TYPE_FIELDS (record_type);
7702 unsigned cnt = 0;
7703 if (cancellable)
7704 {
7705 tree field = build_decl (UNKNOWN_LOCATION, FIELD_DECL, NULL_TREE,
7706 ptr_type_node);
7707 tree ifield = build_decl (UNKNOWN_LOCATION, FIELD_DECL, NULL_TREE,
7708 integer_type_node);
7709 *last = field;
7710 DECL_CHAIN (field) = ifield;
7711 last = &DECL_CHAIN (ifield);
7712 DECL_CONTEXT (field) = record_type;
7713 if (TYPE_ALIGN (record_type) < DECL_ALIGN (field))
7714 SET_TYPE_ALIGN (record_type, DECL_ALIGN (field));
7715 DECL_CONTEXT (ifield) = record_type;
7716 if (TYPE_ALIGN (record_type) < DECL_ALIGN (ifield))
7717 SET_TYPE_ALIGN (record_type, DECL_ALIGN (ifield));
7718 }
7719 for (int pass = 0; pass < 2; pass++)
7720 {
7721 tree decl, type, next;
7722 for (tree c = clauses;
7723 omp_task_reduction_iterate (pass, code, ccode,
7724 &c, &decl, &type, &next); c = next)
7725 {
7726 ++cnt;
7727 tree new_type = type;
7728 if (ctx->outer)
7729 new_type = remap_type (type, &ctx->outer->cb);
7730 tree field
7731 = build_decl (OMP_CLAUSE_LOCATION (c), FIELD_DECL,
7732 DECL_P (decl) ? DECL_NAME (decl) : NULL_TREE,
7733 new_type);
7734 if (DECL_P (decl) && type == TREE_TYPE (decl))
7735 {
7736 SET_DECL_ALIGN (field, DECL_ALIGN (decl));
7737 DECL_USER_ALIGN (field) = DECL_USER_ALIGN (decl);
7738 TREE_THIS_VOLATILE (field) = TREE_THIS_VOLATILE (decl);
7739 }
7740 else
7741 SET_DECL_ALIGN (field, TYPE_ALIGN (type));
7742 DECL_CONTEXT (field) = record_type;
7743 if (TYPE_ALIGN (record_type) < DECL_ALIGN (field))
7744 SET_TYPE_ALIGN (record_type, DECL_ALIGN (field));
7745 *last = field;
7746 last = &DECL_CHAIN (field);
7747 tree bfield
7748 = build_decl (OMP_CLAUSE_LOCATION (c), FIELD_DECL, NULL_TREE,
7749 boolean_type_node);
7750 DECL_CONTEXT (bfield) = record_type;
7751 if (TYPE_ALIGN (record_type) < DECL_ALIGN (bfield))
7752 SET_TYPE_ALIGN (record_type, DECL_ALIGN (bfield));
7753 *last = bfield;
7754 last = &DECL_CHAIN (bfield);
7755 }
7756 }
7757 *last = NULL_TREE;
7758 layout_type (record_type);
7759
7760 /* Build up an array with which all the reductions are registered with
7761 the runtime and deregistered at the end. Format documented in libgomp/task.c. */
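  /* As read back from the stores below (a sketch, not the authoritative
     format description): slot 0 holds the reduction count, slot 1 the
     cache-line padded per-thread chunk size, slot 2 the alignment,
     slots 3 and 4 are seeded with -1 and 0 for the runtime's use, and
     each reduction occupies 3 slots starting at 7 + cnt * 3: the address
     of the original variable, the byte offset of its field within the
     chunk, and a third slot filled in elsewhere.  */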
7762 tree atype = build_array_type_nelts (pointer_sized_int_node, 7 + cnt * 3);
7763 tree avar = create_tmp_var_raw (atype);
7764 gimple_add_tmp_var (avar);
7765 TREE_ADDRESSABLE (avar) = 1;
7766 tree r = build4 (ARRAY_REF, pointer_sized_int_node, avar, size_zero_node,
7767 NULL_TREE, NULL_TREE);
7768 tree t = build_int_cst (pointer_sized_int_node, cnt);
7769 gimple_seq_add_stmt (start, gimple_build_assign (r, t));
7770 gimple_seq seq = NULL;
7771 tree sz = fold_convert (pointer_sized_int_node,
7772 TYPE_SIZE_UNIT (record_type));
7773 int cachesz = 64;
7774 sz = fold_build2 (PLUS_EXPR, pointer_sized_int_node, sz,
7775 build_int_cst (pointer_sized_int_node, cachesz - 1));
7776 sz = fold_build2 (BIT_AND_EXPR, pointer_sized_int_node, sz,
7777 build_int_cst (pointer_sized_int_node, ~(cachesz - 1)));
7778 ctx->task_reductions.create (1 + cnt);
7779 ctx->task_reduction_map = new hash_map<tree, unsigned>;
7780 ctx->task_reductions.quick_push (TREE_CODE (sz) == INTEGER_CST
7781 ? sz : NULL_TREE);
7782 sz = force_gimple_operand (sz, &seq, true, NULL_TREE);
7783 gimple_seq_add_seq (start, seq);
7784 r = build4 (ARRAY_REF, pointer_sized_int_node, avar, size_one_node,
7785 NULL_TREE, NULL_TREE);
7786 gimple_seq_add_stmt (start, gimple_build_assign (r, sz));
7787 r = build4 (ARRAY_REF, pointer_sized_int_node, avar, size_int (2),
7788 NULL_TREE, NULL_TREE);
7789 t = build_int_cst (pointer_sized_int_node,
7790 MAX (TYPE_ALIGN_UNIT (record_type), (unsigned) cachesz));
7791 gimple_seq_add_stmt (start, gimple_build_assign (r, t));
7792 r = build4 (ARRAY_REF, pointer_sized_int_node, avar, size_int (3),
7793 NULL_TREE, NULL_TREE);
7794 t = build_int_cst (pointer_sized_int_node, -1);
7795 gimple_seq_add_stmt (start, gimple_build_assign (r, t));
7796 r = build4 (ARRAY_REF, pointer_sized_int_node, avar, size_int (4),
7797 NULL_TREE, NULL_TREE);
7798 t = build_int_cst (pointer_sized_int_node, 0);
7799 gimple_seq_add_stmt (start, gimple_build_assign (r, t));
7800
7801 /* In the END sequence, build a loop iterating from 0 to
7802 omp_get_num_threads () - 1 which for each task reduction checks a bool
7803 right after the private variable within that thread's chunk; if the bool
7804 is clear, it hasn't been initialized and thus isn't going to be reduced
7805 or destructed; otherwise reduce and destruct it. */
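  /* A sketch of that cleanup loop, in C terms:

       for (idx = 0; idx != num_thr_sz; idx++, data += sz)
         for (each reduction clause C)
           if (<bool field after C's private var in the chunk at DATA>)
             <reduce C's private var into the original, then destruct it>;  */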
7806 tree idx = create_tmp_var (size_type_node);
7807 gimple_seq_add_stmt (end, gimple_build_assign (idx, size_zero_node));
7808 tree num_thr_sz = create_tmp_var (size_type_node);
7809 tree lab1 = create_artificial_label (UNKNOWN_LOCATION);
7810 tree lab2 = create_artificial_label (UNKNOWN_LOCATION);
7811 tree lab3 = NULL_TREE;
7812 gimple *g;
7813 if (code == OMP_FOR || code == OMP_SECTIONS)
7814 {
7815 /* For worksharing constructs, only perform this in the master thread,
7816 with the exception of cancelled implicit barriers, where only the
7817 current thread is handled. */
7818 tree lab4 = create_artificial_label (UNKNOWN_LOCATION);
7819 t = builtin_decl_explicit (BUILT_IN_OMP_GET_THREAD_NUM);
7820 tree thr_num = create_tmp_var (integer_type_node);
7821 g = gimple_build_call (t, 0);
7822 gimple_call_set_lhs (g, thr_num);
7823 gimple_seq_add_stmt (end, g);
7824 if (cancellable)
7825 {
7826 tree c;
7827 tree lab5 = create_artificial_label (UNKNOWN_LOCATION);
7828 tree lab6 = create_artificial_label (UNKNOWN_LOCATION);
7829 lab3 = create_artificial_label (UNKNOWN_LOCATION);
7830 if (code == OMP_FOR)
7831 c = gimple_omp_for_clauses (ctx->stmt);
7832 else /* if (code == OMP_SECTIONS) */
7833 c = gimple_omp_sections_clauses (ctx->stmt);
7834 c = OMP_CLAUSE_DECL (omp_find_clause (c, OMP_CLAUSE__REDUCTEMP_));
7835 cancellable = c;
7836 g = gimple_build_cond (NE_EXPR, c, build_zero_cst (TREE_TYPE (c)),
7837 lab5, lab6);
7838 gimple_seq_add_stmt (end, g);
7839 gimple_seq_add_stmt (end, gimple_build_label (lab5));
7840 g = gimple_build_assign (idx, NOP_EXPR, thr_num);
7841 gimple_seq_add_stmt (end, g);
7842 g = gimple_build_assign (num_thr_sz, PLUS_EXPR, idx,
7843 build_one_cst (TREE_TYPE (idx)));
7844 gimple_seq_add_stmt (end, g);
7845 gimple_seq_add_stmt (end, gimple_build_goto (lab3));
7846 gimple_seq_add_stmt (end, gimple_build_label (lab6));
7847 }
7848 g = gimple_build_cond (NE_EXPR, thr_num, integer_zero_node, lab2, lab4);
7849 gimple_seq_add_stmt (end, g);
7850 gimple_seq_add_stmt (end, gimple_build_label (lab4));
7851 }
7852 if (code != OMP_PARALLEL)
7853 {
7854 t = builtin_decl_explicit (BUILT_IN_OMP_GET_NUM_THREADS);
7855 tree num_thr = create_tmp_var (integer_type_node);
7856 g = gimple_build_call (t, 0);
7857 gimple_call_set_lhs (g, num_thr);
7858 gimple_seq_add_stmt (end, g);
7859 g = gimple_build_assign (num_thr_sz, NOP_EXPR, num_thr);
7860 gimple_seq_add_stmt (end, g);
7861 if (cancellable)
7862 gimple_seq_add_stmt (end, gimple_build_label (lab3));
7863 }
7864 else
7865 {
7866 tree c = omp_find_clause (gimple_omp_parallel_clauses (ctx->stmt),
7867 OMP_CLAUSE__REDUCTEMP_);
7868 t = fold_convert (pointer_sized_int_node, OMP_CLAUSE_DECL (c));
7869 t = fold_convert (size_type_node, t);
7870 gimplify_assign (num_thr_sz, t, end);
7871 }
7872 t = build4 (ARRAY_REF, pointer_sized_int_node, avar, size_int (2),
7873 NULL_TREE, NULL_TREE);
7874 tree data = create_tmp_var (pointer_sized_int_node);
7875 gimple_seq_add_stmt (end, gimple_build_assign (data, t));
7876 gimple_seq_add_stmt (end, gimple_build_label (lab1));
7877 tree ptr;
7878 if (TREE_CODE (TYPE_SIZE_UNIT (record_type)) == INTEGER_CST)
7879 ptr = create_tmp_var (build_pointer_type (record_type));
7880 else
7881 ptr = create_tmp_var (ptr_type_node);
7882 gimple_seq_add_stmt (end, gimple_build_assign (ptr, NOP_EXPR, data));
7883
7884 tree field = TYPE_FIELDS (record_type);
7885 cnt = 0;
7886 if (cancellable)
7887 field = DECL_CHAIN (DECL_CHAIN (field));
7888 for (int pass = 0; pass < 2; pass++)
7889 {
7890 tree decl, type, next;
7891 for (tree c = clauses;
7892 omp_task_reduction_iterate (pass, code, ccode,
7893 &c, &decl, &type, &next); c = next)
7894 {
7895 tree var = decl, ref;
7896 if (TREE_CODE (decl) == MEM_REF)
7897 {
7898 var = TREE_OPERAND (var, 0);
7899 if (TREE_CODE (var) == POINTER_PLUS_EXPR)
7900 var = TREE_OPERAND (var, 0);
7901 tree v = var;
7902 if (TREE_CODE (var) == ADDR_EXPR)
7903 var = TREE_OPERAND (var, 0);
7904 else if (TREE_CODE (var) == INDIRECT_REF)
7905 var = TREE_OPERAND (var, 0);
7906 tree orig_var = var;
7907 if (is_variable_sized (var))
7908 {
7909 gcc_assert (DECL_HAS_VALUE_EXPR_P (var));
7910 var = DECL_VALUE_EXPR (var);
7911 gcc_assert (TREE_CODE (var) == INDIRECT_REF);
7912 var = TREE_OPERAND (var, 0);
7913 gcc_assert (DECL_P (var));
7914 }
7915 t = ref = maybe_lookup_decl_in_outer_ctx (var, ctx);
7916 if (orig_var != var)
7917 gcc_assert (TREE_CODE (v) == ADDR_EXPR);
7918 else if (TREE_CODE (v) == ADDR_EXPR)
7919 t = build_fold_addr_expr (t);
7920 else if (TREE_CODE (v) == INDIRECT_REF)
7921 t = build_fold_indirect_ref (t);
7922 if (TREE_CODE (TREE_OPERAND (decl, 0)) == POINTER_PLUS_EXPR)
7923 {
7924 tree b = TREE_OPERAND (TREE_OPERAND (decl, 0), 1);
7925 b = maybe_lookup_decl_in_outer_ctx (b, ctx);
7926 t = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (t), t, b);
7927 }
7928 if (!integer_zerop (TREE_OPERAND (decl, 1)))
7929 t = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (t), t,
7930 fold_convert (size_type_node,
7931 TREE_OPERAND (decl, 1)));
7932 }
7933 else
7934 {
7935 t = ref = maybe_lookup_decl_in_outer_ctx (var, ctx);
7936 if (!omp_is_reference (decl))
7937 t = build_fold_addr_expr (t);
7938 }
7939 t = fold_convert (pointer_sized_int_node, t);
7940 seq = NULL;
7941 t = force_gimple_operand (t, &seq, true, NULL_TREE);
7942 gimple_seq_add_seq (start, seq);
7943 r = build4 (ARRAY_REF, pointer_sized_int_node, avar,
7944 size_int (7 + cnt * 3), NULL_TREE, NULL_TREE);
7945 gimple_seq_add_stmt (start, gimple_build_assign (r, t));
7946 t = unshare_expr (byte_position (field));
7947 t = fold_convert (pointer_sized_int_node, t);
7948 ctx->task_reduction_map->put (c, cnt);
7949 ctx->task_reductions.quick_push (TREE_CODE (t) == INTEGER_CST
7950 ? t : NULL_TREE);
7951 seq = NULL;
7952 t = force_gimple_operand (t, &seq, true, NULL_TREE);
7953 gimple_seq_add_seq (start, seq);
7954 r = build4 (ARRAY_REF, pointer_sized_int_node, avar,
7955 size_int (7 + cnt * 3 + 1), NULL_TREE, NULL_TREE);
7956 gimple_seq_add_stmt (start, gimple_build_assign (r, t));
7957
7958 tree bfield = DECL_CHAIN (field);
7959 tree cond;
7960 if (code == OMP_PARALLEL || code == OMP_FOR || code == OMP_SECTIONS)
7961 /* In parallel or worksharing all threads unconditionally
7962 initialize all their task reduction private variables. */
7963 cond = boolean_true_node;
7964 else if (TREE_TYPE (ptr) == ptr_type_node)
7965 {
7966 cond = build2 (POINTER_PLUS_EXPR, ptr_type_node, ptr,
7967 unshare_expr (byte_position (bfield)));
7968 seq = NULL;
7969 cond = force_gimple_operand (cond, &seq, true, NULL_TREE);
7970 gimple_seq_add_seq (end, seq);
7971 tree pbool = build_pointer_type (TREE_TYPE (bfield));
7972 cond = build2 (MEM_REF, TREE_TYPE (bfield), cond,
7973 build_int_cst (pbool, 0));
7974 }
7975 else
7976 cond = build3 (COMPONENT_REF, TREE_TYPE (bfield),
7977 build_simple_mem_ref (ptr), bfield, NULL_TREE);
7978 tree lab3 = create_artificial_label (UNKNOWN_LOCATION);
7979 tree lab4 = create_artificial_label (UNKNOWN_LOCATION);
7980 tree condv = create_tmp_var (boolean_type_node);
7981 gimple_seq_add_stmt (end, gimple_build_assign (condv, cond));
7982 g = gimple_build_cond (NE_EXPR, condv, boolean_false_node,
7983 lab3, lab4);
7984 gimple_seq_add_stmt (end, g);
7985 gimple_seq_add_stmt (end, gimple_build_label (lab3));
7986 if (cancellable && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) == NULL_TREE)
7987 {
7988 /* If this reduction doesn't need destruction and parallel
7989 has been cancelled, there is nothing to do for this
7990 reduction, so jump around the merge operation. */
7991 tree lab5 = create_artificial_label (UNKNOWN_LOCATION);
7992 g = gimple_build_cond (NE_EXPR, cancellable,
7993 build_zero_cst (TREE_TYPE (cancellable)),
7994 lab4, lab5);
7995 gimple_seq_add_stmt (end, g);
7996 gimple_seq_add_stmt (end, gimple_build_label (lab5));
7997 }
7998
7999 tree new_var;
8000 if (TREE_TYPE (ptr) == ptr_type_node)
8001 {
8002 new_var = build2 (POINTER_PLUS_EXPR, ptr_type_node, ptr,
8003 unshare_expr (byte_position (field)));
8004 seq = NULL;
8005 new_var = force_gimple_operand (new_var, &seq, true, NULL_TREE);
8006 gimple_seq_add_seq (end, seq);
8007 tree pbool = build_pointer_type (TREE_TYPE (field));
8008 new_var = build2 (MEM_REF, TREE_TYPE (field), new_var,
8009 build_int_cst (pbool, 0));
8010 }
8011 else
8012 new_var = build3 (COMPONENT_REF, TREE_TYPE (field),
8013 build_simple_mem_ref (ptr), field, NULL_TREE);
8014
8015 enum tree_code rcode = OMP_CLAUSE_REDUCTION_CODE (c);
8016 if (TREE_CODE (decl) != MEM_REF && omp_is_reference (decl))
8017 ref = build_simple_mem_ref (ref);
8018 /* reduction(-:var) sums up the partial results, so it acts
8019 identically to reduction(+:var). */
8020 if (rcode == MINUS_EXPR)
8021 rcode = PLUS_EXPR;
8022 if (TREE_CODE (decl) == MEM_REF)
8023 {
8024 tree type = TREE_TYPE (new_var);
8025 tree v = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
8026 tree i = create_tmp_var (TREE_TYPE (v));
8027 tree ptype = build_pointer_type (TREE_TYPE (type));
8028 if (DECL_P (v))
8029 {
8030 v = maybe_lookup_decl_in_outer_ctx (v, ctx);
8031 tree vv = create_tmp_var (TREE_TYPE (v));
8032 gimplify_assign (vv, v, start);
8033 v = vv;
8034 }
8035 ref = build4 (ARRAY_REF, pointer_sized_int_node, avar,
8036 size_int (7 + cnt * 3), NULL_TREE, NULL_TREE);
8037 new_var = build_fold_addr_expr (new_var);
8038 new_var = fold_convert (ptype, new_var);
8039 ref = fold_convert (ptype, ref);
8040 tree m = create_tmp_var (ptype);
8041 gimplify_assign (m, new_var, end);
8042 new_var = m;
8043 m = create_tmp_var (ptype);
8044 gimplify_assign (m, ref, end);
8045 ref = m;
8046 gimplify_assign (i, build_int_cst (TREE_TYPE (v), 0), end);
8047 tree body = create_artificial_label (UNKNOWN_LOCATION);
8048 tree endl = create_artificial_label (UNKNOWN_LOCATION);
8049 gimple_seq_add_stmt (end, gimple_build_label (body));
8050 tree priv = build_simple_mem_ref (new_var);
8051 tree out = build_simple_mem_ref (ref);
8052 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
8053 {
8054 tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
8055 tree decl_placeholder
8056 = OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c);
8057 tree lab6 = NULL_TREE;
8058 if (cancellable)
8059 {
8060 /* If this reduction needs destruction and parallel
8061 has been cancelled, jump around the merge operation
8062 to the destruction. */
8063 tree lab5 = create_artificial_label (UNKNOWN_LOCATION);
8064 lab6 = create_artificial_label (UNKNOWN_LOCATION);
8065 tree zero = build_zero_cst (TREE_TYPE (cancellable));
8066 g = gimple_build_cond (NE_EXPR, cancellable, zero,
8067 lab6, lab5);
8068 gimple_seq_add_stmt (end, g);
8069 gimple_seq_add_stmt (end, gimple_build_label (lab5));
8070 }
8071 SET_DECL_VALUE_EXPR (placeholder, out);
8072 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
8073 SET_DECL_VALUE_EXPR (decl_placeholder, priv);
8074 DECL_HAS_VALUE_EXPR_P (decl_placeholder) = 1;
8075 lower_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c), ctx);
8076 gimple_seq_add_seq (end,
8077 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c));
8078 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
8079 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_TASK_REDUCTION)
8080 {
8081 OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) = NULL;
8082 OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c) = NULL;
8083 }
8084 if (cancellable)
8085 gimple_seq_add_stmt (end, gimple_build_label (lab6));
8086 tree x = lang_hooks.decls.omp_clause_dtor (c, priv);
8087 if (x)
8088 {
8089 gimple_seq tseq = NULL;
8090 gimplify_stmt (&x, &tseq);
8091 gimple_seq_add_seq (end, tseq);
8092 }
8093 }
8094 else
8095 {
8096 tree x = build2 (rcode, TREE_TYPE (out), out, priv);
8097 out = unshare_expr (out);
8098 gimplify_assign (out, x, end);
8099 }
8100 gimple *g
8101 = gimple_build_assign (new_var, POINTER_PLUS_EXPR, new_var,
8102 TYPE_SIZE_UNIT (TREE_TYPE (type)));
8103 gimple_seq_add_stmt (end, g);
8104 g = gimple_build_assign (ref, POINTER_PLUS_EXPR, ref,
8105 TYPE_SIZE_UNIT (TREE_TYPE (type)));
8106 gimple_seq_add_stmt (end, g);
8107 g = gimple_build_assign (i, PLUS_EXPR, i,
8108 build_int_cst (TREE_TYPE (i), 1));
8109 gimple_seq_add_stmt (end, g);
8110 g = gimple_build_cond (LE_EXPR, i, v, body, endl);
8111 gimple_seq_add_stmt (end, g);
8112 gimple_seq_add_stmt (end, gimple_build_label (endl));
8113 }
8114 else if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
8115 {
8116 tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
8117 tree oldv = NULL_TREE;
8118 tree lab6 = NULL_TREE;
8119 if (cancellable)
8120 {
8121 /* If this reduction needs destruction and parallel
8122 has been cancelled, jump around the merge operation
8123 to the destruction. */
8124 tree lab5 = create_artificial_label (UNKNOWN_LOCATION);
8125 lab6 = create_artificial_label (UNKNOWN_LOCATION);
8126 tree zero = build_zero_cst (TREE_TYPE (cancellable));
8127 g = gimple_build_cond (NE_EXPR, cancellable, zero,
8128 lab6, lab5);
8129 gimple_seq_add_stmt (end, g);
8130 gimple_seq_add_stmt (end, gimple_build_label (lab5));
8131 }
8132 if (omp_is_reference (decl)
8133 && !useless_type_conversion_p (TREE_TYPE (placeholder),
8134 TREE_TYPE (ref)))
8135 ref = build_fold_addr_expr_loc (OMP_CLAUSE_LOCATION (c), ref);
8136 ref = build_fold_addr_expr_loc (OMP_CLAUSE_LOCATION (c), ref);
8137 tree refv = create_tmp_var (TREE_TYPE (ref));
8138 gimplify_assign (refv, ref, end);
8139 ref = build_simple_mem_ref_loc (OMP_CLAUSE_LOCATION (c), refv);
8140 SET_DECL_VALUE_EXPR (placeholder, ref);
8141 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
8142 tree d = maybe_lookup_decl (decl, ctx);
8143 gcc_assert (d);
8144 if (DECL_HAS_VALUE_EXPR_P (d))
8145 oldv = DECL_VALUE_EXPR (d);
8146 if (omp_is_reference (var))
8147 {
8148 tree v = fold_convert (TREE_TYPE (d),
8149 build_fold_addr_expr (new_var));
8150 SET_DECL_VALUE_EXPR (d, v);
8151 }
8152 else
8153 SET_DECL_VALUE_EXPR (d, new_var);
8154 DECL_HAS_VALUE_EXPR_P (d) = 1;
8155 lower_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c), ctx);
8156 if (oldv)
8157 SET_DECL_VALUE_EXPR (d, oldv);
8158 else
8159 {
8160 SET_DECL_VALUE_EXPR (d, NULL_TREE);
8161 DECL_HAS_VALUE_EXPR_P (d) = 0;
8162 }
8163 gimple_seq_add_seq (end, OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c));
8164 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
8165 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_TASK_REDUCTION)
8166 OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) = NULL;
8167 if (cancellable)
8168 gimple_seq_add_stmt (end, gimple_build_label (lab6));
8169 tree x = lang_hooks.decls.omp_clause_dtor (c, new_var);
8170 if (x)
8171 {
8172 gimple_seq tseq = NULL;
8173 gimplify_stmt (&x, &tseq);
8174 gimple_seq_add_seq (end, tseq);
8175 }
8176 }
8177 else
8178 {
8179 tree x = build2 (rcode, TREE_TYPE (ref), ref, new_var);
8180 ref = unshare_expr (ref);
8181 gimplify_assign (ref, x, end);
8182 }
8183 gimple_seq_add_stmt (end, gimple_build_label (lab4));
8184 ++cnt;
8185 field = DECL_CHAIN (bfield);
8186 }
8187 }
8188
8189 if (code == OMP_TASKGROUP)
8190 {
8191 t = builtin_decl_explicit (BUILT_IN_GOMP_TASKGROUP_REDUCTION_REGISTER);
8192 g = gimple_build_call (t, 1, build_fold_addr_expr (avar));
8193 gimple_seq_add_stmt (start, g);
8194 }
8195 else
8196 {
8197 tree c;
8198 if (code == OMP_FOR)
8199 c = gimple_omp_for_clauses (ctx->stmt);
8200 else if (code == OMP_SECTIONS)
8201 c = gimple_omp_sections_clauses (ctx->stmt);
8202 else
8203 c = gimple_omp_taskreg_clauses (ctx->stmt);
8204 c = omp_find_clause (c, OMP_CLAUSE__REDUCTEMP_);
8205 t = fold_convert (TREE_TYPE (OMP_CLAUSE_DECL (c)),
8206 build_fold_addr_expr (avar));
8207 gimplify_assign (OMP_CLAUSE_DECL (c), t, start);
8208 }
8209
8210 gimple_seq_add_stmt (end, gimple_build_assign (data, PLUS_EXPR, data, sz));
8211 gimple_seq_add_stmt (end, gimple_build_assign (idx, PLUS_EXPR, idx,
8212 size_one_node));
8213 g = gimple_build_cond (NE_EXPR, idx, num_thr_sz, lab1, lab2);
8214 gimple_seq_add_stmt (end, g);
8215 gimple_seq_add_stmt (end, gimple_build_label (lab2));
8216 if (code == OMP_FOR || code == OMP_SECTIONS)
8217 {
8218 enum built_in_function bfn
8219 = BUILT_IN_GOMP_WORKSHARE_TASK_REDUCTION_UNREGISTER;
8220 t = builtin_decl_explicit (bfn);
8221 tree c_bool_type = TREE_VALUE (TYPE_ARG_TYPES (TREE_TYPE (t)));
8222 tree arg;
8223 if (cancellable)
8224 {
8225 arg = create_tmp_var (c_bool_type);
8226 gimple_seq_add_stmt (end, gimple_build_assign (arg, NOP_EXPR,
8227 cancellable));
8228 }
8229 else
8230 arg = build_int_cst (c_bool_type, 0);
8231 g = gimple_build_call (t, 1, arg);
8232 }
8233 else
8234 {
8235 t = builtin_decl_explicit (BUILT_IN_GOMP_TASKGROUP_REDUCTION_UNREGISTER);
8236 g = gimple_build_call (t, 1, build_fold_addr_expr (avar));
8237 }
8238 gimple_seq_add_stmt (end, g);
8239 t = build_constructor (atype, NULL);
8240 TREE_THIS_VOLATILE (t) = 1;
8241 gimple_seq_add_stmt (end, gimple_build_assign (avar, t));
8242 }
8243
8244 /* Expand code for an OpenMP taskgroup directive. */
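/* A minimal sketch of the expansion:

       GOMP_taskgroup_start ();
       <register task reductions, if any>
       BODY;
       GIMPLE_OMP_RETURN   <- paired with GOMP_taskgroup_end () when the
                              region is expanded
       <unregister task reductions>  */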
8245
8246 static void
8247 lower_omp_taskgroup (gimple_stmt_iterator *gsi_p, omp_context *ctx)
8248 {
8249 gimple *stmt = gsi_stmt (*gsi_p);
8250 gcall *x;
8251 gbind *bind;
8252 gimple_seq dseq = NULL;
8253 tree block = make_node (BLOCK);
8254
8255 bind = gimple_build_bind (NULL, NULL, block);
8256 gsi_replace (gsi_p, bind, true);
8257 gimple_bind_add_stmt (bind, stmt);
8258
8259 push_gimplify_context ();
8260
8261 x = gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_TASKGROUP_START),
8262 0);
8263 gimple_bind_add_stmt (bind, x);
8264
8265 lower_omp_task_reductions (ctx, OMP_TASKGROUP,
8266 gimple_omp_taskgroup_clauses (stmt),
8267 gimple_bind_body_ptr (bind), &dseq);
8268
8269 lower_omp (gimple_omp_body_ptr (stmt), ctx);
8270 gimple_bind_add_seq (bind, gimple_omp_body (stmt));
8271 gimple_omp_set_body (stmt, NULL);
8272
8273 gimple_bind_add_stmt (bind, gimple_build_omp_return (true));
8274 gimple_bind_add_seq (bind, dseq);
8275
8276 pop_gimplify_context (bind);
8277
8278 gimple_bind_append_vars (bind, ctx->block_vars);
8279 BLOCK_VARS (block) = ctx->block_vars;
8280 }
8281
8282
8283 /* Fold the OMP_ORDERED_CLAUSES for the OMP_ORDERED in STMT if possible. */
8284
8285 static void
8286 lower_omp_ordered_clauses (gimple_stmt_iterator *gsi_p, gomp_ordered *ord_stmt,
8287 omp_context *ctx)
8288 {
8289 struct omp_for_data fd;
8290 if (!ctx->outer || gimple_code (ctx->outer->stmt) != GIMPLE_OMP_FOR)
8291 return;
8292
8293 unsigned int len = gimple_omp_for_collapse (ctx->outer->stmt);
8294 struct omp_for_data_loop *loops = XALLOCAVEC (struct omp_for_data_loop, len);
8295 omp_extract_for_data (as_a <gomp_for *> (ctx->outer->stmt), &fd, loops);
8296 if (!fd.ordered)
8297 return;
8298
8299 tree *list_p = gimple_omp_ordered_clauses_ptr (ord_stmt);
8300 tree c = gimple_omp_ordered_clauses (ord_stmt);
8301 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND
8302 && OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SINK)
8303 {
8304 /* Merge depend clauses from multiple adjacent
8305 #pragma omp ordered depend(sink:...) constructs
8306 into one #pragma omp ordered depend(sink:...), so that
8307 we can optimize them together. */
8308 gimple_stmt_iterator gsi = *gsi_p;
8309 gsi_next (&gsi);
8310 while (!gsi_end_p (gsi))
8311 {
8312 gimple *stmt = gsi_stmt (gsi);
8313 if (is_gimple_debug (stmt)
8314 || gimple_code (stmt) == GIMPLE_NOP)
8315 {
8316 gsi_next (&gsi);
8317 continue;
8318 }
8319 if (gimple_code (stmt) != GIMPLE_OMP_ORDERED)
8320 break;
8321 gomp_ordered *ord_stmt2 = as_a <gomp_ordered *> (stmt);
8322 c = gimple_omp_ordered_clauses (ord_stmt2);
8323 if (c == NULL_TREE
8324 || OMP_CLAUSE_CODE (c) != OMP_CLAUSE_DEPEND
8325 || OMP_CLAUSE_DEPEND_KIND (c) != OMP_CLAUSE_DEPEND_SINK)
8326 break;
8327 while (*list_p)
8328 list_p = &OMP_CLAUSE_CHAIN (*list_p);
8329 *list_p = c;
8330 gsi_remove (&gsi, true);
8331 }
8332 }
8333
8334 /* Canonicalize sink dependence clauses into one folded clause if
8335 possible.
8336
8337 The basic algorithm is to create a sink vector whose first
8338 element is the GCD of all the first elements, and whose remaining
8339 elements are the minimum of the subsequent columns.
8340
8341 We ignore dependence vectors whose first element is zero because
8342 such dependencies are known to be executed by the same thread.
8343
8344 We take into account the direction of the loop, so a minimum
8345 becomes a maximum if the loop is iterating forwards. We also
8346 ignore sink clauses where the loop direction is unknown, or where
8347 the offsets are clearly invalid because they are not a multiple
8348 of the loop increment.
8349
8350 For example:
8351
8352 #pragma omp for ordered(2)
8353 for (i=0; i < N; ++i)
8354 for (j=0; j < M; ++j)
8355 {
8356 #pragma omp ordered \
8357 depend(sink:i-8,j-2) \
8358 depend(sink:i,j-1) \ // Completely ignored because i+0.
8359 depend(sink:i-4,j-3) \
8360 depend(sink:i-6,j-4)
8361 #pragma omp ordered depend(source)
8362 }
8363
8364 Folded clause is:
8365
8366 depend(sink:-gcd(8,4,6),-min(2,3,4))
8367 -or-
8368 depend(sink:-2,-2)
8369 */
8370
8371 /* FIXME: Computing GCD's where the first element is zero is
8372 non-trivial in the presence of collapsed loops. Do this later. */
8373 if (fd.collapse > 1)
8374 return;
8375
8376 wide_int *folded_deps = XALLOCAVEC (wide_int, 2 * len - 1);
8377
8378 /* wide_int is not a POD so it must be default-constructed. */
8379 for (unsigned i = 0; i != 2 * len - 1; ++i)
8380 new (static_cast<void*>(folded_deps + i)) wide_int ();
8381
8382 tree folded_dep = NULL_TREE;
8383 /* TRUE if the first dimension's offset is negative. */
8384 bool neg_offset_p = false;
8385
8386 list_p = gimple_omp_ordered_clauses_ptr (ord_stmt);
8387 unsigned int i;
8388 while ((c = *list_p) != NULL)
8389 {
8390 bool remove = false;
8391
8392 gcc_assert (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND);
8393 if (OMP_CLAUSE_DEPEND_KIND (c) != OMP_CLAUSE_DEPEND_SINK)
8394 goto next_ordered_clause;
8395
8396 tree vec;
8397 for (vec = OMP_CLAUSE_DECL (c), i = 0;
8398 vec && TREE_CODE (vec) == TREE_LIST;
8399 vec = TREE_CHAIN (vec), ++i)
8400 {
8401 gcc_assert (i < len);
8402
8403 /* omp_extract_for_data has canonicalized the condition. */
8404 gcc_assert (fd.loops[i].cond_code == LT_EXPR
8405 || fd.loops[i].cond_code == GT_EXPR);
8406 bool forward = fd.loops[i].cond_code == LT_EXPR;
8407 bool maybe_lexically_later = true;
8408
8409 /* While the committee makes up its mind, bail if we have any
8410 non-constant steps. */
8411 if (TREE_CODE (fd.loops[i].step) != INTEGER_CST)
8412 goto lower_omp_ordered_ret;
8413
8414 tree itype = TREE_TYPE (TREE_VALUE (vec));
8415 if (POINTER_TYPE_P (itype))
8416 itype = sizetype;
8417 wide_int offset = wide_int::from (wi::to_wide (TREE_PURPOSE (vec)),
8418 TYPE_PRECISION (itype),
8419 TYPE_SIGN (itype));
8420
8421 /* Ignore invalid offsets that are not multiples of the step. */
8422 if (!wi::multiple_of_p (wi::abs (offset),
8423 wi::abs (wi::to_wide (fd.loops[i].step)),
8424 UNSIGNED))
8425 {
8426 warning_at (OMP_CLAUSE_LOCATION (c), 0,
8427 "ignoring sink clause with offset that is not "
8428 "a multiple of the loop step");
8429 remove = true;
8430 goto next_ordered_clause;
8431 }
8432
8433 /* Calculate the first dimension. The first dimension of
8434 the folded dependency vector is the GCD of the first
8435 elements, while ignoring any first elements whose offset
8436 is 0. */
8437 if (i == 0)
8438 {
8439 /* Ignore dependence vectors whose first dimension is 0. */
8440 if (offset == 0)
8441 {
8442 remove = true;
8443 goto next_ordered_clause;
8444 }
8445 else
8446 {
8447 if (!TYPE_UNSIGNED (itype) && (forward ^ wi::neg_p (offset)))
8448 {
8449 error_at (OMP_CLAUSE_LOCATION (c),
8450 "first offset must be in opposite direction "
8451 "of loop iterations");
8452 goto lower_omp_ordered_ret;
8453 }
8454 if (forward)
8455 offset = -offset;
8456 neg_offset_p = forward;
8457 /* Initialize the first time around. */
8458 if (folded_dep == NULL_TREE)
8459 {
8460 folded_dep = c;
8461 folded_deps[0] = offset;
8462 }
8463 else
8464 folded_deps[0] = wi::gcd (folded_deps[0],
8465 offset, UNSIGNED);
8466 }
8467 }
8468 /* Calculate minimum for the remaining dimensions. */
8469 else
8470 {
8471 folded_deps[len + i - 1] = offset;
8472 if (folded_dep == c)
8473 folded_deps[i] = offset;
8474 else if (maybe_lexically_later
8475 && !wi::eq_p (folded_deps[i], offset))
8476 {
8477 if (forward ^ wi::gts_p (folded_deps[i], offset))
8478 {
8479 unsigned int j;
8480 folded_dep = c;
8481 for (j = 1; j <= i; j++)
8482 folded_deps[j] = folded_deps[len + j - 1];
8483 }
8484 else
8485 maybe_lexically_later = false;
8486 }
8487 }
8488 }
8489 gcc_assert (i == len);
8490
8491 remove = true;
8492
8493 next_ordered_clause:
8494 if (remove)
8495 *list_p = OMP_CLAUSE_CHAIN (c);
8496 else
8497 list_p = &OMP_CLAUSE_CHAIN (c);
8498 }
8499
8500 if (folded_dep)
8501 {
8502 if (neg_offset_p)
8503 folded_deps[0] = -folded_deps[0];
8504
8505 tree itype = TREE_TYPE (TREE_VALUE (OMP_CLAUSE_DECL (folded_dep)));
8506 if (POINTER_TYPE_P (itype))
8507 itype = sizetype;
8508
8509 TREE_PURPOSE (OMP_CLAUSE_DECL (folded_dep))
8510 = wide_int_to_tree (itype, folded_deps[0]);
8511 OMP_CLAUSE_CHAIN (folded_dep) = gimple_omp_ordered_clauses (ord_stmt);
8512 *gimple_omp_ordered_clauses_ptr (ord_stmt) = folded_dep;
8513 }
8514
8515 lower_omp_ordered_ret:
8516
8517 /* Ordered without clauses is #pragma omp ordered threads, while we want
8518 a nop instead if we remove all clauses. */
8519 if (gimple_omp_ordered_clauses (ord_stmt) == NULL_TREE)
8520 gsi_replace (gsi_p, gimple_build_nop (), true);
8521 }
8522
8523
8524 /* Expand code for an OpenMP ordered directive. */
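/* In the plain (non-SIMD) case the directive becomes, in essence:

       GOMP_ordered_start ();
       BODY;
       GOMP_ordered_end ();

   while SIMD loops use the IFN_GOMP_SIMD_ORDERED_{START,END} internal
   functions instead, and under SIMT an extra per-lane loop is wrapped
   around the body.  */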
8525
8526 static void
8527 lower_omp_ordered (gimple_stmt_iterator *gsi_p, omp_context *ctx)
8528 {
8529 tree block;
8530 gimple *stmt = gsi_stmt (*gsi_p), *g;
8531 gomp_ordered *ord_stmt = as_a <gomp_ordered *> (stmt);
8532 gcall *x;
8533 gbind *bind;
8534 bool simd = omp_find_clause (gimple_omp_ordered_clauses (ord_stmt),
8535 OMP_CLAUSE_SIMD);
8536 /* FIXME: this should check presence of OMP_CLAUSE__SIMT_ on the enclosing
8537 loop. */
8538 bool maybe_simt
8539 = simd && omp_maybe_offloaded_ctx (ctx) && omp_max_simt_vf () > 1;
8540 bool threads = omp_find_clause (gimple_omp_ordered_clauses (ord_stmt),
8541 OMP_CLAUSE_THREADS);
8542
8543 if (omp_find_clause (gimple_omp_ordered_clauses (ord_stmt),
8544 OMP_CLAUSE_DEPEND))
8545 {
8546 /* FIXME: This needs to be moved to the expansion, to verify various
8547 conditions only testable on a CFG with dominators computed; also,
8548 all the depend clauses to be merged still might need to be available
8549 for the runtime checks. */
8550 if (0)
8551 lower_omp_ordered_clauses (gsi_p, ord_stmt, ctx);
8552 return;
8553 }
8554
8555 push_gimplify_context ();
8556
8557 block = make_node (BLOCK);
8558 bind = gimple_build_bind (NULL, NULL, block);
8559 gsi_replace (gsi_p, bind, true);
8560 gimple_bind_add_stmt (bind, stmt);
8561
8562 if (simd)
8563 {
8564 x = gimple_build_call_internal (IFN_GOMP_SIMD_ORDERED_START, 1,
8565 build_int_cst (NULL_TREE, threads));
8566 cfun->has_simduid_loops = true;
8567 }
8568 else
8569 x = gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_ORDERED_START),
8570 0);
8571 gimple_bind_add_stmt (bind, x);
8572
8573 tree counter = NULL_TREE, test = NULL_TREE, body = NULL_TREE;
8574 if (maybe_simt)
8575 {
8576 counter = create_tmp_var (integer_type_node);
8577 g = gimple_build_call_internal (IFN_GOMP_SIMT_LANE, 0);
8578 gimple_call_set_lhs (g, counter);
8579 gimple_bind_add_stmt (bind, g);
8580
8581 body = create_artificial_label (UNKNOWN_LOCATION);
8582 test = create_artificial_label (UNKNOWN_LOCATION);
8583 gimple_bind_add_stmt (bind, gimple_build_label (body));
8584
8585 tree simt_pred = create_tmp_var (integer_type_node);
8586 g = gimple_build_call_internal (IFN_GOMP_SIMT_ORDERED_PRED, 1, counter);
8587 gimple_call_set_lhs (g, simt_pred);
8588 gimple_bind_add_stmt (bind, g);
8589
8590 tree t = create_artificial_label (UNKNOWN_LOCATION);
8591 g = gimple_build_cond (EQ_EXPR, simt_pred, integer_zero_node, t, test);
8592 gimple_bind_add_stmt (bind, g);
8593
8594 gimple_bind_add_stmt (bind, gimple_build_label (t));
8595 }
8596 lower_omp (gimple_omp_body_ptr (stmt), ctx);
8597 gimple_omp_set_body (stmt, maybe_catch_exception (gimple_omp_body (stmt)));
8598 gimple_bind_add_seq (bind, gimple_omp_body (stmt));
8599 gimple_omp_set_body (stmt, NULL);
8600
8601 if (maybe_simt)
8602 {
8603 gimple_bind_add_stmt (bind, gimple_build_label (test));
8604 g = gimple_build_assign (counter, MINUS_EXPR, counter, integer_one_node);
8605 gimple_bind_add_stmt (bind, g);
8606
8607 tree c = build2 (GE_EXPR, boolean_type_node, counter, integer_zero_node);
8608 tree nonneg = create_tmp_var (integer_type_node);
8609 gimple_seq tseq = NULL;
8610 gimplify_assign (nonneg, fold_convert (integer_type_node, c), &tseq);
8611 gimple_bind_add_seq (bind, tseq);
8612
8613 g = gimple_build_call_internal (IFN_GOMP_SIMT_VOTE_ANY, 1, nonneg);
8614 gimple_call_set_lhs (g, nonneg);
8615 gimple_bind_add_stmt (bind, g);
8616
8617 tree end = create_artificial_label (UNKNOWN_LOCATION);
8618 g = gimple_build_cond (NE_EXPR, nonneg, integer_zero_node, body, end);
8619 gimple_bind_add_stmt (bind, g);
8620
8621 gimple_bind_add_stmt (bind, gimple_build_label (end));
8622 }
8623 if (simd)
8624 x = gimple_build_call_internal (IFN_GOMP_SIMD_ORDERED_END, 1,
8625 build_int_cst (NULL_TREE, threads));
8626 else
8627 x = gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_ORDERED_END),
8628 0);
8629 gimple_bind_add_stmt (bind, x);
8630
8631 gimple_bind_add_stmt (bind, gimple_build_omp_return (true));
8632
8633 pop_gimplify_context (bind);
8634
8635 gimple_bind_append_vars (bind, ctx->block_vars);
8636 BLOCK_VARS (block) = gimple_bind_vars (bind);
8637 }
8638
8639
8640 /* Expand code for an OpenMP scan directive and the structured block
8641 before the scan directive. */
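/* For reference, a hedged example of the source form handled here (an
   OpenMP 5.0 inclusive scan; illustrative, not taken from the testsuite):

       #pragma omp simd reduction (inscan, +:sum)
       for (i = 0; i < n; i++)
         {
           sum += a[i];          <- input phase
           #pragma omp scan inclusive (sum)
           b[i] = sum;           <- scan phase
         }
*/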
8642
8643 static void
8644 lower_omp_scan (gimple_stmt_iterator *gsi_p, omp_context *ctx)
8645 {
8646 gimple *stmt = gsi_stmt (*gsi_p);
8647 bool has_clauses
8648 = gimple_omp_scan_clauses (as_a <gomp_scan *> (stmt)) != NULL;
8649 tree lane = NULL_TREE;
8650 gimple_seq before = NULL;
8651 omp_context *octx = ctx->outer;
8652 gcc_assert (octx);
8653 if (!octx->scan_inclusive && !has_clauses)
8654 {
8655 gimple_stmt_iterator gsi2 = *gsi_p;
8656 gsi_next (&gsi2);
8657 gimple *stmt2 = gsi_stmt (gsi2);
8658 /* For exclusive scan, swap GIMPLE_OMP_SCAN without clauses
8659 with following GIMPLE_OMP_SCAN with clauses, so that input_phase,
8660 the one with exclusive clause(s), comes first. */
8661 if (stmt2
8662 && gimple_code (stmt2) == GIMPLE_OMP_SCAN
8663 && gimple_omp_scan_clauses (as_a <gomp_scan *> (stmt2)) != NULL)
8664 {
8665 gsi_remove (gsi_p, false);
8666 gsi_insert_after (gsi_p, stmt, GSI_SAME_STMT);
8667 ctx = maybe_lookup_ctx (stmt2);
8668 gcc_assert (ctx);
8669 lower_omp_scan (gsi_p, ctx);
8670 return;
8671 }
8672 }
8673
8674 bool input_phase = has_clauses ^ octx->scan_inclusive;
8675 if (gimple_code (octx->stmt) == GIMPLE_OMP_FOR
8676 && (gimple_omp_for_kind (octx->stmt) & GF_OMP_FOR_SIMD)
8677 && !gimple_omp_for_combined_into_p (octx->stmt))
8678 {
8679 if (tree c = omp_find_clause (gimple_omp_for_clauses (octx->stmt),
8680 OMP_CLAUSE__SIMDUID_))
8681 {
8682 tree uid = OMP_CLAUSE__SIMDUID__DECL (c);
8683 lane = create_tmp_var (unsigned_type_node);
8684 tree t = build_int_cst (integer_type_node,
8685 input_phase ? 1
8686 : octx->scan_inclusive ? 2 : 3);
8687 gimple *g
8688 = gimple_build_call_internal (IFN_GOMP_SIMD_LANE, 2, uid, t);
8689 gimple_call_set_lhs (g, lane);
8690 gimple_seq_add_stmt (&before, g);
8691 }
8692 for (tree c = gimple_omp_for_clauses (octx->stmt);
8693 c; c = OMP_CLAUSE_CHAIN (c))
8694 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
8695 && OMP_CLAUSE_REDUCTION_INSCAN (c))
8696 {
8697 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
8698 tree var = OMP_CLAUSE_DECL (c);
8699 tree new_var = lookup_decl (var, octx);
8700 tree val = new_var;
8701 tree var2 = NULL_TREE;
8702 tree var3 = NULL_TREE;
8703 tree var4 = NULL_TREE;
8704 tree lane0 = NULL_TREE;
8705 tree new_vard = new_var;
8706 if (omp_is_reference (var))
8707 {
8708 new_var = build_simple_mem_ref_loc (clause_loc, new_var);
8709 val = new_var;
8710 }
8711 if (DECL_HAS_VALUE_EXPR_P (new_vard))
8712 {
8713 val = DECL_VALUE_EXPR (new_vard);
8714 if (omp_is_reference (var))
8715 {
8716 gcc_assert (TREE_CODE (val) == ADDR_EXPR);
8717 val = TREE_OPERAND (val, 0);
8718 }
8719 if (TREE_CODE (val) == ARRAY_REF
8720 && VAR_P (TREE_OPERAND (val, 0)))
8721 {
8722 tree v = TREE_OPERAND (val, 0);
8723 if (lookup_attribute ("omp simd array",
8724 DECL_ATTRIBUTES (v)))
8725 {
8726 val = unshare_expr (val);
8727 lane0 = TREE_OPERAND (val, 1);
8728 TREE_OPERAND (val, 1) = lane;
8729 var2 = lookup_decl (v, octx);
8730 if (!octx->scan_inclusive)
8731 var4 = lookup_decl (var2, octx);
8732 if (input_phase
8733 && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
8734 var3 = maybe_lookup_decl (var4 ? var4 : var2, octx);
8735 if (!input_phase)
8736 {
8737 var2 = build4 (ARRAY_REF, TREE_TYPE (val),
8738 var2, lane, NULL_TREE, NULL_TREE);
8739 TREE_THIS_NOTRAP (var2) = 1;
8740 if (!octx->scan_inclusive)
8741 {
8742 var4 = build4 (ARRAY_REF, TREE_TYPE (val),
8743 var4, lane, NULL_TREE,
8744 NULL_TREE);
8745 TREE_THIS_NOTRAP (var4) = 1;
8746 }
8747 }
8748 else
8749 var2 = val;
8750 }
8751 }
8752 gcc_assert (var2);
8753 }
8754 else
8755 {
8756 var2 = build_outer_var_ref (var, octx);
8757 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
8758 {
8759 var3 = maybe_lookup_decl (new_vard, octx);
8760 if (var3 == new_vard || var3 == NULL_TREE)
8761 var3 = NULL_TREE;
8762 else if (!octx->scan_inclusive && !input_phase)
8763 {
8764 var4 = maybe_lookup_decl (var3, octx);
8765 if (var4 == var3 || var4 == NULL_TREE)
8766 {
8767 if (TREE_ADDRESSABLE (TREE_TYPE (new_var)))
8768 {
8769 var4 = var3;
8770 var3 = NULL_TREE;
8771 }
8772 else
8773 var4 = NULL_TREE;
8774 }
8775 }
8776 }
8777 if (!octx->scan_inclusive && !input_phase && var4 == NULL_TREE)
8778 var4 = create_tmp_var (TREE_TYPE (val));
8779 }
8780 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
8781 {
8782 tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
8783 if (input_phase)
8784 {
8785 if (var3)
8786 {
8787 /* If we've added a separate identity element
8788 variable, copy it over into val. */
8789 tree x = lang_hooks.decls.omp_clause_assign_op (c, val,
8790 var3);
8791 gimplify_and_add (x, &before);
8792 }
8793 else if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c))
8794 {
8795 /* Otherwise, assign to it the identity element. */
8796 gimple_seq tseq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c);
8797 tree ref = build_outer_var_ref (var, octx);
8798 tree x = (DECL_HAS_VALUE_EXPR_P (new_vard)
8799 ? DECL_VALUE_EXPR (new_vard) : NULL_TREE);
8800 if (x)
8801 {
8802 if (omp_is_reference (var))
8803 val = build_fold_addr_expr_loc (clause_loc, val);
8804 SET_DECL_VALUE_EXPR (new_vard, val);
8805 }
8806 SET_DECL_VALUE_EXPR (placeholder, ref);
8807 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
8808 lower_omp (&tseq, octx);
8809 if (x)
8810 SET_DECL_VALUE_EXPR (new_vard, x);
8811 SET_DECL_VALUE_EXPR (placeholder, NULL_TREE);
8812 DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
8813 gimple_seq_add_seq (&before, tseq);
8814 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL;
8815 }
8816 }
8817 else
8818 {
8819 tree x;
8820 if (!octx->scan_inclusive)
8821 {
8822 tree v4 = unshare_expr (var4);
8823 tree v2 = unshare_expr (var2);
8824 x = lang_hooks.decls.omp_clause_assign_op (c, v4, v2);
8825 gimplify_and_add (x, &before);
8826 }
8827 gimple_seq tseq = OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c);
8828 x = (DECL_HAS_VALUE_EXPR_P (new_vard)
8829 ? DECL_VALUE_EXPR (new_vard) : NULL_TREE);
8830 tree vexpr = val;
8831 if (x && omp_is_reference (var))
8832 vexpr = build_fold_addr_expr_loc (clause_loc, val);
8833 if (x)
8834 SET_DECL_VALUE_EXPR (new_vard, vexpr);
8835 SET_DECL_VALUE_EXPR (placeholder, var2);
8836 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
8837 lower_omp (&tseq, octx);
8838 gimple_seq_add_seq (&before, tseq);
8839 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
8840 if (x)
8841 SET_DECL_VALUE_EXPR (new_vard, x);
8842 SET_DECL_VALUE_EXPR (placeholder, NULL_TREE);
8843 DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
8844 if (octx->scan_inclusive)
8845 {
8846 x = lang_hooks.decls.omp_clause_assign_op (c, val,
8847 var2);
8848 gimplify_and_add (x, &before);
8849 }
8850 else if (lane0 == NULL_TREE)
8851 {
8852 x = lang_hooks.decls.omp_clause_assign_op (c, val,
8853 var4);
8854 gimplify_and_add (x, &before);
8855 }
8856 }
8857 }
8858 else
8859 {
8860 if (input_phase)
8861 {
8862 /* Input phase. Set val to the initializer before
8863 the body. */
8864 tree x = omp_reduction_init (c, TREE_TYPE (new_var));
8865 gimplify_assign (val, x, &before);
8866 }
8867 else
8868 {
8869 /* Scan phase. */
8870 enum tree_code code = OMP_CLAUSE_REDUCTION_CODE (c);
8871 if (code == MINUS_EXPR)
8872 code = PLUS_EXPR;
8873
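/* What follows is, roughly:
     inclusive scan:  var2 = var2 <op> val;  val = var2;
     exclusive scan:  var4 = var2;  var2 = var2 <op> val;
                      and, when there is no simd-array lane to read
                      back from, val = var4;
   so VAL observes the prefix including (inclusive) or excluding
   (exclusive) the current iteration.  A sketch only; the exact
   GIMPLE is built below.  */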
8874 tree x = build2 (code, TREE_TYPE (var2),
8875 unshare_expr (var2), unshare_expr (val));
8876 if (octx->scan_inclusive)
8877 {
8878 gimplify_assign (unshare_expr (var2), x, &before);
8879 gimplify_assign (val, var2, &before);
8880 }
8881 else
8882 {
8883 gimplify_assign (unshare_expr (var4),
8884 unshare_expr (var2), &before);
8885 gimplify_assign (var2, x, &before);
8886 if (lane0 == NULL_TREE)
8887 gimplify_assign (val, var4, &before);
8888 }
8889 }
8890 }
8891 if (!octx->scan_inclusive && !input_phase && lane0)
8892 {
8893 tree vexpr = unshare_expr (var4);
8894 TREE_OPERAND (vexpr, 1) = lane0;
8895 if (omp_is_reference (var))
8896 vexpr = build_fold_addr_expr_loc (clause_loc, vexpr);
8897 SET_DECL_VALUE_EXPR (new_vard, vexpr);
8898 }
8899 }
8900 }
8901 else if (has_clauses)
8902 sorry_at (gimple_location (stmt),
8903 "%<#pragma omp scan%> not supported yet");
8904 gsi_insert_seq_after (gsi_p, gimple_omp_body (stmt), GSI_SAME_STMT);
8905 gsi_insert_seq_after (gsi_p, before, GSI_SAME_STMT);
8906 gsi_replace (gsi_p, gimple_build_nop (), true);
8907 }
8908
8909
8910 /* Lower a GIMPLE_OMP_CRITICAL statement. This is a relatively simple
8911 substitution of a couple of function calls. But in the NAMED case,
8912 it requires that languages coordinate a symbol name. It is therefore
8913 best put here in common code. */
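/* As an illustration (a sketch, not the exact GIMPLE), a named
   critical section

       #pragma omp critical (foo)
         body;

   is lowered to roughly

       GOMP_critical_name_start (&.gomp_critical_user_foo);
       body;
       GOMP_critical_name_end (&.gomp_critical_user_foo);

   while the unnamed form calls GOMP_critical_start () and
   GOMP_critical_end () with no arguments.  */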
8914
8915 static GTY(()) hash_map<tree, tree> *critical_name_mutexes;
8916
8917 static void
8918 lower_omp_critical (gimple_stmt_iterator *gsi_p, omp_context *ctx)
8919 {
8920 tree block;
8921 tree name, lock, unlock;
8922 gomp_critical *stmt = as_a <gomp_critical *> (gsi_stmt (*gsi_p));
8923 gbind *bind;
8924 location_t loc = gimple_location (stmt);
8925 gimple_seq tbody;
8926
8927 name = gimple_omp_critical_name (stmt);
8928 if (name)
8929 {
8930 tree decl;
8931
8932 if (!critical_name_mutexes)
8933 critical_name_mutexes = hash_map<tree, tree>::create_ggc (10);
8934
8935 tree *n = critical_name_mutexes->get (name);
8936 if (n == NULL)
8937 {
8938 char *new_str;
8939
8940 decl = create_tmp_var_raw (ptr_type_node);
8941
8942 new_str = ACONCAT ((".gomp_critical_user_",
8943 IDENTIFIER_POINTER (name), NULL));
8944 DECL_NAME (decl) = get_identifier (new_str);
8945 TREE_PUBLIC (decl) = 1;
8946 TREE_STATIC (decl) = 1;
8947 DECL_COMMON (decl) = 1;
8948 DECL_ARTIFICIAL (decl) = 1;
8949 DECL_IGNORED_P (decl) = 1;
8950
8951 varpool_node::finalize_decl (decl);
8952
8953 critical_name_mutexes->put (name, decl);
8954 }
8955 else
8956 decl = *n;
8957
8958 /* If '#pragma omp critical' is inside an offloaded region or
8959 inside a function marked as offloadable, the symbol must be
8960 marked as offloadable too. */
8961 omp_context *octx;
8962 if (cgraph_node::get (current_function_decl)->offloadable)
8963 varpool_node::get_create (decl)->offloadable = 1;
8964 else
8965 for (octx = ctx->outer; octx; octx = octx->outer)
8966 if (is_gimple_omp_offloaded (octx->stmt))
8967 {
8968 varpool_node::get_create (decl)->offloadable = 1;
8969 break;
8970 }
8971
8972 lock = builtin_decl_explicit (BUILT_IN_GOMP_CRITICAL_NAME_START);
8973 lock = build_call_expr_loc (loc, lock, 1,
8974 build_fold_addr_expr_loc (loc, decl));
8975
8976 unlock = builtin_decl_explicit (BUILT_IN_GOMP_CRITICAL_NAME_END);
8977 unlock = build_call_expr_loc (loc, unlock, 1,
8978 build_fold_addr_expr_loc (loc, decl));
8979 }
8980 else
8981 {
8982 lock = builtin_decl_explicit (BUILT_IN_GOMP_CRITICAL_START);
8983 lock = build_call_expr_loc (loc, lock, 0);
8984
8985 unlock = builtin_decl_explicit (BUILT_IN_GOMP_CRITICAL_END);
8986 unlock = build_call_expr_loc (loc, unlock, 0);
8987 }
8988
8989 push_gimplify_context ();
8990
8991 block = make_node (BLOCK);
8992 bind = gimple_build_bind (NULL, NULL, block);
8993 gsi_replace (gsi_p, bind, true);
8994 gimple_bind_add_stmt (bind, stmt);
8995
8996 tbody = gimple_bind_body (bind);
8997 gimplify_and_add (lock, &tbody);
8998 gimple_bind_set_body (bind, tbody);
8999
9000 lower_omp (gimple_omp_body_ptr (stmt), ctx);
9001 gimple_omp_set_body (stmt, maybe_catch_exception (gimple_omp_body (stmt)));
9002 gimple_bind_add_seq (bind, gimple_omp_body (stmt));
9003 gimple_omp_set_body (stmt, NULL);
9004
9005 tbody = gimple_bind_body (bind);
9006 gimplify_and_add (unlock, &tbody);
9007 gimple_bind_set_body (bind, tbody);
9008
9009 gimple_bind_add_stmt (bind, gimple_build_omp_return (true));
9010
9011 pop_gimplify_context (bind);
9012 gimple_bind_append_vars (bind, ctx->block_vars);
9013 BLOCK_VARS (block) = gimple_bind_vars (bind);
9014 }
9015
9016 /* A subroutine of lower_omp_for. Generate code to emit the predicate
9017 for a lastprivate clause. Given a loop control predicate of (V
9018 cond N2), we gate the clause on (!(V cond N2)). The lowered form
9019 is appended to *DLIST, iterator initialization is appended to
9020 *BODY_P. *CLIST is for lastprivate(conditional:) code that needs
9021 to be emitted in a critical section. */
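/* As an example (a sketch), for

       #pragma omp for lastprivate (x)
       for (i = 0; i < n; i++) ...

   the unit step allows the strict-equality form chosen below, so
   *DLIST receives roughly

       if (i == n) x = x_private;

   while *BODY_P receives an initialization of i, so that threads
   which run no iterations cannot satisfy the predicate by accident.  */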
9022
9023 static void
9024 lower_omp_for_lastprivate (struct omp_for_data *fd, gimple_seq *body_p,
9025 gimple_seq *dlist, gimple_seq *clist,
9026 struct omp_context *ctx)
9027 {
9028 tree clauses, cond, vinit;
9029 enum tree_code cond_code;
9030 gimple_seq stmts;
9031
9032 cond_code = fd->loop.cond_code;
9033 cond_code = cond_code == LT_EXPR ? GE_EXPR : LE_EXPR;
9034
9035 /* When possible, use a strict equality expression. This can
9036 let VRP-style optimizations deduce the value and remove a copy. */
9037 if (tree_fits_shwi_p (fd->loop.step))
9038 {
9039 HOST_WIDE_INT step = tree_to_shwi (fd->loop.step);
9040 if (step == 1 || step == -1)
9041 cond_code = EQ_EXPR;
9042 }
9043
9044 if (gimple_omp_for_kind (fd->for_stmt) == GF_OMP_FOR_KIND_GRID_LOOP
9045 || gimple_omp_for_grid_phony (fd->for_stmt))
9046 cond = omp_grid_lastprivate_predicate (fd);
9047 else
9048 {
9049 tree n2 = fd->loop.n2;
9050 if (fd->collapse > 1
9051 && TREE_CODE (n2) != INTEGER_CST
9052 && gimple_omp_for_combined_into_p (fd->for_stmt))
9053 {
9054 struct omp_context *taskreg_ctx = NULL;
9055 if (gimple_code (ctx->outer->stmt) == GIMPLE_OMP_FOR)
9056 {
9057 gomp_for *gfor = as_a <gomp_for *> (ctx->outer->stmt);
9058 if (gimple_omp_for_kind (gfor) == GF_OMP_FOR_KIND_FOR
9059 || gimple_omp_for_kind (gfor) == GF_OMP_FOR_KIND_DISTRIBUTE)
9060 {
9061 if (gimple_omp_for_combined_into_p (gfor))
9062 {
9063 gcc_assert (ctx->outer->outer
9064 && is_parallel_ctx (ctx->outer->outer));
9065 taskreg_ctx = ctx->outer->outer;
9066 }
9067 else
9068 {
9069 struct omp_for_data outer_fd;
9070 omp_extract_for_data (gfor, &outer_fd, NULL);
9071 n2 = fold_convert (TREE_TYPE (n2), outer_fd.loop.n2);
9072 }
9073 }
9074 else if (gimple_omp_for_kind (gfor) == GF_OMP_FOR_KIND_TASKLOOP)
9075 taskreg_ctx = ctx->outer->outer;
9076 }
9077 else if (is_taskreg_ctx (ctx->outer))
9078 taskreg_ctx = ctx->outer;
9079 if (taskreg_ctx)
9080 {
9081 int i;
9082 tree taskreg_clauses
9083 = gimple_omp_taskreg_clauses (taskreg_ctx->stmt);
9084 tree innerc = omp_find_clause (taskreg_clauses,
9085 OMP_CLAUSE__LOOPTEMP_);
9086 gcc_assert (innerc);
9087 for (i = 0; i < fd->collapse; i++)
9088 {
9089 innerc = omp_find_clause (OMP_CLAUSE_CHAIN (innerc),
9090 OMP_CLAUSE__LOOPTEMP_);
9091 gcc_assert (innerc);
9092 }
9093 innerc = omp_find_clause (OMP_CLAUSE_CHAIN (innerc),
9094 OMP_CLAUSE__LOOPTEMP_);
9095 if (innerc)
9096 n2 = fold_convert (TREE_TYPE (n2),
9097 lookup_decl (OMP_CLAUSE_DECL (innerc),
9098 taskreg_ctx));
9099 }
9100 }
9101 cond = build2 (cond_code, boolean_type_node, fd->loop.v, n2);
9102 }
9103
9104 clauses = gimple_omp_for_clauses (fd->for_stmt);
9105 stmts = NULL;
9106 lower_lastprivate_clauses (clauses, cond, body_p, &stmts, clist, ctx);
9107 if (!gimple_seq_empty_p (stmts))
9108 {
9109 gimple_seq_add_seq (&stmts, *dlist);
9110 *dlist = stmts;
9111
9112 /* Optimize: v = 0; is usually cheaper than v = some_other_constant. */
9113 vinit = fd->loop.n1;
9114 if (cond_code == EQ_EXPR
9115 && tree_fits_shwi_p (fd->loop.n2)
9116 && ! integer_zerop (fd->loop.n2))
9117 vinit = build_int_cst (TREE_TYPE (fd->loop.v), 0);
9118 else
9119 vinit = unshare_expr (vinit);
9120
9121 /* Initialize the iterator variable, so that threads that don't execute
9122 any iterations don't execute the lastprivate clauses by accident. */
9123 gimplify_assign (fd->loop.v, vinit, body_p);
9124 }
9125 }
9126
9127
9128 /* Lower code for an OMP loop directive. */
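/* The lowered form is assembled roughly as

       <input clause setup>              (lower_rec_input_clauses)
       <pre-body, lowered bound temps>
       GIMPLE_OMP_FOR <clauses>
       <loop body>
       GIMPLE_OMP_CONTINUE (V, V)
       <reduction combining>             (lower_reduction_clauses)
       <lastprivate / destructor code>   (dlist)
       GIMPLE_OMP_RETURN (nowait)

   a sketch only; the precise sequence depends on the loop kind and the
   clauses present (task reductions, OpenACC head/tail markers, etc.).  */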
9129
9130 static void
9131 lower_omp_for (gimple_stmt_iterator *gsi_p, omp_context *ctx)
9132 {
9133 tree *rhs_p, block;
9134 struct omp_for_data fd, *fdp = NULL;
9135 gomp_for *stmt = as_a <gomp_for *> (gsi_stmt (*gsi_p));
9136 gbind *new_stmt;
9137 gimple_seq omp_for_body, body, dlist, tred_ilist = NULL, tred_dlist = NULL;
9138 gimple_seq cnt_list = NULL, clist = NULL;
9139 gimple_seq oacc_head = NULL, oacc_tail = NULL;
9140 size_t i;
9141
9142 push_gimplify_context ();
9143
9144 lower_omp (gimple_omp_for_pre_body_ptr (stmt), ctx);
9145
9146 block = make_node (BLOCK);
9147 new_stmt = gimple_build_bind (NULL, NULL, block);
9148 /* Replace at gsi right away, so that 'stmt' is no longer a member
9149 of a sequence, as we're going to add it to a different
9150 one below. */
9151 gsi_replace (gsi_p, new_stmt, true);
9152
9153 /* Move the declarations of temporaries in the loop body to the new
9154 bind before we make the body go away. */
9155 omp_for_body = gimple_omp_body (stmt);
9156 if (!gimple_seq_empty_p (omp_for_body)
9157 && gimple_code (gimple_seq_first_stmt (omp_for_body)) == GIMPLE_BIND)
9158 {
9159 gbind *inner_bind
9160 = as_a <gbind *> (gimple_seq_first_stmt (omp_for_body));
9161 tree vars = gimple_bind_vars (inner_bind);
9162 gimple_bind_append_vars (new_stmt, vars);
9163 /* bind_vars/BLOCK_VARS are being moved to new_stmt/block, don't
9164 keep them on the inner_bind and its block. */
9165 gimple_bind_set_vars (inner_bind, NULL_TREE);
9166 if (gimple_bind_block (inner_bind))
9167 BLOCK_VARS (gimple_bind_block (inner_bind)) = NULL_TREE;
9168 }
9169
9170 if (gimple_omp_for_combined_into_p (stmt))
9171 {
9172 omp_extract_for_data (stmt, &fd, NULL);
9173 fdp = &fd;
9174
9175 /* We need two temporaries with fd.loop.v type (istart/iend)
9176 and then (fd.collapse - 1) temporaries with the same
9177 type for count2 ... countN-1 vars if not constant. */
9178 size_t count = 2;
9179 tree type = fd.iter_type;
9180 if (fd.collapse > 1
9181 && TREE_CODE (fd.loop.n2) != INTEGER_CST)
9182 count += fd.collapse - 1;
9183 bool taskreg_for
9184 = (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_FOR
9185 || gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_TASKLOOP);
9186 tree outerc = NULL, *pc = gimple_omp_for_clauses_ptr (stmt);
9187 tree simtc = NULL;
9188 tree clauses = *pc;
9189 if (taskreg_for)
9190 outerc
9191 = omp_find_clause (gimple_omp_taskreg_clauses (ctx->outer->stmt),
9192 OMP_CLAUSE__LOOPTEMP_);
9193 if (ctx->simt_stmt)
9194 simtc = omp_find_clause (gimple_omp_for_clauses (ctx->simt_stmt),
9195 OMP_CLAUSE__LOOPTEMP_);
9196 for (i = 0; i < count; i++)
9197 {
9198 tree temp;
9199 if (taskreg_for)
9200 {
9201 gcc_assert (outerc);
9202 temp = lookup_decl (OMP_CLAUSE_DECL (outerc), ctx->outer);
9203 outerc = omp_find_clause (OMP_CLAUSE_CHAIN (outerc),
9204 OMP_CLAUSE__LOOPTEMP_);
9205 }
9206 else
9207 {
9208 /* If there are two adjacent SIMD stmts, one with a _simt_
9209 clause and another without, make sure they have the same
9210 decls in the _looptemp_ clauses, because the outer stmt
9211 they are combined into will look up just one inner_stmt. */
9212 if (ctx->simt_stmt)
9213 temp = OMP_CLAUSE_DECL (simtc);
9214 else
9215 temp = create_tmp_var (type);
9216 insert_decl_map (&ctx->outer->cb, temp, temp);
9217 }
9218 *pc = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__LOOPTEMP_);
9219 OMP_CLAUSE_DECL (*pc) = temp;
9220 pc = &OMP_CLAUSE_CHAIN (*pc);
9221 if (ctx->simt_stmt)
9222 simtc = omp_find_clause (OMP_CLAUSE_CHAIN (simtc),
9223 OMP_CLAUSE__LOOPTEMP_);
9224 }
9225 *pc = clauses;
9226 }
9227
9228 /* The pre-body and input clauses go before the lowered GIMPLE_OMP_FOR. */
9229 dlist = NULL;
9230 body = NULL;
9231 tree rclauses
9232 = omp_task_reductions_find_first (gimple_omp_for_clauses (stmt), OMP_FOR,
9233 OMP_CLAUSE_REDUCTION);
9234 tree rtmp = NULL_TREE;
9235 if (rclauses)
9236 {
9237 tree type = build_pointer_type (pointer_sized_int_node);
9238 tree temp = create_tmp_var (type);
9239 tree c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__REDUCTEMP_);
9240 OMP_CLAUSE_DECL (c) = temp;
9241 OMP_CLAUSE_CHAIN (c) = gimple_omp_for_clauses (stmt);
9242 gimple_omp_for_set_clauses (stmt, c);
9243 lower_omp_task_reductions (ctx, OMP_FOR,
9244 gimple_omp_for_clauses (stmt),
9245 &tred_ilist, &tred_dlist);
9246 rclauses = c;
9247 rtmp = make_ssa_name (type);
9248 gimple_seq_add_stmt (&body, gimple_build_assign (rtmp, temp));
9249 }
9250
9251 lower_lastprivate_conditional_clauses (gimple_omp_for_clauses_ptr (stmt),
9252 ctx);
9253
9254 lower_rec_input_clauses (gimple_omp_for_clauses (stmt), &body, &dlist, ctx,
9255 fdp);
9256 gimple_seq_add_seq (rclauses ? &tred_ilist : &body,
9257 gimple_omp_for_pre_body (stmt));
9258
9259 lower_omp (gimple_omp_body_ptr (stmt), ctx);
9260
9261 /* Lower the header expressions. At this point, we can assume that
9262 the header is of the form:
9263
9264 #pragma omp for (V = VAL1; V {<|>|<=|>=} VAL2; V = V [+-] VAL3)
9265
9266 We just need to make sure that VAL1, VAL2 and VAL3 are lowered
9267 using the .omp_data_s mapping, if needed. */
9268 for (i = 0; i < gimple_omp_for_collapse (stmt); i++)
9269 {
9270 rhs_p = gimple_omp_for_initial_ptr (stmt, i);
9271 if (!is_gimple_min_invariant (*rhs_p))
9272 *rhs_p = get_formal_tmp_var (*rhs_p, &cnt_list);
9273 else if (TREE_CODE (*rhs_p) == ADDR_EXPR)
9274 recompute_tree_invariant_for_addr_expr (*rhs_p);
9275
9276 rhs_p = gimple_omp_for_final_ptr (stmt, i);
9277 if (!is_gimple_min_invariant (*rhs_p))
9278 *rhs_p = get_formal_tmp_var (*rhs_p, &cnt_list);
9279 else if (TREE_CODE (*rhs_p) == ADDR_EXPR)
9280 recompute_tree_invariant_for_addr_expr (*rhs_p);
9281
9282 rhs_p = &TREE_OPERAND (gimple_omp_for_incr (stmt, i), 1);
9283 if (!is_gimple_min_invariant (*rhs_p))
9284 *rhs_p = get_formal_tmp_var (*rhs_p, &cnt_list);
9285 }
9286 if (rclauses)
9287 gimple_seq_add_seq (&tred_ilist, cnt_list);
9288 else
9289 gimple_seq_add_seq (&body, cnt_list);
9290
9291 /* Once lowered, extract the bounds and clauses. */
9292 omp_extract_for_data (stmt, &fd, NULL);
9293
9294 if (is_gimple_omp_oacc (ctx->stmt)
9295 && !ctx_in_oacc_kernels_region (ctx))
9296 lower_oacc_head_tail (gimple_location (stmt),
9297 gimple_omp_for_clauses (stmt),
9298 &oacc_head, &oacc_tail, ctx);
9299
9300 /* Add OpenACC partitioning and reduction markers just before the loop. */
9301 if (oacc_head)
9302 gimple_seq_add_seq (&body, oacc_head);
9303
9304 lower_omp_for_lastprivate (&fd, &body, &dlist, &clist, ctx);
9305
9306 if (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_FOR)
9307 for (tree c = gimple_omp_for_clauses (stmt); c; c = OMP_CLAUSE_CHAIN (c))
9308 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
9309 && !OMP_CLAUSE_LINEAR_NO_COPYIN (c))
9310 {
9311 OMP_CLAUSE_DECL (c) = lookup_decl (OMP_CLAUSE_DECL (c), ctx);
9312 if (DECL_P (OMP_CLAUSE_LINEAR_STEP (c)))
9313 OMP_CLAUSE_LINEAR_STEP (c)
9314 = maybe_lookup_decl_in_outer_ctx (OMP_CLAUSE_LINEAR_STEP (c),
9315 ctx);
9316 }
9317
9318 bool phony_loop = (gimple_omp_for_kind (stmt) != GF_OMP_FOR_KIND_GRID_LOOP
9319 && gimple_omp_for_grid_phony (stmt));
9320 if (!phony_loop)
9321 gimple_seq_add_stmt (&body, stmt);
9322 gimple_seq_add_seq (&body, gimple_omp_body (stmt));
9323
9324 if (!phony_loop)
9325 gimple_seq_add_stmt (&body, gimple_build_omp_continue (fd.loop.v,
9326 fd.loop.v));
9327
9328 /* After the loop, add exit clauses. */
9329 lower_reduction_clauses (gimple_omp_for_clauses (stmt), &body, &clist, ctx);
9330
9331 if (clist)
9332 {
9333 tree fndecl = builtin_decl_explicit (BUILT_IN_GOMP_ATOMIC_START);
9334 gcall *g = gimple_build_call (fndecl, 0);
9335 gimple_seq_add_stmt (&body, g);
9336 gimple_seq_add_seq (&body, clist);
9337 fndecl = builtin_decl_explicit (BUILT_IN_GOMP_ATOMIC_END);
9338 g = gimple_build_call (fndecl, 0);
9339 gimple_seq_add_stmt (&body, g);
9340 }
9341
9342 if (ctx->cancellable)
9343 gimple_seq_add_stmt (&body, gimple_build_label (ctx->cancel_label));
9344
9345 gimple_seq_add_seq (&body, dlist);
9346
9347 if (rclauses)
9348 {
9349 gimple_seq_add_seq (&tred_ilist, body);
9350 body = tred_ilist;
9351 }
9352
9353 body = maybe_catch_exception (body);
9354
9355 if (!phony_loop)
9356 {
9357 /* Region exit marker goes at the end of the loop body. */
9358 gimple *g = gimple_build_omp_return (fd.have_nowait);
9359 gimple_seq_add_stmt (&body, g);
9360
9361 gimple_seq_add_seq (&body, tred_dlist);
9362
9363 maybe_add_implicit_barrier_cancel (ctx, g, &body);
9364
9365 if (rclauses)
9366 OMP_CLAUSE_DECL (rclauses) = rtmp;
9367 }
9368
9369 /* Add OpenACC joining and reduction markers just after the loop. */
9370 if (oacc_tail)
9371 gimple_seq_add_seq (&body, oacc_tail);
9372
9373 pop_gimplify_context (new_stmt);
9374
9375 gimple_bind_append_vars (new_stmt, ctx->block_vars);
9376 maybe_remove_omp_member_access_dummy_vars (new_stmt);
9377 BLOCK_VARS (block) = gimple_bind_vars (new_stmt);
9378 if (BLOCK_VARS (block))
9379 TREE_USED (block) = 1;
9380
9381 gimple_bind_set_body (new_stmt, body);
9382 gimple_omp_set_body (stmt, NULL);
9383 gimple_omp_for_set_pre_body (stmt, NULL);
9384 }
9385
9386 /* Callback for walk_stmts. Check whether the walked body contains exactly
9387 one GIMPLE_OMP_FOR or GIMPLE_OMP_SECTIONS (debug stmts aside). */
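/* For instance, in

       #pragma omp parallel
       #pragma omp for
       for (...) ...

   the walk finds exactly one GIMPLE_OMP_FOR, *INFO ends up 1 and the
   parallel can be marked combined; any other statement (except debug
   stmts) or a second worksharing construct forces *INFO to -1.  */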
9388
9389 static tree
9390 check_combined_parallel (gimple_stmt_iterator *gsi_p,
9391 bool *handled_ops_p,
9392 struct walk_stmt_info *wi)
9393 {
9394 int *info = (int *) wi->info;
9395 gimple *stmt = gsi_stmt (*gsi_p);
9396
9397 *handled_ops_p = true;
9398 switch (gimple_code (stmt))
9399 {
9400 WALK_SUBSTMTS;
9401
9402 case GIMPLE_DEBUG:
9403 break;
9404 case GIMPLE_OMP_FOR:
9405 case GIMPLE_OMP_SECTIONS:
9406 *info = *info == 0 ? 1 : -1;
9407 break;
9408 default:
9409 *info = -1;
9410 break;
9411 }
9412 return NULL;
9413 }
9414
9415 struct omp_taskcopy_context
9416 {
9417 /* This field must be at the beginning, as we do "inheritance": Some
9418 callback functions for tree-inline.c (e.g., omp_copy_decl)
9419 receive a copy_body_data pointer that is up-casted to an
9420 omp_taskcopy_context pointer. */
9421 copy_body_data cb;
9422 omp_context *ctx;
9423 };
9424
9425 static tree
9426 task_copyfn_copy_decl (tree var, copy_body_data *cb)
9427 {
9428 struct omp_taskcopy_context *tcctx = (struct omp_taskcopy_context *) cb;
9429
9430 if (splay_tree_lookup (tcctx->ctx->sfield_map, (splay_tree_key) var))
9431 return create_tmp_var (TREE_TYPE (var));
9432
9433 return var;
9434 }
9435
9436 static tree
9437 task_copyfn_remap_type (struct omp_taskcopy_context *tcctx, tree orig_type)
9438 {
9439 tree name, new_fields = NULL, type, f;
9440
9441 type = lang_hooks.types.make_type (RECORD_TYPE);
9442 name = DECL_NAME (TYPE_NAME (orig_type));
9443 name = build_decl (gimple_location (tcctx->ctx->stmt),
9444 TYPE_DECL, name, type);
9445 TYPE_NAME (type) = name;
9446
9447 for (f = TYPE_FIELDS (orig_type); f ; f = TREE_CHAIN (f))
9448 {
9449 tree new_f = copy_node (f);
9450 DECL_CONTEXT (new_f) = type;
9451 TREE_TYPE (new_f) = remap_type (TREE_TYPE (f), &tcctx->cb);
9452 TREE_CHAIN (new_f) = new_fields;
9453 walk_tree (&DECL_SIZE (new_f), copy_tree_body_r, &tcctx->cb, NULL);
9454 walk_tree (&DECL_SIZE_UNIT (new_f), copy_tree_body_r, &tcctx->cb, NULL);
9455 walk_tree (&DECL_FIELD_OFFSET (new_f), copy_tree_body_r,
9456 &tcctx->cb, NULL);
9457 new_fields = new_f;
9458 tcctx->cb.decl_map->put (f, new_f);
9459 }
9460 TYPE_FIELDS (type) = nreverse (new_fields);
9461 layout_type (type);
9462 return type;
9463 }
9464
9465 /* Create the task copyfn, which copies firstprivate data and shared-var pointers into the new task's record. */
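/* A rough sketch of the function built here (field and type names
   illustrative only):

       void copyfn (struct record *dst, struct srecord *src)
       {
         dst->shared_ptr = src->shared_ptr;
         dst->fp_var = copy_ctor (src->fp_var);
         ...
       }

   Non-VLA firstprivates are copy-constructed in the second pass; VLA
   firstprivates come last, once the remapped sizes computed in the
   first pass are available.  */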
9466
9467 static void
9468 create_task_copyfn (gomp_task *task_stmt, omp_context *ctx)
9469 {
9470 struct function *child_cfun;
9471 tree child_fn, t, c, src, dst, f, sf, arg, sarg, decl;
9472 tree record_type, srecord_type, bind, list;
9473 bool record_needs_remap = false, srecord_needs_remap = false;
9474 splay_tree_node n;
9475 struct omp_taskcopy_context tcctx;
9476 location_t loc = gimple_location (task_stmt);
9477 size_t looptempno = 0;
9478
9479 child_fn = gimple_omp_task_copy_fn (task_stmt);
9480 child_cfun = DECL_STRUCT_FUNCTION (child_fn);
9481 gcc_assert (child_cfun->cfg == NULL);
9482 DECL_SAVED_TREE (child_fn) = alloc_stmt_list ();
9483
9484 /* Reset DECL_CONTEXT on function arguments. */
9485 for (t = DECL_ARGUMENTS (child_fn); t; t = DECL_CHAIN (t))
9486 DECL_CONTEXT (t) = child_fn;
9487
9488 /* Populate the function. */
9489 push_gimplify_context ();
9490 push_cfun (child_cfun);
9491
9492 bind = build3 (BIND_EXPR, void_type_node, NULL, NULL, NULL);
9493 TREE_SIDE_EFFECTS (bind) = 1;
9494 list = NULL;
9495 DECL_SAVED_TREE (child_fn) = bind;
9496 DECL_SOURCE_LOCATION (child_fn) = gimple_location (task_stmt);
9497
9498 /* Remap src and dst argument types if needed. */
9499 record_type = ctx->record_type;
9500 srecord_type = ctx->srecord_type;
9501 for (f = TYPE_FIELDS (record_type); f ; f = DECL_CHAIN (f))
9502 if (variably_modified_type_p (TREE_TYPE (f), ctx->cb.src_fn))
9503 {
9504 record_needs_remap = true;
9505 break;
9506 }
9507 for (f = TYPE_FIELDS (srecord_type); f ; f = DECL_CHAIN (f))
9508 if (variably_modified_type_p (TREE_TYPE (f), ctx->cb.src_fn))
9509 {
9510 srecord_needs_remap = true;
9511 break;
9512 }
9513
9514 if (record_needs_remap || srecord_needs_remap)
9515 {
9516 memset (&tcctx, '\0', sizeof (tcctx));
9517 tcctx.cb.src_fn = ctx->cb.src_fn;
9518 tcctx.cb.dst_fn = child_fn;
9519 tcctx.cb.src_node = cgraph_node::get (tcctx.cb.src_fn);
9520 gcc_checking_assert (tcctx.cb.src_node);
9521 tcctx.cb.dst_node = tcctx.cb.src_node;
9522 tcctx.cb.src_cfun = ctx->cb.src_cfun;
9523 tcctx.cb.copy_decl = task_copyfn_copy_decl;
9524 tcctx.cb.eh_lp_nr = 0;
9525 tcctx.cb.transform_call_graph_edges = CB_CGE_MOVE;
9526 tcctx.cb.decl_map = new hash_map<tree, tree>;
9527 tcctx.ctx = ctx;
9528
9529 if (record_needs_remap)
9530 record_type = task_copyfn_remap_type (&tcctx, record_type);
9531 if (srecord_needs_remap)
9532 srecord_type = task_copyfn_remap_type (&tcctx, srecord_type);
9533 }
9534 else
9535 tcctx.cb.decl_map = NULL;
9536
9537 arg = DECL_ARGUMENTS (child_fn);
9538 TREE_TYPE (arg) = build_pointer_type (record_type);
9539 sarg = DECL_CHAIN (arg);
9540 TREE_TYPE (sarg) = build_pointer_type (srecord_type);
9541
9542 /* First pass: initialize temporaries used in record_type and srecord_type
9543 sizes and field offsets. */
9544 if (tcctx.cb.decl_map)
9545 for (c = gimple_omp_task_clauses (task_stmt); c; c = OMP_CLAUSE_CHAIN (c))
9546 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
9547 {
9548 tree *p;
9549
9550 decl = OMP_CLAUSE_DECL (c);
9551 p = tcctx.cb.decl_map->get (decl);
9552 if (p == NULL)
9553 continue;
9554 n = splay_tree_lookup (ctx->sfield_map, (splay_tree_key) decl);
9555 sf = (tree) n->value;
9556 sf = *tcctx.cb.decl_map->get (sf);
9557 src = build_simple_mem_ref_loc (loc, sarg);
9558 src = omp_build_component_ref (src, sf);
9559 t = build2 (MODIFY_EXPR, TREE_TYPE (*p), *p, src);
9560 append_to_statement_list (t, &list);
9561 }
9562
9563 /* Second pass: copy shared var pointers and copy-construct non-VLA
9564 firstprivate vars. */
9565 for (c = gimple_omp_task_clauses (task_stmt); c; c = OMP_CLAUSE_CHAIN (c))
9566 switch (OMP_CLAUSE_CODE (c))
9567 {
9568 splay_tree_key key;
9569 case OMP_CLAUSE_SHARED:
9570 decl = OMP_CLAUSE_DECL (c);
9571 key = (splay_tree_key) decl;
9572 if (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
9573 key = (splay_tree_key) &DECL_UID (decl);
9574 n = splay_tree_lookup (ctx->field_map, key);
9575 if (n == NULL)
9576 break;
9577 f = (tree) n->value;
9578 if (tcctx.cb.decl_map)
9579 f = *tcctx.cb.decl_map->get (f);
9580 n = splay_tree_lookup (ctx->sfield_map, key);
9581 sf = (tree) n->value;
9582 if (tcctx.cb.decl_map)
9583 sf = *tcctx.cb.decl_map->get (sf);
9584 src = build_simple_mem_ref_loc (loc, sarg);
9585 src = omp_build_component_ref (src, sf);
9586 dst = build_simple_mem_ref_loc (loc, arg);
9587 dst = omp_build_component_ref (dst, f);
9588 t = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
9589 append_to_statement_list (t, &list);
9590 break;
9591 case OMP_CLAUSE_REDUCTION:
9592 case OMP_CLAUSE_IN_REDUCTION:
9593 decl = OMP_CLAUSE_DECL (c);
9594 if (TREE_CODE (decl) == MEM_REF)
9595 {
9596 decl = TREE_OPERAND (decl, 0);
9597 if (TREE_CODE (decl) == POINTER_PLUS_EXPR)
9598 decl = TREE_OPERAND (decl, 0);
9599 if (TREE_CODE (decl) == INDIRECT_REF
9600 || TREE_CODE (decl) == ADDR_EXPR)
9601 decl = TREE_OPERAND (decl, 0);
9602 }
9603 key = (splay_tree_key) decl;
9604 n = splay_tree_lookup (ctx->field_map, key);
9605 if (n == NULL)
9606 break;
9607 f = (tree) n->value;
9608 if (tcctx.cb.decl_map)
9609 f = *tcctx.cb.decl_map->get (f);
9610 n = splay_tree_lookup (ctx->sfield_map, key);
9611 sf = (tree) n->value;
9612 if (tcctx.cb.decl_map)
9613 sf = *tcctx.cb.decl_map->get (sf);
9614 src = build_simple_mem_ref_loc (loc, sarg);
9615 src = omp_build_component_ref (src, sf);
9616 if (decl != OMP_CLAUSE_DECL (c)
9617 && TREE_CODE (TREE_TYPE (decl)) == REFERENCE_TYPE
9618 && TREE_CODE (TREE_TYPE (TREE_TYPE (decl))) == POINTER_TYPE)
9619 src = build_simple_mem_ref_loc (loc, src);
9620 dst = build_simple_mem_ref_loc (loc, arg);
9621 dst = omp_build_component_ref (dst, f);
9622 t = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
9623 append_to_statement_list (t, &list);
9624 break;
9625 case OMP_CLAUSE__LOOPTEMP_:
9626 /* Fields for the first two _looptemp_ clauses are initialized by
9627 GOMP_taskloop*; the rest are handled like firstprivate. */
9628 if (looptempno < 2)
9629 {
9630 looptempno++;
9631 break;
9632 }
9633 /* FALLTHRU */
9634 case OMP_CLAUSE__REDUCTEMP_:
9635 case OMP_CLAUSE_FIRSTPRIVATE:
9636 decl = OMP_CLAUSE_DECL (c);
9637 if (is_variable_sized (decl))
9638 break;
9639 n = splay_tree_lookup (ctx->field_map, (splay_tree_key) decl);
9640 if (n == NULL)
9641 break;
9642 f = (tree) n->value;
9643 if (tcctx.cb.decl_map)
9644 f = *tcctx.cb.decl_map->get (f);
9645 n = splay_tree_lookup (ctx->sfield_map, (splay_tree_key) decl);
9646 if (n != NULL)
9647 {
9648 sf = (tree) n->value;
9649 if (tcctx.cb.decl_map)
9650 sf = *tcctx.cb.decl_map->get (sf);
9651 src = build_simple_mem_ref_loc (loc, sarg);
9652 src = omp_build_component_ref (src, sf);
9653 if (use_pointer_for_field (decl, NULL) || omp_is_reference (decl))
9654 src = build_simple_mem_ref_loc (loc, src);
9655 }
9656 else
9657 src = decl;
9658 dst = build_simple_mem_ref_loc (loc, arg);
9659 dst = omp_build_component_ref (dst, f);
9660 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_FIRSTPRIVATE)
9661 t = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
9662 else
9663 t = lang_hooks.decls.omp_clause_copy_ctor (c, dst, src);
9664 append_to_statement_list (t, &list);
9665 break;
9666 case OMP_CLAUSE_PRIVATE:
9667 if (! OMP_CLAUSE_PRIVATE_OUTER_REF (c))
9668 break;
9669 decl = OMP_CLAUSE_DECL (c);
9670 n = splay_tree_lookup (ctx->field_map, (splay_tree_key) decl);
9671 f = (tree) n->value;
9672 if (tcctx.cb.decl_map)
9673 f = *tcctx.cb.decl_map->get (f);
9674 n = splay_tree_lookup (ctx->sfield_map, (splay_tree_key) decl);
9675 if (n != NULL)
9676 {
9677 sf = (tree) n->value;
9678 if (tcctx.cb.decl_map)
9679 sf = *tcctx.cb.decl_map->get (sf);
9680 src = build_simple_mem_ref_loc (loc, sarg);
9681 src = omp_build_component_ref (src, sf);
9682 if (use_pointer_for_field (decl, NULL))
9683 src = build_simple_mem_ref_loc (loc, src);
9684 }
9685 else
9686 src = decl;
9687 dst = build_simple_mem_ref_loc (loc, arg);
9688 dst = omp_build_component_ref (dst, f);
9689 t = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
9690 append_to_statement_list (t, &list);
9691 break;
9692 default:
9693 break;
9694 }
9695
9696 /* Last pass: handle VLA firstprivates. */
9697 if (tcctx.cb.decl_map)
9698 for (c = gimple_omp_task_clauses (task_stmt); c; c = OMP_CLAUSE_CHAIN (c))
9699 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
9700 {
9701 tree ind, ptr, df;
9702
9703 decl = OMP_CLAUSE_DECL (c);
9704 if (!is_variable_sized (decl))
9705 continue;
9706 n = splay_tree_lookup (ctx->field_map, (splay_tree_key) decl);
9707 if (n == NULL)
9708 continue;
9709 f = (tree) n->value;
9710 f = *tcctx.cb.decl_map->get (f);
9711 gcc_assert (DECL_HAS_VALUE_EXPR_P (decl));
9712 ind = DECL_VALUE_EXPR (decl);
9713 gcc_assert (TREE_CODE (ind) == INDIRECT_REF);
9714 gcc_assert (DECL_P (TREE_OPERAND (ind, 0)));
9715 n = splay_tree_lookup (ctx->sfield_map,
9716 (splay_tree_key) TREE_OPERAND (ind, 0));
9717 sf = (tree) n->value;
9718 sf = *tcctx.cb.decl_map->get (sf);
9719 src = build_simple_mem_ref_loc (loc, sarg);
9720 src = omp_build_component_ref (src, sf);
9721 src = build_simple_mem_ref_loc (loc, src);
9722 dst = build_simple_mem_ref_loc (loc, arg);
9723 dst = omp_build_component_ref (dst, f);
9724 t = lang_hooks.decls.omp_clause_copy_ctor (c, dst, src);
9725 append_to_statement_list (t, &list);
9726 n = splay_tree_lookup (ctx->field_map,
9727 (splay_tree_key) TREE_OPERAND (ind, 0));
9728 df = (tree) n->value;
9729 df = *tcctx.cb.decl_map->get (df);
9730 ptr = build_simple_mem_ref_loc (loc, arg);
9731 ptr = omp_build_component_ref (ptr, df);
9732 t = build2 (MODIFY_EXPR, TREE_TYPE (ptr), ptr,
9733 build_fold_addr_expr_loc (loc, dst));
9734 append_to_statement_list (t, &list);
9735 }
9736
9737 t = build1 (RETURN_EXPR, void_type_node, NULL);
9738 append_to_statement_list (t, &list);
9739
9740 if (tcctx.cb.decl_map)
9741 delete tcctx.cb.decl_map;
9742 pop_gimplify_context (NULL);
9743 BIND_EXPR_BODY (bind) = list;
9744 pop_cfun ();
9745 }
9746
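/* Lower OMP_CLAUSE_DEPEND clauses into the address array handed to the
   runtime. A sketch of the layout built below: with only in/out/inout
   dependences the array is

       { N, N-out/inout, addr1, ..., addrN }

   and once mutexinoutset or depobj entries appear it becomes

       { 0, N, N-out/inout, N-mutexinoutset, N-in, addr1, ..., addrN }

   with the addresses grouped by kind in that order (see the counting
   code below for the authoritative layout).  */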
9747 static void
9748 lower_depend_clauses (tree *pclauses, gimple_seq *iseq, gimple_seq *oseq)
9749 {
9750 tree c, clauses;
9751 gimple *g;
9752 size_t cnt[4] = { 0, 0, 0, 0 }, idx = 2, i;
9753
9754 clauses = omp_find_clause (*pclauses, OMP_CLAUSE_DEPEND);
9755 gcc_assert (clauses);
9756 for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
9757 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND)
9758 switch (OMP_CLAUSE_DEPEND_KIND (c))
9759 {
9760 case OMP_CLAUSE_DEPEND_LAST:
9761 /* Lowering already done at gimplification. */
9762 return;
9763 case OMP_CLAUSE_DEPEND_IN:
9764 cnt[2]++;
9765 break;
9766 case OMP_CLAUSE_DEPEND_OUT:
9767 case OMP_CLAUSE_DEPEND_INOUT:
9768 cnt[0]++;
9769 break;
9770 case OMP_CLAUSE_DEPEND_MUTEXINOUTSET:
9771 cnt[1]++;
9772 break;
9773 case OMP_CLAUSE_DEPEND_DEPOBJ:
9774 cnt[3]++;
9775 break;
9776 case OMP_CLAUSE_DEPEND_SOURCE:
9777 case OMP_CLAUSE_DEPEND_SINK:
9778 /* FALLTHRU */
9779 default:
9780 gcc_unreachable ();
9781 }
9782 if (cnt[1] || cnt[3])
9783 idx = 5;
9784 size_t total = cnt[0] + cnt[1] + cnt[2] + cnt[3];
9785 tree type = build_array_type_nelts (ptr_type_node, total + idx);
9786 tree array = create_tmp_var (type);
9787 TREE_ADDRESSABLE (array) = 1;
9788 tree r = build4 (ARRAY_REF, ptr_type_node, array, size_int (0), NULL_TREE,
9789 NULL_TREE);
9790 if (idx == 5)
9791 {
9792 g = gimple_build_assign (r, build_int_cst (ptr_type_node, 0));
9793 gimple_seq_add_stmt (iseq, g);
9794 r = build4 (ARRAY_REF, ptr_type_node, array, size_int (1), NULL_TREE,
9795 NULL_TREE);
9796 }
9797 g = gimple_build_assign (r, build_int_cst (ptr_type_node, total));
9798 gimple_seq_add_stmt (iseq, g);
9799 for (i = 0; i < (idx == 5 ? 3 : 1); i++)
9800 {
9801 r = build4 (ARRAY_REF, ptr_type_node, array,
9802 size_int (i + 1 + (idx == 5)), NULL_TREE, NULL_TREE);
9803 g = gimple_build_assign (r, build_int_cst (ptr_type_node, cnt[i]));
9804 gimple_seq_add_stmt (iseq, g);
9805 }
9806 for (i = 0; i < 4; i++)
9807 {
9808 if (cnt[i] == 0)
9809 continue;
9810 for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
9811 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_DEPEND)
9812 continue;
9813 else
9814 {
9815 switch (OMP_CLAUSE_DEPEND_KIND (c))
9816 {
9817 case OMP_CLAUSE_DEPEND_IN:
9818 if (i != 2)
9819 continue;
9820 break;
9821 case OMP_CLAUSE_DEPEND_OUT:
9822 case OMP_CLAUSE_DEPEND_INOUT:
9823 if (i != 0)
9824 continue;
9825 break;
9826 case OMP_CLAUSE_DEPEND_MUTEXINOUTSET:
9827 if (i != 1)
9828 continue;
9829 break;
9830 case OMP_CLAUSE_DEPEND_DEPOBJ:
9831 if (i != 3)
9832 continue;
9833 break;
9834 default:
9835 gcc_unreachable ();
9836 }
9837 tree t = OMP_CLAUSE_DECL (c);
9838 t = fold_convert (ptr_type_node, t);
9839 gimplify_expr (&t, iseq, NULL, is_gimple_val, fb_rvalue);
9840 r = build4 (ARRAY_REF, ptr_type_node, array, size_int (idx++),
9841 NULL_TREE, NULL_TREE);
9842 g = gimple_build_assign (r, t);
9843 gimple_seq_add_stmt (iseq, g);
9844 }
9845 }
9846 c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE_DEPEND);
9847 OMP_CLAUSE_DEPEND_KIND (c) = OMP_CLAUSE_DEPEND_LAST;
9848 OMP_CLAUSE_DECL (c) = build_fold_addr_expr (array);
9849 OMP_CLAUSE_CHAIN (c) = *pclauses;
9850 *pclauses = c;
9851 tree clobber = build_constructor (type, NULL);
9852 TREE_THIS_VOLATILE (clobber) = 1;
9853 g = gimple_build_assign (array, clobber);
9854 gimple_seq_add_stmt (oseq, g);
9855 }
9856
9857 /* Lower the OpenMP parallel or task directive in the current statement
9858 in GSI_P. CTX holds context information for the directive. */
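/* A sketch of the shape produced here, assuming a shared scalar x (the
   GOMP_parallel/GOMP_task call itself is emitted later, by
   pass_expand_omp):

       .omp_data_o.x = ...;              (lower_send_shared_vars)
       GIMPLE_OMP_PARALLEL <body:
         <receiver_decl> = &.omp_data_o;
         ... uses of x rewritten through <receiver_decl> ...>
       .omp_data_o = {CLOBBER};  */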
9859
9860 static void
9861 lower_omp_taskreg (gimple_stmt_iterator *gsi_p, omp_context *ctx)
9862 {
9863 tree clauses;
9864 tree child_fn, t;
9865 gimple *stmt = gsi_stmt (*gsi_p);
9866 gbind *par_bind, *bind, *dep_bind = NULL;
9867 gimple_seq par_body;
9868 location_t loc = gimple_location (stmt);
9869
9870 clauses = gimple_omp_taskreg_clauses (stmt);
9871 if (gimple_code (stmt) == GIMPLE_OMP_TASK
9872 && gimple_omp_task_taskwait_p (stmt))
9873 {
9874 par_bind = NULL;
9875 par_body = NULL;
9876 }
9877 else
9878 {
9879 par_bind
9880 = as_a <gbind *> (gimple_seq_first_stmt (gimple_omp_body (stmt)));
9881 par_body = gimple_bind_body (par_bind);
9882 }
9883 child_fn = ctx->cb.dst_fn;
9884 if (gimple_code (stmt) == GIMPLE_OMP_PARALLEL
9885 && !gimple_omp_parallel_combined_p (stmt))
9886 {
9887 struct walk_stmt_info wi;
9888 int ws_num = 0;
9889
9890 memset (&wi, 0, sizeof (wi));
9891 wi.info = &ws_num;
9892 wi.val_only = true;
9893 walk_gimple_seq (par_body, check_combined_parallel, NULL, &wi);
9894 if (ws_num == 1)
9895 gimple_omp_parallel_set_combined_p (stmt, true);
9896 }
9897 gimple_seq dep_ilist = NULL;
9898 gimple_seq dep_olist = NULL;
9899 if (gimple_code (stmt) == GIMPLE_OMP_TASK
9900 && omp_find_clause (clauses, OMP_CLAUSE_DEPEND))
9901 {
9902 push_gimplify_context ();
9903 dep_bind = gimple_build_bind (NULL, NULL, make_node (BLOCK));
9904 lower_depend_clauses (gimple_omp_task_clauses_ptr (stmt),
9905 &dep_ilist, &dep_olist);
9906 }
9907
9908 if (gimple_code (stmt) == GIMPLE_OMP_TASK
9909 && gimple_omp_task_taskwait_p (stmt))
9910 {
9911 if (dep_bind)
9912 {
9913 gsi_replace (gsi_p, dep_bind, true);
9914 gimple_bind_add_seq (dep_bind, dep_ilist);
9915 gimple_bind_add_stmt (dep_bind, stmt);
9916 gimple_bind_add_seq (dep_bind, dep_olist);
9917 pop_gimplify_context (dep_bind);
9918 }
9919 return;
9920 }
9921
9922 if (ctx->srecord_type)
9923 create_task_copyfn (as_a <gomp_task *> (stmt), ctx);
9924
9925 gimple_seq tskred_ilist = NULL;
9926 gimple_seq tskred_olist = NULL;
9927 if ((is_task_ctx (ctx)
9928 && gimple_omp_task_taskloop_p (ctx->stmt)
9929 && omp_find_clause (gimple_omp_task_clauses (ctx->stmt),
9930 OMP_CLAUSE_REDUCTION))
9931 || (is_parallel_ctx (ctx)
9932 && omp_find_clause (gimple_omp_parallel_clauses (stmt),
9933 OMP_CLAUSE__REDUCTEMP_)))
9934 {
9935 if (dep_bind == NULL)
9936 {
9937 push_gimplify_context ();
9938 dep_bind = gimple_build_bind (NULL, NULL, make_node (BLOCK));
9939 }
9940 lower_omp_task_reductions (ctx, is_task_ctx (ctx) ? OMP_TASKLOOP
9941 : OMP_PARALLEL,
9942 gimple_omp_taskreg_clauses (ctx->stmt),
9943 &tskred_ilist, &tskred_olist);
9944 }
9945
9946 push_gimplify_context ();
9947
9948 gimple_seq par_olist = NULL;
9949 gimple_seq par_ilist = NULL;
9950 gimple_seq par_rlist = NULL;
9951 bool phony_construct = gimple_code (stmt) == GIMPLE_OMP_PARALLEL
9952 && gimple_omp_parallel_grid_phony (as_a <gomp_parallel *> (stmt));
9953 if (phony_construct && ctx->record_type)
9954 {
9955 gcc_checking_assert (!ctx->receiver_decl);
9956 ctx->receiver_decl = create_tmp_var
9957 (build_reference_type (ctx->record_type), ".omp_rec");
9958 }
9959 lower_rec_input_clauses (clauses, &par_ilist, &par_olist, ctx, NULL);
9960 lower_omp (&par_body, ctx);
9961 if (gimple_code (stmt) == GIMPLE_OMP_PARALLEL)
9962 lower_reduction_clauses (clauses, &par_rlist, NULL, ctx);
9963
9964 /* Declare all the variables created by mapping and the variables
9965 declared in the scope of the parallel body. */
9966 record_vars_into (ctx->block_vars, child_fn);
9967 maybe_remove_omp_member_access_dummy_vars (par_bind);
9968 record_vars_into (gimple_bind_vars (par_bind), child_fn);
9969
9970 if (ctx->record_type)
9971 {
9972 ctx->sender_decl
9973 = create_tmp_var (ctx->srecord_type ? ctx->srecord_type
9974 : ctx->record_type, ".omp_data_o");
9975 DECL_NAMELESS (ctx->sender_decl) = 1;
9976 TREE_ADDRESSABLE (ctx->sender_decl) = 1;
9977 gimple_omp_taskreg_set_data_arg (stmt, ctx->sender_decl);
9978 }
9979
9980 gimple_seq olist = NULL;
9981 gimple_seq ilist = NULL;
9982 lower_send_clauses (clauses, &ilist, &olist, ctx);
9983 lower_send_shared_vars (&ilist, &olist, ctx);
9984
9985 if (ctx->record_type)
9986 {
9987 tree clobber = build_constructor (TREE_TYPE (ctx->sender_decl), NULL);
9988 TREE_THIS_VOLATILE (clobber) = 1;
9989 gimple_seq_add_stmt (&olist, gimple_build_assign (ctx->sender_decl,
9990 clobber));
9991 }
9992
9993 /* Once all the expansions are done, sequence all the different
9994 fragments inside gimple_omp_body. */
9995
9996 gimple_seq new_body = NULL;
9997
9998 if (ctx->record_type)
9999 {
10000 t = build_fold_addr_expr_loc (loc, ctx->sender_decl);
10001 /* fixup_child_record_type might have changed receiver_decl's type. */
10002 t = fold_convert_loc (loc, TREE_TYPE (ctx->receiver_decl), t);
10003 gimple_seq_add_stmt (&new_body,
10004 gimple_build_assign (ctx->receiver_decl, t));
10005 }
10006
10007 gimple_seq_add_seq (&new_body, par_ilist);
10008 gimple_seq_add_seq (&new_body, par_body);
10009 gimple_seq_add_seq (&new_body, par_rlist);
10010 if (ctx->cancellable)
10011 gimple_seq_add_stmt (&new_body, gimple_build_label (ctx->cancel_label));
10012 gimple_seq_add_seq (&new_body, par_olist);
10013 new_body = maybe_catch_exception (new_body);
10014 if (gimple_code (stmt) == GIMPLE_OMP_TASK)
10015 gimple_seq_add_stmt (&new_body,
10016 gimple_build_omp_continue (integer_zero_node,
10017 integer_zero_node));
10018 if (!phony_construct)
10019 {
10020 gimple_seq_add_stmt (&new_body, gimple_build_omp_return (false));
10021 gimple_omp_set_body (stmt, new_body);
10022 }
10023
10024 if (dep_bind && gimple_bind_block (par_bind) == NULL_TREE)
10025 bind = gimple_build_bind (NULL, NULL, make_node (BLOCK));
10026 else
10027 bind = gimple_build_bind (NULL, NULL, gimple_bind_block (par_bind));
10028 gsi_replace (gsi_p, dep_bind ? dep_bind : bind, true);
10029 gimple_bind_add_seq (bind, ilist);
10030 if (!phony_construct)
10031 gimple_bind_add_stmt (bind, stmt);
10032 else
10033 gimple_bind_add_seq (bind, new_body);
10034 gimple_bind_add_seq (bind, olist);
10035
10036 pop_gimplify_context (NULL);
10037
10038 if (dep_bind)
10039 {
10040 gimple_bind_add_seq (dep_bind, dep_ilist);
10041 gimple_bind_add_seq (dep_bind, tskred_ilist);
10042 gimple_bind_add_stmt (dep_bind, bind);
10043 gimple_bind_add_seq (dep_bind, tskred_olist);
10044 gimple_bind_add_seq (dep_bind, dep_olist);
10045 pop_gimplify_context (dep_bind);
10046 }
10047 }
10048
10049 /* Lower the GIMPLE_OMP_TARGET in the current statement
10050 in GSI_P. CTX holds context information for the directive. */
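/* For offloaded regions the send buffer is a triple of arrays (a
   sketch, assuming two mapped variables a and b):

       .omp_data_arr   = { &a, &b };
       .omp_data_sizes = { sizeof (a), sizeof (b) };
       .omp_data_kinds = { kind_a | align_a << 8, ... };

   where each kind entry carries the map kind in its low bits and
   ceil_log2 of the alignment above talign_shift (8), matching the
   encoding done below.  */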
10051
10052 static void
10053 lower_omp_target (gimple_stmt_iterator *gsi_p, omp_context *ctx)
10054 {
10055 tree clauses;
10056 tree child_fn, t, c;
10057 gomp_target *stmt = as_a <gomp_target *> (gsi_stmt (*gsi_p));
10058 gbind *tgt_bind, *bind, *dep_bind = NULL;
10059 gimple_seq tgt_body, olist, ilist, fplist, new_body;
10060 location_t loc = gimple_location (stmt);
10061 bool offloaded, data_region;
10062 unsigned int map_cnt = 0;
10063
10064 offloaded = is_gimple_omp_offloaded (stmt);
10065 switch (gimple_omp_target_kind (stmt))
10066 {
10067 case GF_OMP_TARGET_KIND_REGION:
10068 case GF_OMP_TARGET_KIND_UPDATE:
10069 case GF_OMP_TARGET_KIND_ENTER_DATA:
10070 case GF_OMP_TARGET_KIND_EXIT_DATA:
10071 case GF_OMP_TARGET_KIND_OACC_PARALLEL:
10072 case GF_OMP_TARGET_KIND_OACC_KERNELS:
10073 case GF_OMP_TARGET_KIND_OACC_UPDATE:
10074 case GF_OMP_TARGET_KIND_OACC_ENTER_EXIT_DATA:
10075 case GF_OMP_TARGET_KIND_OACC_DECLARE:
10076 data_region = false;
10077 break;
10078 case GF_OMP_TARGET_KIND_DATA:
10079 case GF_OMP_TARGET_KIND_OACC_DATA:
10080 case GF_OMP_TARGET_KIND_OACC_HOST_DATA:
10081 data_region = true;
10082 break;
10083 default:
10084 gcc_unreachable ();
10085 }
10086
10087 clauses = gimple_omp_target_clauses (stmt);
10088
10089 gimple_seq dep_ilist = NULL;
10090 gimple_seq dep_olist = NULL;
10091 if (omp_find_clause (clauses, OMP_CLAUSE_DEPEND))
10092 {
10093 push_gimplify_context ();
10094 dep_bind = gimple_build_bind (NULL, NULL, make_node (BLOCK));
10095 lower_depend_clauses (gimple_omp_target_clauses_ptr (stmt),
10096 &dep_ilist, &dep_olist);
10097 }
10098
10099 tgt_bind = NULL;
10100 tgt_body = NULL;
10101 if (offloaded)
10102 {
10103 tgt_bind = gimple_seq_first_stmt_as_a_bind (gimple_omp_body (stmt));
10104 tgt_body = gimple_bind_body (tgt_bind);
10105 }
10106 else if (data_region)
10107 tgt_body = gimple_omp_body (stmt);
10108 child_fn = ctx->cb.dst_fn;
10109
10110 push_gimplify_context ();
10111 fplist = NULL;
10112
10113 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
10114 switch (OMP_CLAUSE_CODE (c))
10115 {
10116 tree var, x;
10117
10118 default:
10119 break;
10120 case OMP_CLAUSE_MAP:
10121 #if CHECKING_P
10122 /* First check what we're prepared to handle in the following. */
10123 switch (OMP_CLAUSE_MAP_KIND (c))
10124 {
10125 case GOMP_MAP_ALLOC:
10126 case GOMP_MAP_TO:
10127 case GOMP_MAP_FROM:
10128 case GOMP_MAP_TOFROM:
10129 case GOMP_MAP_POINTER:
10130 case GOMP_MAP_TO_PSET:
10131 case GOMP_MAP_DELETE:
10132 case GOMP_MAP_RELEASE:
10133 case GOMP_MAP_ALWAYS_TO:
10134 case GOMP_MAP_ALWAYS_FROM:
10135 case GOMP_MAP_ALWAYS_TOFROM:
10136 case GOMP_MAP_FIRSTPRIVATE_POINTER:
10137 case GOMP_MAP_FIRSTPRIVATE_REFERENCE:
10138 case GOMP_MAP_STRUCT:
10139 case GOMP_MAP_ALWAYS_POINTER:
10140 break;
10141 case GOMP_MAP_FORCE_ALLOC:
10142 case GOMP_MAP_FORCE_TO:
10143 case GOMP_MAP_FORCE_FROM:
10144 case GOMP_MAP_FORCE_TOFROM:
10145 case GOMP_MAP_FORCE_PRESENT:
10146 case GOMP_MAP_FORCE_DEVICEPTR:
10147 case GOMP_MAP_DEVICE_RESIDENT:
10148 case GOMP_MAP_LINK:
10149 gcc_assert (is_gimple_omp_oacc (stmt));
10150 break;
10151 default:
10152 gcc_unreachable ();
10153 }
10154 #endif
10155 /* FALLTHRU */
10156 case OMP_CLAUSE_TO:
10157 case OMP_CLAUSE_FROM:
10158 oacc_firstprivate:
10159 var = OMP_CLAUSE_DECL (c);
10160 if (!DECL_P (var))
10161 {
10162 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_MAP
10163 || (!OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c)
10164 && (OMP_CLAUSE_MAP_KIND (c)
10165 != GOMP_MAP_FIRSTPRIVATE_POINTER)))
10166 map_cnt++;
10167 continue;
10168 }
10169
10170 if (DECL_SIZE (var)
10171 && TREE_CODE (DECL_SIZE (var)) != INTEGER_CST)
10172 {
10173 tree var2 = DECL_VALUE_EXPR (var);
10174 gcc_assert (TREE_CODE (var2) == INDIRECT_REF);
10175 var2 = TREE_OPERAND (var2, 0);
10176 gcc_assert (DECL_P (var2));
10177 var = var2;
10178 }
10179
10180 if (offloaded
10181 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
10182 && (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER
10183 || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_REFERENCE))
10184 {
10185 if (TREE_CODE (TREE_TYPE (var)) == ARRAY_TYPE)
10186 {
10187 if (is_global_var (maybe_lookup_decl_in_outer_ctx (var, ctx))
10188 && varpool_node::get_create (var)->offloadable)
10189 continue;
10190
10191 tree type = build_pointer_type (TREE_TYPE (var));
10192 tree new_var = lookup_decl (var, ctx);
10193 x = create_tmp_var_raw (type, get_name (new_var));
10194 gimple_add_tmp_var (x);
10195 x = build_simple_mem_ref (x);
10196 SET_DECL_VALUE_EXPR (new_var, x);
10197 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
10198 }
10199 continue;
10200 }
10201
10202 if (!maybe_lookup_field (var, ctx))
10203 continue;
10204
10205 /* Don't remap oacc parallel reduction variables, because the
10206 intermediate result must be local to each gang. */
10207 if (offloaded && !(OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
10208 && OMP_CLAUSE_MAP_IN_REDUCTION (c)))
10209 {
10210 x = build_receiver_ref (var, true, ctx);
10211 tree new_var = lookup_decl (var, ctx);
10212
10213 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
10214 && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_POINTER
10215 && !OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c)
10216 && TREE_CODE (TREE_TYPE (var)) == ARRAY_TYPE)
10217 x = build_simple_mem_ref (x);
10218 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
10219 {
10220 gcc_assert (is_gimple_omp_oacc (ctx->stmt));
10221 if (omp_is_reference (new_var)
10222 && TREE_CODE (TREE_TYPE (new_var)) != POINTER_TYPE)
10223 {
10224 /* Create a local object to hold the instance
10225 value. */
10226 tree type = TREE_TYPE (TREE_TYPE (new_var));
10227 const char *id = IDENTIFIER_POINTER (DECL_NAME (new_var));
10228 tree inst = create_tmp_var (type, id);
10229 gimplify_assign (inst, fold_indirect_ref (x), &fplist);
10230 x = build_fold_addr_expr (inst);
10231 }
10232 gimplify_assign (new_var, x, &fplist);
10233 }
10234 else if (DECL_P (new_var))
10235 {
10236 SET_DECL_VALUE_EXPR (new_var, x);
10237 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
10238 }
10239 else
10240 gcc_unreachable ();
10241 }
10242 map_cnt++;
10243 break;
10244
10245 case OMP_CLAUSE_FIRSTPRIVATE:
10246 if (is_oacc_parallel (ctx))
10247 goto oacc_firstprivate;
10248 map_cnt++;
10249 var = OMP_CLAUSE_DECL (c);
10250 if (!omp_is_reference (var)
10251 && !is_gimple_reg_type (TREE_TYPE (var)))
10252 {
10253 tree new_var = lookup_decl (var, ctx);
10254 if (is_variable_sized (var))
10255 {
10256 tree pvar = DECL_VALUE_EXPR (var);
10257 gcc_assert (TREE_CODE (pvar) == INDIRECT_REF);
10258 pvar = TREE_OPERAND (pvar, 0);
10259 gcc_assert (DECL_P (pvar));
10260 tree new_pvar = lookup_decl (pvar, ctx);
10261 x = build_fold_indirect_ref (new_pvar);
10262 TREE_THIS_NOTRAP (x) = 1;
10263 }
10264 else
10265 x = build_receiver_ref (var, true, ctx);
10266 SET_DECL_VALUE_EXPR (new_var, x);
10267 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
10268 }
10269 break;
10270
10271 case OMP_CLAUSE_PRIVATE:
10272 if (is_gimple_omp_oacc (ctx->stmt))
10273 break;
10274 var = OMP_CLAUSE_DECL (c);
10275 if (is_variable_sized (var))
10276 {
10277 tree new_var = lookup_decl (var, ctx);
10278 tree pvar = DECL_VALUE_EXPR (var);
10279 gcc_assert (TREE_CODE (pvar) == INDIRECT_REF);
10280 pvar = TREE_OPERAND (pvar, 0);
10281 gcc_assert (DECL_P (pvar));
10282 tree new_pvar = lookup_decl (pvar, ctx);
10283 x = build_fold_indirect_ref (new_pvar);
10284 TREE_THIS_NOTRAP (x) = 1;
10285 SET_DECL_VALUE_EXPR (new_var, x);
10286 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
10287 }
10288 break;
10289
10290 case OMP_CLAUSE_USE_DEVICE_PTR:
10291 case OMP_CLAUSE_IS_DEVICE_PTR:
10292 var = OMP_CLAUSE_DECL (c);
10293 map_cnt++;
10294 if (is_variable_sized (var))
10295 {
10296 tree new_var = lookup_decl (var, ctx);
10297 tree pvar = DECL_VALUE_EXPR (var);
10298 gcc_assert (TREE_CODE (pvar) == INDIRECT_REF);
10299 pvar = TREE_OPERAND (pvar, 0);
10300 gcc_assert (DECL_P (pvar));
10301 tree new_pvar = lookup_decl (pvar, ctx);
10302 x = build_fold_indirect_ref (new_pvar);
10303 TREE_THIS_NOTRAP (x) = 1;
10304 SET_DECL_VALUE_EXPR (new_var, x);
10305 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
10306 }
10307 else if (TREE_CODE (TREE_TYPE (var)) == ARRAY_TYPE)
10308 {
10309 tree new_var = lookup_decl (var, ctx);
10310 tree type = build_pointer_type (TREE_TYPE (var));
10311 x = create_tmp_var_raw (type, get_name (new_var));
10312 gimple_add_tmp_var (x);
10313 x = build_simple_mem_ref (x);
10314 SET_DECL_VALUE_EXPR (new_var, x);
10315 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
10316 }
10317 else
10318 {
10319 tree new_var = lookup_decl (var, ctx);
10320 x = create_tmp_var_raw (TREE_TYPE (new_var), get_name (new_var));
10321 gimple_add_tmp_var (x);
10322 SET_DECL_VALUE_EXPR (new_var, x);
10323 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
10324 }
10325 break;
10326 }
10327
10328 if (offloaded)
10329 {
10330 target_nesting_level++;
10331 lower_omp (&tgt_body, ctx);
10332 target_nesting_level--;
10333 }
10334 else if (data_region)
10335 lower_omp (&tgt_body, ctx);
10336
10337 if (offloaded)
10338 {
10339 /* Declare all the variables created by mapping and the variables
10340 declared in the scope of the target body. */
10341 record_vars_into (ctx->block_vars, child_fn);
10342 maybe_remove_omp_member_access_dummy_vars (tgt_bind);
10343 record_vars_into (gimple_bind_vars (tgt_bind), child_fn);
10344 }
10345
10346 olist = NULL;
10347 ilist = NULL;
10348 if (ctx->record_type)
10349 {
10350 ctx->sender_decl
10351 = create_tmp_var (ctx->record_type, ".omp_data_arr");
10352 DECL_NAMELESS (ctx->sender_decl) = 1;
10353 TREE_ADDRESSABLE (ctx->sender_decl) = 1;
10354 t = make_tree_vec (3);
10355 TREE_VEC_ELT (t, 0) = ctx->sender_decl;
10356 TREE_VEC_ELT (t, 1)
10357 = create_tmp_var (build_array_type_nelts (size_type_node, map_cnt),
10358 ".omp_data_sizes");
10359 DECL_NAMELESS (TREE_VEC_ELT (t, 1)) = 1;
10360 TREE_ADDRESSABLE (TREE_VEC_ELT (t, 1)) = 1;
10361 TREE_STATIC (TREE_VEC_ELT (t, 1)) = 1;
10362 tree tkind_type = short_unsigned_type_node;
10363 int talign_shift = 8;
10364 TREE_VEC_ELT (t, 2)
10365 = create_tmp_var (build_array_type_nelts (tkind_type, map_cnt),
10366 ".omp_data_kinds");
10367 DECL_NAMELESS (TREE_VEC_ELT (t, 2)) = 1;
10368 TREE_ADDRESSABLE (TREE_VEC_ELT (t, 2)) = 1;
10369 TREE_STATIC (TREE_VEC_ELT (t, 2)) = 1;
10370 gimple_omp_target_set_data_arg (stmt, t);
10371
10372 vec<constructor_elt, va_gc> *vsize;
10373 vec<constructor_elt, va_gc> *vkind;
10374 vec_alloc (vsize, map_cnt);
10375 vec_alloc (vkind, map_cnt);
10376 unsigned int map_idx = 0;
10377
10378 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
10379 switch (OMP_CLAUSE_CODE (c))
10380 {
10381 tree ovar, nc, s, purpose, var, x, type;
10382 unsigned int talign;
10383
10384 default:
10385 break;
10386
10387 case OMP_CLAUSE_MAP:
10388 case OMP_CLAUSE_TO:
10389 case OMP_CLAUSE_FROM:
10390 oacc_firstprivate_map:
10391 nc = c;
10392 ovar = OMP_CLAUSE_DECL (c);
10393 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
10394 && (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER
10395 || (OMP_CLAUSE_MAP_KIND (c)
10396 == GOMP_MAP_FIRSTPRIVATE_REFERENCE)))
10397 break;
10398 if (!DECL_P (ovar))
10399 {
10400 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
10401 && OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c))
10402 {
10403 gcc_checking_assert (OMP_CLAUSE_DECL (OMP_CLAUSE_CHAIN (c))
10404 == get_base_address (ovar));
10405 nc = OMP_CLAUSE_CHAIN (c);
10406 ovar = OMP_CLAUSE_DECL (nc);
10407 }
10408 else
10409 {
10410 tree x = build_sender_ref (ovar, ctx);
10411 tree v
10412 = build_fold_addr_expr_with_type (ovar, ptr_type_node);
10413 gimplify_assign (x, v, &ilist);
10414 nc = NULL_TREE;
10415 }
10416 }
10417 else
10418 {
10419 if (DECL_SIZE (ovar)
10420 && TREE_CODE (DECL_SIZE (ovar)) != INTEGER_CST)
10421 {
10422 tree ovar2 = DECL_VALUE_EXPR (ovar);
10423 gcc_assert (TREE_CODE (ovar2) == INDIRECT_REF);
10424 ovar2 = TREE_OPERAND (ovar2, 0);
10425 gcc_assert (DECL_P (ovar2));
10426 ovar = ovar2;
10427 }
10428 if (!maybe_lookup_field (ovar, ctx))
10429 continue;
10430 }
10431
10432 talign = TYPE_ALIGN_UNIT (TREE_TYPE (ovar));
10433 if (DECL_P (ovar) && DECL_ALIGN_UNIT (ovar) > talign)
10434 talign = DECL_ALIGN_UNIT (ovar);
10435 if (nc)
10436 {
10437 var = lookup_decl_in_outer_ctx (ovar, ctx);
10438 x = build_sender_ref (ovar, ctx);
10439
10440 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
10441 && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_POINTER
10442 && !OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c)
10443 && TREE_CODE (TREE_TYPE (ovar)) == ARRAY_TYPE)
10444 {
10445 gcc_assert (offloaded);
10446 tree avar
10447 = create_tmp_var (TREE_TYPE (TREE_TYPE (x)));
10448 mark_addressable (avar);
10449 gimplify_assign (avar, build_fold_addr_expr (var), &ilist);
10450 talign = DECL_ALIGN_UNIT (avar);
10451 avar = build_fold_addr_expr (avar);
10452 gimplify_assign (x, avar, &ilist);
10453 }
10454 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
10455 {
10456 gcc_assert (is_gimple_omp_oacc (ctx->stmt));
10457 if (!omp_is_reference (var))
10458 {
10459 if (is_gimple_reg (var)
10460 && OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (c))
10461 TREE_NO_WARNING (var) = 1;
10462 var = build_fold_addr_expr (var);
10463 }
10464 else
10465 talign = TYPE_ALIGN_UNIT (TREE_TYPE (TREE_TYPE (ovar)));
10466 gimplify_assign (x, var, &ilist);
10467 }
10468 else if (is_gimple_reg (var))
10469 {
10470 gcc_assert (offloaded);
10471 tree avar = create_tmp_var (TREE_TYPE (var));
10472 mark_addressable (avar);
10473 enum gomp_map_kind map_kind = OMP_CLAUSE_MAP_KIND (c);
10474 if (GOMP_MAP_COPY_TO_P (map_kind)
10475 || map_kind == GOMP_MAP_POINTER
10476 || map_kind == GOMP_MAP_TO_PSET
10477 || map_kind == GOMP_MAP_FORCE_DEVICEPTR)
10478 {
10479 /* If we need to initialize a temporary
10480 with VAR because it is not addressable, and
10481 the variable hasn't been initialized yet, then
10482 we'll get a warning for the store to avar.
10483 Don't warn in that case; the mapping might
10484 be implicit. */
10485 TREE_NO_WARNING (var) = 1;
10486 gimplify_assign (avar, var, &ilist);
10487 }
10488 avar = build_fold_addr_expr (avar);
10489 gimplify_assign (x, avar, &ilist);
10490 if ((GOMP_MAP_COPY_FROM_P (map_kind)
10491 || map_kind == GOMP_MAP_FORCE_DEVICEPTR)
10492 && !TYPE_READONLY (TREE_TYPE (var)))
10493 {
10494 x = unshare_expr (x);
10495 x = build_simple_mem_ref (x);
10496 gimplify_assign (var, x, &olist);
10497 }
10498 }
10499 else
10500 {
10501 var = build_fold_addr_expr (var);
10502 gimplify_assign (x, var, &ilist);
10503 }
10504 }
10505 s = NULL_TREE;
10506 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
10507 {
10508 gcc_checking_assert (is_gimple_omp_oacc (ctx->stmt));
10509 s = TREE_TYPE (ovar);
10510 if (TREE_CODE (s) == REFERENCE_TYPE)
10511 s = TREE_TYPE (s);
10512 s = TYPE_SIZE_UNIT (s);
10513 }
10514 else
10515 s = OMP_CLAUSE_SIZE (c);
10516 if (s == NULL_TREE)
10517 s = TYPE_SIZE_UNIT (TREE_TYPE (ovar));
10518 s = fold_convert (size_type_node, s);
10519 purpose = size_int (map_idx++);
10520 CONSTRUCTOR_APPEND_ELT (vsize, purpose, s);
10521 if (TREE_CODE (s) != INTEGER_CST)
10522 TREE_STATIC (TREE_VEC_ELT (t, 1)) = 0;
10523
10524 unsigned HOST_WIDE_INT tkind, tkind_zero;
10525 switch (OMP_CLAUSE_CODE (c))
10526 {
10527 case OMP_CLAUSE_MAP:
10528 tkind = OMP_CLAUSE_MAP_KIND (c);
10529 tkind_zero = tkind;
10530 if (OMP_CLAUSE_MAP_MAYBE_ZERO_LENGTH_ARRAY_SECTION (c))
10531 switch (tkind)
10532 {
10533 case GOMP_MAP_ALLOC:
10534 case GOMP_MAP_TO:
10535 case GOMP_MAP_FROM:
10536 case GOMP_MAP_TOFROM:
10537 case GOMP_MAP_ALWAYS_TO:
10538 case GOMP_MAP_ALWAYS_FROM:
10539 case GOMP_MAP_ALWAYS_TOFROM:
10540 case GOMP_MAP_RELEASE:
10541 case GOMP_MAP_FORCE_TO:
10542 case GOMP_MAP_FORCE_FROM:
10543 case GOMP_MAP_FORCE_TOFROM:
10544 case GOMP_MAP_FORCE_PRESENT:
10545 tkind_zero = GOMP_MAP_ZERO_LEN_ARRAY_SECTION;
10546 break;
10547 case GOMP_MAP_DELETE:
10548 tkind_zero = GOMP_MAP_DELETE_ZERO_LEN_ARRAY_SECTION;
10549 default:
10550 break;
10551 }
10552 if (tkind_zero != tkind)
10553 {
10554 if (integer_zerop (s))
10555 tkind = tkind_zero;
10556 else if (integer_nonzerop (s))
10557 tkind_zero = tkind;
10558 }
10559 break;
10560 case OMP_CLAUSE_FIRSTPRIVATE:
10561 gcc_checking_assert (is_gimple_omp_oacc (ctx->stmt));
10562 tkind = GOMP_MAP_TO;
10563 tkind_zero = tkind;
10564 break;
10565 case OMP_CLAUSE_TO:
10566 tkind = GOMP_MAP_TO;
10567 tkind_zero = tkind;
10568 break;
10569 case OMP_CLAUSE_FROM:
10570 tkind = GOMP_MAP_FROM;
10571 tkind_zero = tkind;
10572 break;
10573 default:
10574 gcc_unreachable ();
10575 }
10576 gcc_checking_assert (tkind
10577 < (HOST_WIDE_INT_C (1U) << talign_shift));
10578 gcc_checking_assert (tkind_zero
10579 < (HOST_WIDE_INT_C (1U) << talign_shift));
10580 talign = ceil_log2 (talign);
10581 tkind |= talign << talign_shift;
10582 tkind_zero |= talign << talign_shift;
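		  /* The map kind and the alignment now share one slot: the
		     log2 of the alignment sits in the bits above
		     TALIGN_SHIFT.  E.g. (a sketch) GOMP_MAP_TO for an
		     8-byte-aligned object is encoded as
		     GOMP_MAP_TO | (3 << talign_shift).  */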
10583 gcc_checking_assert (tkind
10584 <= tree_to_uhwi (TYPE_MAX_VALUE (tkind_type)));
10585 gcc_checking_assert (tkind_zero
10586 <= tree_to_uhwi (TYPE_MAX_VALUE (tkind_type)));
10587 if (tkind == tkind_zero)
10588 x = build_int_cstu (tkind_type, tkind);
10589 else
10590 {
10591 TREE_STATIC (TREE_VEC_ELT (t, 2)) = 0;
10592 x = build3 (COND_EXPR, tkind_type,
10593 fold_build2 (EQ_EXPR, boolean_type_node,
10594 unshare_expr (s), size_zero_node),
10595 build_int_cstu (tkind_type, tkind_zero),
10596 build_int_cstu (tkind_type, tkind));
10597 }
10598 CONSTRUCTOR_APPEND_ELT (vkind, purpose, x);
10599 if (nc && nc != c)
10600 c = nc;
10601 break;
10602
10603 case OMP_CLAUSE_FIRSTPRIVATE:
10604 if (is_oacc_parallel (ctx))
10605 goto oacc_firstprivate_map;
10606 ovar = OMP_CLAUSE_DECL (c);
10607 if (omp_is_reference (ovar))
10608 talign = TYPE_ALIGN_UNIT (TREE_TYPE (TREE_TYPE (ovar)));
10609 else
10610 talign = DECL_ALIGN_UNIT (ovar);
10611 var = lookup_decl_in_outer_ctx (ovar, ctx);
10612 x = build_sender_ref (ovar, ctx);
10613 tkind = GOMP_MAP_FIRSTPRIVATE;
10614 type = TREE_TYPE (ovar);
10615 if (omp_is_reference (ovar))
10616 type = TREE_TYPE (type);
10617 if ((INTEGRAL_TYPE_P (type)
10618 && TYPE_PRECISION (type) <= POINTER_SIZE)
10619 || TREE_CODE (type) == POINTER_TYPE)
10620 {
10621 tkind = GOMP_MAP_FIRSTPRIVATE_INT;
10622 tree t = var;
10623 if (omp_is_reference (var))
10624 t = build_simple_mem_ref (var);
10625 else if (OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (c))
10626 TREE_NO_WARNING (var) = 1;
10627 if (TREE_CODE (type) != POINTER_TYPE)
10628 t = fold_convert (pointer_sized_int_node, t);
10629 t = fold_convert (TREE_TYPE (x), t);
10630 gimplify_assign (x, t, &ilist);
10631 }
10632 else if (omp_is_reference (var))
10633 gimplify_assign (x, var, &ilist);
10634 else if (is_gimple_reg (var))
10635 {
10636 tree avar = create_tmp_var (TREE_TYPE (var));
10637 mark_addressable (avar);
10638 if (OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (c))
10639 TREE_NO_WARNING (var) = 1;
10640 gimplify_assign (avar, var, &ilist);
10641 avar = build_fold_addr_expr (avar);
10642 gimplify_assign (x, avar, &ilist);
10643 }
10644 else
10645 {
10646 var = build_fold_addr_expr (var);
10647 gimplify_assign (x, var, &ilist);
10648 }
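	      /* To summarize: a pointer, or an integer no wider than
		 POINTER_SIZE, travels by value in the pointer slot itself
		 as GOMP_MAP_FIRSTPRIVATE_INT; everything else travels by
		 address.  */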
10649 if (tkind == GOMP_MAP_FIRSTPRIVATE_INT)
10650 s = size_int (0);
10651 else if (omp_is_reference (ovar))
10652 s = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ovar)));
10653 else
10654 s = TYPE_SIZE_UNIT (TREE_TYPE (ovar));
10655 s = fold_convert (size_type_node, s);
10656 purpose = size_int (map_idx++);
10657 CONSTRUCTOR_APPEND_ELT (vsize, purpose, s);
10658 if (TREE_CODE (s) != INTEGER_CST)
10659 TREE_STATIC (TREE_VEC_ELT (t, 1)) = 0;
10660
10661 gcc_checking_assert (tkind
10662 < (HOST_WIDE_INT_C (1U) << talign_shift));
10663 talign = ceil_log2 (talign);
10664 tkind |= talign << talign_shift;
10665 gcc_checking_assert (tkind
10666 <= tree_to_uhwi (TYPE_MAX_VALUE (tkind_type)));
10667 CONSTRUCTOR_APPEND_ELT (vkind, purpose,
10668 build_int_cstu (tkind_type, tkind));
10669 break;
10670
10671 case OMP_CLAUSE_USE_DEVICE_PTR:
10672 case OMP_CLAUSE_IS_DEVICE_PTR:
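	  /* Both clauses communicate just a pointer.  For use_device_ptr
	     the runtime later substitutes the device counterpart of the
	     host pointer; for is_device_ptr the value is passed through
	     untranslated, hence GOMP_MAP_FIRSTPRIVATE_INT below.  */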
10673 ovar = OMP_CLAUSE_DECL (c);
10674 var = lookup_decl_in_outer_ctx (ovar, ctx);
10675 x = build_sender_ref (ovar, ctx);
10676 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_USE_DEVICE_PTR)
10677 tkind = GOMP_MAP_USE_DEVICE_PTR;
10678 else
10679 tkind = GOMP_MAP_FIRSTPRIVATE_INT;
10680 type = TREE_TYPE (ovar);
10681 if (TREE_CODE (type) == ARRAY_TYPE)
10682 var = build_fold_addr_expr (var);
10683 else
10684 {
10685 if (omp_is_reference (ovar))
10686 {
10687 type = TREE_TYPE (type);
10688 if (TREE_CODE (type) != ARRAY_TYPE)
10689 var = build_simple_mem_ref (var);
10690 var = fold_convert (TREE_TYPE (x), var);
10691 }
10692 }
10693 gimplify_assign (x, var, &ilist);
10694 s = size_int (0);
10695 purpose = size_int (map_idx++);
10696 CONSTRUCTOR_APPEND_ELT (vsize, purpose, s);
10697 gcc_checking_assert (tkind
10698 < (HOST_WIDE_INT_C (1U) << talign_shift));
10699 gcc_checking_assert (tkind
10700 <= tree_to_uhwi (TYPE_MAX_VALUE (tkind_type)));
10701 CONSTRUCTOR_APPEND_ELT (vkind, purpose,
10702 build_int_cstu (tkind_type, tkind));
10703 break;
10704 }
10705
10706 gcc_assert (map_idx == map_cnt);
10707
10708 DECL_INITIAL (TREE_VEC_ELT (t, 1))
10709 = build_constructor (TREE_TYPE (TREE_VEC_ELT (t, 1)), vsize);
10710 DECL_INITIAL (TREE_VEC_ELT (t, 2))
10711 = build_constructor (TREE_TYPE (TREE_VEC_ELT (t, 2)), vkind);
10712 for (int i = 1; i <= 2; i++)
10713 if (!TREE_STATIC (TREE_VEC_ELT (t, i)))
10714 {
10715 gimple_seq initlist = NULL;
10716 force_gimple_operand (build1 (DECL_EXPR, void_type_node,
10717 TREE_VEC_ELT (t, i)),
10718 &initlist, true, NULL_TREE);
10719 gimple_seq_add_seq (&ilist, initlist);
10720
10721 tree clobber = build_constructor (TREE_TYPE (TREE_VEC_ELT (t, i)),
10722 NULL);
10723 TREE_THIS_VOLATILE (clobber) = 1;
10724 gimple_seq_add_stmt (&olist,
10725 gimple_build_assign (TREE_VEC_ELT (t, i),
10726 clobber));
10727 }
10728
10729 tree clobber = build_constructor (ctx->record_type, NULL);
10730 TREE_THIS_VOLATILE (clobber) = 1;
10731 gimple_seq_add_stmt (&olist, gimple_build_assign (ctx->sender_decl,
10732 clobber));
10733 }
10734
10735 /* Once all the expansions are done, sequence all the different
10736 fragments inside gimple_omp_body. */
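/* A rough sketch of the layout built below for an offloaded region
   (assuming ctx->record_type is set):

	.omp_data_i = &.omp_data_arr;	   receiver setup
	<fplist>			   firstprivate initializers
	<per-clause receive/private code>
	<fork_seq>			   OpenACC reduction setup
	<original region body>
	<join_seq>			   OpenACC reduction teardown
	GIMPLE_OMP_RETURN  */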
10737
10738 new_body = NULL;
10739
10740 if (offloaded
10741 && ctx->record_type)
10742 {
10743 t = build_fold_addr_expr_loc (loc, ctx->sender_decl);
10744 /* fixup_child_record_type might have changed receiver_decl's type. */
10745 t = fold_convert_loc (loc, TREE_TYPE (ctx->receiver_decl), t);
10746 gimple_seq_add_stmt (&new_body,
10747 gimple_build_assign (ctx->receiver_decl, t));
10748 }
10749 gimple_seq_add_seq (&new_body, fplist);
10750
10751 if (offloaded || data_region)
10752 {
10753 tree prev = NULL_TREE;
10754 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
10755 switch (OMP_CLAUSE_CODE (c))
10756 {
10757 tree var, x;
10758 default:
10759 break;
10760 case OMP_CLAUSE_FIRSTPRIVATE:
10761 if (is_gimple_omp_oacc (ctx->stmt))
10762 break;
10763 var = OMP_CLAUSE_DECL (c);
10764 if (omp_is_reference (var)
10765 || is_gimple_reg_type (TREE_TYPE (var)))
10766 {
10767 tree new_var = lookup_decl (var, ctx);
10768 tree type;
10769 type = TREE_TYPE (var);
10770 if (omp_is_reference (var))
10771 type = TREE_TYPE (type);
10772 if ((INTEGRAL_TYPE_P (type)
10773 && TYPE_PRECISION (type) <= POINTER_SIZE)
10774 || TREE_CODE (type) == POINTER_TYPE)
10775 {
10776 x = build_receiver_ref (var, false, ctx);
10777 if (TREE_CODE (type) != POINTER_TYPE)
10778 x = fold_convert (pointer_sized_int_node, x);
10779 x = fold_convert (type, x);
10780 gimplify_expr (&x, &new_body, NULL, is_gimple_val,
10781 fb_rvalue);
10782 if (omp_is_reference (var))
10783 {
10784 tree v = create_tmp_var_raw (type, get_name (var));
10785 gimple_add_tmp_var (v);
10786 TREE_ADDRESSABLE (v) = 1;
10787 gimple_seq_add_stmt (&new_body,
10788 gimple_build_assign (v, x));
10789 x = build_fold_addr_expr (v);
10790 }
10791 gimple_seq_add_stmt (&new_body,
10792 gimple_build_assign (new_var, x));
10793 }
10794 else
10795 {
10796 x = build_receiver_ref (var, !omp_is_reference (var), ctx);
10797 gimplify_expr (&x, &new_body, NULL, is_gimple_val,
10798 fb_rvalue);
10799 gimple_seq_add_stmt (&new_body,
10800 gimple_build_assign (new_var, x));
10801 }
10802 }
10803 else if (is_variable_sized (var))
10804 {
10805 tree pvar = DECL_VALUE_EXPR (var);
10806 gcc_assert (TREE_CODE (pvar) == INDIRECT_REF);
10807 pvar = TREE_OPERAND (pvar, 0);
10808 gcc_assert (DECL_P (pvar));
10809 tree new_var = lookup_decl (pvar, ctx);
10810 x = build_receiver_ref (var, false, ctx);
10811 gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
10812 gimple_seq_add_stmt (&new_body,
10813 gimple_build_assign (new_var, x));
10814 }
10815 break;
10816 case OMP_CLAUSE_PRIVATE:
10817 if (is_gimple_omp_oacc (ctx->stmt))
10818 break;
10819 var = OMP_CLAUSE_DECL (c);
10820 if (omp_is_reference (var))
10821 {
10822 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
10823 tree new_var = lookup_decl (var, ctx);
10824 x = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (new_var)));
10825 if (TREE_CONSTANT (x))
10826 {
10827 x = create_tmp_var_raw (TREE_TYPE (TREE_TYPE (new_var)),
10828 get_name (var));
10829 gimple_add_tmp_var (x);
10830 TREE_ADDRESSABLE (x) = 1;
10831 x = build_fold_addr_expr_loc (clause_loc, x);
10832 }
10833 else
10834 break;
10835
10836 x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x);
10837 gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
10838 gimple_seq_add_stmt (&new_body,
10839 gimple_build_assign (new_var, x));
10840 }
10841 break;
10842 case OMP_CLAUSE_USE_DEVICE_PTR:
10843 case OMP_CLAUSE_IS_DEVICE_PTR:
10844 var = OMP_CLAUSE_DECL (c);
10845 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_USE_DEVICE_PTR)
10846 x = build_sender_ref (var, ctx);
10847 else
10848 x = build_receiver_ref (var, false, ctx);
10849 if (is_variable_sized (var))
10850 {
10851 tree pvar = DECL_VALUE_EXPR (var);
10852 gcc_assert (TREE_CODE (pvar) == INDIRECT_REF);
10853 pvar = TREE_OPERAND (pvar, 0);
10854 gcc_assert (DECL_P (pvar));
10855 tree new_var = lookup_decl (pvar, ctx);
10856 gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
10857 gimple_seq_add_stmt (&new_body,
10858 gimple_build_assign (new_var, x));
10859 }
10860 else if (TREE_CODE (TREE_TYPE (var)) == ARRAY_TYPE)
10861 {
10862 tree new_var = lookup_decl (var, ctx);
10863 new_var = DECL_VALUE_EXPR (new_var);
10864 gcc_assert (TREE_CODE (new_var) == MEM_REF);
10865 new_var = TREE_OPERAND (new_var, 0);
10866 gcc_assert (DECL_P (new_var));
10867 gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
10868 gimple_seq_add_stmt (&new_body,
10869 gimple_build_assign (new_var, x));
10870 }
10871 else
10872 {
10873 tree type = TREE_TYPE (var);
10874 tree new_var = lookup_decl (var, ctx);
10875 if (omp_is_reference (var))
10876 {
10877 type = TREE_TYPE (type);
10878 if (TREE_CODE (type) != ARRAY_TYPE)
10879 {
10880 tree v = create_tmp_var_raw (type, get_name (var));
10881 gimple_add_tmp_var (v);
10882 TREE_ADDRESSABLE (v) = 1;
10883 x = fold_convert (type, x);
10884 gimplify_expr (&x, &new_body, NULL, is_gimple_val,
10885 fb_rvalue);
10886 gimple_seq_add_stmt (&new_body,
10887 gimple_build_assign (v, x));
10888 x = build_fold_addr_expr (v);
10889 }
10890 }
10891 new_var = DECL_VALUE_EXPR (new_var);
10892 x = fold_convert (TREE_TYPE (new_var), x);
10893 gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
10894 gimple_seq_add_stmt (&new_body,
10895 gimple_build_assign (new_var, x));
10896 }
10897 break;
10898 }
10899 	/* Handle GOMP_MAP_FIRSTPRIVATE_{POINTER,REFERENCE} in a second
10900 	   pass, so that any firstprivate vars that hold an OMP_CLAUSE_SIZE
10901 	   bias have already been handled.  Similarly OMP_CLAUSE_PRIVATE
10902 	   for VLAs or references to VLAs.  */
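      /* E.g. for a hypothetical "#pragma omp target map(tofrom: p[0:n])",
	 the GOMP_MAP_FIRSTPRIVATE_POINTER clause chained after the array
	 section map is handled here: the private copy of P is initialized
	 from the receiver field of the preceding map (PREV), adjusted by
	 the negated section bias recorded in OMP_CLAUSE_SIZE.  */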
10903 for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
10904 switch (OMP_CLAUSE_CODE (c))
10905 {
10906 tree var;
10907 default:
10908 break;
10909 case OMP_CLAUSE_MAP:
10910 if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER
10911 || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_REFERENCE)
10912 {
10913 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
10914 poly_int64 offset = 0;
10915 gcc_assert (prev);
10916 var = OMP_CLAUSE_DECL (c);
10917 if (DECL_P (var)
10918 && TREE_CODE (TREE_TYPE (var)) == ARRAY_TYPE
10919 && is_global_var (maybe_lookup_decl_in_outer_ctx (var,
10920 ctx))
10921 && varpool_node::get_create (var)->offloadable)
10922 break;
10923 if (TREE_CODE (var) == INDIRECT_REF
10924 && TREE_CODE (TREE_OPERAND (var, 0)) == COMPONENT_REF)
10925 var = TREE_OPERAND (var, 0);
10926 if (TREE_CODE (var) == COMPONENT_REF)
10927 {
10928 var = get_addr_base_and_unit_offset (var, &offset);
10929 gcc_assert (var != NULL_TREE && DECL_P (var));
10930 }
10931 else if (DECL_SIZE (var)
10932 && TREE_CODE (DECL_SIZE (var)) != INTEGER_CST)
10933 {
10934 tree var2 = DECL_VALUE_EXPR (var);
10935 gcc_assert (TREE_CODE (var2) == INDIRECT_REF);
10936 var2 = TREE_OPERAND (var2, 0);
10937 gcc_assert (DECL_P (var2));
10938 var = var2;
10939 }
10940 tree new_var = lookup_decl (var, ctx), x;
10941 tree type = TREE_TYPE (new_var);
10942 bool is_ref;
10943 if (TREE_CODE (OMP_CLAUSE_DECL (c)) == INDIRECT_REF
10944 && (TREE_CODE (TREE_OPERAND (OMP_CLAUSE_DECL (c), 0))
10945 == COMPONENT_REF))
10946 {
10947 type = TREE_TYPE (TREE_OPERAND (OMP_CLAUSE_DECL (c), 0));
10948 is_ref = true;
10949 new_var = build2 (MEM_REF, type,
10950 build_fold_addr_expr (new_var),
10951 build_int_cst (build_pointer_type (type),
10952 offset));
10953 }
10954 else if (TREE_CODE (OMP_CLAUSE_DECL (c)) == COMPONENT_REF)
10955 {
10956 type = TREE_TYPE (OMP_CLAUSE_DECL (c));
10957 is_ref = TREE_CODE (type) == REFERENCE_TYPE;
10958 new_var = build2 (MEM_REF, type,
10959 build_fold_addr_expr (new_var),
10960 build_int_cst (build_pointer_type (type),
10961 offset));
10962 }
10963 else
10964 is_ref = omp_is_reference (var);
10965 if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_REFERENCE)
10966 is_ref = false;
10967 bool ref_to_array = false;
10968 if (is_ref)
10969 {
10970 type = TREE_TYPE (type);
10971 if (TREE_CODE (type) == ARRAY_TYPE)
10972 {
10973 type = build_pointer_type (type);
10974 ref_to_array = true;
10975 }
10976 }
10977 else if (TREE_CODE (type) == ARRAY_TYPE)
10978 {
10979 tree decl2 = DECL_VALUE_EXPR (new_var);
10980 gcc_assert (TREE_CODE (decl2) == MEM_REF);
10981 decl2 = TREE_OPERAND (decl2, 0);
10982 gcc_assert (DECL_P (decl2));
10983 new_var = decl2;
10984 type = TREE_TYPE (new_var);
10985 }
10986 x = build_receiver_ref (OMP_CLAUSE_DECL (prev), false, ctx);
10987 x = fold_convert_loc (clause_loc, type, x);
10988 if (!integer_zerop (OMP_CLAUSE_SIZE (c)))
10989 {
10990 tree bias = OMP_CLAUSE_SIZE (c);
10991 if (DECL_P (bias))
10992 bias = lookup_decl (bias, ctx);
10993 bias = fold_convert_loc (clause_loc, sizetype, bias);
10994 bias = fold_build1_loc (clause_loc, NEGATE_EXPR, sizetype,
10995 bias);
10996 x = fold_build2_loc (clause_loc, POINTER_PLUS_EXPR,
10997 TREE_TYPE (x), x, bias);
10998 }
10999 if (ref_to_array)
11000 x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x);
11001 gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
11002 if (is_ref && !ref_to_array)
11003 {
11004 tree t = create_tmp_var_raw (type, get_name (var));
11005 gimple_add_tmp_var (t);
11006 TREE_ADDRESSABLE (t) = 1;
11007 gimple_seq_add_stmt (&new_body,
11008 gimple_build_assign (t, x));
11009 x = build_fold_addr_expr_loc (clause_loc, t);
11010 }
11011 gimple_seq_add_stmt (&new_body,
11012 gimple_build_assign (new_var, x));
11013 prev = NULL_TREE;
11014 }
11015 else if (OMP_CLAUSE_CHAIN (c)
11016 && OMP_CLAUSE_CODE (OMP_CLAUSE_CHAIN (c))
11017 == OMP_CLAUSE_MAP
11018 && (OMP_CLAUSE_MAP_KIND (OMP_CLAUSE_CHAIN (c))
11019 == GOMP_MAP_FIRSTPRIVATE_POINTER
11020 || (OMP_CLAUSE_MAP_KIND (OMP_CLAUSE_CHAIN (c))
11021 == GOMP_MAP_FIRSTPRIVATE_REFERENCE)))
11022 prev = c;
11023 break;
11024 case OMP_CLAUSE_PRIVATE:
11025 var = OMP_CLAUSE_DECL (c);
11026 if (is_variable_sized (var))
11027 {
11028 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
11029 tree new_var = lookup_decl (var, ctx);
11030 tree pvar = DECL_VALUE_EXPR (var);
11031 gcc_assert (TREE_CODE (pvar) == INDIRECT_REF);
11032 pvar = TREE_OPERAND (pvar, 0);
11033 gcc_assert (DECL_P (pvar));
11034 tree new_pvar = lookup_decl (pvar, ctx);
11035 tree atmp = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
11036 tree al = size_int (DECL_ALIGN (var));
11037 tree x = TYPE_SIZE_UNIT (TREE_TYPE (new_var));
11038 x = build_call_expr_loc (clause_loc, atmp, 2, x, al);
11039 x = fold_convert_loc (clause_loc, TREE_TYPE (new_pvar), x);
11040 gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
11041 gimple_seq_add_stmt (&new_body,
11042 gimple_build_assign (new_pvar, x));
11043 }
11044 else if (omp_is_reference (var) && !is_gimple_omp_oacc (ctx->stmt))
11045 {
11046 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
11047 tree new_var = lookup_decl (var, ctx);
11048 tree x = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (new_var)));
11049 if (TREE_CONSTANT (x))
11050 break;
11051 else
11052 {
11053 tree atmp
11054 = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
11055 tree rtype = TREE_TYPE (TREE_TYPE (new_var));
11056 tree al = size_int (TYPE_ALIGN (rtype));
11057 x = build_call_expr_loc (clause_loc, atmp, 2, x, al);
11058 }
11059
11060 x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x);
11061 gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
11062 gimple_seq_add_stmt (&new_body,
11063 gimple_build_assign (new_var, x));
11064 }
11065 break;
11066 }
11067
11068 gimple_seq fork_seq = NULL;
11069 gimple_seq join_seq = NULL;
11070
11071 if (is_oacc_parallel (ctx))
11072 {
11073 /* If there are reductions on the offloaded region itself, treat
11074 them as a dummy GANG loop. */
11075 tree level = build_int_cst (integer_type_node, GOMP_DIM_GANG);
11076
11077 lower_oacc_reductions (gimple_location (ctx->stmt), clauses, level,
11078 false, NULL, NULL, &fork_seq, &join_seq, ctx);
11079 }
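      /* E.g. a hypothetical "#pragma acc parallel reduction (+:sum)"
	 carries the reduction on the region itself; bracketing the body
	 with FORK_SEQ and JOIN_SEQ makes it behave as if the reduction
	 were on a gang loop spanning the whole region.  */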
11080
11081 gimple_seq_add_seq (&new_body, fork_seq);
11082 gimple_seq_add_seq (&new_body, tgt_body);
11083 gimple_seq_add_seq (&new_body, join_seq);
11084
11085 if (offloaded)
11086 new_body = maybe_catch_exception (new_body);
11087
11088 gimple_seq_add_stmt (&new_body, gimple_build_omp_return (false));
11089 gimple_omp_set_body (stmt, new_body);
11090 }
11091
11092 bind = gimple_build_bind (NULL, NULL,
11093 tgt_bind ? gimple_bind_block (tgt_bind)
11094 : NULL_TREE);
11095 gsi_replace (gsi_p, dep_bind ? dep_bind : bind, true);
11096 gimple_bind_add_seq (bind, ilist);
11097 gimple_bind_add_stmt (bind, stmt);
11098 gimple_bind_add_seq (bind, olist);
11099
11100 pop_gimplify_context (NULL);
11101
11102 if (dep_bind)
11103 {
11104 gimple_bind_add_seq (dep_bind, dep_ilist);
11105 gimple_bind_add_stmt (dep_bind, bind);
11106 gimple_bind_add_seq (dep_bind, dep_olist);
11107 pop_gimplify_context (dep_bind);
11108 }
11109 }
11110
11111 /* Lower code for an OpenMP teams directive.  */
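/* For example, a directive such as

       #pragma omp teams num_teams (8) thread_limit (16)

   is lowered (a rough sketch, not the exact GIMPLE) into

       __builtin_GOMP_teams (8, 16);
       <teams body>

   where an omitted clause contributes 0, leaving the choice to the
   runtime.  */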
11112
11113 static void
11114 lower_omp_teams (gimple_stmt_iterator *gsi_p, omp_context *ctx)
11115 {
11116 gomp_teams *teams_stmt = as_a <gomp_teams *> (gsi_stmt (*gsi_p));
11117 push_gimplify_context ();
11118
11119 tree block = make_node (BLOCK);
11120 gbind *bind = gimple_build_bind (NULL, NULL, block);
11121 gsi_replace (gsi_p, bind, true);
11122 gimple_seq bind_body = NULL;
11123 gimple_seq dlist = NULL;
11124 gimple_seq olist = NULL;
11125
11126 tree num_teams = omp_find_clause (gimple_omp_teams_clauses (teams_stmt),
11127 OMP_CLAUSE_NUM_TEAMS);
11128 if (num_teams == NULL_TREE)
11129 num_teams = build_int_cst (unsigned_type_node, 0);
11130 else
11131 {
11132 num_teams = OMP_CLAUSE_NUM_TEAMS_EXPR (num_teams);
11133 num_teams = fold_convert (unsigned_type_node, num_teams);
11134 gimplify_expr (&num_teams, &bind_body, NULL, is_gimple_val, fb_rvalue);
11135 }
11136 tree thread_limit = omp_find_clause (gimple_omp_teams_clauses (teams_stmt),
11137 OMP_CLAUSE_THREAD_LIMIT);
11138 if (thread_limit == NULL_TREE)
11139 thread_limit = build_int_cst (unsigned_type_node, 0);
11140 else
11141 {
11142 thread_limit = OMP_CLAUSE_THREAD_LIMIT_EXPR (thread_limit);
11143 thread_limit = fold_convert (unsigned_type_node, thread_limit);
11144 gimplify_expr (&thread_limit, &bind_body, NULL, is_gimple_val,
11145 fb_rvalue);
11146 }
11147
11148 lower_rec_input_clauses (gimple_omp_teams_clauses (teams_stmt),
11149 &bind_body, &dlist, ctx, NULL);
11150 lower_omp (gimple_omp_body_ptr (teams_stmt), ctx);
11151 lower_reduction_clauses (gimple_omp_teams_clauses (teams_stmt), &olist,
11152 NULL, ctx);
11153 if (!gimple_omp_teams_grid_phony (teams_stmt))
11154 {
11155 gimple_seq_add_stmt (&bind_body, teams_stmt);
11156 location_t loc = gimple_location (teams_stmt);
11157 tree decl = builtin_decl_explicit (BUILT_IN_GOMP_TEAMS);
11158 gimple *call = gimple_build_call (decl, 2, num_teams, thread_limit);
11159 gimple_set_location (call, loc);
11160 gimple_seq_add_stmt (&bind_body, call);
11161 }
11162
11163 gimple_seq_add_seq (&bind_body, gimple_omp_body (teams_stmt));
11164 gimple_omp_set_body (teams_stmt, NULL);
11165 gimple_seq_add_seq (&bind_body, olist);
11166 gimple_seq_add_seq (&bind_body, dlist);
11167 if (!gimple_omp_teams_grid_phony (teams_stmt))
11168 gimple_seq_add_stmt (&bind_body, gimple_build_omp_return (true));
11169 gimple_bind_set_body (bind, bind_body);
11170
11171 pop_gimplify_context (bind);
11172
11173 gimple_bind_append_vars (bind, ctx->block_vars);
11174 BLOCK_VARS (block) = ctx->block_vars;
11175 if (BLOCK_VARS (block))
11176 TREE_USED (block) = 1;
11177 }
11178
11179 /* Lower code within an artificial GIMPLE_OMP_GRID_BODY OMP construct.  */
11180
11181 static void
11182 lower_omp_grid_body (gimple_stmt_iterator *gsi_p, omp_context *ctx)
11183 {
11184 gimple *stmt = gsi_stmt (*gsi_p);
11185 lower_omp (gimple_omp_body_ptr (stmt), ctx);
11186 gimple_seq_add_stmt (gimple_omp_body_ptr (stmt),
11187 gimple_build_omp_return (false));
11188 }
11189
11190
11191 /* Callback for lower_omp_1. Return non-NULL if *tp needs to be
11192    regimplified.  If DATA is non-NULL, lower_omp_1 is being invoked
11193    outside of an OMP context, but with task_shared_vars set.  */
11194
11195 static tree
11196 lower_omp_regimplify_p (tree *tp, int *walk_subtrees,
11197 void *data)
11198 {
11199 tree t = *tp;
11200
11201 /* Any variable with DECL_VALUE_EXPR needs to be regimplified. */
11202 if (VAR_P (t) && data == NULL && DECL_HAS_VALUE_EXPR_P (t))
11203 return t;
11204
11205 if (task_shared_vars
11206 && DECL_P (t)
11207 && bitmap_bit_p (task_shared_vars, DECL_UID (t)))
11208 return t;
11209
11210 /* If a global variable has been privatized, TREE_CONSTANT on
11211 ADDR_EXPR might be wrong. */
11212 if (data == NULL && TREE_CODE (t) == ADDR_EXPR)
11213 recompute_tree_invariant_for_addr_expr (t);
11214
11215 *walk_subtrees = !IS_TYPE_OR_DECL_P (t);
11216 return NULL_TREE;
11217 }
11218
11219 /* Data to be communicated between lower_omp_regimplify_operands and
11220 lower_omp_regimplify_operands_p. */
11221
11222 struct lower_omp_regimplify_operands_data
11223 {
11224 omp_context *ctx;
11225 vec<tree> *decls;
11226 };
11227
11228 /* Helper function for lower_omp_regimplify_operands. Find
11229    omp_member_access_dummy_var vars and temporarily adjust their
11230 DECL_VALUE_EXPRs if needed. */
11231
11232 static tree
11233 lower_omp_regimplify_operands_p (tree *tp, int *walk_subtrees,
11234 void *data)
11235 {
11236 tree t = omp_member_access_dummy_var (*tp);
11237 if (t)
11238 {
11239 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
11240 lower_omp_regimplify_operands_data *ldata
11241 = (lower_omp_regimplify_operands_data *) wi->info;
11242 tree o = maybe_lookup_decl (t, ldata->ctx);
11243 if (o != t)
11244 {
11245 ldata->decls->safe_push (DECL_VALUE_EXPR (*tp));
11246 ldata->decls->safe_push (*tp);
11247 tree v = unshare_and_remap (DECL_VALUE_EXPR (*tp), t, o);
11248 SET_DECL_VALUE_EXPR (*tp, v);
11249 }
11250 }
11251 *walk_subtrees = !IS_TYPE_OR_DECL_P (*tp);
11252 return NULL_TREE;
11253 }
11254
11255 /* Wrapper around gimple_regimplify_operands that adjusts DECL_VALUE_EXPRs
11256 of omp_member_access_dummy_var vars during regimplification. */
11257
11258 static void
11259 lower_omp_regimplify_operands (omp_context *ctx, gimple *stmt,
11260 gimple_stmt_iterator *gsi_p)
11261 {
11262 auto_vec<tree, 10> decls;
11263 if (ctx)
11264 {
11265 struct walk_stmt_info wi;
11266 memset (&wi, '\0', sizeof (wi));
11267 struct lower_omp_regimplify_operands_data data;
11268 data.ctx = ctx;
11269 data.decls = &decls;
11270 wi.info = &data;
11271 walk_gimple_op (stmt, lower_omp_regimplify_operands_p, &wi);
11272 }
11273 gimple_regimplify_operands (stmt, gsi_p);
11274 while (!decls.is_empty ())
11275 {
11276 tree t = decls.pop ();
11277 tree v = decls.pop ();
11278 SET_DECL_VALUE_EXPR (t, v);
11279 }
11280 }
11281
11282 static void
11283 lower_omp_1 (gimple_stmt_iterator *gsi_p, omp_context *ctx)
11284 {
11285 gimple *stmt = gsi_stmt (*gsi_p);
11286 struct walk_stmt_info wi;
11287 gcall *call_stmt;
11288
11289 if (gimple_has_location (stmt))
11290 input_location = gimple_location (stmt);
11291
11292 if (task_shared_vars)
11293 memset (&wi, '\0', sizeof (wi));
11294
11295 /* If we have issued syntax errors, avoid doing any heavy lifting.
11296 Just replace the OMP directives with a NOP to avoid
11297 confusing RTL expansion. */
11298 if (seen_error () && is_gimple_omp (stmt))
11299 {
11300 gsi_replace (gsi_p, gimple_build_nop (), true);
11301 return;
11302 }
11303
11304 switch (gimple_code (stmt))
11305 {
11306 case GIMPLE_COND:
11307 {
11308 gcond *cond_stmt = as_a <gcond *> (stmt);
11309 if ((ctx || task_shared_vars)
11310 && (walk_tree (gimple_cond_lhs_ptr (cond_stmt),
11311 lower_omp_regimplify_p,
11312 ctx ? NULL : &wi, NULL)
11313 || walk_tree (gimple_cond_rhs_ptr (cond_stmt),
11314 lower_omp_regimplify_p,
11315 ctx ? NULL : &wi, NULL)))
11316 lower_omp_regimplify_operands (ctx, cond_stmt, gsi_p);
11317 }
11318 break;
11319 case GIMPLE_CATCH:
11320 lower_omp (gimple_catch_handler_ptr (as_a <gcatch *> (stmt)), ctx);
11321 break;
11322 case GIMPLE_EH_FILTER:
11323 lower_omp (gimple_eh_filter_failure_ptr (stmt), ctx);
11324 break;
11325 case GIMPLE_TRY:
11326 lower_omp (gimple_try_eval_ptr (stmt), ctx);
11327 lower_omp (gimple_try_cleanup_ptr (stmt), ctx);
11328 break;
11329 case GIMPLE_TRANSACTION:
11330 lower_omp (gimple_transaction_body_ptr (as_a <gtransaction *> (stmt)),
11331 ctx);
11332 break;
11333 case GIMPLE_BIND:
11334 lower_omp (gimple_bind_body_ptr (as_a <gbind *> (stmt)), ctx);
11335 maybe_remove_omp_member_access_dummy_vars (as_a <gbind *> (stmt));
11336 break;
11337 case GIMPLE_OMP_PARALLEL:
11338 case GIMPLE_OMP_TASK:
11339 ctx = maybe_lookup_ctx (stmt);
11340 gcc_assert (ctx);
11341 if (ctx->cancellable)
11342 ctx->cancel_label = create_artificial_label (UNKNOWN_LOCATION);
11343 lower_omp_taskreg (gsi_p, ctx);
11344 break;
11345 case GIMPLE_OMP_FOR:
11346 ctx = maybe_lookup_ctx (stmt);
11347 gcc_assert (ctx);
11348 if (ctx->cancellable)
11349 ctx->cancel_label = create_artificial_label (UNKNOWN_LOCATION);
11350 lower_omp_for (gsi_p, ctx);
11351 break;
11352 case GIMPLE_OMP_SECTIONS:
11353 ctx = maybe_lookup_ctx (stmt);
11354 gcc_assert (ctx);
11355 if (ctx->cancellable)
11356 ctx->cancel_label = create_artificial_label (UNKNOWN_LOCATION);
11357 lower_omp_sections (gsi_p, ctx);
11358 break;
11359 case GIMPLE_OMP_SINGLE:
11360 ctx = maybe_lookup_ctx (stmt);
11361 gcc_assert (ctx);
11362 lower_omp_single (gsi_p, ctx);
11363 break;
11364 case GIMPLE_OMP_MASTER:
11365 ctx = maybe_lookup_ctx (stmt);
11366 gcc_assert (ctx);
11367 lower_omp_master (gsi_p, ctx);
11368 break;
11369 case GIMPLE_OMP_TASKGROUP:
11370 ctx = maybe_lookup_ctx (stmt);
11371 gcc_assert (ctx);
11372 lower_omp_taskgroup (gsi_p, ctx);
11373 break;
11374 case GIMPLE_OMP_ORDERED:
11375 ctx = maybe_lookup_ctx (stmt);
11376 gcc_assert (ctx);
11377 lower_omp_ordered (gsi_p, ctx);
11378 break;
11379 case GIMPLE_OMP_SCAN:
11380 ctx = maybe_lookup_ctx (stmt);
11381 gcc_assert (ctx);
11382 lower_omp_scan (gsi_p, ctx);
11383 break;
11384 case GIMPLE_OMP_CRITICAL:
11385 ctx = maybe_lookup_ctx (stmt);
11386 gcc_assert (ctx);
11387 lower_omp_critical (gsi_p, ctx);
11388 break;
11389 case GIMPLE_OMP_ATOMIC_LOAD:
11390 if ((ctx || task_shared_vars)
11391 && walk_tree (gimple_omp_atomic_load_rhs_ptr (
11392 as_a <gomp_atomic_load *> (stmt)),
11393 lower_omp_regimplify_p, ctx ? NULL : &wi, NULL))
11394 lower_omp_regimplify_operands (ctx, stmt, gsi_p);
11395 break;
11396 case GIMPLE_OMP_TARGET:
11397 ctx = maybe_lookup_ctx (stmt);
11398 gcc_assert (ctx);
11399 lower_omp_target (gsi_p, ctx);
11400 break;
11401 case GIMPLE_OMP_TEAMS:
11402 ctx = maybe_lookup_ctx (stmt);
11403 gcc_assert (ctx);
11404 if (gimple_omp_teams_host (as_a <gomp_teams *> (stmt)))
11405 lower_omp_taskreg (gsi_p, ctx);
11406 else
11407 lower_omp_teams (gsi_p, ctx);
11408 break;
11409 case GIMPLE_OMP_GRID_BODY:
11410 ctx = maybe_lookup_ctx (stmt);
11411 gcc_assert (ctx);
11412 lower_omp_grid_body (gsi_p, ctx);
11413 break;
11414 case GIMPLE_CALL:
11415 tree fndecl;
11416 call_stmt = as_a <gcall *> (stmt);
11417 fndecl = gimple_call_fndecl (call_stmt);
11418 if (fndecl
11419 && fndecl_built_in_p (fndecl, BUILT_IN_NORMAL))
11420 switch (DECL_FUNCTION_CODE (fndecl))
11421 {
11422 case BUILT_IN_GOMP_BARRIER:
11423 if (ctx == NULL)
11424 break;
11425 /* FALLTHRU */
11426 case BUILT_IN_GOMP_CANCEL:
11427 case BUILT_IN_GOMP_CANCELLATION_POINT:
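	  /* In a cancellable region these calls can observe a pending
	     cancellation: give the call a boolean LHS below and branch to
	     the region's cancel_label when it returns true.  */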
11428 omp_context *cctx;
11429 cctx = ctx;
11430 if (gimple_code (cctx->stmt) == GIMPLE_OMP_SECTION)
11431 cctx = cctx->outer;
11432 gcc_assert (gimple_call_lhs (call_stmt) == NULL_TREE);
11433 if (!cctx->cancellable)
11434 {
11435 if (DECL_FUNCTION_CODE (fndecl)
11436 == BUILT_IN_GOMP_CANCELLATION_POINT)
11437 {
11438 stmt = gimple_build_nop ();
11439 gsi_replace (gsi_p, stmt, false);
11440 }
11441 break;
11442 }
11443 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_GOMP_BARRIER)
11444 {
11445 fndecl = builtin_decl_explicit (BUILT_IN_GOMP_BARRIER_CANCEL);
11446 gimple_call_set_fndecl (call_stmt, fndecl);
11447 gimple_call_set_fntype (call_stmt, TREE_TYPE (fndecl));
11448 }
11449 tree lhs;
11450 lhs = create_tmp_var (TREE_TYPE (TREE_TYPE (fndecl)));
11451 gimple_call_set_lhs (call_stmt, lhs);
11452 tree fallthru_label;
11453 fallthru_label = create_artificial_label (UNKNOWN_LOCATION);
11454 gimple *g;
11455 g = gimple_build_label (fallthru_label);
11456 gsi_insert_after (gsi_p, g, GSI_SAME_STMT);
11457 g = gimple_build_cond (NE_EXPR, lhs,
11458 fold_convert (TREE_TYPE (lhs),
11459 boolean_false_node),
11460 cctx->cancel_label, fallthru_label);
11461 gsi_insert_after (gsi_p, g, GSI_SAME_STMT);
11462 break;
11463 default:
11464 break;
11465 }
11466 goto regimplify;
11467
11468 case GIMPLE_ASSIGN:
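      /* Stores to a lastprivate(conditional:) candidate are instrumented
	 here: each store also copies the current iteration number from
	 the _condtemp_ clause into the per-variable temporary, so the
	 "last" store can be identified later.  */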
11469 for (omp_context *up = ctx; up; up = up->outer)
11470 {
11471 if (gimple_code (up->stmt) == GIMPLE_OMP_ORDERED
11472 || gimple_code (up->stmt) == GIMPLE_OMP_CRITICAL
11473 || gimple_code (up->stmt) == GIMPLE_OMP_TASKGROUP
11474 || gimple_code (up->stmt) == GIMPLE_OMP_SECTION
11475 || gimple_code (up->stmt) == GIMPLE_OMP_SCAN
11476 || (gimple_code (up->stmt) == GIMPLE_OMP_TARGET
11477 && (gimple_omp_target_kind (up->stmt)
11478 == GF_OMP_TARGET_KIND_DATA)))
11479 continue;
11480 else if (!up->lastprivate_conditional_map)
11481 break;
11482 tree lhs = get_base_address (gimple_assign_lhs (stmt));
11483 if (TREE_CODE (lhs) == MEM_REF
11484 && DECL_P (TREE_OPERAND (lhs, 0))
11485 && TREE_CODE (TREE_TYPE (TREE_OPERAND (lhs,
11486 0))) == REFERENCE_TYPE)
11487 lhs = TREE_OPERAND (lhs, 0);
11488 if (DECL_P (lhs))
11489 if (tree *v = up->lastprivate_conditional_map->get (lhs))
11490 {
11491 tree clauses;
11492 if (up->combined_into_simd_safelen0)
11493 up = up->outer;
11494 if (gimple_code (up->stmt) == GIMPLE_OMP_FOR)
11495 clauses = gimple_omp_for_clauses (up->stmt);
11496 else
11497 clauses = gimple_omp_sections_clauses (up->stmt);
11498 tree c = omp_find_clause (clauses, OMP_CLAUSE__CONDTEMP_);
11499 if (!OMP_CLAUSE__CONDTEMP__ITER (c))
11500 c = omp_find_clause (OMP_CLAUSE_CHAIN (c),
11501 OMP_CLAUSE__CONDTEMP_);
11502 gcc_assert (OMP_CLAUSE__CONDTEMP__ITER (c));
11503 gimple *g = gimple_build_assign (*v, OMP_CLAUSE_DECL (c));
11504 gsi_insert_after (gsi_p, g, GSI_SAME_STMT);
11505 }
11506 }
11507 /* FALLTHRU */
11508
11509 default:
11510 regimplify:
11511 if ((ctx || task_shared_vars)
11512 && walk_gimple_op (stmt, lower_omp_regimplify_p,
11513 ctx ? NULL : &wi))
11514 {
11515 	  /* Just remove clobbers; this should happen only if we have
11516 	     "privatized" local addressable variables in SIMD regions.
11517 	     The clobber isn't needed in that case, and gimplifying the
11518 	     address of the ARRAY_REF into a pointer and creating a
11519 	     MEM_REF based clobber would create worse code than we get
11520 	     with the clobber dropped.  */
11521 if (gimple_clobber_p (stmt))
11522 {
11523 gsi_replace (gsi_p, gimple_build_nop (), true);
11524 break;
11525 }
11526 lower_omp_regimplify_operands (ctx, stmt, gsi_p);
11527 }
11528 break;
11529 }
11530 }
11531
11532 static void
11533 lower_omp (gimple_seq *body, omp_context *ctx)
11534 {
11535 location_t saved_location = input_location;
11536 gimple_stmt_iterator gsi;
11537 for (gsi = gsi_start (*body); !gsi_end_p (gsi); gsi_next (&gsi))
11538 lower_omp_1 (&gsi, ctx);
11539   /* During gimplification, we haven't folded statements inside offloading
11540 or taskreg regions (gimplify.c:maybe_fold_stmt); do that now. */
11541 if (target_nesting_level || taskreg_nesting_level)
11542 for (gsi = gsi_start (*body); !gsi_end_p (gsi); gsi_next (&gsi))
11543 fold_stmt (&gsi);
11544 input_location = saved_location;
11545 }
11546
11547 /* Main entry point. */
11548
11549 static unsigned int
11550 execute_lower_omp (void)
11551 {
11552 gimple_seq body;
11553 int i;
11554 omp_context *ctx;
11555
11556 /* This pass always runs, to provide PROP_gimple_lomp.
11557 But often, there is nothing to do. */
11558 if (flag_openacc == 0 && flag_openmp == 0
11559 && flag_openmp_simd == 0)
11560 return 0;
11561
11562 all_contexts = splay_tree_new (splay_tree_compare_pointers, 0,
11563 delete_omp_context);
11564
11565 body = gimple_body (current_function_decl);
11566
11567 if (hsa_gen_requested_p ())
11568 omp_grid_gridify_all_targets (&body);
11569
11570 scan_omp (&body, NULL);
11571 gcc_assert (taskreg_nesting_level == 0);
11572 FOR_EACH_VEC_ELT (taskreg_contexts, i, ctx)
11573 finish_taskreg_scan (ctx);
11574 taskreg_contexts.release ();
11575
11576 if (all_contexts->root)
11577 {
11578 if (task_shared_vars)
11579 push_gimplify_context ();
11580 lower_omp (&body, NULL);
11581 if (task_shared_vars)
11582 pop_gimplify_context (NULL);
11583 }
11584
11585 if (all_contexts)
11586 {
11587 splay_tree_delete (all_contexts);
11588 all_contexts = NULL;
11589 }
11590 BITMAP_FREE (task_shared_vars);
11591
11592 /* If the current function is a method, remove the artificial dummy
11593    VAR_DECLs created for non-static data member privatization; they
11594    aren't needed for debuginfo or anything else, have already been
11595    replaced everywhere in the IL, and cause problems with LTO.  */
11596 if (DECL_ARGUMENTS (current_function_decl)
11597 && DECL_ARTIFICIAL (DECL_ARGUMENTS (current_function_decl))
11598 && (TREE_CODE (TREE_TYPE (DECL_ARGUMENTS (current_function_decl)))
11599 == POINTER_TYPE))
11600 remove_member_access_dummy_vars (DECL_INITIAL (current_function_decl));
11601 return 0;
11602 }
11603
11604 namespace {
11605
11606 const pass_data pass_data_lower_omp =
11607 {
11608 GIMPLE_PASS, /* type */
11609 "omplower", /* name */
11610 OPTGROUP_OMP, /* optinfo_flags */
11611 TV_NONE, /* tv_id */
11612 PROP_gimple_any, /* properties_required */
11613 PROP_gimple_lomp | PROP_gimple_lomp_dev, /* properties_provided */
11614 0, /* properties_destroyed */
11615 0, /* todo_flags_start */
11616 0, /* todo_flags_finish */
11617 };
11618
11619 class pass_lower_omp : public gimple_opt_pass
11620 {
11621 public:
11622 pass_lower_omp (gcc::context *ctxt)
11623 : gimple_opt_pass (pass_data_lower_omp, ctxt)
11624 {}
11625
11626 /* opt_pass methods: */
11627 virtual unsigned int execute (function *) { return execute_lower_omp (); }
11628
11629 }; // class pass_lower_omp
11630
11631 } // anon namespace
11632
11633 gimple_opt_pass *
11634 make_pass_lower_omp (gcc::context *ctxt)
11635 {
11636 return new pass_lower_omp (ctxt);
11637 }
11638 \f
11639 /* The following is a utility to diagnose structured block violations.
11640 It is not part of the "omplower" pass, as that's invoked too late. It
11641 should be invoked by the respective front ends after gimplification. */
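/* For example (hypothetical user code):

       #pragma omp parallel
       {
	 goto out;
       }
      out:;

   is rejected here with "invalid branch to/from OpenMP structured
   block", since the label's context (none) differs from the branch's
   context (the parallel construct).  */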
11642
11643 static splay_tree all_labels;
11644
11645 /* Check for mismatched contexts and generate an error if needed. Return
11646 true if an error is detected. */
11647
11648 static bool
11649 diagnose_sb_0 (gimple_stmt_iterator *gsi_p,
11650 gimple *branch_ctx, gimple *label_ctx)
11651 {
11652 gcc_checking_assert (!branch_ctx || is_gimple_omp (branch_ctx));
11653 gcc_checking_assert (!label_ctx || is_gimple_omp (label_ctx));
11654
11655 if (label_ctx == branch_ctx)
11656 return false;
11657
11658 const char* kind = NULL;
11659
11660 if (flag_openacc)
11661 {
11662 if ((branch_ctx && is_gimple_omp_oacc (branch_ctx))
11663 || (label_ctx && is_gimple_omp_oacc (label_ctx)))
11664 {
11665 gcc_checking_assert (kind == NULL);
11666 kind = "OpenACC";
11667 }
11668 }
11669 if (kind == NULL)
11670 {
11671 gcc_checking_assert (flag_openmp || flag_openmp_simd);
11672 kind = "OpenMP";
11673 }
11674
11675 /* Previously we kept track of the label's entire context in diagnose_sb_[12]
11676 so we could traverse it and issue a correct "exit" or "enter" error
11677 message upon a structured block violation.
11678
11679    We built the context by tree_cons'ing up a list, but there is no
11680    easy counterpart in gimple tuples.  It seems like far too much work
11681 for issuing exit/enter error messages. If someone really misses the
11682 distinct error message... patches welcome. */
11683
11684 #if 0
11685 /* Try to avoid confusing the user by producing an error message
11686 with correct "exit" or "enter" verbiage. We prefer "exit"
11687 unless we can show that LABEL_CTX is nested within BRANCH_CTX. */
11688 if (branch_ctx == NULL)
11689 exit_p = false;
11690 else
11691 {
11692 while (label_ctx)
11693 {
11694 if (TREE_VALUE (label_ctx) == branch_ctx)
11695 {
11696 exit_p = false;
11697 break;
11698 }
11699 label_ctx = TREE_CHAIN (label_ctx);
11700 }
11701 }
11702
11703 if (exit_p)
11704 error ("invalid exit from %s structured block", kind);
11705 else
11706 error ("invalid entry to %s structured block", kind);
11707 #endif
11708
11709 /* If it's obvious we have an invalid entry, be specific about the error. */
11710 if (branch_ctx == NULL)
11711 error ("invalid entry to %s structured block", kind);
11712 else
11713 {
11714 /* Otherwise, be vague and lazy, but efficient. */
11715 error ("invalid branch to/from %s structured block", kind);
11716 }
11717
11718 gsi_replace (gsi_p, gimple_build_nop (), false);
11719 return true;
11720 }
11721
11722 /* Pass 1: Create a minimal tree of structured blocks, and record
11723 where each label is found. */
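/* The result is a splay tree mapping each label to its innermost
   enclosing OMP construct, or to NULL at the top level; pass 2 below
   compares this against the context of every branch.  */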
11724
11725 static tree
11726 diagnose_sb_1 (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
11727 struct walk_stmt_info *wi)
11728 {
11729 gimple *context = (gimple *) wi->info;
11730 gimple *inner_context;
11731 gimple *stmt = gsi_stmt (*gsi_p);
11732
11733 *handled_ops_p = true;
11734
11735 switch (gimple_code (stmt))
11736 {
11737 WALK_SUBSTMTS;
11738
11739 case GIMPLE_OMP_PARALLEL:
11740 case GIMPLE_OMP_TASK:
11741 case GIMPLE_OMP_SECTIONS:
11742 case GIMPLE_OMP_SINGLE:
11743 case GIMPLE_OMP_SECTION:
11744 case GIMPLE_OMP_MASTER:
11745 case GIMPLE_OMP_ORDERED:
11746 case GIMPLE_OMP_SCAN:
11747 case GIMPLE_OMP_CRITICAL:
11748 case GIMPLE_OMP_TARGET:
11749 case GIMPLE_OMP_TEAMS:
11750 case GIMPLE_OMP_TASKGROUP:
11751 /* The minimal context here is just the current OMP construct. */
11752 inner_context = stmt;
11753 wi->info = inner_context;
11754 walk_gimple_seq (gimple_omp_body (stmt), diagnose_sb_1, NULL, wi);
11755 wi->info = context;
11756 break;
11757
11758 case GIMPLE_OMP_FOR:
11759 inner_context = stmt;
11760 wi->info = inner_context;
11761 /* gimple_omp_for_{index,initial,final} are all DECLs; no need to
11762 walk them. */
11763 walk_gimple_seq (gimple_omp_for_pre_body (stmt),
11764 diagnose_sb_1, NULL, wi);
11765 walk_gimple_seq (gimple_omp_body (stmt), diagnose_sb_1, NULL, wi);
11766 wi->info = context;
11767 break;
11768
11769 case GIMPLE_LABEL:
11770 splay_tree_insert (all_labels,
11771 (splay_tree_key) gimple_label_label (
11772 as_a <glabel *> (stmt)),
11773 (splay_tree_value) context);
11774 break;
11775
11776 default:
11777 break;
11778 }
11779
11780 return NULL_TREE;
11781 }
11782
11783 /* Pass 2: Check each branch and see if its context differs from that of
11784 the destination label's context. */
11785
11786 static tree
11787 diagnose_sb_2 (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
11788 struct walk_stmt_info *wi)
11789 {
11790 gimple *context = (gimple *) wi->info;
11791 splay_tree_node n;
11792 gimple *stmt = gsi_stmt (*gsi_p);
11793
11794 *handled_ops_p = true;
11795
11796 switch (gimple_code (stmt))
11797 {
11798 WALK_SUBSTMTS;
11799
11800 case GIMPLE_OMP_PARALLEL:
11801 case GIMPLE_OMP_TASK:
11802 case GIMPLE_OMP_SECTIONS:
11803 case GIMPLE_OMP_SINGLE:
11804 case GIMPLE_OMP_SECTION:
11805 case GIMPLE_OMP_MASTER:
11806 case GIMPLE_OMP_ORDERED:
11807 case GIMPLE_OMP_SCAN:
11808 case GIMPLE_OMP_CRITICAL:
11809 case GIMPLE_OMP_TARGET:
11810 case GIMPLE_OMP_TEAMS:
11811 case GIMPLE_OMP_TASKGROUP:
11812 wi->info = stmt;
11813 walk_gimple_seq_mod (gimple_omp_body_ptr (stmt), diagnose_sb_2, NULL, wi);
11814 wi->info = context;
11815 break;
11816
11817 case GIMPLE_OMP_FOR:
11818 wi->info = stmt;
11819 /* gimple_omp_for_{index,initial,final} are all DECLs; no need to
11820 walk them. */
11821 walk_gimple_seq_mod (gimple_omp_for_pre_body_ptr (stmt),
11822 diagnose_sb_2, NULL, wi);
11823 walk_gimple_seq_mod (gimple_omp_body_ptr (stmt), diagnose_sb_2, NULL, wi);
11824 wi->info = context;
11825 break;
11826
11827 case GIMPLE_COND:
11828 {
11829 gcond *cond_stmt = as_a <gcond *> (stmt);
11830 tree lab = gimple_cond_true_label (cond_stmt);
11831 if (lab)
11832 {
11833 n = splay_tree_lookup (all_labels,
11834 (splay_tree_key) lab);
11835 diagnose_sb_0 (gsi_p, context,
11836 n ? (gimple *) n->value : NULL);
11837 }
11838 lab = gimple_cond_false_label (cond_stmt);
11839 if (lab)
11840 {
11841 n = splay_tree_lookup (all_labels,
11842 (splay_tree_key) lab);
11843 diagnose_sb_0 (gsi_p, context,
11844 n ? (gimple *) n->value : NULL);
11845 }
11846 }
11847 break;
11848
11849 case GIMPLE_GOTO:
11850 {
11851 tree lab = gimple_goto_dest (stmt);
11852 if (TREE_CODE (lab) != LABEL_DECL)
11853 break;
11854
11855 n = splay_tree_lookup (all_labels, (splay_tree_key) lab);
11856 diagnose_sb_0 (gsi_p, context, n ? (gimple *) n->value : NULL);
11857 }
11858 break;
11859
11860 case GIMPLE_SWITCH:
11861 {
11862 gswitch *switch_stmt = as_a <gswitch *> (stmt);
11863 unsigned int i;
11864 for (i = 0; i < gimple_switch_num_labels (switch_stmt); ++i)
11865 {
11866 tree lab = CASE_LABEL (gimple_switch_label (switch_stmt, i));
11867 n = splay_tree_lookup (all_labels, (splay_tree_key) lab);
11868 if (n && diagnose_sb_0 (gsi_p, context, (gimple *) n->value))
11869 break;
11870 }
11871 }
11872 break;
11873
11874 case GIMPLE_RETURN:
11875 diagnose_sb_0 (gsi_p, context, NULL);
11876 break;
11877
11878 default:
11879 break;
11880 }
11881
11882 return NULL_TREE;
11883 }
11884
11885 static unsigned int
11886 diagnose_omp_structured_block_errors (void)
11887 {
11888 struct walk_stmt_info wi;
11889 gimple_seq body = gimple_body (current_function_decl);
11890
11891 all_labels = splay_tree_new (splay_tree_compare_pointers, 0, 0);
11892
11893 memset (&wi, 0, sizeof (wi));
11894 walk_gimple_seq (body, diagnose_sb_1, NULL, &wi);
11895
11896 memset (&wi, 0, sizeof (wi));
11897 wi.want_locations = true;
11898 walk_gimple_seq_mod (&body, diagnose_sb_2, NULL, &wi);
11899
11900 gimple_set_body (current_function_decl, body);
11901
11902 splay_tree_delete (all_labels);
11903 all_labels = NULL;
11904
11905 return 0;
11906 }
11907
11908 namespace {
11909
11910 const pass_data pass_data_diagnose_omp_blocks =
11911 {
11912 GIMPLE_PASS, /* type */
11913 "*diagnose_omp_blocks", /* name */
11914 OPTGROUP_OMP, /* optinfo_flags */
11915 TV_NONE, /* tv_id */
11916 PROP_gimple_any, /* properties_required */
11917 0, /* properties_provided */
11918 0, /* properties_destroyed */
11919 0, /* todo_flags_start */
11920 0, /* todo_flags_finish */
11921 };
11922
11923 class pass_diagnose_omp_blocks : public gimple_opt_pass
11924 {
11925 public:
11926 pass_diagnose_omp_blocks (gcc::context *ctxt)
11927 : gimple_opt_pass (pass_data_diagnose_omp_blocks, ctxt)
11928 {}
11929
11930 /* opt_pass methods: */
11931 virtual bool gate (function *)
11932 {
11933 return flag_openacc || flag_openmp || flag_openmp_simd;
11934 }
11935 virtual unsigned int execute (function *)
11936 {
11937 return diagnose_omp_structured_block_errors ();
11938 }
11939
11940 }; // class pass_diagnose_omp_blocks
11941
11942 } // anon namespace
11943
11944 gimple_opt_pass *
11945 make_pass_diagnose_omp_blocks (gcc::context *ctxt)
11946 {
11947 return new pass_diagnose_omp_blocks (ctxt);
11948 }
11949 \f
11950
11951 #include "gt-omp-low.h"