[OpenMP] use_device_addr/use_device_ptr with Fortran allocatable/pointer arrays
[gcc.git] / gcc / omp-low.c
1 /* Lowering pass for OMP directives. Converts OMP directives into explicit
2 calls to the runtime library (libgomp), data marshalling to implement data
3 sharing and copying clauses, offloading to accelerators, and more.
4
5 Contributed by Diego Novillo <dnovillo@redhat.com>
6
7 Copyright (C) 2005-2019 Free Software Foundation, Inc.
8
9 This file is part of GCC.
10
11 GCC is free software; you can redistribute it and/or modify it under
12 the terms of the GNU General Public License as published by the Free
13 Software Foundation; either version 3, or (at your option) any later
14 version.
15
16 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
17 WARRANTY; without even the implied warranty of MERCHANTABILITY or
18 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
19 for more details.
20
21 You should have received a copy of the GNU General Public License
22 along with GCC; see the file COPYING3. If not see
23 <http://www.gnu.org/licenses/>. */
24
25 #include "config.h"
26 #include "system.h"
27 #include "coretypes.h"
28 #include "backend.h"
29 #include "target.h"
30 #include "tree.h"
31 #include "gimple.h"
32 #include "tree-pass.h"
33 #include "ssa.h"
34 #include "cgraph.h"
35 #include "pretty-print.h"
36 #include "diagnostic-core.h"
37 #include "fold-const.h"
38 #include "stor-layout.h"
39 #include "internal-fn.h"
40 #include "gimple-fold.h"
41 #include "gimplify.h"
42 #include "gimple-iterator.h"
43 #include "gimplify-me.h"
44 #include "gimple-walk.h"
45 #include "tree-iterator.h"
46 #include "tree-inline.h"
47 #include "langhooks.h"
48 #include "tree-dfa.h"
49 #include "tree-ssa.h"
50 #include "splay-tree.h"
51 #include "omp-general.h"
52 #include "omp-low.h"
53 #include "omp-grid.h"
54 #include "gimple-low.h"
55 #include "symbol-summary.h"
56 #include "tree-nested.h"
57 #include "context.h"
58 #include "gomp-constants.h"
59 #include "gimple-pretty-print.h"
60 #include "hsa-common.h"
61 #include "stringpool.h"
62 #include "attribs.h"
63
64 /* Lowering of OMP parallel and workshare constructs proceeds in two
65 phases. The first phase scans the function looking for OMP statements
66 and then for variables that must be replaced to satisfy data sharing
67 clauses. The second phase expands code for the constructs, as well as
68 re-gimplifying things when variables have been replaced with complex
69 expressions.
70
71 Final code generation is done by pass_expand_omp. The flowgraph is
72 scanned for regions which are then moved to a new
73 function, to be invoked by the thread library, or offloaded. */
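
/* As an illustrative sketch (details vary with target and clauses), a
   construct such as

       int x = 0;
       #pragma omp parallel shared(x)
       x += foo ();

   is outlined into a child function along the lines of

       struct .omp_data_s { int *x; };

       static void
       main._omp_fn.0 (struct .omp_data_s *.omp_data_i)
       {
         *(.omp_data_i->x) += foo ();
       }

   while the encountering thread fills in an .omp_data_o record and
   passes its address to GOMP_parallel.  The names follow the dump
   files; the snippet is illustrative only.  */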
74
75 /* Context structure. Used to store information about each parallel
76 directive in the code. */
77
78 struct omp_context
79 {
/* This field must be at the beginning, as we do "inheritance": Some
   callback functions for tree-inline.c (e.g., omp_copy_decl)
   receive a copy_body_data pointer that is cast back to an
   omp_context pointer.  */
84 copy_body_data cb;
85
86 /* The tree of contexts corresponding to the encountered constructs. */
87 struct omp_context *outer;
88 gimple *stmt;
89
90 /* Map variables to fields in a structure that allows communication
91 between sending and receiving threads. */
92 splay_tree field_map;
93 tree record_type;
94 tree sender_decl;
95 tree receiver_decl;
96
/* These are used just by task contexts, if the task firstprivate fn is
   needed.  srecord_type is used to communicate from the thread that
   encountered the task construct to the task firstprivate fn;
   record_type is allocated by GOMP_task, initialized by the task
   firstprivate fn and passed to the task body fn.  */
102 splay_tree sfield_map;
103 tree srecord_type;
104
105 /* A chain of variables to add to the top-level block surrounding the
106 construct. In the case of a parallel, this is in the child function. */
107 tree block_vars;
108
/* Label to which GOMP_cancel{,lation_point} and explicit and implicit
   barriers should jump during the omplower pass.  */
111 tree cancel_label;
112
113 /* The sibling GIMPLE_OMP_FOR simd with _simt_ clause or NULL
114 otherwise. */
115 gimple *simt_stmt;
116
117 /* For task reductions registered in this context, a vector containing
118 the length of the private copies block (if constant, otherwise NULL)
119 and then offsets (if constant, otherwise NULL) for each entry. */
120 vec<tree> task_reductions;
121
122 /* A hash map from the reduction clauses to the registered array
123 elts. */
124 hash_map<tree, unsigned> *task_reduction_map;
125
126 /* And a hash map from the lastprivate(conditional:) variables to their
127 corresponding tracking loop iteration variables. */
128 hash_map<tree, tree> *lastprivate_conditional_map;
129
130 /* Nesting depth of this context. Used to beautify error messages re
131 invalid gotos. The outermost ctx is depth 1, with depth 0 being
132 reserved for the main body of the function. */
133 int depth;
134
135 /* True if this parallel directive is nested within another. */
136 bool is_nested;
137
138 /* True if this construct can be cancelled. */
139 bool cancellable;
140
141 /* True if lower_omp_1 should look up lastprivate conditional in parent
142 context. */
143 bool combined_into_simd_safelen1;
144
145 /* True if there is nested scan context with inclusive clause. */
146 bool scan_inclusive;
147
148 /* True if there is nested scan context with exclusive clause. */
149 bool scan_exclusive;
150
151 /* True in the second simd loop of for simd with inscan reductions. */
152 bool for_simd_scan_phase;
153
154 /* True if there is order(concurrent) clause on the construct. */
155 bool order_concurrent;
156
157 /* True if there is bind clause on the construct (i.e. a loop construct). */
158 bool loop_p;
159 };
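
/* A sketch of how these fields interact: for "#pragma omp parallel
   shared(x)" with X addressable, record_type is laid out as

       struct .omp_data_s { int *x; };

   field_map maps X to that FIELD_DECL, sender_decl is the .omp_data_o
   variable the encountering thread fills in, and receiver_decl is the
   .omp_data_i parameter of the child function.  For tasks, srecord_type
   and sfield_map describe the sender-side record separately, because
   GOMP_task allocates the receiver-side record itself.  */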
160
161 static splay_tree all_contexts;
162 static int taskreg_nesting_level;
163 static int target_nesting_level;
164 static bitmap task_shared_vars;
165 static bitmap global_nonaddressable_vars;
166 static vec<omp_context *> taskreg_contexts;
167
168 static void scan_omp (gimple_seq *, omp_context *);
169 static tree scan_omp_1_op (tree *, int *, void *);
170
171 #define WALK_SUBSTMTS \
172 case GIMPLE_BIND: \
173 case GIMPLE_TRY: \
174 case GIMPLE_CATCH: \
175 case GIMPLE_EH_FILTER: \
176 case GIMPLE_TRANSACTION: \
177 /* The sub-statements for these should be walked. */ \
178 *handled_ops_p = false; \
179 break;
180
181 /* Return true if CTX corresponds to an oacc parallel region. */
182
183 static bool
184 is_oacc_parallel (omp_context *ctx)
185 {
186 enum gimple_code outer_type = gimple_code (ctx->stmt);
187 return ((outer_type == GIMPLE_OMP_TARGET)
188 && (gimple_omp_target_kind (ctx->stmt)
189 == GF_OMP_TARGET_KIND_OACC_PARALLEL));
190 }
191
192 /* Return true if CTX corresponds to an oacc kernels region. */
193
194 static bool
195 is_oacc_kernels (omp_context *ctx)
196 {
197 enum gimple_code outer_type = gimple_code (ctx->stmt);
198 return ((outer_type == GIMPLE_OMP_TARGET)
199 && (gimple_omp_target_kind (ctx->stmt)
200 == GF_OMP_TARGET_KIND_OACC_KERNELS));
201 }
202
203 /* If DECL is the artificial dummy VAR_DECL created for non-static
204 data member privatization, return the underlying "this" parameter,
205 otherwise return NULL. */
206
207 tree
208 omp_member_access_dummy_var (tree decl)
209 {
210 if (!VAR_P (decl)
211 || !DECL_ARTIFICIAL (decl)
212 || !DECL_IGNORED_P (decl)
213 || !DECL_HAS_VALUE_EXPR_P (decl)
214 || !lang_hooks.decls.omp_disregard_value_expr (decl, false))
215 return NULL_TREE;
216
217 tree v = DECL_VALUE_EXPR (decl);
218 if (TREE_CODE (v) != COMPONENT_REF)
219 return NULL_TREE;
220
221 while (1)
222 switch (TREE_CODE (v))
223 {
224 case COMPONENT_REF:
225 case MEM_REF:
226 case INDIRECT_REF:
227 CASE_CONVERT:
228 case POINTER_PLUS_EXPR:
229 v = TREE_OPERAND (v, 0);
230 continue;
231 case PARM_DECL:
232 if (DECL_CONTEXT (v) == current_function_decl
233 && DECL_ARTIFICIAL (v)
234 && TREE_CODE (TREE_TYPE (v)) == POINTER_TYPE)
235 return v;
236 return NULL_TREE;
237 default:
238 return NULL_TREE;
239 }
240 }
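
/* For instance (a C++ sketch): privatizing a non-static data member

       struct S {
         int n;
         void f () {
           #pragma omp parallel private (n)
           n = 0;
         }
       };

   makes the front end emit an artificial VAR_DECL for N whose
   DECL_VALUE_EXPR is this->n; the loop above unwinds such COMPONENT_REF
   chains back to the artificial "this" PARM_DECL.  */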
241
242 /* Helper for unshare_and_remap, called through walk_tree. */
243
244 static tree
245 unshare_and_remap_1 (tree *tp, int *walk_subtrees, void *data)
246 {
247 tree *pair = (tree *) data;
248 if (*tp == pair[0])
249 {
250 *tp = unshare_expr (pair[1]);
251 *walk_subtrees = 0;
252 }
253 else if (IS_TYPE_OR_DECL_P (*tp))
254 *walk_subtrees = 0;
255 return NULL_TREE;
256 }
257
258 /* Return unshare_expr (X) with all occurrences of FROM
259 replaced with TO. */
260
261 static tree
262 unshare_and_remap (tree x, tree from, tree to)
263 {
264 tree pair[2] = { from, to };
265 x = unshare_expr (x);
266 walk_tree (&x, unshare_and_remap_1, pair, NULL);
267 return x;
268 }
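
/* E.g. (a sketch): unshare_and_remap (this->a.b, this, that) returns a
   freshly unshared copy of the expression rewritten as that->a.b.  */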
269
270 /* Convenience function for calling scan_omp_1_op on tree operands. */
271
272 static inline tree
273 scan_omp_op (tree *tp, omp_context *ctx)
274 {
275 struct walk_stmt_info wi;
276
277 memset (&wi, 0, sizeof (wi));
278 wi.info = ctx;
279 wi.want_locations = true;
280
281 return walk_tree (tp, scan_omp_1_op, &wi, NULL);
282 }
283
284 static void lower_omp (gimple_seq *, omp_context *);
285 static tree lookup_decl_in_outer_ctx (tree, omp_context *);
286 static tree maybe_lookup_decl_in_outer_ctx (tree, omp_context *);
287
288 /* Return true if CTX is for an omp parallel. */
289
290 static inline bool
291 is_parallel_ctx (omp_context *ctx)
292 {
293 return gimple_code (ctx->stmt) == GIMPLE_OMP_PARALLEL;
294 }
295
296
297 /* Return true if CTX is for an omp task. */
298
299 static inline bool
300 is_task_ctx (omp_context *ctx)
301 {
302 return gimple_code (ctx->stmt) == GIMPLE_OMP_TASK;
303 }
304
305
306 /* Return true if CTX is for an omp taskloop. */
307
308 static inline bool
309 is_taskloop_ctx (omp_context *ctx)
310 {
311 return gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
312 && gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_TASKLOOP;
313 }
314
315
316 /* Return true if CTX is for a host omp teams. */
317
318 static inline bool
319 is_host_teams_ctx (omp_context *ctx)
320 {
321 return gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS
322 && gimple_omp_teams_host (as_a <gomp_teams *> (ctx->stmt));
323 }
324
325 /* Return true if CTX is for an omp parallel or omp task or host omp teams
326 (the last one is strictly not a task region in OpenMP speak, but we
327 need to treat it similarly). */
328
329 static inline bool
330 is_taskreg_ctx (omp_context *ctx)
331 {
332 return is_parallel_ctx (ctx) || is_task_ctx (ctx) || is_host_teams_ctx (ctx);
333 }
334
335 /* Return true if EXPR is variable sized. */
336
337 static inline bool
338 is_variable_sized (const_tree expr)
339 {
340 return !TREE_CONSTANT (TYPE_SIZE_UNIT (TREE_TYPE (expr)));
341 }
342
/* Lookup variables.  The "maybe" form allows the variable not to have
   been entered previously; the plain form asserts that it must have
   been.  */
346
347 static inline tree
348 lookup_decl (tree var, omp_context *ctx)
349 {
350 tree *n = ctx->cb.decl_map->get (var);
351 return *n;
352 }
353
354 static inline tree
355 maybe_lookup_decl (const_tree var, omp_context *ctx)
356 {
357 tree *n = ctx->cb.decl_map->get (const_cast<tree> (var));
358 return n ? *n : NULL_TREE;
359 }
360
361 static inline tree
362 lookup_field (tree var, omp_context *ctx)
363 {
364 splay_tree_node n;
365 n = splay_tree_lookup (ctx->field_map, (splay_tree_key) var);
366 return (tree) n->value;
367 }
368
369 static inline tree
370 lookup_sfield (splay_tree_key key, omp_context *ctx)
371 {
372 splay_tree_node n;
373 n = splay_tree_lookup (ctx->sfield_map
374 ? ctx->sfield_map : ctx->field_map, key);
375 return (tree) n->value;
376 }
377
378 static inline tree
379 lookup_sfield (tree var, omp_context *ctx)
380 {
381 return lookup_sfield ((splay_tree_key) var, ctx);
382 }
383
384 static inline tree
385 maybe_lookup_field (splay_tree_key key, omp_context *ctx)
386 {
387 splay_tree_node n;
388 n = splay_tree_lookup (ctx->field_map, key);
389 return n ? (tree) n->value : NULL_TREE;
390 }
391
392 static inline tree
393 maybe_lookup_field (tree var, omp_context *ctx)
394 {
395 return maybe_lookup_field ((splay_tree_key) var, ctx);
396 }
397
398 /* Return true if DECL should be copied by pointer. SHARED_CTX is
399 the parallel context if DECL is to be shared. */
400
401 static bool
402 use_pointer_for_field (tree decl, omp_context *shared_ctx)
403 {
404 if (AGGREGATE_TYPE_P (TREE_TYPE (decl))
405 || TYPE_ATOMIC (TREE_TYPE (decl)))
406 return true;
407
408 /* We can only use copy-in/copy-out semantics for shared variables
409 when we know the value is not accessible from an outer scope. */
410 if (shared_ctx)
411 {
412 gcc_assert (!is_gimple_omp_oacc (shared_ctx->stmt));
413
414 /* ??? Trivially accessible from anywhere. But why would we even
415 be passing an address in this case? Should we simply assert
416 this to be false, or should we have a cleanup pass that removes
417 these from the list of mappings? */
418 if (is_global_var (maybe_lookup_decl_in_outer_ctx (decl, shared_ctx)))
419 return true;
420
421 /* For variables with DECL_HAS_VALUE_EXPR_P set, we cannot tell
422 without analyzing the expression whether or not its location
423 is accessible to anyone else. In the case of nested parallel
424 regions it certainly may be. */
425 if (TREE_CODE (decl) != RESULT_DECL && DECL_HAS_VALUE_EXPR_P (decl))
426 return true;
427
428 /* Do not use copy-in/copy-out for variables that have their
429 address taken. */
430 if (is_global_var (decl))
431 {
432 /* For file scope vars, track whether we've seen them as
433 non-addressable initially and in that case, keep the same
434 answer for the duration of the pass, even when they are made
435 addressable later on e.g. through reduction expansion. Global
436 variables which weren't addressable before the pass will not
437 have their privatized copies address taken. See PR91216. */
438 if (!TREE_ADDRESSABLE (decl))
439 {
440 if (!global_nonaddressable_vars)
441 global_nonaddressable_vars = BITMAP_ALLOC (NULL);
442 bitmap_set_bit (global_nonaddressable_vars, DECL_UID (decl));
443 }
444 else if (!global_nonaddressable_vars
445 || !bitmap_bit_p (global_nonaddressable_vars,
446 DECL_UID (decl)))
447 return true;
448 }
449 else if (TREE_ADDRESSABLE (decl))
450 return true;
451
452 /* lower_send_shared_vars only uses copy-in, but not copy-out
453 for these. */
454 if (TREE_READONLY (decl)
455 || ((TREE_CODE (decl) == RESULT_DECL
456 || TREE_CODE (decl) == PARM_DECL)
457 && DECL_BY_REFERENCE (decl)))
458 return false;
459
460 /* Disallow copy-in/out in nested parallel if
461 decl is shared in outer parallel, otherwise
462 each thread could store the shared variable
463 in its own copy-in location, making the
464 variable no longer really shared. */
465 if (shared_ctx->is_nested)
466 {
467 omp_context *up;
468
469 for (up = shared_ctx->outer; up; up = up->outer)
470 if (is_taskreg_ctx (up) && maybe_lookup_decl (decl, up))
471 break;
472
473 if (up)
474 {
475 tree c;
476
477 for (c = gimple_omp_taskreg_clauses (up->stmt);
478 c; c = OMP_CLAUSE_CHAIN (c))
479 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED
480 && OMP_CLAUSE_DECL (c) == decl)
481 break;
482
483 if (c)
484 goto maybe_mark_addressable_and_ret;
485 }
486 }
487
/* For tasks avoid using copy-in/out.  As tasks can be deferred
   or executed in a different thread, when GOMP_task returns the
   task may not have terminated yet.  */
491 if (is_task_ctx (shared_ctx))
492 {
493 tree outer;
494 maybe_mark_addressable_and_ret:
495 outer = maybe_lookup_decl_in_outer_ctx (decl, shared_ctx);
496 if (is_gimple_reg (outer) && !omp_member_access_dummy_var (outer))
497 {
498 /* Taking address of OUTER in lower_send_shared_vars
499 might need regimplification of everything that uses the
500 variable. */
501 if (!task_shared_vars)
502 task_shared_vars = BITMAP_ALLOC (NULL);
503 bitmap_set_bit (task_shared_vars, DECL_UID (outer));
504 TREE_ADDRESSABLE (outer) = 1;
505 }
506 return true;
507 }
508 }
509
510 return false;
511 }
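
/* A sketch of the distinction: in

       int x = 1, y = 2;
       int *p = &y;
       #pragma omp parallel shared(x, y)
       { ... }

   X qualifies for copy-in/copy-out (sent by value and copied back),
   while Y is TREE_ADDRESSABLE because of P, so only its address is
   sent and every thread operates on the original location.  */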
512
513 /* Construct a new automatic decl similar to VAR. */
514
515 static tree
516 omp_copy_decl_2 (tree var, tree name, tree type, omp_context *ctx)
517 {
518 tree copy = copy_var_decl (var, name, type);
519
520 DECL_CONTEXT (copy) = current_function_decl;
521 DECL_CHAIN (copy) = ctx->block_vars;
/* If VAR is listed in task_shared_vars, it means it wasn't
   originally addressable and was made so only because a task needs
   to take its address.  We don't need to take the address of
   privatized copies of such a var.  */
526 if (TREE_ADDRESSABLE (var)
527 && ((task_shared_vars
528 && bitmap_bit_p (task_shared_vars, DECL_UID (var)))
529 || (global_nonaddressable_vars
530 && bitmap_bit_p (global_nonaddressable_vars, DECL_UID (var)))))
531 TREE_ADDRESSABLE (copy) = 0;
532 ctx->block_vars = copy;
533
534 return copy;
535 }
536
537 static tree
538 omp_copy_decl_1 (tree var, omp_context *ctx)
539 {
540 return omp_copy_decl_2 (var, DECL_NAME (var), TREE_TYPE (var), ctx);
541 }
542
543 /* Build COMPONENT_REF and set TREE_THIS_VOLATILE and TREE_READONLY on it
544 as appropriate. */
545 static tree
546 omp_build_component_ref (tree obj, tree field)
547 {
548 tree ret = build3 (COMPONENT_REF, TREE_TYPE (field), obj, field, NULL);
549 if (TREE_THIS_VOLATILE (field))
550 TREE_THIS_VOLATILE (ret) |= 1;
551 if (TREE_READONLY (field))
552 TREE_READONLY (ret) |= 1;
553 return ret;
554 }
555
556 /* Build tree nodes to access the field for VAR on the receiver side. */
557
558 static tree
559 build_receiver_ref (tree var, bool by_ref, omp_context *ctx)
560 {
561 tree x, field = lookup_field (var, ctx);
562
563 /* If the receiver record type was remapped in the child function,
564 remap the field into the new record type. */
565 x = maybe_lookup_field (field, ctx);
566 if (x != NULL)
567 field = x;
568
569 x = build_simple_mem_ref (ctx->receiver_decl);
570 TREE_THIS_NOTRAP (x) = 1;
571 x = omp_build_component_ref (x, field);
572 if (by_ref)
573 {
574 x = build_simple_mem_ref (x);
575 TREE_THIS_NOTRAP (x) = 1;
576 }
577
578 return x;
579 }
580
581 /* Build tree nodes to access VAR in the scope outer to CTX. In the case
582 of a parallel, this is a component reference; for workshare constructs
583 this is some variable. */
584
585 static tree
586 build_outer_var_ref (tree var, omp_context *ctx,
587 enum omp_clause_code code = OMP_CLAUSE_ERROR)
588 {
589 tree x;
590 omp_context *outer = ctx->outer;
591 while (outer && gimple_code (outer->stmt) == GIMPLE_OMP_TASKGROUP)
592 outer = outer->outer;
593
594 if (is_global_var (maybe_lookup_decl_in_outer_ctx (var, ctx)))
595 x = var;
596 else if (is_variable_sized (var))
597 {
598 x = TREE_OPERAND (DECL_VALUE_EXPR (var), 0);
599 x = build_outer_var_ref (x, ctx, code);
600 x = build_simple_mem_ref (x);
601 }
602 else if (is_taskreg_ctx (ctx))
603 {
604 bool by_ref = use_pointer_for_field (var, NULL);
605 x = build_receiver_ref (var, by_ref, ctx);
606 }
607 else if ((gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
608 && gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_SIMD)
609 || ctx->loop_p
610 || (code == OMP_CLAUSE_PRIVATE
611 && (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
612 || gimple_code (ctx->stmt) == GIMPLE_OMP_SECTIONS
613 || gimple_code (ctx->stmt) == GIMPLE_OMP_SINGLE)))
614 {
615 /* #pragma omp simd isn't a worksharing construct, and can reference
616 even private vars in its linear etc. clauses.
617 Similarly for OMP_CLAUSE_PRIVATE with outer ref, that can refer
618 to private vars in all worksharing constructs. */
619 x = NULL_TREE;
620 if (outer && is_taskreg_ctx (outer))
621 x = lookup_decl (var, outer);
622 else if (outer)
623 x = maybe_lookup_decl_in_outer_ctx (var, ctx);
624 if (x == NULL_TREE)
625 x = var;
626 }
627 else if (code == OMP_CLAUSE_LASTPRIVATE && is_taskloop_ctx (ctx))
628 {
629 gcc_assert (outer);
630 splay_tree_node n
631 = splay_tree_lookup (outer->field_map,
632 (splay_tree_key) &DECL_UID (var));
633 if (n == NULL)
634 {
635 if (is_global_var (maybe_lookup_decl_in_outer_ctx (var, outer)))
636 x = var;
637 else
638 x = lookup_decl (var, outer);
639 }
640 else
641 {
642 tree field = (tree) n->value;
643 /* If the receiver record type was remapped in the child function,
644 remap the field into the new record type. */
645 x = maybe_lookup_field (field, outer);
646 if (x != NULL)
647 field = x;
648
649 x = build_simple_mem_ref (outer->receiver_decl);
650 x = omp_build_component_ref (x, field);
651 if (use_pointer_for_field (var, outer))
652 x = build_simple_mem_ref (x);
653 }
654 }
655 else if (outer)
656 {
657 if (gimple_code (outer->stmt) == GIMPLE_OMP_GRID_BODY)
658 {
659 outer = outer->outer;
660 gcc_assert (outer
661 && gimple_code (outer->stmt) != GIMPLE_OMP_GRID_BODY);
662 }
663 x = lookup_decl (var, outer);
664 }
665 else if (omp_is_reference (var))
666 /* This can happen with orphaned constructs. If var is reference, it is
667 possible it is shared and as such valid. */
668 x = var;
669 else if (omp_member_access_dummy_var (var))
670 x = var;
671 else
672 gcc_unreachable ();
673
674 if (x == var)
675 {
676 tree t = omp_member_access_dummy_var (var);
677 if (t)
678 {
679 x = DECL_VALUE_EXPR (var);
680 tree o = maybe_lookup_decl_in_outer_ctx (t, ctx);
681 if (o != t)
682 x = unshare_and_remap (x, t, o);
683 else
684 x = unshare_expr (x);
685 }
686 }
687
688 if (omp_is_reference (var))
689 x = build_simple_mem_ref (x);
690
691 return x;
692 }
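
/* For example (a sketch): for a "#pragma omp simd linear(i)" nested in
   a parallel, the outer reference to I resolves via lookup_decl to the
   parallel's copy; in an orphaned construct where VAR is a reference,
   the outer reference may simply be VAR itself.  */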
693
694 /* Build tree nodes to access the field for VAR on the sender side. */
695
696 static tree
697 build_sender_ref (splay_tree_key key, omp_context *ctx)
698 {
699 tree field = lookup_sfield (key, ctx);
700 return omp_build_component_ref (ctx->sender_decl, field);
701 }
702
703 static tree
704 build_sender_ref (tree var, omp_context *ctx)
705 {
706 return build_sender_ref ((splay_tree_key) var, ctx);
707 }
708
/* Add a new field for VAR inside the structure CTX->SENDER_DECL.  MASK is
   a bitmask: bit 0 enters the field into CTX->FIELD_MAP, bit 1 into
   CTX->SFIELD_MAP, bit 2 gives the field pointer-to-pointer type (for
   arrays), bit 3 keys the mapping off &DECL_UID (VAR), and bit 4 keys it
   off &DECL_NAME (VAR) and gives the field the Fortran array-descriptor
   data type.  */
711
712 static void
713 install_var_field (tree var, bool by_ref, int mask, omp_context *ctx)
714 {
715 tree field, type, sfield = NULL_TREE;
716 splay_tree_key key = (splay_tree_key) var;
717
718 if ((mask & 16) != 0)
719 {
720 key = (splay_tree_key) &DECL_NAME (var);
721 gcc_checking_assert (key != (splay_tree_key) var);
722 }
723 if ((mask & 8) != 0)
724 {
725 key = (splay_tree_key) &DECL_UID (var);
726 gcc_checking_assert (key != (splay_tree_key) var);
727 }
728 gcc_assert ((mask & 1) == 0
729 || !splay_tree_lookup (ctx->field_map, key));
730 gcc_assert ((mask & 2) == 0 || !ctx->sfield_map
731 || !splay_tree_lookup (ctx->sfield_map, key));
732 gcc_assert ((mask & 3) == 3
733 || !is_gimple_omp_oacc (ctx->stmt));
734
735 type = TREE_TYPE (var);
736 if ((mask & 16) != 0)
737 type = lang_hooks.decls.omp_array_data (var, true);
738
739 /* Prevent redeclaring the var in the split-off function with a restrict
740 pointer type. Note that we only clear type itself, restrict qualifiers in
741 the pointed-to type will be ignored by points-to analysis. */
742 if (POINTER_TYPE_P (type)
743 && TYPE_RESTRICT (type))
744 type = build_qualified_type (type, TYPE_QUALS (type) & ~TYPE_QUAL_RESTRICT);
745
746 if (mask & 4)
747 {
748 gcc_assert (TREE_CODE (type) == ARRAY_TYPE);
749 type = build_pointer_type (build_pointer_type (type));
750 }
751 else if (by_ref)
752 type = build_pointer_type (type);
753 else if ((mask & 3) == 1 && omp_is_reference (var))
754 type = TREE_TYPE (type);
755
756 field = build_decl (DECL_SOURCE_LOCATION (var),
757 FIELD_DECL, DECL_NAME (var), type);
758
759 /* Remember what variable this field was created for. This does have a
760 side effect of making dwarf2out ignore this member, so for helpful
761 debugging we clear it later in delete_omp_context. */
762 DECL_ABSTRACT_ORIGIN (field) = var;
763 if ((mask & 16) == 0 && type == TREE_TYPE (var))
764 {
765 SET_DECL_ALIGN (field, DECL_ALIGN (var));
766 DECL_USER_ALIGN (field) = DECL_USER_ALIGN (var);
767 TREE_THIS_VOLATILE (field) = TREE_THIS_VOLATILE (var);
768 }
769 else
770 SET_DECL_ALIGN (field, TYPE_ALIGN (type));
771
772 if ((mask & 3) == 3)
773 {
774 insert_field_into_struct (ctx->record_type, field);
775 if (ctx->srecord_type)
776 {
777 sfield = build_decl (DECL_SOURCE_LOCATION (var),
778 FIELD_DECL, DECL_NAME (var), type);
779 DECL_ABSTRACT_ORIGIN (sfield) = var;
780 SET_DECL_ALIGN (sfield, DECL_ALIGN (field));
781 DECL_USER_ALIGN (sfield) = DECL_USER_ALIGN (field);
782 TREE_THIS_VOLATILE (sfield) = TREE_THIS_VOLATILE (field);
783 insert_field_into_struct (ctx->srecord_type, sfield);
784 }
785 }
786 else
787 {
788 if (ctx->srecord_type == NULL_TREE)
789 {
790 tree t;
791
792 ctx->srecord_type = lang_hooks.types.make_type (RECORD_TYPE);
793 ctx->sfield_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
794 for (t = TYPE_FIELDS (ctx->record_type); t ; t = TREE_CHAIN (t))
795 {
796 sfield = build_decl (DECL_SOURCE_LOCATION (t),
797 FIELD_DECL, DECL_NAME (t), TREE_TYPE (t));
798 DECL_ABSTRACT_ORIGIN (sfield) = DECL_ABSTRACT_ORIGIN (t);
799 insert_field_into_struct (ctx->srecord_type, sfield);
800 splay_tree_insert (ctx->sfield_map,
801 (splay_tree_key) DECL_ABSTRACT_ORIGIN (t),
802 (splay_tree_value) sfield);
803 }
804 }
805 sfield = field;
806 insert_field_into_struct ((mask & 1) ? ctx->record_type
807 : ctx->srecord_type, field);
808 }
809
810 if (mask & 1)
811 splay_tree_insert (ctx->field_map, key, (splay_tree_value) field);
812 if ((mask & 2) && ctx->sfield_map)
813 splay_tree_insert (ctx->sfield_map, key, (splay_tree_value) sfield);
814 }
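
/* Typical uses (a sketch): install_var_field (decl, by_ref, 3, ctx)
   creates one field registered in both maps; task firstprivate may
   split this into masks 1 and 2 so the sender and receiver records get
   separate fields; mask 19 (16|3) is the Fortran array-descriptor
   variant used for use_device_ptr/use_device_addr below.  */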
815
816 static tree
817 install_var_local (tree var, omp_context *ctx)
818 {
819 tree new_var = omp_copy_decl_1 (var, ctx);
820 insert_decl_map (&ctx->cb, var, new_var);
821 return new_var;
822 }
823
824 /* Adjust the replacement for DECL in CTX for the new context. This means
825 copying the DECL_VALUE_EXPR, and fixing up the type. */
826
827 static void
828 fixup_remapped_decl (tree decl, omp_context *ctx, bool private_debug)
829 {
830 tree new_decl, size;
831
832 new_decl = lookup_decl (decl, ctx);
833
834 TREE_TYPE (new_decl) = remap_type (TREE_TYPE (decl), &ctx->cb);
835
836 if ((!TREE_CONSTANT (DECL_SIZE (new_decl)) || private_debug)
837 && DECL_HAS_VALUE_EXPR_P (decl))
838 {
839 tree ve = DECL_VALUE_EXPR (decl);
840 walk_tree (&ve, copy_tree_body_r, &ctx->cb, NULL);
841 SET_DECL_VALUE_EXPR (new_decl, ve);
842 DECL_HAS_VALUE_EXPR_P (new_decl) = 1;
843 }
844
845 if (!TREE_CONSTANT (DECL_SIZE (new_decl)))
846 {
847 size = remap_decl (DECL_SIZE (decl), &ctx->cb);
848 if (size == error_mark_node)
849 size = TYPE_SIZE (TREE_TYPE (new_decl));
850 DECL_SIZE (new_decl) = size;
851
852 size = remap_decl (DECL_SIZE_UNIT (decl), &ctx->cb);
853 if (size == error_mark_node)
854 size = TYPE_SIZE_UNIT (TREE_TYPE (new_decl));
855 DECL_SIZE_UNIT (new_decl) = size;
856 }
857 }
858
859 /* The callback for remap_decl. Search all containing contexts for a
860 mapping of the variable; this avoids having to duplicate the splay
861 tree ahead of time. We know a mapping doesn't already exist in the
862 given context. Create new mappings to implement default semantics. */
863
864 static tree
865 omp_copy_decl (tree var, copy_body_data *cb)
866 {
867 omp_context *ctx = (omp_context *) cb;
868 tree new_var;
869
870 if (TREE_CODE (var) == LABEL_DECL)
871 {
872 if (FORCED_LABEL (var) || DECL_NONLOCAL (var))
873 return var;
874 new_var = create_artificial_label (DECL_SOURCE_LOCATION (var));
875 DECL_CONTEXT (new_var) = current_function_decl;
876 insert_decl_map (&ctx->cb, var, new_var);
877 return new_var;
878 }
879
880 while (!is_taskreg_ctx (ctx))
881 {
882 ctx = ctx->outer;
883 if (ctx == NULL)
884 return var;
885 new_var = maybe_lookup_decl (var, ctx);
886 if (new_var)
887 return new_var;
888 }
889
890 if (is_global_var (var) || decl_function_context (var) != ctx->cb.src_fn)
891 return var;
892
893 return error_mark_node;
894 }
895
896 /* Create a new context, with OUTER_CTX being the surrounding context. */
897
898 static omp_context *
899 new_omp_context (gimple *stmt, omp_context *outer_ctx)
900 {
901 omp_context *ctx = XCNEW (omp_context);
902
903 splay_tree_insert (all_contexts, (splay_tree_key) stmt,
904 (splay_tree_value) ctx);
905 ctx->stmt = stmt;
906
907 if (outer_ctx)
908 {
909 ctx->outer = outer_ctx;
910 ctx->cb = outer_ctx->cb;
911 ctx->cb.block = NULL;
912 ctx->depth = outer_ctx->depth + 1;
913 }
914 else
915 {
916 ctx->cb.src_fn = current_function_decl;
917 ctx->cb.dst_fn = current_function_decl;
918 ctx->cb.src_node = cgraph_node::get (current_function_decl);
919 gcc_checking_assert (ctx->cb.src_node);
920 ctx->cb.dst_node = ctx->cb.src_node;
921 ctx->cb.src_cfun = cfun;
922 ctx->cb.copy_decl = omp_copy_decl;
923 ctx->cb.eh_lp_nr = 0;
924 ctx->cb.transform_call_graph_edges = CB_CGE_MOVE;
925 ctx->cb.adjust_array_error_bounds = true;
926 ctx->cb.dont_remap_vla_if_no_change = true;
927 ctx->depth = 1;
928 }
929
930 ctx->cb.decl_map = new hash_map<tree, tree>;
931
932 return ctx;
933 }
934
935 static gimple_seq maybe_catch_exception (gimple_seq);
936
937 /* Finalize task copyfn. */
938
939 static void
940 finalize_task_copyfn (gomp_task *task_stmt)
941 {
942 struct function *child_cfun;
943 tree child_fn;
944 gimple_seq seq = NULL, new_seq;
945 gbind *bind;
946
947 child_fn = gimple_omp_task_copy_fn (task_stmt);
948 if (child_fn == NULL_TREE)
949 return;
950
951 child_cfun = DECL_STRUCT_FUNCTION (child_fn);
952 DECL_STRUCT_FUNCTION (child_fn)->curr_properties = cfun->curr_properties;
953
954 push_cfun (child_cfun);
955 bind = gimplify_body (child_fn, false);
956 gimple_seq_add_stmt (&seq, bind);
957 new_seq = maybe_catch_exception (seq);
958 if (new_seq != seq)
959 {
960 bind = gimple_build_bind (NULL, new_seq, NULL);
961 seq = NULL;
962 gimple_seq_add_stmt (&seq, bind);
963 }
964 gimple_set_body (child_fn, seq);
965 pop_cfun ();
966
967 /* Inform the callgraph about the new function. */
968 cgraph_node *node = cgraph_node::get_create (child_fn);
969 node->parallelized_function = 1;
970 cgraph_node::add_new_function (child_fn, false);
971 }
972
/* Destroy an omp_context data structure.  Called through the splay tree
   value delete callback.  */
975
976 static void
977 delete_omp_context (splay_tree_value value)
978 {
979 omp_context *ctx = (omp_context *) value;
980
981 delete ctx->cb.decl_map;
982
983 if (ctx->field_map)
984 splay_tree_delete (ctx->field_map);
985 if (ctx->sfield_map)
986 splay_tree_delete (ctx->sfield_map);
987
/* We hijacked DECL_ABSTRACT_ORIGIN earlier.  We need to clear it
   before debug information is generated, or it produces corrupted
   debug information.  */
990 if (ctx->record_type)
991 {
992 tree t;
993 for (t = TYPE_FIELDS (ctx->record_type); t ; t = DECL_CHAIN (t))
994 DECL_ABSTRACT_ORIGIN (t) = NULL;
995 }
996 if (ctx->srecord_type)
997 {
998 tree t;
999 for (t = TYPE_FIELDS (ctx->srecord_type); t ; t = DECL_CHAIN (t))
1000 DECL_ABSTRACT_ORIGIN (t) = NULL;
1001 }
1002
1003 if (is_task_ctx (ctx))
1004 finalize_task_copyfn (as_a <gomp_task *> (ctx->stmt));
1005
1006 if (ctx->task_reduction_map)
1007 {
1008 ctx->task_reductions.release ();
1009 delete ctx->task_reduction_map;
1010 }
1011
1012 delete ctx->lastprivate_conditional_map;
1013
1014 XDELETE (ctx);
1015 }
1016
1017 /* Fix up RECEIVER_DECL with a type that has been remapped to the child
1018 context. */
1019
1020 static void
1021 fixup_child_record_type (omp_context *ctx)
1022 {
1023 tree f, type = ctx->record_type;
1024
1025 if (!ctx->receiver_decl)
1026 return;
1027 /* ??? It isn't sufficient to just call remap_type here, because
1028 variably_modified_type_p doesn't work the way we expect for
1029 record types. Testing each field for whether it needs remapping
1030 and creating a new record by hand works, however. */
1031 for (f = TYPE_FIELDS (type); f ; f = DECL_CHAIN (f))
1032 if (variably_modified_type_p (TREE_TYPE (f), ctx->cb.src_fn))
1033 break;
1034 if (f)
1035 {
1036 tree name, new_fields = NULL;
1037
1038 type = lang_hooks.types.make_type (RECORD_TYPE);
1039 name = DECL_NAME (TYPE_NAME (ctx->record_type));
1040 name = build_decl (DECL_SOURCE_LOCATION (ctx->receiver_decl),
1041 TYPE_DECL, name, type);
1042 TYPE_NAME (type) = name;
1043
1044 for (f = TYPE_FIELDS (ctx->record_type); f ; f = DECL_CHAIN (f))
1045 {
1046 tree new_f = copy_node (f);
1047 DECL_CONTEXT (new_f) = type;
1048 TREE_TYPE (new_f) = remap_type (TREE_TYPE (f), &ctx->cb);
1049 DECL_CHAIN (new_f) = new_fields;
1050 walk_tree (&DECL_SIZE (new_f), copy_tree_body_r, &ctx->cb, NULL);
1051 walk_tree (&DECL_SIZE_UNIT (new_f), copy_tree_body_r,
1052 &ctx->cb, NULL);
1053 walk_tree (&DECL_FIELD_OFFSET (new_f), copy_tree_body_r,
1054 &ctx->cb, NULL);
1055 new_fields = new_f;
1056
1057 /* Arrange to be able to look up the receiver field
1058 given the sender field. */
1059 splay_tree_insert (ctx->field_map, (splay_tree_key) f,
1060 (splay_tree_value) new_f);
1061 }
1062 TYPE_FIELDS (type) = nreverse (new_fields);
1063 layout_type (type);
1064 }
1065
1066 /* In a target region we never modify any of the pointers in *.omp_data_i,
1067 so attempt to help the optimizers. */
1068 if (is_gimple_omp_offloaded (ctx->stmt))
1069 type = build_qualified_type (type, TYPE_QUAL_CONST);
1070
1071 TREE_TYPE (ctx->receiver_decl)
1072 = build_qualified_type (build_reference_type (type), TYPE_QUAL_RESTRICT);
1073 }
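
/* E.g. (a sketch): given

       void f (int n)
       {
         int a[n];
         #pragma omp parallel shared(a)
         ...
       }

   the field created for A has a variably modified type whose bound
   refers to the parent's N, so the record is rebuilt here with that
   bound remapped to the child function's copy.  */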
1074
1075 /* Instantiate decls as necessary in CTX to satisfy the data sharing
1076 specified by CLAUSES. */
1077
1078 static void
1079 scan_sharing_clauses (tree clauses, omp_context *ctx)
1080 {
1081 tree c, decl;
1082 bool scan_array_reductions = false;
1083
1084 for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
1085 {
1086 bool by_ref;
1087
1088 switch (OMP_CLAUSE_CODE (c))
1089 {
1090 case OMP_CLAUSE_PRIVATE:
1091 decl = OMP_CLAUSE_DECL (c);
1092 if (OMP_CLAUSE_PRIVATE_OUTER_REF (c))
1093 goto do_private;
1094 else if (!is_variable_sized (decl))
1095 install_var_local (decl, ctx);
1096 break;
1097
1098 case OMP_CLAUSE_SHARED:
1099 decl = OMP_CLAUSE_DECL (c);
1100 /* Ignore shared directives in teams construct inside of
1101 target construct. */
1102 if (gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS
1103 && !is_host_teams_ctx (ctx))
1104 {
1105 /* Global variables don't need to be copied,
1106 the receiver side will use them directly. */
1107 tree odecl = maybe_lookup_decl_in_outer_ctx (decl, ctx);
1108 if (is_global_var (odecl))
1109 break;
1110 insert_decl_map (&ctx->cb, decl, odecl);
1111 break;
1112 }
1113 gcc_assert (is_taskreg_ctx (ctx));
1114 gcc_assert (!COMPLETE_TYPE_P (TREE_TYPE (decl))
1115 || !is_variable_sized (decl));
1116 /* Global variables don't need to be copied,
1117 the receiver side will use them directly. */
1118 if (is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx)))
1119 break;
1120 if (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
1121 {
1122 use_pointer_for_field (decl, ctx);
1123 break;
1124 }
1125 by_ref = use_pointer_for_field (decl, NULL);
1126 if ((! TREE_READONLY (decl) && !OMP_CLAUSE_SHARED_READONLY (c))
1127 || TREE_ADDRESSABLE (decl)
1128 || by_ref
1129 || omp_is_reference (decl))
1130 {
1131 by_ref = use_pointer_for_field (decl, ctx);
1132 install_var_field (decl, by_ref, 3, ctx);
1133 install_var_local (decl, ctx);
1134 break;
1135 }
1136 /* We don't need to copy const scalar vars back. */
1137 OMP_CLAUSE_SET_CODE (c, OMP_CLAUSE_FIRSTPRIVATE);
1138 goto do_private;
1139
1140 case OMP_CLAUSE_REDUCTION:
1141 case OMP_CLAUSE_IN_REDUCTION:
1142 decl = OMP_CLAUSE_DECL (c);
1143 if (TREE_CODE (decl) == MEM_REF)
1144 {
1145 tree t = TREE_OPERAND (decl, 0);
1146 if (TREE_CODE (t) == POINTER_PLUS_EXPR)
1147 t = TREE_OPERAND (t, 0);
1148 if (TREE_CODE (t) == INDIRECT_REF
1149 || TREE_CODE (t) == ADDR_EXPR)
1150 t = TREE_OPERAND (t, 0);
1151 install_var_local (t, ctx);
1152 if (is_taskreg_ctx (ctx)
1153 && (!is_global_var (maybe_lookup_decl_in_outer_ctx (t, ctx))
1154 || (is_task_ctx (ctx)
1155 && (TREE_CODE (TREE_TYPE (t)) == POINTER_TYPE
1156 || (TREE_CODE (TREE_TYPE (t)) == REFERENCE_TYPE
1157 && (TREE_CODE (TREE_TYPE (TREE_TYPE (t)))
1158 == POINTER_TYPE)))))
1159 && !is_variable_sized (t)
1160 && (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION
1161 || (!OMP_CLAUSE_REDUCTION_TASK (c)
1162 && !is_task_ctx (ctx))))
1163 {
1164 by_ref = use_pointer_for_field (t, NULL);
1165 if (is_task_ctx (ctx)
1166 && TREE_CODE (TREE_TYPE (t)) == REFERENCE_TYPE
1167 && TREE_CODE (TREE_TYPE (TREE_TYPE (t))) == POINTER_TYPE)
1168 {
1169 install_var_field (t, false, 1, ctx);
1170 install_var_field (t, by_ref, 2, ctx);
1171 }
1172 else
1173 install_var_field (t, by_ref, 3, ctx);
1174 }
1175 break;
1176 }
1177 if (is_task_ctx (ctx)
1178 || (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
1179 && OMP_CLAUSE_REDUCTION_TASK (c)
1180 && is_parallel_ctx (ctx)))
1181 {
1182 /* Global variables don't need to be copied,
1183 the receiver side will use them directly. */
1184 if (!is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx)))
1185 {
1186 by_ref = use_pointer_for_field (decl, ctx);
1187 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_IN_REDUCTION)
1188 install_var_field (decl, by_ref, 3, ctx);
1189 }
1190 install_var_local (decl, ctx);
1191 break;
1192 }
1193 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
1194 && OMP_CLAUSE_REDUCTION_TASK (c))
1195 {
1196 install_var_local (decl, ctx);
1197 break;
1198 }
1199 goto do_private;
1200
1201 case OMP_CLAUSE_LASTPRIVATE:
1202 /* Let the corresponding firstprivate clause create
1203 the variable. */
1204 if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
1205 break;
1206 /* FALLTHRU */
1207
1208 case OMP_CLAUSE_FIRSTPRIVATE:
1209 case OMP_CLAUSE_LINEAR:
1210 decl = OMP_CLAUSE_DECL (c);
1211 do_private:
1212 if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE
1213 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_IS_DEVICE_PTR)
1214 && is_gimple_omp_offloaded (ctx->stmt))
1215 {
1216 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
1217 install_var_field (decl, !omp_is_reference (decl), 3, ctx);
1218 else if (TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
1219 install_var_field (decl, true, 3, ctx);
1220 else
1221 install_var_field (decl, false, 3, ctx);
1222 }
1223 if (is_variable_sized (decl))
1224 {
1225 if (is_task_ctx (ctx))
1226 install_var_field (decl, false, 1, ctx);
1227 break;
1228 }
1229 else if (is_taskreg_ctx (ctx))
1230 {
1231 bool global
1232 = is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx));
1233 by_ref = use_pointer_for_field (decl, NULL);
1234
1235 if (is_task_ctx (ctx)
1236 && (global || by_ref || omp_is_reference (decl)))
1237 {
1238 install_var_field (decl, false, 1, ctx);
1239 if (!global)
1240 install_var_field (decl, by_ref, 2, ctx);
1241 }
1242 else if (!global)
1243 install_var_field (decl, by_ref, 3, ctx);
1244 }
1245 install_var_local (decl, ctx);
1246 break;
1247
1248 case OMP_CLAUSE_USE_DEVICE_PTR:
1249 case OMP_CLAUSE_USE_DEVICE_ADDR:
1250 decl = OMP_CLAUSE_DECL (c);
1251
1252 /* Fortran array descriptors. */
1253 if (lang_hooks.decls.omp_array_data (decl, true))
1254 install_var_field (decl, false, 19, ctx);
1255 else if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_USE_DEVICE_ADDR
1256 && !omp_is_reference (decl)
1257 && !omp_is_allocatable_or_ptr (decl))
1258 || TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
1259 install_var_field (decl, true, 11, ctx);
1260 else
1261 install_var_field (decl, false, 11, ctx);
1262 if (DECL_SIZE (decl)
1263 && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
1264 {
1265 tree decl2 = DECL_VALUE_EXPR (decl);
1266 gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
1267 decl2 = TREE_OPERAND (decl2, 0);
1268 gcc_assert (DECL_P (decl2));
1269 install_var_local (decl2, ctx);
1270 }
1271 install_var_local (decl, ctx);
1272 break;
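
	  /* A sketch of what this handles: for C code like

	         int *p = ...;
	         #pragma omp target data map(p[0:n]) use_device_ptr(p)

	     P gets a plain pointer field (mask 11).  Fortran operands
	     carrying an array descriptor (lang_hooks.decls.omp_array_data)
	     use the mask-19 field instead so the descriptor's data pointer
	     can be translated, and allocatable/pointer operands
	     (omp_is_allocatable_or_ptr) keep their extra indirection.  */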
1273
1274 case OMP_CLAUSE_IS_DEVICE_PTR:
1275 decl = OMP_CLAUSE_DECL (c);
1276 goto do_private;
1277
1278 case OMP_CLAUSE__LOOPTEMP_:
1279 case OMP_CLAUSE__REDUCTEMP_:
1280 gcc_assert (is_taskreg_ctx (ctx));
1281 decl = OMP_CLAUSE_DECL (c);
1282 install_var_field (decl, false, 3, ctx);
1283 install_var_local (decl, ctx);
1284 break;
1285
1286 case OMP_CLAUSE_COPYPRIVATE:
1287 case OMP_CLAUSE_COPYIN:
1288 decl = OMP_CLAUSE_DECL (c);
1289 by_ref = use_pointer_for_field (decl, NULL);
1290 install_var_field (decl, by_ref, 3, ctx);
1291 break;
1292
1293 case OMP_CLAUSE_FINAL:
1294 case OMP_CLAUSE_IF:
1295 case OMP_CLAUSE_NUM_THREADS:
1296 case OMP_CLAUSE_NUM_TEAMS:
1297 case OMP_CLAUSE_THREAD_LIMIT:
1298 case OMP_CLAUSE_DEVICE:
1299 case OMP_CLAUSE_SCHEDULE:
1300 case OMP_CLAUSE_DIST_SCHEDULE:
1301 case OMP_CLAUSE_DEPEND:
1302 case OMP_CLAUSE_PRIORITY:
1303 case OMP_CLAUSE_GRAINSIZE:
1304 case OMP_CLAUSE_NUM_TASKS:
1305 case OMP_CLAUSE_NUM_GANGS:
1306 case OMP_CLAUSE_NUM_WORKERS:
1307 case OMP_CLAUSE_VECTOR_LENGTH:
1308 if (ctx->outer)
1309 scan_omp_op (&OMP_CLAUSE_OPERAND (c, 0), ctx->outer);
1310 break;
1311
1312 case OMP_CLAUSE_TO:
1313 case OMP_CLAUSE_FROM:
1314 case OMP_CLAUSE_MAP:
1315 if (ctx->outer)
1316 scan_omp_op (&OMP_CLAUSE_SIZE (c), ctx->outer);
1317 decl = OMP_CLAUSE_DECL (c);
/* Global variables with the "omp declare target" attribute
   don't need to be copied, the receiver side will use them
   directly.  However, global variables with the "omp declare
   target link" attribute, and maps with the ALWAYS modifier,
   do need to be copied.  */
1322 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
1323 && DECL_P (decl)
1324 && ((OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_FIRSTPRIVATE_POINTER
1325 && (OMP_CLAUSE_MAP_KIND (c)
1326 != GOMP_MAP_FIRSTPRIVATE_REFERENCE))
1327 || TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
1328 && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_ALWAYS_TO
1329 && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_ALWAYS_FROM
1330 && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_ALWAYS_TOFROM
1331 && is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx))
1332 && varpool_node::get_create (decl)->offloadable
1333 && !lookup_attribute ("omp declare target link",
1334 DECL_ATTRIBUTES (decl)))
1335 break;
1336 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
1337 && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_POINTER)
1338 {
1339 /* Ignore GOMP_MAP_POINTER kind for arrays in regions that are
1340 not offloaded; there is nothing to map for those. */
1341 if (!is_gimple_omp_offloaded (ctx->stmt)
1342 && !POINTER_TYPE_P (TREE_TYPE (decl))
1343 && !OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c))
1344 break;
1345 }
1346 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
1347 && (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER
1348 || (OMP_CLAUSE_MAP_KIND (c)
1349 == GOMP_MAP_FIRSTPRIVATE_REFERENCE)))
1350 {
1351 if (TREE_CODE (decl) == COMPONENT_REF
1352 || (TREE_CODE (decl) == INDIRECT_REF
1353 && TREE_CODE (TREE_OPERAND (decl, 0)) == COMPONENT_REF
1354 && (TREE_CODE (TREE_TYPE (TREE_OPERAND (decl, 0)))
1355 == REFERENCE_TYPE)))
1356 break;
1357 if (DECL_SIZE (decl)
1358 && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
1359 {
1360 tree decl2 = DECL_VALUE_EXPR (decl);
1361 gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
1362 decl2 = TREE_OPERAND (decl2, 0);
1363 gcc_assert (DECL_P (decl2));
1364 install_var_local (decl2, ctx);
1365 }
1366 install_var_local (decl, ctx);
1367 break;
1368 }
1369 if (DECL_P (decl))
1370 {
1371 if (DECL_SIZE (decl)
1372 && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
1373 {
1374 tree decl2 = DECL_VALUE_EXPR (decl);
1375 gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
1376 decl2 = TREE_OPERAND (decl2, 0);
1377 gcc_assert (DECL_P (decl2));
1378 install_var_field (decl2, true, 3, ctx);
1379 install_var_local (decl2, ctx);
1380 install_var_local (decl, ctx);
1381 }
1382 else
1383 {
1384 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
1385 && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_POINTER
1386 && !OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c)
1387 && TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
1388 install_var_field (decl, true, 7, ctx);
1389 else
1390 install_var_field (decl, true, 3, ctx);
1391 if (is_gimple_omp_offloaded (ctx->stmt)
1392 && !OMP_CLAUSE_MAP_IN_REDUCTION (c))
1393 install_var_local (decl, ctx);
1394 }
1395 }
1396 else
1397 {
1398 tree base = get_base_address (decl);
1399 tree nc = OMP_CLAUSE_CHAIN (c);
1400 if (DECL_P (base)
1401 && nc != NULL_TREE
1402 && OMP_CLAUSE_CODE (nc) == OMP_CLAUSE_MAP
1403 && OMP_CLAUSE_DECL (nc) == base
1404 && OMP_CLAUSE_MAP_KIND (nc) == GOMP_MAP_POINTER
1405 && integer_zerop (OMP_CLAUSE_SIZE (nc)))
1406 {
1407 OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c) = 1;
1408 OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (nc) = 1;
1409 }
1410 else
1411 {
1412 if (ctx->outer)
1413 {
1414 scan_omp_op (&OMP_CLAUSE_DECL (c), ctx->outer);
1415 decl = OMP_CLAUSE_DECL (c);
1416 }
1417 gcc_assert (!splay_tree_lookup (ctx->field_map,
1418 (splay_tree_key) decl));
1419 tree field
1420 = build_decl (OMP_CLAUSE_LOCATION (c),
1421 FIELD_DECL, NULL_TREE, ptr_type_node);
1422 SET_DECL_ALIGN (field, TYPE_ALIGN (ptr_type_node));
1423 insert_field_into_struct (ctx->record_type, field);
1424 splay_tree_insert (ctx->field_map, (splay_tree_key) decl,
1425 (splay_tree_value) field);
1426 }
1427 }
1428 break;
1429
1430 case OMP_CLAUSE__GRIDDIM_:
1431 if (ctx->outer)
1432 {
1433 scan_omp_op (&OMP_CLAUSE__GRIDDIM__SIZE (c), ctx->outer);
1434 scan_omp_op (&OMP_CLAUSE__GRIDDIM__GROUP (c), ctx->outer);
1435 }
1436 break;
1437
1438 case OMP_CLAUSE_ORDER:
1439 ctx->order_concurrent = true;
1440 break;
1441
1442 case OMP_CLAUSE_BIND:
1443 ctx->loop_p = true;
1444 break;
1445
1446 case OMP_CLAUSE_NOWAIT:
1447 case OMP_CLAUSE_ORDERED:
1448 case OMP_CLAUSE_COLLAPSE:
1449 case OMP_CLAUSE_UNTIED:
1450 case OMP_CLAUSE_MERGEABLE:
1451 case OMP_CLAUSE_PROC_BIND:
1452 case OMP_CLAUSE_SAFELEN:
1453 case OMP_CLAUSE_SIMDLEN:
1454 case OMP_CLAUSE_THREADS:
1455 case OMP_CLAUSE_SIMD:
1456 case OMP_CLAUSE_NOGROUP:
1457 case OMP_CLAUSE_DEFAULTMAP:
1458 case OMP_CLAUSE_ASYNC:
1459 case OMP_CLAUSE_WAIT:
1460 case OMP_CLAUSE_GANG:
1461 case OMP_CLAUSE_WORKER:
1462 case OMP_CLAUSE_VECTOR:
1463 case OMP_CLAUSE_INDEPENDENT:
1464 case OMP_CLAUSE_AUTO:
1465 case OMP_CLAUSE_SEQ:
1466 case OMP_CLAUSE_TILE:
1467 case OMP_CLAUSE__SIMT_:
1468 case OMP_CLAUSE_DEFAULT:
1469 case OMP_CLAUSE_NONTEMPORAL:
1470 case OMP_CLAUSE_IF_PRESENT:
1471 case OMP_CLAUSE_FINALIZE:
1472 case OMP_CLAUSE_TASK_REDUCTION:
1473 break;
1474
1475 case OMP_CLAUSE_ALIGNED:
1476 decl = OMP_CLAUSE_DECL (c);
1477 if (is_global_var (decl)
1478 && TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
1479 install_var_local (decl, ctx);
1480 break;
1481
1482 case OMP_CLAUSE__CONDTEMP_:
1483 decl = OMP_CLAUSE_DECL (c);
1484 if (is_parallel_ctx (ctx))
1485 {
1486 install_var_field (decl, false, 3, ctx);
1487 install_var_local (decl, ctx);
1488 }
1489 else if (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
1490 && gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_SIMD
1491 && !OMP_CLAUSE__CONDTEMP__ITER (c))
1492 install_var_local (decl, ctx);
1493 break;
1494
1495 case OMP_CLAUSE__CACHE_:
1496 default:
1497 gcc_unreachable ();
1498 }
1499 }
1500
1501 for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
1502 {
1503 switch (OMP_CLAUSE_CODE (c))
1504 {
1505 case OMP_CLAUSE_LASTPRIVATE:
1506 /* Let the corresponding firstprivate clause create
1507 the variable. */
1508 if (OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c))
1509 scan_array_reductions = true;
1510 if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
1511 break;
1512 /* FALLTHRU */
1513
1514 case OMP_CLAUSE_FIRSTPRIVATE:
1515 case OMP_CLAUSE_PRIVATE:
1516 case OMP_CLAUSE_LINEAR:
1517 case OMP_CLAUSE_IS_DEVICE_PTR:
1518 decl = OMP_CLAUSE_DECL (c);
1519 if (is_variable_sized (decl))
1520 {
1521 if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE
1522 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_IS_DEVICE_PTR)
1523 && is_gimple_omp_offloaded (ctx->stmt))
1524 {
1525 tree decl2 = DECL_VALUE_EXPR (decl);
1526 gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
1527 decl2 = TREE_OPERAND (decl2, 0);
1528 gcc_assert (DECL_P (decl2));
1529 install_var_local (decl2, ctx);
1530 fixup_remapped_decl (decl2, ctx, false);
1531 }
1532 install_var_local (decl, ctx);
1533 }
1534 fixup_remapped_decl (decl, ctx,
1535 OMP_CLAUSE_CODE (c) == OMP_CLAUSE_PRIVATE
1536 && OMP_CLAUSE_PRIVATE_DEBUG (c));
1537 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
1538 && OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c))
1539 scan_array_reductions = true;
1540 break;
1541
1542 case OMP_CLAUSE_REDUCTION:
1543 case OMP_CLAUSE_IN_REDUCTION:
1544 decl = OMP_CLAUSE_DECL (c);
1545 if (TREE_CODE (decl) != MEM_REF)
1546 {
1547 if (is_variable_sized (decl))
1548 install_var_local (decl, ctx);
1549 fixup_remapped_decl (decl, ctx, false);
1550 }
1551 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
1552 scan_array_reductions = true;
1553 break;
1554
1555 case OMP_CLAUSE_TASK_REDUCTION:
1556 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
1557 scan_array_reductions = true;
1558 break;
1559
1560 case OMP_CLAUSE_SHARED:
1561 /* Ignore shared directives in teams construct inside of
1562 target construct. */
1563 if (gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS
1564 && !is_host_teams_ctx (ctx))
1565 break;
1566 decl = OMP_CLAUSE_DECL (c);
1567 if (is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx)))
1568 break;
1569 if (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
1570 {
1571 if (is_global_var (maybe_lookup_decl_in_outer_ctx (decl,
1572 ctx->outer)))
1573 break;
1574 bool by_ref = use_pointer_for_field (decl, ctx);
1575 install_var_field (decl, by_ref, 11, ctx);
1576 break;
1577 }
1578 fixup_remapped_decl (decl, ctx, false);
1579 break;
1580
1581 case OMP_CLAUSE_MAP:
1582 if (!is_gimple_omp_offloaded (ctx->stmt))
1583 break;
1584 decl = OMP_CLAUSE_DECL (c);
1585 if (DECL_P (decl)
1586 && ((OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_FIRSTPRIVATE_POINTER
1587 && (OMP_CLAUSE_MAP_KIND (c)
1588 != GOMP_MAP_FIRSTPRIVATE_REFERENCE))
1589 || TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
1590 && is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx))
1591 && varpool_node::get_create (decl)->offloadable)
1592 break;
1593 if (DECL_P (decl))
1594 {
1595 if ((OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_POINTER
1596 || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER)
1597 && TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE
1598 && !COMPLETE_TYPE_P (TREE_TYPE (decl)))
1599 {
1600 tree new_decl = lookup_decl (decl, ctx);
1601 TREE_TYPE (new_decl)
1602 = remap_type (TREE_TYPE (decl), &ctx->cb);
1603 }
1604 else if (DECL_SIZE (decl)
1605 && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
1606 {
1607 tree decl2 = DECL_VALUE_EXPR (decl);
1608 gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
1609 decl2 = TREE_OPERAND (decl2, 0);
1610 gcc_assert (DECL_P (decl2));
1611 fixup_remapped_decl (decl2, ctx, false);
1612 fixup_remapped_decl (decl, ctx, true);
1613 }
1614 else
1615 fixup_remapped_decl (decl, ctx, false);
1616 }
1617 break;
1618
1619 case OMP_CLAUSE_COPYPRIVATE:
1620 case OMP_CLAUSE_COPYIN:
1621 case OMP_CLAUSE_DEFAULT:
1622 case OMP_CLAUSE_IF:
1623 case OMP_CLAUSE_NUM_THREADS:
1624 case OMP_CLAUSE_NUM_TEAMS:
1625 case OMP_CLAUSE_THREAD_LIMIT:
1626 case OMP_CLAUSE_DEVICE:
1627 case OMP_CLAUSE_SCHEDULE:
1628 case OMP_CLAUSE_DIST_SCHEDULE:
1629 case OMP_CLAUSE_NOWAIT:
1630 case OMP_CLAUSE_ORDERED:
1631 case OMP_CLAUSE_COLLAPSE:
1632 case OMP_CLAUSE_UNTIED:
1633 case OMP_CLAUSE_FINAL:
1634 case OMP_CLAUSE_MERGEABLE:
1635 case OMP_CLAUSE_PROC_BIND:
1636 case OMP_CLAUSE_SAFELEN:
1637 case OMP_CLAUSE_SIMDLEN:
1638 case OMP_CLAUSE_ALIGNED:
1639 case OMP_CLAUSE_DEPEND:
1640 case OMP_CLAUSE__LOOPTEMP_:
1641 case OMP_CLAUSE__REDUCTEMP_:
1642 case OMP_CLAUSE_TO:
1643 case OMP_CLAUSE_FROM:
1644 case OMP_CLAUSE_PRIORITY:
1645 case OMP_CLAUSE_GRAINSIZE:
1646 case OMP_CLAUSE_NUM_TASKS:
1647 case OMP_CLAUSE_THREADS:
1648 case OMP_CLAUSE_SIMD:
1649 case OMP_CLAUSE_NOGROUP:
1650 case OMP_CLAUSE_DEFAULTMAP:
1651 case OMP_CLAUSE_ORDER:
1652 case OMP_CLAUSE_BIND:
1653 case OMP_CLAUSE_USE_DEVICE_PTR:
1654 case OMP_CLAUSE_USE_DEVICE_ADDR:
1655 case OMP_CLAUSE_NONTEMPORAL:
1656 case OMP_CLAUSE_ASYNC:
1657 case OMP_CLAUSE_WAIT:
1658 case OMP_CLAUSE_NUM_GANGS:
1659 case OMP_CLAUSE_NUM_WORKERS:
1660 case OMP_CLAUSE_VECTOR_LENGTH:
1661 case OMP_CLAUSE_GANG:
1662 case OMP_CLAUSE_WORKER:
1663 case OMP_CLAUSE_VECTOR:
1664 case OMP_CLAUSE_INDEPENDENT:
1665 case OMP_CLAUSE_AUTO:
1666 case OMP_CLAUSE_SEQ:
1667 case OMP_CLAUSE_TILE:
1668 case OMP_CLAUSE__GRIDDIM_:
1669 case OMP_CLAUSE__SIMT_:
1670 case OMP_CLAUSE_IF_PRESENT:
1671 case OMP_CLAUSE_FINALIZE:
1672 case OMP_CLAUSE__CONDTEMP_:
1673 break;
1674
1675 case OMP_CLAUSE__CACHE_:
1676 default:
1677 gcc_unreachable ();
1678 }
1679 }
1680
1681 gcc_checking_assert (!scan_array_reductions
1682 || !is_gimple_omp_oacc (ctx->stmt));
1683 if (scan_array_reductions)
1684 {
1685 for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
1686 if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
1687 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_IN_REDUCTION
1688 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_TASK_REDUCTION)
1689 && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
1690 {
1691 scan_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c), ctx);
1692 scan_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c), ctx);
1693 }
1694 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
1695 && OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c))
1696 scan_omp (&OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c), ctx);
1697 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
1698 && OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c))
1699 scan_omp (&OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c), ctx);
1700 }
1701 }
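
/* A sketch of the two passes above: given

       #pragma omp task firstprivate(x) shared(y)

   the first loop installs the sender/receiver record fields and the
   local replacement decls; the second loop runs only after every clause
   has been seen, so fixup_remapped_decl can rely on all replacements
   being in place (e.g. a lastprivate waits for its matching
   firstprivate to create the variable).  */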
1702
1703 /* Create a new name for omp child function. Returns an identifier. */
1704
1705 static tree
1706 create_omp_child_function_name (bool task_copy)
1707 {
1708 return clone_function_name_numbered (current_function_decl,
1709 task_copy ? "_omp_cpyfn" : "_omp_fn");
1710 }
1711
1712 /* Return true if CTX may belong to offloaded code: either if current function
1713 is offloaded, or any enclosing context corresponds to a target region. */
1714
1715 static bool
1716 omp_maybe_offloaded_ctx (omp_context *ctx)
1717 {
1718 if (cgraph_node::get (current_function_decl)->offloadable)
1719 return true;
1720 for (; ctx; ctx = ctx->outer)
1721 if (is_gimple_omp_offloaded (ctx->stmt))
1722 return true;
1723 return false;
1724 }
1725
1726 /* Build a decl for the omp child function. It'll not contain a body
1727 yet, just the bare decl. */
1728
1729 static void
1730 create_omp_child_function (omp_context *ctx, bool task_copy)
1731 {
1732 tree decl, type, name, t;
1733
1734 name = create_omp_child_function_name (task_copy);
1735 if (task_copy)
1736 type = build_function_type_list (void_type_node, ptr_type_node,
1737 ptr_type_node, NULL_TREE);
1738 else
1739 type = build_function_type_list (void_type_node, ptr_type_node, NULL_TREE);
1740
1741 decl = build_decl (gimple_location (ctx->stmt), FUNCTION_DECL, name, type);
1742
1743 gcc_checking_assert (!is_gimple_omp_oacc (ctx->stmt)
1744 || !task_copy);
1745 if (!task_copy)
1746 ctx->cb.dst_fn = decl;
1747 else
1748 gimple_omp_task_set_copy_fn (ctx->stmt, decl);
1749
1750 TREE_STATIC (decl) = 1;
1751 TREE_USED (decl) = 1;
1752 DECL_ARTIFICIAL (decl) = 1;
1753 DECL_IGNORED_P (decl) = 0;
1754 TREE_PUBLIC (decl) = 0;
1755 DECL_UNINLINABLE (decl) = 1;
1756 DECL_EXTERNAL (decl) = 0;
1757 DECL_CONTEXT (decl) = NULL_TREE;
1758 DECL_INITIAL (decl) = make_node (BLOCK);
1759 BLOCK_SUPERCONTEXT (DECL_INITIAL (decl)) = decl;
1760 DECL_ATTRIBUTES (decl) = DECL_ATTRIBUTES (current_function_decl);
1761 /* Remove omp declare simd attribute from the new attributes. */
1762 if (tree a = lookup_attribute ("omp declare simd", DECL_ATTRIBUTES (decl)))
1763 {
1764 while (tree a2 = lookup_attribute ("omp declare simd", TREE_CHAIN (a)))
1765 a = a2;
1766 a = TREE_CHAIN (a);
1767 for (tree *p = &DECL_ATTRIBUTES (decl); *p != a;)
1768 if (is_attribute_p ("omp declare simd", get_attribute_name (*p)))
1769 *p = TREE_CHAIN (*p);
1770 else
1771 {
1772 tree chain = TREE_CHAIN (*p);
1773 *p = copy_node (*p);
1774 p = &TREE_CHAIN (*p);
1775 *p = chain;
1776 }
1777 }
1778 DECL_FUNCTION_SPECIFIC_OPTIMIZATION (decl)
1779 = DECL_FUNCTION_SPECIFIC_OPTIMIZATION (current_function_decl);
1780 DECL_FUNCTION_SPECIFIC_TARGET (decl)
1781 = DECL_FUNCTION_SPECIFIC_TARGET (current_function_decl);
1782 DECL_FUNCTION_VERSIONED (decl)
1783 = DECL_FUNCTION_VERSIONED (current_function_decl);
1784
1785 if (omp_maybe_offloaded_ctx (ctx))
1786 {
1787 cgraph_node::get_create (decl)->offloadable = 1;
1788 if (ENABLE_OFFLOADING)
1789 g->have_offload = true;
1790 }
1791
1792 if (cgraph_node::get_create (decl)->offloadable
1793 && !lookup_attribute ("omp declare target",
1794 DECL_ATTRIBUTES (current_function_decl)))
1795 {
1796 const char *target_attr = (is_gimple_omp_offloaded (ctx->stmt)
1797 ? "omp target entrypoint"
1798 : "omp declare target");
1799 DECL_ATTRIBUTES (decl)
1800 = tree_cons (get_identifier (target_attr),
1801 NULL_TREE, DECL_ATTRIBUTES (decl));
1802 }
1803
1804 t = build_decl (DECL_SOURCE_LOCATION (decl),
1805 RESULT_DECL, NULL_TREE, void_type_node);
1806 DECL_ARTIFICIAL (t) = 1;
1807 DECL_IGNORED_P (t) = 1;
1808 DECL_CONTEXT (t) = decl;
1809 DECL_RESULT (decl) = t;
1810
1811 tree data_name = get_identifier (".omp_data_i");
1812 t = build_decl (DECL_SOURCE_LOCATION (decl), PARM_DECL, data_name,
1813 ptr_type_node);
1814 DECL_ARTIFICIAL (t) = 1;
1815 DECL_NAMELESS (t) = 1;
1816 DECL_ARG_TYPE (t) = ptr_type_node;
1817 DECL_CONTEXT (t) = current_function_decl;
1818 TREE_USED (t) = 1;
1819 TREE_READONLY (t) = 1;
1820 DECL_ARGUMENTS (decl) = t;
1821 if (!task_copy)
1822 ctx->receiver_decl = t;
1823 else
1824 {
1825 t = build_decl (DECL_SOURCE_LOCATION (decl),
1826 PARM_DECL, get_identifier (".omp_data_o"),
1827 ptr_type_node);
1828 DECL_ARTIFICIAL (t) = 1;
1829 DECL_NAMELESS (t) = 1;
1830 DECL_ARG_TYPE (t) = ptr_type_node;
1831 DECL_CONTEXT (t) = current_function_decl;
1832 TREE_USED (t) = 1;
1833 TREE_ADDRESSABLE (t) = 1;
1834 DECL_CHAIN (t) = DECL_ARGUMENTS (decl);
1835 DECL_ARGUMENTS (decl) = t;
1836 }
1837
1838 /* Allocate memory for the function structure. The call to
1839 push_struct_function clobbers CFUN, so we need to restore
1840 it afterward. */
1841 push_struct_function (decl);
1842 cfun->function_end_locus = gimple_location (ctx->stmt);
1843 init_tree_ssa (cfun);
1844 pop_cfun ();
1845 }
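
/* Illustrative sketch (added commentary, not part of the original source;
   the exact mangled names come from clone_function_name_numbered above):
   for a parallel region the child function built here has the shape

     void foo._omp_fn.0 (void *.omp_data_i);

   while a task copy function takes the destination block first:

     void foo._omp_cpyfn.1 (void *.omp_data_o, void *.omp_data_i);

   .omp_data_i points at the .omp_data_s record laid out during scanning.  */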
1846
1847 /* Callback for walk_gimple_seq. Check whether the combined parallel
1848 contains an OMP_FOR for which gimple_omp_for_combined_into_p is true. */
1849
1850 tree
1851 omp_find_combined_for (gimple_stmt_iterator *gsi_p,
1852 bool *handled_ops_p,
1853 struct walk_stmt_info *wi)
1854 {
1855 gimple *stmt = gsi_stmt (*gsi_p);
1856
1857 *handled_ops_p = true;
1858 switch (gimple_code (stmt))
1859 {
1860 WALK_SUBSTMTS;
1861
1862 case GIMPLE_OMP_FOR:
1863 if (gimple_omp_for_combined_into_p (stmt)
1864 && gimple_omp_for_kind (stmt)
1865 == *(const enum gf_mask *) (wi->info))
1866 {
1867 wi->info = stmt;
1868 return integer_zero_node;
1869 }
1870 break;
1871 default:
1872 break;
1873 }
1874 return NULL;
1875 }
1876
1877 /* Add _LOOPTEMP_/_REDUCTEMP_ clauses on OpenMP parallel or task. */
1878
1879 static void
1880 add_taskreg_looptemp_clauses (enum gf_mask msk, gimple *stmt,
1881 omp_context *outer_ctx)
1882 {
1883 struct walk_stmt_info wi;
1884
1885 memset (&wi, 0, sizeof (wi));
1886 wi.val_only = true;
1887 wi.info = (void *) &msk;
1888 walk_gimple_seq (gimple_omp_body (stmt), omp_find_combined_for, NULL, &wi);
1889 if (wi.info != (void *) &msk)
1890 {
1891 gomp_for *for_stmt = as_a <gomp_for *> ((gimple *) wi.info);
1892 struct omp_for_data fd;
1893 omp_extract_for_data (for_stmt, &fd, NULL);
1894 /* We need two temporaries of fd.iter_type (istart/iend)
1895 and then (fd.collapse - 1) temporaries with the same
1896 type for count2 ... countN-1 vars if not constant. */
1897 size_t count = 2, i;
1898 tree type = fd.iter_type;
1899 if (fd.collapse > 1
1900 && TREE_CODE (fd.loop.n2) != INTEGER_CST)
1901 {
1902 count += fd.collapse - 1;
1903 /* If there are lastprivate clauses on the inner
1904 GIMPLE_OMP_FOR, add one more temporary for the total number
1905 of iterations (product of count1 ... countN-1). */
1906 if (omp_find_clause (gimple_omp_for_clauses (for_stmt),
1907 OMP_CLAUSE_LASTPRIVATE))
1908 count++;
1909 else if (msk == GF_OMP_FOR_KIND_FOR
1910 && omp_find_clause (gimple_omp_parallel_clauses (stmt),
1911 OMP_CLAUSE_LASTPRIVATE))
1912 count++;
1913 }
1914 for (i = 0; i < count; i++)
1915 {
1916 tree temp = create_tmp_var (type);
1917 tree c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__LOOPTEMP_);
1918 insert_decl_map (&outer_ctx->cb, temp, temp);
1919 OMP_CLAUSE_DECL (c) = temp;
1920 OMP_CLAUSE_CHAIN (c) = gimple_omp_taskreg_clauses (stmt);
1921 gimple_omp_taskreg_set_clauses (stmt, c);
1922 }
1923 }
1924 if (msk == GF_OMP_FOR_KIND_TASKLOOP
1925 && omp_find_clause (gimple_omp_task_clauses (stmt),
1926 OMP_CLAUSE_REDUCTION))
1927 {
1928 tree type = build_pointer_type (pointer_sized_int_node);
1929 tree temp = create_tmp_var (type);
1930 tree c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__REDUCTEMP_);
1931 insert_decl_map (&outer_ctx->cb, temp, temp);
1932 OMP_CLAUSE_DECL (c) = temp;
1933 OMP_CLAUSE_CHAIN (c) = gimple_omp_task_clauses (stmt);
1934 gimple_omp_task_set_clauses (stmt, c);
1935 }
1936 }
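
/* For example (a hypothetical sketch of the intended effect), for a combined

     #pragma omp parallel for collapse(2) lastprivate(x)
     for (i = 0; i < n; i++)
       for (j = 0; j < m; j++)
	 ...

   with non-constant bounds the enclosing GIMPLE_OMP_PARALLEL gains
   _looptemp_ clauses for istart/iend, one for the inner loop count and
   one more for the total iteration count, so the expansion can hand the
   loop bounds to the child function.  */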
1937
1938 /* Scan an OpenMP parallel directive. */
1939
1940 static void
1941 scan_omp_parallel (gimple_stmt_iterator *gsi, omp_context *outer_ctx)
1942 {
1943 omp_context *ctx;
1944 tree name;
1945 gomp_parallel *stmt = as_a <gomp_parallel *> (gsi_stmt (*gsi));
1946
1947 /* Ignore parallel directives with empty bodies, unless there
1948 are copyin clauses. */
1949 if (optimize > 0
1950 && empty_body_p (gimple_omp_body (stmt))
1951 && omp_find_clause (gimple_omp_parallel_clauses (stmt),
1952 OMP_CLAUSE_COPYIN) == NULL)
1953 {
1954 gsi_replace (gsi, gimple_build_nop (), false);
1955 return;
1956 }
1957
1958 if (gimple_omp_parallel_combined_p (stmt))
1959 add_taskreg_looptemp_clauses (GF_OMP_FOR_KIND_FOR, stmt, outer_ctx);
1960 for (tree c = omp_find_clause (gimple_omp_parallel_clauses (stmt),
1961 OMP_CLAUSE_REDUCTION);
1962 c; c = omp_find_clause (OMP_CLAUSE_CHAIN (c), OMP_CLAUSE_REDUCTION))
1963 if (OMP_CLAUSE_REDUCTION_TASK (c))
1964 {
1965 tree type = build_pointer_type (pointer_sized_int_node);
1966 tree temp = create_tmp_var (type);
1967 tree c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__REDUCTEMP_);
1968 if (outer_ctx)
1969 insert_decl_map (&outer_ctx->cb, temp, temp);
1970 OMP_CLAUSE_DECL (c) = temp;
1971 OMP_CLAUSE_CHAIN (c) = gimple_omp_parallel_clauses (stmt);
1972 gimple_omp_parallel_set_clauses (stmt, c);
1973 break;
1974 }
1975 else if (OMP_CLAUSE_CHAIN (c) == NULL_TREE)
1976 break;
1977
1978 ctx = new_omp_context (stmt, outer_ctx);
1979 taskreg_contexts.safe_push (ctx);
1980 if (taskreg_nesting_level > 1)
1981 ctx->is_nested = true;
1982 ctx->field_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
1983 ctx->record_type = lang_hooks.types.make_type (RECORD_TYPE);
1984 name = create_tmp_var_name (".omp_data_s");
1985 name = build_decl (gimple_location (stmt),
1986 TYPE_DECL, name, ctx->record_type);
1987 DECL_ARTIFICIAL (name) = 1;
1988 DECL_NAMELESS (name) = 1;
1989 TYPE_NAME (ctx->record_type) = name;
1990 TYPE_ARTIFICIAL (ctx->record_type) = 1;
1991 if (!gimple_omp_parallel_grid_phony (stmt))
1992 {
1993 create_omp_child_function (ctx, false);
1994 gimple_omp_parallel_set_child_fn (stmt, ctx->cb.dst_fn);
1995 }
1996
1997 scan_sharing_clauses (gimple_omp_parallel_clauses (stmt), ctx);
1998 scan_omp (gimple_omp_body_ptr (stmt), ctx);
1999
2000 if (TYPE_FIELDS (ctx->record_type) == NULL)
2001 ctx->record_type = ctx->receiver_decl = NULL;
2002 }
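
/* An illustrative sketch (hypothetical user code; the actual layout
   depends on use_pointer_for_field): given

     int a = 0, b[4];
     #pragma omp parallel shared(a) firstprivate(b)
     ...

   scanning builds a record along the lines of

     struct .omp_data_s { int *a; int b[4]; };

   where shared A may be passed through a pointer field (when
   copy-in/copy-out is unsafe) and firstprivate B is copied by value.  */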
2003
2004 /* Scan an OpenMP task directive. */
2005
2006 static void
2007 scan_omp_task (gimple_stmt_iterator *gsi, omp_context *outer_ctx)
2008 {
2009 omp_context *ctx;
2010 tree name, t;
2011 gomp_task *stmt = as_a <gomp_task *> (gsi_stmt (*gsi));
2012
2013 /* Ignore task directives with empty bodies, unless they have a depend
2014 clause. */
2015 if (optimize > 0
2016 && gimple_omp_body (stmt)
2017 && empty_body_p (gimple_omp_body (stmt))
2018 && !omp_find_clause (gimple_omp_task_clauses (stmt), OMP_CLAUSE_DEPEND))
2019 {
2020 gsi_replace (gsi, gimple_build_nop (), false);
2021 return;
2022 }
2023
2024 if (gimple_omp_task_taskloop_p (stmt))
2025 add_taskreg_looptemp_clauses (GF_OMP_FOR_KIND_TASKLOOP, stmt, outer_ctx);
2026
2027 ctx = new_omp_context (stmt, outer_ctx);
2028
2029 if (gimple_omp_task_taskwait_p (stmt))
2030 {
2031 scan_sharing_clauses (gimple_omp_task_clauses (stmt), ctx);
2032 return;
2033 }
2034
2035 taskreg_contexts.safe_push (ctx);
2036 if (taskreg_nesting_level > 1)
2037 ctx->is_nested = true;
2038 ctx->field_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
2039 ctx->record_type = lang_hooks.types.make_type (RECORD_TYPE);
2040 name = create_tmp_var_name (".omp_data_s");
2041 name = build_decl (gimple_location (stmt),
2042 TYPE_DECL, name, ctx->record_type);
2043 DECL_ARTIFICIAL (name) = 1;
2044 DECL_NAMELESS (name) = 1;
2045 TYPE_NAME (ctx->record_type) = name;
2046 TYPE_ARTIFICIAL (ctx->record_type) = 1;
2047 create_omp_child_function (ctx, false);
2048 gimple_omp_task_set_child_fn (stmt, ctx->cb.dst_fn);
2049
2050 scan_sharing_clauses (gimple_omp_task_clauses (stmt), ctx);
2051
2052 if (ctx->srecord_type)
2053 {
2054 name = create_tmp_var_name (".omp_data_a");
2055 name = build_decl (gimple_location (stmt),
2056 TYPE_DECL, name, ctx->srecord_type);
2057 DECL_ARTIFICIAL (name) = 1;
2058 DECL_NAMELESS (name) = 1;
2059 TYPE_NAME (ctx->srecord_type) = name;
2060 TYPE_ARTIFICIAL (ctx->srecord_type) = 1;
2061 create_omp_child_function (ctx, true);
2062 }
2063
2064 scan_omp (gimple_omp_body_ptr (stmt), ctx);
2065
2066 if (TYPE_FIELDS (ctx->record_type) == NULL)
2067 {
2068 ctx->record_type = ctx->receiver_decl = NULL;
2069 t = build_int_cst (long_integer_type_node, 0);
2070 gimple_omp_task_set_arg_size (stmt, t);
2071 t = build_int_cst (long_integer_type_node, 1);
2072 gimple_omp_task_set_arg_align (stmt, t);
2073 }
2074 }
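
/* Sketch of the degenerate case handled above (hypothetical user code):

     #pragma omp task
     bar ();

   references no shared state, so the record is dropped and the task is
   later launched with arg_size 0 and arg_align 1, matching the
   build_int_cst calls above.  */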
2075
2076 /* Helper function for finish_taskreg_scan, called through walk_tree.
2077 If maybe_lookup_decl_in_outer_ctx returns a different tree for some
2078 variable, replace it in the expression. */
2079
2080 static tree
2081 finish_taskreg_remap (tree *tp, int *walk_subtrees, void *data)
2082 {
2083 if (VAR_P (*tp))
2084 {
2085 omp_context *ctx = (omp_context *) data;
2086 tree t = maybe_lookup_decl_in_outer_ctx (*tp, ctx);
2087 if (t != *tp)
2088 {
2089 if (DECL_HAS_VALUE_EXPR_P (t))
2090 t = unshare_expr (DECL_VALUE_EXPR (t));
2091 *tp = t;
2092 }
2093 *walk_subtrees = 0;
2094 }
2095 else if (IS_TYPE_OR_DECL_P (*tp))
2096 *walk_subtrees = 0;
2097 return NULL_TREE;
2098 }
2099
2100 /* If any decls have been made addressable during scan_omp,
2101 adjust their fields if needed, and layout record types
2102 of parallel/task constructs. */
2103
2104 static void
2105 finish_taskreg_scan (omp_context *ctx)
2106 {
2107 if (ctx->record_type == NULL_TREE)
2108 return;
2109
2110 /* If any task_shared_vars were needed, verify for all
2111 OMP_CLAUSE_SHARED clauses on GIMPLE_OMP_{PARALLEL,TASK,TEAMS}
2112 statements whether use_pointer_for_field has changed
2113 because of that, and if it did, update the field types now. */
2114 if (task_shared_vars)
2115 {
2116 tree c;
2117
2118 for (c = gimple_omp_taskreg_clauses (ctx->stmt);
2119 c; c = OMP_CLAUSE_CHAIN (c))
2120 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED
2121 && !OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
2122 {
2123 tree decl = OMP_CLAUSE_DECL (c);
2124
2125 /* Global variables don't need to be copied;
2126 the receiver side will use them directly. */
2127 if (is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx)))
2128 continue;
2129 if (!bitmap_bit_p (task_shared_vars, DECL_UID (decl))
2130 || !use_pointer_for_field (decl, ctx))
2131 continue;
2132 tree field = lookup_field (decl, ctx);
2133 if (TREE_CODE (TREE_TYPE (field)) == POINTER_TYPE
2134 && TREE_TYPE (TREE_TYPE (field)) == TREE_TYPE (decl))
2135 continue;
2136 TREE_TYPE (field) = build_pointer_type (TREE_TYPE (decl));
2137 TREE_THIS_VOLATILE (field) = 0;
2138 DECL_USER_ALIGN (field) = 0;
2139 SET_DECL_ALIGN (field, TYPE_ALIGN (TREE_TYPE (field)));
2140 if (TYPE_ALIGN (ctx->record_type) < DECL_ALIGN (field))
2141 SET_TYPE_ALIGN (ctx->record_type, DECL_ALIGN (field));
2142 if (ctx->srecord_type)
2143 {
2144 tree sfield = lookup_sfield (decl, ctx);
2145 TREE_TYPE (sfield) = TREE_TYPE (field);
2146 TREE_THIS_VOLATILE (sfield) = 0;
2147 DECL_USER_ALIGN (sfield) = 0;
2148 SET_DECL_ALIGN (sfield, DECL_ALIGN (field));
2149 if (TYPE_ALIGN (ctx->srecord_type) < DECL_ALIGN (sfield))
2150 SET_TYPE_ALIGN (ctx->srecord_type, DECL_ALIGN (sfield));
2151 }
2152 }
2153 }
2154
2155 if (gimple_code (ctx->stmt) == GIMPLE_OMP_PARALLEL)
2156 {
2157 tree clauses = gimple_omp_parallel_clauses (ctx->stmt);
2158 tree c = omp_find_clause (clauses, OMP_CLAUSE__REDUCTEMP_);
2159 if (c)
2160 {
2161 /* Move the _reductemp_ clause first. GOMP_parallel_reductions
2162 expects to find it at the start of the data block. */
2163 tree f = lookup_field (OMP_CLAUSE_DECL (c), ctx);
2164 tree *p = &TYPE_FIELDS (ctx->record_type);
2165 while (*p)
2166 if (*p == f)
2167 {
2168 *p = DECL_CHAIN (*p);
2169 break;
2170 }
2171 else
2172 p = &DECL_CHAIN (*p);
2173 DECL_CHAIN (f) = TYPE_FIELDS (ctx->record_type);
2174 TYPE_FIELDS (ctx->record_type) = f;
2175 }
2176 layout_type (ctx->record_type);
2177 fixup_child_record_type (ctx);
2178 }
2179 else if (gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS)
2180 {
2181 layout_type (ctx->record_type);
2182 fixup_child_record_type (ctx);
2183 }
2184 else
2185 {
2186 location_t loc = gimple_location (ctx->stmt);
2187 tree *p, vla_fields = NULL_TREE, *q = &vla_fields;
2188 /* Move VLA fields to the end. */
2189 p = &TYPE_FIELDS (ctx->record_type);
2190 while (*p)
2191 if (!TYPE_SIZE_UNIT (TREE_TYPE (*p))
2192 || ! TREE_CONSTANT (TYPE_SIZE_UNIT (TREE_TYPE (*p))))
2193 {
2194 *q = *p;
2195 *p = TREE_CHAIN (*p);
2196 TREE_CHAIN (*q) = NULL_TREE;
2197 q = &TREE_CHAIN (*q);
2198 }
2199 else
2200 p = &DECL_CHAIN (*p);
2201 *p = vla_fields;
2202 if (gimple_omp_task_taskloop_p (ctx->stmt))
2203 {
2204 /* Move the fields corresponding to the first and second _looptemp_
2205 clauses first.  These are filled by GOMP_taskloop
2206 and thus need to be in specific positions. */
2207 tree clauses = gimple_omp_task_clauses (ctx->stmt);
2208 tree c1 = omp_find_clause (clauses, OMP_CLAUSE__LOOPTEMP_);
2209 tree c2 = omp_find_clause (OMP_CLAUSE_CHAIN (c1),
2210 OMP_CLAUSE__LOOPTEMP_);
2211 tree c3 = omp_find_clause (clauses, OMP_CLAUSE__REDUCTEMP_);
2212 tree f1 = lookup_field (OMP_CLAUSE_DECL (c1), ctx);
2213 tree f2 = lookup_field (OMP_CLAUSE_DECL (c2), ctx);
2214 tree f3 = c3 ? lookup_field (OMP_CLAUSE_DECL (c3), ctx) : NULL_TREE;
2215 p = &TYPE_FIELDS (ctx->record_type);
2216 while (*p)
2217 if (*p == f1 || *p == f2 || *p == f3)
2218 *p = DECL_CHAIN (*p);
2219 else
2220 p = &DECL_CHAIN (*p);
2221 DECL_CHAIN (f1) = f2;
2222 if (c3)
2223 {
2224 DECL_CHAIN (f2) = f3;
2225 DECL_CHAIN (f3) = TYPE_FIELDS (ctx->record_type);
2226 }
2227 else
2228 DECL_CHAIN (f2) = TYPE_FIELDS (ctx->record_type);
2229 TYPE_FIELDS (ctx->record_type) = f1;
2230 if (ctx->srecord_type)
2231 {
2232 f1 = lookup_sfield (OMP_CLAUSE_DECL (c1), ctx);
2233 f2 = lookup_sfield (OMP_CLAUSE_DECL (c2), ctx);
2234 if (c3)
2235 f3 = lookup_sfield (OMP_CLAUSE_DECL (c3), ctx);
2236 p = &TYPE_FIELDS (ctx->srecord_type);
2237 while (*p)
2238 if (*p == f1 || *p == f2 || *p == f3)
2239 *p = DECL_CHAIN (*p);
2240 else
2241 p = &DECL_CHAIN (*p);
2242 DECL_CHAIN (f1) = f2;
2243 DECL_CHAIN (f2) = TYPE_FIELDS (ctx->srecord_type);
2244 if (c3)
2245 {
2246 DECL_CHAIN (f2) = f3;
2247 DECL_CHAIN (f3) = TYPE_FIELDS (ctx->srecord_type);
2248 }
2249 else
2250 DECL_CHAIN (f2) = TYPE_FIELDS (ctx->srecord_type);
2251 TYPE_FIELDS (ctx->srecord_type) = f1;
2252 }
2253 }
2254 layout_type (ctx->record_type);
2255 fixup_child_record_type (ctx);
2256 if (ctx->srecord_type)
2257 layout_type (ctx->srecord_type);
2258 tree t = fold_convert_loc (loc, long_integer_type_node,
2259 TYPE_SIZE_UNIT (ctx->record_type));
2260 if (TREE_CODE (t) != INTEGER_CST)
2261 {
2262 t = unshare_expr (t);
2263 walk_tree (&t, finish_taskreg_remap, ctx, NULL);
2264 }
2265 gimple_omp_task_set_arg_size (ctx->stmt, t);
2266 t = build_int_cst (long_integer_type_node,
2267 TYPE_ALIGN_UNIT (ctx->record_type));
2268 gimple_omp_task_set_arg_align (ctx->stmt, t);
2269 }
2270 }
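
/* Sketch of the taskloop constraint enforced above: GOMP_taskloop stores
   the start/end iteration values into the first two slots of the argument
   block, so after reordering the record begins roughly as

     struct .omp_data_s { long _looptemp_1; long _looptemp_2; ... };

   with an optional _reductemp_ field third and any VLA fields moved to
   the end.  */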
2271
2272 /* Find the enclosing offload context. */
2273
2274 static omp_context *
2275 enclosing_target_ctx (omp_context *ctx)
2276 {
2277 for (; ctx; ctx = ctx->outer)
2278 if (gimple_code (ctx->stmt) == GIMPLE_OMP_TARGET)
2279 break;
2280
2281 return ctx;
2282 }
2283
2284 /* Return true if CTX is part of an OpenACC kernels region. */
2285
2286 static bool
2287 ctx_in_oacc_kernels_region (omp_context *ctx)
2288 {
2289 for (; ctx != NULL; ctx = ctx->outer)
2290 {
2291 gimple *stmt = ctx->stmt;
2292 if (gimple_code (stmt) == GIMPLE_OMP_TARGET
2293 && gimple_omp_target_kind (stmt) == GF_OMP_TARGET_KIND_OACC_KERNELS)
2294 return true;
2295 }
2296
2297 return false;
2298 }
2299
2300 /* Check the parallelism clauses inside a kernels region.
2301 Until kernels handling moves to use the same loop indirection
2302 scheme as parallel, we need to do this checking early. */
2303
2304 static unsigned
2305 check_oacc_kernel_gwv (gomp_for *stmt, omp_context *ctx)
2306 {
2307 bool checking = true;
2308 unsigned outer_mask = 0;
2309 unsigned this_mask = 0;
2310 bool has_seq = false, has_auto = false;
2311
2312 if (ctx->outer)
2313 outer_mask = check_oacc_kernel_gwv (NULL, ctx->outer);
2314 if (!stmt)
2315 {
2316 checking = false;
2317 if (gimple_code (ctx->stmt) != GIMPLE_OMP_FOR)
2318 return outer_mask;
2319 stmt = as_a <gomp_for *> (ctx->stmt);
2320 }
2321
2322 for (tree c = gimple_omp_for_clauses (stmt); c; c = OMP_CLAUSE_CHAIN (c))
2323 {
2324 switch (OMP_CLAUSE_CODE (c))
2325 {
2326 case OMP_CLAUSE_GANG:
2327 this_mask |= GOMP_DIM_MASK (GOMP_DIM_GANG);
2328 break;
2329 case OMP_CLAUSE_WORKER:
2330 this_mask |= GOMP_DIM_MASK (GOMP_DIM_WORKER);
2331 break;
2332 case OMP_CLAUSE_VECTOR:
2333 this_mask |= GOMP_DIM_MASK (GOMP_DIM_VECTOR);
2334 break;
2335 case OMP_CLAUSE_SEQ:
2336 has_seq = true;
2337 break;
2338 case OMP_CLAUSE_AUTO:
2339 has_auto = true;
2340 break;
2341 default:
2342 break;
2343 }
2344 }
2345
2346 if (checking)
2347 {
2348 if (has_seq && (this_mask || has_auto))
2349 error_at (gimple_location (stmt), "%<seq%> overrides other"
2350 " OpenACC loop specifiers");
2351 else if (has_auto && this_mask)
2352 error_at (gimple_location (stmt), "%<auto%> conflicts with other"
2353 " OpenACC loop specifiers");
2354
2355 if (this_mask & outer_mask)
2356 error_at (gimple_location (stmt), "inner loop uses same"
2357 " OpenACC parallelism as containing loop");
2358 }
2359
2360 return outer_mask | this_mask;
2361 }
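
/* For example, this user-level sketch is diagnosed above because the
   inner loop repeats the gang parallelism of the containing loop:

     #pragma acc kernels
     #pragma acc loop gang
     for (i = 0; i < n; i++)
       #pragma acc loop gang
       for (j = 0; j < m; j++)
	 ...  */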
2362
2363 /* Scan a GIMPLE_OMP_FOR. */
2364
2365 static omp_context *
2366 scan_omp_for (gomp_for *stmt, omp_context *outer_ctx)
2367 {
2368 omp_context *ctx;
2369 size_t i;
2370 tree clauses = gimple_omp_for_clauses (stmt);
2371
2372 ctx = new_omp_context (stmt, outer_ctx);
2373
2374 if (is_gimple_omp_oacc (stmt))
2375 {
2376 omp_context *tgt = enclosing_target_ctx (outer_ctx);
2377
2378 if (!tgt || is_oacc_parallel (tgt))
2379 for (tree c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
2380 {
2381 char const *check = NULL;
2382
2383 switch (OMP_CLAUSE_CODE (c))
2384 {
2385 case OMP_CLAUSE_GANG:
2386 check = "gang";
2387 break;
2388
2389 case OMP_CLAUSE_WORKER:
2390 check = "worker";
2391 break;
2392
2393 case OMP_CLAUSE_VECTOR:
2394 check = "vector";
2395 break;
2396
2397 default:
2398 break;
2399 }
2400
2401 if (check && OMP_CLAUSE_OPERAND (c, 0))
2402 error_at (gimple_location (stmt),
2403 "argument not permitted on %qs clause in"
2404 " OpenACC %<parallel%>", check);
2405 }
2406
2407 if (tgt && is_oacc_kernels (tgt))
2408 {
2409 /* Strip out reductions, as they are not handled yet. */
2410 tree *prev_ptr = &clauses;
2411
2412 while (tree probe = *prev_ptr)
2413 {
2414 tree *next_ptr = &OMP_CLAUSE_CHAIN (probe);
2415
2416 if (OMP_CLAUSE_CODE (probe) == OMP_CLAUSE_REDUCTION)
2417 *prev_ptr = *next_ptr;
2418 else
2419 prev_ptr = next_ptr;
2420 }
2421
2422 gimple_omp_for_set_clauses (stmt, clauses);
2423 check_oacc_kernel_gwv (stmt, ctx);
2424 }
2425 }
2426
2427 scan_sharing_clauses (clauses, ctx);
2428
2429 scan_omp (gimple_omp_for_pre_body_ptr (stmt), ctx);
2430 for (i = 0; i < gimple_omp_for_collapse (stmt); i++)
2431 {
2432 scan_omp_op (gimple_omp_for_index_ptr (stmt, i), ctx);
2433 scan_omp_op (gimple_omp_for_initial_ptr (stmt, i), ctx);
2434 scan_omp_op (gimple_omp_for_final_ptr (stmt, i), ctx);
2435 scan_omp_op (gimple_omp_for_incr_ptr (stmt, i), ctx);
2436 }
2437 scan_omp (gimple_omp_body_ptr (stmt), ctx);
2438 return ctx;
2439 }
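
/* E.g. (user-level sketch) inside an OpenACC parallel region:

     #pragma acc parallel
     #pragma acc loop gang(8)
     for (i = 0; i < n; i++)
       ...

   the gang argument is rejected above with "argument not permitted",
   whereas that check does not apply under an OpenACC kernels region.  */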
2440
2441 /* Duplicate a #pragma omp simd loop: one copy for SIMT, another one for SIMD. */
2442
2443 static void
2444 scan_omp_simd (gimple_stmt_iterator *gsi, gomp_for *stmt,
2445 omp_context *outer_ctx)
2446 {
2447 gbind *bind = gimple_build_bind (NULL, NULL, NULL);
2448 gsi_replace (gsi, bind, false);
2449 gimple_seq seq = NULL;
2450 gimple *g = gimple_build_call_internal (IFN_GOMP_USE_SIMT, 0);
2451 tree cond = create_tmp_var_raw (integer_type_node);
2452 DECL_CONTEXT (cond) = current_function_decl;
2453 DECL_SEEN_IN_BIND_EXPR_P (cond) = 1;
2454 gimple_bind_set_vars (bind, cond);
2455 gimple_call_set_lhs (g, cond);
2456 gimple_seq_add_stmt (&seq, g);
2457 tree lab1 = create_artificial_label (UNKNOWN_LOCATION);
2458 tree lab2 = create_artificial_label (UNKNOWN_LOCATION);
2459 tree lab3 = create_artificial_label (UNKNOWN_LOCATION);
2460 g = gimple_build_cond (NE_EXPR, cond, integer_zero_node, lab1, lab2);
2461 gimple_seq_add_stmt (&seq, g);
2462 g = gimple_build_label (lab1);
2463 gimple_seq_add_stmt (&seq, g);
2464 gimple_seq new_seq = copy_gimple_seq_and_replace_locals (stmt);
2465 gomp_for *new_stmt = as_a <gomp_for *> (new_seq);
2466 tree clause = build_omp_clause (gimple_location (stmt), OMP_CLAUSE__SIMT_);
2467 OMP_CLAUSE_CHAIN (clause) = gimple_omp_for_clauses (new_stmt);
2468 gimple_omp_for_set_clauses (new_stmt, clause);
2469 gimple_seq_add_stmt (&seq, new_stmt);
2470 g = gimple_build_goto (lab3);
2471 gimple_seq_add_stmt (&seq, g);
2472 g = gimple_build_label (lab2);
2473 gimple_seq_add_stmt (&seq, g);
2474 gimple_seq_add_stmt (&seq, stmt);
2475 g = gimple_build_label (lab3);
2476 gimple_seq_add_stmt (&seq, g);
2477 gimple_bind_set_body (bind, seq);
2478 update_stmt (bind);
2479 scan_omp_for (new_stmt, outer_ctx);
2480 scan_omp_for (stmt, outer_ctx)->simt_stmt = new_stmt;
2481 }
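
/* The GIMPLE built above has roughly this shape (a sketch; the call is
   really internal function IFN_GOMP_USE_SIMT):

     cond = GOMP_USE_SIMT ();
     if (cond != 0) goto lab1; else goto lab2;
     lab1: #pragma omp simd _simt_ ...   <- copy used for SIMT targets
	   goto lab3;
     lab2: #pragma omp simd ...          <- original, used for SIMD
     lab3: ;  */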
2482
2483 static tree omp_find_scan (gimple_stmt_iterator *, bool *,
2484 struct walk_stmt_info *);
2485 static omp_context *maybe_lookup_ctx (gimple *);
2486
2487 /* Duplicate a #pragma omp simd loop: one copy for the scan input phase
2488 loop and one for the scan phase loop. */
2489
2490 static void
2491 scan_omp_simd_scan (gimple_stmt_iterator *gsi, gomp_for *stmt,
2492 omp_context *outer_ctx)
2493 {
2494 /* The only change between inclusive and exclusive scan will be
2495 within the first simd loop, so just use inclusive in the
2496 worksharing loop. */
2497 outer_ctx->scan_inclusive = true;
2498 tree c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE_INCLUSIVE);
2499 OMP_CLAUSE_DECL (c) = integer_zero_node;
2500
2501 gomp_scan *input_stmt = gimple_build_omp_scan (NULL, NULL_TREE);
2502 gomp_scan *scan_stmt = gimple_build_omp_scan (NULL, c);
2503 gsi_replace (gsi, input_stmt, false);
2504 gimple_seq input_body = NULL;
2505 gimple_seq_add_stmt (&input_body, stmt);
2506 gsi_insert_after (gsi, scan_stmt, GSI_NEW_STMT);
2507
2508 gimple_stmt_iterator input1_gsi = gsi_none ();
2509 struct walk_stmt_info wi;
2510 memset (&wi, 0, sizeof (wi));
2511 wi.val_only = true;
2512 wi.info = (void *) &input1_gsi;
2513 walk_gimple_seq_mod (gimple_omp_body_ptr (stmt), omp_find_scan, NULL, &wi);
2514 gcc_assert (!gsi_end_p (input1_gsi));
2515
2516 gimple *input_stmt1 = gsi_stmt (input1_gsi);
2517 gsi_next (&input1_gsi);
2518 gimple *scan_stmt1 = gsi_stmt (input1_gsi);
2519 gcc_assert (scan_stmt1 && gimple_code (scan_stmt1) == GIMPLE_OMP_SCAN);
2520 c = gimple_omp_scan_clauses (as_a <gomp_scan *> (scan_stmt1));
2521 if (c && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_EXCLUSIVE)
2522 std::swap (input_stmt1, scan_stmt1);
2523
2524 gimple_seq input_body1 = gimple_omp_body (input_stmt1);
2525 gimple_omp_set_body (input_stmt1, NULL);
2526
2527 gimple_seq scan_body = copy_gimple_seq_and_replace_locals (stmt);
2528 gomp_for *new_stmt = as_a <gomp_for *> (scan_body);
2529
2530 gimple_omp_set_body (input_stmt1, input_body1);
2531 gimple_omp_set_body (scan_stmt1, NULL);
2532
2533 gimple_stmt_iterator input2_gsi = gsi_none ();
2534 memset (&wi, 0, sizeof (wi));
2535 wi.val_only = true;
2536 wi.info = (void *) &input2_gsi;
2537 walk_gimple_seq_mod (gimple_omp_body_ptr (new_stmt), omp_find_scan,
2538 NULL, &wi);
2539 gcc_assert (!gsi_end_p (input2_gsi));
2540
2541 gimple *input_stmt2 = gsi_stmt (input2_gsi);
2542 gsi_next (&input2_gsi);
2543 gimple *scan_stmt2 = gsi_stmt (input2_gsi);
2544 gcc_assert (scan_stmt2 && gimple_code (scan_stmt2) == GIMPLE_OMP_SCAN);
2545 if (c && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_EXCLUSIVE)
2546 std::swap (input_stmt2, scan_stmt2);
2547
2548 gimple_omp_set_body (input_stmt2, NULL);
2549
2550 gimple_omp_set_body (input_stmt, input_body);
2551 gimple_omp_set_body (scan_stmt, scan_body);
2552
2553 omp_context *ctx = new_omp_context (input_stmt, outer_ctx);
2554 scan_omp (gimple_omp_body_ptr (input_stmt), ctx);
2555
2556 ctx = new_omp_context (scan_stmt, outer_ctx);
2557 scan_omp (gimple_omp_body_ptr (scan_stmt), ctx);
2558
2559 maybe_lookup_ctx (new_stmt)->for_simd_scan_phase = true;
2560 }
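
/* Schematically (a sketch), a simd loop with an inscan reduction such as

     #pragma omp simd reduction (inscan, +:r)
     for (...) { r += a[i]; #pragma omp scan inclusive (r) b[i] = r; }

   is rewritten above into two loops wrapped in GIMPLE_OMP_SCAN regions:
   an input phase running the pre-scan part of the body and a scan phase
   (the remapped copy) running the post-scan part.  */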
2561
2562 /* Scan an OpenMP sections directive. */
2563
2564 static void
2565 scan_omp_sections (gomp_sections *stmt, omp_context *outer_ctx)
2566 {
2567 omp_context *ctx;
2568
2569 ctx = new_omp_context (stmt, outer_ctx);
2570 scan_sharing_clauses (gimple_omp_sections_clauses (stmt), ctx);
2571 scan_omp (gimple_omp_body_ptr (stmt), ctx);
2572 }
2573
2574 /* Scan an OpenMP single directive. */
2575
2576 static void
2577 scan_omp_single (gomp_single *stmt, omp_context *outer_ctx)
2578 {
2579 omp_context *ctx;
2580 tree name;
2581
2582 ctx = new_omp_context (stmt, outer_ctx);
2583 ctx->field_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
2584 ctx->record_type = lang_hooks.types.make_type (RECORD_TYPE);
2585 name = create_tmp_var_name (".omp_copy_s");
2586 name = build_decl (gimple_location (stmt),
2587 TYPE_DECL, name, ctx->record_type);
2588 TYPE_NAME (ctx->record_type) = name;
2589
2590 scan_sharing_clauses (gimple_omp_single_clauses (stmt), ctx);
2591 scan_omp (gimple_omp_body_ptr (stmt), ctx);
2592
2593 if (TYPE_FIELDS (ctx->record_type) == NULL)
2594 ctx->record_type = NULL;
2595 else
2596 layout_type (ctx->record_type);
2597 }
2598
2599 /* Scan a GIMPLE_OMP_TARGET. */
2600
2601 static void
2602 scan_omp_target (gomp_target *stmt, omp_context *outer_ctx)
2603 {
2604 omp_context *ctx;
2605 tree name;
2606 bool offloaded = is_gimple_omp_offloaded (stmt);
2607 tree clauses = gimple_omp_target_clauses (stmt);
2608
2609 ctx = new_omp_context (stmt, outer_ctx);
2610 ctx->field_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
2611 ctx->record_type = lang_hooks.types.make_type (RECORD_TYPE);
2612 name = create_tmp_var_name (".omp_data_t");
2613 name = build_decl (gimple_location (stmt),
2614 TYPE_DECL, name, ctx->record_type);
2615 DECL_ARTIFICIAL (name) = 1;
2616 DECL_NAMELESS (name) = 1;
2617 TYPE_NAME (ctx->record_type) = name;
2618 TYPE_ARTIFICIAL (ctx->record_type) = 1;
2619
2620 if (offloaded)
2621 {
2622 create_omp_child_function (ctx, false);
2623 gimple_omp_target_set_child_fn (stmt, ctx->cb.dst_fn);
2624 }
2625
2626 scan_sharing_clauses (clauses, ctx);
2627 scan_omp (gimple_omp_body_ptr (stmt), ctx);
2628
2629 if (TYPE_FIELDS (ctx->record_type) == NULL)
2630 ctx->record_type = ctx->receiver_decl = NULL;
2631 else
2632 {
2633 TYPE_FIELDS (ctx->record_type)
2634 = nreverse (TYPE_FIELDS (ctx->record_type));
2635 if (flag_checking)
2636 {
2637 unsigned int align = DECL_ALIGN (TYPE_FIELDS (ctx->record_type));
2638 for (tree field = TYPE_FIELDS (ctx->record_type);
2639 field;
2640 field = DECL_CHAIN (field))
2641 gcc_assert (DECL_ALIGN (field) == align);
2642 }
2643 layout_type (ctx->record_type);
2644 if (offloaded)
2645 fixup_child_record_type (ctx);
2646 }
2647 }
2648
2649 /* Scan an OpenMP teams directive. */
2650
2651 static void
2652 scan_omp_teams (gomp_teams *stmt, omp_context *outer_ctx)
2653 {
2654 omp_context *ctx = new_omp_context (stmt, outer_ctx);
2655
2656 if (!gimple_omp_teams_host (stmt))
2657 {
2658 scan_sharing_clauses (gimple_omp_teams_clauses (stmt), ctx);
2659 scan_omp (gimple_omp_body_ptr (stmt), ctx);
2660 return;
2661 }
2662 taskreg_contexts.safe_push (ctx);
2663 gcc_assert (taskreg_nesting_level == 1);
2664 ctx->field_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
2665 ctx->record_type = lang_hooks.types.make_type (RECORD_TYPE);
2666 tree name = create_tmp_var_name (".omp_data_s");
2667 name = build_decl (gimple_location (stmt),
2668 TYPE_DECL, name, ctx->record_type);
2669 DECL_ARTIFICIAL (name) = 1;
2670 DECL_NAMELESS (name) = 1;
2671 TYPE_NAME (ctx->record_type) = name;
2672 TYPE_ARTIFICIAL (ctx->record_type) = 1;
2673 create_omp_child_function (ctx, false);
2674 gimple_omp_teams_set_child_fn (stmt, ctx->cb.dst_fn);
2675
2676 scan_sharing_clauses (gimple_omp_teams_clauses (stmt), ctx);
2677 scan_omp (gimple_omp_body_ptr (stmt), ctx);
2678
2679 if (TYPE_FIELDS (ctx->record_type) == NULL)
2680 ctx->record_type = ctx->receiver_decl = NULL;
2681 }
2682
2683 /* Check nesting restrictions. */
2684 static bool
2685 check_omp_nesting_restrictions (gimple *stmt, omp_context *ctx)
2686 {
2687 tree c;
2688
2689 if (ctx && gimple_code (ctx->stmt) == GIMPLE_OMP_GRID_BODY)
2690 /* GRID_BODY is an artificial construct; nesting rules will be checked in
2691 the original copy of its contents. */
2692 return true;
2693
2694 /* No nesting of non-OpenACC STMT (that is, an OpenMP one, or a GOMP builtin)
2695 inside an OpenACC CTX. */
2696 if (!(is_gimple_omp (stmt)
2697 && is_gimple_omp_oacc (stmt))
2698 /* Except for atomic codes that we share with OpenMP. */
2699 && !(gimple_code (stmt) == GIMPLE_OMP_ATOMIC_LOAD
2700 || gimple_code (stmt) == GIMPLE_OMP_ATOMIC_STORE))
2701 {
2702 if (oacc_get_fn_attrib (cfun->decl) != NULL)
2703 {
2704 error_at (gimple_location (stmt),
2705 "non-OpenACC construct inside of OpenACC routine");
2706 return false;
2707 }
2708 else
2709 for (omp_context *octx = ctx; octx != NULL; octx = octx->outer)
2710 if (is_gimple_omp (octx->stmt)
2711 && is_gimple_omp_oacc (octx->stmt))
2712 {
2713 error_at (gimple_location (stmt),
2714 "non-OpenACC construct inside of OpenACC region");
2715 return false;
2716 }
2717 }
2718
2719 if (ctx != NULL)
2720 {
2721 if (gimple_code (ctx->stmt) == GIMPLE_OMP_SCAN
2722 && ctx->outer
2723 && gimple_code (ctx->outer->stmt) == GIMPLE_OMP_FOR)
2724 ctx = ctx->outer;
2725 if (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
2726 && gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_SIMD
2727 && !ctx->loop_p)
2728 {
2729 c = NULL_TREE;
2730 if (ctx->order_concurrent
2731 && (gimple_code (stmt) == GIMPLE_OMP_ORDERED
2732 || gimple_code (stmt) == GIMPLE_OMP_ATOMIC_LOAD
2733 || gimple_code (stmt) == GIMPLE_OMP_ATOMIC_STORE))
2734 {
2735 error_at (gimple_location (stmt),
2736 "OpenMP constructs other than %<parallel%>, %<loop%>"
2737 " or %<simd%> may not be nested inside a region with"
2738 " the %<order(concurrent)%> clause");
2739 return false;
2740 }
2741 if (gimple_code (stmt) == GIMPLE_OMP_ORDERED)
2742 {
2743 c = gimple_omp_ordered_clauses (as_a <gomp_ordered *> (stmt));
2744 if (omp_find_clause (c, OMP_CLAUSE_SIMD))
2745 {
2746 if (omp_find_clause (c, OMP_CLAUSE_THREADS)
2747 && (ctx->outer == NULL
2748 || !gimple_omp_for_combined_into_p (ctx->stmt)
2749 || gimple_code (ctx->outer->stmt) != GIMPLE_OMP_FOR
2750 || (gimple_omp_for_kind (ctx->outer->stmt)
2751 != GF_OMP_FOR_KIND_FOR)
2752 || !gimple_omp_for_combined_p (ctx->outer->stmt)))
2753 {
2754 error_at (gimple_location (stmt),
2755 "%<ordered simd threads%> must be closely "
2756 "nested inside of %<for simd%> region");
2757 return false;
2758 }
2759 return true;
2760 }
2761 }
2762 else if (gimple_code (stmt) == GIMPLE_OMP_ATOMIC_LOAD
2763 || gimple_code (stmt) == GIMPLE_OMP_ATOMIC_STORE
2764 || gimple_code (stmt) == GIMPLE_OMP_SCAN)
2765 return true;
2766 else if (gimple_code (stmt) == GIMPLE_OMP_FOR
2767 && gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_SIMD)
2768 return true;
2769 error_at (gimple_location (stmt),
2770 "OpenMP constructs other than "
2771 "%<ordered simd%>, %<simd%>, %<loop%> or %<atomic%> may "
2772 "not be nested inside %<simd%> region");
2773 return false;
2774 }
2775 else if (gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS)
2776 {
2777 if ((gimple_code (stmt) != GIMPLE_OMP_FOR
2778 || (gimple_omp_for_kind (stmt) != GF_OMP_FOR_KIND_DISTRIBUTE
2779 && gimple_omp_for_kind (stmt) != GF_OMP_FOR_KIND_GRID_LOOP
2780 && omp_find_clause (gimple_omp_for_clauses (stmt),
2781 OMP_CLAUSE_BIND) == NULL_TREE))
2782 && gimple_code (stmt) != GIMPLE_OMP_PARALLEL)
2783 {
2784 error_at (gimple_location (stmt),
2785 "only %<distribute%>, %<parallel%> or %<loop%> "
2786 "regions are allowed to be strictly nested inside "
2787 "%<teams%> region");
2788 return false;
2789 }
2790 }
2791 else if (ctx->order_concurrent
2792 && gimple_code (stmt) != GIMPLE_OMP_PARALLEL
2793 && (gimple_code (stmt) != GIMPLE_OMP_FOR
2794 || gimple_omp_for_kind (stmt) != GF_OMP_FOR_KIND_SIMD)
2795 && gimple_code (stmt) != GIMPLE_OMP_SCAN)
2796 {
2797 if (ctx->loop_p)
2798 error_at (gimple_location (stmt),
2799 "OpenMP constructs other than %<parallel%>, %<loop%> or "
2800 "%<simd%> may not be nested inside a %<loop%> region");
2801 else
2802 error_at (gimple_location (stmt),
2803 "OpenMP constructs other than %<parallel%>, %<loop%> or "
2804 "%<simd%> may not be nested inside a region with "
2805 "the %<order(concurrent)%> clause");
2806 return false;
2807 }
2808 }
2809 switch (gimple_code (stmt))
2810 {
2811 case GIMPLE_OMP_FOR:
2812 if (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_SIMD)
2813 return true;
2814 if (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_DISTRIBUTE)
2815 {
2816 if (ctx != NULL && gimple_code (ctx->stmt) != GIMPLE_OMP_TEAMS)
2817 {
2818 error_at (gimple_location (stmt),
2819 "%<distribute%> region must be strictly nested "
2820 "inside %<teams%> construct");
2821 return false;
2822 }
2823 return true;
2824 }
2825 /* We split a taskloop into a task with a nested taskloop in it. */
2826 if (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_TASKLOOP)
2827 return true;
2828 /* For now loop bind(parallel) is accepted in lots of contexts;
2829 hopefully this will change and it will not be allowed in so many. */
2830 if (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_FOR
2831 && omp_find_clause (gimple_omp_for_clauses (stmt), OMP_CLAUSE_BIND))
2832 return true;
2833 if (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_OACC_LOOP)
2834 {
2835 bool ok = false;
2836
2837 if (ctx)
2838 switch (gimple_code (ctx->stmt))
2839 {
2840 case GIMPLE_OMP_FOR:
2841 ok = (gimple_omp_for_kind (ctx->stmt)
2842 == GF_OMP_FOR_KIND_OACC_LOOP);
2843 break;
2844
2845 case GIMPLE_OMP_TARGET:
2846 switch (gimple_omp_target_kind (ctx->stmt))
2847 {
2848 case GF_OMP_TARGET_KIND_OACC_PARALLEL:
2849 case GF_OMP_TARGET_KIND_OACC_KERNELS:
2850 ok = true;
2851 break;
2852
2853 default:
2854 break;
2855 }
2856
2857 default:
2858 break;
2859 }
2860 else if (oacc_get_fn_attrib (current_function_decl))
2861 ok = true;
2862 if (!ok)
2863 {
2864 error_at (gimple_location (stmt),
2865 "OpenACC loop directive must be associated with"
2866 " an OpenACC compute region");
2867 return false;
2868 }
2869 }
2870 /* FALLTHRU */
2871 case GIMPLE_CALL:
2872 if (is_gimple_call (stmt)
2873 && (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
2874 == BUILT_IN_GOMP_CANCEL
2875 || DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
2876 == BUILT_IN_GOMP_CANCELLATION_POINT))
2877 {
2878 const char *bad = NULL;
2879 const char *kind = NULL;
2880 const char *construct
2881 = (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
2882 == BUILT_IN_GOMP_CANCEL)
2883 ? "cancel"
2884 : "cancellation point";
2885 if (ctx == NULL)
2886 {
2887 error_at (gimple_location (stmt), "orphaned %qs construct",
2888 construct);
2889 return false;
2890 }
2891 switch (tree_fits_shwi_p (gimple_call_arg (stmt, 0))
2892 ? tree_to_shwi (gimple_call_arg (stmt, 0))
2893 : 0)
2894 {
2895 case 1:
2896 if (gimple_code (ctx->stmt) != GIMPLE_OMP_PARALLEL)
2897 bad = "parallel";
2898 else if (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
2899 == BUILT_IN_GOMP_CANCEL
2900 && !integer_zerop (gimple_call_arg (stmt, 1)))
2901 ctx->cancellable = true;
2902 kind = "parallel";
2903 break;
2904 case 2:
2905 if (gimple_code (ctx->stmt) != GIMPLE_OMP_FOR
2906 || gimple_omp_for_kind (ctx->stmt) != GF_OMP_FOR_KIND_FOR)
2907 bad = "for";
2908 else if (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
2909 == BUILT_IN_GOMP_CANCEL
2910 && !integer_zerop (gimple_call_arg (stmt, 1)))
2911 {
2912 ctx->cancellable = true;
2913 if (omp_find_clause (gimple_omp_for_clauses (ctx->stmt),
2914 OMP_CLAUSE_NOWAIT))
2915 warning_at (gimple_location (stmt), 0,
2916 "%<cancel for%> inside "
2917 "%<nowait%> for construct");
2918 if (omp_find_clause (gimple_omp_for_clauses (ctx->stmt),
2919 OMP_CLAUSE_ORDERED))
2920 warning_at (gimple_location (stmt), 0,
2921 "%<cancel for%> inside "
2922 "%<ordered%> for construct");
2923 }
2924 kind = "for";
2925 break;
2926 case 4:
2927 if (gimple_code (ctx->stmt) != GIMPLE_OMP_SECTIONS
2928 && gimple_code (ctx->stmt) != GIMPLE_OMP_SECTION)
2929 bad = "sections";
2930 else if (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
2931 == BUILT_IN_GOMP_CANCEL
2932 && !integer_zerop (gimple_call_arg (stmt, 1)))
2933 {
2934 if (gimple_code (ctx->stmt) == GIMPLE_OMP_SECTIONS)
2935 {
2936 ctx->cancellable = true;
2937 if (omp_find_clause (gimple_omp_sections_clauses
2938 (ctx->stmt),
2939 OMP_CLAUSE_NOWAIT))
2940 warning_at (gimple_location (stmt), 0,
2941 "%<cancel sections%> inside "
2942 "%<nowait%> sections construct");
2943 }
2944 else
2945 {
2946 gcc_assert (ctx->outer
2947 && gimple_code (ctx->outer->stmt)
2948 == GIMPLE_OMP_SECTIONS);
2949 ctx->outer->cancellable = true;
2950 if (omp_find_clause (gimple_omp_sections_clauses
2951 (ctx->outer->stmt),
2952 OMP_CLAUSE_NOWAIT))
2953 warning_at (gimple_location (stmt), 0,
2954 "%<cancel sections%> inside "
2955 "%<nowait%> sections construct");
2956 }
2957 }
2958 kind = "sections";
2959 break;
2960 case 8:
2961 if (!is_task_ctx (ctx)
2962 && (!is_taskloop_ctx (ctx)
2963 || ctx->outer == NULL
2964 || !is_task_ctx (ctx->outer)))
2965 bad = "task";
2966 else
2967 {
2968 for (omp_context *octx = ctx->outer;
2969 octx; octx = octx->outer)
2970 {
2971 switch (gimple_code (octx->stmt))
2972 {
2973 case GIMPLE_OMP_TASKGROUP:
2974 break;
2975 case GIMPLE_OMP_TARGET:
2976 if (gimple_omp_target_kind (octx->stmt)
2977 != GF_OMP_TARGET_KIND_REGION)
2978 continue;
2979 /* FALLTHRU */
2980 case GIMPLE_OMP_PARALLEL:
2981 case GIMPLE_OMP_TEAMS:
2982 error_at (gimple_location (stmt),
2983 "%<%s taskgroup%> construct not closely "
2984 "nested inside of %<taskgroup%> region",
2985 construct);
2986 return false;
2987 case GIMPLE_OMP_TASK:
2988 if (gimple_omp_task_taskloop_p (octx->stmt)
2989 && octx->outer
2990 && is_taskloop_ctx (octx->outer))
2991 {
2992 tree clauses
2993 = gimple_omp_for_clauses (octx->outer->stmt);
2994 if (!omp_find_clause (clauses, OMP_CLAUSE_NOGROUP))
2995 break;
2996 }
2997 continue;
2998 default:
2999 continue;
3000 }
3001 break;
3002 }
3003 ctx->cancellable = true;
3004 }
3005 kind = "taskgroup";
3006 break;
3007 default:
3008 error_at (gimple_location (stmt), "invalid arguments");
3009 return false;
3010 }
3011 if (bad)
3012 {
3013 error_at (gimple_location (stmt),
3014 "%<%s %s%> construct not closely nested inside of %qs",
3015 construct, kind, bad);
3016 return false;
3017 }
3018 }
3019 /* FALLTHRU */
3020 case GIMPLE_OMP_SECTIONS:
3021 case GIMPLE_OMP_SINGLE:
3022 for (; ctx != NULL; ctx = ctx->outer)
3023 switch (gimple_code (ctx->stmt))
3024 {
3025 case GIMPLE_OMP_FOR:
3026 if (gimple_omp_for_kind (ctx->stmt) != GF_OMP_FOR_KIND_FOR
3027 && gimple_omp_for_kind (ctx->stmt) != GF_OMP_FOR_KIND_TASKLOOP)
3028 break;
3029 /* FALLTHRU */
3030 case GIMPLE_OMP_SECTIONS:
3031 case GIMPLE_OMP_SINGLE:
3032 case GIMPLE_OMP_ORDERED:
3033 case GIMPLE_OMP_MASTER:
3034 case GIMPLE_OMP_TASK:
3035 case GIMPLE_OMP_CRITICAL:
3036 if (is_gimple_call (stmt))
3037 {
3038 if (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
3039 != BUILT_IN_GOMP_BARRIER)
3040 return true;
3041 error_at (gimple_location (stmt),
3042 "barrier region may not be closely nested inside "
3043 "of work-sharing, %<loop%>, %<critical%>, "
3044 "%<ordered%>, %<master%>, explicit %<task%> or "
3045 "%<taskloop%> region");
3046 return false;
3047 }
3048 error_at (gimple_location (stmt),
3049 "work-sharing region may not be closely nested inside "
3050 "of work-sharing, %<loop%>, %<critical%>, %<ordered%>, "
3051 "%<master%>, explicit %<task%> or %<taskloop%> region");
3052 return false;
3053 case GIMPLE_OMP_PARALLEL:
3054 case GIMPLE_OMP_TEAMS:
3055 return true;
3056 case GIMPLE_OMP_TARGET:
3057 if (gimple_omp_target_kind (ctx->stmt)
3058 == GF_OMP_TARGET_KIND_REGION)
3059 return true;
3060 break;
3061 default:
3062 break;
3063 }
3064 break;
3065 case GIMPLE_OMP_MASTER:
3066 for (; ctx != NULL; ctx = ctx->outer)
3067 switch (gimple_code (ctx->stmt))
3068 {
3069 case GIMPLE_OMP_FOR:
3070 if (gimple_omp_for_kind (ctx->stmt) != GF_OMP_FOR_KIND_FOR
3071 && gimple_omp_for_kind (ctx->stmt) != GF_OMP_FOR_KIND_TASKLOOP)
3072 break;
3073 /* FALLTHRU */
3074 case GIMPLE_OMP_SECTIONS:
3075 case GIMPLE_OMP_SINGLE:
3076 case GIMPLE_OMP_TASK:
3077 error_at (gimple_location (stmt),
3078 "%<master%> region may not be closely nested inside "
3079 "of work-sharing, %<loop%>, explicit %<task%> or "
3080 "%<taskloop%> region");
3081 return false;
3082 case GIMPLE_OMP_PARALLEL:
3083 case GIMPLE_OMP_TEAMS:
3084 return true;
3085 case GIMPLE_OMP_TARGET:
3086 if (gimple_omp_target_kind (ctx->stmt)
3087 == GF_OMP_TARGET_KIND_REGION)
3088 return true;
3089 break;
3090 default:
3091 break;
3092 }
3093 break;
3094 case GIMPLE_OMP_TASK:
3095 for (c = gimple_omp_task_clauses (stmt); c; c = OMP_CLAUSE_CHAIN (c))
3096 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND
3097 && (OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SOURCE
3098 || OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SINK))
3099 {
3100 enum omp_clause_depend_kind kind = OMP_CLAUSE_DEPEND_KIND (c);
3101 error_at (OMP_CLAUSE_LOCATION (c),
3102 "%<depend(%s)%> is only allowed in %<omp ordered%>",
3103 kind == OMP_CLAUSE_DEPEND_SOURCE ? "source" : "sink");
3104 return false;
3105 }
3106 break;
3107 case GIMPLE_OMP_ORDERED:
3108 for (c = gimple_omp_ordered_clauses (as_a <gomp_ordered *> (stmt));
3109 c; c = OMP_CLAUSE_CHAIN (c))
3110 {
3111 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_DEPEND)
3112 {
3113 gcc_assert (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_THREADS
3114 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SIMD);
3115 continue;
3116 }
3117 enum omp_clause_depend_kind kind = OMP_CLAUSE_DEPEND_KIND (c);
3118 if (kind == OMP_CLAUSE_DEPEND_SOURCE
3119 || kind == OMP_CLAUSE_DEPEND_SINK)
3120 {
3121 tree oclause;
3122 /* Look for containing ordered(N) loop. */
3123 if (ctx == NULL
3124 || gimple_code (ctx->stmt) != GIMPLE_OMP_FOR
3125 || (oclause
3126 = omp_find_clause (gimple_omp_for_clauses (ctx->stmt),
3127 OMP_CLAUSE_ORDERED)) == NULL_TREE)
3128 {
3129 error_at (OMP_CLAUSE_LOCATION (c),
3130 "%<ordered%> construct with %<depend%> clause "
3131 "must be closely nested inside an %<ordered%> "
3132 "loop");
3133 return false;
3134 }
3135 else if (OMP_CLAUSE_ORDERED_EXPR (oclause) == NULL_TREE)
3136 {
3137 error_at (OMP_CLAUSE_LOCATION (c),
3138 "%<ordered%> construct with %<depend%> clause "
3139 "must be closely nested inside a loop with "
3140 "%<ordered%> clause with a parameter");
3141 return false;
3142 }
3143 }
3144 else
3145 {
3146 error_at (OMP_CLAUSE_LOCATION (c),
3147 "invalid depend kind in omp %<ordered%> %<depend%>");
3148 return false;
3149 }
3150 }
3151 c = gimple_omp_ordered_clauses (as_a <gomp_ordered *> (stmt));
3152 if (omp_find_clause (c, OMP_CLAUSE_SIMD))
3153 {
3154 /* ordered simd must be closely nested inside of a simd region,
3155 and a simd region must not encounter constructs other than
3156 ordered simd, therefore ordered simd may be either orphaned,
3157 or ctx->stmt must be simd. The latter case is already handled
3158 earlier. */
3159 if (ctx != NULL)
3160 {
3161 error_at (gimple_location (stmt),
3162 "%<ordered%> %<simd%> must be closely nested inside "
3163 "%<simd%> region");
3164 return false;
3165 }
3166 }
3167 for (; ctx != NULL; ctx = ctx->outer)
3168 switch (gimple_code (ctx->stmt))
3169 {
3170 case GIMPLE_OMP_CRITICAL:
3171 case GIMPLE_OMP_TASK:
3172 case GIMPLE_OMP_ORDERED:
3173 ordered_in_taskloop:
3174 error_at (gimple_location (stmt),
3175 "%<ordered%> region may not be closely nested inside "
3176 "of %<critical%>, %<ordered%>, explicit %<task%> or "
3177 "%<taskloop%> region");
3178 return false;
3179 case GIMPLE_OMP_FOR:
3180 if (gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_TASKLOOP)
3181 goto ordered_in_taskloop;
3182 tree o;
3183 o = omp_find_clause (gimple_omp_for_clauses (ctx->stmt),
3184 OMP_CLAUSE_ORDERED);
3185 if (o == NULL)
3186 {
3187 error_at (gimple_location (stmt),
3188 "%<ordered%> region must be closely nested inside "
3189 "a loop region with an %<ordered%> clause");
3190 return false;
3191 }
3192 if (OMP_CLAUSE_ORDERED_EXPR (o) != NULL_TREE
3193 && omp_find_clause (c, OMP_CLAUSE_DEPEND) == NULL_TREE)
3194 {
3195 error_at (gimple_location (stmt),
3196 "%<ordered%> region without %<depend%> clause may "
3197 "not be closely nested inside a loop region with "
3198 "an %<ordered%> clause with a parameter");
3199 return false;
3200 }
3201 return true;
3202 case GIMPLE_OMP_TARGET:
3203 if (gimple_omp_target_kind (ctx->stmt)
3204 != GF_OMP_TARGET_KIND_REGION)
3205 break;
3206 /* FALLTHRU */
3207 case GIMPLE_OMP_PARALLEL:
3208 case GIMPLE_OMP_TEAMS:
3209 error_at (gimple_location (stmt),
3210 "%<ordered%> region must be closely nested inside "
3211 "a loop region with an %<ordered%> clause");
3212 return false;
3213 default:
3214 break;
3215 }
3216 break;
3217 case GIMPLE_OMP_CRITICAL:
3218 {
3219 tree this_stmt_name
3220 = gimple_omp_critical_name (as_a <gomp_critical *> (stmt));
3221 for (; ctx != NULL; ctx = ctx->outer)
3222 if (gomp_critical *other_crit
3223 = dyn_cast <gomp_critical *> (ctx->stmt))
3224 if (this_stmt_name == gimple_omp_critical_name (other_crit))
3225 {
3226 error_at (gimple_location (stmt),
3227 "%<critical%> region may not be nested inside "
3228 "a %<critical%> region with the same name");
3229 return false;
3230 }
3231 }
3232 break;
3233 case GIMPLE_OMP_TEAMS:
3234 if (ctx == NULL)
3235 break;
3236 else if (gimple_code (ctx->stmt) != GIMPLE_OMP_TARGET
3237 || (gimple_omp_target_kind (ctx->stmt)
3238 != GF_OMP_TARGET_KIND_REGION))
3239 {
3240 /* A teams construct can appear either strictly nested inside of a
3241 target construct with no intervening stmts, or can be encountered
3242 only by the initial task (so it must not appear inside any OpenMP
3243 construct). */
3244 error_at (gimple_location (stmt),
3245 "%<teams%> construct must be closely nested inside of "
3246 "%<target%> construct or not nested in any OpenMP "
3247 "construct");
3248 return false;
3249 }
3250 break;
3251 case GIMPLE_OMP_TARGET:
3252 for (c = gimple_omp_target_clauses (stmt); c; c = OMP_CLAUSE_CHAIN (c))
3253 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND
3254 && (OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SOURCE
3255 || OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SINK))
3256 {
3257 enum omp_clause_depend_kind kind = OMP_CLAUSE_DEPEND_KIND (c);
3258 error_at (OMP_CLAUSE_LOCATION (c),
3259 "%<depend(%s)%> is only allowed in %<omp ordered%>",
3260 kind == OMP_CLAUSE_DEPEND_SOURCE ? "source" : "sink");
3261 return false;
3262 }
3263 if (is_gimple_omp_offloaded (stmt)
3264 && oacc_get_fn_attrib (cfun->decl) != NULL)
3265 {
3266 error_at (gimple_location (stmt),
3267 "OpenACC region inside of OpenACC routine, nested "
3268 "parallelism not supported yet");
3269 return false;
3270 }
3271 for (; ctx != NULL; ctx = ctx->outer)
3272 {
3273 if (gimple_code (ctx->stmt) != GIMPLE_OMP_TARGET)
3274 {
3275 if (is_gimple_omp (stmt)
3276 && is_gimple_omp_oacc (stmt)
3277 && is_gimple_omp (ctx->stmt))
3278 {
3279 error_at (gimple_location (stmt),
3280 "OpenACC construct inside of non-OpenACC region");
3281 return false;
3282 }
3283 continue;
3284 }
3285
3286 const char *stmt_name, *ctx_stmt_name;
3287 switch (gimple_omp_target_kind (stmt))
3288 {
3289 case GF_OMP_TARGET_KIND_REGION: stmt_name = "target"; break;
3290 case GF_OMP_TARGET_KIND_DATA: stmt_name = "target data"; break;
3291 case GF_OMP_TARGET_KIND_UPDATE: stmt_name = "target update"; break;
3292 case GF_OMP_TARGET_KIND_ENTER_DATA:
3293 stmt_name = "target enter data"; break;
3294 case GF_OMP_TARGET_KIND_EXIT_DATA:
3295 stmt_name = "target exit data"; break;
3296 case GF_OMP_TARGET_KIND_OACC_PARALLEL: stmt_name = "parallel"; break;
3297 case GF_OMP_TARGET_KIND_OACC_KERNELS: stmt_name = "kernels"; break;
3298 case GF_OMP_TARGET_KIND_OACC_DATA: stmt_name = "data"; break;
3299 case GF_OMP_TARGET_KIND_OACC_UPDATE: stmt_name = "update"; break;
3300 case GF_OMP_TARGET_KIND_OACC_ENTER_EXIT_DATA:
3301 stmt_name = "enter/exit data"; break;
3302 case GF_OMP_TARGET_KIND_OACC_DECLARE: stmt_name = "declare"; break;
3303 case GF_OMP_TARGET_KIND_OACC_HOST_DATA: stmt_name = "host_data";
3304 break;
3305 default: gcc_unreachable ();
3306 }
3307 switch (gimple_omp_target_kind (ctx->stmt))
3308 {
3309 case GF_OMP_TARGET_KIND_REGION: ctx_stmt_name = "target"; break;
3310 case GF_OMP_TARGET_KIND_DATA: ctx_stmt_name = "target data"; break;
3311 case GF_OMP_TARGET_KIND_OACC_PARALLEL:
3312 ctx_stmt_name = "parallel"; break;
3313 case GF_OMP_TARGET_KIND_OACC_KERNELS:
3314 ctx_stmt_name = "kernels"; break;
3315 case GF_OMP_TARGET_KIND_OACC_DATA: ctx_stmt_name = "data"; break;
3316 case GF_OMP_TARGET_KIND_OACC_HOST_DATA:
3317 ctx_stmt_name = "host_data"; break;
3318 default: gcc_unreachable ();
3319 }
3320
3321 /* OpenACC/OpenMP mismatch? */
3322 if (is_gimple_omp_oacc (stmt)
3323 != is_gimple_omp_oacc (ctx->stmt))
3324 {
3325 error_at (gimple_location (stmt),
3326 "%s %qs construct inside of %s %qs region",
3327 (is_gimple_omp_oacc (stmt)
3328 ? "OpenACC" : "OpenMP"), stmt_name,
3329 (is_gimple_omp_oacc (ctx->stmt)
3330 ? "OpenACC" : "OpenMP"), ctx_stmt_name);
3331 return false;
3332 }
3333 if (is_gimple_omp_offloaded (ctx->stmt))
3334 {
3335 /* No GIMPLE_OMP_TARGET inside offloaded OpenACC CTX. */
3336 if (is_gimple_omp_oacc (ctx->stmt))
3337 {
3338 error_at (gimple_location (stmt),
3339 "%qs construct inside of %qs region",
3340 stmt_name, ctx_stmt_name);
3341 return false;
3342 }
3343 else
3344 {
3345 warning_at (gimple_location (stmt), 0,
3346 "%qs construct inside of %qs region",
3347 stmt_name, ctx_stmt_name);
3348 }
3349 }
3350 }
3351 break;
3352 default:
3353 break;
3354 }
3355 return true;
3356 }
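
/* Two user-level sketches of nestings rejected above:

     #pragma omp critical
     #pragma omp barrier        <- barrier closely nested in critical

     #pragma omp parallel
     #pragma omp teams          <- teams not nested in target
     ...  */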
3357
3358
3359 /* Helper function for scan_omp.
3360 
3361 Callback for walk_tree, or for operands via walk_gimple_stmt, used to
3362 scan for OMP directives in TP. */
3363
3364 static tree
3365 scan_omp_1_op (tree *tp, int *walk_subtrees, void *data)
3366 {
3367 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
3368 omp_context *ctx = (omp_context *) wi->info;
3369 tree t = *tp;
3370
3371 switch (TREE_CODE (t))
3372 {
3373 case VAR_DECL:
3374 case PARM_DECL:
3375 case LABEL_DECL:
3376 case RESULT_DECL:
3377 if (ctx)
3378 {
3379 tree repl = remap_decl (t, &ctx->cb);
3380 gcc_checking_assert (TREE_CODE (repl) != ERROR_MARK);
3381 *tp = repl;
3382 }
3383 break;
3384
3385 default:
3386 if (ctx && TYPE_P (t))
3387 *tp = remap_type (t, &ctx->cb);
3388 else if (!DECL_P (t))
3389 {
3390 *walk_subtrees = 1;
3391 if (ctx)
3392 {
3393 tree tem = remap_type (TREE_TYPE (t), &ctx->cb);
3394 if (tem != TREE_TYPE (t))
3395 {
3396 if (TREE_CODE (t) == INTEGER_CST)
3397 *tp = wide_int_to_tree (tem, wi::to_wide (t));
3398 else
3399 TREE_TYPE (t) = tem;
3400 }
3401 }
3402 }
3403 break;
3404 }
3405
3406 return NULL_TREE;
3407 }
3408
3409 /* Return true if FNDECL is a setjmp or a longjmp. */
3410
3411 static bool
3412 setjmp_or_longjmp_p (const_tree fndecl)
3413 {
3414 if (fndecl_built_in_p (fndecl, BUILT_IN_SETJMP)
3415 || fndecl_built_in_p (fndecl, BUILT_IN_LONGJMP))
3416 return true;
3417
3418 tree declname = DECL_NAME (fndecl);
3419 if (!declname
3420 || (DECL_CONTEXT (fndecl) != NULL_TREE
3421 && TREE_CODE (DECL_CONTEXT (fndecl)) != TRANSLATION_UNIT_DECL)
3422 || !TREE_PUBLIC (fndecl))
3423 return false;
3424
3425 const char *name = IDENTIFIER_POINTER (declname);
3426 return !strcmp (name, "setjmp") || !strcmp (name, "longjmp");
3427 }
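
/* Used below to reject e.g. (user-level sketch):

     #pragma omp simd
     for (i = 0; i < n; i++)
       if (setjmp (buf))
	 ...

   with "setjmp/longjmp inside simd construct".  */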
3428
3429 /* Return true if FNDECL is an omp_* runtime API call. */
3430
3431 static bool
3432 omp_runtime_api_call (const_tree fndecl)
3433 {
3434 tree declname = DECL_NAME (fndecl);
3435 if (!declname
3436 || (DECL_CONTEXT (fndecl) != NULL_TREE
3437 && TREE_CODE (DECL_CONTEXT (fndecl)) != TRANSLATION_UNIT_DECL)
3438 || !TREE_PUBLIC (fndecl))
3439 return false;
3440
3441 const char *name = IDENTIFIER_POINTER (declname);
3442 if (strncmp (name, "omp_", 4) != 0)
3443 return false;
3444
3445 static const char *omp_runtime_apis[] =
3446 {
3447 /* This array has 3 sections. First come omp_* calls that don't
3448 have any suffixes. */
3449 "target_alloc",
3450 "target_associate_ptr",
3451 "target_disassociate_ptr",
3452 "target_free",
3453 "target_is_present",
3454 "target_memcpy",
3455 "target_memcpy_rect",
3456 NULL,
3457 /* Now omp_* calls that are available as omp_* and omp_*_. */
3458 "capture_affinity",
3459 "destroy_lock",
3460 "destroy_nest_lock",
3461 "display_affinity",
3462 "get_active_level",
3463 "get_affinity_format",
3464 "get_cancellation",
3465 "get_default_device",
3466 "get_dynamic",
3467 "get_initial_device",
3468 "get_level",
3469 "get_max_active_levels",
3470 "get_max_task_priority",
3471 "get_max_threads",
3472 "get_nested",
3473 "get_num_devices",
3474 "get_num_places",
3475 "get_num_procs",
3476 "get_num_teams",
3477 "get_num_threads",
3478 "get_partition_num_places",
3479 "get_place_num",
3480 "get_proc_bind",
3481 "get_team_num",
3482 "get_thread_limit",
3483 "get_thread_num",
3484 "get_wtick",
3485 "get_wtime",
3486 "in_final",
3487 "in_parallel",
3488 "init_lock",
3489 "init_nest_lock",
3490 "is_initial_device",
3491 "pause_resource",
3492 "pause_resource_all",
3493 "set_affinity_format",
3494 "set_lock",
3495 "set_nest_lock",
3496 "test_lock",
3497 "test_nest_lock",
3498 "unset_lock",
3499 "unset_nest_lock",
3500 NULL,
3501 /* And finally calls available as omp_*, omp_*_ and omp_*_8_. */
3502 "get_ancestor_thread_num",
3503 "get_partition_place_nums",
3504 "get_place_num_procs",
3505 "get_place_proc_ids",
3506 "get_schedule",
3507 "get_team_size",
3508 "set_default_device",
3509 "set_dynamic",
3510 "set_max_active_levels",
3511 "set_nested",
3512 "set_num_threads",
3513 "set_schedule"
3514 };
3515
3516 int mode = 0;
3517 for (unsigned i = 0; i < ARRAY_SIZE (omp_runtime_apis); i++)
3518 {
3519 if (omp_runtime_apis[i] == NULL)
3520 {
3521 mode++;
3522 continue;
3523 }
3524 size_t len = strlen (omp_runtime_apis[i]);
3525 if (strncmp (name + 4, omp_runtime_apis[i], len) == 0
3526 && (name[4 + len] == '\0'
3527 || (mode > 0
3528 && name[4 + len] == '_'
3529 && (name[4 + len + 1] == '\0'
3530 || (mode > 1
3531 && strcmp (name + 4 + len + 1, "8_") == 0)))))
3532 return true;
3533 }
3534 return false;
3535 }
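
/* Thus e.g. omp_get_thread_num (second section) also matches the Fortran
   variant omp_get_thread_num_, omp_get_team_size (third section)
   additionally matches omp_get_team_size_8_, while omp_target_alloc
   (first section) matches only the plain name.  */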
3536
3537 /* Helper function for scan_omp.
3538
3539 Callback for walk_gimple_stmt used to scan for OMP directives in
3540 the current statement in GSI. */
3541
3542 static tree
3543 scan_omp_1_stmt (gimple_stmt_iterator *gsi, bool *handled_ops_p,
3544 struct walk_stmt_info *wi)
3545 {
3546 gimple *stmt = gsi_stmt (*gsi);
3547 omp_context *ctx = (omp_context *) wi->info;
3548
3549 if (gimple_has_location (stmt))
3550 input_location = gimple_location (stmt);
3551
3552 /* Check the nesting restrictions. */
3553 bool remove = false;
3554 if (is_gimple_omp (stmt))
3555 remove = !check_omp_nesting_restrictions (stmt, ctx);
3556 else if (is_gimple_call (stmt))
3557 {
3558 tree fndecl = gimple_call_fndecl (stmt);
3559 if (fndecl)
3560 {
3561 if (ctx
3562 && gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
3563 && gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_SIMD
3564 && setjmp_or_longjmp_p (fndecl)
3565 && !ctx->loop_p)
3566 {
3567 remove = true;
3568 error_at (gimple_location (stmt),
3569 "setjmp/longjmp inside %<simd%> construct");
3570 }
3571 else if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
3572 switch (DECL_FUNCTION_CODE (fndecl))
3573 {
3574 case BUILT_IN_GOMP_BARRIER:
3575 case BUILT_IN_GOMP_CANCEL:
3576 case BUILT_IN_GOMP_CANCELLATION_POINT:
3577 case BUILT_IN_GOMP_TASKYIELD:
3578 case BUILT_IN_GOMP_TASKWAIT:
3579 case BUILT_IN_GOMP_TASKGROUP_START:
3580 case BUILT_IN_GOMP_TASKGROUP_END:
3581 remove = !check_omp_nesting_restrictions (stmt, ctx);
3582 break;
3583 default:
3584 break;
3585 }
3586 else if (ctx)
3587 {
3588 omp_context *octx = ctx;
3589 if (gimple_code (ctx->stmt) == GIMPLE_OMP_SCAN && ctx->outer)
3590 octx = ctx->outer;
3591 if (octx->order_concurrent && omp_runtime_api_call (fndecl))
3592 {
3593 remove = true;
3594 error_at (gimple_location (stmt),
3595 "OpenMP runtime API call %qD in a region with "
3596 "%<order(concurrent)%> clause", fndecl);
3597 }
3598 }
3599 }
3600 }
3601 if (remove)
3602 {
3603 stmt = gimple_build_nop ();
3604 gsi_replace (gsi, stmt, false);
3605 }
3606
3607 *handled_ops_p = true;
3608
3609 switch (gimple_code (stmt))
3610 {
3611 case GIMPLE_OMP_PARALLEL:
3612 taskreg_nesting_level++;
3613 scan_omp_parallel (gsi, ctx);
3614 taskreg_nesting_level--;
3615 break;
3616
3617 case GIMPLE_OMP_TASK:
3618 taskreg_nesting_level++;
3619 scan_omp_task (gsi, ctx);
3620 taskreg_nesting_level--;
3621 break;
3622
3623 case GIMPLE_OMP_FOR:
3624 if ((gimple_omp_for_kind (as_a <gomp_for *> (stmt))
3625 == GF_OMP_FOR_KIND_SIMD)
3626 && gimple_omp_for_combined_into_p (stmt)
3627 && gimple_code (ctx->stmt) != GIMPLE_OMP_SCAN)
3628 {
3629 tree clauses = gimple_omp_for_clauses (as_a <gomp_for *> (stmt));
3630 tree c = omp_find_clause (clauses, OMP_CLAUSE_REDUCTION);
3631 if (c && OMP_CLAUSE_REDUCTION_INSCAN (c) && !seen_error ())
3632 {
3633 scan_omp_simd_scan (gsi, as_a <gomp_for *> (stmt), ctx);
3634 break;
3635 }
3636 }
3637 if ((gimple_omp_for_kind (as_a <gomp_for *> (stmt))
3638 == GF_OMP_FOR_KIND_SIMD)
3639 && omp_maybe_offloaded_ctx (ctx)
3640 && omp_max_simt_vf ())
3641 scan_omp_simd (gsi, as_a <gomp_for *> (stmt), ctx);
3642 else
3643 scan_omp_for (as_a <gomp_for *> (stmt), ctx);
3644 break;
3645
3646 case GIMPLE_OMP_SECTIONS:
3647 scan_omp_sections (as_a <gomp_sections *> (stmt), ctx);
3648 break;
3649
3650 case GIMPLE_OMP_SINGLE:
3651 scan_omp_single (as_a <gomp_single *> (stmt), ctx);
3652 break;
3653
3654 case GIMPLE_OMP_SCAN:
3655 if (tree clauses = gimple_omp_scan_clauses (as_a <gomp_scan *> (stmt)))
3656 {
3657 if (OMP_CLAUSE_CODE (clauses) == OMP_CLAUSE_INCLUSIVE)
3658 ctx->scan_inclusive = true;
3659 else if (OMP_CLAUSE_CODE (clauses) == OMP_CLAUSE_EXCLUSIVE)
3660 ctx->scan_exclusive = true;
3661 }
3662 /* FALLTHRU */
3663 case GIMPLE_OMP_SECTION:
3664 case GIMPLE_OMP_MASTER:
3665 case GIMPLE_OMP_ORDERED:
3666 case GIMPLE_OMP_CRITICAL:
3667 case GIMPLE_OMP_GRID_BODY:
3668 ctx = new_omp_context (stmt, ctx);
3669 scan_omp (gimple_omp_body_ptr (stmt), ctx);
3670 break;
3671
3672 case GIMPLE_OMP_TASKGROUP:
3673 ctx = new_omp_context (stmt, ctx);
3674 scan_sharing_clauses (gimple_omp_taskgroup_clauses (stmt), ctx);
3675 scan_omp (gimple_omp_body_ptr (stmt), ctx);
3676 break;
3677
3678 case GIMPLE_OMP_TARGET:
3679 scan_omp_target (as_a <gomp_target *> (stmt), ctx);
3680 break;
3681
3682 case GIMPLE_OMP_TEAMS:
3683 if (gimple_omp_teams_host (as_a <gomp_teams *> (stmt)))
3684 {
3685 taskreg_nesting_level++;
3686 scan_omp_teams (as_a <gomp_teams *> (stmt), ctx);
3687 taskreg_nesting_level--;
3688 }
3689 else
3690 scan_omp_teams (as_a <gomp_teams *> (stmt), ctx);
3691 break;
3692
3693 case GIMPLE_BIND:
3694 {
3695 tree var;
3696
3697 *handled_ops_p = false;
3698 if (ctx)
3699 for (var = gimple_bind_vars (as_a <gbind *> (stmt));
3700 var;
3701 var = DECL_CHAIN (var))
3702 insert_decl_map (&ctx->cb, var, var);
3703 }
3704 break;
3705 default:
3706 *handled_ops_p = false;
3707 break;
3708 }
3709
3710 return NULL_TREE;
3711 }
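/* As an illustration of the order(concurrent) diagnostic above,
   assumed user code such as

     #pragma omp for order(concurrent)
     for (i = 0; i < n; i++)
       a[i] = omp_get_thread_num ();

   is rejected: omp_get_thread_num is matched by omp_runtime_api_call,
   so the call is diagnosed and then replaced by a GIMPLE_NOP through
   the remove handling above.  */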
3712
3713
3714 /* Scan all the statements starting at the current statement. CTX
3715 contains context information about the OMP directives and
3716 clauses found during the scan. */
3717
3718 static void
3719 scan_omp (gimple_seq *body_p, omp_context *ctx)
3720 {
3721 location_t saved_location;
3722 struct walk_stmt_info wi;
3723
3724 memset (&wi, 0, sizeof (wi));
3725 wi.info = ctx;
3726 wi.want_locations = true;
3727
3728 saved_location = input_location;
3729 walk_gimple_seq_mod (body_p, scan_omp_1_stmt, scan_omp_1_op, &wi);
3730 input_location = saved_location;
3731 }
3732 \f
3733 /* Re-gimplification and code generation routines. */
3734
3735 /* Remove omp_member_access_dummy_var variables from gimple_bind_vars
3736 of BIND if in a method. */
3737
3738 static void
3739 maybe_remove_omp_member_access_dummy_vars (gbind *bind)
3740 {
3741 if (DECL_ARGUMENTS (current_function_decl)
3742 && DECL_ARTIFICIAL (DECL_ARGUMENTS (current_function_decl))
3743 && (TREE_CODE (TREE_TYPE (DECL_ARGUMENTS (current_function_decl)))
3744 == POINTER_TYPE))
3745 {
3746 tree vars = gimple_bind_vars (bind);
3747 for (tree *pvar = &vars; *pvar; )
3748 if (omp_member_access_dummy_var (*pvar))
3749 *pvar = DECL_CHAIN (*pvar);
3750 else
3751 pvar = &DECL_CHAIN (*pvar);
3752 gimple_bind_set_vars (bind, vars);
3753 }
3754 }
3755
3756 /* Remove omp_member_access_dummy_var variables from BLOCK_VARS of
3757 block and its subblocks. */
3758
3759 static void
3760 remove_member_access_dummy_vars (tree block)
3761 {
3762 for (tree *pvar = &BLOCK_VARS (block); *pvar; )
3763 if (omp_member_access_dummy_var (*pvar))
3764 *pvar = DECL_CHAIN (*pvar);
3765 else
3766 pvar = &DECL_CHAIN (*pvar);
3767
3768 for (block = BLOCK_SUBBLOCKS (block); block; block = BLOCK_CHAIN (block))
3769 remove_member_access_dummy_vars (block);
3770 }
3771
3772 /* If a context was created for STMT when it was scanned, return it. */
3773
3774 static omp_context *
3775 maybe_lookup_ctx (gimple *stmt)
3776 {
3777 splay_tree_node n;
3778 n = splay_tree_lookup (all_contexts, (splay_tree_key) stmt);
3779 return n ? (omp_context *) n->value : NULL;
3780 }
3781
3782
3783 /* Find the mapping for DECL in CTX or the immediately enclosing
3784 context that has a mapping for DECL.
3785
3786 If CTX is a nested parallel directive, we may have to use the decl
3787 mappings created in CTX's parent context. Suppose that we have the
3788 following parallel nesting (variable UIDs shown for clarity):
3789
3790 iD.1562 = 0;
3791 #omp parallel shared(iD.1562) -> outer parallel
3792 iD.1562 = iD.1562 + 1;
3793
3794 #omp parallel shared (iD.1562) -> inner parallel
3795 iD.1562 = iD.1562 - 1;
3796
3797 Each parallel structure will create a distinct .omp_data_s structure
3798 for copying iD.1562 in/out of the directive:
3799
3800 outer parallel .omp_data_s.1.i -> iD.1562
3801 inner parallel .omp_data_s.2.i -> iD.1562
3802
3803 A shared variable mapping will produce a copy-out operation before
3804 the parallel directive and a copy-in operation after it. So, in
3805 this case we would have:
3806
3807 iD.1562 = 0;
3808 .omp_data_o.1.i = iD.1562;
3809 #omp parallel shared(iD.1562) -> outer parallel
3810 .omp_data_i.1 = &.omp_data_o.1
3811 .omp_data_i.1->i = .omp_data_i.1->i + 1;
3812
3813 .omp_data_o.2.i = iD.1562; -> **
3814 #omp parallel shared(iD.1562) -> inner parallel
3815 .omp_data_i.2 = &.omp_data_o.2
3816 .omp_data_i.2->i = .omp_data_i.2->i - 1;
3817
3818
3819 ** This is a problem. The symbol iD.1562 cannot be referenced
3820 inside the body of the outer parallel region. But since we are
3821 emitting this copy operation while expanding the inner parallel
3822 directive, we need to access the CTX structure of the outer
3823 parallel directive to get the correct mapping:
3824
3825 .omp_data_o.2.i = .omp_data_i.1->i
3826
3827 Since there may be other workshare or parallel directives enclosing
3828 the parallel directive, it may be necessary to walk up the context
3829 parent chain. This is not a problem in general because nested
3830 parallelism happens only rarely. */
3831
3832 static tree
3833 lookup_decl_in_outer_ctx (tree decl, omp_context *ctx)
3834 {
3835 tree t;
3836 omp_context *up;
3837
3838 for (up = ctx->outer, t = NULL; up && t == NULL; up = up->outer)
3839 t = maybe_lookup_decl (decl, up);
3840
3841 gcc_assert (!ctx->is_nested || t || is_global_var (decl));
3842
3843 return t ? t : decl;
3844 }
3845
3846
3847 /* Similar to lookup_decl_in_outer_ctx, but without asserting that a
3848 mapping exists; return DECL itself if no outer context maps it. */
3849
3850 static tree
3851 maybe_lookup_decl_in_outer_ctx (tree decl, omp_context *ctx)
3852 {
3853 tree t = NULL;
3854 omp_context *up;
3855
3856 for (up = ctx->outer, t = NULL; up && t == NULL; up = up->outer)
3857 t = maybe_lookup_decl (decl, up);
3858
3859 return t ? t : decl;
3860 }
3861
3862
3863 /* Construct the initialization value for reduction operation OP. */
3864
3865 tree
3866 omp_reduction_init_op (location_t loc, enum tree_code op, tree type)
3867 {
3868 switch (op)
3869 {
3870 case PLUS_EXPR:
3871 case MINUS_EXPR:
3872 case BIT_IOR_EXPR:
3873 case BIT_XOR_EXPR:
3874 case TRUTH_OR_EXPR:
3875 case TRUTH_ORIF_EXPR:
3876 case TRUTH_XOR_EXPR:
3877 case NE_EXPR:
3878 return build_zero_cst (type);
3879
3880 case MULT_EXPR:
3881 case TRUTH_AND_EXPR:
3882 case TRUTH_ANDIF_EXPR:
3883 case EQ_EXPR:
3884 return fold_convert_loc (loc, type, integer_one_node);
3885
3886 case BIT_AND_EXPR:
3887 return fold_convert_loc (loc, type, integer_minus_one_node);
3888
3889 case MAX_EXPR:
3890 if (SCALAR_FLOAT_TYPE_P (type))
3891 {
3892 REAL_VALUE_TYPE max, min;
3893 if (HONOR_INFINITIES (type))
3894 {
3895 real_inf (&max);
3896 real_arithmetic (&min, NEGATE_EXPR, &max, NULL);
3897 }
3898 else
3899 real_maxval (&min, 1, TYPE_MODE (type));
3900 return build_real (type, min);
3901 }
3902 else if (POINTER_TYPE_P (type))
3903 {
3904 wide_int min
3905 = wi::min_value (TYPE_PRECISION (type), TYPE_SIGN (type));
3906 return wide_int_to_tree (type, min);
3907 }
3908 else
3909 {
3910 gcc_assert (INTEGRAL_TYPE_P (type));
3911 return TYPE_MIN_VALUE (type);
3912 }
3913
3914 case MIN_EXPR:
3915 if (SCALAR_FLOAT_TYPE_P (type))
3916 {
3917 REAL_VALUE_TYPE max;
3918 if (HONOR_INFINITIES (type))
3919 real_inf (&max);
3920 else
3921 real_maxval (&max, 0, TYPE_MODE (type));
3922 return build_real (type, max);
3923 }
3924 else if (POINTER_TYPE_P (type))
3925 {
3926 wide_int max
3927 = wi::max_value (TYPE_PRECISION (type), TYPE_SIGN (type));
3928 return wide_int_to_tree (type, max);
3929 }
3930 else
3931 {
3932 gcc_assert (INTEGRAL_TYPE_P (type));
3933 return TYPE_MAX_VALUE (type);
3934 }
3935
3936 default:
3937 gcc_unreachable ();
3938 }
3939 }
3940
3941 /* Construct the initialization value for reduction CLAUSE. */
3942
3943 tree
3944 omp_reduction_init (tree clause, tree type)
3945 {
3946 return omp_reduction_init_op (OMP_CLAUSE_LOCATION (clause),
3947 OMP_CLAUSE_REDUCTION_CODE (clause), type);
3948 }
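/* For example, the identity values chosen above are:

     reduction(+:x), (-:x), (|:x), (^:x)  ->  0
     reduction(*:x), (&&:x)               ->  1
     reduction(&:x)                       ->  ~0 (all bits set)
     reduction(max:x)  ->  TYPE_MIN_VALUE, or -inf for floats that
			   honor infinities
     reduction(min:x)  ->  TYPE_MAX_VALUE, or +inf

   i.e. the neutral element of each operation for TYPE.  */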
3949
3950 /* Return the alignment to be assumed for the variable in CLAUSE, which
3951 should be OMP_CLAUSE_ALIGNED. */
3952
3953 static tree
3954 omp_clause_aligned_alignment (tree clause)
3955 {
3956 if (OMP_CLAUSE_ALIGNED_ALIGNMENT (clause))
3957 return OMP_CLAUSE_ALIGNED_ALIGNMENT (clause);
3958
3959 /* Otherwise return the implementation-defined alignment. */
3960 unsigned int al = 1;
3961 opt_scalar_mode mode_iter;
3962 auto_vector_sizes sizes;
3963 targetm.vectorize.autovectorize_vector_sizes (&sizes, true);
3964 poly_uint64 vs = 0;
3965 for (unsigned int i = 0; i < sizes.length (); ++i)
3966 vs = ordered_max (vs, sizes[i]);
3967 static enum mode_class classes[]
3968 = { MODE_INT, MODE_VECTOR_INT, MODE_FLOAT, MODE_VECTOR_FLOAT };
3969 for (int i = 0; i < 4; i += 2)
3970 /* The for loop above dictates that we only walk through scalar classes. */
3971 FOR_EACH_MODE_IN_CLASS (mode_iter, classes[i])
3972 {
3973 scalar_mode mode = mode_iter.require ();
3974 machine_mode vmode = targetm.vectorize.preferred_simd_mode (mode);
3975 if (GET_MODE_CLASS (vmode) != classes[i + 1])
3976 continue;
3977 while (maybe_ne (vs, 0U)
3978 && known_lt (GET_MODE_SIZE (vmode), vs)
3979 && GET_MODE_2XWIDER_MODE (vmode).exists ())
3980 vmode = GET_MODE_2XWIDER_MODE (vmode).require ();
3981
3982 tree type = lang_hooks.types.type_for_mode (mode, 1);
3983 if (type == NULL_TREE || TYPE_MODE (type) != mode)
3984 continue;
3985 poly_uint64 nelts = exact_div (GET_MODE_SIZE (vmode),
3986 GET_MODE_SIZE (mode));
3987 type = build_vector_type (type, nelts);
3988 if (TYPE_MODE (type) != vmode)
3989 continue;
3990 if (TYPE_ALIGN_UNIT (type) > al)
3991 al = TYPE_ALIGN_UNIT (type);
3992 }
3993 return build_int_cst (integer_type_node, al);
3994 }
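/* As an illustration: if the widest preferred SIMD mode for DFmode on
   the target were a 64-byte vector (say V8DF), the search above would
   compute al == 64, so "aligned (p)" without an explicit alignment
   assumes 64-byte alignment.  The result is implementation defined
   and depends entirely on the targetm vectorization hooks.  */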
3995
3996
3997 /* This structure is part of the interface between lower_rec_simd_input_clauses
3998 and lower_rec_input_clauses. */
3999
4000 class omplow_simd_context {
4001 public:
4002 omplow_simd_context () { memset (this, 0, sizeof (*this)); }
4003 tree idx;
4004 tree lane;
4005 tree lastlane;
4006 vec<tree, va_heap> simt_eargs;
4007 gimple_seq simt_dlist;
4008 poly_uint64_pod max_vf;
4009 bool is_simt;
4010 };
4011
4012 /* Helper function of lower_rec_input_clauses, used for #pragma omp simd
4013 privatization. */
4014
4015 static bool
4016 lower_rec_simd_input_clauses (tree new_var, omp_context *ctx,
4017 omplow_simd_context *sctx, tree &ivar,
4018 tree &lvar, tree *rvar = NULL,
4019 tree *rvar2 = NULL)
4020 {
4021 if (known_eq (sctx->max_vf, 0U))
4022 {
4023 sctx->max_vf = sctx->is_simt ? omp_max_simt_vf () : omp_max_vf ();
4024 if (maybe_gt (sctx->max_vf, 1U))
4025 {
4026 tree c = omp_find_clause (gimple_omp_for_clauses (ctx->stmt),
4027 OMP_CLAUSE_SAFELEN);
4028 if (c)
4029 {
4030 poly_uint64 safe_len;
4031 if (!poly_int_tree_p (OMP_CLAUSE_SAFELEN_EXPR (c), &safe_len)
4032 || maybe_lt (safe_len, 1U))
4033 sctx->max_vf = 1;
4034 else
4035 sctx->max_vf = lower_bound (sctx->max_vf, safe_len);
4036 }
4037 }
4038 if (maybe_gt (sctx->max_vf, 1U))
4039 {
4040 sctx->idx = create_tmp_var (unsigned_type_node);
4041 sctx->lane = create_tmp_var (unsigned_type_node);
4042 }
4043 }
4044 if (known_eq (sctx->max_vf, 1U))
4045 return false;
4046
4047 if (sctx->is_simt)
4048 {
4049 if (is_gimple_reg (new_var))
4050 {
4051 ivar = lvar = new_var;
4052 return true;
4053 }
4054 tree type = TREE_TYPE (new_var), ptype = build_pointer_type (type);
4055 ivar = lvar = create_tmp_var (type);
4056 TREE_ADDRESSABLE (ivar) = 1;
4057 DECL_ATTRIBUTES (ivar) = tree_cons (get_identifier ("omp simt private"),
4058 NULL, DECL_ATTRIBUTES (ivar));
4059 sctx->simt_eargs.safe_push (build1 (ADDR_EXPR, ptype, ivar));
4060 tree clobber = build_clobber (type);
4061 gimple *g = gimple_build_assign (ivar, clobber);
4062 gimple_seq_add_stmt (&sctx->simt_dlist, g);
4063 }
4064 else
4065 {
4066 tree atype = build_array_type_nelts (TREE_TYPE (new_var), sctx->max_vf);
4067 tree avar = create_tmp_var_raw (atype);
4068 if (TREE_ADDRESSABLE (new_var))
4069 TREE_ADDRESSABLE (avar) = 1;
4070 DECL_ATTRIBUTES (avar)
4071 = tree_cons (get_identifier ("omp simd array"), NULL,
4072 DECL_ATTRIBUTES (avar));
4073 gimple_add_tmp_var (avar);
4074 tree iavar = avar;
4075 if (rvar && !ctx->for_simd_scan_phase)
4076 {
4077 /* For inscan reductions, create another array temporary,
4078 which will hold the reduced value. */
4079 iavar = create_tmp_var_raw (atype);
4080 if (TREE_ADDRESSABLE (new_var))
4081 TREE_ADDRESSABLE (iavar) = 1;
4082 DECL_ATTRIBUTES (iavar)
4083 = tree_cons (get_identifier ("omp simd array"), NULL,
4084 tree_cons (get_identifier ("omp simd inscan"), NULL,
4085 DECL_ATTRIBUTES (iavar)));
4086 gimple_add_tmp_var (iavar);
4087 ctx->cb.decl_map->put (avar, iavar);
4088 if (sctx->lastlane == NULL_TREE)
4089 sctx->lastlane = create_tmp_var (unsigned_type_node);
4090 *rvar = build4 (ARRAY_REF, TREE_TYPE (new_var), iavar,
4091 sctx->lastlane, NULL_TREE, NULL_TREE);
4092 TREE_THIS_NOTRAP (*rvar) = 1;
4093
4094 if (ctx->scan_exclusive)
4095 {
4096 /* And for exclusive scan yet another one, which will
4097 hold the value during the scan phase. */
4098 tree savar = create_tmp_var_raw (atype);
4099 if (TREE_ADDRESSABLE (new_var))
4100 TREE_ADDRESSABLE (savar) = 1;
4101 DECL_ATTRIBUTES (savar)
4102 = tree_cons (get_identifier ("omp simd array"), NULL,
4103 tree_cons (get_identifier ("omp simd inscan "
4104 "exclusive"), NULL,
4105 DECL_ATTRIBUTES (savar)));
4106 gimple_add_tmp_var (savar);
4107 ctx->cb.decl_map->put (iavar, savar);
4108 *rvar2 = build4 (ARRAY_REF, TREE_TYPE (new_var), savar,
4109 sctx->idx, NULL_TREE, NULL_TREE);
4110 TREE_THIS_NOTRAP (*rvar2) = 1;
4111 }
4112 }
4113 ivar = build4 (ARRAY_REF, TREE_TYPE (new_var), iavar, sctx->idx,
4114 NULL_TREE, NULL_TREE);
4115 lvar = build4 (ARRAY_REF, TREE_TYPE (new_var), avar, sctx->lane,
4116 NULL_TREE, NULL_TREE);
4117 TREE_THIS_NOTRAP (ivar) = 1;
4118 TREE_THIS_NOTRAP (lvar) = 1;
4119 }
4120 if (DECL_P (new_var))
4121 {
4122 SET_DECL_VALUE_EXPR (new_var, lvar);
4123 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
4124 }
4125 return true;
4126 }
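/* As an illustration of the non-SIMT path above, with max_vf == 16 a
   privatized scalar "int x" gets a backing "omp simd array"
   (hypothetical temporary name D.avar):

     int D.avar[16];

   IVAR becomes D.avar[idx], LVAR becomes D.avar[lane], and x's
   DECL_VALUE_EXPR is set to LVAR, so the vectorizer can later turn
   lane/idx into the per-lane index.  */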
4127
4128 /* Helper function of lower_rec_input_clauses. For a reference used in
4129 a simd reduction, allocate an underlying variable for it to reference. */
4130
4131 static void
4132 handle_simd_reference (location_t loc, tree new_vard, gimple_seq *ilist)
4133 {
4134 tree z = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (new_vard)));
4135 if (TREE_CONSTANT (z))
4136 {
4137 z = create_tmp_var_raw (TREE_TYPE (TREE_TYPE (new_vard)),
4138 get_name (new_vard));
4139 gimple_add_tmp_var (z);
4140 TREE_ADDRESSABLE (z) = 1;
4141 z = build_fold_addr_expr_loc (loc, z);
4142 gimplify_assign (new_vard, z, ilist);
4143 }
4144 }
4145
4146 /* Helper function for lower_rec_input_clauses. Emit code into the ILIST
4147 sequence that computes (type) (tskred_temp[idx]). */
4148
4149 static tree
4150 task_reduction_read (gimple_seq *ilist, tree tskred_temp, tree type,
4151 unsigned idx)
4152 {
4153 unsigned HOST_WIDE_INT sz
4154 = tree_to_uhwi (TYPE_SIZE_UNIT (pointer_sized_int_node));
4155 tree r = build2 (MEM_REF, pointer_sized_int_node,
4156 tskred_temp, build_int_cst (TREE_TYPE (tskred_temp),
4157 idx * sz));
4158 tree v = create_tmp_var (pointer_sized_int_node);
4159 gimple *g = gimple_build_assign (v, r);
4160 gimple_seq_add_stmt (ilist, g);
4161 if (!useless_type_conversion_p (type, pointer_sized_int_node))
4162 {
4163 v = create_tmp_var (type);
4164 g = gimple_build_assign (v, NOP_EXPR, gimple_assign_lhs (g));
4165 gimple_seq_add_stmt (ilist, g);
4166 }
4167 return v;
4168 }
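/* As an illustration, on a 64-bit target task_reduction_read with
   IDX == 2 appends roughly

     v1 = MEM[(uintptr_t *) tskred_temp + 16B];
     v2 = (type) v1;   -- only when TYPE differs from uintptr_t

   to ILIST, i.e. a pointer-sized load of tskred_temp[2] plus an
   optional conversion.  */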
4169
4170 /* Generate code to implement the input clauses, FIRSTPRIVATE and COPYIN,
4171 from the receiver (aka child) side and initializers for REFERENCE_TYPE
4172 private variables. Initialization statements go in ILIST, while calls
4173 to destructors go in DLIST. */
4174
4175 static void
4176 lower_rec_input_clauses (tree clauses, gimple_seq *ilist, gimple_seq *dlist,
4177 omp_context *ctx, struct omp_for_data *fd)
4178 {
4179 tree c, copyin_seq, x, ptr;
4180 bool copyin_by_ref = false;
4181 bool lastprivate_firstprivate = false;
4182 bool reduction_omp_orig_ref = false;
4183 int pass;
4184 bool is_simd = (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
4185 && gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_SIMD);
4186 omplow_simd_context sctx = omplow_simd_context ();
4187 tree simt_lane = NULL_TREE, simtrec = NULL_TREE;
4188 tree ivar = NULL_TREE, lvar = NULL_TREE, uid = NULL_TREE;
4189 gimple_seq llist[4] = { };
4190 tree nonconst_simd_if = NULL_TREE;
4191
4192 copyin_seq = NULL;
4193 sctx.is_simt = is_simd && omp_find_clause (clauses, OMP_CLAUSE__SIMT_);
4194
4195 /* Set max_vf=1 (which will later enforce safelen=1) in simd loops
4196 with data sharing clauses referencing variable sized vars. That
4197 is unnecessarily hard to support and very unlikely to result in
4198 vectorized code anyway. */
4199 if (is_simd)
4200 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
4201 switch (OMP_CLAUSE_CODE (c))
4202 {
4203 case OMP_CLAUSE_LINEAR:
4204 if (OMP_CLAUSE_LINEAR_ARRAY (c))
4205 sctx.max_vf = 1;
4206 /* FALLTHRU */
4207 case OMP_CLAUSE_PRIVATE:
4208 case OMP_CLAUSE_FIRSTPRIVATE:
4209 case OMP_CLAUSE_LASTPRIVATE:
4210 if (is_variable_sized (OMP_CLAUSE_DECL (c)))
4211 sctx.max_vf = 1;
4212 else if (omp_is_reference (OMP_CLAUSE_DECL (c)))
4213 {
4214 tree rtype = TREE_TYPE (TREE_TYPE (OMP_CLAUSE_DECL (c)));
4215 if (!TREE_CONSTANT (TYPE_SIZE_UNIT (rtype)))
4216 sctx.max_vf = 1;
4217 }
4218 break;
4219 case OMP_CLAUSE_REDUCTION:
4220 case OMP_CLAUSE_IN_REDUCTION:
4221 if (TREE_CODE (OMP_CLAUSE_DECL (c)) == MEM_REF
4222 || is_variable_sized (OMP_CLAUSE_DECL (c)))
4223 sctx.max_vf = 1;
4224 else if (omp_is_reference (OMP_CLAUSE_DECL (c)))
4225 {
4226 tree rtype = TREE_TYPE (TREE_TYPE (OMP_CLAUSE_DECL (c)));
4227 if (!TREE_CONSTANT (TYPE_SIZE_UNIT (rtype)))
4228 sctx.max_vf = 1;
4229 }
4230 break;
4231 case OMP_CLAUSE_IF:
4232 if (integer_zerop (OMP_CLAUSE_IF_EXPR (c)))
4233 sctx.max_vf = 1;
4234 else if (TREE_CODE (OMP_CLAUSE_IF_EXPR (c)) != INTEGER_CST)
4235 nonconst_simd_if = OMP_CLAUSE_IF_EXPR (c);
4236 break;
4237 case OMP_CLAUSE_SIMDLEN:
4238 if (integer_onep (OMP_CLAUSE_SIMDLEN_EXPR (c)))
4239 sctx.max_vf = 1;
4240 break;
4241 case OMP_CLAUSE__CONDTEMP_:
4242 /* FIXME: lastprivate(conditional:) not handled for SIMT yet. */
4243 if (sctx.is_simt)
4244 sctx.max_vf = 1;
4245 break;
4246 default:
4247 continue;
4248 }
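/* As an illustration, assumed user code such as

     int vla[n];
     #pragma omp simd private (vla)
     for (i = 0; i < n; i++) ...

   references a variable-sized decl in a data sharing clause, so the
   scan above forces sctx.max_vf to 1 and the loop is effectively
   treated as safelen(1).  */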
4249
4250 /* Add a placeholder for simduid. */
4251 if (sctx.is_simt && maybe_ne (sctx.max_vf, 1U))
4252 sctx.simt_eargs.safe_push (NULL_TREE);
4253
4254 unsigned task_reduction_cnt = 0;
4255 unsigned task_reduction_cntorig = 0;
4256 unsigned task_reduction_cnt_full = 0;
4257 unsigned task_reduction_cntorig_full = 0;
4258 unsigned task_reduction_other_cnt = 0;
4259 tree tskred_atype = NULL_TREE, tskred_avar = NULL_TREE;
4260 tree tskred_base = NULL_TREE, tskred_temp = NULL_TREE;
4261 /* Do all the fixed sized types in the first pass, and the variable sized
4262 types in the second pass. This makes sure that the scalar arguments to
4263 the variable sized types are processed before we use them in the
4264 variable sized operations. For task reductions we use 4 passes: in the
4265 first two we ignore them, in the third we gather arguments for the
4266 GOMP_task_reduction_remap call, and in the last pass we actually handle
4267 the task reductions. */
4268 for (pass = 0; pass < ((task_reduction_cnt || task_reduction_other_cnt)
4269 ? 4 : 2); ++pass)
4270 {
4271 if (pass == 2 && task_reduction_cnt)
4272 {
4273 tskred_atype
4274 = build_array_type_nelts (ptr_type_node, task_reduction_cnt
4275 + task_reduction_cntorig);
4276 tskred_avar = create_tmp_var_raw (tskred_atype);
4277 gimple_add_tmp_var (tskred_avar);
4278 TREE_ADDRESSABLE (tskred_avar) = 1;
4279 task_reduction_cnt_full = task_reduction_cnt;
4280 task_reduction_cntorig_full = task_reduction_cntorig;
4281 }
4282 else if (pass == 3 && task_reduction_cnt)
4283 {
4284 x = builtin_decl_explicit (BUILT_IN_GOMP_TASK_REDUCTION_REMAP);
4285 gimple *g
4286 = gimple_build_call (x, 3, size_int (task_reduction_cnt),
4287 size_int (task_reduction_cntorig),
4288 build_fold_addr_expr (tskred_avar));
4289 gimple_seq_add_stmt (ilist, g);
4290 }
4291 if (pass == 3 && task_reduction_other_cnt)
4292 {
4293 /* For reduction clauses, build
4294 tskred_base = (void *) tskred_temp[2]
4295 + omp_get_thread_num () * tskred_temp[1]
4296 or if tskred_temp[1] is known to be constant, that constant
4297 directly. This is the start of the private reduction copy block
4298 for the current thread. */
4299 tree v = create_tmp_var (integer_type_node);
4300 x = builtin_decl_explicit (BUILT_IN_OMP_GET_THREAD_NUM);
4301 gimple *g = gimple_build_call (x, 0);
4302 gimple_call_set_lhs (g, v);
4303 gimple_seq_add_stmt (ilist, g);
4304 c = omp_find_clause (clauses, OMP_CLAUSE__REDUCTEMP_);
4305 tskred_temp = OMP_CLAUSE_DECL (c);
4306 if (is_taskreg_ctx (ctx))
4307 tskred_temp = lookup_decl (tskred_temp, ctx);
4308 tree v2 = create_tmp_var (sizetype);
4309 g = gimple_build_assign (v2, NOP_EXPR, v);
4310 gimple_seq_add_stmt (ilist, g);
4311 if (ctx->task_reductions[0])
4312 v = fold_convert (sizetype, ctx->task_reductions[0]);
4313 else
4314 v = task_reduction_read (ilist, tskred_temp, sizetype, 1);
4315 tree v3 = create_tmp_var (sizetype);
4316 g = gimple_build_assign (v3, MULT_EXPR, v2, v);
4317 gimple_seq_add_stmt (ilist, g);
4318 v = task_reduction_read (ilist, tskred_temp, ptr_type_node, 2);
4319 tskred_base = create_tmp_var (ptr_type_node);
4320 g = gimple_build_assign (tskred_base, POINTER_PLUS_EXPR, v, v3);
4321 gimple_seq_add_stmt (ilist, g);
4322 }
4323 task_reduction_cnt = 0;
4324 task_reduction_cntorig = 0;
4325 task_reduction_other_cnt = 0;
4326 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
4327 {
4328 enum omp_clause_code c_kind = OMP_CLAUSE_CODE (c);
4329 tree var, new_var;
4330 bool by_ref;
4331 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
4332 bool task_reduction_p = false;
4333 bool task_reduction_needs_orig_p = false;
4334 tree cond = NULL_TREE;
4335
4336 switch (c_kind)
4337 {
4338 case OMP_CLAUSE_PRIVATE:
4339 if (OMP_CLAUSE_PRIVATE_DEBUG (c))
4340 continue;
4341 break;
4342 case OMP_CLAUSE_SHARED:
4343 /* Ignore shared directives in teams construct inside
4344 of target construct. */
4345 if (gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS
4346 && !is_host_teams_ctx (ctx))
4347 continue;
4348 if (maybe_lookup_decl (OMP_CLAUSE_DECL (c), ctx) == NULL)
4349 {
4350 gcc_assert (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c)
4351 || is_global_var (OMP_CLAUSE_DECL (c)));
4352 continue;
4353 }
/* FALLTHRU */
4354 case OMP_CLAUSE_FIRSTPRIVATE:
4355 case OMP_CLAUSE_COPYIN:
4356 break;
4357 case OMP_CLAUSE_LINEAR:
4358 if (!OMP_CLAUSE_LINEAR_NO_COPYIN (c)
4359 && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c))
4360 lastprivate_firstprivate = true;
4361 break;
4362 case OMP_CLAUSE_REDUCTION:
4363 case OMP_CLAUSE_IN_REDUCTION:
4364 if (is_task_ctx (ctx) || OMP_CLAUSE_REDUCTION_TASK (c))
4365 {
4366 task_reduction_p = true;
4367 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION)
4368 {
4369 task_reduction_other_cnt++;
4370 if (pass == 2)
4371 continue;
4372 }
4373 else
4374 task_reduction_cnt++;
4375 if (OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c))
4376 {
4377 var = OMP_CLAUSE_DECL (c);
4378 /* If var is a global variable that isn't privatized
4379 in outer contexts, we don't need to look up the
4380 original address, it is always the address of the
4381 global variable itself. */
4382 if (!DECL_P (var)
4383 || omp_is_reference (var)
4384 || !is_global_var
4385 (maybe_lookup_decl_in_outer_ctx (var, ctx)))
4386 {
4387 task_reduction_needs_orig_p = true;
4388 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION)
4389 task_reduction_cntorig++;
4390 }
4391 }
4392 }
4393 else if (OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c))
4394 reduction_omp_orig_ref = true;
4395 break;
4396 case OMP_CLAUSE__REDUCTEMP_:
4397 if (!is_taskreg_ctx (ctx))
4398 continue;
4399 /* FALLTHRU */
4400 case OMP_CLAUSE__LOOPTEMP_:
4401 /* Handle _looptemp_/_reductemp_ clauses only on
4402 parallel/task. */
4403 if (fd)
4404 continue;
4405 break;
4406 case OMP_CLAUSE_LASTPRIVATE:
4407 if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
4408 {
4409 lastprivate_firstprivate = true;
4410 if (pass != 0 || is_taskloop_ctx (ctx))
4411 continue;
4412 }
4413 /* Even without a corresponding firstprivate, if the
4414 decl is a Fortran allocatable, it needs an outer var
4415 reference. */
4416 else if (pass == 0
4417 && lang_hooks.decls.omp_private_outer_ref
4418 (OMP_CLAUSE_DECL (c)))
4419 lastprivate_firstprivate = true;
4420 break;
4421 case OMP_CLAUSE_ALIGNED:
4422 if (pass != 1)
4423 continue;
4424 var = OMP_CLAUSE_DECL (c);
4425 if (TREE_CODE (TREE_TYPE (var)) == POINTER_TYPE
4426 && !is_global_var (var))
4427 {
4428 new_var = maybe_lookup_decl (var, ctx);
4429 if (new_var == NULL_TREE)
4430 new_var = maybe_lookup_decl_in_outer_ctx (var, ctx);
4431 x = builtin_decl_explicit (BUILT_IN_ASSUME_ALIGNED);
4432 tree alarg = omp_clause_aligned_alignment (c);
4433 alarg = fold_convert_loc (clause_loc, size_type_node, alarg);
4434 x = build_call_expr_loc (clause_loc, x, 2, new_var, alarg);
4435 x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x);
4436 x = build2 (MODIFY_EXPR, TREE_TYPE (new_var), new_var, x);
4437 gimplify_and_add (x, ilist);
4438 }
4439 else if (TREE_CODE (TREE_TYPE (var)) == ARRAY_TYPE
4440 && is_global_var (var))
4441 {
4442 tree ptype = build_pointer_type (TREE_TYPE (var)), t, t2;
4443 new_var = lookup_decl (var, ctx);
4444 t = maybe_lookup_decl_in_outer_ctx (var, ctx);
4445 t = build_fold_addr_expr_loc (clause_loc, t);
4446 t2 = builtin_decl_explicit (BUILT_IN_ASSUME_ALIGNED);
4447 tree alarg = omp_clause_aligned_alignment (c);
4448 alarg = fold_convert_loc (clause_loc, size_type_node, alarg);
4449 t = build_call_expr_loc (clause_loc, t2, 2, t, alarg);
4450 t = fold_convert_loc (clause_loc, ptype, t);
4451 x = create_tmp_var (ptype);
4452 t = build2 (MODIFY_EXPR, ptype, x, t);
4453 gimplify_and_add (t, ilist);
4454 t = build_simple_mem_ref_loc (clause_loc, x);
4455 SET_DECL_VALUE_EXPR (new_var, t);
4456 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
4457 }
4458 continue;
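/* As an illustration, for a non-global pointer with
   "#pragma omp simd aligned (p : 32)" the code above amounts to

     p = (T *) __builtin_assume_aligned (p, 32);

   emitted into ILIST, so later uses of the privatized pointer carry
   the 32-byte alignment guarantee.  */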
4459 case OMP_CLAUSE__CONDTEMP_:
4460 if (is_parallel_ctx (ctx)
4461 || (is_simd && !OMP_CLAUSE__CONDTEMP__ITER (c)))
4462 break;
4463 continue;
4464 default:
4465 continue;
4466 }
4467
4468 if (task_reduction_p != (pass >= 2))
4469 continue;
4470
4471 new_var = var = OMP_CLAUSE_DECL (c);
4472 if ((c_kind == OMP_CLAUSE_REDUCTION
4473 || c_kind == OMP_CLAUSE_IN_REDUCTION)
4474 && TREE_CODE (var) == MEM_REF)
4475 {
4476 var = TREE_OPERAND (var, 0);
4477 if (TREE_CODE (var) == POINTER_PLUS_EXPR)
4478 var = TREE_OPERAND (var, 0);
4479 if (TREE_CODE (var) == INDIRECT_REF
4480 || TREE_CODE (var) == ADDR_EXPR)
4481 var = TREE_OPERAND (var, 0);
4482 if (is_variable_sized (var))
4483 {
4484 gcc_assert (DECL_HAS_VALUE_EXPR_P (var));
4485 var = DECL_VALUE_EXPR (var);
4486 gcc_assert (TREE_CODE (var) == INDIRECT_REF);
4487 var = TREE_OPERAND (var, 0);
4488 gcc_assert (DECL_P (var));
4489 }
4490 new_var = var;
4491 }
4492 if (c_kind != OMP_CLAUSE_COPYIN)
4493 new_var = lookup_decl (var, ctx);
4494
4495 if (c_kind == OMP_CLAUSE_SHARED || c_kind == OMP_CLAUSE_COPYIN)
4496 {
4497 if (pass != 0)
4498 continue;
4499 }
4500 /* C/C++ array section reductions. */
4501 else if ((c_kind == OMP_CLAUSE_REDUCTION
4502 || c_kind == OMP_CLAUSE_IN_REDUCTION)
4503 && var != OMP_CLAUSE_DECL (c))
4504 {
4505 if (pass == 0)
4506 continue;
4507
4508 tree bias = TREE_OPERAND (OMP_CLAUSE_DECL (c), 1);
4509 tree orig_var = TREE_OPERAND (OMP_CLAUSE_DECL (c), 0);
4510
4511 if (TREE_CODE (orig_var) == POINTER_PLUS_EXPR)
4512 {
4513 tree b = TREE_OPERAND (orig_var, 1);
4514 b = maybe_lookup_decl (b, ctx);
4515 if (b == NULL)
4516 {
4517 b = TREE_OPERAND (orig_var, 1);
4518 b = maybe_lookup_decl_in_outer_ctx (b, ctx);
4519 }
4520 if (integer_zerop (bias))
4521 bias = b;
4522 else
4523 {
4524 bias = fold_convert_loc (clause_loc,
4525 TREE_TYPE (b), bias);
4526 bias = fold_build2_loc (clause_loc, PLUS_EXPR,
4527 TREE_TYPE (b), b, bias);
4528 }
4529 orig_var = TREE_OPERAND (orig_var, 0);
4530 }
4531 if (pass == 2)
4532 {
4533 tree out = maybe_lookup_decl_in_outer_ctx (var, ctx);
4534 if (is_global_var (out)
4535 && TREE_CODE (TREE_TYPE (out)) != POINTER_TYPE
4536 && (TREE_CODE (TREE_TYPE (out)) != REFERENCE_TYPE
4537 || (TREE_CODE (TREE_TYPE (TREE_TYPE (out)))
4538 != POINTER_TYPE)))
4539 x = var;
4540 else
4541 {
4542 bool by_ref = use_pointer_for_field (var, NULL);
4543 x = build_receiver_ref (var, by_ref, ctx);
4544 if (TREE_CODE (TREE_TYPE (var)) == REFERENCE_TYPE
4545 && (TREE_CODE (TREE_TYPE (TREE_TYPE (var)))
4546 == POINTER_TYPE))
4547 x = build_fold_addr_expr (x);
4548 }
4549 if (TREE_CODE (orig_var) == INDIRECT_REF)
4550 x = build_simple_mem_ref (x);
4551 else if (TREE_CODE (orig_var) == ADDR_EXPR)
4552 {
4553 if (var == TREE_OPERAND (orig_var, 0))
4554 x = build_fold_addr_expr (x);
4555 }
4556 bias = fold_convert (sizetype, bias);
4557 x = fold_convert (ptr_type_node, x);
4558 x = fold_build2_loc (clause_loc, POINTER_PLUS_EXPR,
4559 TREE_TYPE (x), x, bias);
4560 unsigned cnt = task_reduction_cnt - 1;
4561 if (!task_reduction_needs_orig_p)
4562 cnt += (task_reduction_cntorig_full
4563 - task_reduction_cntorig);
4564 else
4565 cnt = task_reduction_cntorig - 1;
4566 tree r = build4 (ARRAY_REF, ptr_type_node, tskred_avar,
4567 size_int (cnt), NULL_TREE, NULL_TREE);
4568 gimplify_assign (r, x, ilist);
4569 continue;
4570 }
4571
4572 if (TREE_CODE (orig_var) == INDIRECT_REF
4573 || TREE_CODE (orig_var) == ADDR_EXPR)
4574 orig_var = TREE_OPERAND (orig_var, 0);
4575 tree d = OMP_CLAUSE_DECL (c);
4576 tree type = TREE_TYPE (d);
4577 gcc_assert (TREE_CODE (type) == ARRAY_TYPE);
4578 tree v = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
4579 const char *name = get_name (orig_var);
4580 if (pass == 3)
4581 {
4582 tree xv = create_tmp_var (ptr_type_node);
4583 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION)
4584 {
4585 unsigned cnt = task_reduction_cnt - 1;
4586 if (!task_reduction_needs_orig_p)
4587 cnt += (task_reduction_cntorig_full
4588 - task_reduction_cntorig);
4589 else
4590 cnt = task_reduction_cntorig - 1;
4591 x = build4 (ARRAY_REF, ptr_type_node, tskred_avar,
4592 size_int (cnt), NULL_TREE, NULL_TREE);
4593
4594 gimple *g = gimple_build_assign (xv, x);
4595 gimple_seq_add_stmt (ilist, g);
4596 }
4597 else
4598 {
4599 unsigned int idx = *ctx->task_reduction_map->get (c);
4600 tree off;
4601 if (ctx->task_reductions[1 + idx])
4602 off = fold_convert (sizetype,
4603 ctx->task_reductions[1 + idx]);
4604 else
4605 off = task_reduction_read (ilist, tskred_temp, sizetype,
4606 7 + 3 * idx + 1);
4607 gimple *g = gimple_build_assign (xv, POINTER_PLUS_EXPR,
4608 tskred_base, off);
4609 gimple_seq_add_stmt (ilist, g);
4610 }
4611 x = fold_convert (build_pointer_type (boolean_type_node),
4612 xv);
4613 if (TREE_CONSTANT (v))
4614 x = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (x), x,
4615 TYPE_SIZE_UNIT (type));
4616 else
4617 {
4618 tree t = maybe_lookup_decl (v, ctx);
4619 if (t)
4620 v = t;
4621 else
4622 v = maybe_lookup_decl_in_outer_ctx (v, ctx);
4623 gimplify_expr (&v, ilist, NULL, is_gimple_val,
4624 fb_rvalue);
4625 t = fold_build2_loc (clause_loc, PLUS_EXPR,
4626 TREE_TYPE (v), v,
4627 build_int_cst (TREE_TYPE (v), 1));
4628 t = fold_build2_loc (clause_loc, MULT_EXPR,
4629 TREE_TYPE (v), t,
4630 TYPE_SIZE_UNIT (TREE_TYPE (type)));
4631 x = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (x), x, t);
4632 }
4633 cond = create_tmp_var (TREE_TYPE (x));
4634 gimplify_assign (cond, x, ilist);
4635 x = xv;
4636 }
4637 else if (TREE_CONSTANT (v))
4638 {
4639 x = create_tmp_var_raw (type, name);
4640 gimple_add_tmp_var (x);
4641 TREE_ADDRESSABLE (x) = 1;
4642 x = build_fold_addr_expr_loc (clause_loc, x);
4643 }
4644 else
4645 {
4646 tree atmp
4647 = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
4648 tree t = maybe_lookup_decl (v, ctx);
4649 if (t)
4650 v = t;
4651 else
4652 v = maybe_lookup_decl_in_outer_ctx (v, ctx);
4653 gimplify_expr (&v, ilist, NULL, is_gimple_val, fb_rvalue);
4654 t = fold_build2_loc (clause_loc, PLUS_EXPR,
4655 TREE_TYPE (v), v,
4656 build_int_cst (TREE_TYPE (v), 1));
4657 t = fold_build2_loc (clause_loc, MULT_EXPR,
4658 TREE_TYPE (v), t,
4659 TYPE_SIZE_UNIT (TREE_TYPE (type)));
4660 tree al = size_int (TYPE_ALIGN (TREE_TYPE (type)));
4661 x = build_call_expr_loc (clause_loc, atmp, 2, t, al);
4662 }
4663
4664 tree ptype = build_pointer_type (TREE_TYPE (type));
4665 x = fold_convert_loc (clause_loc, ptype, x);
4666 tree y = create_tmp_var (ptype, name);
4667 gimplify_assign (y, x, ilist);
4668 x = y;
4669 tree yb = y;
4670
4671 if (!integer_zerop (bias))
4672 {
4673 bias = fold_convert_loc (clause_loc, pointer_sized_int_node,
4674 bias);
4675 yb = fold_convert_loc (clause_loc, pointer_sized_int_node,
4676 x);
4677 yb = fold_build2_loc (clause_loc, MINUS_EXPR,
4678 pointer_sized_int_node, yb, bias);
4679 x = fold_convert_loc (clause_loc, TREE_TYPE (x), yb);
4680 yb = create_tmp_var (ptype, name);
4681 gimplify_assign (yb, x, ilist);
4682 x = yb;
4683 }
4684
4685 d = TREE_OPERAND (d, 0);
4686 if (TREE_CODE (d) == POINTER_PLUS_EXPR)
4687 d = TREE_OPERAND (d, 0);
4688 if (TREE_CODE (d) == ADDR_EXPR)
4689 {
4690 if (orig_var != var)
4691 {
4692 gcc_assert (is_variable_sized (orig_var));
4693 x = fold_convert_loc (clause_loc, TREE_TYPE (new_var),
4694 x);
4695 gimplify_assign (new_var, x, ilist);
4696 tree new_orig_var = lookup_decl (orig_var, ctx);
4697 tree t = build_fold_indirect_ref (new_var);
4698 DECL_IGNORED_P (new_var) = 0;
4699 TREE_THIS_NOTRAP (t) = 1;
4700 SET_DECL_VALUE_EXPR (new_orig_var, t);
4701 DECL_HAS_VALUE_EXPR_P (new_orig_var) = 1;
4702 }
4703 else
4704 {
4705 x = build2 (MEM_REF, TREE_TYPE (new_var), x,
4706 build_int_cst (ptype, 0));
4707 SET_DECL_VALUE_EXPR (new_var, x);
4708 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
4709 }
4710 }
4711 else
4712 {
4713 gcc_assert (orig_var == var);
4714 if (TREE_CODE (d) == INDIRECT_REF)
4715 {
4716 x = create_tmp_var (ptype, name);
4717 TREE_ADDRESSABLE (x) = 1;
4718 gimplify_assign (x, yb, ilist);
4719 x = build_fold_addr_expr_loc (clause_loc, x);
4720 }
4721 x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x);
4722 gimplify_assign (new_var, x, ilist);
4723 }
4724 /* GOMP_taskgroup_reduction_register memsets the whole
4725 array to zero. If the initializer is zero, we don't
4726 need to initialize it again; just mark it as ever
4727 used unconditionally, i.e. cond = true. */
4728 if (cond
4729 && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) == NULL_TREE
4730 && initializer_zerop (omp_reduction_init (c,
4731 TREE_TYPE (type))))
4732 {
4733 gimple *g = gimple_build_assign (build_simple_mem_ref (cond),
4734 boolean_true_node);
4735 gimple_seq_add_stmt (ilist, g);
4736 continue;
4737 }
4738 tree end = create_artificial_label (UNKNOWN_LOCATION);
4739 if (cond)
4740 {
4741 gimple *g;
4742 if (!is_parallel_ctx (ctx))
4743 {
4744 tree condv = create_tmp_var (boolean_type_node);
4745 g = gimple_build_assign (condv,
4746 build_simple_mem_ref (cond));
4747 gimple_seq_add_stmt (ilist, g);
4748 tree lab1 = create_artificial_label (UNKNOWN_LOCATION);
4749 g = gimple_build_cond (NE_EXPR, condv,
4750 boolean_false_node, end, lab1);
4751 gimple_seq_add_stmt (ilist, g);
4752 gimple_seq_add_stmt (ilist, gimple_build_label (lab1));
4753 }
4754 g = gimple_build_assign (build_simple_mem_ref (cond),
4755 boolean_true_node);
4756 gimple_seq_add_stmt (ilist, g);
4757 }
4758
4759 tree y1 = create_tmp_var (ptype);
4760 gimplify_assign (y1, y, ilist);
4761 tree i2 = NULL_TREE, y2 = NULL_TREE;
4762 tree body2 = NULL_TREE, end2 = NULL_TREE;
4763 tree y3 = NULL_TREE, y4 = NULL_TREE;
4764 if (task_reduction_needs_orig_p)
4765 {
4766 y3 = create_tmp_var (ptype);
4767 tree ref;
4768 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION)
4769 ref = build4 (ARRAY_REF, ptr_type_node, tskred_avar,
4770 size_int (task_reduction_cnt_full
4771 + task_reduction_cntorig - 1),
4772 NULL_TREE, NULL_TREE);
4773 else
4774 {
4775 unsigned int idx = *ctx->task_reduction_map->get (c);
4776 ref = task_reduction_read (ilist, tskred_temp, ptype,
4777 7 + 3 * idx);
4778 }
4779 gimplify_assign (y3, ref, ilist);
4780 }
4781 else if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) || is_simd)
4782 {
4783 if (pass != 3)
4784 {
4785 y2 = create_tmp_var (ptype);
4786 gimplify_assign (y2, y, ilist);
4787 }
4788 if (is_simd || OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c))
4789 {
4790 tree ref = build_outer_var_ref (var, ctx);
4791 /* For references, build_outer_var_ref already performs this. */
4792 if (TREE_CODE (d) == INDIRECT_REF)
4793 gcc_assert (omp_is_reference (var));
4794 else if (TREE_CODE (d) == ADDR_EXPR)
4795 ref = build_fold_addr_expr (ref);
4796 else if (omp_is_reference (var))
4797 ref = build_fold_addr_expr (ref);
4798 ref = fold_convert_loc (clause_loc, ptype, ref);
4799 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c)
4800 && OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c))
4801 {
4802 y3 = create_tmp_var (ptype);
4803 gimplify_assign (y3, unshare_expr (ref), ilist);
4804 }
4805 if (is_simd)
4806 {
4807 y4 = create_tmp_var (ptype);
4808 gimplify_assign (y4, ref, dlist);
4809 }
4810 }
4811 }
4812 tree i = create_tmp_var (TREE_TYPE (v));
4813 gimplify_assign (i, build_int_cst (TREE_TYPE (v), 0), ilist);
4814 tree body = create_artificial_label (UNKNOWN_LOCATION);
4815 gimple_seq_add_stmt (ilist, gimple_build_label (body));
4816 if (y2)
4817 {
4818 i2 = create_tmp_var (TREE_TYPE (v));
4819 gimplify_assign (i2, build_int_cst (TREE_TYPE (v), 0), dlist);
4820 body2 = create_artificial_label (UNKNOWN_LOCATION);
4821 end2 = create_artificial_label (UNKNOWN_LOCATION);
4822 gimple_seq_add_stmt (dlist, gimple_build_label (body2));
4823 }
4824 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
4825 {
4826 tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
4827 tree decl_placeholder
4828 = OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c);
4829 SET_DECL_VALUE_EXPR (decl_placeholder,
4830 build_simple_mem_ref (y1));
4831 DECL_HAS_VALUE_EXPR_P (decl_placeholder) = 1;
4832 SET_DECL_VALUE_EXPR (placeholder,
4833 y3 ? build_simple_mem_ref (y3)
4834 : error_mark_node);
4835 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
4836 x = lang_hooks.decls.omp_clause_default_ctor
4837 (c, build_simple_mem_ref (y1),
4838 y3 ? build_simple_mem_ref (y3) : NULL_TREE);
4839 if (x)
4840 gimplify_and_add (x, ilist);
4841 if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c))
4842 {
4843 gimple_seq tseq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c);
4844 lower_omp (&tseq, ctx);
4845 gimple_seq_add_seq (ilist, tseq);
4846 }
4847 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL;
4848 if (is_simd)
4849 {
4850 SET_DECL_VALUE_EXPR (decl_placeholder,
4851 build_simple_mem_ref (y2));
4852 SET_DECL_VALUE_EXPR (placeholder,
4853 build_simple_mem_ref (y4));
4854 gimple_seq tseq = OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c);
4855 lower_omp (&tseq, ctx);
4856 gimple_seq_add_seq (dlist, tseq);
4857 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
4858 }
4859 DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
4860 DECL_HAS_VALUE_EXPR_P (decl_placeholder) = 0;
4861 if (y2)
4862 {
4863 x = lang_hooks.decls.omp_clause_dtor
4864 (c, build_simple_mem_ref (y2));
4865 if (x)
4866 gimplify_and_add (x, dlist);
4867 }
4868 }
4869 else
4870 {
4871 x = omp_reduction_init (c, TREE_TYPE (type));
4872 enum tree_code code = OMP_CLAUSE_REDUCTION_CODE (c);
4873
4874 /* reduction(-:var) sums up the partial results, so it
4875 acts identically to reduction(+:var). */
4876 if (code == MINUS_EXPR)
4877 code = PLUS_EXPR;
4878
4879 gimplify_assign (build_simple_mem_ref (y1), x, ilist);
4880 if (is_simd)
4881 {
4882 x = build2 (code, TREE_TYPE (type),
4883 build_simple_mem_ref (y4),
4884 build_simple_mem_ref (y2));
4885 gimplify_assign (build_simple_mem_ref (y4), x, dlist);
4886 }
4887 }
4888 gimple *g
4889 = gimple_build_assign (y1, POINTER_PLUS_EXPR, y1,
4890 TYPE_SIZE_UNIT (TREE_TYPE (type)));
4891 gimple_seq_add_stmt (ilist, g);
4892 if (y3)
4893 {
4894 g = gimple_build_assign (y3, POINTER_PLUS_EXPR, y3,
4895 TYPE_SIZE_UNIT (TREE_TYPE (type)));
4896 gimple_seq_add_stmt (ilist, g);
4897 }
4898 g = gimple_build_assign (i, PLUS_EXPR, i,
4899 build_int_cst (TREE_TYPE (i), 1));
4900 gimple_seq_add_stmt (ilist, g);
4901 g = gimple_build_cond (LE_EXPR, i, v, body, end);
4902 gimple_seq_add_stmt (ilist, g);
4903 gimple_seq_add_stmt (ilist, gimple_build_label (end));
4904 if (y2)
4905 {
4906 g = gimple_build_assign (y2, POINTER_PLUS_EXPR, y2,
4907 TYPE_SIZE_UNIT (TREE_TYPE (type)));
4908 gimple_seq_add_stmt (dlist, g);
4909 if (y4)
4910 {
4911 g = gimple_build_assign
4912 (y4, POINTER_PLUS_EXPR, y4,
4913 TYPE_SIZE_UNIT (TREE_TYPE (type)));
4914 gimple_seq_add_stmt (dlist, g);
4915 }
4916 g = gimple_build_assign (i2, PLUS_EXPR, i2,
4917 build_int_cst (TREE_TYPE (i2), 1));
4918 gimple_seq_add_stmt (dlist, g);
4919 g = gimple_build_cond (LE_EXPR, i2, v, body2, end2);
4920 gimple_seq_add_stmt (dlist, g);
4921 gimple_seq_add_stmt (dlist, gimple_build_label (end2));
4922 }
4923 continue;
4924 }
4925 else if (pass == 2)
4926 {
4927 if (is_global_var (maybe_lookup_decl_in_outer_ctx (var, ctx)))
4928 x = var;
4929 else
4930 {
4931 bool by_ref = use_pointer_for_field (var, ctx);
4932 x = build_receiver_ref (var, by_ref, ctx);
4933 }
4934 if (!omp_is_reference (var))
4935 x = build_fold_addr_expr (x);
4936 x = fold_convert (ptr_type_node, x);
4937 unsigned cnt = task_reduction_cnt - 1;
4938 if (!task_reduction_needs_orig_p)
4939 cnt += task_reduction_cntorig_full - task_reduction_cntorig;
4940 else
4941 cnt = task_reduction_cntorig - 1;
4942 tree r = build4 (ARRAY_REF, ptr_type_node, tskred_avar,
4943 size_int (cnt), NULL_TREE, NULL_TREE);
4944 gimplify_assign (r, x, ilist);
4945 continue;
4946 }
4947 else if (pass == 3)
4948 {
4949 tree type = TREE_TYPE (new_var);
4950 if (!omp_is_reference (var))
4951 type = build_pointer_type (type);
4952 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION)
4953 {
4954 unsigned cnt = task_reduction_cnt - 1;
4955 if (!task_reduction_needs_orig_p)
4956 cnt += (task_reduction_cntorig_full
4957 - task_reduction_cntorig);
4958 else
4959 cnt = task_reduction_cntorig - 1;
4960 x = build4 (ARRAY_REF, ptr_type_node, tskred_avar,
4961 size_int (cnt), NULL_TREE, NULL_TREE);
4962 }
4963 else
4964 {
4965 unsigned int idx = *ctx->task_reduction_map->get (c);
4966 tree off;
4967 if (ctx->task_reductions[1 + idx])
4968 off = fold_convert (sizetype,
4969 ctx->task_reductions[1 + idx]);
4970 else
4971 off = task_reduction_read (ilist, tskred_temp, sizetype,
4972 7 + 3 * idx + 1);
4973 x = fold_build2 (POINTER_PLUS_EXPR, ptr_type_node,
4974 tskred_base, off);
4975 }
4976 x = fold_convert (type, x);
4977 tree t;
4978 if (omp_is_reference (var))
4979 {
4980 gimplify_assign (new_var, x, ilist);
4981 t = new_var;
4982 new_var = build_simple_mem_ref (new_var);
4983 }
4984 else
4985 {
4986 t = create_tmp_var (type);
4987 gimplify_assign (t, x, ilist);
4988 SET_DECL_VALUE_EXPR (new_var, build_simple_mem_ref (t));
4989 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
4990 }
4991 t = fold_convert (build_pointer_type (boolean_type_node), t);
4992 t = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (t), t,
4993 TYPE_SIZE_UNIT (TREE_TYPE (type)));
4994 cond = create_tmp_var (TREE_TYPE (t));
4995 gimplify_assign (cond, t, ilist);
4996 }
4997 else if (is_variable_sized (var))
4998 {
4999 /* For variable sized types, we need to allocate the
5000 actual storage here. Call alloca and store the
5001 result in the pointer decl that we created elsewhere. */
5002 if (pass == 0)
5003 continue;
5004
5005 if (c_kind != OMP_CLAUSE_FIRSTPRIVATE || !is_task_ctx (ctx))
5006 {
5007 gcall *stmt;
5008 tree tmp, atmp;
5009
5010 ptr = DECL_VALUE_EXPR (new_var);
5011 gcc_assert (TREE_CODE (ptr) == INDIRECT_REF);
5012 ptr = TREE_OPERAND (ptr, 0);
5013 gcc_assert (DECL_P (ptr));
5014 x = TYPE_SIZE_UNIT (TREE_TYPE (new_var));
5015
5016 /* void *tmp = __builtin_alloca_with_align (size, align); */
5017 atmp = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
5018 stmt = gimple_build_call (atmp, 2, x,
5019 size_int (DECL_ALIGN (var)));
5020 tmp = create_tmp_var_raw (ptr_type_node);
5021 gimple_add_tmp_var (tmp);
5022 gimple_call_set_lhs (stmt, tmp);
5023
5024 gimple_seq_add_stmt (ilist, stmt);
5025
5026 x = fold_convert_loc (clause_loc, TREE_TYPE (ptr), tmp);
5027 gimplify_assign (ptr, x, ilist);
5028 }
5029 }
5030 else if (omp_is_reference (var)
5031 && (c_kind != OMP_CLAUSE_FIRSTPRIVATE
5032 || !OMP_CLAUSE_FIRSTPRIVATE_NO_REFERENCE (c)))
5033 {
5034 /* For references that are being privatized for Fortran,
5035 allocate new backing storage for the new pointer
5036 variable. This allows us to avoid changing all the
5037 code that expects a pointer to something that expects
5038 a direct variable. */
5039 if (pass == 0)
5040 continue;
5041
5042 x = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (new_var)));
5043 if (c_kind == OMP_CLAUSE_FIRSTPRIVATE && is_task_ctx (ctx))
5044 {
5045 x = build_receiver_ref (var, false, ctx);
5046 x = build_fold_addr_expr_loc (clause_loc, x);
5047 }
5048 else if (TREE_CONSTANT (x))
5049 {
5050 /* For a reduction in a SIMD loop, defer adding the
5051 initialization of the reference, because if we decide
5052 to use a SIMD array for it, the initialization could cause
5053 an expansion ICE. Ditto for other privatization clauses. */
5054 if (is_simd)
5055 x = NULL_TREE;
5056 else
5057 {
5058 x = create_tmp_var_raw (TREE_TYPE (TREE_TYPE (new_var)),
5059 get_name (var));
5060 gimple_add_tmp_var (x);
5061 TREE_ADDRESSABLE (x) = 1;
5062 x = build_fold_addr_expr_loc (clause_loc, x);
5063 }
5064 }
5065 else
5066 {
5067 tree atmp
5068 = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
5069 tree rtype = TREE_TYPE (TREE_TYPE (new_var));
5070 tree al = size_int (TYPE_ALIGN (rtype));
5071 x = build_call_expr_loc (clause_loc, atmp, 2, x, al);
5072 }
5073
5074 if (x)
5075 {
5076 x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x);
5077 gimplify_assign (new_var, x, ilist);
5078 }
5079
5080 new_var = build_simple_mem_ref_loc (clause_loc, new_var);
5081 }
5082 else if ((c_kind == OMP_CLAUSE_REDUCTION
5083 || c_kind == OMP_CLAUSE_IN_REDUCTION)
5084 && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
5085 {
5086 if (pass == 0)
5087 continue;
5088 }
5089 else if (pass != 0)
5090 continue;
5091
5092 switch (OMP_CLAUSE_CODE (c))
5093 {
5094 case OMP_CLAUSE_SHARED:
5095 /* Ignore shared directives in teams construct inside
5096 target construct. */
5097 if (gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS
5098 && !is_host_teams_ctx (ctx))
5099 continue;
5100 /* Shared global vars are just accessed directly. */
5101 if (is_global_var (new_var))
5102 break;
5103 /* For taskloop firstprivate/lastprivate, represented
5104 as firstprivate and shared clause on the task, new_var
5105 is the firstprivate var. */
5106 if (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
5107 break;
5108 /* Set up the DECL_VALUE_EXPR for shared variables now. This
5109 needs to be delayed until after fixup_child_record_type so
5110 that we get the correct type during the dereference. */
5111 by_ref = use_pointer_for_field (var, ctx);
5112 x = build_receiver_ref (var, by_ref, ctx);
5113 SET_DECL_VALUE_EXPR (new_var, x);
5114 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
5115
5116 /* ??? If VAR is not passed by reference, and the variable
5117 hasn't been initialized yet, then we'll get a warning for
5118 the store into the omp_data_s structure. Ideally, we'd be
5119 able to notice this and not store anything at all, but
5120 we're generating code too early. Suppress the warning. */
5121 if (!by_ref)
5122 TREE_NO_WARNING (var) = 1;
5123 break;
5124
5125 case OMP_CLAUSE__CONDTEMP_:
5126 if (is_parallel_ctx (ctx))
5127 {
5128 x = build_receiver_ref (var, false, ctx);
5129 SET_DECL_VALUE_EXPR (new_var, x);
5130 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
5131 }
5132 else if (is_simd && !OMP_CLAUSE__CONDTEMP__ITER (c))
5133 {
5134 x = build_zero_cst (TREE_TYPE (var));
5135 goto do_private;
5136 }
5137 break;
5138
5139 case OMP_CLAUSE_LASTPRIVATE:
5140 if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
5141 break;
5142 /* FALLTHRU */
5143
5144 case OMP_CLAUSE_PRIVATE:
5145 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_PRIVATE)
5146 x = build_outer_var_ref (var, ctx);
5147 else if (OMP_CLAUSE_PRIVATE_OUTER_REF (c))
5148 {
5149 if (is_task_ctx (ctx))
5150 x = build_receiver_ref (var, false, ctx);
5151 else
5152 x = build_outer_var_ref (var, ctx, OMP_CLAUSE_PRIVATE);
5153 }
5154 else
5155 x = NULL;
5156 do_private:
5157 tree nx;
5158 bool copy_ctor;
5159 copy_ctor = false;
5160 nx = unshare_expr (new_var);
5161 if (is_simd
5162 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
5163 && OMP_CLAUSE_LASTPRIVATE_LOOP_IV (c))
5164 copy_ctor = true;
5165 if (copy_ctor)
5166 nx = lang_hooks.decls.omp_clause_copy_ctor (c, nx, x);
5167 else
5168 nx = lang_hooks.decls.omp_clause_default_ctor (c, nx, x);
5169 if (is_simd)
5170 {
5171 tree y = lang_hooks.decls.omp_clause_dtor (c, new_var);
5172 if ((TREE_ADDRESSABLE (new_var) || nx || y
5173 || (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
5174 && (gimple_omp_for_collapse (ctx->stmt) != 1
5175 || (gimple_omp_for_index (ctx->stmt, 0)
5176 != new_var)))
5177 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE__CONDTEMP_
5178 || omp_is_reference (var))
5179 && lower_rec_simd_input_clauses (new_var, ctx, &sctx,
5180 ivar, lvar))
5181 {
5182 if (omp_is_reference (var))
5183 {
5184 gcc_assert (TREE_CODE (new_var) == MEM_REF);
5185 tree new_vard = TREE_OPERAND (new_var, 0);
5186 gcc_assert (DECL_P (new_vard));
5187 SET_DECL_VALUE_EXPR (new_vard,
5188 build_fold_addr_expr (lvar));
5189 DECL_HAS_VALUE_EXPR_P (new_vard) = 1;
5190 }
5191
5192 if (nx)
5193 {
5194 tree iv = unshare_expr (ivar);
5195 if (copy_ctor)
5196 x = lang_hooks.decls.omp_clause_copy_ctor (c, iv,
5197 x);
5198 else
5199 x = lang_hooks.decls.omp_clause_default_ctor (c,
5200 iv,
5201 x);
5202 }
5203 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE__CONDTEMP_)
5204 {
5205 x = build2 (MODIFY_EXPR, TREE_TYPE (ivar),
5206 unshare_expr (ivar), x);
5207 nx = x;
5208 }
5209 if (nx && x)
5210 gimplify_and_add (x, &llist[0]);
5211 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
5212 && OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c))
5213 {
5214 tree v = new_var;
5215 if (!DECL_P (v))
5216 {
5217 gcc_assert (TREE_CODE (v) == MEM_REF);
5218 v = TREE_OPERAND (v, 0);
5219 gcc_assert (DECL_P (v));
5220 }
5221 v = *ctx->lastprivate_conditional_map->get (v);
5222 tree t = create_tmp_var (TREE_TYPE (v));
5223 tree z = build_zero_cst (TREE_TYPE (v));
5224 tree orig_v
5225 = build_outer_var_ref (var, ctx,
5226 OMP_CLAUSE_LASTPRIVATE);
5227 gimple_seq_add_stmt (dlist,
5228 gimple_build_assign (t, z));
5229 gcc_assert (DECL_HAS_VALUE_EXPR_P (v));
5230 tree civar = DECL_VALUE_EXPR (v);
5231 gcc_assert (TREE_CODE (civar) == ARRAY_REF);
5232 civar = unshare_expr (civar);
5233 TREE_OPERAND (civar, 1) = sctx.idx;
5234 x = build2 (MODIFY_EXPR, TREE_TYPE (t), t,
5235 unshare_expr (civar));
5236 x = build2 (COMPOUND_EXPR, TREE_TYPE (orig_v), x,
5237 build2 (MODIFY_EXPR, TREE_TYPE (orig_v),
5238 orig_v, unshare_expr (ivar)));
5239 tree cond = build2 (LT_EXPR, boolean_type_node, t,
5240 civar);
5241 x = build3 (COND_EXPR, void_type_node, cond, x,
5242 void_node);
5243 gimple_seq tseq = NULL;
5244 gimplify_and_add (x, &tseq);
5245 if (ctx->outer)
5246 lower_omp (&tseq, ctx->outer);
5247 gimple_seq_add_seq (&llist[1], tseq);
5248 }
5249 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
5250 && ctx->for_simd_scan_phase)
5251 {
5252 x = unshare_expr (ivar);
5253 tree orig_v
5254 = build_outer_var_ref (var, ctx,
5255 OMP_CLAUSE_LASTPRIVATE);
5256 x = lang_hooks.decls.omp_clause_assign_op (c, x,
5257 orig_v);
5258 gimplify_and_add (x, &llist[0]);
5259 }
5260 if (y)
5261 {
5262 y = lang_hooks.decls.omp_clause_dtor (c, ivar);
5263 if (y)
5264 gimplify_and_add (y, &llist[1]);
5265 }
5266 break;
5267 }
5268 if (omp_is_reference (var))
5269 {
5270 gcc_assert (TREE_CODE (new_var) == MEM_REF);
5271 tree new_vard = TREE_OPERAND (new_var, 0);
5272 gcc_assert (DECL_P (new_vard));
5273 tree type = TREE_TYPE (TREE_TYPE (new_vard));
5274 x = TYPE_SIZE_UNIT (type);
5275 if (TREE_CONSTANT (x))
5276 {
5277 x = create_tmp_var_raw (type, get_name (var));
5278 gimple_add_tmp_var (x);
5279 TREE_ADDRESSABLE (x) = 1;
5280 x = build_fold_addr_expr_loc (clause_loc, x);
5281 x = fold_convert_loc (clause_loc,
5282 TREE_TYPE (new_vard), x);
5283 gimplify_assign (new_vard, x, ilist);
5284 }
5285 }
5286 }
5287 if (nx)
5288 gimplify_and_add (nx, ilist);
5289 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
5290 && is_simd
5291 && ctx->for_simd_scan_phase)
5292 {
5293 tree orig_v = build_outer_var_ref (var, ctx,
5294 OMP_CLAUSE_LASTPRIVATE);
5295 x = lang_hooks.decls.omp_clause_assign_op (c, new_var,
5296 orig_v);
5297 gimplify_and_add (x, ilist);
5298 }
5299 /* FALLTHRU */
5300
5301 do_dtor:
5302 x = lang_hooks.decls.omp_clause_dtor (c, new_var);
5303 if (x)
5304 gimplify_and_add (x, dlist);
5305 break;
5306
5307 case OMP_CLAUSE_LINEAR:
5308 if (!OMP_CLAUSE_LINEAR_NO_COPYIN (c))
5309 goto do_firstprivate;
5310 if (OMP_CLAUSE_LINEAR_NO_COPYOUT (c))
5311 x = NULL;
5312 else
5313 x = build_outer_var_ref (var, ctx);
5314 goto do_private;
5315
5316 case OMP_CLAUSE_FIRSTPRIVATE:
5317 if (is_task_ctx (ctx))
5318 {
5319 if ((omp_is_reference (var)
5320 && !OMP_CLAUSE_FIRSTPRIVATE_NO_REFERENCE (c))
5321 || is_variable_sized (var))
5322 goto do_dtor;
5323 else if (is_global_var (maybe_lookup_decl_in_outer_ctx (var,
5324 ctx))
5325 || use_pointer_for_field (var, NULL))
5326 {
5327 x = build_receiver_ref (var, false, ctx);
5328 SET_DECL_VALUE_EXPR (new_var, x);
5329 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
5330 goto do_dtor;
5331 }
5332 }
5333 if (OMP_CLAUSE_FIRSTPRIVATE_NO_REFERENCE (c)
5334 && omp_is_reference (var))
5335 {
5336 x = build_outer_var_ref (var, ctx);
5337 gcc_assert (TREE_CODE (x) == MEM_REF
5338 && integer_zerop (TREE_OPERAND (x, 1)));
5339 x = TREE_OPERAND (x, 0);
5340 x = lang_hooks.decls.omp_clause_copy_ctor
5341 (c, unshare_expr (new_var), x);
5342 gimplify_and_add (x, ilist);
5343 goto do_dtor;
5344 }
5345 do_firstprivate:
5346 x = build_outer_var_ref (var, ctx);
5347 if (is_simd)
5348 {
5349 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
5350 && gimple_omp_for_combined_into_p (ctx->stmt))
5351 {
5352 tree t = OMP_CLAUSE_LINEAR_STEP (c);
5353 tree stept = TREE_TYPE (t);
5354 tree ct = omp_find_clause (clauses,
5355 OMP_CLAUSE__LOOPTEMP_);
5356 gcc_assert (ct);
5357 tree l = OMP_CLAUSE_DECL (ct);
5358 tree n1 = fd->loop.n1;
5359 tree step = fd->loop.step;
5360 tree itype = TREE_TYPE (l);
5361 if (POINTER_TYPE_P (itype))
5362 itype = signed_type_for (itype);
5363 l = fold_build2 (MINUS_EXPR, itype, l, n1);
5364 if (TYPE_UNSIGNED (itype)
5365 && fd->loop.cond_code == GT_EXPR)
5366 l = fold_build2 (TRUNC_DIV_EXPR, itype,
5367 fold_build1 (NEGATE_EXPR, itype, l),
5368 fold_build1 (NEGATE_EXPR,
5369 itype, step));
5370 else
5371 l = fold_build2 (TRUNC_DIV_EXPR, itype, l, step);
5372 t = fold_build2 (MULT_EXPR, stept,
5373 fold_convert (stept, l), t);
5374
5375 if (OMP_CLAUSE_LINEAR_ARRAY (c))
5376 {
5377 if (omp_is_reference (var))
5378 {
5379 gcc_assert (TREE_CODE (new_var) == MEM_REF);
5380 tree new_vard = TREE_OPERAND (new_var, 0);
5381 gcc_assert (DECL_P (new_vard));
5382 tree type = TREE_TYPE (TREE_TYPE (new_vard));
5383 nx = TYPE_SIZE_UNIT (type);
5384 if (TREE_CONSTANT (nx))
5385 {
5386 nx = create_tmp_var_raw (type,
5387 get_name (var));
5388 gimple_add_tmp_var (nx);
5389 TREE_ADDRESSABLE (nx) = 1;
5390 nx = build_fold_addr_expr_loc (clause_loc,
5391 nx);
5392 nx = fold_convert_loc (clause_loc,
5393 TREE_TYPE (new_vard),
5394 nx);
5395 gimplify_assign (new_vard, nx, ilist);
5396 }
5397 }
5398
5399 x = lang_hooks.decls.omp_clause_linear_ctor
5400 (c, new_var, x, t);
5401 gimplify_and_add (x, ilist);
5402 goto do_dtor;
5403 }
5404
5405 if (POINTER_TYPE_P (TREE_TYPE (x)))
5406 x = fold_build2 (POINTER_PLUS_EXPR,
5407 TREE_TYPE (x), x, t);
5408 else
5409 x = fold_build2 (PLUS_EXPR, TREE_TYPE (x), x, t);
5410 }
5411
5412 if ((OMP_CLAUSE_CODE (c) != OMP_CLAUSE_LINEAR
5413 || TREE_ADDRESSABLE (new_var)
5414 || omp_is_reference (var))
5415 && lower_rec_simd_input_clauses (new_var, ctx, &sctx,
5416 ivar, lvar))
5417 {
5418 if (omp_is_reference (var))
5419 {
5420 gcc_assert (TREE_CODE (new_var) == MEM_REF);
5421 tree new_vard = TREE_OPERAND (new_var, 0);
5422 gcc_assert (DECL_P (new_vard));
5423 SET_DECL_VALUE_EXPR (new_vard,
5424 build_fold_addr_expr (lvar));
5425 DECL_HAS_VALUE_EXPR_P (new_vard) = 1;
5426 }
5427 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR)
5428 {
5429 tree iv = create_tmp_var (TREE_TYPE (new_var));
5430 x = lang_hooks.decls.omp_clause_copy_ctor (c, iv, x);
5431 gimplify_and_add (x, ilist);
5432 gimple_stmt_iterator gsi
5433 = gsi_start_1 (gimple_omp_body_ptr (ctx->stmt));
5434 gassign *g
5435 = gimple_build_assign (unshare_expr (lvar), iv);
5436 gsi_insert_before_without_update (&gsi, g,
5437 GSI_SAME_STMT);
5438 tree t = OMP_CLAUSE_LINEAR_STEP (c);
5439 enum tree_code code = PLUS_EXPR;
5440 if (POINTER_TYPE_P (TREE_TYPE (new_var)))
5441 code = POINTER_PLUS_EXPR;
5442 g = gimple_build_assign (iv, code, iv, t);
5443 gsi_insert_before_without_update (&gsi, g,
5444 GSI_SAME_STMT);
5445 break;
5446 }
5447 x = lang_hooks.decls.omp_clause_copy_ctor
5448 (c, unshare_expr (ivar), x);
5449 gimplify_and_add (x, &llist[0]);
5450 x = lang_hooks.decls.omp_clause_dtor (c, ivar);
5451 if (x)
5452 gimplify_and_add (x, &llist[1]);
5453 break;
5454 }
5455 if (omp_is_reference (var))
5456 {
5457 gcc_assert (TREE_CODE (new_var) == MEM_REF);
5458 tree new_vard = TREE_OPERAND (new_var, 0);
5459 gcc_assert (DECL_P (new_vard));
5460 tree type = TREE_TYPE (TREE_TYPE (new_vard));
5461 nx = TYPE_SIZE_UNIT (type);
5462 if (TREE_CONSTANT (nx))
5463 {
5464 nx = create_tmp_var_raw (type, get_name (var));
5465 gimple_add_tmp_var (nx);
5466 TREE_ADDRESSABLE (nx) = 1;
5467 nx = build_fold_addr_expr_loc (clause_loc, nx);
5468 nx = fold_convert_loc (clause_loc,
5469 TREE_TYPE (new_vard), nx);
5470 gimplify_assign (new_vard, nx, ilist);
5471 }
5472 }
5473 }
5474 x = lang_hooks.decls.omp_clause_copy_ctor
5475 (c, unshare_expr (new_var), x);
5476 gimplify_and_add (x, ilist);
5477 goto do_dtor;
5478
5479 case OMP_CLAUSE__LOOPTEMP_:
5480 case OMP_CLAUSE__REDUCTEMP_:
5481 gcc_assert (is_taskreg_ctx (ctx));
5482 x = build_outer_var_ref (var, ctx);
5483 x = build2 (MODIFY_EXPR, TREE_TYPE (new_var), new_var, x);
5484 gimplify_and_add (x, ilist);
5485 break;
5486
5487 case OMP_CLAUSE_COPYIN:
5488 by_ref = use_pointer_for_field (var, NULL);
5489 x = build_receiver_ref (var, by_ref, ctx);
5490 x = lang_hooks.decls.omp_clause_assign_op (c, new_var, x);
5491 append_to_statement_list (x, &copyin_seq);
5492 copyin_by_ref |= by_ref;
5493 break;
5494
5495 case OMP_CLAUSE_REDUCTION:
5496 case OMP_CLAUSE_IN_REDUCTION:
5497 /* OpenACC reductions are initialized using the
5498 GOACC_REDUCTION internal function. */
5499 if (is_gimple_omp_oacc (ctx->stmt))
5500 break;
5501 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
5502 {
5503 tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
5504 gimple *tseq;
5505 tree ptype = TREE_TYPE (placeholder);
5506 if (cond)
5507 {
5508 x = error_mark_node;
5509 if (OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c)
5510 && !task_reduction_needs_orig_p)
5511 x = var;
5512 else if (OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c))
5513 {
5514 tree pptype = build_pointer_type (ptype);
5515 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION)
5516 x = build4 (ARRAY_REF, ptr_type_node, tskred_avar,
5517 size_int (task_reduction_cnt_full
5518 + task_reduction_cntorig - 1),
5519 NULL_TREE, NULL_TREE);
5520 else
5521 {
5522 unsigned int idx
5523 = *ctx->task_reduction_map->get (c);
5524 x = task_reduction_read (ilist, tskred_temp,
5525 pptype, 7 + 3 * idx);
5526 }
5527 x = fold_convert (pptype, x);
5528 x = build_simple_mem_ref (x);
5529 }
5530 }
5531 else
5532 {
5533 x = build_outer_var_ref (var, ctx);
5534
5535 if (omp_is_reference (var)
5536 && !useless_type_conversion_p (ptype, TREE_TYPE (x)))
5537 x = build_fold_addr_expr_loc (clause_loc, x);
5538 }
5539 SET_DECL_VALUE_EXPR (placeholder, x);
5540 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
5541 tree new_vard = new_var;
5542 if (omp_is_reference (var))
5543 {
5544 gcc_assert (TREE_CODE (new_var) == MEM_REF);
5545 new_vard = TREE_OPERAND (new_var, 0);
5546 gcc_assert (DECL_P (new_vard));
5547 }
5548 tree rvar = NULL_TREE, *rvarp = NULL, rvar2 = NULL_TREE;
5549 if (is_simd
5550 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
5551 && OMP_CLAUSE_REDUCTION_INSCAN (c))
5552 rvarp = &rvar;
5553 if (is_simd
5554 && lower_rec_simd_input_clauses (new_var, ctx, &sctx,
5555 ivar, lvar, rvarp,
5556 &rvar2))
5557 {
5558 if (new_vard == new_var)
5559 {
5560 gcc_assert (DECL_VALUE_EXPR (new_var) == lvar);
5561 SET_DECL_VALUE_EXPR (new_var, ivar);
5562 }
5563 else
5564 {
5565 SET_DECL_VALUE_EXPR (new_vard,
5566 build_fold_addr_expr (ivar));
5567 DECL_HAS_VALUE_EXPR_P (new_vard) = 1;
5568 }
5569 x = lang_hooks.decls.omp_clause_default_ctor
5570 (c, unshare_expr (ivar),
5571 build_outer_var_ref (var, ctx));
5572 if (rvarp && ctx->for_simd_scan_phase)
5573 {
5574 if (x)
5575 gimplify_and_add (x, &llist[0]);
5576 x = lang_hooks.decls.omp_clause_dtor (c, ivar);
5577 if (x)
5578 gimplify_and_add (x, &llist[1]);
5579 break;
5580 }
5581 else if (rvarp)
5582 {
5583 if (x)
5584 {
5585 gimplify_and_add (x, &llist[0]);
5586
5587 tree ivar2 = unshare_expr (lvar);
5588 TREE_OPERAND (ivar2, 1) = sctx.idx;
5589 x = lang_hooks.decls.omp_clause_default_ctor
5590 (c, ivar2, build_outer_var_ref (var, ctx));
5591 gimplify_and_add (x, &llist[0]);
5592
5593 if (rvar2)
5594 {
5595 x = lang_hooks.decls.omp_clause_default_ctor
5596 (c, unshare_expr (rvar2),
5597 build_outer_var_ref (var, ctx));
5598 gimplify_and_add (x, &llist[0]);
5599 }
5600
5601 /* For types that need construction, add another
5602 private var which will be default constructed
5603 and optionally initialized with
5604 OMP_CLAUSE_REDUCTION_GIMPLE_INIT; in the loop
5605 we then want to assign this value instead of
5606 constructing and destructing it in each
5607 iteration. */
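/* An illustrative sketch (hypothetical source, not from any
   testcase): for a C++ type T with a user-defined default
   constructor and assignment under
   "#pragma omp simd reduction (inscan, +:sum)", the temporary
   NV created below is constructed once, so each iteration can
   assign from it rather than run T::T () and T::~T (). */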
5608 tree nv = create_tmp_var_raw (TREE_TYPE (ivar));
5609 gimple_add_tmp_var (nv);
5610 ctx->cb.decl_map->put (TREE_OPERAND (rvar2
5611 ? rvar2
5612 : ivar, 0),
5613 nv);
5614 x = lang_hooks.decls.omp_clause_default_ctor
5615 (c, nv, build_outer_var_ref (var, ctx));
5616 gimplify_and_add (x, ilist);
5617
5618 if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c))
5619 {
5620 tseq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c);
5621 x = DECL_VALUE_EXPR (new_vard);
5622 tree vexpr = nv;
5623 if (new_vard != new_var)
5624 vexpr = build_fold_addr_expr (nv);
5625 SET_DECL_VALUE_EXPR (new_vard, vexpr);
5626 lower_omp (&tseq, ctx);
5627 SET_DECL_VALUE_EXPR (new_vard, x);
5628 gimple_seq_add_seq (ilist, tseq);
5629 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL;
5630 }
5631
5632 x = lang_hooks.decls.omp_clause_dtor (c, nv);
5633 if (x)
5634 gimplify_and_add (x, dlist);
5635 }
5636
5637 tree ref = build_outer_var_ref (var, ctx);
5638 x = unshare_expr (ivar);
5639 x = lang_hooks.decls.omp_clause_assign_op (c, x,
5640 ref);
5641 gimplify_and_add (x, &llist[0]);
5642
5643 ref = build_outer_var_ref (var, ctx);
5644 x = lang_hooks.decls.omp_clause_assign_op (c, ref,
5645 rvar);
5646 gimplify_and_add (x, &llist[3]);
5647
5648 DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
5649 if (new_vard == new_var)
5650 SET_DECL_VALUE_EXPR (new_var, lvar);
5651 else
5652 SET_DECL_VALUE_EXPR (new_vard,
5653 build_fold_addr_expr (lvar));
5654
5655 x = lang_hooks.decls.omp_clause_dtor (c, ivar);
5656 if (x)
5657 gimplify_and_add (x, &llist[1]);
5658
5659 tree ivar2 = unshare_expr (lvar);
5660 TREE_OPERAND (ivar2, 1) = sctx.idx;
5661 x = lang_hooks.decls.omp_clause_dtor (c, ivar2);
5662 if (x)
5663 gimplify_and_add (x, &llist[1]);
5664
5665 if (rvar2)
5666 {
5667 x = lang_hooks.decls.omp_clause_dtor (c, rvar2);
5668 if (x)
5669 gimplify_and_add (x, &llist[1]);
5670 }
5671 break;
5672 }
5673 if (x)
5674 gimplify_and_add (x, &llist[0]);
5675 if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c))
5676 {
5677 tseq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c);
5678 lower_omp (&tseq, ctx);
5679 gimple_seq_add_seq (&llist[0], tseq);
5680 }
5681 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL;
5682 tseq = OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c);
5683 lower_omp (&tseq, ctx);
5684 gimple_seq_add_seq (&llist[1], tseq);
5685 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
5686 DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
5687 if (new_vard == new_var)
5688 SET_DECL_VALUE_EXPR (new_var, lvar);
5689 else
5690 SET_DECL_VALUE_EXPR (new_vard,
5691 build_fold_addr_expr (lvar));
5692 x = lang_hooks.decls.omp_clause_dtor (c, ivar);
5693 if (x)
5694 gimplify_and_add (x, &llist[1]);
5695 break;
5696 }
5697 /* If this is a reference to a constant-size reduction var
5698 with a placeholder, we haven't emitted the initializer
5699 for it because that is undesirable if SIMD arrays are used.
5700 But if they aren't used, we need to emit the deferred
5701 initialization now. */
5702 else if (omp_is_reference (var) && is_simd)
5703 handle_simd_reference (clause_loc, new_vard, ilist);
5704
5705 tree lab2 = NULL_TREE;
5706 if (cond)
5707 {
5708 gimple *g;
5709 if (!is_parallel_ctx (ctx))
5710 {
5711 tree condv = create_tmp_var (boolean_type_node);
5712 tree m = build_simple_mem_ref (cond);
5713 g = gimple_build_assign (condv, m);
5714 gimple_seq_add_stmt (ilist, g);
5715 tree lab1
5716 = create_artificial_label (UNKNOWN_LOCATION);
5717 lab2 = create_artificial_label (UNKNOWN_LOCATION);
5718 g = gimple_build_cond (NE_EXPR, condv,
5719 boolean_false_node,
5720 lab2, lab1);
5721 gimple_seq_add_stmt (ilist, g);
5722 gimple_seq_add_stmt (ilist,
5723 gimple_build_label (lab1));
5724 }
5725 g = gimple_build_assign (build_simple_mem_ref (cond),
5726 boolean_true_node);
5727 gimple_seq_add_stmt (ilist, g);
5728 }
5729 x = lang_hooks.decls.omp_clause_default_ctor
5730 (c, unshare_expr (new_var),
5731 cond ? NULL_TREE
5732 : build_outer_var_ref (var, ctx));
5733 if (x)
5734 gimplify_and_add (x, ilist);
5735
5736 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
5737 && OMP_CLAUSE_REDUCTION_INSCAN (c))
5738 {
5739 if (ctx->for_simd_scan_phase)
5740 goto do_dtor;
5741 if (x || (!is_simd
5742 && OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c)))
5743 {
5744 tree nv = create_tmp_var_raw (TREE_TYPE (new_var));
5745 gimple_add_tmp_var (nv);
5746 ctx->cb.decl_map->put (new_vard, nv);
5747 x = lang_hooks.decls.omp_clause_default_ctor
5748 (c, nv, build_outer_var_ref (var, ctx));
5749 if (x)
5750 gimplify_and_add (x, ilist);
5751 if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c))
5752 {
5753 tseq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c);
5754 tree vexpr = nv;
5755 if (new_vard != new_var)
5756 vexpr = build_fold_addr_expr (nv);
5757 SET_DECL_VALUE_EXPR (new_vard, vexpr);
5758 DECL_HAS_VALUE_EXPR_P (new_vard) = 1;
5759 lower_omp (&tseq, ctx);
5760 SET_DECL_VALUE_EXPR (new_vard, NULL_TREE);
5761 DECL_HAS_VALUE_EXPR_P (new_vard) = 0;
5762 gimple_seq_add_seq (ilist, tseq);
5763 }
5764 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL;
5765 if (is_simd && ctx->scan_exclusive)
5766 {
5767 tree nv2
5768 = create_tmp_var_raw (TREE_TYPE (new_var));
5769 gimple_add_tmp_var (nv2);
5770 ctx->cb.decl_map->put (nv, nv2);
5771 x = lang_hooks.decls.omp_clause_default_ctor
5772 (c, nv2, build_outer_var_ref (var, ctx));
5773 gimplify_and_add (x, ilist);
5774 x = lang_hooks.decls.omp_clause_dtor (c, nv2);
5775 if (x)
5776 gimplify_and_add (x, dlist);
5777 }
5778 x = lang_hooks.decls.omp_clause_dtor (c, nv);
5779 if (x)
5780 gimplify_and_add (x, dlist);
5781 }
5782 else if (is_simd
5783 && ctx->scan_exclusive
5784 && TREE_ADDRESSABLE (TREE_TYPE (new_var)))
5785 {
5786 tree nv2 = create_tmp_var_raw (TREE_TYPE (new_var));
5787 gimple_add_tmp_var (nv2);
5788 ctx->cb.decl_map->put (new_vard, nv2);
5789 x = lang_hooks.decls.omp_clause_dtor (c, nv2);
5790 if (x)
5791 gimplify_and_add (x, dlist);
5792 }
5793 DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
5794 goto do_dtor;
5795 }
5796
5797 if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c))
5798 {
5799 tseq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c);
5800 lower_omp (&tseq, ctx);
5801 gimple_seq_add_seq (ilist, tseq);
5802 }
5803 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL;
5804 if (is_simd)
5805 {
5806 tseq = OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c);
5807 lower_omp (&tseq, ctx);
5808 gimple_seq_add_seq (dlist, tseq);
5809 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
5810 }
5811 DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
5812 if (cond)
5813 {
5814 if (lab2)
5815 gimple_seq_add_stmt (ilist, gimple_build_label (lab2));
5816 break;
5817 }
5818 goto do_dtor;
5819 }
5820 else
5821 {
5822 x = omp_reduction_init (c, TREE_TYPE (new_var));
5823 gcc_assert (TREE_CODE (TREE_TYPE (new_var)) != ARRAY_TYPE);
5824 enum tree_code code = OMP_CLAUSE_REDUCTION_CODE (c);
5825
5826 if (cond)
5827 {
5828 gimple *g;
5829 tree lab2 = NULL_TREE;
5830 /* GOMP_taskgroup_reduction_register memsets the whole
5831 array to zero. If the initializer is zero, we don't
5832 need to initialize it again, just mark it as ever
5833 used unconditionally, i.e. cond = true. */
5834 if (initializer_zerop (x))
5835 {
5836 g = gimple_build_assign (build_simple_mem_ref (cond),
5837 boolean_true_node);
5838 gimple_seq_add_stmt (ilist, g);
5839 break;
5840 }
5841
5842 /* Otherwise, emit
5843 if (!cond) { cond = true; new_var = x; } */
5844 if (!is_parallel_ctx (ctx))
5845 {
5846 tree condv = create_tmp_var (boolean_type_node);
5847 tree m = build_simple_mem_ref (cond);
5848 g = gimple_build_assign (condv, m);
5849 gimple_seq_add_stmt (ilist, g);
5850 tree lab1
5851 = create_artificial_label (UNKNOWN_LOCATION);
5852 lab2 = create_artificial_label (UNKNOWN_LOCATION);
5853 g = gimple_build_cond (NE_EXPR, condv,
5854 boolean_false_node,
5855 lab2, lab1);
5856 gimple_seq_add_stmt (ilist, g);
5857 gimple_seq_add_stmt (ilist,
5858 gimple_build_label (lab1));
5859 }
5860 g = gimple_build_assign (build_simple_mem_ref (cond),
5861 boolean_true_node);
5862 gimple_seq_add_stmt (ilist, g);
5863 gimplify_assign (new_var, x, ilist);
5864 if (lab2)
5865 gimple_seq_add_stmt (ilist, gimple_build_label (lab2));
5866 break;
5867 }
5868
5869 /* reduction(-:var) sums up the partial results, so it
5870 acts identically to reduction(+:var). */
5871 if (code == MINUS_EXPR)
5872 code = PLUS_EXPR;
5873
5874 tree new_vard = new_var;
5875 if (is_simd && omp_is_reference (var))
5876 {
5877 gcc_assert (TREE_CODE (new_var) == MEM_REF);
5878 new_vard = TREE_OPERAND (new_var, 0);
5879 gcc_assert (DECL_P (new_vard));
5880 }
5881 tree rvar = NULL_TREE, *rvarp = NULL, rvar2 = NULL_TREE;
5882 if (is_simd
5883 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
5884 && OMP_CLAUSE_REDUCTION_INSCAN (c))
5885 rvarp = &rvar;
5886 if (is_simd
5887 && lower_rec_simd_input_clauses (new_var, ctx, &sctx,
5888 ivar, lvar, rvarp,
5889 &rvar2))
5890 {
5891 if (new_vard != new_var)
5892 {
5893 SET_DECL_VALUE_EXPR (new_vard,
5894 build_fold_addr_expr (lvar));
5895 DECL_HAS_VALUE_EXPR_P (new_vard) = 1;
5896 }
5897
5898 tree ref = build_outer_var_ref (var, ctx);
5899
5900 if (rvarp)
5901 {
5902 if (ctx->for_simd_scan_phase)
5903 break;
5904 gimplify_assign (ivar, ref, &llist[0]);
5905 ref = build_outer_var_ref (var, ctx);
5906 gimplify_assign (ref, rvar, &llist[3]);
5907 break;
5908 }
5909
5910 gimplify_assign (unshare_expr (ivar), x, &llist[0]);
5911
5912 if (sctx.is_simt)
5913 {
5914 if (!simt_lane)
5915 simt_lane = create_tmp_var (unsigned_type_node);
5916 x = build_call_expr_internal_loc
5917 (UNKNOWN_LOCATION, IFN_GOMP_SIMT_XCHG_BFLY,
5918 TREE_TYPE (ivar), 2, ivar, simt_lane);
5919 x = build2 (code, TREE_TYPE (ivar), ivar, x);
5920 gimplify_assign (ivar, x, &llist[2]);
5921 }
5922 x = build2 (code, TREE_TYPE (ref), ref, ivar);
5923 ref = build_outer_var_ref (var, ctx);
5924 gimplify_assign (ref, x, &llist[1]);
5925
5926 }
5927 else
5928 {
5929 if (omp_is_reference (var) && is_simd)
5930 handle_simd_reference (clause_loc, new_vard, ilist);
5931 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
5932 && OMP_CLAUSE_REDUCTION_INSCAN (c))
5933 break;
5934 gimplify_assign (new_var, x, ilist);
5935 if (is_simd)
5936 {
5937 tree ref = build_outer_var_ref (var, ctx);
5938
5939 x = build2 (code, TREE_TYPE (ref), ref, new_var);
5940 ref = build_outer_var_ref (var, ctx);
5941 gimplify_assign (ref, x, dlist);
5942 }
5943 }
5944 }
5945 break;
5946
5947 default:
5948 gcc_unreachable ();
5949 }
5950 }
5951 }
5952 if (tskred_avar)
5953 {
5954 tree clobber = build_clobber (TREE_TYPE (tskred_avar));
5955 gimple_seq_add_stmt (ilist, gimple_build_assign (tskred_avar, clobber));
5956 }
5957
5958 if (known_eq (sctx.max_vf, 1U))
5959 {
5960 sctx.is_simt = false;
5961 if (ctx->lastprivate_conditional_map)
5962 {
5963 if (gimple_omp_for_combined_into_p (ctx->stmt))
5964 {
5965 /* Signal to lower_omp_1 that it should use the parent context. */
5966 ctx->combined_into_simd_safelen1 = true;
5967 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
5968 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
5969 && OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c))
5970 {
5971 tree o = lookup_decl (OMP_CLAUSE_DECL (c), ctx);
5972 omp_context *outer = ctx->outer;
5973 if (gimple_code (outer->stmt) == GIMPLE_OMP_SCAN)
5974 outer = outer->outer;
5975 tree *v = ctx->lastprivate_conditional_map->get (o);
5976 tree po = lookup_decl (OMP_CLAUSE_DECL (c), outer);
5977 tree *pv = outer->lastprivate_conditional_map->get (po);
5978 *v = *pv;
5979 }
5980 }
5981 else
5982 {
5983 /* When not vectorized, treat lastprivate(conditional:) like
5984 normal lastprivate, as there will be just one simd lane
5985 writing the privatized variable. */
5986 delete ctx->lastprivate_conditional_map;
5987 ctx->lastprivate_conditional_map = NULL;
5988 }
5989 }
5990 }
5991
5992 if (nonconst_simd_if)
5993 {
5994 if (sctx.lane == NULL_TREE)
5995 {
5996 sctx.idx = create_tmp_var (unsigned_type_node);
5997 sctx.lane = create_tmp_var (unsigned_type_node);
5998 }
5999 /* FIXME: For now. */
6000 sctx.is_simt = false;
6001 }
6002
6003 if (sctx.lane || sctx.is_simt)
6004 {
6005 uid = create_tmp_var (ptr_type_node, "simduid");
6006 /* Don't want uninit warnings on simduid: it is always uninitialized,
6007 since we use it not for its value, but only for its DECL_UID. */
6008 TREE_NO_WARNING (uid) = 1;
6009 c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__SIMDUID_);
6010 OMP_CLAUSE__SIMDUID__DECL (c) = uid;
6011 OMP_CLAUSE_CHAIN (c) = gimple_omp_for_clauses (ctx->stmt);
6012 gimple_omp_for_set_clauses (ctx->stmt, c);
6013 }
6014 /* Emit calls denoting privatized variables and initializing a pointer to
6015 a structure holding private variables as fields after the ompdevlow pass. */
6016 if (sctx.is_simt)
6017 {
6018 sctx.simt_eargs[0] = uid;
6019 gimple *g
6020 = gimple_build_call_internal_vec (IFN_GOMP_SIMT_ENTER, sctx.simt_eargs);
6021 gimple_call_set_lhs (g, uid);
6022 gimple_seq_add_stmt (ilist, g);
6023 sctx.simt_eargs.release ();
6024
6025 simtrec = create_tmp_var (ptr_type_node, ".omp_simt");
6026 g = gimple_build_call_internal (IFN_GOMP_SIMT_ENTER_ALLOC, 1, uid);
6027 gimple_call_set_lhs (g, simtrec);
6028 gimple_seq_add_stmt (ilist, g);
6029 }
6030 if (sctx.lane)
6031 {
6032 gimple *g = gimple_build_call_internal (IFN_GOMP_SIMD_LANE,
6033 2 + (nonconst_simd_if != NULL),
6034 uid, integer_zero_node,
6035 nonconst_simd_if);
6036 gimple_call_set_lhs (g, sctx.lane);
6037 gimple_stmt_iterator gsi = gsi_start_1 (gimple_omp_body_ptr (ctx->stmt));
6038 gsi_insert_before_without_update (&gsi, g, GSI_SAME_STMT);
6039 g = gimple_build_assign (sctx.lane, INTEGER_CST,
6040 build_int_cst (unsigned_type_node, 0));
6041 gimple_seq_add_stmt (ilist, g);
6042 if (sctx.lastlane)
6043 {
6044 g = gimple_build_call_internal (IFN_GOMP_SIMD_LAST_LANE,
6045 2, uid, sctx.lane);
6046 gimple_call_set_lhs (g, sctx.lastlane);
6047 gimple_seq_add_stmt (dlist, g);
6048 gimple_seq_add_seq (dlist, llist[3]);
6049 }
6050 /* Emit reductions across SIMT lanes in log_2(simt_vf) steps. */
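/* Roughly (a sketch in terms of the temporaries created just below;
   sctx.idx is also zeroed for the privatized SIMD arrays):

     simt_vf = GOMP_SIMT_VF ();
     simt_lane = 1;
     goto header;
   body:
     <llist[2]: ivar = ivar OP GOMP_SIMT_XCHG_BFLY (ivar, simt_lane)>
     simt_lane = simt_lane << 1;
   header:
     if (simt_lane < simt_vf) goto body; else goto end;
   end:;  */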
6051 if (llist[2])
6052 {
6053 tree simt_vf = create_tmp_var (unsigned_type_node);
6054 g = gimple_build_call_internal (IFN_GOMP_SIMT_VF, 0);
6055 gimple_call_set_lhs (g, simt_vf);
6056 gimple_seq_add_stmt (dlist, g);
6057
6058 tree t = build_int_cst (unsigned_type_node, 1);
6059 g = gimple_build_assign (simt_lane, INTEGER_CST, t);
6060 gimple_seq_add_stmt (dlist, g);
6061
6062 t = build_int_cst (unsigned_type_node, 0);
6063 g = gimple_build_assign (sctx.idx, INTEGER_CST, t);
6064 gimple_seq_add_stmt (dlist, g);
6065
6066 tree body = create_artificial_label (UNKNOWN_LOCATION);
6067 tree header = create_artificial_label (UNKNOWN_LOCATION);
6068 tree end = create_artificial_label (UNKNOWN_LOCATION);
6069 gimple_seq_add_stmt (dlist, gimple_build_goto (header));
6070 gimple_seq_add_stmt (dlist, gimple_build_label (body));
6071
6072 gimple_seq_add_seq (dlist, llist[2]);
6073
6074 g = gimple_build_assign (simt_lane, LSHIFT_EXPR, simt_lane, integer_one_node);
6075 gimple_seq_add_stmt (dlist, g);
6076
6077 gimple_seq_add_stmt (dlist, gimple_build_label (header));
6078 g = gimple_build_cond (LT_EXPR, simt_lane, simt_vf, body, end);
6079 gimple_seq_add_stmt (dlist, g);
6080
6081 gimple_seq_add_stmt (dlist, gimple_build_label (end));
6082 }
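/* Wrap the per-lane constructor sequence (llist[0], emitted into
   ILIST) and destructor sequence (llist[1], emitted into DLIST) in a
   loop over all lanes of the privatized SIMD arrays, roughly:

     vf = GOMP_SIMD_VF (simduid);
     for (sctx.idx = 0; sctx.idx < vf; sctx.idx++)
       <llist[i]>;  */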
6083 for (int i = 0; i < 2; i++)
6084 if (llist[i])
6085 {
6086 tree vf = create_tmp_var (unsigned_type_node);
6087 g = gimple_build_call_internal (IFN_GOMP_SIMD_VF, 1, uid);
6088 gimple_call_set_lhs (g, vf);
6089 gimple_seq *seq = i == 0 ? ilist : dlist;
6090 gimple_seq_add_stmt (seq, g);
6091 tree t = build_int_cst (unsigned_type_node, 0);
6092 g = gimple_build_assign (sctx.idx, INTEGER_CST, t);
6093 gimple_seq_add_stmt (seq, g);
6094 tree body = create_artificial_label (UNKNOWN_LOCATION);
6095 tree header = create_artificial_label (UNKNOWN_LOCATION);
6096 tree end = create_artificial_label (UNKNOWN_LOCATION);
6097 gimple_seq_add_stmt (seq, gimple_build_goto (header));
6098 gimple_seq_add_stmt (seq, gimple_build_label (body));
6099 gimple_seq_add_seq (seq, llist[i]);
6100 t = build_int_cst (unsigned_type_node, 1);
6101 g = gimple_build_assign (sctx.idx, PLUS_EXPR, sctx.idx, t);
6102 gimple_seq_add_stmt (seq, g);
6103 gimple_seq_add_stmt (seq, gimple_build_label (header));
6104 g = gimple_build_cond (LT_EXPR, sctx.idx, vf, body, end);
6105 gimple_seq_add_stmt (seq, g);
6106 gimple_seq_add_stmt (seq, gimple_build_label (end));
6107 }
6108 }
6109 if (sctx.is_simt)
6110 {
6111 gimple_seq_add_seq (dlist, sctx.simt_dlist);
6112 gimple *g
6113 = gimple_build_call_internal (IFN_GOMP_SIMT_EXIT, 1, simtrec);
6114 gimple_seq_add_stmt (dlist, g);
6115 }
6116
6117 /* The copyin sequence is not to be executed by the main thread, since
6118 that would result in self-copies. Perhaps not observable for scalars,
6119 but it certainly is for C++ operator=. */
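/* I.e. (a sketch) guard the sequence as

     if (omp_get_thread_num () != 0)
       <copyin_seq>;  */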
6120 if (copyin_seq)
6121 {
6122 x = build_call_expr (builtin_decl_explicit (BUILT_IN_OMP_GET_THREAD_NUM),
6123 0);
6124 x = build2 (NE_EXPR, boolean_type_node, x,
6125 build_int_cst (TREE_TYPE (x), 0));
6126 x = build3 (COND_EXPR, void_type_node, x, copyin_seq, NULL);
6127 gimplify_and_add (x, ilist);
6128 }
6129
6130 /* If any copyin variable is passed by reference, we must ensure the
6131 master thread doesn't modify it before it is copied over in all
6132 threads. Similarly, for variables in both firstprivate and
6133 lastprivate clauses, we need to ensure the lastprivate copying
6134 happens after firstprivate copying in all threads. And similarly
6135 for UDRs if the initializer expression refers to omp_orig. */
6136 if (copyin_by_ref || lastprivate_firstprivate
6137 || (reduction_omp_orig_ref
6138 && !ctx->scan_inclusive
6139 && !ctx->scan_exclusive))
6140 {
6141 /* Don't add any barrier for #pragma omp simd or
6142 #pragma omp distribute. */
6143 if (!is_task_ctx (ctx)
6144 && (gimple_code (ctx->stmt) != GIMPLE_OMP_FOR
6145 || gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_FOR))
6146 gimple_seq_add_stmt (ilist, omp_build_barrier (NULL_TREE));
6147 }
6148
6149 /* If max_vf is non-zero, then we can use only a vectorization factor
6150 up to the max_vf we chose. So stick it into the safelen clause. */
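/* E.g. (a sketch), with sctx.max_vf == 16 and no explicit (or only a
   larger) safelen, the loop effectively becomes
   "#pragma omp simd safelen(16)".  */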
6151 if (maybe_ne (sctx.max_vf, 0U))
6152 {
6153 tree c = omp_find_clause (gimple_omp_for_clauses (ctx->stmt),
6154 OMP_CLAUSE_SAFELEN);
6155 poly_uint64 safe_len;
6156 if (c == NULL_TREE
6157 || (poly_int_tree_p (OMP_CLAUSE_SAFELEN_EXPR (c), &safe_len)
6158 && maybe_gt (safe_len, sctx.max_vf)))
6159 {
6160 c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE_SAFELEN);
6161 OMP_CLAUSE_SAFELEN_EXPR (c) = build_int_cst (integer_type_node,
6162 sctx.max_vf);
6163 OMP_CLAUSE_CHAIN (c) = gimple_omp_for_clauses (ctx->stmt);
6164 gimple_omp_for_set_clauses (ctx->stmt, c);
6165 }
6166 }
6167 }
6168
6169 /* Create temporary variables for lastprivate(conditional:) implementation
6170 in context CTX with CLAUSES. */
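
/* As an illustrative sketch (hypothetical source), for

     #pragma omp for lastprivate (conditional: x)

   each variable gets an iteration-counter-typed temporary recording
   the iteration in which X was last assigned; the copy-out later
   keeps the value belonging to the highest recorded iteration. */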
6171
6172 static void
6173 lower_lastprivate_conditional_clauses (tree *clauses, omp_context *ctx)
6174 {
6175 tree iter_type = NULL_TREE;
6176 tree cond_ptr = NULL_TREE;
6177 tree iter_var = NULL_TREE;
6178 bool is_simd = (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
6179 && gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_SIMD);
6180 tree next = *clauses;
6181 for (tree c = *clauses; c; c = OMP_CLAUSE_CHAIN (c))
6182 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
6183 && OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c))
6184 {
6185 if (is_simd)
6186 {
6187 tree cc = omp_find_clause (next, OMP_CLAUSE__CONDTEMP_);
6188 gcc_assert (cc);
6189 if (iter_type == NULL_TREE)
6190 {
6191 iter_type = TREE_TYPE (OMP_CLAUSE_DECL (cc));
6192 iter_var = create_tmp_var_raw (iter_type);
6193 DECL_CONTEXT (iter_var) = current_function_decl;
6194 DECL_SEEN_IN_BIND_EXPR_P (iter_var) = 1;
6195 DECL_CHAIN (iter_var) = ctx->block_vars;
6196 ctx->block_vars = iter_var;
6197 tree c3
6198 = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__CONDTEMP_);
6199 OMP_CLAUSE__CONDTEMP__ITER (c3) = 1;
6200 OMP_CLAUSE_DECL (c3) = iter_var;
6201 OMP_CLAUSE_CHAIN (c3) = *clauses;
6202 *clauses = c3;
6203 ctx->lastprivate_conditional_map = new hash_map<tree, tree>;
6204 }
6205 next = OMP_CLAUSE_CHAIN (cc);
6206 tree o = lookup_decl (OMP_CLAUSE_DECL (c), ctx);
6207 tree v = lookup_decl (OMP_CLAUSE_DECL (cc), ctx);
6208 ctx->lastprivate_conditional_map->put (o, v);
6209 continue;
6210 }
6211 if (iter_type == NULL)
6212 {
6213 if (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR)
6214 {
6215 struct omp_for_data fd;
6216 omp_extract_for_data (as_a <gomp_for *> (ctx->stmt), &fd,
6217 NULL);
6218 iter_type = unsigned_type_for (fd.iter_type);
6219 }
6220 else if (gimple_code (ctx->stmt) == GIMPLE_OMP_SECTIONS)
6221 iter_type = unsigned_type_node;
6222 tree c2 = omp_find_clause (*clauses, OMP_CLAUSE__CONDTEMP_);
6223 if (c2)
6224 {
6225 cond_ptr
6226 = lookup_decl_in_outer_ctx (OMP_CLAUSE_DECL (c2), ctx);
6227 OMP_CLAUSE_DECL (c2) = cond_ptr;
6228 }
6229 else
6230 {
6231 cond_ptr = create_tmp_var_raw (build_pointer_type (iter_type));
6232 DECL_CONTEXT (cond_ptr) = current_function_decl;
6233 DECL_SEEN_IN_BIND_EXPR_P (cond_ptr) = 1;
6234 DECL_CHAIN (cond_ptr) = ctx->block_vars;
6235 ctx->block_vars = cond_ptr;
6236 c2 = build_omp_clause (UNKNOWN_LOCATION,
6237 OMP_CLAUSE__CONDTEMP_);
6238 OMP_CLAUSE_DECL (c2) = cond_ptr;
6239 OMP_CLAUSE_CHAIN (c2) = *clauses;
6240 *clauses = c2;
6241 }
6242 iter_var = create_tmp_var_raw (iter_type);
6243 DECL_CONTEXT (iter_var) = current_function_decl;
6244 DECL_SEEN_IN_BIND_EXPR_P (iter_var) = 1;
6245 DECL_CHAIN (iter_var) = ctx->block_vars;
6246 ctx->block_vars = iter_var;
6247 tree c3
6248 = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__CONDTEMP_);
6249 OMP_CLAUSE__CONDTEMP__ITER (c3) = 1;
6250 OMP_CLAUSE_DECL (c3) = iter_var;
6251 OMP_CLAUSE_CHAIN (c3) = OMP_CLAUSE_CHAIN (c2);
6252 OMP_CLAUSE_CHAIN (c2) = c3;
6253 ctx->lastprivate_conditional_map = new hash_map<tree, tree>;
6254 }
6255 tree v = create_tmp_var_raw (iter_type);
6256 DECL_CONTEXT (v) = current_function_decl;
6257 DECL_SEEN_IN_BIND_EXPR_P (v) = 1;
6258 DECL_CHAIN (v) = ctx->block_vars;
6259 ctx->block_vars = v;
6260 tree o = lookup_decl (OMP_CLAUSE_DECL (c), ctx);
6261 ctx->lastprivate_conditional_map->put (o, v);
6262 }
6263 }
6264
6265
6266 /* Generate code to implement the LASTPRIVATE clauses. This is used for
6267 both parallel and workshare constructs. PREDICATE may be NULL if it's
6268 always true. BODY_P is the sequence into which early initialization
6269 is inserted if needed, STMT_LIST is where the non-conditional
6270 lastprivate handling goes, and CSTMT_LIST is a sequence that needs to
6271 be run in a critical section. */
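
/* For example (an illustrative sketch), for

     #pragma omp parallel for lastprivate (x)

   the copy-out emitted here is guarded by PREDICATE, roughly

     if (<this thread ran the sequentially last iteration>)
       original_x = private_x;  */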
6272
6273 static void
6274 lower_lastprivate_clauses (tree clauses, tree predicate, gimple_seq *body_p,
6275 gimple_seq *stmt_list, gimple_seq *cstmt_list,
6276 omp_context *ctx)
6277 {
6278 tree x, c, label = NULL, orig_clauses = clauses;
6279 bool par_clauses = false;
6280 tree simduid = NULL, lastlane = NULL, simtcond = NULL, simtlast = NULL;
6281 unsigned HOST_WIDE_INT conditional_off = 0;
6282 gimple_seq post_stmt_list = NULL;
6283
6284 /* Early exit if there are no lastprivate or linear clauses. */
6285 for (; clauses ; clauses = OMP_CLAUSE_CHAIN (clauses))
6286 if (OMP_CLAUSE_CODE (clauses) == OMP_CLAUSE_LASTPRIVATE
6287 || (OMP_CLAUSE_CODE (clauses) == OMP_CLAUSE_LINEAR
6288 && !OMP_CLAUSE_LINEAR_NO_COPYOUT (clauses)))
6289 break;
6290 if (clauses == NULL)
6291 {
6292 /* If this was a workshare clause, see if it had been combined
6293 with its parallel. In that case, look for the clauses on the
6294 parallel statement itself. */
6295 if (is_parallel_ctx (ctx))
6296 return;
6297
6298 ctx = ctx->outer;
6299 if (ctx == NULL || !is_parallel_ctx (ctx))
6300 return;
6301
6302 clauses = omp_find_clause (gimple_omp_parallel_clauses (ctx->stmt),
6303 OMP_CLAUSE_LASTPRIVATE);
6304 if (clauses == NULL)
6305 return;
6306 par_clauses = true;
6307 }
6308
6309 bool maybe_simt = false;
6310 if (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
6311 && gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_SIMD)
6312 {
6313 maybe_simt = omp_find_clause (orig_clauses, OMP_CLAUSE__SIMT_);
6314 simduid = omp_find_clause (orig_clauses, OMP_CLAUSE__SIMDUID_);
6315 if (simduid)
6316 simduid = OMP_CLAUSE__SIMDUID__DECL (simduid);
6317 }
6318
6319 if (predicate)
6320 {
6321 gcond *stmt;
6322 tree label_true, arm1, arm2;
6323 enum tree_code pred_code = TREE_CODE (predicate);
6324
6325 label = create_artificial_label (UNKNOWN_LOCATION);
6326 label_true = create_artificial_label (UNKNOWN_LOCATION);
6327 if (TREE_CODE_CLASS (pred_code) == tcc_comparison)
6328 {
6329 arm1 = TREE_OPERAND (predicate, 0);
6330 arm2 = TREE_OPERAND (predicate, 1);
6331 gimplify_expr (&arm1, stmt_list, NULL, is_gimple_val, fb_rvalue);
6332 gimplify_expr (&arm2, stmt_list, NULL, is_gimple_val, fb_rvalue);
6333 }
6334 else
6335 {
6336 arm1 = predicate;
6337 gimplify_expr (&arm1, stmt_list, NULL, is_gimple_val, fb_rvalue);
6338 arm2 = boolean_false_node;
6339 pred_code = NE_EXPR;
6340 }
6341 if (maybe_simt)
6342 {
6343 c = build2 (pred_code, boolean_type_node, arm1, arm2);
6344 c = fold_convert (integer_type_node, c);
6345 simtcond = create_tmp_var (integer_type_node);
6346 gimplify_assign (simtcond, c, stmt_list);
6347 gcall *g = gimple_build_call_internal (IFN_GOMP_SIMT_VOTE_ANY,
6348 1, simtcond);
6349 c = create_tmp_var (integer_type_node);
6350 gimple_call_set_lhs (g, c);
6351 gimple_seq_add_stmt (stmt_list, g);
6352 stmt = gimple_build_cond (NE_EXPR, c, integer_zero_node,
6353 label_true, label);
6354 }
6355 else
6356 stmt = gimple_build_cond (pred_code, arm1, arm2, label_true, label);
6357 gimple_seq_add_stmt (stmt_list, stmt);
6358 gimple_seq_add_stmt (stmt_list, gimple_build_label (label_true));
6359 }
6360
6361 tree cond_ptr = NULL_TREE;
6362 for (c = clauses; c ;)
6363 {
6364 tree var, new_var;
6365 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
6366 gimple_seq *this_stmt_list = stmt_list;
6367 tree lab2 = NULL_TREE;
6368
6369 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
6370 && OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c)
6371 && ctx->lastprivate_conditional_map
6372 && !ctx->combined_into_simd_safelen1)
6373 {
6374 gcc_assert (body_p);
6375 if (simduid)
6376 goto next;
6377 if (cond_ptr == NULL_TREE)
6378 {
6379 cond_ptr = omp_find_clause (orig_clauses, OMP_CLAUSE__CONDTEMP_);
6380 cond_ptr = OMP_CLAUSE_DECL (cond_ptr);
6381 }
6382 tree type = TREE_TYPE (TREE_TYPE (cond_ptr));
6383 tree o = lookup_decl (OMP_CLAUSE_DECL (c), ctx);
6384 tree v = *ctx->lastprivate_conditional_map->get (o);
6385 gimplify_assign (v, build_zero_cst (type), body_p);
6386 this_stmt_list = cstmt_list;
6387 tree mem;
6388 if (POINTER_TYPE_P (TREE_TYPE (cond_ptr)))
6389 {
6390 mem = build2 (MEM_REF, type, cond_ptr,
6391 build_int_cst (TREE_TYPE (cond_ptr),
6392 conditional_off));
6393 conditional_off += tree_to_uhwi (TYPE_SIZE_UNIT (type));
6394 }
6395 else
6396 mem = build4 (ARRAY_REF, type, cond_ptr,
6397 size_int (conditional_off++), NULL_TREE, NULL_TREE);
6398 tree mem2 = copy_node (mem);
6399 gimple_seq seq = NULL;
6400 mem = force_gimple_operand (mem, &seq, true, NULL_TREE);
6401 gimple_seq_add_seq (this_stmt_list, seq);
6402 tree lab1 = create_artificial_label (UNKNOWN_LOCATION);
6403 lab2 = create_artificial_label (UNKNOWN_LOCATION);
6404 gimple *g = gimple_build_cond (GT_EXPR, v, mem, lab1, lab2);
6405 gimple_seq_add_stmt (this_stmt_list, g);
6406 gimple_seq_add_stmt (this_stmt_list, gimple_build_label (lab1));
6407 gimplify_assign (mem2, v, this_stmt_list);
6408 }
6409 else if (predicate
6410 && ctx->combined_into_simd_safelen1
6411 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
6412 && OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c)
6413 && ctx->lastprivate_conditional_map)
6414 this_stmt_list = &post_stmt_list;
6415
6416 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
6417 || (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
6418 && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c)))
6419 {
6420 var = OMP_CLAUSE_DECL (c);
6421 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
6422 && OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c)
6423 && is_taskloop_ctx (ctx))
6424 {
6425 gcc_checking_assert (ctx->outer && is_task_ctx (ctx->outer));
6426 new_var = lookup_decl (var, ctx->outer);
6427 }
6428 else
6429 {
6430 new_var = lookup_decl (var, ctx);
6431 /* Avoid uninitialized warnings for lastprivate and
6432 for linear iterators. */
6433 if (predicate
6434 && (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
6435 || OMP_CLAUSE_LINEAR_NO_COPYIN (c)))
6436 TREE_NO_WARNING (new_var) = 1;
6437 }
6438
6439 if (!maybe_simt && simduid && DECL_HAS_VALUE_EXPR_P (new_var))
6440 {
6441 tree val = DECL_VALUE_EXPR (new_var);
6442 if (TREE_CODE (val) == ARRAY_REF
6443 && VAR_P (TREE_OPERAND (val, 0))
6444 && lookup_attribute ("omp simd array",
6445 DECL_ATTRIBUTES (TREE_OPERAND (val,
6446 0))))
6447 {
6448 if (lastlane == NULL)
6449 {
6450 lastlane = create_tmp_var (unsigned_type_node);
6451 gcall *g
6452 = gimple_build_call_internal (IFN_GOMP_SIMD_LAST_LANE,
6453 2, simduid,
6454 TREE_OPERAND (val, 1));
6455 gimple_call_set_lhs (g, lastlane);
6456 gimple_seq_add_stmt (this_stmt_list, g);
6457 }
6458 new_var = build4 (ARRAY_REF, TREE_TYPE (val),
6459 TREE_OPERAND (val, 0), lastlane,
6460 NULL_TREE, NULL_TREE);
6461 TREE_THIS_NOTRAP (new_var) = 1;
6462 }
6463 }
6464 else if (maybe_simt)
6465 {
6466 tree val = (DECL_HAS_VALUE_EXPR_P (new_var)
6467 ? DECL_VALUE_EXPR (new_var)
6468 : new_var);
6469 if (simtlast == NULL)
6470 {
6471 simtlast = create_tmp_var (unsigned_type_node);
6472 gcall *g = gimple_build_call_internal
6473 (IFN_GOMP_SIMT_LAST_LANE, 1, simtcond);
6474 gimple_call_set_lhs (g, simtlast);
6475 gimple_seq_add_stmt (this_stmt_list, g);
6476 }
6477 x = build_call_expr_internal_loc
6478 (UNKNOWN_LOCATION, IFN_GOMP_SIMT_XCHG_IDX,
6479 TREE_TYPE (val), 2, val, simtlast);
6480 new_var = unshare_expr (new_var);
6481 gimplify_assign (new_var, x, this_stmt_list);
6482 new_var = unshare_expr (new_var);
6483 }
6484
6485 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
6486 && OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c))
6487 {
6488 lower_omp (&OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c), ctx);
6489 gimple_seq_add_seq (this_stmt_list,
6490 OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c));
6491 OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c) = NULL;
6492 }
6493 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
6494 && OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c))
6495 {
6496 lower_omp (&OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c), ctx);
6497 gimple_seq_add_seq (this_stmt_list,
6498 OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c));
6499 OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c) = NULL;
6500 }
6501
6502 x = NULL_TREE;
6503 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
6504 && OMP_CLAUSE_LASTPRIVATE_LOOP_IV (c)
6505 && is_taskloop_ctx (ctx))
6506 {
6507 tree ovar = maybe_lookup_decl_in_outer_ctx (var,
6508 ctx->outer->outer);
6509 if (is_global_var (ovar))
6510 x = ovar;
6511 }
6512 if (!x)
6513 x = build_outer_var_ref (var, ctx, OMP_CLAUSE_LASTPRIVATE);
6514 if (omp_is_reference (var))
6515 new_var = build_simple_mem_ref_loc (clause_loc, new_var);
6516 x = lang_hooks.decls.omp_clause_assign_op (c, x, new_var);
6517 gimplify_and_add (x, this_stmt_list);
6518
6519 if (lab2)
6520 gimple_seq_add_stmt (this_stmt_list, gimple_build_label (lab2));
6521 }
6522
6523 next:
6524 c = OMP_CLAUSE_CHAIN (c);
6525 if (c == NULL && !par_clauses)
6526 {
6527 /* If this was a workshare clause, see if it had been combined
6528 with its parallel. In that case, continue looking for the
6529 clauses also on the parallel statement itself. */
6530 if (is_parallel_ctx (ctx))
6531 break;
6532
6533 ctx = ctx->outer;
6534 if (ctx == NULL || !is_parallel_ctx (ctx))
6535 break;
6536
6537 c = omp_find_clause (gimple_omp_parallel_clauses (ctx->stmt),
6538 OMP_CLAUSE_LASTPRIVATE);
6539 par_clauses = true;
6540 }
6541 }
6542
6543 if (label)
6544 gimple_seq_add_stmt (stmt_list, gimple_build_label (label));
6545 gimple_seq_add_seq (stmt_list, post_stmt_list);
6546 }
6547
6548 /* Lower the OpenACC reductions of CLAUSES for compute axis LEVEL
6549 (which might be a placeholder). INNER is true if this is an inner
6550 axis of a multi-axis loop. FORK and JOIN are (optional) fork and
6551 join markers. Generate the before-loop forking sequence in
6552 FORK_SEQ and the after-loop joining sequence in JOIN_SEQ. The
6553 general form of these sequences is
6554
6555 GOACC_REDUCTION_SETUP
6556 GOACC_FORK
6557 GOACC_REDUCTION_INIT
6558 ...
6559 GOACC_REDUCTION_FINI
6560 GOACC_JOIN
6561 GOACC_REDUCTION_TEARDOWN. */
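
/* E.g. (a sketch), for "#pragma acc parallel loop reduction (+:sum)"
   each IFN_GOACC_REDUCTION call built below carries one of the codes
   above plus the reduction operator and a buffer offset; the calls
   are resolved later, once the offloaded compute dimensions are
   known.  */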
6562
6563 static void
6564 lower_oacc_reductions (location_t loc, tree clauses, tree level, bool inner,
6565 gcall *fork, gcall *join, gimple_seq *fork_seq,
6566 gimple_seq *join_seq, omp_context *ctx)
6567 {
6568 gimple_seq before_fork = NULL;
6569 gimple_seq after_fork = NULL;
6570 gimple_seq before_join = NULL;
6571 gimple_seq after_join = NULL;
6572 tree init_code = NULL_TREE, fini_code = NULL_TREE,
6573 setup_code = NULL_TREE, teardown_code = NULL_TREE;
6574 unsigned offset = 0;
6575
6576 for (tree c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
6577 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION)
6578 {
6579 tree orig = OMP_CLAUSE_DECL (c);
6580 tree var = maybe_lookup_decl (orig, ctx);
6581 tree ref_to_res = NULL_TREE;
6582 tree incoming, outgoing, v1, v2, v3;
6583 bool is_private = false;
6584
6585 enum tree_code rcode = OMP_CLAUSE_REDUCTION_CODE (c);
6586 if (rcode == MINUS_EXPR)
6587 rcode = PLUS_EXPR;
6588 else if (rcode == TRUTH_ANDIF_EXPR)
6589 rcode = BIT_AND_EXPR;
6590 else if (rcode == TRUTH_ORIF_EXPR)
6591 rcode = BIT_IOR_EXPR;
6592 tree op = build_int_cst (unsigned_type_node, rcode);
6593
6594 if (!var)
6595 var = orig;
6596
6597 incoming = outgoing = var;
6598
6599 if (!inner)
6600 {
6601 /* See if an outer construct also reduces this variable. */
6602 omp_context *outer = ctx;
6603
6604 while (omp_context *probe = outer->outer)
6605 {
6606 enum gimple_code type = gimple_code (probe->stmt);
6607 tree cls;
6608
6609 switch (type)
6610 {
6611 case GIMPLE_OMP_FOR:
6612 cls = gimple_omp_for_clauses (probe->stmt);
6613 break;
6614
6615 case GIMPLE_OMP_TARGET:
6616 if (gimple_omp_target_kind (probe->stmt)
6617 != GF_OMP_TARGET_KIND_OACC_PARALLEL)
6618 goto do_lookup;
6619
6620 cls = gimple_omp_target_clauses (probe->stmt);
6621 break;
6622
6623 default:
6624 goto do_lookup;
6625 }
6626
6627 outer = probe;
6628 for (; cls; cls = OMP_CLAUSE_CHAIN (cls))
6629 if (OMP_CLAUSE_CODE (cls) == OMP_CLAUSE_REDUCTION
6630 && orig == OMP_CLAUSE_DECL (cls))
6631 {
6632 incoming = outgoing = lookup_decl (orig, probe);
6633 goto has_outer_reduction;
6634 }
6635 else if ((OMP_CLAUSE_CODE (cls) == OMP_CLAUSE_FIRSTPRIVATE
6636 || OMP_CLAUSE_CODE (cls) == OMP_CLAUSE_PRIVATE)
6637 && orig == OMP_CLAUSE_DECL (cls))
6638 {
6639 is_private = true;
6640 goto do_lookup;
6641 }
6642 }
6643
6644 do_lookup:
6645 /* This is the outermost construct with this reduction;
6646 see if there's a mapping for it. */
6647 if (gimple_code (outer->stmt) == GIMPLE_OMP_TARGET
6648 && maybe_lookup_field (orig, outer) && !is_private)
6649 {
6650 ref_to_res = build_receiver_ref (orig, false, outer);
6651 if (omp_is_reference (orig))
6652 ref_to_res = build_simple_mem_ref (ref_to_res);
6653
6654 tree type = TREE_TYPE (var);
6655 if (POINTER_TYPE_P (type))
6656 type = TREE_TYPE (type);
6657
6658 outgoing = var;
6659 incoming = omp_reduction_init_op (loc, rcode, type);
6660 }
6661 else
6662 {
6663 /* Try to look at enclosing contexts for the reduction var;
6664 use the original if no mapping is found. */
6665 tree t = NULL_TREE;
6666 omp_context *c = ctx->outer;
6667 while (c && !t)
6668 {
6669 t = maybe_lookup_decl (orig, c);
6670 c = c->outer;
6671 }
6672 incoming = outgoing = (t ? t : orig);
6673 }
6674
6675 has_outer_reduction:;
6676 }
6677
6678 if (!ref_to_res)
6679 ref_to_res = integer_zero_node;
6680
6681 if (omp_is_reference (orig))
6682 {
6683 tree type = TREE_TYPE (var);
6684 const char *id = IDENTIFIER_POINTER (DECL_NAME (var));
6685
6686 if (!inner)
6687 {
6688 tree x = create_tmp_var (TREE_TYPE (type), id);
6689 gimplify_assign (var, build_fold_addr_expr (x), fork_seq);
6690 }
6691
6692 v1 = create_tmp_var (type, id);
6693 v2 = create_tmp_var (type, id);
6694 v3 = create_tmp_var (type, id);
6695
6696 gimplify_assign (v1, var, fork_seq);
6697 gimplify_assign (v2, var, fork_seq);
6698 gimplify_assign (v3, var, fork_seq);
6699
6700 var = build_simple_mem_ref (var);
6701 v1 = build_simple_mem_ref (v1);
6702 v2 = build_simple_mem_ref (v2);
6703 v3 = build_simple_mem_ref (v3);
6704 outgoing = build_simple_mem_ref (outgoing);
6705
6706 if (!TREE_CONSTANT (incoming))
6707 incoming = build_simple_mem_ref (incoming);
6708 }
6709 else
6710 v1 = v2 = v3 = var;
6711
6712 /* Determine the position in the reduction buffer, which may be
6713 used by the target. The parser has ensured that this is not a
6714 variable-sized type. */
6715 fixed_size_mode mode
6716 = as_a <fixed_size_mode> (TYPE_MODE (TREE_TYPE (var)));
6717 unsigned align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
6718 offset = (offset + align - 1) & ~(align - 1);
6719 tree off = build_int_cst (sizetype, offset);
6720 offset += GET_MODE_SIZE (mode);
6721
6722 if (!init_code)
6723 {
6724 init_code = build_int_cst (integer_type_node,
6725 IFN_GOACC_REDUCTION_INIT);
6726 fini_code = build_int_cst (integer_type_node,
6727 IFN_GOACC_REDUCTION_FINI);
6728 setup_code = build_int_cst (integer_type_node,
6729 IFN_GOACC_REDUCTION_SETUP);
6730 teardown_code = build_int_cst (integer_type_node,
6731 IFN_GOACC_REDUCTION_TEARDOWN);
6732 }
6733
6734 tree setup_call
6735 = build_call_expr_internal_loc (loc, IFN_GOACC_REDUCTION,
6736 TREE_TYPE (var), 6, setup_code,
6737 unshare_expr (ref_to_res),
6738 incoming, level, op, off);
6739 tree init_call
6740 = build_call_expr_internal_loc (loc, IFN_GOACC_REDUCTION,
6741 TREE_TYPE (var), 6, init_code,
6742 unshare_expr (ref_to_res),
6743 v1, level, op, off);
6744 tree fini_call
6745 = build_call_expr_internal_loc (loc, IFN_GOACC_REDUCTION,
6746 TREE_TYPE (var), 6, fini_code,
6747 unshare_expr (ref_to_res),
6748 v2, level, op, off);
6749 tree teardown_call
6750 = build_call_expr_internal_loc (loc, IFN_GOACC_REDUCTION,
6751 TREE_TYPE (var), 6, teardown_code,
6752 ref_to_res, v3, level, op, off);
6753
6754 gimplify_assign (v1, setup_call, &before_fork);
6755 gimplify_assign (v2, init_call, &after_fork);
6756 gimplify_assign (v3, fini_call, &before_join);
6757 gimplify_assign (outgoing, teardown_call, &after_join);
6758 }
6759
6760 /* Now stitch things together. */
6761 gimple_seq_add_seq (fork_seq, before_fork);
6762 if (fork)
6763 gimple_seq_add_stmt (fork_seq, fork);
6764 gimple_seq_add_seq (fork_seq, after_fork);
6765
6766 gimple_seq_add_seq (join_seq, before_join);
6767 if (join)
6768 gimple_seq_add_stmt (join_seq, join);
6769 gimple_seq_add_seq (join_seq, after_join);
6770 }
6771
6772 /* Generate code to implement the REDUCTION clauses and append it
6773 to STMT_SEQP. CLIST, if non-NULL, is a pointer to a sequence
6774 that should also be emitted inside the critical section; in
6775 that case clear *CLIST afterwards. Otherwise leave it as is
6776 and let the caller emit it itself. */
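
/* For example (an illustrative sketch), for

     #pragma omp parallel for reduction (+:sum)

   the merge emitted here is, in the general lock-based case,

     GOMP_atomic_start ();
     original_sum = original_sum + private_sum;
     GOMP_atomic_end ();

   while a single suitable scalar reduction clause is merged with one
   relaxed OMP_ATOMIC update instead.  */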
6777
6778 static void
6779 lower_reduction_clauses (tree clauses, gimple_seq *stmt_seqp,
6780 gimple_seq *clist, omp_context *ctx)
6781 {
6782 gimple_seq sub_seq = NULL;
6783 gimple *stmt;
6784 tree x, c;
6785 int count = 0;
6786
6787 /* OpenACC loop reductions are handled elsewhere. */
6788 if (is_gimple_omp_oacc (ctx->stmt))
6789 return;
6790
6791 /* SIMD reductions are handled in lower_rec_input_clauses. */
6792 if (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
6793 && gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_SIMD)
6794 return;
6795
6796 /* inscan reductions are handled elsewhere. */
6797 if (ctx->scan_inclusive || ctx->scan_exclusive)
6798 return;
6799
6800 /* First see if there is exactly one reduction clause. Use OMP_ATOMIC
6801 update in that case, otherwise use a lock. */
6802 for (c = clauses; c && count < 2; c = OMP_CLAUSE_CHAIN (c))
6803 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
6804 && !OMP_CLAUSE_REDUCTION_TASK (c))
6805 {
6806 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c)
6807 || TREE_CODE (OMP_CLAUSE_DECL (c)) == MEM_REF)
6808 {
6809 /* Never use OMP_ATOMIC for array reductions or UDRs. */
6810 count = -1;
6811 break;
6812 }
6813 count++;
6814 }
6815
6816 if (count == 0)
6817 return;
6818
6819 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
6820 {
6821 tree var, ref, new_var, orig_var;
6822 enum tree_code code;
6823 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
6824
6825 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION
6826 || OMP_CLAUSE_REDUCTION_TASK (c))
6827 continue;
6828
6829 enum omp_clause_code ccode = OMP_CLAUSE_REDUCTION;
6830 orig_var = var = OMP_CLAUSE_DECL (c);
6831 if (TREE_CODE (var) == MEM_REF)
6832 {
6833 var = TREE_OPERAND (var, 0);
6834 if (TREE_CODE (var) == POINTER_PLUS_EXPR)
6835 var = TREE_OPERAND (var, 0);
6836 if (TREE_CODE (var) == ADDR_EXPR)
6837 var = TREE_OPERAND (var, 0);
6838 else
6839 {
6840 /* If this is a pointer- or reference-based array
6841 section, the var could be private in the outer
6842 context, e.g. on an orphaned loop construct. Pretend
6843 this is the private variable's outer reference. */
6844 ccode = OMP_CLAUSE_PRIVATE;
6845 if (TREE_CODE (var) == INDIRECT_REF)
6846 var = TREE_OPERAND (var, 0);
6847 }
6848 orig_var = var;
6849 if (is_variable_sized (var))
6850 {
6851 gcc_assert (DECL_HAS_VALUE_EXPR_P (var));
6852 var = DECL_VALUE_EXPR (var);
6853 gcc_assert (TREE_CODE (var) == INDIRECT_REF);
6854 var = TREE_OPERAND (var, 0);
6855 gcc_assert (DECL_P (var));
6856 }
6857 }
6858 new_var = lookup_decl (var, ctx);
6859 if (var == OMP_CLAUSE_DECL (c) && omp_is_reference (var))
6860 new_var = build_simple_mem_ref_loc (clause_loc, new_var);
6861 ref = build_outer_var_ref (var, ctx, ccode);
6862 code = OMP_CLAUSE_REDUCTION_CODE (c);
6863
6864 /* reduction(-:var) sums up the partial results, so it acts
6865 identically to reduction(+:var). */
6866 if (code == MINUS_EXPR)
6867 code = PLUS_EXPR;
6868
6869 if (count == 1)
6870 {
6871 tree addr = build_fold_addr_expr_loc (clause_loc, ref);
6872
6873 addr = save_expr (addr);
6874 ref = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (addr)), addr);
6875 x = fold_build2_loc (clause_loc, code, TREE_TYPE (ref), ref, new_var);
6876 x = build2 (OMP_ATOMIC, void_type_node, addr, x);
6877 OMP_ATOMIC_MEMORY_ORDER (x) = OMP_MEMORY_ORDER_RELAXED;
6878 gimplify_and_add (x, stmt_seqp);
6879 return;
6880 }
6881 else if (TREE_CODE (OMP_CLAUSE_DECL (c)) == MEM_REF)
6882 {
6883 tree d = OMP_CLAUSE_DECL (c);
6884 tree type = TREE_TYPE (d);
6885 tree v = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
6886 tree i = create_tmp_var (TREE_TYPE (v));
6887 tree ptype = build_pointer_type (TREE_TYPE (type));
6888 tree bias = TREE_OPERAND (d, 1);
6889 d = TREE_OPERAND (d, 0);
6890 if (TREE_CODE (d) == POINTER_PLUS_EXPR)
6891 {
6892 tree b = TREE_OPERAND (d, 1);
6893 b = maybe_lookup_decl (b, ctx);
6894 if (b == NULL)
6895 {
6896 b = TREE_OPERAND (d, 1);
6897 b = maybe_lookup_decl_in_outer_ctx (b, ctx);
6898 }
6899 if (integer_zerop (bias))
6900 bias = b;
6901 else
6902 {
6903 bias = fold_convert_loc (clause_loc, TREE_TYPE (b), bias);
6904 bias = fold_build2_loc (clause_loc, PLUS_EXPR,
6905 TREE_TYPE (b), b, bias);
6906 }
6907 d = TREE_OPERAND (d, 0);
6908 }
6909 /* For REF, build_outer_var_ref already performs this
6910 dereference, so only NEW_VAR needs one. */
6911 if (TREE_CODE (d) == INDIRECT_REF)
6912 {
6913 new_var = build_simple_mem_ref_loc (clause_loc, new_var);
6914 gcc_assert (omp_is_reference (var) && var == orig_var);
6915 }
6916 else if (TREE_CODE (d) == ADDR_EXPR)
6917 {
6918 if (orig_var == var)
6919 {
6920 new_var = build_fold_addr_expr (new_var);
6921 ref = build_fold_addr_expr (ref);
6922 }
6923 }
6924 else
6925 {
6926 gcc_assert (orig_var == var);
6927 if (omp_is_reference (var))
6928 ref = build_fold_addr_expr (ref);
6929 }
6930 if (DECL_P (v))
6931 {
6932 tree t = maybe_lookup_decl (v, ctx);
6933 if (t)
6934 v = t;
6935 else
6936 v = maybe_lookup_decl_in_outer_ctx (v, ctx);
6937 gimplify_expr (&v, stmt_seqp, NULL, is_gimple_val, fb_rvalue);
6938 }
6939 if (!integer_zerop (bias))
6940 {
6941 bias = fold_convert_loc (clause_loc, sizetype, bias);
6942 new_var = fold_build2_loc (clause_loc, POINTER_PLUS_EXPR,
6943 TREE_TYPE (new_var), new_var,
6944 unshare_expr (bias));
6945 ref = fold_build2_loc (clause_loc, POINTER_PLUS_EXPR,
6946 TREE_TYPE (ref), ref, bias);
6947 }
6948 new_var = fold_convert_loc (clause_loc, ptype, new_var);
6949 ref = fold_convert_loc (clause_loc, ptype, ref);
6950 tree m = create_tmp_var (ptype);
6951 gimplify_assign (m, new_var, stmt_seqp);
6952 new_var = m;
6953 m = create_tmp_var (ptype);
6954 gimplify_assign (m, ref, stmt_seqp);
6955 ref = m;
6956 gimplify_assign (i, build_int_cst (TREE_TYPE (v), 0), stmt_seqp);
6957 tree body = create_artificial_label (UNKNOWN_LOCATION);
6958 tree end = create_artificial_label (UNKNOWN_LOCATION);
6959 gimple_seq_add_stmt (&sub_seq, gimple_build_label (body));
6960 tree priv = build_simple_mem_ref_loc (clause_loc, new_var);
6961 tree out = build_simple_mem_ref_loc (clause_loc, ref);
6962 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
6963 {
6964 tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
6965 tree decl_placeholder
6966 = OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c);
6967 SET_DECL_VALUE_EXPR (placeholder, out);
6968 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
6969 SET_DECL_VALUE_EXPR (decl_placeholder, priv);
6970 DECL_HAS_VALUE_EXPR_P (decl_placeholder) = 1;
6971 lower_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c), ctx);
6972 gimple_seq_add_seq (&sub_seq,
6973 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c));
6974 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
6975 OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) = NULL;
6976 OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c) = NULL;
6977 }
6978 else
6979 {
6980 x = build2 (code, TREE_TYPE (out), out, priv);
6981 out = unshare_expr (out);
6982 gimplify_assign (out, x, &sub_seq);
6983 }
6984 gimple *g = gimple_build_assign (new_var, POINTER_PLUS_EXPR, new_var,
6985 TYPE_SIZE_UNIT (TREE_TYPE (type)));
6986 gimple_seq_add_stmt (&sub_seq, g);
6987 g = gimple_build_assign (ref, POINTER_PLUS_EXPR, ref,
6988 TYPE_SIZE_UNIT (TREE_TYPE (type)));
6989 gimple_seq_add_stmt (&sub_seq, g);
6990 g = gimple_build_assign (i, PLUS_EXPR, i,
6991 build_int_cst (TREE_TYPE (i), 1));
6992 gimple_seq_add_stmt (&sub_seq, g);
6993 g = gimple_build_cond (LE_EXPR, i, v, body, end);
6994 gimple_seq_add_stmt (&sub_seq, g);
6995 gimple_seq_add_stmt (&sub_seq, gimple_build_label (end));
6996 }
6997 else if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
6998 {
6999 tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
7000
7001 if (omp_is_reference (var)
7002 && !useless_type_conversion_p (TREE_TYPE (placeholder),
7003 TREE_TYPE (ref)))
7004 ref = build_fold_addr_expr_loc (clause_loc, ref);
7005 SET_DECL_VALUE_EXPR (placeholder, ref);
7006 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
7007 lower_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c), ctx);
7008 gimple_seq_add_seq (&sub_seq, OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c));
7009 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
7010 OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) = NULL;
7011 }
7012 else
7013 {
7014 x = build2 (code, TREE_TYPE (ref), ref, new_var);
7015 ref = build_outer_var_ref (var, ctx);
7016 gimplify_assign (ref, x, &sub_seq);
7017 }
7018 }
7019
7020 stmt = gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_ATOMIC_START),
7021 0);
7022 gimple_seq_add_stmt (stmt_seqp, stmt);
7023
7024 gimple_seq_add_seq (stmt_seqp, sub_seq);
7025
7026 if (clist)
7027 {
7028 gimple_seq_add_seq (stmt_seqp, *clist);
7029 *clist = NULL;
7030 }
7031
7032 stmt = gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_ATOMIC_END),
7033 0);
7034 gimple_seq_add_stmt (stmt_seqp, stmt);
7035 }
7036
7037
7038 /* Generate code to implement the COPYPRIVATE clauses. */
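
/* E.g. (a sketch), for "#pragma omp single copyprivate (x)" the thread
   that executed the single region stores X (or &X, when passed by
   reference) into the communication record on the sender side (SLIST),
   and the other threads then copy the value back out on the receiver
   side (RLIST).  */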
7039
7040 static void
7041 lower_copyprivate_clauses (tree clauses, gimple_seq *slist, gimple_seq *rlist,
7042 omp_context *ctx)
7043 {
7044 tree c;
7045
7046 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
7047 {
7048 tree var, new_var, ref, x;
7049 bool by_ref;
7050 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
7051
7052 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_COPYPRIVATE)
7053 continue;
7054
7055 var = OMP_CLAUSE_DECL (c);
7056 by_ref = use_pointer_for_field (var, NULL);
7057
7058 ref = build_sender_ref (var, ctx);
7059 x = new_var = lookup_decl_in_outer_ctx (var, ctx);
7060 if (by_ref)
7061 {
7062 x = build_fold_addr_expr_loc (clause_loc, new_var);
7063 x = fold_convert_loc (clause_loc, TREE_TYPE (ref), x);
7064 }
7065 gimplify_assign (ref, x, slist);
7066
7067 ref = build_receiver_ref (var, false, ctx);
7068 if (by_ref)
7069 {
7070 ref = fold_convert_loc (clause_loc,
7071 build_pointer_type (TREE_TYPE (new_var)),
7072 ref);
7073 ref = build_fold_indirect_ref_loc (clause_loc, ref);
7074 }
7075 if (omp_is_reference (var))
7076 {
7077 ref = fold_convert_loc (clause_loc, TREE_TYPE (new_var), ref);
7078 ref = build_simple_mem_ref_loc (clause_loc, ref);
7079 new_var = build_simple_mem_ref_loc (clause_loc, new_var);
7080 }
7081 x = lang_hooks.decls.omp_clause_assign_op (c, new_var, ref);
7082 gimplify_and_add (x, rlist);
7083 }
7084 }
7085
7086
7087 /* Generate code to implement the clauses, FIRSTPRIVATE, COPYIN, LASTPRIVATE,
7088 and REDUCTION from the sender (aka parent) side. */
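
/* E.g. (an illustrative sketch), for "#pragma omp task firstprivate (x)"
   the parent stores X (or &X, when passed by reference) into the sender
   record CTX->sender_decl in ILIST before the region starts; do_out
   clauses copy values back out of that record in OLIST afterwards.  */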
7089
7090 static void
7091 lower_send_clauses (tree clauses, gimple_seq *ilist, gimple_seq *olist,
7092 omp_context *ctx)
7093 {
7094 tree c, t;
7095 int ignored_looptemp = 0;
7096 bool is_taskloop = false;
7097
7098 /* For taskloop, ignore the first two _looptemp_ clauses; those are
7099 initialized by GOMP_taskloop. */
7100 if (is_task_ctx (ctx) && gimple_omp_task_taskloop_p (ctx->stmt))
7101 {
7102 ignored_looptemp = 2;
7103 is_taskloop = true;
7104 }
7105
7106 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
7107 {
7108 tree val, ref, x, var;
7109 bool by_ref, do_in = false, do_out = false;
7110 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
7111
7112 switch (OMP_CLAUSE_CODE (c))
7113 {
7114 case OMP_CLAUSE_PRIVATE:
7115 if (OMP_CLAUSE_PRIVATE_OUTER_REF (c))
7116 break;
7117 continue;
7118 case OMP_CLAUSE_FIRSTPRIVATE:
7119 case OMP_CLAUSE_COPYIN:
7120 case OMP_CLAUSE_LASTPRIVATE:
7121 case OMP_CLAUSE_IN_REDUCTION:
7122 case OMP_CLAUSE__REDUCTEMP_:
7123 break;
7124 case OMP_CLAUSE_REDUCTION:
7125 if (is_task_ctx (ctx) || OMP_CLAUSE_REDUCTION_TASK (c))
7126 continue;
7127 break;
7128 case OMP_CLAUSE_SHARED:
7129 if (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
7130 break;
7131 continue;
7132 case OMP_CLAUSE__LOOPTEMP_:
7133 if (ignored_looptemp)
7134 {
7135 ignored_looptemp--;
7136 continue;
7137 }
7138 break;
7139 default:
7140 continue;
7141 }
7142
7143 val = OMP_CLAUSE_DECL (c);
7144 if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
7145 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_IN_REDUCTION)
7146 && TREE_CODE (val) == MEM_REF)
7147 {
7148 val = TREE_OPERAND (val, 0);
7149 if (TREE_CODE (val) == POINTER_PLUS_EXPR)
7150 val = TREE_OPERAND (val, 0);
7151 if (TREE_CODE (val) == INDIRECT_REF
7152 || TREE_CODE (val) == ADDR_EXPR)
7153 val = TREE_OPERAND (val, 0);
7154 if (is_variable_sized (val))
7155 continue;
7156 }
7157
7158 /* For OMP_CLAUSE_SHARED_FIRSTPRIVATE, look beyond the
7159 outer taskloop region. */
7160 omp_context *ctx_for_o = ctx;
7161 if (is_taskloop
7162 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED
7163 && OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
7164 ctx_for_o = ctx->outer;
7165
7166 var = lookup_decl_in_outer_ctx (val, ctx_for_o);
7167
7168 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_COPYIN
7169 && is_global_var (var)
7170 && (val == OMP_CLAUSE_DECL (c)
7171 || !is_task_ctx (ctx)
7172 || (TREE_CODE (TREE_TYPE (val)) != POINTER_TYPE
7173 && (TREE_CODE (TREE_TYPE (val)) != REFERENCE_TYPE
7174 || (TREE_CODE (TREE_TYPE (TREE_TYPE (val)))
7175 != POINTER_TYPE)))))
7176 continue;
7177
7178 t = omp_member_access_dummy_var (var);
7179 if (t)
7180 {
7181 var = DECL_VALUE_EXPR (var);
7182 tree o = maybe_lookup_decl_in_outer_ctx (t, ctx_for_o);
7183 if (o != t)
7184 var = unshare_and_remap (var, t, o);
7185 else
7186 var = unshare_expr (var);
7187 }
7188
7189 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED)
7190 {
7191 /* Handle taskloop firstprivate/lastprivate, where the
7192 lastprivate on GIMPLE_OMP_TASK is represented as
7193 OMP_CLAUSE_SHARED_FIRSTPRIVATE. */
7194 tree f = lookup_sfield ((splay_tree_key) &DECL_UID (val), ctx);
7195 x = omp_build_component_ref (ctx->sender_decl, f);
7196 if (use_pointer_for_field (val, ctx))
7197 var = build_fold_addr_expr (var);
7198 gimplify_assign (x, var, ilist);
7199 DECL_ABSTRACT_ORIGIN (f) = NULL;
7200 continue;
7201 }
7202
7203 if (((OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION
7204 && OMP_CLAUSE_CODE (c) != OMP_CLAUSE_IN_REDUCTION)
7205 || val == OMP_CLAUSE_DECL (c))
7206 && is_variable_sized (val))
7207 continue;
7208 by_ref = use_pointer_for_field (val, NULL);
7209
7210 switch (OMP_CLAUSE_CODE (c))
7211 {
7212 case OMP_CLAUSE_FIRSTPRIVATE:
7213 if (OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (c)
7214 && !by_ref
7215 && is_task_ctx (ctx))
7216 TREE_NO_WARNING (var) = 1;
7217 do_in = true;
7218 break;
7219
7220 case OMP_CLAUSE_PRIVATE:
7221 case OMP_CLAUSE_COPYIN:
7222 case OMP_CLAUSE__LOOPTEMP_:
7223 case OMP_CLAUSE__REDUCTEMP_:
7224 do_in = true;
7225 break;
7226
7227 case OMP_CLAUSE_LASTPRIVATE:
7228 if (by_ref || omp_is_reference (val))
7229 {
7230 if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
7231 continue;
7232 do_in = true;
7233 }
7234 else
7235 {
7236 do_out = true;
7237 if (lang_hooks.decls.omp_private_outer_ref (val))
7238 do_in = true;
7239 }
7240 break;
7241
7242 case OMP_CLAUSE_REDUCTION:
7243 case OMP_CLAUSE_IN_REDUCTION:
7244 do_in = true;
7245 if (val == OMP_CLAUSE_DECL (c))
7246 {
7247 if (is_task_ctx (ctx))
7248 by_ref = use_pointer_for_field (val, ctx);
7249 else
7250 do_out = !(by_ref || omp_is_reference (val));
7251 }
7252 else
7253 by_ref = TREE_CODE (TREE_TYPE (val)) == ARRAY_TYPE;
7254 break;
7255
7256 default:
7257 gcc_unreachable ();
7258 }
7259
7260 if (do_in)
7261 {
7262 ref = build_sender_ref (val, ctx);
7263 x = by_ref ? build_fold_addr_expr_loc (clause_loc, var) : var;
7264 gimplify_assign (ref, x, ilist);
7265 if (is_task_ctx (ctx))
7266 DECL_ABSTRACT_ORIGIN (TREE_OPERAND (ref, 1)) = NULL;
7267 }
7268
7269 if (do_out)
7270 {
7271 ref = build_sender_ref (val, ctx);
7272 gimplify_assign (var, ref, olist);
7273 }
7274 }
7275 }
7276
7277 /* Generate code to implement SHARED from the sender (aka parent)
7278 side. This is trickier, since GIMPLE_OMP_PARALLEL_CLAUSES doesn't
7279 list things that got automatically shared. */
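/* Sketch: a shared variable V communicated by pointer becomes
   ".omp_data_o.v = &v" in ILIST; one communicated by copy becomes
   ".omp_data_o.v = v" in ILIST plus, unless V is read-only or a
   by-reference result/parm decl, the copy-back "v = .omp_data_o.v"
   in OLIST.  */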
7280
7281 static void
7282 lower_send_shared_vars (gimple_seq *ilist, gimple_seq *olist, omp_context *ctx)
7283 {
7284 tree var, ovar, nvar, t, f, x, record_type;
7285
7286 if (ctx->record_type == NULL)
7287 return;
7288
7289 record_type = ctx->srecord_type ? ctx->srecord_type : ctx->record_type;
7290 for (f = TYPE_FIELDS (record_type); f ; f = DECL_CHAIN (f))
7291 {
7292 ovar = DECL_ABSTRACT_ORIGIN (f);
7293 if (!ovar || TREE_CODE (ovar) == FIELD_DECL)
7294 continue;
7295
7296 nvar = maybe_lookup_decl (ovar, ctx);
7297 if (!nvar || !DECL_HAS_VALUE_EXPR_P (nvar))
7298 continue;
7299
7300 /* If CTX is a nested parallel directive, find the immediately
7301 enclosing parallel or workshare construct that contains a
7302 mapping for OVAR.  */
7303 var = lookup_decl_in_outer_ctx (ovar, ctx);
7304
7305 t = omp_member_access_dummy_var (var);
7306 if (t)
7307 {
7308 var = DECL_VALUE_EXPR (var);
7309 tree o = maybe_lookup_decl_in_outer_ctx (t, ctx);
7310 if (o != t)
7311 var = unshare_and_remap (var, t, o);
7312 else
7313 var = unshare_expr (var);
7314 }
7315
7316 if (use_pointer_for_field (ovar, ctx))
7317 {
7318 x = build_sender_ref (ovar, ctx);
7319 if (TREE_CODE (TREE_TYPE (f)) == ARRAY_TYPE
7320 && TREE_TYPE (f) == TREE_TYPE (ovar))
7321 {
7322 gcc_assert (is_parallel_ctx (ctx)
7323 && DECL_ARTIFICIAL (ovar));
7324 /* _condtemp_ clause. */
7325 var = build_constructor (TREE_TYPE (x), NULL);
7326 }
7327 else
7328 var = build_fold_addr_expr (var);
7329 gimplify_assign (x, var, ilist);
7330 }
7331 else
7332 {
7333 x = build_sender_ref (ovar, ctx);
7334 gimplify_assign (x, var, ilist);
7335
7336 if (!TREE_READONLY (var)
7337 /* We don't need to receive a new reference to a result
7338 or parm decl.  In fact we may not store to it, as we would
7339 invalidate any pending RSO (return slot optimization) and
7340 generate wrong gimple during inlining.  */
7341 && !((TREE_CODE (var) == RESULT_DECL
7342 || TREE_CODE (var) == PARM_DECL)
7343 && DECL_BY_REFERENCE (var)))
7344 {
7345 x = build_sender_ref (ovar, ctx);
7346 gimplify_assign (var, x, olist);
7347 }
7348 }
7349 }
7350 }
7351
7352 /* Emit an OpenACC head marker call, encapsulating the partitioning and
7353 other information that must be processed by the target compiler.
7354 Return the maximum number of dimensions the associated loop might
7355 be partitioned over. */
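/* For illustration, the marker emitted below is roughly

     DDVAR = IFN_UNIQUE (OACC_HEAD_MARK, DDVAR, LEVELS, TAG [, GANG_STATIC]);

   matching the args vector assembled in the function body.  */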
7356
7357 static unsigned
7358 lower_oacc_head_mark (location_t loc, tree ddvar, tree clauses,
7359 gimple_seq *seq, omp_context *ctx)
7360 {
7361 unsigned levels = 0;
7362 unsigned tag = 0;
7363 tree gang_static = NULL_TREE;
7364 auto_vec<tree, 5> args;
7365
7366 args.quick_push (build_int_cst
7367 (integer_type_node, IFN_UNIQUE_OACC_HEAD_MARK));
7368 args.quick_push (ddvar);
7369 for (tree c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
7370 {
7371 switch (OMP_CLAUSE_CODE (c))
7372 {
7373 case OMP_CLAUSE_GANG:
7374 tag |= OLF_DIM_GANG;
7375 gang_static = OMP_CLAUSE_GANG_STATIC_EXPR (c);
7376 /* static:* is represented by -1, and we can ignore it, as
7377 scheduling is always static. */
7378 if (gang_static && integer_minus_onep (gang_static))
7379 gang_static = NULL_TREE;
7380 levels++;
7381 break;
7382
7383 case OMP_CLAUSE_WORKER:
7384 tag |= OLF_DIM_WORKER;
7385 levels++;
7386 break;
7387
7388 case OMP_CLAUSE_VECTOR:
7389 tag |= OLF_DIM_VECTOR;
7390 levels++;
7391 break;
7392
7393 case OMP_CLAUSE_SEQ:
7394 tag |= OLF_SEQ;
7395 break;
7396
7397 case OMP_CLAUSE_AUTO:
7398 tag |= OLF_AUTO;
7399 break;
7400
7401 case OMP_CLAUSE_INDEPENDENT:
7402 tag |= OLF_INDEPENDENT;
7403 break;
7404
7405 case OMP_CLAUSE_TILE:
7406 tag |= OLF_TILE;
7407 break;
7408
7409 default:
7410 continue;
7411 }
7412 }
7413
7414 if (gang_static)
7415 {
7416 if (DECL_P (gang_static))
7417 gang_static = build_outer_var_ref (gang_static, ctx);
7418 tag |= OLF_GANG_STATIC;
7419 }
7420
7421 /* In a parallel region, loops are implicitly INDEPENDENT. */
7422 omp_context *tgt = enclosing_target_ctx (ctx);
7423 if (!tgt || is_oacc_parallel (tgt))
7424 tag |= OLF_INDEPENDENT;
7425
7426 if (tag & OLF_TILE)
7427 /* Tiling could use all 3 levels. */
7428 levels = 3;
7429 else
7430 {
7431 /* A loop lacking SEQ, GANG, WORKER and/or VECTOR could be AUTO.
7432 Ensure at least one level, or 2 for possible auto
7433 partitioning.  */
7434 bool maybe_auto = !(tag & (((GOMP_DIM_MASK (GOMP_DIM_MAX) - 1)
7435 << OLF_DIM_BASE) | OLF_SEQ));
7436
7437 if (levels < 1u + maybe_auto)
7438 levels = 1u + maybe_auto;
7439 }
7440
7441 args.quick_push (build_int_cst (integer_type_node, levels));
7442 args.quick_push (build_int_cst (integer_type_node, tag));
7443 if (gang_static)
7444 args.quick_push (gang_static);
7445
7446 gcall *call = gimple_build_call_internal_vec (IFN_UNIQUE, args);
7447 gimple_set_location (call, loc);
7448 gimple_set_lhs (call, ddvar);
7449 gimple_seq_add_stmt (seq, call);
7450
7451 return levels;
7452 }
7453
7454 /* Emit an OpenACC loop head or tail marker to SEQ.  TOFOLLOW is the
7455 partitioning level of the enclosed region, if any.  */
7456
7457 static void
7458 lower_oacc_loop_marker (location_t loc, tree ddvar, bool head,
7459 tree tofollow, gimple_seq *seq)
7460 {
7461 int marker_kind = (head ? IFN_UNIQUE_OACC_HEAD_MARK
7462 : IFN_UNIQUE_OACC_TAIL_MARK);
7463 tree marker = build_int_cst (integer_type_node, marker_kind);
7464 int nargs = 2 + (tofollow != NULL_TREE);
7465 gcall *call = gimple_build_call_internal (IFN_UNIQUE, nargs,
7466 marker, ddvar, tofollow);
7467 gimple_set_location (call, loc);
7468 gimple_set_lhs (call, ddvar);
7469 gimple_seq_add_stmt (seq, call);
7470 }
7471
7472 /* Generate the before and after OpenACC loop sequences. CLAUSES are
7473 the loop clauses, from which we extract reductions. Initialize
7474 HEAD and TAIL. */
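/* A rough sketch of the result (the exact fork/join placement is
   decided later by the OpenACC device lowering):

     HEAD:  head-mark call; then per level, outermost first:
            [inner-level head marker,] reduction setup and an
            IFN_UNIQUE fork; final head marker.
     TAIL:  per level, innermost first: loop tail marker, IFN_UNIQUE
            join and reduction teardown; final tail marker.  */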
7475
7476 static void
7477 lower_oacc_head_tail (location_t loc, tree clauses,
7478 gimple_seq *head, gimple_seq *tail, omp_context *ctx)
7479 {
7480 bool inner = false;
7481 tree ddvar = create_tmp_var (integer_type_node, ".data_dep");
7482 gimple_seq_add_stmt (head, gimple_build_assign (ddvar, integer_zero_node));
7483
7484 unsigned count = lower_oacc_head_mark (loc, ddvar, clauses, head, ctx);
7485 tree fork_kind = build_int_cst (unsigned_type_node, IFN_UNIQUE_OACC_FORK);
7486 tree join_kind = build_int_cst (unsigned_type_node, IFN_UNIQUE_OACC_JOIN);
7487
7488 gcc_assert (count);
7489 for (unsigned done = 1; count; count--, done++)
7490 {
7491 gimple_seq fork_seq = NULL;
7492 gimple_seq join_seq = NULL;
7493
7494 tree place = build_int_cst (integer_type_node, -1);
7495 gcall *fork = gimple_build_call_internal (IFN_UNIQUE, 3,
7496 fork_kind, ddvar, place);
7497 gimple_set_location (fork, loc);
7498 gimple_set_lhs (fork, ddvar);
7499
7500 gcall *join = gimple_build_call_internal (IFN_UNIQUE, 3,
7501 join_kind, ddvar, place);
7502 gimple_set_location (join, loc);
7503 gimple_set_lhs (join, ddvar);
7504
7505 /* Mark the beginning of this level sequence. */
7506 if (inner)
7507 lower_oacc_loop_marker (loc, ddvar, true,
7508 build_int_cst (integer_type_node, count),
7509 &fork_seq);
7510 lower_oacc_loop_marker (loc, ddvar, false,
7511 build_int_cst (integer_type_node, done),
7512 &join_seq);
7513
7514 lower_oacc_reductions (loc, clauses, place, inner,
7515 fork, join, &fork_seq, &join_seq, ctx);
7516
7517 /* Append this level to head. */
7518 gimple_seq_add_seq (head, fork_seq);
7519 /* Prepend it to tail. */
7520 gimple_seq_add_seq (&join_seq, *tail);
7521 *tail = join_seq;
7522
7523 inner = true;
7524 }
7525
7526 /* Mark the end of the sequence. */
7527 lower_oacc_loop_marker (loc, ddvar, true, NULL_TREE, head);
7528 lower_oacc_loop_marker (loc, ddvar, false, NULL_TREE, tail);
7529 }
7530
7531 /* If exceptions are enabled, wrap the statements in BODY in a MUST_NOT_THROW
7532 catch handler and return it. This prevents programs from violating the
7533 structured block semantics with throws. */
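/* I.e. the returned sequence is roughly

     try { BODY } catch { <MUST_NOT_THROW: DECL> }

   where DECL is the language's eh_protect_cleanup_actions hook if it
   has one, and __builtin_trap otherwise.  */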
7534
7535 static gimple_seq
7536 maybe_catch_exception (gimple_seq body)
7537 {
7538 gimple *g;
7539 tree decl;
7540
7541 if (!flag_exceptions)
7542 return body;
7543
7544 if (lang_hooks.eh_protect_cleanup_actions != NULL)
7545 decl = lang_hooks.eh_protect_cleanup_actions ();
7546 else
7547 decl = builtin_decl_explicit (BUILT_IN_TRAP);
7548
7549 g = gimple_build_eh_must_not_throw (decl);
7550 g = gimple_build_try (body, gimple_seq_alloc_with_stmt (g),
7551 GIMPLE_TRY_CATCH);
7552
7553 return gimple_seq_alloc_with_stmt (g);
7554 }
7555
7556 \f
7557 /* Routines to lower OMP directives into OMP-GIMPLE. */
7558
7559 /* If CTX is a worksharing context inside of a cancellable parallel
7560 region and it isn't nowait, add a lhs to its GIMPLE_OMP_RETURN and a
7561 conditional branch to the parallel's cancel_label to handle
7562 cancellation in the implicit barrier.  */
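/* As a sketch, the check appended to BODY is

     if (<lhs of the GIMPLE_OMP_RETURN> != 0) goto <cancel_label>;
   <fallthru_label>:

   with the lhs typed after GOMP_cancel's boolean return type.  */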
7563
7564 static void
7565 maybe_add_implicit_barrier_cancel (omp_context *ctx, gimple *omp_return,
7566 gimple_seq *body)
7567 {
7568 gcc_assert (gimple_code (omp_return) == GIMPLE_OMP_RETURN);
7569 if (gimple_omp_return_nowait_p (omp_return))
7570 return;
7571 for (omp_context *outer = ctx->outer; outer; outer = outer->outer)
7572 if (gimple_code (outer->stmt) == GIMPLE_OMP_PARALLEL
7573 && outer->cancellable)
7574 {
7575 tree fndecl = builtin_decl_explicit (BUILT_IN_GOMP_CANCEL);
7576 tree c_bool_type = TREE_TYPE (TREE_TYPE (fndecl));
7577 tree lhs = create_tmp_var (c_bool_type);
7578 gimple_omp_return_set_lhs (omp_return, lhs);
7579 tree fallthru_label = create_artificial_label (UNKNOWN_LOCATION);
7580 gimple *g = gimple_build_cond (NE_EXPR, lhs,
7581 fold_convert (c_bool_type,
7582 boolean_false_node),
7583 outer->cancel_label, fallthru_label);
7584 gimple_seq_add_stmt (body, g);
7585 gimple_seq_add_stmt (body, gimple_build_label (fallthru_label));
7586 }
7587 else if (gimple_code (outer->stmt) != GIMPLE_OMP_TASKGROUP)
7588 return;
7589 }
7590
7591 /* Find the first task_reduction or reduction clause, or return NULL_TREE
7592 if there are none.  */
7593
7594 static inline tree
7595 omp_task_reductions_find_first (tree clauses, enum tree_code code,
7596 enum omp_clause_code ccode)
7597 {
7598 while (1)
7599 {
7600 clauses = omp_find_clause (clauses, ccode);
7601 if (clauses == NULL_TREE)
7602 return NULL_TREE;
7603 if (ccode != OMP_CLAUSE_REDUCTION
7604 || code == OMP_TASKLOOP
7605 || OMP_CLAUSE_REDUCTION_TASK (clauses))
7606 return clauses;
7607 clauses = OMP_CLAUSE_CHAIN (clauses);
7608 }
7609 }
7610
7611 static void lower_omp_task_reductions (omp_context *, enum tree_code, tree,
7612 gimple_seq *, gimple_seq *);
7613
7614 /* Lower the OpenMP sections directive in the current statement in GSI_P.
7615 CTX is the enclosing OMP context for the current statement. */
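/* The replacement assembled below has roughly this shape:

     <ilist: privatization and reduction-setup code>
     GIMPLE_OMP_SECTIONS <clauses, control variable .section>
     GIMPLE_OMP_SECTIONS_SWITCH
     <bind holding the lowered GIMPLE_OMP_SECTION bodies>
     GIMPLE_OMP_CONTINUE (.section, .section)
     <olist: reduction code>  <dlist: destructor code>
     GIMPLE_OMP_RETURN [nowait]  */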
7616
7617 static void
7618 lower_omp_sections (gimple_stmt_iterator *gsi_p, omp_context *ctx)
7619 {
7620 tree block, control;
7621 gimple_stmt_iterator tgsi;
7622 gomp_sections *stmt;
7623 gimple *t;
7624 gbind *new_stmt, *bind;
7625 gimple_seq ilist, dlist, olist, tred_dlist = NULL, clist = NULL, new_body;
7626
7627 stmt = as_a <gomp_sections *> (gsi_stmt (*gsi_p));
7628
7629 push_gimplify_context ();
7630
7631 dlist = NULL;
7632 ilist = NULL;
7633
7634 tree rclauses
7635 = omp_task_reductions_find_first (gimple_omp_sections_clauses (stmt),
7636 OMP_SECTIONS, OMP_CLAUSE_REDUCTION);
7637 tree rtmp = NULL_TREE;
7638 if (rclauses)
7639 {
7640 tree type = build_pointer_type (pointer_sized_int_node);
7641 tree temp = create_tmp_var (type);
7642 tree c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__REDUCTEMP_);
7643 OMP_CLAUSE_DECL (c) = temp;
7644 OMP_CLAUSE_CHAIN (c) = gimple_omp_sections_clauses (stmt);
7645 gimple_omp_sections_set_clauses (stmt, c);
7646 lower_omp_task_reductions (ctx, OMP_SECTIONS,
7647 gimple_omp_sections_clauses (stmt),
7648 &ilist, &tred_dlist);
7649 rclauses = c;
7650 rtmp = make_ssa_name (type);
7651 gimple_seq_add_stmt (&ilist, gimple_build_assign (rtmp, temp));
7652 }
7653
7654 tree *clauses_ptr = gimple_omp_sections_clauses_ptr (stmt);
7655 lower_lastprivate_conditional_clauses (clauses_ptr, ctx);
7656
7657 lower_rec_input_clauses (gimple_omp_sections_clauses (stmt),
7658 &ilist, &dlist, ctx, NULL);
7659
7660 control = create_tmp_var (unsigned_type_node, ".section");
7661 gimple_omp_sections_set_control (stmt, control);
7662
7663 new_body = gimple_omp_body (stmt);
7664 gimple_omp_set_body (stmt, NULL);
7665 tgsi = gsi_start (new_body);
7666 for (; !gsi_end_p (tgsi); gsi_next (&tgsi))
7667 {
7668 omp_context *sctx;
7669 gimple *sec_start;
7670
7671 sec_start = gsi_stmt (tgsi);
7672 sctx = maybe_lookup_ctx (sec_start);
7673 gcc_assert (sctx);
7674
7675 lower_omp (gimple_omp_body_ptr (sec_start), sctx);
7676 gsi_insert_seq_after (&tgsi, gimple_omp_body (sec_start),
7677 GSI_CONTINUE_LINKING);
7678 gimple_omp_set_body (sec_start, NULL);
7679
7680 if (gsi_one_before_end_p (tgsi))
7681 {
7682 gimple_seq l = NULL;
7683 lower_lastprivate_clauses (gimple_omp_sections_clauses (stmt), NULL,
7684 &ilist, &l, &clist, ctx);
7685 gsi_insert_seq_after (&tgsi, l, GSI_CONTINUE_LINKING);
7686 gimple_omp_section_set_last (sec_start);
7687 }
7688
7689 gsi_insert_after (&tgsi, gimple_build_omp_return (false),
7690 GSI_CONTINUE_LINKING);
7691 }
7692
7693 block = make_node (BLOCK);
7694 bind = gimple_build_bind (NULL, new_body, block);
7695
7696 olist = NULL;
7697 lower_reduction_clauses (gimple_omp_sections_clauses (stmt), &olist,
7698 &clist, ctx);
7699 if (clist)
7700 {
7701 tree fndecl = builtin_decl_explicit (BUILT_IN_GOMP_ATOMIC_START);
7702 gcall *g = gimple_build_call (fndecl, 0);
7703 gimple_seq_add_stmt (&olist, g);
7704 gimple_seq_add_seq (&olist, clist);
7705 fndecl = builtin_decl_explicit (BUILT_IN_GOMP_ATOMIC_END);
7706 g = gimple_build_call (fndecl, 0);
7707 gimple_seq_add_stmt (&olist, g);
7708 }
7709
7710 block = make_node (BLOCK);
7711 new_stmt = gimple_build_bind (NULL, NULL, block);
7712 gsi_replace (gsi_p, new_stmt, true);
7713
7714 pop_gimplify_context (new_stmt);
7715 gimple_bind_append_vars (new_stmt, ctx->block_vars);
7716 BLOCK_VARS (block) = gimple_bind_vars (bind);
7717 if (BLOCK_VARS (block))
7718 TREE_USED (block) = 1;
7719
7720 new_body = NULL;
7721 gimple_seq_add_seq (&new_body, ilist);
7722 gimple_seq_add_stmt (&new_body, stmt);
7723 gimple_seq_add_stmt (&new_body, gimple_build_omp_sections_switch ());
7724 gimple_seq_add_stmt (&new_body, bind);
7725
7726 t = gimple_build_omp_continue (control, control);
7727 gimple_seq_add_stmt (&new_body, t);
7728
7729 gimple_seq_add_seq (&new_body, olist);
7730 if (ctx->cancellable)
7731 gimple_seq_add_stmt (&new_body, gimple_build_label (ctx->cancel_label));
7732 gimple_seq_add_seq (&new_body, dlist);
7733
7734 new_body = maybe_catch_exception (new_body);
7735
7736 bool nowait = omp_find_clause (gimple_omp_sections_clauses (stmt),
7737 OMP_CLAUSE_NOWAIT) != NULL_TREE;
7738 t = gimple_build_omp_return (nowait);
7739 gimple_seq_add_stmt (&new_body, t);
7740 gimple_seq_add_seq (&new_body, tred_dlist);
7741 maybe_add_implicit_barrier_cancel (ctx, t, &new_body);
7742
7743 if (rclauses)
7744 OMP_CLAUSE_DECL (rclauses) = rtmp;
7745
7746 gimple_bind_set_body (new_stmt, new_body);
7747 }
7748
7749
7750 /* A subroutine of lower_omp_single. Expand the simple form of
7751 a GIMPLE_OMP_SINGLE, without a copyprivate clause:
7752
7753 if (GOMP_single_start ())
7754 BODY;
7755 [ GOMP_barrier (); ] -> unless 'nowait' is present.
7756
7757 FIXME.  It may be better to delay expanding this logic until
7758 pass_expand_omp.  The expanded logic may make the job more difficult
7759 for a synchronization analysis pass.  */
7760
7761 static void
7762 lower_omp_single_simple (gomp_single *single_stmt, gimple_seq *pre_p)
7763 {
7764 location_t loc = gimple_location (single_stmt);
7765 tree tlabel = create_artificial_label (loc);
7766 tree flabel = create_artificial_label (loc);
7767 gimple *call, *cond;
7768 tree lhs, decl;
7769
7770 decl = builtin_decl_explicit (BUILT_IN_GOMP_SINGLE_START);
7771 lhs = create_tmp_var (TREE_TYPE (TREE_TYPE (decl)));
7772 call = gimple_build_call (decl, 0);
7773 gimple_call_set_lhs (call, lhs);
7774 gimple_seq_add_stmt (pre_p, call);
7775
7776 cond = gimple_build_cond (EQ_EXPR, lhs,
7777 fold_convert_loc (loc, TREE_TYPE (lhs),
7778 boolean_true_node),
7779 tlabel, flabel);
7780 gimple_seq_add_stmt (pre_p, cond);
7781 gimple_seq_add_stmt (pre_p, gimple_build_label (tlabel));
7782 gimple_seq_add_seq (pre_p, gimple_omp_body (single_stmt));
7783 gimple_seq_add_stmt (pre_p, gimple_build_label (flabel));
7784 }
7785
7786
7787 /* A subroutine of lower_omp_single.  Expand the form of a
7788 GIMPLE_OMP_SINGLE that has a copyprivate clause:
7789
7790 #pragma omp single copyprivate (a, b, c)
7791
7792 Create a new structure to hold copies of 'a', 'b' and 'c' and emit:
7793
7794 {
7795 if ((copyout_p = GOMP_single_copy_start ()) == NULL)
7796 {
7797 BODY;
7798 copyout.a = a;
7799 copyout.b = b;
7800 copyout.c = c;
7801 GOMP_single_copy_end (&copyout);
7802 }
7803 else
7804 {
7805 a = copyout_p->a;
7806 b = copyout_p->b;
7807 c = copyout_p->c;
7808 }
7809 GOMP_barrier ();
7810 }
7811
7812 FIXME.  It may be better to delay expanding this logic until
7813 pass_expand_omp.  The expanded logic may make the job more difficult
7814 for a synchronization analysis pass.  */
7815
7816 static void
7817 lower_omp_single_copy (gomp_single *single_stmt, gimple_seq *pre_p,
7818 omp_context *ctx)
7819 {
7820 tree ptr_type, t, l0, l1, l2, bfn_decl;
7821 gimple_seq copyin_seq;
7822 location_t loc = gimple_location (single_stmt);
7823
7824 ctx->sender_decl = create_tmp_var (ctx->record_type, ".omp_copy_o");
7825
7826 ptr_type = build_pointer_type (ctx->record_type);
7827 ctx->receiver_decl = create_tmp_var (ptr_type, ".omp_copy_i");
7828
7829 l0 = create_artificial_label (loc);
7830 l1 = create_artificial_label (loc);
7831 l2 = create_artificial_label (loc);
7832
7833 bfn_decl = builtin_decl_explicit (BUILT_IN_GOMP_SINGLE_COPY_START);
7834 t = build_call_expr_loc (loc, bfn_decl, 0);
7835 t = fold_convert_loc (loc, ptr_type, t);
7836 gimplify_assign (ctx->receiver_decl, t, pre_p);
7837
7838 t = build2 (EQ_EXPR, boolean_type_node, ctx->receiver_decl,
7839 build_int_cst (ptr_type, 0));
7840 t = build3 (COND_EXPR, void_type_node, t,
7841 build_and_jump (&l0), build_and_jump (&l1));
7842 gimplify_and_add (t, pre_p);
7843
7844 gimple_seq_add_stmt (pre_p, gimple_build_label (l0));
7845
7846 gimple_seq_add_seq (pre_p, gimple_omp_body (single_stmt));
7847
7848 copyin_seq = NULL;
7849 lower_copyprivate_clauses (gimple_omp_single_clauses (single_stmt), pre_p,
7850 &copyin_seq, ctx);
7851
7852 t = build_fold_addr_expr_loc (loc, ctx->sender_decl);
7853 bfn_decl = builtin_decl_explicit (BUILT_IN_GOMP_SINGLE_COPY_END);
7854 t = build_call_expr_loc (loc, bfn_decl, 1, t);
7855 gimplify_and_add (t, pre_p);
7856
7857 t = build_and_jump (&l2);
7858 gimplify_and_add (t, pre_p);
7859
7860 gimple_seq_add_stmt (pre_p, gimple_build_label (l1));
7861
7862 gimple_seq_add_seq (pre_p, copyin_seq);
7863
7864 gimple_seq_add_stmt (pre_p, gimple_build_label (l2));
7865 }
7866
7867
7868 /* Expand code for an OpenMP single directive. */
7869
7870 static void
7871 lower_omp_single (gimple_stmt_iterator *gsi_p, omp_context *ctx)
7872 {
7873 tree block;
7874 gomp_single *single_stmt = as_a <gomp_single *> (gsi_stmt (*gsi_p));
7875 gbind *bind;
7876 gimple_seq bind_body, bind_body_tail = NULL, dlist;
7877
7878 push_gimplify_context ();
7879
7880 block = make_node (BLOCK);
7881 bind = gimple_build_bind (NULL, NULL, block);
7882 gsi_replace (gsi_p, bind, true);
7883 bind_body = NULL;
7884 dlist = NULL;
7885 lower_rec_input_clauses (gimple_omp_single_clauses (single_stmt),
7886 &bind_body, &dlist, ctx, NULL);
7887 lower_omp (gimple_omp_body_ptr (single_stmt), ctx);
7888
7889 gimple_seq_add_stmt (&bind_body, single_stmt);
7890
7891 if (ctx->record_type)
7892 lower_omp_single_copy (single_stmt, &bind_body, ctx);
7893 else
7894 lower_omp_single_simple (single_stmt, &bind_body);
7895
7896 gimple_omp_set_body (single_stmt, NULL);
7897
7898 gimple_seq_add_seq (&bind_body, dlist);
7899
7900 bind_body = maybe_catch_exception (bind_body);
7901
7902 bool nowait = omp_find_clause (gimple_omp_single_clauses (single_stmt),
7903 OMP_CLAUSE_NOWAIT) != NULL_TREE;
7904 gimple *g = gimple_build_omp_return (nowait);
7905 gimple_seq_add_stmt (&bind_body_tail, g);
7906 maybe_add_implicit_barrier_cancel (ctx, g, &bind_body_tail);
7907 if (ctx->record_type)
7908 {
7909 gimple_stmt_iterator gsi = gsi_start (bind_body_tail);
7910 tree clobber = build_clobber (ctx->record_type);
7911 gsi_insert_after (&gsi, gimple_build_assign (ctx->sender_decl,
7912 clobber), GSI_SAME_STMT);
7913 }
7914 gimple_seq_add_seq (&bind_body, bind_body_tail);
7915 gimple_bind_set_body (bind, bind_body);
7916
7917 pop_gimplify_context (bind);
7918
7919 gimple_bind_append_vars (bind, ctx->block_vars);
7920 BLOCK_VARS (block) = ctx->block_vars;
7921 if (BLOCK_VARS (block))
7922 TREE_USED (block) = 1;
7923 }
7924
7925
7926 /* Expand code for an OpenMP master directive. */
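/* Sketch of the lowering: the master check becomes

     if (omp_get_thread_num () != 0) goto <lab>;
     BODY;
   <lab>:

   followed by a nowait GIMPLE_OMP_RETURN.  */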
7927
7928 static void
7929 lower_omp_master (gimple_stmt_iterator *gsi_p, omp_context *ctx)
7930 {
7931 tree block, lab = NULL, x, bfn_decl;
7932 gimple *stmt = gsi_stmt (*gsi_p);
7933 gbind *bind;
7934 location_t loc = gimple_location (stmt);
7935 gimple_seq tseq;
7936
7937 push_gimplify_context ();
7938
7939 block = make_node (BLOCK);
7940 bind = gimple_build_bind (NULL, NULL, block);
7941 gsi_replace (gsi_p, bind, true);
7942 gimple_bind_add_stmt (bind, stmt);
7943
7944 bfn_decl = builtin_decl_explicit (BUILT_IN_OMP_GET_THREAD_NUM);
7945 x = build_call_expr_loc (loc, bfn_decl, 0);
7946 x = build2 (EQ_EXPR, boolean_type_node, x, integer_zero_node);
7947 x = build3 (COND_EXPR, void_type_node, x, NULL, build_and_jump (&lab));
7948 tseq = NULL;
7949 gimplify_and_add (x, &tseq);
7950 gimple_bind_add_seq (bind, tseq);
7951
7952 lower_omp (gimple_omp_body_ptr (stmt), ctx);
7953 gimple_omp_set_body (stmt, maybe_catch_exception (gimple_omp_body (stmt)));
7954 gimple_bind_add_seq (bind, gimple_omp_body (stmt));
7955 gimple_omp_set_body (stmt, NULL);
7956
7957 gimple_bind_add_stmt (bind, gimple_build_label (lab));
7958
7959 gimple_bind_add_stmt (bind, gimple_build_omp_return (true));
7960
7961 pop_gimplify_context (bind);
7962
7963 gimple_bind_append_vars (bind, ctx->block_vars);
7964 BLOCK_VARS (block) = ctx->block_vars;
7965 }
7966
7967 /* Helper function for lower_omp_task_reductions.  For a specific PASS,
7968 find the next clause that should be processed, or return false if all
7969 have been processed already.  */
7970
7971 static inline bool
7972 omp_task_reduction_iterate (int pass, enum tree_code code,
7973 enum omp_clause_code ccode, tree *c, tree *decl,
7974 tree *type, tree *next)
7975 {
7976 for (; *c; *c = omp_find_clause (OMP_CLAUSE_CHAIN (*c), ccode))
7977 {
7978 if (ccode == OMP_CLAUSE_REDUCTION
7979 && code != OMP_TASKLOOP
7980 && !OMP_CLAUSE_REDUCTION_TASK (*c))
7981 continue;
7982 *decl = OMP_CLAUSE_DECL (*c);
7983 *type = TREE_TYPE (*decl);
7984 if (TREE_CODE (*decl) == MEM_REF)
7985 {
7986 if (pass != 1)
7987 continue;
7988 }
7989 else
7990 {
7991 if (omp_is_reference (*decl))
7992 *type = TREE_TYPE (*type);
7993 if (pass != (!TREE_CONSTANT (TYPE_SIZE_UNIT (*type))))
7994 continue;
7995 }
7996 *next = omp_find_clause (OMP_CLAUSE_CHAIN (*c), ccode);
7997 return true;
7998 }
7999 *decl = NULL_TREE;
8000 *type = NULL_TREE;
8001 *next = NULL_TREE;
8002 return false;
8003 }
8004
8005 /* Lower task_reduction and reduction clauses (the latter, unless CODE is
8006 OMP_TASKGROUP, only those with the task modifier).  Register mappings for
8007 them in the START sequence; reduce and unregister them in the END sequence.  */
8008
8009 static void
8010 lower_omp_task_reductions (omp_context *ctx, enum tree_code code, tree clauses,
8011 gimple_seq *start, gimple_seq *end)
8012 {
8013 enum omp_clause_code ccode
8014 = (code == OMP_TASKGROUP
8015 ? OMP_CLAUSE_TASK_REDUCTION : OMP_CLAUSE_REDUCTION);
8016 tree cancellable = NULL_TREE;
8017 clauses = omp_task_reductions_find_first (clauses, code, ccode);
8018 if (clauses == NULL_TREE)
8019 return;
8020 if (code == OMP_FOR || code == OMP_SECTIONS)
8021 {
8022 for (omp_context *outer = ctx->outer; outer; outer = outer->outer)
8023 if (gimple_code (outer->stmt) == GIMPLE_OMP_PARALLEL
8024 && outer->cancellable)
8025 {
8026 cancellable = error_mark_node;
8027 break;
8028 }
8029 else if (gimple_code (outer->stmt) != GIMPLE_OMP_TASKGROUP)
8030 break;
8031 }
8032 tree record_type = lang_hooks.types.make_type (RECORD_TYPE);
8033 tree *last = &TYPE_FIELDS (record_type);
8034 unsigned cnt = 0;
8035 if (cancellable)
8036 {
8037 tree field = build_decl (UNKNOWN_LOCATION, FIELD_DECL, NULL_TREE,
8038 ptr_type_node);
8039 tree ifield = build_decl (UNKNOWN_LOCATION, FIELD_DECL, NULL_TREE,
8040 integer_type_node);
8041 *last = field;
8042 DECL_CHAIN (field) = ifield;
8043 last = &DECL_CHAIN (ifield);
8044 DECL_CONTEXT (field) = record_type;
8045 if (TYPE_ALIGN (record_type) < DECL_ALIGN (field))
8046 SET_TYPE_ALIGN (record_type, DECL_ALIGN (field));
8047 DECL_CONTEXT (ifield) = record_type;
8048 if (TYPE_ALIGN (record_type) < DECL_ALIGN (ifield))
8049 SET_TYPE_ALIGN (record_type, DECL_ALIGN (ifield));
8050 }
8051 for (int pass = 0; pass < 2; pass++)
8052 {
8053 tree decl, type, next;
8054 for (tree c = clauses;
8055 omp_task_reduction_iterate (pass, code, ccode,
8056 &c, &decl, &type, &next); c = next)
8057 {
8058 ++cnt;
8059 tree new_type = type;
8060 if (ctx->outer)
8061 new_type = remap_type (type, &ctx->outer->cb);
8062 tree field
8063 = build_decl (OMP_CLAUSE_LOCATION (c), FIELD_DECL,
8064 DECL_P (decl) ? DECL_NAME (decl) : NULL_TREE,
8065 new_type);
8066 if (DECL_P (decl) && type == TREE_TYPE (decl))
8067 {
8068 SET_DECL_ALIGN (field, DECL_ALIGN (decl));
8069 DECL_USER_ALIGN (field) = DECL_USER_ALIGN (decl);
8070 TREE_THIS_VOLATILE (field) = TREE_THIS_VOLATILE (decl);
8071 }
8072 else
8073 SET_DECL_ALIGN (field, TYPE_ALIGN (type));
8074 DECL_CONTEXT (field) = record_type;
8075 if (TYPE_ALIGN (record_type) < DECL_ALIGN (field))
8076 SET_TYPE_ALIGN (record_type, DECL_ALIGN (field));
8077 *last = field;
8078 last = &DECL_CHAIN (field);
8079 tree bfield
8080 = build_decl (OMP_CLAUSE_LOCATION (c), FIELD_DECL, NULL_TREE,
8081 boolean_type_node);
8082 DECL_CONTEXT (bfield) = record_type;
8083 if (TYPE_ALIGN (record_type) < DECL_ALIGN (bfield))
8084 SET_TYPE_ALIGN (record_type, DECL_ALIGN (bfield));
8085 *last = bfield;
8086 last = &DECL_CHAIN (bfield);
8087 }
8088 }
8089 *last = NULL_TREE;
8090 layout_type (record_type);
8091
8092 /* Build up an array which registers with the runtime all the reductions
8093 and deregisters them at the end. Format documented in libgomp/task.c. */
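  /* A sketch of the layout as initialized below (libgomp/task.c remains
     the authoritative description; slots not listed here are left to
     the runtime):
       [0]            number of reductions (CNT)
       [1]            cache-line rounded size of one per-thread block
       [2]            alignment, at least the cache line size
       [3]            initialized to -1
       [4]            initialized to 0
       [7 + 3*I]      address of the I-th original reduction variable
       [7 + 3*I + 1]  byte offset of its field inside a per-thread block  */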
8094 tree atype = build_array_type_nelts (pointer_sized_int_node, 7 + cnt * 3);
8095 tree avar = create_tmp_var_raw (atype);
8096 gimple_add_tmp_var (avar);
8097 TREE_ADDRESSABLE (avar) = 1;
8098 tree r = build4 (ARRAY_REF, pointer_sized_int_node, avar, size_zero_node,
8099 NULL_TREE, NULL_TREE);
8100 tree t = build_int_cst (pointer_sized_int_node, cnt);
8101 gimple_seq_add_stmt (start, gimple_build_assign (r, t));
8102 gimple_seq seq = NULL;
8103 tree sz = fold_convert (pointer_sized_int_node,
8104 TYPE_SIZE_UNIT (record_type));
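  /* Illustrative note: the next statements round SZ up to a multiple of
     the cache line size, i.e. SZ = (SZ + 63) & ~63, so e.g. a 100-byte
     record occupies 128 bytes per thread.  */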
8105 int cachesz = 64;
8106 sz = fold_build2 (PLUS_EXPR, pointer_sized_int_node, sz,
8107 build_int_cst (pointer_sized_int_node, cachesz - 1));
8108 sz = fold_build2 (BIT_AND_EXPR, pointer_sized_int_node, sz,
8109 build_int_cst (pointer_sized_int_node, ~(cachesz - 1)));
8110 ctx->task_reductions.create (1 + cnt);
8111 ctx->task_reduction_map = new hash_map<tree, unsigned>;
8112 ctx->task_reductions.quick_push (TREE_CODE (sz) == INTEGER_CST
8113 ? sz : NULL_TREE);
8114 sz = force_gimple_operand (sz, &seq, true, NULL_TREE);
8115 gimple_seq_add_seq (start, seq);
8116 r = build4 (ARRAY_REF, pointer_sized_int_node, avar, size_one_node,
8117 NULL_TREE, NULL_TREE);
8118 gimple_seq_add_stmt (start, gimple_build_assign (r, sz));
8119 r = build4 (ARRAY_REF, pointer_sized_int_node, avar, size_int (2),
8120 NULL_TREE, NULL_TREE);
8121 t = build_int_cst (pointer_sized_int_node,
8122 MAX (TYPE_ALIGN_UNIT (record_type), (unsigned) cachesz));
8123 gimple_seq_add_stmt (start, gimple_build_assign (r, t));
8124 r = build4 (ARRAY_REF, pointer_sized_int_node, avar, size_int (3),
8125 NULL_TREE, NULL_TREE);
8126 t = build_int_cst (pointer_sized_int_node, -1);
8127 gimple_seq_add_stmt (start, gimple_build_assign (r, t));
8128 r = build4 (ARRAY_REF, pointer_sized_int_node, avar, size_int (4),
8129 NULL_TREE, NULL_TREE);
8130 t = build_int_cst (pointer_sized_int_node, 0);
8131 gimple_seq_add_stmt (start, gimple_build_assign (r, t));
8132
8133 /* In END, build a loop that iterates from 0 to omp_get_num_threads () - 1
8134 and for each task reduction checks a bool right after the private variable
8135 within that thread's chunk; if the bool is clear, it hasn't been
8136 initialized and thus isn't going to be reduced nor destructed, otherwise
8137 reduce and destruct it. */
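  /* As a sketch, the code generated into END below is roughly

       for (idx = 0; idx < num_thr_sz; idx++, data += sz)
         for each reduction clause C:
           if (the bool next to C's private copy in this chunk is set)
             { merge the private copy into the original; destruct it; }  */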
8138 tree idx = create_tmp_var (size_type_node);
8139 gimple_seq_add_stmt (end, gimple_build_assign (idx, size_zero_node));
8140 tree num_thr_sz = create_tmp_var (size_type_node);
8141 tree lab1 = create_artificial_label (UNKNOWN_LOCATION);
8142 tree lab2 = create_artificial_label (UNKNOWN_LOCATION);
8143 tree lab3 = NULL_TREE;
8144 gimple *g;
8145 if (code == OMP_FOR || code == OMP_SECTIONS)
8146 {
8147 /* For worksharing constructs, only perform the reductions in the master
8148 thread, with the exception of cancelled implicit barriers - then only
8149 handle the current thread.  */
8150 tree lab4 = create_artificial_label (UNKNOWN_LOCATION);
8151 t = builtin_decl_explicit (BUILT_IN_OMP_GET_THREAD_NUM);
8152 tree thr_num = create_tmp_var (integer_type_node);
8153 g = gimple_build_call (t, 0);
8154 gimple_call_set_lhs (g, thr_num);
8155 gimple_seq_add_stmt (end, g);
8156 if (cancellable)
8157 {
8158 tree c;
8159 tree lab5 = create_artificial_label (UNKNOWN_LOCATION);
8160 tree lab6 = create_artificial_label (UNKNOWN_LOCATION);
8161 lab3 = create_artificial_label (UNKNOWN_LOCATION);
8162 if (code == OMP_FOR)
8163 c = gimple_omp_for_clauses (ctx->stmt);
8164 else /* if (code == OMP_SECTIONS) */
8165 c = gimple_omp_sections_clauses (ctx->stmt);
8166 c = OMP_CLAUSE_DECL (omp_find_clause (c, OMP_CLAUSE__REDUCTEMP_));
8167 cancellable = c;
8168 g = gimple_build_cond (NE_EXPR, c, build_zero_cst (TREE_TYPE (c)),
8169 lab5, lab6);
8170 gimple_seq_add_stmt (end, g);
8171 gimple_seq_add_stmt (end, gimple_build_label (lab5));
8172 g = gimple_build_assign (idx, NOP_EXPR, thr_num);
8173 gimple_seq_add_stmt (end, g);
8174 g = gimple_build_assign (num_thr_sz, PLUS_EXPR, idx,
8175 build_one_cst (TREE_TYPE (idx)));
8176 gimple_seq_add_stmt (end, g);
8177 gimple_seq_add_stmt (end, gimple_build_goto (lab3));
8178 gimple_seq_add_stmt (end, gimple_build_label (lab6));
8179 }
8180 g = gimple_build_cond (NE_EXPR, thr_num, integer_zero_node, lab2, lab4);
8181 gimple_seq_add_stmt (end, g);
8182 gimple_seq_add_stmt (end, gimple_build_label (lab4));
8183 }
8184 if (code != OMP_PARALLEL)
8185 {
8186 t = builtin_decl_explicit (BUILT_IN_OMP_GET_NUM_THREADS);
8187 tree num_thr = create_tmp_var (integer_type_node);
8188 g = gimple_build_call (t, 0);
8189 gimple_call_set_lhs (g, num_thr);
8190 gimple_seq_add_stmt (end, g);
8191 g = gimple_build_assign (num_thr_sz, NOP_EXPR, num_thr);
8192 gimple_seq_add_stmt (end, g);
8193 if (cancellable)
8194 gimple_seq_add_stmt (end, gimple_build_label (lab3));
8195 }
8196 else
8197 {
8198 tree c = omp_find_clause (gimple_omp_parallel_clauses (ctx->stmt),
8199 OMP_CLAUSE__REDUCTEMP_);
8200 t = fold_convert (pointer_sized_int_node, OMP_CLAUSE_DECL (c));
8201 t = fold_convert (size_type_node, t);
8202 gimplify_assign (num_thr_sz, t, end);
8203 }
8204 t = build4 (ARRAY_REF, pointer_sized_int_node, avar, size_int (2),
8205 NULL_TREE, NULL_TREE);
8206 tree data = create_tmp_var (pointer_sized_int_node);
8207 gimple_seq_add_stmt (end, gimple_build_assign (data, t));
8208 gimple_seq_add_stmt (end, gimple_build_label (lab1));
8209 tree ptr;
8210 if (TREE_CODE (TYPE_SIZE_UNIT (record_type)) == INTEGER_CST)
8211 ptr = create_tmp_var (build_pointer_type (record_type));
8212 else
8213 ptr = create_tmp_var (ptr_type_node);
8214 gimple_seq_add_stmt (end, gimple_build_assign (ptr, NOP_EXPR, data));
8215
8216 tree field = TYPE_FIELDS (record_type);
8217 cnt = 0;
8218 if (cancellable)
8219 field = DECL_CHAIN (DECL_CHAIN (field));
8220 for (int pass = 0; pass < 2; pass++)
8221 {
8222 tree decl, type, next;
8223 for (tree c = clauses;
8224 omp_task_reduction_iterate (pass, code, ccode,
8225 &c, &decl, &type, &next); c = next)
8226 {
8227 tree var = decl, ref;
8228 if (TREE_CODE (decl) == MEM_REF)
8229 {
8230 var = TREE_OPERAND (var, 0);
8231 if (TREE_CODE (var) == POINTER_PLUS_EXPR)
8232 var = TREE_OPERAND (var, 0);
8233 tree v = var;
8234 if (TREE_CODE (var) == ADDR_EXPR)
8235 var = TREE_OPERAND (var, 0);
8236 else if (TREE_CODE (var) == INDIRECT_REF)
8237 var = TREE_OPERAND (var, 0);
8238 tree orig_var = var;
8239 if (is_variable_sized (var))
8240 {
8241 gcc_assert (DECL_HAS_VALUE_EXPR_P (var));
8242 var = DECL_VALUE_EXPR (var);
8243 gcc_assert (TREE_CODE (var) == INDIRECT_REF);
8244 var = TREE_OPERAND (var, 0);
8245 gcc_assert (DECL_P (var));
8246 }
8247 t = ref = maybe_lookup_decl_in_outer_ctx (var, ctx);
8248 if (orig_var != var)
8249 gcc_assert (TREE_CODE (v) == ADDR_EXPR);
8250 else if (TREE_CODE (v) == ADDR_EXPR)
8251 t = build_fold_addr_expr (t);
8252 else if (TREE_CODE (v) == INDIRECT_REF)
8253 t = build_fold_indirect_ref (t);
8254 if (TREE_CODE (TREE_OPERAND (decl, 0)) == POINTER_PLUS_EXPR)
8255 {
8256 tree b = TREE_OPERAND (TREE_OPERAND (decl, 0), 1);
8257 b = maybe_lookup_decl_in_outer_ctx (b, ctx);
8258 t = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (t), t, b);
8259 }
8260 if (!integer_zerop (TREE_OPERAND (decl, 1)))
8261 t = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (t), t,
8262 fold_convert (size_type_node,
8263 TREE_OPERAND (decl, 1)));
8264 }
8265 else
8266 {
8267 t = ref = maybe_lookup_decl_in_outer_ctx (var, ctx);
8268 if (!omp_is_reference (decl))
8269 t = build_fold_addr_expr (t);
8270 }
8271 t = fold_convert (pointer_sized_int_node, t);
8272 seq = NULL;
8273 t = force_gimple_operand (t, &seq, true, NULL_TREE);
8274 gimple_seq_add_seq (start, seq);
8275 r = build4 (ARRAY_REF, pointer_sized_int_node, avar,
8276 size_int (7 + cnt * 3), NULL_TREE, NULL_TREE);
8277 gimple_seq_add_stmt (start, gimple_build_assign (r, t));
8278 t = unshare_expr (byte_position (field));
8279 t = fold_convert (pointer_sized_int_node, t);
8280 ctx->task_reduction_map->put (c, cnt);
8281 ctx->task_reductions.quick_push (TREE_CODE (t) == INTEGER_CST
8282 ? t : NULL_TREE);
8283 seq = NULL;
8284 t = force_gimple_operand (t, &seq, true, NULL_TREE);
8285 gimple_seq_add_seq (start, seq);
8286 r = build4 (ARRAY_REF, pointer_sized_int_node, avar,
8287 size_int (7 + cnt * 3 + 1), NULL_TREE, NULL_TREE);
8288 gimple_seq_add_stmt (start, gimple_build_assign (r, t));
8289
8290 tree bfield = DECL_CHAIN (field);
8291 tree cond;
8292 if (code == OMP_PARALLEL || code == OMP_FOR || code == OMP_SECTIONS)
8293 /* In parallel and worksharing constructs, all threads unconditionally
8294 initialize all their task reduction private variables.  */
8295 cond = boolean_true_node;
8296 else if (TREE_TYPE (ptr) == ptr_type_node)
8297 {
8298 cond = build2 (POINTER_PLUS_EXPR, ptr_type_node, ptr,
8299 unshare_expr (byte_position (bfield)));
8300 seq = NULL;
8301 cond = force_gimple_operand (cond, &seq, true, NULL_TREE);
8302 gimple_seq_add_seq (end, seq);
8303 tree pbool = build_pointer_type (TREE_TYPE (bfield));
8304 cond = build2 (MEM_REF, TREE_TYPE (bfield), cond,
8305 build_int_cst (pbool, 0));
8306 }
8307 else
8308 cond = build3 (COMPONENT_REF, TREE_TYPE (bfield),
8309 build_simple_mem_ref (ptr), bfield, NULL_TREE);
8310 tree lab3 = create_artificial_label (UNKNOWN_LOCATION);
8311 tree lab4 = create_artificial_label (UNKNOWN_LOCATION);
8312 tree condv = create_tmp_var (boolean_type_node);
8313 gimple_seq_add_stmt (end, gimple_build_assign (condv, cond));
8314 g = gimple_build_cond (NE_EXPR, condv, boolean_false_node,
8315 lab3, lab4);
8316 gimple_seq_add_stmt (end, g);
8317 gimple_seq_add_stmt (end, gimple_build_label (lab3));
8318 if (cancellable && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) == NULL_TREE)
8319 {
8320 /* If this reduction doesn't need destruction and parallel
8321 has been cancelled, there is nothing to do for this
8322 reduction, so jump around the merge operation. */
8323 tree lab5 = create_artificial_label (UNKNOWN_LOCATION);
8324 g = gimple_build_cond (NE_EXPR, cancellable,
8325 build_zero_cst (TREE_TYPE (cancellable)),
8326 lab4, lab5);
8327 gimple_seq_add_stmt (end, g);
8328 gimple_seq_add_stmt (end, gimple_build_label (lab5));
8329 }
8330
8331 tree new_var;
8332 if (TREE_TYPE (ptr) == ptr_type_node)
8333 {
8334 new_var = build2 (POINTER_PLUS_EXPR, ptr_type_node, ptr,
8335 unshare_expr (byte_position (field)));
8336 seq = NULL;
8337 new_var = force_gimple_operand (new_var, &seq, true, NULL_TREE);
8338 gimple_seq_add_seq (end, seq);
8339 tree pbool = build_pointer_type (TREE_TYPE (field));
8340 new_var = build2 (MEM_REF, TREE_TYPE (field), new_var,
8341 build_int_cst (pbool, 0));
8342 }
8343 else
8344 new_var = build3 (COMPONENT_REF, TREE_TYPE (field),
8345 build_simple_mem_ref (ptr), field, NULL_TREE);
8346
8347 enum tree_code rcode = OMP_CLAUSE_REDUCTION_CODE (c);
8348 if (TREE_CODE (decl) != MEM_REF && omp_is_reference (decl))
8349 ref = build_simple_mem_ref (ref);
8350 /* reduction(-:var) sums up the partial results, so it acts
8351 identically to reduction(+:var). */
8352 if (rcode == MINUS_EXPR)
8353 rcode = PLUS_EXPR;
8354 if (TREE_CODE (decl) == MEM_REF)
8355 {
8356 tree type = TREE_TYPE (new_var);
8357 tree v = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
8358 tree i = create_tmp_var (TREE_TYPE (v));
8359 tree ptype = build_pointer_type (TREE_TYPE (type));
8360 if (DECL_P (v))
8361 {
8362 v = maybe_lookup_decl_in_outer_ctx (v, ctx);
8363 tree vv = create_tmp_var (TREE_TYPE (v));
8364 gimplify_assign (vv, v, start);
8365 v = vv;
8366 }
8367 ref = build4 (ARRAY_REF, pointer_sized_int_node, avar,
8368 size_int (7 + cnt * 3), NULL_TREE, NULL_TREE);
8369 new_var = build_fold_addr_expr (new_var);
8370 new_var = fold_convert (ptype, new_var);
8371 ref = fold_convert (ptype, ref);
8372 tree m = create_tmp_var (ptype);
8373 gimplify_assign (m, new_var, end);
8374 new_var = m;
8375 m = create_tmp_var (ptype);
8376 gimplify_assign (m, ref, end);
8377 ref = m;
8378 gimplify_assign (i, build_int_cst (TREE_TYPE (v), 0), end);
8379 tree body = create_artificial_label (UNKNOWN_LOCATION);
8380 tree endl = create_artificial_label (UNKNOWN_LOCATION);
8381 gimple_seq_add_stmt (end, gimple_build_label (body));
8382 tree priv = build_simple_mem_ref (new_var);
8383 tree out = build_simple_mem_ref (ref);
8384 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
8385 {
8386 tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
8387 tree decl_placeholder
8388 = OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c);
8389 tree lab6 = NULL_TREE;
8390 if (cancellable)
8391 {
8392 /* If this reduction needs destruction and parallel
8393 has been cancelled, jump around the merge operation
8394 to the destruction. */
8395 tree lab5 = create_artificial_label (UNKNOWN_LOCATION);
8396 lab6 = create_artificial_label (UNKNOWN_LOCATION);
8397 tree zero = build_zero_cst (TREE_TYPE (cancellable));
8398 g = gimple_build_cond (NE_EXPR, cancellable, zero,
8399 lab6, lab5);
8400 gimple_seq_add_stmt (end, g);
8401 gimple_seq_add_stmt (end, gimple_build_label (lab5));
8402 }
8403 SET_DECL_VALUE_EXPR (placeholder, out);
8404 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
8405 SET_DECL_VALUE_EXPR (decl_placeholder, priv);
8406 DECL_HAS_VALUE_EXPR_P (decl_placeholder) = 1;
8407 lower_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c), ctx);
8408 gimple_seq_add_seq (end,
8409 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c));
8410 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
8411 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_TASK_REDUCTION)
8412 {
8413 OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) = NULL;
8414 OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c) = NULL;
8415 }
8416 if (cancellable)
8417 gimple_seq_add_stmt (end, gimple_build_label (lab6));
8418 tree x = lang_hooks.decls.omp_clause_dtor (c, priv);
8419 if (x)
8420 {
8421 gimple_seq tseq = NULL;
8422 gimplify_stmt (&x, &tseq);
8423 gimple_seq_add_seq (end, tseq);
8424 }
8425 }
8426 else
8427 {
8428 tree x = build2 (rcode, TREE_TYPE (out), out, priv);
8429 out = unshare_expr (out);
8430 gimplify_assign (out, x, end);
8431 }
8432 gimple *g
8433 = gimple_build_assign (new_var, POINTER_PLUS_EXPR, new_var,
8434 TYPE_SIZE_UNIT (TREE_TYPE (type)));
8435 gimple_seq_add_stmt (end, g);
8436 g = gimple_build_assign (ref, POINTER_PLUS_EXPR, ref,
8437 TYPE_SIZE_UNIT (TREE_TYPE (type)));
8438 gimple_seq_add_stmt (end, g);
8439 g = gimple_build_assign (i, PLUS_EXPR, i,
8440 build_int_cst (TREE_TYPE (i), 1));
8441 gimple_seq_add_stmt (end, g);
8442 g = gimple_build_cond (LE_EXPR, i, v, body, endl);
8443 gimple_seq_add_stmt (end, g);
8444 gimple_seq_add_stmt (end, gimple_build_label (endl));
8445 }
8446 else if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
8447 {
8448 tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
8449 tree oldv = NULL_TREE;
8450 tree lab6 = NULL_TREE;
8451 if (cancellable)
8452 {
8453 /* If this reduction needs destruction and parallel
8454 has been cancelled, jump around the merge operation
8455 to the destruction. */
8456 tree lab5 = create_artificial_label (UNKNOWN_LOCATION);
8457 lab6 = create_artificial_label (UNKNOWN_LOCATION);
8458 tree zero = build_zero_cst (TREE_TYPE (cancellable));
8459 g = gimple_build_cond (NE_EXPR, cancellable, zero,
8460 lab6, lab5);
8461 gimple_seq_add_stmt (end, g);
8462 gimple_seq_add_stmt (end, gimple_build_label (lab5));
8463 }
8464 if (omp_is_reference (decl)
8465 && !useless_type_conversion_p (TREE_TYPE (placeholder),
8466 TREE_TYPE (ref)))
8467 ref = build_fold_addr_expr_loc (OMP_CLAUSE_LOCATION (c), ref);
8468 ref = build_fold_addr_expr_loc (OMP_CLAUSE_LOCATION (c), ref);
8469 tree refv = create_tmp_var (TREE_TYPE (ref));
8470 gimplify_assign (refv, ref, end);
8471 ref = build_simple_mem_ref_loc (OMP_CLAUSE_LOCATION (c), refv);
8472 SET_DECL_VALUE_EXPR (placeholder, ref);
8473 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
8474 tree d = maybe_lookup_decl (decl, ctx);
8475 gcc_assert (d);
8476 if (DECL_HAS_VALUE_EXPR_P (d))
8477 oldv = DECL_VALUE_EXPR (d);
8478 if (omp_is_reference (var))
8479 {
8480 tree v = fold_convert (TREE_TYPE (d),
8481 build_fold_addr_expr (new_var));
8482 SET_DECL_VALUE_EXPR (d, v);
8483 }
8484 else
8485 SET_DECL_VALUE_EXPR (d, new_var);
8486 DECL_HAS_VALUE_EXPR_P (d) = 1;
8487 lower_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c), ctx);
8488 if (oldv)
8489 SET_DECL_VALUE_EXPR (d, oldv);
8490 else
8491 {
8492 SET_DECL_VALUE_EXPR (d, NULL_TREE);
8493 DECL_HAS_VALUE_EXPR_P (d) = 0;
8494 }
8495 gimple_seq_add_seq (end, OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c));
8496 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
8497 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_TASK_REDUCTION)
8498 OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) = NULL;
8499 if (cancellable)
8500 gimple_seq_add_stmt (end, gimple_build_label (lab6));
8501 tree x = lang_hooks.decls.omp_clause_dtor (c, new_var);
8502 if (x)
8503 {
8504 gimple_seq tseq = NULL;
8505 gimplify_stmt (&x, &tseq);
8506 gimple_seq_add_seq (end, tseq);
8507 }
8508 }
8509 else
8510 {
8511 tree x = build2 (rcode, TREE_TYPE (ref), ref, new_var);
8512 ref = unshare_expr (ref);
8513 gimplify_assign (ref, x, end);
8514 }
8515 gimple_seq_add_stmt (end, gimple_build_label (lab4));
8516 ++cnt;
8517 field = DECL_CHAIN (bfield);
8518 }
8519 }
8520
8521 if (code == OMP_TASKGROUP)
8522 {
8523 t = builtin_decl_explicit (BUILT_IN_GOMP_TASKGROUP_REDUCTION_REGISTER);
8524 g = gimple_build_call (t, 1, build_fold_addr_expr (avar));
8525 gimple_seq_add_stmt (start, g);
8526 }
8527 else
8528 {
8529 tree c;
8530 if (code == OMP_FOR)
8531 c = gimple_omp_for_clauses (ctx->stmt);
8532 else if (code == OMP_SECTIONS)
8533 c = gimple_omp_sections_clauses (ctx->stmt);
8534 else
8535 c = gimple_omp_taskreg_clauses (ctx->stmt);
8536 c = omp_find_clause (c, OMP_CLAUSE__REDUCTEMP_);
8537 t = fold_convert (TREE_TYPE (OMP_CLAUSE_DECL (c)),
8538 build_fold_addr_expr (avar));
8539 gimplify_assign (OMP_CLAUSE_DECL (c), t, start);
8540 }
8541
8542 gimple_seq_add_stmt (end, gimple_build_assign (data, PLUS_EXPR, data, sz));
8543 gimple_seq_add_stmt (end, gimple_build_assign (idx, PLUS_EXPR, idx,
8544 size_one_node));
8545 g = gimple_build_cond (NE_EXPR, idx, num_thr_sz, lab1, lab2);
8546 gimple_seq_add_stmt (end, g);
8547 gimple_seq_add_stmt (end, gimple_build_label (lab2));
8548 if (code == OMP_FOR || code == OMP_SECTIONS)
8549 {
8550 enum built_in_function bfn
8551 = BUILT_IN_GOMP_WORKSHARE_TASK_REDUCTION_UNREGISTER;
8552 t = builtin_decl_explicit (bfn);
8553 tree c_bool_type = TREE_VALUE (TYPE_ARG_TYPES (TREE_TYPE (t)));
8554 tree arg;
8555 if (cancellable)
8556 {
8557 arg = create_tmp_var (c_bool_type);
8558 gimple_seq_add_stmt (end, gimple_build_assign (arg, NOP_EXPR,
8559 cancellable));
8560 }
8561 else
8562 arg = build_int_cst (c_bool_type, 0);
8563 g = gimple_build_call (t, 1, arg);
8564 }
8565 else
8566 {
8567 t = builtin_decl_explicit (BUILT_IN_GOMP_TASKGROUP_REDUCTION_UNREGISTER);
8568 g = gimple_build_call (t, 1, build_fold_addr_expr (avar));
8569 }
8570 gimple_seq_add_stmt (end, g);
8571 t = build_constructor (atype, NULL);
8572 TREE_THIS_VOLATILE (t) = 1;
8573 gimple_seq_add_stmt (end, gimple_build_assign (avar, t));
8574 }
8575
8576 /* Expand code for an OpenMP taskgroup directive. */
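/* Sketch of the lowering performed below:

     GOMP_taskgroup_start ();
     <task reduction registration>
     BODY;
     GIMPLE_OMP_RETURN;
     <task reduction unregistration>  */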
8577
8578 static void
8579 lower_omp_taskgroup (gimple_stmt_iterator *gsi_p, omp_context *ctx)
8580 {
8581 gimple *stmt = gsi_stmt (*gsi_p);
8582 gcall *x;
8583 gbind *bind;
8584 gimple_seq dseq = NULL;
8585 tree block = make_node (BLOCK);
8586
8587 bind = gimple_build_bind (NULL, NULL, block);
8588 gsi_replace (gsi_p, bind, true);
8589 gimple_bind_add_stmt (bind, stmt);
8590
8591 push_gimplify_context ();
8592
8593 x = gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_TASKGROUP_START),
8594 0);
8595 gimple_bind_add_stmt (bind, x);
8596
8597 lower_omp_task_reductions (ctx, OMP_TASKGROUP,
8598 gimple_omp_taskgroup_clauses (stmt),
8599 gimple_bind_body_ptr (bind), &dseq);
8600
8601 lower_omp (gimple_omp_body_ptr (stmt), ctx);
8602 gimple_bind_add_seq (bind, gimple_omp_body (stmt));
8603 gimple_omp_set_body (stmt, NULL);
8604
8605 gimple_bind_add_stmt (bind, gimple_build_omp_return (true));
8606 gimple_bind_add_seq (bind, dseq);
8607
8608 pop_gimplify_context (bind);
8609
8610 gimple_bind_append_vars (bind, ctx->block_vars);
8611 BLOCK_VARS (block) = ctx->block_vars;
8612 }
8613
8614
8615 /* Fold the OMP_ORDERED_CLAUSES for the OMP_ORDERED in STMT if possible. */
8616
8617 static void
8618 lower_omp_ordered_clauses (gimple_stmt_iterator *gsi_p, gomp_ordered *ord_stmt,
8619 omp_context *ctx)
8620 {
8621 struct omp_for_data fd;
8622 if (!ctx->outer || gimple_code (ctx->outer->stmt) != GIMPLE_OMP_FOR)
8623 return;
8624
8625 unsigned int len = gimple_omp_for_collapse (ctx->outer->stmt);
8626 struct omp_for_data_loop *loops = XALLOCAVEC (struct omp_for_data_loop, len);
8627 omp_extract_for_data (as_a <gomp_for *> (ctx->outer->stmt), &fd, loops);
8628 if (!fd.ordered)
8629 return;
8630
8631 tree *list_p = gimple_omp_ordered_clauses_ptr (ord_stmt);
8632 tree c = gimple_omp_ordered_clauses (ord_stmt);
8633 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND
8634 && OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SINK)
8635 {
8636 /* Merge depend clauses from multiple adjacent
8637 #pragma omp ordered depend(sink:...) constructs
8638 into one #pragma omp ordered depend(sink:...), so that
8639 we can optimize them together. */
8640 gimple_stmt_iterator gsi = *gsi_p;
8641 gsi_next (&gsi);
8642 while (!gsi_end_p (gsi))
8643 {
8644 gimple *stmt = gsi_stmt (gsi);
8645 if (is_gimple_debug (stmt)
8646 || gimple_code (stmt) == GIMPLE_NOP)
8647 {
8648 gsi_next (&gsi);
8649 continue;
8650 }
8651 if (gimple_code (stmt) != GIMPLE_OMP_ORDERED)
8652 break;
8653 gomp_ordered *ord_stmt2 = as_a <gomp_ordered *> (stmt);
8654 c = gimple_omp_ordered_clauses (ord_stmt2);
8655 if (c == NULL_TREE
8656 || OMP_CLAUSE_CODE (c) != OMP_CLAUSE_DEPEND
8657 || OMP_CLAUSE_DEPEND_KIND (c) != OMP_CLAUSE_DEPEND_SINK)
8658 break;
8659 while (*list_p)
8660 list_p = &OMP_CLAUSE_CHAIN (*list_p);
8661 *list_p = c;
8662 gsi_remove (&gsi, true);
8663 }
8664 }
8665
8666 /* Canonicalize sink dependence clauses into one folded clause if
8667 possible.
8668
8669 The basic algorithm is to create a sink vector whose first
8670 element is the GCD of all the first elements, and whose remaining
8671 elements are the minimum of the subsequent columns.
8672
8673 We ignore dependence vectors whose first element is zero because
8674 such dependencies are known to be executed by the same thread.
8675
8676 We take into account the direction of the loop, so a minimum
8677 becomes a maximum if the loop is iterating forwards. We also
8678 ignore sink clauses where the loop direction is unknown, or where
8679 the offsets are clearly invalid because they are not a multiple
8680 of the loop increment.
8681
8682 For example:
8683
8684 #pragma omp for ordered(2)
8685 for (i=0; i < N; ++i)
8686 for (j=0; j < M; ++j)
8687 {
8688 #pragma omp ordered \
8689 depend(sink:i-8,j-2) \
8690 depend(sink:i,j-1) \ // Completely ignored because i+0.
8691 depend(sink:i-4,j-3) \
8692 depend(sink:i-6,j-4)
8693 #pragma omp ordered depend(source)
8694 }
8695
8696 Folded clause is:
8697
8698 depend(sink:-gcd(8,4,6),-min(2,3,4))
8699 -or-
8700 depend(sink:-2,-2)
8701 */
8702
8703 /* FIXME: Computing GCDs where the first element is zero is
8704 non-trivial in the presence of collapsed loops. Do this later. */
8705 if (fd.collapse > 1)
8706 return;
8707
8708 wide_int *folded_deps = XALLOCAVEC (wide_int, 2 * len - 1);
8709
8710 /* wide_int is not a POD so it must be default-constructed. */
8711 for (unsigned i = 0; i != 2 * len - 1; ++i)
8712 new (static_cast<void*>(folded_deps + i)) wide_int ();
8713
8714 tree folded_dep = NULL_TREE;
8715 /* TRUE if the first dimension's offset is negative. */
8716 bool neg_offset_p = false;
8717
8718 list_p = gimple_omp_ordered_clauses_ptr (ord_stmt);
8719 unsigned int i;
8720 while ((c = *list_p) != NULL)
8721 {
8722 bool remove = false;
8723
8724 gcc_assert (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND);
8725 if (OMP_CLAUSE_DEPEND_KIND (c) != OMP_CLAUSE_DEPEND_SINK)
8726 goto next_ordered_clause;
8727
8728 tree vec;
8729 for (vec = OMP_CLAUSE_DECL (c), i = 0;
8730 vec && TREE_CODE (vec) == TREE_LIST;
8731 vec = TREE_CHAIN (vec), ++i)
8732 {
8733 gcc_assert (i < len);
8734
8735 /* omp_extract_for_data has canonicalized the condition. */
8736 gcc_assert (fd.loops[i].cond_code == LT_EXPR
8737 || fd.loops[i].cond_code == GT_EXPR);
8738 bool forward = fd.loops[i].cond_code == LT_EXPR;
8739 bool maybe_lexically_later = true;
8740
8741 /* While the committee makes up its mind, bail if we have any
8742 non-constant steps. */
8743 if (TREE_CODE (fd.loops[i].step) != INTEGER_CST)
8744 goto lower_omp_ordered_ret;
8745
8746 tree itype = TREE_TYPE (TREE_VALUE (vec));
8747 if (POINTER_TYPE_P (itype))
8748 itype = sizetype;
8749 wide_int offset = wide_int::from (wi::to_wide (TREE_PURPOSE (vec)),
8750 TYPE_PRECISION (itype),
8751 TYPE_SIGN (itype));
8752
8753 /* Ignore invalid offsets that are not multiples of the step. */
8754 if (!wi::multiple_of_p (wi::abs (offset),
8755 wi::abs (wi::to_wide (fd.loops[i].step)),
8756 UNSIGNED))
8757 {
8758 warning_at (OMP_CLAUSE_LOCATION (c), 0,
8759 "ignoring sink clause with offset that is not "
8760 "a multiple of the loop step");
8761 remove = true;
8762 goto next_ordered_clause;
8763 }
8764
8765 /* Calculate the first dimension. The first dimension of
8766 the folded dependency vector is the GCD of the first
8767 elements, while ignoring any first elements whose offset
8768 is 0. */
8769 if (i == 0)
8770 {
8771 /* Ignore dependence vectors whose first dimension is 0. */
8772 if (offset == 0)
8773 {
8774 remove = true;
8775 goto next_ordered_clause;
8776 }
8777 else
8778 {
8779 if (!TYPE_UNSIGNED (itype) && (forward ^ wi::neg_p (offset)))
8780 {
8781 error_at (OMP_CLAUSE_LOCATION (c),
8782 "first offset must be in opposite direction "
8783 "of loop iterations");
8784 goto lower_omp_ordered_ret;
8785 }
8786 if (forward)
8787 offset = -offset;
8788 neg_offset_p = forward;
8789 /* Initialize the first time around. */
8790 if (folded_dep == NULL_TREE)
8791 {
8792 folded_dep = c;
8793 folded_deps[0] = offset;
8794 }
8795 else
8796 folded_deps[0] = wi::gcd (folded_deps[0],
8797 offset, UNSIGNED);
8798 }
8799 }
8800 /* Calculate minimum for the remaining dimensions. */
8801 else
8802 {
8803 folded_deps[len + i - 1] = offset;
8804 if (folded_dep == c)
8805 folded_deps[i] = offset;
8806 else if (maybe_lexically_later
8807 && !wi::eq_p (folded_deps[i], offset))
8808 {
8809 if (forward ^ wi::gts_p (folded_deps[i], offset))
8810 {
8811 unsigned int j;
8812 folded_dep = c;
8813 for (j = 1; j <= i; j++)
8814 folded_deps[j] = folded_deps[len + j - 1];
8815 }
8816 else
8817 maybe_lexically_later = false;
8818 }
8819 }
8820 }
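/* At this point folded_deps[0] holds the GCD of the absolute values
   of the first-dimension offsets seen so far (forward-loop offsets
   were negated above, so the stored value is non-negative and
   neg_offset_p remembers to negate it back when the folded clause
   is rebuilt below), while folded_deps[1..len-1] hold the remaining
   dimensions of the clause currently chosen as FOLDED_DEP.  */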
8821 gcc_assert (i == len);
8822
8823 remove = true;
8824
8825 next_ordered_clause:
8826 if (remove)
8827 *list_p = OMP_CLAUSE_CHAIN (c);
8828 else
8829 list_p = &OMP_CLAUSE_CHAIN (c);
8830 }
8831
8832 if (folded_dep)
8833 {
8834 if (neg_offset_p)
8835 folded_deps[0] = -folded_deps[0];
8836
8837 tree itype = TREE_TYPE (TREE_VALUE (OMP_CLAUSE_DECL (folded_dep)));
8838 if (POINTER_TYPE_P (itype))
8839 itype = sizetype;
8840
8841 TREE_PURPOSE (OMP_CLAUSE_DECL (folded_dep))
8842 = wide_int_to_tree (itype, folded_deps[0]);
8843 OMP_CLAUSE_CHAIN (folded_dep) = gimple_omp_ordered_clauses (ord_stmt);
8844 *gimple_omp_ordered_clauses_ptr (ord_stmt) = folded_dep;
8845 }
8846
8847 lower_omp_ordered_ret:
8848
8849 /* Ordered without clauses is equivalent to #pragma omp ordered
8850 threads, while we want a nop instead if we removed all clauses. */
8851 if (gimple_omp_ordered_clauses (ord_stmt) == NULL_TREE)
8852 gsi_replace (gsi_p, gimple_build_nop (), true);
8853 }
8854
8855
8856 /* Expand code for an OpenMP ordered directive. */
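/* A sketch of the lowering done below (illustrative, not literal
   GIMPLE): plain '#pragma omp ordered' brackets the body with

       GOMP_ordered_start ();
       <body>;
       GOMP_ordered_end ();

   while '#pragma omp ordered simd' instead uses the internal
   functions .GOMP_SIMD_ORDERED_START/.GOMP_SIMD_ORDERED_END with a
   flag recording whether the 'threads' clause was present, to be
   expanded when the enclosing loop is vectorized.  */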
8857
8858 static void
8859 lower_omp_ordered (gimple_stmt_iterator *gsi_p, omp_context *ctx)
8860 {
8861 tree block;
8862 gimple *stmt = gsi_stmt (*gsi_p), *g;
8863 gomp_ordered *ord_stmt = as_a <gomp_ordered *> (stmt);
8864 gcall *x;
8865 gbind *bind;
8866 bool simd = omp_find_clause (gimple_omp_ordered_clauses (ord_stmt),
8867 OMP_CLAUSE_SIMD);
8868 /* FIXME: this should check for the presence of OMP_CLAUSE__SIMT_ on
8869 the enclosing loop. */
8870 bool maybe_simt
8871 = simd && omp_maybe_offloaded_ctx (ctx) && omp_max_simt_vf () > 1;
8872 bool threads = omp_find_clause (gimple_omp_ordered_clauses (ord_stmt),
8873 OMP_CLAUSE_THREADS);
8874
8875 if (omp_find_clause (gimple_omp_ordered_clauses (ord_stmt),
8876 OMP_CLAUSE_DEPEND))
8877 {
8878 /* FIXME: This needs to be moved to the expansion, to verify various
8879 conditions that are only testable on a cfg with dominators computed;
8880 also, all the depend clauses to be merged still might need to be
8881 available for the runtime checks. */
8882 if (0)
8883 lower_omp_ordered_clauses (gsi_p, ord_stmt, ctx);
8884 return;
8885 }
8886
8887 push_gimplify_context ();
8888
8889 block = make_node (BLOCK);
8890 bind = gimple_build_bind (NULL, NULL, block);
8891 gsi_replace (gsi_p, bind, true);
8892 gimple_bind_add_stmt (bind, stmt);
8893
8894 if (simd)
8895 {
8896 x = gimple_build_call_internal (IFN_GOMP_SIMD_ORDERED_START, 1,
8897 build_int_cst (NULL_TREE, threads));
8898 cfun->has_simduid_loops = true;
8899 }
8900 else
8901 x = gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_ORDERED_START),
8902 0);
8903 gimple_bind_add_stmt (bind, x);
8904
8905 tree counter = NULL_TREE, test = NULL_TREE, body = NULL_TREE;
8906 if (maybe_simt)
8907 {
8908 counter = create_tmp_var (integer_type_node);
8909 g = gimple_build_call_internal (IFN_GOMP_SIMT_LANE, 0);
8910 gimple_call_set_lhs (g, counter);
8911 gimple_bind_add_stmt (bind, g);
8912
8913 body = create_artificial_label (UNKNOWN_LOCATION);
8914 test = create_artificial_label (UNKNOWN_LOCATION);
8915 gimple_bind_add_stmt (bind, gimple_build_label (body));
8916
8917 tree simt_pred = create_tmp_var (integer_type_node);
8918 g = gimple_build_call_internal (IFN_GOMP_SIMT_ORDERED_PRED, 1, counter);
8919 gimple_call_set_lhs (g, simt_pred);
8920 gimple_bind_add_stmt (bind, g);
8921
8922 tree t = create_artificial_label (UNKNOWN_LOCATION);
8923 g = gimple_build_cond (EQ_EXPR, simt_pred, integer_zero_node, t, test);
8924 gimple_bind_add_stmt (bind, g);
8925
8926 gimple_bind_add_stmt (bind, gimple_build_label (t));
8927 }
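/* Together with the statements added after the body below, the above
   effectively serializes the SIMT lanes, roughly (a sketch, not
   literal GIMPLE):

       counter = .GOMP_SIMT_LANE ();
     body:
       if (.GOMP_SIMT_ORDERED_PRED (counter) == 0)
         <body>;
     test:
       counter = counter - 1;
       if (.GOMP_SIMT_VOTE_ANY (counter >= 0))
         goto body;

   so the lanes execute the ordered body one at a time.  */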
8928 lower_omp (gimple_omp_body_ptr (stmt), ctx);
8929 gimple_omp_set_body (stmt, maybe_catch_exception (gimple_omp_body (stmt)));
8930 gimple_bind_add_seq (bind, gimple_omp_body (stmt));
8931 gimple_omp_set_body (stmt, NULL);
8932
8933 if (maybe_simt)
8934 {
8935 gimple_bind_add_stmt (bind, gimple_build_label (test));
8936 g = gimple_build_assign (counter, MINUS_EXPR, counter, integer_one_node);
8937 gimple_bind_add_stmt (bind, g);
8938
8939 tree c = build2 (GE_EXPR, boolean_type_node, counter, integer_zero_node);
8940 tree nonneg = create_tmp_var (integer_type_node);
8941 gimple_seq tseq = NULL;
8942 gimplify_assign (nonneg, fold_convert (integer_type_node, c), &tseq);
8943 gimple_bind_add_seq (bind, tseq);
8944
8945 g = gimple_build_call_internal (IFN_GOMP_SIMT_VOTE_ANY, 1, nonneg);
8946 gimple_call_set_lhs (g, nonneg);
8947 gimple_bind_add_stmt (bind, g);
8948
8949 tree end = create_artificial_label (UNKNOWN_LOCATION);
8950 g = gimple_build_cond (NE_EXPR, nonneg, integer_zero_node, body, end);
8951 gimple_bind_add_stmt (bind, g);
8952
8953 gimple_bind_add_stmt (bind, gimple_build_label (end));
8954 }
8955 if (simd)
8956 x = gimple_build_call_internal (IFN_GOMP_SIMD_ORDERED_END, 1,
8957 build_int_cst (NULL_TREE, threads));
8958 else
8959 x = gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_ORDERED_END),
8960 0);
8961 gimple_bind_add_stmt (bind, x);
8962
8963 gimple_bind_add_stmt (bind, gimple_build_omp_return (true));
8964
8965 pop_gimplify_context (bind);
8966
8967 gimple_bind_append_vars (bind, ctx->block_vars);
8968 BLOCK_VARS (block) = gimple_bind_vars (bind);
8969 }
8970
8971
8972 /* Expand code for an OpenMP scan directive and the structured block
8973 before the scan directive. */
8974
8975 static void
8976 lower_omp_scan (gimple_stmt_iterator *gsi_p, omp_context *ctx)
8977 {
8978 gimple *stmt = gsi_stmt (*gsi_p);
8979 bool has_clauses
8980 = gimple_omp_scan_clauses (as_a <gomp_scan *> (stmt)) != NULL;
8981 tree lane = NULL_TREE;
8982 gimple_seq before = NULL;
8983 omp_context *octx = ctx->outer;
8984 gcc_assert (octx);
8985 if (octx->scan_exclusive && !has_clauses)
8986 {
8987 gimple_stmt_iterator gsi2 = *gsi_p;
8988 gsi_next (&gsi2);
8989 gimple *stmt2 = gsi_stmt (gsi2);
8990 /* For exclusive scan, swap the GIMPLE_OMP_SCAN without clauses
8991 with the following GIMPLE_OMP_SCAN with clauses, so that
8992 input_phase, the one with the exclusive clause(s), comes first. */
8993 if (stmt2
8994 && gimple_code (stmt2) == GIMPLE_OMP_SCAN
8995 && gimple_omp_scan_clauses (as_a <gomp_scan *> (stmt2)) != NULL)
8996 {
8997 gsi_remove (gsi_p, false);
8998 gsi_insert_after (gsi_p, stmt, GSI_SAME_STMT);
8999 ctx = maybe_lookup_ctx (stmt2);
9000 gcc_assert (ctx);
9001 lower_omp_scan (gsi_p, ctx);
9002 return;
9003 }
9004 }
9005
9006 bool input_phase = has_clauses ^ octx->scan_inclusive;
9007 bool is_simd = (gimple_code (octx->stmt) == GIMPLE_OMP_FOR
9008 && gimple_omp_for_kind (octx->stmt) == GF_OMP_FOR_KIND_SIMD);
9009 bool is_for = (gimple_code (octx->stmt) == GIMPLE_OMP_FOR
9010 && gimple_omp_for_kind (octx->stmt) == GF_OMP_FOR_KIND_FOR
9011 && !gimple_omp_for_combined_p (octx->stmt));
9012 bool is_for_simd = is_simd && gimple_omp_for_combined_into_p (octx->stmt);
9013 if (is_for_simd && octx->for_simd_scan_phase)
9014 is_simd = false;
9015 if (is_simd)
9016 if (tree c = omp_find_clause (gimple_omp_for_clauses (octx->stmt),
9017 OMP_CLAUSE__SIMDUID_))
9018 {
9019 tree uid = OMP_CLAUSE__SIMDUID__DECL (c);
9020 lane = create_tmp_var (unsigned_type_node);
9021 tree t = build_int_cst (integer_type_node,
9022 input_phase ? 1
9023 : octx->scan_inclusive ? 2 : 3);
9024 gimple *g
9025 = gimple_build_call_internal (IFN_GOMP_SIMD_LANE, 2, uid, t);
9026 gimple_call_set_lhs (g, lane);
9027 gimple_seq_add_stmt (&before, g);
9028 }
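/* The constant passed to .GOMP_SIMD_LANE above restates the
   conditional just computed: 1 for the input phase, 2 for the scan
   phase of an inclusive scan, 3 for the scan phase of an exclusive
   scan, so that the phases can be told apart when the loop is
   vectorized later.  */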
9029
9030 if (is_simd || is_for)
9031 {
9032 for (tree c = gimple_omp_for_clauses (octx->stmt);
9033 c; c = OMP_CLAUSE_CHAIN (c))
9034 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
9035 && OMP_CLAUSE_REDUCTION_INSCAN (c))
9036 {
9037 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
9038 tree var = OMP_CLAUSE_DECL (c);
9039 tree new_var = lookup_decl (var, octx);
9040 tree val = new_var;
9041 tree var2 = NULL_TREE;
9042 tree var3 = NULL_TREE;
9043 tree var4 = NULL_TREE;
9044 tree lane0 = NULL_TREE;
9045 tree new_vard = new_var;
9046 if (omp_is_reference (var))
9047 {
9048 new_var = build_simple_mem_ref_loc (clause_loc, new_var);
9049 val = new_var;
9050 }
9051 if (DECL_HAS_VALUE_EXPR_P (new_vard))
9052 {
9053 val = DECL_VALUE_EXPR (new_vard);
9054 if (new_vard != new_var)
9055 {
9056 gcc_assert (TREE_CODE (val) == ADDR_EXPR);
9057 val = TREE_OPERAND (val, 0);
9058 }
9059 if (TREE_CODE (val) == ARRAY_REF
9060 && VAR_P (TREE_OPERAND (val, 0)))
9061 {
9062 tree v = TREE_OPERAND (val, 0);
9063 if (lookup_attribute ("omp simd array",
9064 DECL_ATTRIBUTES (v)))
9065 {
9066 val = unshare_expr (val);
9067 lane0 = TREE_OPERAND (val, 1);
9068 TREE_OPERAND (val, 1) = lane;
9069 var2 = lookup_decl (v, octx);
9070 if (octx->scan_exclusive)
9071 var4 = lookup_decl (var2, octx);
9072 if (input_phase
9073 && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
9074 var3 = maybe_lookup_decl (var4 ? var4 : var2, octx);
9075 if (!input_phase)
9076 {
9077 var2 = build4 (ARRAY_REF, TREE_TYPE (val),
9078 var2, lane, NULL_TREE, NULL_TREE);
9079 TREE_THIS_NOTRAP (var2) = 1;
9080 if (octx->scan_exclusive)
9081 {
9082 var4 = build4 (ARRAY_REF, TREE_TYPE (val),
9083 var4, lane, NULL_TREE,
9084 NULL_TREE);
9085 TREE_THIS_NOTRAP (var4) = 1;
9086 }
9087 }
9088 else
9089 var2 = val;
9090 }
9091 }
9092 gcc_assert (var2);
9093 }
9094 else
9095 {
9096 var2 = build_outer_var_ref (var, octx);
9097 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
9098 {
9099 var3 = maybe_lookup_decl (new_vard, octx);
9100 if (var3 == new_vard || var3 == NULL_TREE)
9101 var3 = NULL_TREE;
9102 else if (is_simd && octx->scan_exclusive && !input_phase)
9103 {
9104 var4 = maybe_lookup_decl (var3, octx);
9105 if (var4 == var3 || var4 == NULL_TREE)
9106 {
9107 if (TREE_ADDRESSABLE (TREE_TYPE (new_var)))
9108 {
9109 var4 = var3;
9110 var3 = NULL_TREE;
9111 }
9112 else
9113 var4 = NULL_TREE;
9114 }
9115 }
9116 }
9117 if (is_simd
9118 && octx->scan_exclusive
9119 && !input_phase
9120 && var4 == NULL_TREE)
9121 var4 = create_tmp_var (TREE_TYPE (val));
9122 }
9123 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
9124 {
9125 tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
9126 if (input_phase)
9127 {
9128 if (var3)
9129 {
9130 /* If we've added a separate identity element
9131 variable, copy it over into val. */
9132 tree x = lang_hooks.decls.omp_clause_assign_op (c, val,
9133 var3);
9134 gimplify_and_add (x, &before);
9135 }
9136 else if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c))
9137 {
9138 /* Otherwise, assign to it the identity element. */
9139 gimple_seq tseq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c);
9140 if (is_for)
9141 tseq = copy_gimple_seq_and_replace_locals (tseq);
9142 tree ref = build_outer_var_ref (var, octx);
9143 tree x = (DECL_HAS_VALUE_EXPR_P (new_vard)
9144 ? DECL_VALUE_EXPR (new_vard) : NULL_TREE);
9145 if (x)
9146 {
9147 if (new_vard != new_var)
9148 val = build_fold_addr_expr_loc (clause_loc, val);
9149 SET_DECL_VALUE_EXPR (new_vard, val);
9150 }
9151 SET_DECL_VALUE_EXPR (placeholder, ref);
9152 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
9153 lower_omp (&tseq, octx);
9154 if (x)
9155 SET_DECL_VALUE_EXPR (new_vard, x);
9156 SET_DECL_VALUE_EXPR (placeholder, NULL_TREE);
9157 DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
9158 gimple_seq_add_seq (&before, tseq);
9159 if (is_simd)
9160 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL;
9161 }
9162 }
9163 else if (is_simd)
9164 {
9165 tree x;
9166 if (octx->scan_exclusive)
9167 {
9168 tree v4 = unshare_expr (var4);
9169 tree v2 = unshare_expr (var2);
9170 x = lang_hooks.decls.omp_clause_assign_op (c, v4, v2);
9171 gimplify_and_add (x, &before);
9172 }
9173 gimple_seq tseq = OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c);
9174 x = (DECL_HAS_VALUE_EXPR_P (new_vard)
9175 ? DECL_VALUE_EXPR (new_vard) : NULL_TREE);
9176 tree vexpr = val;
9177 if (x && new_vard != new_var)
9178 vexpr = build_fold_addr_expr_loc (clause_loc, val);
9179 if (x)
9180 SET_DECL_VALUE_EXPR (new_vard, vexpr);
9181 SET_DECL_VALUE_EXPR (placeholder, var2);
9182 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
9183 lower_omp (&tseq, octx);
9184 gimple_seq_add_seq (&before, tseq);
9185 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
9186 if (x)
9187 SET_DECL_VALUE_EXPR (new_vard, x);
9188 SET_DECL_VALUE_EXPR (placeholder, NULL_TREE);
9189 DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
9190 if (octx->scan_inclusive)
9191 {
9192 x = lang_hooks.decls.omp_clause_assign_op (c, val,
9193 var2);
9194 gimplify_and_add (x, &before);
9195 }
9196 else if (lane0 == NULL_TREE)
9197 {
9198 x = lang_hooks.decls.omp_clause_assign_op (c, val,
9199 var4);
9200 gimplify_and_add (x, &before);
9201 }
9202 }
9203 }
9204 else
9205 {
9206 if (input_phase)
9207 {
9208 /* Input phase. Set val to the initializer before
9209 the body. */
9210 tree x = omp_reduction_init (c, TREE_TYPE (new_var));
9211 gimplify_assign (val, x, &before);
9212 }
9213 else if (is_simd)
9214 {
9215 /* Scan phase. */
9216 enum tree_code code = OMP_CLAUSE_REDUCTION_CODE (c);
9217 if (code == MINUS_EXPR)
9218 code = PLUS_EXPR;
9219
9220 tree x = build2 (code, TREE_TYPE (var2),
9221 unshare_expr (var2), unshare_expr (val));
9222 if (octx->scan_inclusive)
9223 {
9224 gimplify_assign (unshare_expr (var2), x, &before);
9225 gimplify_assign (val, var2, &before);
9226 }
9227 else
9228 {
9229 gimplify_assign (unshare_expr (var4),
9230 unshare_expr (var2), &before);
9231 gimplify_assign (var2, x, &before);
9232 if (lane0 == NULL_TREE)
9233 gimplify_assign (val, var4, &before);
9234 }
9235 }
9236 }
9237 if (octx->scan_exclusive && !input_phase && lane0)
9238 {
9239 tree vexpr = unshare_expr (var4);
9240 TREE_OPERAND (vexpr, 1) = lane0;
9241 if (new_vard != new_var)
9242 vexpr = build_fold_addr_expr_loc (clause_loc, vexpr);
9243 SET_DECL_VALUE_EXPR (new_vard, vexpr);
9244 }
9245 }
9246 }
9247 if (is_simd && !is_for_simd)
9248 {
9249 gsi_insert_seq_after (gsi_p, gimple_omp_body (stmt), GSI_SAME_STMT);
9250 gsi_insert_seq_after (gsi_p, before, GSI_SAME_STMT);
9251 gsi_replace (gsi_p, gimple_build_nop (), true);
9252 return;
9253 }
9254 lower_omp (gimple_omp_body_ptr (stmt), octx);
9255 if (before)
9256 {
9257 gimple_stmt_iterator gsi = gsi_start_1 (gimple_omp_body_ptr (stmt));
9258 gsi_insert_seq_before (&gsi, before, GSI_SAME_STMT);
9259 }
9260 }
9261
9262
9263 /* Gimplify a GIMPLE_OMP_CRITICAL statement. This is a relatively simple
9264 substitution of a couple of function calls. But in the NAMED case, it
9265 requires that the languages coordinate a symbol name. It is therefore
9266 best put here in common code. */
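/* A sketch of the named case (illustrative, not literal GIMPLE):

     #pragma omp critical (foo)
       <body>;

   is lowered to

     GOMP_critical_name_start (&.gomp_critical_user_foo);
     <body>;
     GOMP_critical_name_end (&.gomp_critical_user_foo);

   where .gomp_critical_user_foo is the public, common variable
   created below, so every translation unit using the same name
   shares one lock.  The unnamed form calls GOMP_critical_start
   and GOMP_critical_end instead.  */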
9267
9268 static GTY(()) hash_map<tree, tree> *critical_name_mutexes;
9269
9270 static void
9271 lower_omp_critical (gimple_stmt_iterator *gsi_p, omp_context *ctx)
9272 {
9273 tree block;
9274 tree name, lock, unlock;
9275 gomp_critical *stmt = as_a <gomp_critical *> (gsi_stmt (*gsi_p));
9276 gbind *bind;
9277 location_t loc = gimple_location (stmt);
9278 gimple_seq tbody;
9279
9280 name = gimple_omp_critical_name (stmt);
9281 if (name)
9282 {
9283 tree decl;
9284
9285 if (!critical_name_mutexes)
9286 critical_name_mutexes = hash_map<tree, tree>::create_ggc (10);
9287
9288 tree *n = critical_name_mutexes->get (name);
9289 if (n == NULL)
9290 {
9291 char *new_str;
9292
9293 decl = create_tmp_var_raw (ptr_type_node);
9294
9295 new_str = ACONCAT ((".gomp_critical_user_",
9296 IDENTIFIER_POINTER (name), NULL));
9297 DECL_NAME (decl) = get_identifier (new_str);
9298 TREE_PUBLIC (decl) = 1;
9299 TREE_STATIC (decl) = 1;
9300 DECL_COMMON (decl) = 1;
9301 DECL_ARTIFICIAL (decl) = 1;
9302 DECL_IGNORED_P (decl) = 1;
9303
9304 varpool_node::finalize_decl (decl);
9305
9306 critical_name_mutexes->put (name, decl);
9307 }
9308 else
9309 decl = *n;
9310
9311 /* If '#pragma omp critical' is inside offloaded region or
9312 inside function marked as offloadable, the symbol must be
9313 marked as offloadable too. */
9314 omp_context *octx;
9315 if (cgraph_node::get (current_function_decl)->offloadable)
9316 varpool_node::get_create (decl)->offloadable = 1;
9317 else
9318 for (octx = ctx->outer; octx; octx = octx->outer)
9319 if (is_gimple_omp_offloaded (octx->stmt))
9320 {
9321 varpool_node::get_create (decl)->offloadable = 1;
9322 break;
9323 }
9324
9325 lock = builtin_decl_explicit (BUILT_IN_GOMP_CRITICAL_NAME_START);
9326 lock = build_call_expr_loc (loc, lock, 1,
9327 build_fold_addr_expr_loc (loc, decl));
9328
9329 unlock = builtin_decl_explicit (BUILT_IN_GOMP_CRITICAL_NAME_END);
9330 unlock = build_call_expr_loc (loc, unlock, 1,
9331 build_fold_addr_expr_loc (loc, decl));
9332 }
9333 else
9334 {
9335 lock = builtin_decl_explicit (BUILT_IN_GOMP_CRITICAL_START);
9336 lock = build_call_expr_loc (loc, lock, 0);
9337
9338 unlock = builtin_decl_explicit (BUILT_IN_GOMP_CRITICAL_END);
9339 unlock = build_call_expr_loc (loc, unlock, 0);
9340 }
9341
9342 push_gimplify_context ();
9343
9344 block = make_node (BLOCK);
9345 bind = gimple_build_bind (NULL, NULL, block);
9346 gsi_replace (gsi_p, bind, true);
9347 gimple_bind_add_stmt (bind, stmt);
9348
9349 tbody = gimple_bind_body (bind);
9350 gimplify_and_add (lock, &tbody);
9351 gimple_bind_set_body (bind, tbody);
9352
9353 lower_omp (gimple_omp_body_ptr (stmt), ctx);
9354 gimple_omp_set_body (stmt, maybe_catch_exception (gimple_omp_body (stmt)));
9355 gimple_bind_add_seq (bind, gimple_omp_body (stmt));
9356 gimple_omp_set_body (stmt, NULL);
9357
9358 tbody = gimple_bind_body (bind);
9359 gimplify_and_add (unlock, &tbody);
9360 gimple_bind_set_body (bind, tbody);
9361
9362 gimple_bind_add_stmt (bind, gimple_build_omp_return (true));
9363
9364 pop_gimplify_context (bind);
9365 gimple_bind_append_vars (bind, ctx->block_vars);
9366 BLOCK_VARS (block) = gimple_bind_vars (bind);
9367 }
9368
9369 /* A subroutine of lower_omp_for. Generate code to emit the predicate
9370 for a lastprivate clause. Given a loop control predicate of (V
9371 cond N2), we gate the clause on (!(V cond N2)). The lowered form
9372 is appended to *DLIST, iterator initialization is appended to
9373 *BODY_P. *CLIST is for lastprivate(conditional:) code that needs
9374 to be emitted in a critical section. */
9375
9376 static void
9377 lower_omp_for_lastprivate (struct omp_for_data *fd, gimple_seq *body_p,
9378 gimple_seq *dlist, gimple_seq *clist,
9379 struct omp_context *ctx)
9380 {
9381 tree clauses, cond, vinit;
9382 enum tree_code cond_code;
9383 gimple_seq stmts;
9384
9385 cond_code = fd->loop.cond_code;
9386 cond_code = cond_code == LT_EXPR ? GE_EXPR : LE_EXPR;
9387
9388 /* When possible, use a strict equality expression. This can let
9389 VRP-type optimizations deduce the value and remove a copy. */
9390 if (tree_fits_shwi_p (fd->loop.step))
9391 {
9392 HOST_WIDE_INT step = tree_to_shwi (fd->loop.step);
9393 if (step == 1 || step == -1)
9394 cond_code = EQ_EXPR;
9395 }
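/* E.g. for the canonical loop 'for (i = 0; i < n; i++)' the gate
   would be (i >= n), but with a step of 1 the thread that executed
   the last iteration ends with i exactly equal to n, so (i == n) is
   an equivalent test that VRP can often fold.  */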
9396
9397 if (gimple_omp_for_kind (fd->for_stmt) == GF_OMP_FOR_KIND_GRID_LOOP
9398 || gimple_omp_for_grid_phony (fd->for_stmt))
9399 cond = omp_grid_lastprivate_predicate (fd);
9400 else
9401 {
9402 tree n2 = fd->loop.n2;
9403 if (fd->collapse > 1
9404 && TREE_CODE (n2) != INTEGER_CST
9405 && gimple_omp_for_combined_into_p (fd->for_stmt))
9406 {
9407 struct omp_context *taskreg_ctx = NULL;
9408 if (gimple_code (ctx->outer->stmt) == GIMPLE_OMP_FOR)
9409 {
9410 gomp_for *gfor = as_a <gomp_for *> (ctx->outer->stmt);
9411 if (gimple_omp_for_kind (gfor) == GF_OMP_FOR_KIND_FOR
9412 || gimple_omp_for_kind (gfor) == GF_OMP_FOR_KIND_DISTRIBUTE)
9413 {
9414 if (gimple_omp_for_combined_into_p (gfor))
9415 {
9416 gcc_assert (ctx->outer->outer
9417 && is_parallel_ctx (ctx->outer->outer));
9418 taskreg_ctx = ctx->outer->outer;
9419 }
9420 else
9421 {
9422 struct omp_for_data outer_fd;
9423 omp_extract_for_data (gfor, &outer_fd, NULL);
9424 n2 = fold_convert (TREE_TYPE (n2), outer_fd.loop.n2);
9425 }
9426 }
9427 else if (gimple_omp_for_kind (gfor) == GF_OMP_FOR_KIND_TASKLOOP)
9428 taskreg_ctx = ctx->outer->outer;
9429 }
9430 else if (is_taskreg_ctx (ctx->outer))
9431 taskreg_ctx = ctx->outer;
9432 if (taskreg_ctx)
9433 {
9434 int i;
9435 tree taskreg_clauses
9436 = gimple_omp_taskreg_clauses (taskreg_ctx->stmt);
9437 tree innerc = omp_find_clause (taskreg_clauses,
9438 OMP_CLAUSE__LOOPTEMP_);
9439 gcc_assert (innerc);
9440 for (i = 0; i < fd->collapse; i++)
9441 {
9442 innerc = omp_find_clause (OMP_CLAUSE_CHAIN (innerc),
9443 OMP_CLAUSE__LOOPTEMP_);
9444 gcc_assert (innerc);
9445 }
9446 innerc = omp_find_clause (OMP_CLAUSE_CHAIN (innerc),
9447 OMP_CLAUSE__LOOPTEMP_);
9448 if (innerc)
9449 n2 = fold_convert (TREE_TYPE (n2),
9450 lookup_decl (OMP_CLAUSE_DECL (innerc),
9451 taskreg_ctx));
9452 }
9453 }
9454 cond = build2 (cond_code, boolean_type_node, fd->loop.v, n2);
9455 }
9456
9457 clauses = gimple_omp_for_clauses (fd->for_stmt);
9458 stmts = NULL;
9459 lower_lastprivate_clauses (clauses, cond, body_p, &stmts, clist, ctx);
9460 if (!gimple_seq_empty_p (stmts))
9461 {
9462 gimple_seq_add_seq (&stmts, *dlist);
9463 *dlist = stmts;
9464
9465 /* Optimize: v = 0; is usually cheaper than v = some_other_constant. */
9466 vinit = fd->loop.n1;
9467 if (cond_code == EQ_EXPR
9468 && tree_fits_shwi_p (fd->loop.n2)
9469 && ! integer_zerop (fd->loop.n2))
9470 vinit = build_int_cst (TREE_TYPE (fd->loop.v), 0);
9471 else
9472 vinit = unshare_expr (vinit);
9473
9474 /* Initialize the iterator variable, so that threads that don't execute
9475 any iterations don't execute the lastprivate clauses by accident. */
9476 gimplify_assign (fd->loop.v, vinit, body_p);
9477 }
9478 }
9479
9480 /* Callback for walk_gimple_seq. Find #pragma omp scan statement. */
9481
9482 static tree
9483 omp_find_scan (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
9484 struct walk_stmt_info *wi)
9485 {
9486 gimple *stmt = gsi_stmt (*gsi_p);
9487
9488 *handled_ops_p = true;
9489 switch (gimple_code (stmt))
9490 {
9491 WALK_SUBSTMTS;
9492
9493 case GIMPLE_OMP_FOR:
9494 if (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_SIMD
9495 && gimple_omp_for_combined_into_p (stmt))
9496 *handled_ops_p = false;
9497 break;
9498
9499 case GIMPLE_OMP_SCAN:
9500 *(gimple_stmt_iterator *) (wi->info) = *gsi_p;
9501 return integer_zero_node;
9502 default:
9503 break;
9504 }
9505 return NULL;
9506 }
9507
9508 /* Helper function for lower_omp_for: add transformations for a worksharing
9509 loop with scan directives inside of it.
9510 For a worksharing loop not combined with simd, transform:
9511 #pragma omp for reduction(inscan,+:r) private(i)
9512 for (i = 0; i < n; i = i + 1)
9513 {
9514 {
9515 update (r);
9516 }
9517 #pragma omp scan inclusive(r)
9518 {
9519 use (r);
9520 }
9521 }
9522
9523 into two worksharing loops + code to merge results:
9524
9525 num_threads = omp_get_num_threads ();
9526 thread_num = omp_get_thread_num ();
9527 if (thread_num == 0) goto <D.2099>; else goto <D.2100>;
9528 <D.2099>:
9529 var2 = r;
9530 goto <D.2101>;
9531 <D.2100>:
9532 // For UDRs this is UDR init, or if ctors are needed, copy from
9533 // var3 that has been constructed to contain the neutral element.
9534 var2 = 0;
9535 <D.2101>:
9536 ivar = 0;
9537 // The _scantemp_ clauses will arrange for rpriva to be initialized to
9538 // a shared array with num_threads elements and rprivb to a local array
9539 // with a number of elements equal to the number of (contiguous)
9540 // iterations the current thread will perform. The controlb and
9541 // controlp variables are temporaries used to handle deallocation of
9542 // rprivb at the end of the second GOMP_FOR.
9543 #pragma omp for _scantemp_(rpriva) _scantemp_(rprivb) _scantemp_(controlb) \
9544 _scantemp_(controlp) reduction(inscan,+:r) private(i) nowait
9545 for (i = 0; i < n; i = i + 1)
9546 {
9547 {
9548 // For UDRs this is UDR init or copy from var3.
9549 r = 0;
9550 // This is the input phase from user code.
9551 update (r);
9552 }
9553 {
9554 // For UDRs this is UDR merge.
9555 var2 = var2 + r;
9556 // Rather than handing it over to the user, save it to the local
9557 // thread's array.
9558 rprivb[ivar] = var2;
9559 // For exclusive scan, the above two statements are swapped.
9560 ivar = ivar + 1;
9561 }
9562 }
9563 // And remember this thread's final value in the shared
9564 // rpriva array.
9565 rpriva[(sizetype) thread_num] = var2;
9566 // If more than one thread, compute the inclusive parallel scan of
9567 // the rpriva array using a work-efficient prefix sum.
9568 if (num_threads > 1) goto <D.2102>; else goto <D.2103>;
9569 <D.2102>:
9570 GOMP_barrier ();
9571 down = 0;
9572 k = 1;
9573 num_threadsu = (unsigned int) num_threads;
9574 thread_nump1 = (unsigned int) thread_num + 1;
9575 <D.2108>:
9576 twok = k << 1;
9577 if (twok > num_threadsu) goto <D.2110>; else goto <D.2111>;
9578 <D.2110>:
9579 down = 4294967295;
9580 k = k >> 1;
9581 if (k == num_threadsu) goto <D.2112>; else goto <D.2111>;
9582 <D.2112>:
9583 k = k >> 1;
9584 <D.2111>:
9585 twok = k << 1;
9586 cplx = .MUL_OVERFLOW (thread_nump1, twok);
9587 mul = REALPART_EXPR <cplx>;
9588 ovf = IMAGPART_EXPR <cplx>;
9589 if (ovf == 0) goto <D.2116>; else goto <D.2117>;
9590 <D.2116>:
9591 andv = k & down;
9592 andvm1 = andv + 4294967295;
9593 l = mul + andvm1;
9594 if (l < num_threadsu) goto <D.2120>; else goto <D.2117>;
9595 <D.2120>:
9596 // For UDRs this is UDR merge, performed using var2 variable as temporary,
9597 // i.e. var2 = rpriva[l - k]; UDR merge (var2, rpriva[l]); rpriva[l] = var2;
9598 rpriva[l] = rpriva[l - k] + rpriva[l];
9599 <D.2117>:
9600 if (down == 0) goto <D.2121>; else goto <D.2122>;
9601 <D.2121>:
9602 k = k << 1;
9603 goto <D.2123>;
9604 <D.2122>:
9605 k = k >> 1;
9606 <D.2123>:
9607 GOMP_barrier ();
9608 if (k != 0) goto <D.2108>; else goto <D.2103>;
9609 <D.2103>:
9610 if (thread_num == 0) goto <D.2124>; else goto <D.2125>;
9611 <D.2124>:
9612 // For UDRs this is UDR init or copy from var3.
9613 var2 = 0;
9614 goto <D.2126>;
9615 <D.2125>:
9616 var2 = rpriva[thread_num - 1];
9617 <D.2126>:
9618 ivar = 0;
9619 #pragma omp for _scantemp_(controlb) _scantemp_(controlp) \
9620 reduction(inscan,+:r) private(i)
9621 for (i = 0; i < n; i = i + 1)
9622 {
9623 {
9624 // For UDRs, this is r = var2; UDR merge (r, rprivb[ivar]);
9625 r = var2 + rprivb[ivar];
9626 }
9627 {
9628 // This is the scan phase from user code.
9629 use (r);
9630 // Plus a bump of the iterator.
9631 ivar = ivar + 1;
9632 }
9633 } */
9634
9635 static void
9636 lower_omp_for_scan (gimple_seq *body_p, gimple_seq *dlist, gomp_for *stmt,
9637 struct omp_for_data *fd, omp_context *ctx)
9638 {
9639 bool is_for_simd = gimple_omp_for_combined_p (stmt);
9640 gcc_assert (ctx->scan_inclusive || ctx->scan_exclusive);
9641
9642 gimple_seq body = gimple_omp_body (stmt);
9643 gimple_stmt_iterator input1_gsi = gsi_none ();
9644 struct walk_stmt_info wi;
9645 memset (&wi, 0, sizeof (wi));
9646 wi.val_only = true;
9647 wi.info = (void *) &input1_gsi;
9648 walk_gimple_seq_mod (&body, omp_find_scan, NULL, &wi);
9649 gcc_assert (!gsi_end_p (input1_gsi));
9650
9651 gimple *input_stmt1 = gsi_stmt (input1_gsi);
9652 gimple_stmt_iterator gsi = input1_gsi;
9653 gsi_next (&gsi);
9654 gimple_stmt_iterator scan1_gsi = gsi;
9655 gimple *scan_stmt1 = gsi_stmt (gsi);
9656 gcc_assert (scan_stmt1 && gimple_code (scan_stmt1) == GIMPLE_OMP_SCAN);
9657
9658 gimple_seq input_body = gimple_omp_body (input_stmt1);
9659 gimple_seq scan_body = gimple_omp_body (scan_stmt1);
9660 gimple_omp_set_body (input_stmt1, NULL);
9661 gimple_omp_set_body (scan_stmt1, NULL);
9662 gimple_omp_set_body (stmt, NULL);
9663
9664 gomp_for *new_stmt = as_a <gomp_for *> (gimple_copy (stmt));
9665 gimple_seq new_body = copy_gimple_seq_and_replace_locals (body);
9666 gimple_omp_set_body (stmt, body);
9667 gimple_omp_set_body (input_stmt1, input_body);
9668
9669 gimple_stmt_iterator input2_gsi = gsi_none ();
9670 memset (&wi, 0, sizeof (wi));
9671 wi.val_only = true;
9672 wi.info = (void *) &input2_gsi;
9673 walk_gimple_seq_mod (&new_body, omp_find_scan, NULL, &wi);
9674 gcc_assert (!gsi_end_p (input2_gsi));
9675
9676 gimple *input_stmt2 = gsi_stmt (input2_gsi);
9677 gsi = input2_gsi;
9678 gsi_next (&gsi);
9679 gimple_stmt_iterator scan2_gsi = gsi;
9680 gimple *scan_stmt2 = gsi_stmt (gsi);
9681 gcc_assert (scan_stmt2 && gimple_code (scan_stmt2) == GIMPLE_OMP_SCAN);
9682 gimple_omp_set_body (scan_stmt2, scan_body);
9683
9684 gimple_stmt_iterator input3_gsi = gsi_none ();
9685 gimple_stmt_iterator scan3_gsi = gsi_none ();
9686 gimple_stmt_iterator input4_gsi = gsi_none ();
9687 gimple_stmt_iterator scan4_gsi = gsi_none ();
9688 gimple *input_stmt3 = NULL, *scan_stmt3 = NULL;
9689 gimple *input_stmt4 = NULL, *scan_stmt4 = NULL;
9690 omp_context *input_simd_ctx = NULL, *scan_simd_ctx = NULL;
9691 if (is_for_simd)
9692 {
9693 memset (&wi, 0, sizeof (wi));
9694 wi.val_only = true;
9695 wi.info = (void *) &input3_gsi;
9696 walk_gimple_seq_mod (&input_body, omp_find_scan, NULL, &wi);
9697 gcc_assert (!gsi_end_p (input3_gsi));
9698
9699 input_stmt3 = gsi_stmt (input3_gsi);
9700 gsi = input3_gsi;
9701 gsi_next (&gsi);
9702 scan3_gsi = gsi;
9703 scan_stmt3 = gsi_stmt (gsi);
9704 gcc_assert (scan_stmt3 && gimple_code (scan_stmt3) == GIMPLE_OMP_SCAN);
9705
9706 memset (&wi, 0, sizeof (wi));
9707 wi.val_only = true;
9708 wi.info = (void *) &input4_gsi;
9709 walk_gimple_seq_mod (&scan_body, omp_find_scan, NULL, &wi);
9710 gcc_assert (!gsi_end_p (input4_gsi));
9711
9712 input_stmt4 = gsi_stmt (input4_gsi);
9713 gsi = input4_gsi;
9714 gsi_next (&gsi);
9715 scan4_gsi = gsi;
9716 scan_stmt4 = gsi_stmt (gsi);
9717 gcc_assert (scan_stmt4 && gimple_code (scan_stmt4) == GIMPLE_OMP_SCAN);
9718
9719 input_simd_ctx = maybe_lookup_ctx (input_stmt3)->outer;
9720 scan_simd_ctx = maybe_lookup_ctx (input_stmt4)->outer;
9721 }
9722
9723 tree num_threads = create_tmp_var (integer_type_node);
9724 tree thread_num = create_tmp_var (integer_type_node);
9725 tree nthreads_decl = builtin_decl_explicit (BUILT_IN_OMP_GET_NUM_THREADS);
9726 tree threadnum_decl = builtin_decl_explicit (BUILT_IN_OMP_GET_THREAD_NUM);
9727 gimple *g = gimple_build_call (nthreads_decl, 0);
9728 gimple_call_set_lhs (g, num_threads);
9729 gimple_seq_add_stmt (body_p, g);
9730 g = gimple_build_call (threadnum_decl, 0);
9731 gimple_call_set_lhs (g, thread_num);
9732 gimple_seq_add_stmt (body_p, g);
9733
9734 tree ivar = create_tmp_var (sizetype);
9735 tree new_clauses1 = NULL_TREE, new_clauses2 = NULL_TREE;
9736 tree *cp1 = &new_clauses1, *cp2 = &new_clauses2;
9737 tree k = create_tmp_var (unsigned_type_node);
9738 tree l = create_tmp_var (unsigned_type_node);
9739
9740 gimple_seq clist = NULL, mdlist = NULL;
9741 gimple_seq thr01_list = NULL, thrn1_list = NULL;
9742 gimple_seq thr02_list = NULL, thrn2_list = NULL;
9743 gimple_seq scan1_list = NULL, input2_list = NULL;
9744 gimple_seq last_list = NULL, reduc_list = NULL;
9745 for (tree c = gimple_omp_for_clauses (stmt); c; c = OMP_CLAUSE_CHAIN (c))
9746 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
9747 && OMP_CLAUSE_REDUCTION_INSCAN (c))
9748 {
9749 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
9750 tree var = OMP_CLAUSE_DECL (c);
9751 tree new_var = lookup_decl (var, ctx);
9752 tree var3 = NULL_TREE;
9753 tree new_vard = new_var;
9754 if (omp_is_reference (var))
9755 new_var = build_simple_mem_ref_loc (clause_loc, new_var);
9756 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
9757 {
9758 var3 = maybe_lookup_decl (new_vard, ctx);
9759 if (var3 == new_vard)
9760 var3 = NULL_TREE;
9761 }
9762
9763 tree ptype = build_pointer_type (TREE_TYPE (new_var));
9764 tree rpriva = create_tmp_var (ptype);
9765 tree nc = build_omp_clause (clause_loc, OMP_CLAUSE__SCANTEMP_);
9766 OMP_CLAUSE_DECL (nc) = rpriva;
9767 *cp1 = nc;
9768 cp1 = &OMP_CLAUSE_CHAIN (nc);
9769
9770 tree rprivb = create_tmp_var (ptype);
9771 nc = build_omp_clause (clause_loc, OMP_CLAUSE__SCANTEMP_);
9772 OMP_CLAUSE_DECL (nc) = rprivb;
9773 OMP_CLAUSE__SCANTEMP__ALLOC (nc) = 1;
9774 *cp1 = nc;
9775 cp1 = &OMP_CLAUSE_CHAIN (nc);
9776
9777 tree var2 = create_tmp_var_raw (TREE_TYPE (new_var));
9778 if (new_vard != new_var)
9779 TREE_ADDRESSABLE (var2) = 1;
9780 gimple_add_tmp_var (var2);
9781
9782 tree x = fold_convert_loc (clause_loc, sizetype, thread_num);
9783 x = fold_build2_loc (clause_loc, MULT_EXPR, sizetype, x,
9784 TYPE_SIZE_UNIT (TREE_TYPE (ptype)));
9785 x = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (rpriva), rpriva, x);
9786 tree rpriva_ref = build_simple_mem_ref_loc (clause_loc, x);
9787
9788 x = fold_build2_loc (clause_loc, PLUS_EXPR, integer_type_node,
9789 thread_num, integer_minus_one_node);
9790 x = fold_convert_loc (clause_loc, sizetype, x);
9791 x = fold_build2_loc (clause_loc, MULT_EXPR, sizetype, x,
9792 TYPE_SIZE_UNIT (TREE_TYPE (ptype)));
9793 x = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (rpriva), rpriva, x);
9794 tree rprivam1_ref = build_simple_mem_ref_loc (clause_loc, x);
9795
9796 x = fold_convert_loc (clause_loc, sizetype, l);
9797 x = fold_build2_loc (clause_loc, MULT_EXPR, sizetype, x,
9798 TYPE_SIZE_UNIT (TREE_TYPE (ptype)));
9799 x = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (rpriva), rpriva, x);
9800 tree rprival_ref = build_simple_mem_ref_loc (clause_loc, x);
9801
9802 x = fold_build2_loc (clause_loc, MINUS_EXPR, unsigned_type_node, l, k);
9803 x = fold_convert_loc (clause_loc, sizetype, x);
9804 x = fold_build2_loc (clause_loc, MULT_EXPR, sizetype, x,
9805 TYPE_SIZE_UNIT (TREE_TYPE (ptype)));
9806 x = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (rpriva), rpriva, x);
9807 tree rprivalmk_ref = build_simple_mem_ref_loc (clause_loc, x);
9808
9809 x = fold_build2_loc (clause_loc, MULT_EXPR, sizetype, ivar,
9810 TYPE_SIZE_UNIT (TREE_TYPE (ptype)));
9811 x = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (rprivb), rprivb, x);
9812 tree rprivb_ref = build_simple_mem_ref_loc (clause_loc, x);
9813
9814 tree var4 = is_for_simd ? new_var : var2;
9815 tree var5 = NULL_TREE, var6 = NULL_TREE;
9816 if (is_for_simd)
9817 {
9818 var5 = lookup_decl (var, input_simd_ctx);
9819 var6 = lookup_decl (var, scan_simd_ctx);
9820 if (new_vard != new_var)
9821 {
9822 var5 = build_simple_mem_ref_loc (clause_loc, var5);
9823 var6 = build_simple_mem_ref_loc (clause_loc, var6);
9824 }
9825 }
9826 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
9827 {
9828 tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
9829 tree val = var2;
9830
9831 x = lang_hooks.decls.omp_clause_default_ctor
9832 (c, var2, build_outer_var_ref (var, ctx));
9833 if (x)
9834 gimplify_and_add (x, &clist);
9835
9836 x = build_outer_var_ref (var, ctx);
9837 x = lang_hooks.decls.omp_clause_assign_op (c, unshare_expr (var4),
9838 x);
9839 gimplify_and_add (x, &thr01_list);
9840
9841 tree y = (DECL_HAS_VALUE_EXPR_P (new_vard)
9842 ? DECL_VALUE_EXPR (new_vard) : NULL_TREE);
9843 if (var3)
9844 {
9845 x = unshare_expr (var4);
9846 x = lang_hooks.decls.omp_clause_assign_op (c, x, var3);
9847 gimplify_and_add (x, &thrn1_list);
9848 x = unshare_expr (var4);
9849 x = lang_hooks.decls.omp_clause_assign_op (c, x, var3);
9850 gimplify_and_add (x, &thr02_list);
9851 }
9852 else if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c))
9853 {
9854 /* Otherwise, assign to it the identity element. */
9855 gimple_seq tseq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c);
9856 tseq = copy_gimple_seq_and_replace_locals (tseq);
9857 if (!is_for_simd)
9858 {
9859 if (new_vard != new_var)
9860 val = build_fold_addr_expr_loc (clause_loc, val);
9861 SET_DECL_VALUE_EXPR (new_vard, val);
9862 DECL_HAS_VALUE_EXPR_P (new_vard) = 1;
9863 }
9864 SET_DECL_VALUE_EXPR (placeholder, error_mark_node);
9865 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
9866 lower_omp (&tseq, ctx);
9867 gimple_seq_add_seq (&thrn1_list, tseq);
9868 tseq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c);
9869 lower_omp (&tseq, ctx);
9870 gimple_seq_add_seq (&thr02_list, tseq);
9871 SET_DECL_VALUE_EXPR (placeholder, NULL_TREE);
9872 DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
9873 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL;
9874 if (y)
9875 SET_DECL_VALUE_EXPR (new_vard, y);
9876 else
9877 {
9878 DECL_HAS_VALUE_EXPR_P (new_vard) = 0;
9879 SET_DECL_VALUE_EXPR (new_vard, NULL_TREE);
9880 }
9881 }
9882
9883 x = unshare_expr (var4);
9884 x = lang_hooks.decls.omp_clause_assign_op (c, x, rprivam1_ref);
9885 gimplify_and_add (x, &thrn2_list);
9886
9887 if (is_for_simd)
9888 {
9889 x = unshare_expr (rprivb_ref);
9890 x = lang_hooks.decls.omp_clause_assign_op (c, x, var5);
9891 gimplify_and_add (x, &scan1_list);
9892 }
9893 else
9894 {
9895 if (ctx->scan_exclusive)
9896 {
9897 x = unshare_expr (rprivb_ref);
9898 x = lang_hooks.decls.omp_clause_assign_op (c, x, var2);
9899 gimplify_and_add (x, &scan1_list);
9900 }
9901
9902 gimple_seq tseq = OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c);
9903 tseq = copy_gimple_seq_and_replace_locals (tseq);
9904 SET_DECL_VALUE_EXPR (placeholder, var2);
9905 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
9906 lower_omp (&tseq, ctx);
9907 gimple_seq_add_seq (&scan1_list, tseq);
9908
9909 if (ctx->scan_inclusive)
9910 {
9911 x = unshare_expr (rprivb_ref);
9912 x = lang_hooks.decls.omp_clause_assign_op (c, x, var2);
9913 gimplify_and_add (x, &scan1_list);
9914 }
9915 }
9916
9917 x = unshare_expr (rpriva_ref);
9918 x = lang_hooks.decls.omp_clause_assign_op (c, x,
9919 unshare_expr (var4));
9920 gimplify_and_add (x, &mdlist);
9921
9922 x = unshare_expr (is_for_simd ? var6 : new_var);
9923 x = lang_hooks.decls.omp_clause_assign_op (c, x, var4);
9924 gimplify_and_add (x, &input2_list);
9925
9926 val = rprivb_ref;
9927 if (new_vard != new_var)
9928 val = build_fold_addr_expr_loc (clause_loc, val);
9929
9930 gimple_seq tseq = OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c);
9931 tseq = copy_gimple_seq_and_replace_locals (tseq);
9932 SET_DECL_VALUE_EXPR (new_vard, val);
9933 DECL_HAS_VALUE_EXPR_P (new_vard) = 1;
9934 if (is_for_simd)
9935 {
9936 SET_DECL_VALUE_EXPR (placeholder, var6);
9937 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
9938 }
9939 else
9940 DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
9941 lower_omp (&tseq, ctx);
9942 if (y)
9943 SET_DECL_VALUE_EXPR (new_vard, y);
9944 else
9945 {
9946 DECL_HAS_VALUE_EXPR_P (new_vard) = 0;
9947 SET_DECL_VALUE_EXPR (new_vard, NULL_TREE);
9948 }
9949 if (!is_for_simd)
9950 {
9951 SET_DECL_VALUE_EXPR (placeholder, new_var);
9952 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
9953 lower_omp (&tseq, ctx);
9954 }
9955 gimple_seq_add_seq (&input2_list, tseq);
9956
9957 x = build_outer_var_ref (var, ctx);
9958 x = lang_hooks.decls.omp_clause_assign_op (c, x, rpriva_ref);
9959 gimplify_and_add (x, &last_list);
9960
9961 x = lang_hooks.decls.omp_clause_assign_op (c, var2, rprivalmk_ref);
9962 gimplify_and_add (x, &reduc_list);
9963 tseq = OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c);
9964 tseq = copy_gimple_seq_and_replace_locals (tseq);
9965 val = rprival_ref;
9966 if (new_vard != new_var)
9967 val = build_fold_addr_expr_loc (clause_loc, val);
9968 SET_DECL_VALUE_EXPR (new_vard, val);
9969 DECL_HAS_VALUE_EXPR_P (new_vard) = 1;
9970 SET_DECL_VALUE_EXPR (placeholder, var2);
9971 lower_omp (&tseq, ctx);
9972 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
9973 SET_DECL_VALUE_EXPR (placeholder, NULL_TREE);
9974 DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
9975 if (y)
9976 SET_DECL_VALUE_EXPR (new_vard, y);
9977 else
9978 {
9979 DECL_HAS_VALUE_EXPR_P (new_vard) = 0;
9980 SET_DECL_VALUE_EXPR (new_vard, NULL_TREE);
9981 }
9982 gimple_seq_add_seq (&reduc_list, tseq);
9983 x = lang_hooks.decls.omp_clause_assign_op (c, rprival_ref, var2);
9984 gimplify_and_add (x, &reduc_list);
9985
9986 x = lang_hooks.decls.omp_clause_dtor (c, var2);
9987 if (x)
9988 gimplify_and_add (x, dlist);
9989 }
9990 else
9991 {
9992 x = build_outer_var_ref (var, ctx);
9993 gimplify_assign (unshare_expr (var4), x, &thr01_list);
9994
9995 x = omp_reduction_init (c, TREE_TYPE (new_var));
9996 gimplify_assign (unshare_expr (var4), unshare_expr (x),
9997 &thrn1_list);
9998 gimplify_assign (unshare_expr (var4), x, &thr02_list);
9999
10000 gimplify_assign (unshare_expr (var4), rprivam1_ref, &thrn2_list);
10001
10002 enum tree_code code = OMP_CLAUSE_REDUCTION_CODE (c);
10003 if (code == MINUS_EXPR)
10004 code = PLUS_EXPR;
10005
10006 if (is_for_simd)
10007 gimplify_assign (unshare_expr (rprivb_ref), var5, &scan1_list);
10008 else
10009 {
10010 if (ctx->scan_exclusive)
10011 gimplify_assign (unshare_expr (rprivb_ref), var2,
10012 &scan1_list);
10013 x = build2 (code, TREE_TYPE (new_var), var2, new_var);
10014 gimplify_assign (var2, x, &scan1_list);
10015 if (ctx->scan_inclusive)
10016 gimplify_assign (unshare_expr (rprivb_ref), var2,
10017 &scan1_list);
10018 }
10019
10020 gimplify_assign (unshare_expr (rpriva_ref), unshare_expr (var4),
10021 &mdlist);
10022
10023 x = build2 (code, TREE_TYPE (new_var), var4, rprivb_ref);
10024 gimplify_assign (is_for_simd ? var6 : new_var, x, &input2_list);
10025
10026 gimplify_assign (build_outer_var_ref (var, ctx), rpriva_ref,
10027 &last_list);
10028
10029 x = build2 (code, TREE_TYPE (new_var), rprivalmk_ref,
10030 unshare_expr (rprival_ref));
10031 gimplify_assign (rprival_ref, x, &reduc_list);
10032 }
10033 }
10034
10035 g = gimple_build_assign (ivar, PLUS_EXPR, ivar, size_one_node);
10036 gimple_seq_add_stmt (&scan1_list, g);
10037 g = gimple_build_assign (ivar, PLUS_EXPR, ivar, size_one_node);
10038 gimple_seq_add_stmt (gimple_omp_body_ptr (is_for_simd
10039 ? scan_stmt4 : scan_stmt2), g);
10040
10041 tree controlb = create_tmp_var (boolean_type_node);
10042 tree controlp = create_tmp_var (ptr_type_node);
10043 tree nc = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__SCANTEMP_);
10044 OMP_CLAUSE_DECL (nc) = controlb;
10045 OMP_CLAUSE__SCANTEMP__CONTROL (nc) = 1;
10046 *cp1 = nc;
10047 cp1 = &OMP_CLAUSE_CHAIN (nc);
10048 nc = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__SCANTEMP_);
10049 OMP_CLAUSE_DECL (nc) = controlp;
10050 OMP_CLAUSE__SCANTEMP__CONTROL (nc) = 1;
10051 *cp1 = nc;
10052 cp1 = &OMP_CLAUSE_CHAIN (nc);
10053 nc = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__SCANTEMP_);
10054 OMP_CLAUSE_DECL (nc) = controlb;
10055 OMP_CLAUSE__SCANTEMP__CONTROL (nc) = 1;
10056 *cp2 = nc;
10057 cp2 = &OMP_CLAUSE_CHAIN (nc);
10058 nc = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__SCANTEMP_);
10059 OMP_CLAUSE_DECL (nc) = controlp;
10060 OMP_CLAUSE__SCANTEMP__CONTROL (nc) = 1;
10061 *cp2 = nc;
10062 cp2 = &OMP_CLAUSE_CHAIN (nc);
10063
10064 *cp1 = gimple_omp_for_clauses (stmt);
10065 gimple_omp_for_set_clauses (stmt, new_clauses1);
10066 *cp2 = gimple_omp_for_clauses (new_stmt);
10067 gimple_omp_for_set_clauses (new_stmt, new_clauses2);
10068
10069 if (is_for_simd)
10070 {
10071 gimple_seq_add_seq (gimple_omp_body_ptr (scan_stmt3), scan1_list);
10072 gimple_seq_add_seq (gimple_omp_body_ptr (input_stmt4), input2_list);
10073
10074 gsi_insert_seq_after (&input3_gsi, gimple_omp_body (input_stmt3),
10075 GSI_SAME_STMT);
10076 gsi_remove (&input3_gsi, true);
10077 gsi_insert_seq_after (&scan3_gsi, gimple_omp_body (scan_stmt3),
10078 GSI_SAME_STMT);
10079 gsi_remove (&scan3_gsi, true);
10080 gsi_insert_seq_after (&input4_gsi, gimple_omp_body (input_stmt4),
10081 GSI_SAME_STMT);
10082 gsi_remove (&input4_gsi, true);
10083 gsi_insert_seq_after (&scan4_gsi, gimple_omp_body (scan_stmt4),
10084 GSI_SAME_STMT);
10085 gsi_remove (&scan4_gsi, true);
10086 }
10087 else
10088 {
10089 gimple_omp_set_body (scan_stmt1, scan1_list);
10090 gimple_omp_set_body (input_stmt2, input2_list);
10091 }
10092
10093 gsi_insert_seq_after (&input1_gsi, gimple_omp_body (input_stmt1),
10094 GSI_SAME_STMT);
10095 gsi_remove (&input1_gsi, true);
10096 gsi_insert_seq_after (&scan1_gsi, gimple_omp_body (scan_stmt1),
10097 GSI_SAME_STMT);
10098 gsi_remove (&scan1_gsi, true);
10099 gsi_insert_seq_after (&input2_gsi, gimple_omp_body (input_stmt2),
10100 GSI_SAME_STMT);
10101 gsi_remove (&input2_gsi, true);
10102 gsi_insert_seq_after (&scan2_gsi, gimple_omp_body (scan_stmt2),
10103 GSI_SAME_STMT);
10104 gsi_remove (&scan2_gsi, true);
10105
10106 gimple_seq_add_seq (body_p, clist);
10107
10108 tree lab1 = create_artificial_label (UNKNOWN_LOCATION);
10109 tree lab2 = create_artificial_label (UNKNOWN_LOCATION);
10110 tree lab3 = create_artificial_label (UNKNOWN_LOCATION);
10111 g = gimple_build_cond (EQ_EXPR, thread_num, integer_zero_node, lab1, lab2);
10112 gimple_seq_add_stmt (body_p, g);
10113 g = gimple_build_label (lab1);
10114 gimple_seq_add_stmt (body_p, g);
10115 gimple_seq_add_seq (body_p, thr01_list);
10116 g = gimple_build_goto (lab3);
10117 gimple_seq_add_stmt (body_p, g);
10118 g = gimple_build_label (lab2);
10119 gimple_seq_add_stmt (body_p, g);
10120 gimple_seq_add_seq (body_p, thrn1_list);
10121 g = gimple_build_label (lab3);
10122 gimple_seq_add_stmt (body_p, g);
10123
10124 g = gimple_build_assign (ivar, size_zero_node);
10125 gimple_seq_add_stmt (body_p, g);
10126
10127 gimple_seq_add_stmt (body_p, stmt);
10128 gimple_seq_add_seq (body_p, body);
10129 gimple_seq_add_stmt (body_p, gimple_build_omp_continue (fd->loop.v,
10130 fd->loop.v));
10131
10132 g = gimple_build_omp_return (true);
10133 gimple_seq_add_stmt (body_p, g);
10134 gimple_seq_add_seq (body_p, mdlist);
10135
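/* Emit the work-efficient parallel prefix sum over the rpriva array
   sketched in the comment above lower_omp_for_scan: K doubles each
   round on the way up, DOWN flips to all-ones once 2*K would exceed
   the number of threads, K then halves on the way back down, and a
   barrier separates the rounds.  */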
10136 lab1 = create_artificial_label (UNKNOWN_LOCATION);
10137 lab2 = create_artificial_label (UNKNOWN_LOCATION);
10138 g = gimple_build_cond (GT_EXPR, num_threads, integer_one_node, lab1, lab2);
10139 gimple_seq_add_stmt (body_p, g);
10140 g = gimple_build_label (lab1);
10141 gimple_seq_add_stmt (body_p, g);
10142
10143 g = omp_build_barrier (NULL);
10144 gimple_seq_add_stmt (body_p, g);
10145
10146 tree down = create_tmp_var (unsigned_type_node);
10147 g = gimple_build_assign (down, build_zero_cst (unsigned_type_node));
10148 gimple_seq_add_stmt (body_p, g);
10149
10150 g = gimple_build_assign (k, build_one_cst (unsigned_type_node));
10151 gimple_seq_add_stmt (body_p, g);
10152
10153 tree num_threadsu = create_tmp_var (unsigned_type_node);
10154 g = gimple_build_assign (num_threadsu, NOP_EXPR, num_threads);
10155 gimple_seq_add_stmt (body_p, g);
10156
10157 tree thread_numu = create_tmp_var (unsigned_type_node);
10158 g = gimple_build_assign (thread_numu, NOP_EXPR, thread_num);
10159 gimple_seq_add_stmt (body_p, g);
10160
10161 tree thread_nump1 = create_tmp_var (unsigned_type_node);
10162 g = gimple_build_assign (thread_nump1, PLUS_EXPR, thread_numu,
10163 build_int_cst (unsigned_type_node, 1));
10164 gimple_seq_add_stmt (body_p, g);
10165
10166 lab3 = create_artificial_label (UNKNOWN_LOCATION);
10167 g = gimple_build_label (lab3);
10168 gimple_seq_add_stmt (body_p, g);
10169
10170 tree twok = create_tmp_var (unsigned_type_node);
10171 g = gimple_build_assign (twok, LSHIFT_EXPR, k, integer_one_node);
10172 gimple_seq_add_stmt (body_p, g);
10173
10174 tree lab4 = create_artificial_label (UNKNOWN_LOCATION);
10175 tree lab5 = create_artificial_label (UNKNOWN_LOCATION);
10176 tree lab6 = create_artificial_label (UNKNOWN_LOCATION);
10177 g = gimple_build_cond (GT_EXPR, twok, num_threadsu, lab4, lab5);
10178 gimple_seq_add_stmt (body_p, g);
10179 g = gimple_build_label (lab4);
10180 gimple_seq_add_stmt (body_p, g);
10181 g = gimple_build_assign (down, build_all_ones_cst (unsigned_type_node));
10182 gimple_seq_add_stmt (body_p, g);
10183 g = gimple_build_assign (k, RSHIFT_EXPR, k, integer_one_node);
10184 gimple_seq_add_stmt (body_p, g);
10185
10186 g = gimple_build_cond (EQ_EXPR, k, num_threadsu, lab6, lab5);
10187 gimple_seq_add_stmt (body_p, g);
10188 g = gimple_build_label (lab6);
10189 gimple_seq_add_stmt (body_p, g);
10190
10191 g = gimple_build_assign (k, RSHIFT_EXPR, k, integer_one_node);
10192 gimple_seq_add_stmt (body_p, g);
10193
10194 g = gimple_build_label (lab5);
10195 gimple_seq_add_stmt (body_p, g);
10196
10197 g = gimple_build_assign (twok, LSHIFT_EXPR, k, integer_one_node);
10198 gimple_seq_add_stmt (body_p, g);
10199
10200 tree cplx = create_tmp_var (build_complex_type (unsigned_type_node, false));
10201 DECL_GIMPLE_REG_P (cplx) = 1;
10202 g = gimple_build_call_internal (IFN_MUL_OVERFLOW, 2, thread_nump1, twok);
10203 gimple_call_set_lhs (g, cplx);
10204 gimple_seq_add_stmt (body_p, g);
10205 tree mul = create_tmp_var (unsigned_type_node);
10206 g = gimple_build_assign (mul, REALPART_EXPR,
10207 build1 (REALPART_EXPR, unsigned_type_node, cplx));
10208 gimple_seq_add_stmt (body_p, g);
10209 tree ovf = create_tmp_var (unsigned_type_node);
10210 g = gimple_build_assign (ovf, IMAGPART_EXPR,
10211 build1 (IMAGPART_EXPR, unsigned_type_node, cplx));
10212 gimple_seq_add_stmt (body_p, g);
10213
10214 tree lab7 = create_artificial_label (UNKNOWN_LOCATION);
10215 tree lab8 = create_artificial_label (UNKNOWN_LOCATION);
10216 g = gimple_build_cond (EQ_EXPR, ovf, build_zero_cst (unsigned_type_node),
10217 lab7, lab8);
10218 gimple_seq_add_stmt (body_p, g);
10219 g = gimple_build_label (lab7);
10220 gimple_seq_add_stmt (body_p, g);
10221
10222 tree andv = create_tmp_var (unsigned_type_node);
10223 g = gimple_build_assign (andv, BIT_AND_EXPR, k, down);
10224 gimple_seq_add_stmt (body_p, g);
10225 tree andvm1 = create_tmp_var (unsigned_type_node);
10226 g = gimple_build_assign (andvm1, PLUS_EXPR, andv,
10227 build_minus_one_cst (unsigned_type_node));
10228 gimple_seq_add_stmt (body_p, g);
10229
10230 g = gimple_build_assign (l, PLUS_EXPR, mul, andvm1);
10231 gimple_seq_add_stmt (body_p, g);
10232
10233 tree lab9 = create_artificial_label (UNKNOWN_LOCATION);
10234 g = gimple_build_cond (LT_EXPR, l, num_threadsu, lab9, lab8);
10235 gimple_seq_add_stmt (body_p, g);
10236 g = gimple_build_label (lab9);
10237 gimple_seq_add_stmt (body_p, g);
10238 gimple_seq_add_seq (body_p, reduc_list);
10239 g = gimple_build_label (lab8);
10240 gimple_seq_add_stmt (body_p, g);
10241
10242 tree lab10 = create_artificial_label (UNKNOWN_LOCATION);
10243 tree lab11 = create_artificial_label (UNKNOWN_LOCATION);
10244 tree lab12 = create_artificial_label (UNKNOWN_LOCATION);
10245 g = gimple_build_cond (EQ_EXPR, down, build_zero_cst (unsigned_type_node),
10246 lab10, lab11);
10247 gimple_seq_add_stmt (body_p, g);
10248 g = gimple_build_label (lab10);
10249 gimple_seq_add_stmt (body_p, g);
10250 g = gimple_build_assign (k, LSHIFT_EXPR, k, integer_one_node);
10251 gimple_seq_add_stmt (body_p, g);
10252 g = gimple_build_goto (lab12);
10253 gimple_seq_add_stmt (body_p, g);
10254 g = gimple_build_label (lab11);
10255 gimple_seq_add_stmt (body_p, g);
10256 g = gimple_build_assign (k, RSHIFT_EXPR, k, integer_one_node);
10257 gimple_seq_add_stmt (body_p, g);
10258 g = gimple_build_label (lab12);
10259 gimple_seq_add_stmt (body_p, g);
10260
10261 g = omp_build_barrier (NULL);
10262 gimple_seq_add_stmt (body_p, g);
10263
10264 g = gimple_build_cond (NE_EXPR, k, build_zero_cst (unsigned_type_node),
10265 lab3, lab2);
10266 gimple_seq_add_stmt (body_p, g);
10267
10268 g = gimple_build_label (lab2);
10269 gimple_seq_add_stmt (body_p, g);
10270
10271 lab1 = create_artificial_label (UNKNOWN_LOCATION);
10272 lab2 = create_artificial_label (UNKNOWN_LOCATION);
10273 lab3 = create_artificial_label (UNKNOWN_LOCATION);
10274 g = gimple_build_cond (EQ_EXPR, thread_num, integer_zero_node, lab1, lab2);
10275 gimple_seq_add_stmt (body_p, g);
10276 g = gimple_build_label (lab1);
10277 gimple_seq_add_stmt (body_p, g);
10278 gimple_seq_add_seq (body_p, thr02_list);
10279 g = gimple_build_goto (lab3);
10280 gimple_seq_add_stmt (body_p, g);
10281 g = gimple_build_label (lab2);
10282 gimple_seq_add_stmt (body_p, g);
10283 gimple_seq_add_seq (body_p, thrn2_list);
10284 g = gimple_build_label (lab3);
10285 gimple_seq_add_stmt (body_p, g);
10286
10287 g = gimple_build_assign (ivar, size_zero_node);
10288 gimple_seq_add_stmt (body_p, g);
10289 gimple_seq_add_stmt (body_p, new_stmt);
10290 gimple_seq_add_seq (body_p, new_body);
10291
10292 gimple_seq new_dlist = NULL;
10293 lab1 = create_artificial_label (UNKNOWN_LOCATION);
10294 lab2 = create_artificial_label (UNKNOWN_LOCATION);
10295 tree num_threadsm1 = create_tmp_var (integer_type_node);
10296 g = gimple_build_assign (num_threadsm1, PLUS_EXPR, num_threads,
10297 integer_minus_one_node);
10298 gimple_seq_add_stmt (&new_dlist, g);
10299 g = gimple_build_cond (EQ_EXPR, thread_num, num_threadsm1, lab1, lab2);
10300 gimple_seq_add_stmt (&new_dlist, g);
10301 g = gimple_build_label (lab1);
10302 gimple_seq_add_stmt (&new_dlist, g);
10303 gimple_seq_add_seq (&new_dlist, last_list);
10304 g = gimple_build_label (lab2);
10305 gimple_seq_add_stmt (&new_dlist, g);
10306 gimple_seq_add_seq (&new_dlist, *dlist);
10307 *dlist = new_dlist;
10308 }
10309
10310 /* Lower code for an OMP loop directive. */
10311
10312 static void
10313 lower_omp_for (gimple_stmt_iterator *gsi_p, omp_context *ctx)
10314 {
10315 tree *rhs_p, block;
10316 struct omp_for_data fd, *fdp = NULL;
10317 gomp_for *stmt = as_a <gomp_for *> (gsi_stmt (*gsi_p));
10318 gbind *new_stmt;
10319 gimple_seq omp_for_body, body, dlist, tred_ilist = NULL, tred_dlist = NULL;
10320 gimple_seq cnt_list = NULL, clist = NULL;
10321 gimple_seq oacc_head = NULL, oacc_tail = NULL;
10322 size_t i;
10323
10324 push_gimplify_context ();
10325
10326 lower_omp (gimple_omp_for_pre_body_ptr (stmt), ctx);
10327
10328 block = make_node (BLOCK);
10329 new_stmt = gimple_build_bind (NULL, NULL, block);
10330 /* Replace at gsi right away, so that 'stmt' is no longer a member
10331 of a sequence, as we're going to add it to a different one
10332 below. */
10333 gsi_replace (gsi_p, new_stmt, true);
10334
10335 /* Move the declarations of temporaries in the loop body before we
10336 make it go away. */
10337 omp_for_body = gimple_omp_body (stmt);
10338 if (!gimple_seq_empty_p (omp_for_body)
10339 && gimple_code (gimple_seq_first_stmt (omp_for_body)) == GIMPLE_BIND)
10340 {
10341 gbind *inner_bind
10342 = as_a <gbind *> (gimple_seq_first_stmt (omp_for_body));
10343 tree vars = gimple_bind_vars (inner_bind);
10344 gimple_bind_append_vars (new_stmt, vars);
10345 /* bind_vars/BLOCK_VARS are being moved to new_stmt/block; don't
10346 keep them on the inner_bind and its block. */
10347 gimple_bind_set_vars (inner_bind, NULL_TREE);
10348 if (gimple_bind_block (inner_bind))
10349 BLOCK_VARS (gimple_bind_block (inner_bind)) = NULL_TREE;
10350 }
10351
10352 if (gimple_omp_for_combined_into_p (stmt))
10353 {
10354 omp_extract_for_data (stmt, &fd, NULL);
10355 fdp = &fd;
10356
10357 /* We need two temporaries with fd.loop.v type (istart/iend)
10358 and then (fd.collapse - 1) temporaries with the same
10359 type for count2 ... countN-1 vars if not constant. */
10360 size_t count = 2;
10361 tree type = fd.iter_type;
10362 if (fd.collapse > 1
10363 && TREE_CODE (fd.loop.n2) != INTEGER_CST)
10364 count += fd.collapse - 1;
10365 bool taskreg_for
10366 = (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_FOR
10367 || gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_TASKLOOP);
10368 tree outerc = NULL, *pc = gimple_omp_for_clauses_ptr (stmt);
10369 tree simtc = NULL;
10370 tree clauses = *pc;
10371 if (taskreg_for)
10372 outerc
10373 = omp_find_clause (gimple_omp_taskreg_clauses (ctx->outer->stmt),
10374 OMP_CLAUSE__LOOPTEMP_);
10375 if (ctx->simt_stmt)
10376 simtc = omp_find_clause (gimple_omp_for_clauses (ctx->simt_stmt),
10377 OMP_CLAUSE__LOOPTEMP_);
10378 for (i = 0; i < count; i++)
10379 {
10380 tree temp;
10381 if (taskreg_for)
10382 {
10383 gcc_assert (outerc);
10384 temp = lookup_decl (OMP_CLAUSE_DECL (outerc), ctx->outer);
10385 outerc = omp_find_clause (OMP_CLAUSE_CHAIN (outerc),
10386 OMP_CLAUSE__LOOPTEMP_);
10387 }
10388 else
10389 {
10390 /* If there are 2 adjacent SIMD stmts, one with a _simt_
10391 clause, another without, make sure they have the same
10392 decls in _looptemp_ clauses, because the outer stmt
10393 they are combined into will look up just one inner_stmt. */
10394 if (ctx->simt_stmt)
10395 temp = OMP_CLAUSE_DECL (simtc);
10396 else
10397 temp = create_tmp_var (type);
10398 insert_decl_map (&ctx->outer->cb, temp, temp);
10399 }
10400 *pc = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__LOOPTEMP_);
10401 OMP_CLAUSE_DECL (*pc) = temp;
10402 pc = &OMP_CLAUSE_CHAIN (*pc);
10403 if (ctx->simt_stmt)
10404 simtc = omp_find_clause (OMP_CLAUSE_CHAIN (simtc),
10405 OMP_CLAUSE__LOOPTEMP_);
10406 }
10407 *pc = clauses;
10408 }
10409
10410 /* The pre-body and input clauses go before the lowered GIMPLE_OMP_FOR. */
10411 dlist = NULL;
10412 body = NULL;
10413 tree rclauses
10414 = omp_task_reductions_find_first (gimple_omp_for_clauses (stmt), OMP_FOR,
10415 OMP_CLAUSE_REDUCTION);
10416 tree rtmp = NULL_TREE;
10417 if (rclauses)
10418 {
10419 tree type = build_pointer_type (pointer_sized_int_node);
10420 tree temp = create_tmp_var (type);
10421 tree c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__REDUCTEMP_);
10422 OMP_CLAUSE_DECL (c) = temp;
10423 OMP_CLAUSE_CHAIN (c) = gimple_omp_for_clauses (stmt);
10424 gimple_omp_for_set_clauses (stmt, c);
10425 lower_omp_task_reductions (ctx, OMP_FOR,
10426 gimple_omp_for_clauses (stmt),
10427 &tred_ilist, &tred_dlist);
10428 rclauses = c;
10429 rtmp = make_ssa_name (type);
10430 gimple_seq_add_stmt (&body, gimple_build_assign (rtmp, temp));
10431 }
10432
10433 lower_lastprivate_conditional_clauses (gimple_omp_for_clauses_ptr (stmt),
10434 ctx);
10435
10436 lower_rec_input_clauses (gimple_omp_for_clauses (stmt), &body, &dlist, ctx,
10437 fdp);
10438 gimple_seq_add_seq (rclauses ? &tred_ilist : &body,
10439 gimple_omp_for_pre_body (stmt));
10440
10441 lower_omp (gimple_omp_body_ptr (stmt), ctx);
10442
10443 /* Lower the header expressions. At this point, we can assume that
10444 the header is of the form:
10445
10446 #pragma omp for (V = VAL1; V {<|>|<=|>=} VAL2; V = V [+-] VAL3)
10447
10448 We just need to make sure that VAL1, VAL2 and VAL3 are lowered
10449 using the .omp_data_s mapping, if needed. */
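/* For example (a sketch, with a made-up temporary name), a
   non-invariant bound such as

     #pragma omp for (V = 0; V < n; V++)

   where 'n' was mapped via .omp_data_s is rewritten as

     D.1234 = n;
     #pragma omp for (V = 0; V < D.1234; V++)

   with the load of 'n' collected into CNT_LIST below.  */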
10450 for (i = 0; i < gimple_omp_for_collapse (stmt); i++)
10451 {
10452 rhs_p = gimple_omp_for_initial_ptr (stmt, i);
10453 if (!is_gimple_min_invariant (*rhs_p))
10454 *rhs_p = get_formal_tmp_var (*rhs_p, &cnt_list);
10455 else if (TREE_CODE (*rhs_p) == ADDR_EXPR)
10456 recompute_tree_invariant_for_addr_expr (*rhs_p);
10457
10458 rhs_p = gimple_omp_for_final_ptr (stmt, i);
10459 if (!is_gimple_min_invariant (*rhs_p))
10460 *rhs_p = get_formal_tmp_var (*rhs_p, &cnt_list);
10461 else if (TREE_CODE (*rhs_p) == ADDR_EXPR)
10462 recompute_tree_invariant_for_addr_expr (*rhs_p);
10463
10464 rhs_p = &TREE_OPERAND (gimple_omp_for_incr (stmt, i), 1);
10465 if (!is_gimple_min_invariant (*rhs_p))
10466 *rhs_p = get_formal_tmp_var (*rhs_p, &cnt_list);
10467 }
10468 if (rclauses)
10469 gimple_seq_add_seq (&tred_ilist, cnt_list);
10470 else
10471 gimple_seq_add_seq (&body, cnt_list);
10472
10473 /* Once lowered, extract the bounds and clauses. */
10474 omp_extract_for_data (stmt, &fd, NULL);
10475
10476 if (is_gimple_omp_oacc (ctx->stmt)
10477 && !ctx_in_oacc_kernels_region (ctx))
10478 lower_oacc_head_tail (gimple_location (stmt),
10479 gimple_omp_for_clauses (stmt),
10480 &oacc_head, &oacc_tail, ctx);
10481
10482 /* Add OpenACC partitioning and reduction markers just before the loop. */
10483 if (oacc_head)
10484 gimple_seq_add_seq (&body, oacc_head);
10485
10486 lower_omp_for_lastprivate (&fd, &body, &dlist, &clist, ctx);
10487
10488 if (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_FOR)
10489 for (tree c = gimple_omp_for_clauses (stmt); c; c = OMP_CLAUSE_CHAIN (c))
10490 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
10491 && !OMP_CLAUSE_LINEAR_NO_COPYIN (c))
10492 {
10493 OMP_CLAUSE_DECL (c) = lookup_decl (OMP_CLAUSE_DECL (c), ctx);
10494 if (DECL_P (OMP_CLAUSE_LINEAR_STEP (c)))
10495 OMP_CLAUSE_LINEAR_STEP (c)
10496 = maybe_lookup_decl_in_outer_ctx (OMP_CLAUSE_LINEAR_STEP (c),
10497 ctx);
10498 }
10499
10500 bool phony_loop = (gimple_omp_for_kind (stmt) != GF_OMP_FOR_KIND_GRID_LOOP
10501 && gimple_omp_for_grid_phony (stmt));
10502 if ((ctx->scan_inclusive || ctx->scan_exclusive)
10503 && gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_FOR)
10504 {
10505 gcc_assert (!phony_loop);
10506 lower_omp_for_scan (&body, &dlist, stmt, &fd, ctx);
10507 }
10508 else
10509 {
10510 if (!phony_loop)
10511 gimple_seq_add_stmt (&body, stmt);
10512 gimple_seq_add_seq (&body, gimple_omp_body (stmt));
10513 }
10514
10515 if (!phony_loop)
10516 gimple_seq_add_stmt (&body, gimple_build_omp_continue (fd.loop.v,
10517 fd.loop.v));
10518
10519 /* After the loop, add exit clauses. */
10520 lower_reduction_clauses (gimple_omp_for_clauses (stmt), &body, &clist, ctx);
10521
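/* Reduction statements that must execute atomically (collected in
   CLIST) are bracketed by GOMP_atomic_start/GOMP_atomic_end calls.  */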
10522 if (clist)
10523 {
10524 tree fndecl = builtin_decl_explicit (BUILT_IN_GOMP_ATOMIC_START);
10525 gcall *g = gimple_build_call (fndecl, 0);
10526 gimple_seq_add_stmt (&body, g);
10527 gimple_seq_add_seq (&body, clist);
10528 fndecl = builtin_decl_explicit (BUILT_IN_GOMP_ATOMIC_END);
10529 g = gimple_build_call (fndecl, 0);
10530 gimple_seq_add_stmt (&body, g);
10531 }
10532
10533 if (ctx->cancellable)
10534 gimple_seq_add_stmt (&body, gimple_build_label (ctx->cancel_label));
10535
10536 gimple_seq_add_seq (&body, dlist);
10537
10538 if (rclauses)
10539 {
10540 gimple_seq_add_seq (&tred_ilist, body);
10541 body = tred_ilist;
10542 }
10543
10544 body = maybe_catch_exception (body);
10545
10546 if (!phony_loop)
10547 {
10548 /* Region exit marker goes at the end of the loop body. */
10549 gimple *g = gimple_build_omp_return (fd.have_nowait);
10550 gimple_seq_add_stmt (&body, g);
10551
10552 gimple_seq_add_seq (&body, tred_dlist);
10553
10554 maybe_add_implicit_barrier_cancel (ctx, g, &body);
10555
10556 if (rclauses)
10557 OMP_CLAUSE_DECL (rclauses) = rtmp;
10558 }
10559
10560 /* Add OpenACC joining and reduction markers just after the loop. */
10561 if (oacc_tail)
10562 gimple_seq_add_seq (&body, oacc_tail);
10563
10564 pop_gimplify_context (new_stmt);
10565
10566 gimple_bind_append_vars (new_stmt, ctx->block_vars);
10567 maybe_remove_omp_member_access_dummy_vars (new_stmt);
10568 BLOCK_VARS (block) = gimple_bind_vars (new_stmt);
10569 if (BLOCK_VARS (block))
10570 TREE_USED (block) = 1;
10571
10572 gimple_bind_set_body (new_stmt, body);
10573 gimple_omp_set_body (stmt, NULL);
10574 gimple_omp_for_set_pre_body (stmt, NULL);
10575 }
10576
10577 /* Callback for walk_stmts.  Check whether the walked statements
10578 consist of exactly one GIMPLE_OMP_FOR or GIMPLE_OMP_SECTIONS. */
10579
10580 static tree
10581 check_combined_parallel (gimple_stmt_iterator *gsi_p,
10582 bool *handled_ops_p,
10583 struct walk_stmt_info *wi)
10584 {
10585 int *info = (int *) wi->info;
10586 gimple *stmt = gsi_stmt (*gsi_p);
10587
10588 *handled_ops_p = true;
10589 switch (gimple_code (stmt))
10590 {
10591 WALK_SUBSTMTS;
10592
10593 case GIMPLE_DEBUG:
10594 break;
10595 case GIMPLE_OMP_FOR:
10596 case GIMPLE_OMP_SECTIONS:
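/* The first worksharing construct seen sets *INFO to 1; a second
   one, or any other statement (below), poisons it to -1.  */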
10597 *info = *info == 0 ? 1 : -1;
10598 break;
10599 default:
10600 *info = -1;
10601 break;
10602 }
10603 return NULL;
10604 }
10605
10606 struct omp_taskcopy_context
10607 {
10608 /* This field must be at the beginning, as we do "inheritance": Some
10609 callback functions for tree-inline.c (e.g., omp_copy_decl)
10610 receive a copy_body_data pointer that is up-casted to an
10611 omp_context pointer. */
10612 copy_body_data cb;
10613 omp_context *ctx;
10614 };
10615
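/* copy_decl callback for the task copy function: variables that have
   a field in the task's sender record (sfield_map) get a fresh local
   temporary; everything else is left untouched.  */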
10616 static tree
10617 task_copyfn_copy_decl (tree var, copy_body_data *cb)
10618 {
10619 struct omp_taskcopy_context *tcctx = (struct omp_taskcopy_context *) cb;
10620
10621 if (splay_tree_lookup (tcctx->ctx->sfield_map, (splay_tree_key) var))
10622 return create_tmp_var (TREE_TYPE (var));
10623
10624 return var;
10625 }
10626
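/* Build a copy of record ORIG_TYPE for the task copy function,
   remapping variably modified field types, sizes and offsets through
   TCCTX's copy_body_data.  */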
10627 static tree
10628 task_copyfn_remap_type (struct omp_taskcopy_context *tcctx, tree orig_type)
10629 {
10630 tree name, new_fields = NULL, type, f;
10631
10632 type = lang_hooks.types.make_type (RECORD_TYPE);
10633 name = DECL_NAME (TYPE_NAME (orig_type));
10634 name = build_decl (gimple_location (tcctx->ctx->stmt),
10635 TYPE_DECL, name, type);
10636 TYPE_NAME (type) = name;
10637
10638 for (f = TYPE_FIELDS (orig_type); f ; f = TREE_CHAIN (f))
10639 {
10640 tree new_f = copy_node (f);
10641 DECL_CONTEXT (new_f) = type;
10642 TREE_TYPE (new_f) = remap_type (TREE_TYPE (f), &tcctx->cb);
10643 TREE_CHAIN (new_f) = new_fields;
10644 walk_tree (&DECL_SIZE (new_f), copy_tree_body_r, &tcctx->cb, NULL);
10645 walk_tree (&DECL_SIZE_UNIT (new_f), copy_tree_body_r, &tcctx->cb, NULL);
10646 walk_tree (&DECL_FIELD_OFFSET (new_f), copy_tree_body_r,
10647 &tcctx->cb, NULL);
10648 new_fields = new_f;
10649 tcctx->cb.decl_map->put (f, new_f);
10650 }
10651 TYPE_FIELDS (type) = nreverse (new_fields);
10652 layout_type (type);
10653 return type;
10654 }
10655
10656 /* Create the task copy function for TASK_STMT: it copies shared
variable pointers and copy-constructs firstprivate variables from the
sender record into the task-local record.  CTX is the directive's context.  */
10657
10658 static void
10659 create_task_copyfn (gomp_task *task_stmt, omp_context *ctx)
10660 {
10661 struct function *child_cfun;
10662 tree child_fn, t, c, src, dst, f, sf, arg, sarg, decl;
10663 tree record_type, srecord_type, bind, list;
10664 bool record_needs_remap = false, srecord_needs_remap = false;
10665 splay_tree_node n;
10666 struct omp_taskcopy_context tcctx;
10667 location_t loc = gimple_location (task_stmt);
10668 size_t looptempno = 0;
10669
10670 child_fn = gimple_omp_task_copy_fn (task_stmt);
10671 child_cfun = DECL_STRUCT_FUNCTION (child_fn);
10672 gcc_assert (child_cfun->cfg == NULL);
10673 DECL_SAVED_TREE (child_fn) = alloc_stmt_list ();
10674
10675 /* Reset DECL_CONTEXT on function arguments. */
10676 for (t = DECL_ARGUMENTS (child_fn); t; t = DECL_CHAIN (t))
10677 DECL_CONTEXT (t) = child_fn;
10678
10679 /* Populate the function. */
10680 push_gimplify_context ();
10681 push_cfun (child_cfun);
10682
10683 bind = build3 (BIND_EXPR, void_type_node, NULL, NULL, NULL);
10684 TREE_SIDE_EFFECTS (bind) = 1;
10685 list = NULL;
10686 DECL_SAVED_TREE (child_fn) = bind;
10687 DECL_SOURCE_LOCATION (child_fn) = gimple_location (task_stmt);
10688
10689 /* Remap src and dst argument types if needed. */
10690 record_type = ctx->record_type;
10691 srecord_type = ctx->srecord_type;
10692 for (f = TYPE_FIELDS (record_type); f ; f = DECL_CHAIN (f))
10693 if (variably_modified_type_p (TREE_TYPE (f), ctx->cb.src_fn))
10694 {
10695 record_needs_remap = true;
10696 break;
10697 }
10698 for (f = TYPE_FIELDS (srecord_type); f ; f = DECL_CHAIN (f))
10699 if (variably_modified_type_p (TREE_TYPE (f), ctx->cb.src_fn))
10700 {
10701 srecord_needs_remap = true;
10702 break;
10703 }
10704
10705 if (record_needs_remap || srecord_needs_remap)
10706 {
10707 memset (&tcctx, '\0', sizeof (tcctx));
10708 tcctx.cb.src_fn = ctx->cb.src_fn;
10709 tcctx.cb.dst_fn = child_fn;
10710 tcctx.cb.src_node = cgraph_node::get (tcctx.cb.src_fn);
10711 gcc_checking_assert (tcctx.cb.src_node);
10712 tcctx.cb.dst_node = tcctx.cb.src_node;
10713 tcctx.cb.src_cfun = ctx->cb.src_cfun;
10714 tcctx.cb.copy_decl = task_copyfn_copy_decl;
10715 tcctx.cb.eh_lp_nr = 0;
10716 tcctx.cb.transform_call_graph_edges = CB_CGE_MOVE;
10717 tcctx.cb.decl_map = new hash_map<tree, tree>;
10718 tcctx.ctx = ctx;
10719
10720 if (record_needs_remap)
10721 record_type = task_copyfn_remap_type (&tcctx, record_type);
10722 if (srecord_needs_remap)
10723 srecord_type = task_copyfn_remap_type (&tcctx, srecord_type);
10724 }
10725 else
10726 tcctx.cb.decl_map = NULL;
10727
10728 arg = DECL_ARGUMENTS (child_fn);
10729 TREE_TYPE (arg) = build_pointer_type (record_type);
10730 sarg = DECL_CHAIN (arg);
10731 TREE_TYPE (sarg) = build_pointer_type (srecord_type);
10732
10733 /* First pass: initialize temporaries used in record_type and srecord_type
10734 sizes and field offsets. */
10735 if (tcctx.cb.decl_map)
10736 for (c = gimple_omp_task_clauses (task_stmt); c; c = OMP_CLAUSE_CHAIN (c))
10737 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
10738 {
10739 tree *p;
10740
10741 decl = OMP_CLAUSE_DECL (c);
10742 p = tcctx.cb.decl_map->get (decl);
10743 if (p == NULL)
10744 continue;
10745 n = splay_tree_lookup (ctx->sfield_map, (splay_tree_key) decl);
10746 sf = (tree) n->value;
10747 sf = *tcctx.cb.decl_map->get (sf);
10748 src = build_simple_mem_ref_loc (loc, sarg);
10749 src = omp_build_component_ref (src, sf);
10750 t = build2 (MODIFY_EXPR, TREE_TYPE (*p), *p, src);
10751 append_to_statement_list (t, &list);
10752 }
10753
10754 /* Second pass: copy shared var pointers and copy-construct non-VLA
10755 firstprivate vars. */
10756 for (c = gimple_omp_task_clauses (task_stmt); c; c = OMP_CLAUSE_CHAIN (c))
10757 switch (OMP_CLAUSE_CODE (c))
10758 {
10759 splay_tree_key key;
10760 case OMP_CLAUSE_SHARED:
10761 decl = OMP_CLAUSE_DECL (c);
10762 key = (splay_tree_key) decl;
10763 if (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
10764 key = (splay_tree_key) &DECL_UID (decl);
10765 n = splay_tree_lookup (ctx->field_map, key);
10766 if (n == NULL)
10767 break;
10768 f = (tree) n->value;
10769 if (tcctx.cb.decl_map)
10770 f = *tcctx.cb.decl_map->get (f);
10771 n = splay_tree_lookup (ctx->sfield_map, key);
10772 sf = (tree) n->value;
10773 if (tcctx.cb.decl_map)
10774 sf = *tcctx.cb.decl_map->get (sf);
10775 src = build_simple_mem_ref_loc (loc, sarg);
10776 src = omp_build_component_ref (src, sf);
10777 dst = build_simple_mem_ref_loc (loc, arg);
10778 dst = omp_build_component_ref (dst, f);
10779 t = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
10780 append_to_statement_list (t, &list);
10781 break;
10782 case OMP_CLAUSE_REDUCTION:
10783 case OMP_CLAUSE_IN_REDUCTION:
10784 decl = OMP_CLAUSE_DECL (c);
10785 if (TREE_CODE (decl) == MEM_REF)
10786 {
10787 decl = TREE_OPERAND (decl, 0);
10788 if (TREE_CODE (decl) == POINTER_PLUS_EXPR)
10789 decl = TREE_OPERAND (decl, 0);
10790 if (TREE_CODE (decl) == INDIRECT_REF
10791 || TREE_CODE (decl) == ADDR_EXPR)
10792 decl = TREE_OPERAND (decl, 0);
10793 }
10794 key = (splay_tree_key) decl;
10795 n = splay_tree_lookup (ctx->field_map, key);
10796 if (n == NULL)
10797 break;
10798 f = (tree) n->value;
10799 if (tcctx.cb.decl_map)
10800 f = *tcctx.cb.decl_map->get (f);
10801 n = splay_tree_lookup (ctx->sfield_map, key);
10802 sf = (tree) n->value;
10803 if (tcctx.cb.decl_map)
10804 sf = *tcctx.cb.decl_map->get (sf);
10805 src = build_simple_mem_ref_loc (loc, sarg);
10806 src = omp_build_component_ref (src, sf);
10807 if (decl != OMP_CLAUSE_DECL (c)
10808 && TREE_CODE (TREE_TYPE (decl)) == REFERENCE_TYPE
10809 && TREE_CODE (TREE_TYPE (TREE_TYPE (decl))) == POINTER_TYPE)
10810 src = build_simple_mem_ref_loc (loc, src);
10811 dst = build_simple_mem_ref_loc (loc, arg);
10812 dst = omp_build_component_ref (dst, f);
10813 t = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
10814 append_to_statement_list (t, &list);
10815 break;
10816 case OMP_CLAUSE__LOOPTEMP_:
10817 /* Fields for first two _looptemp_ clauses are initialized by
10818 GOMP_taskloop*, the rest are handled like firstprivate. */
10819 if (looptempno < 2)
10820 {
10821 looptempno++;
10822 break;
10823 }
10824 /* FALLTHRU */
10825 case OMP_CLAUSE__REDUCTEMP_:
10826 case OMP_CLAUSE_FIRSTPRIVATE:
10827 decl = OMP_CLAUSE_DECL (c);
10828 if (is_variable_sized (decl))
10829 break;
10830 n = splay_tree_lookup (ctx->field_map, (splay_tree_key) decl);
10831 if (n == NULL)
10832 break;
10833 f = (tree) n->value;
10834 if (tcctx.cb.decl_map)
10835 f = *tcctx.cb.decl_map->get (f);
10836 n = splay_tree_lookup (ctx->sfield_map, (splay_tree_key) decl);
10837 if (n != NULL)
10838 {
10839 sf = (tree) n->value;
10840 if (tcctx.cb.decl_map)
10841 sf = *tcctx.cb.decl_map->get (sf);
10842 src = build_simple_mem_ref_loc (loc, sarg);
10843 src = omp_build_component_ref (src, sf);
10844 if (use_pointer_for_field (decl, NULL) || omp_is_reference (decl))
10845 src = build_simple_mem_ref_loc (loc, src);
10846 }
10847 else
10848 src = decl;
10849 dst = build_simple_mem_ref_loc (loc, arg);
10850 dst = omp_build_component_ref (dst, f);
10851 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_FIRSTPRIVATE)
10852 t = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
10853 else
10854 t = lang_hooks.decls.omp_clause_copy_ctor (c, dst, src);
10855 append_to_statement_list (t, &list);
10856 break;
10857 case OMP_CLAUSE_PRIVATE:
10858 if (! OMP_CLAUSE_PRIVATE_OUTER_REF (c))
10859 break;
10860 decl = OMP_CLAUSE_DECL (c);
10861 n = splay_tree_lookup (ctx->field_map, (splay_tree_key) decl);
10862 f = (tree) n->value;
10863 if (tcctx.cb.decl_map)
10864 f = *tcctx.cb.decl_map->get (f);
10865 n = splay_tree_lookup (ctx->sfield_map, (splay_tree_key) decl);
10866 if (n != NULL)
10867 {
10868 sf = (tree) n->value;
10869 if (tcctx.cb.decl_map)
10870 sf = *tcctx.cb.decl_map->get (sf);
10871 src = build_simple_mem_ref_loc (loc, sarg);
10872 src = omp_build_component_ref (src, sf);
10873 if (use_pointer_for_field (decl, NULL))
10874 src = build_simple_mem_ref_loc (loc, src);
10875 }
10876 else
10877 src = decl;
10878 dst = build_simple_mem_ref_loc (loc, arg);
10879 dst = omp_build_component_ref (dst, f);
10880 t = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
10881 append_to_statement_list (t, &list);
10882 break;
10883 default:
10884 break;
10885 }
10886
10887 /* Last pass: handle VLA firstprivates. */
10888 if (tcctx.cb.decl_map)
10889 for (c = gimple_omp_task_clauses (task_stmt); c; c = OMP_CLAUSE_CHAIN (c))
10890 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
10891 {
10892 tree ind, ptr, df;
10893
10894 decl = OMP_CLAUSE_DECL (c);
10895 if (!is_variable_sized (decl))
10896 continue;
10897 n = splay_tree_lookup (ctx->field_map, (splay_tree_key) decl);
10898 if (n == NULL)
10899 continue;
10900 f = (tree) n->value;
10901 f = *tcctx.cb.decl_map->get (f);
10902 gcc_assert (DECL_HAS_VALUE_EXPR_P (decl));
10903 ind = DECL_VALUE_EXPR (decl);
10904 gcc_assert (TREE_CODE (ind) == INDIRECT_REF);
10905 gcc_assert (DECL_P (TREE_OPERAND (ind, 0)));
10906 n = splay_tree_lookup (ctx->sfield_map,
10907 (splay_tree_key) TREE_OPERAND (ind, 0));
10908 sf = (tree) n->value;
10909 sf = *tcctx.cb.decl_map->get (sf);
10910 src = build_simple_mem_ref_loc (loc, sarg);
10911 src = omp_build_component_ref (src, sf);
10912 src = build_simple_mem_ref_loc (loc, src);
10913 dst = build_simple_mem_ref_loc (loc, arg);
10914 dst = omp_build_component_ref (dst, f);
10915 t = lang_hooks.decls.omp_clause_copy_ctor (c, dst, src);
10916 append_to_statement_list (t, &list);
10917 n = splay_tree_lookup (ctx->field_map,
10918 (splay_tree_key) TREE_OPERAND (ind, 0));
10919 df = (tree) n->value;
10920 df = *tcctx.cb.decl_map->get (df);
10921 ptr = build_simple_mem_ref_loc (loc, arg);
10922 ptr = omp_build_component_ref (ptr, df);
10923 t = build2 (MODIFY_EXPR, TREE_TYPE (ptr), ptr,
10924 build_fold_addr_expr_loc (loc, dst));
10925 append_to_statement_list (t, &list);
10926 }
10927
10928 t = build1 (RETURN_EXPR, void_type_node, NULL);
10929 append_to_statement_list (t, &list);
10930
10931 if (tcctx.cb.decl_map)
10932 delete tcctx.cb.decl_map;
10933 pop_gimplify_context (NULL);
10934 BIND_EXPR_BODY (bind) = list;
10935 pop_cfun ();
10936 }
10937
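/* Lower the OMP_CLAUSE_DEPEND clauses found in *PCLAUSES: collect the
   dependence addresses into an array variable, append the code that
   fills it in to ISEQ and a clobber of it to OSEQ, and prepend a
   single OMP_CLAUSE_DEPEND clause of kind OMP_CLAUSE_DEPEND_LAST whose
   decl is the address of that array.  */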
10938 static void
10939 lower_depend_clauses (tree *pclauses, gimple_seq *iseq, gimple_seq *oseq)
10940 {
10941 tree c, clauses;
10942 gimple *g;
10943 size_t cnt[4] = { 0, 0, 0, 0 }, idx = 2, i;
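/* cnt[0] counts out/inout, cnt[1] mutexinoutset, cnt[2] in and cnt[3]
   depobj depend clauses.  The array handed to the runtime uses either
   the layout { N, N_out, addr... } (IDX == 2) or, as soon as
   mutexinoutset or depobj clauses are present, the layout
   { 0, N, N_out, N_mutexinoutset, N_in, addr... } (IDX == 5); e.g.
   depend(out: a) depend(in: b, c) yields { 3, 1, &a, &b, &c }.  */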
10944
10945 clauses = omp_find_clause (*pclauses, OMP_CLAUSE_DEPEND);
10946 gcc_assert (clauses);
10947 for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
10948 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND)
10949 switch (OMP_CLAUSE_DEPEND_KIND (c))
10950 {
10951 case OMP_CLAUSE_DEPEND_LAST:
10952 /* Lowering already done at gimplification. */
10953 return;
10954 case OMP_CLAUSE_DEPEND_IN:
10955 cnt[2]++;
10956 break;
10957 case OMP_CLAUSE_DEPEND_OUT:
10958 case OMP_CLAUSE_DEPEND_INOUT:
10959 cnt[0]++;
10960 break;
10961 case OMP_CLAUSE_DEPEND_MUTEXINOUTSET:
10962 cnt[1]++;
10963 break;
10964 case OMP_CLAUSE_DEPEND_DEPOBJ:
10965 cnt[3]++;
10966 break;
10967 case OMP_CLAUSE_DEPEND_SOURCE:
10968 case OMP_CLAUSE_DEPEND_SINK:
10969 /* FALLTHRU */
10970 default:
10971 gcc_unreachable ();
10972 }
10973 if (cnt[1] || cnt[3])
10974 idx = 5;
10975 size_t total = cnt[0] + cnt[1] + cnt[2] + cnt[3];
10976 tree type = build_array_type_nelts (ptr_type_node, total + idx);
10977 tree array = create_tmp_var (type);
10978 TREE_ADDRESSABLE (array) = 1;
10979 tree r = build4 (ARRAY_REF, ptr_type_node, array, size_int (0), NULL_TREE,
10980 NULL_TREE);
10981 if (idx == 5)
10982 {
10983 g = gimple_build_assign (r, build_int_cst (ptr_type_node, 0));
10984 gimple_seq_add_stmt (iseq, g);
10985 r = build4 (ARRAY_REF, ptr_type_node, array, size_int (1), NULL_TREE,
10986 NULL_TREE);
10987 }
10988 g = gimple_build_assign (r, build_int_cst (ptr_type_node, total));
10989 gimple_seq_add_stmt (iseq, g);
10990 for (i = 0; i < (idx == 5 ? 3 : 1); i++)
10991 {
10992 r = build4 (ARRAY_REF, ptr_type_node, array,
10993 size_int (i + 1 + (idx == 5)), NULL_TREE, NULL_TREE);
10994 g = gimple_build_assign (r, build_int_cst (ptr_type_node, cnt[i]));
10995 gimple_seq_add_stmt (iseq, g);
10996 }
10997 for (i = 0; i < 4; i++)
10998 {
10999 if (cnt[i] == 0)
11000 continue;
11001 for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
11002 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_DEPEND)
11003 continue;
11004 else
11005 {
11006 switch (OMP_CLAUSE_DEPEND_KIND (c))
11007 {
11008 case OMP_CLAUSE_DEPEND_IN:
11009 if (i != 2)
11010 continue;
11011 break;
11012 case OMP_CLAUSE_DEPEND_OUT:
11013 case OMP_CLAUSE_DEPEND_INOUT:
11014 if (i != 0)
11015 continue;
11016 break;
11017 case OMP_CLAUSE_DEPEND_MUTEXINOUTSET:
11018 if (i != 1)
11019 continue;
11020 break;
11021 case OMP_CLAUSE_DEPEND_DEPOBJ:
11022 if (i != 3)
11023 continue;
11024 break;
11025 default:
11026 gcc_unreachable ();
11027 }
11028 tree t = OMP_CLAUSE_DECL (c);
11029 t = fold_convert (ptr_type_node, t);
11030 gimplify_expr (&t, iseq, NULL, is_gimple_val, fb_rvalue);
11031 r = build4 (ARRAY_REF, ptr_type_node, array, size_int (idx++),
11032 NULL_TREE, NULL_TREE);
11033 g = gimple_build_assign (r, t);
11034 gimple_seq_add_stmt (iseq, g);
11035 }
11036 }
11037 c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE_DEPEND);
11038 OMP_CLAUSE_DEPEND_KIND (c) = OMP_CLAUSE_DEPEND_LAST;
11039 OMP_CLAUSE_DECL (c) = build_fold_addr_expr (array);
11040 OMP_CLAUSE_CHAIN (c) = *pclauses;
11041 *pclauses = c;
11042 tree clobber = build_clobber (type);
11043 g = gimple_build_assign (array, clobber);
11044 gimple_seq_add_stmt (oseq, g);
11045 }
11046
11047 /* Lower the OpenMP parallel or task directive in the current statement
11048 in GSI_P. CTX holds context information for the directive. */
11049
11050 static void
11051 lower_omp_taskreg (gimple_stmt_iterator *gsi_p, omp_context *ctx)
11052 {
11053 tree clauses;
11054 tree child_fn, t;
11055 gimple *stmt = gsi_stmt (*gsi_p);
11056 gbind *par_bind, *bind, *dep_bind = NULL;
11057 gimple_seq par_body;
11058 location_t loc = gimple_location (stmt);
11059
11060 clauses = gimple_omp_taskreg_clauses (stmt);
11061 if (gimple_code (stmt) == GIMPLE_OMP_TASK
11062 && gimple_omp_task_taskwait_p (stmt))
11063 {
11064 par_bind = NULL;
11065 par_body = NULL;
11066 }
11067 else
11068 {
11069 par_bind
11070 = as_a <gbind *> (gimple_seq_first_stmt (gimple_omp_body (stmt)));
11071 par_body = gimple_bind_body (par_bind);
11072 }
11073 child_fn = ctx->cb.dst_fn;
11074 if (gimple_code (stmt) == GIMPLE_OMP_PARALLEL
11075 && !gimple_omp_parallel_combined_p (stmt))
11076 {
11077 struct walk_stmt_info wi;
11078 int ws_num = 0;
11079
11080 memset (&wi, 0, sizeof (wi));
11081 wi.info = &ws_num;
11082 wi.val_only = true;
11083 walk_gimple_seq (par_body, check_combined_parallel, NULL, &wi);
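/* A body consisting of exactly one worksharing construct lets
   expansion use the combined parallel + workshare runtime entry
   points instead of a separate parallel region.  */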
11084 if (ws_num == 1)
11085 gimple_omp_parallel_set_combined_p (stmt, true);
11086 }
11087 gimple_seq dep_ilist = NULL;
11088 gimple_seq dep_olist = NULL;
11089 if (gimple_code (stmt) == GIMPLE_OMP_TASK
11090 && omp_find_clause (clauses, OMP_CLAUSE_DEPEND))
11091 {
11092 push_gimplify_context ();
11093 dep_bind = gimple_build_bind (NULL, NULL, make_node (BLOCK));
11094 lower_depend_clauses (gimple_omp_task_clauses_ptr (stmt),
11095 &dep_ilist, &dep_olist);
11096 }
11097
11098 if (gimple_code (stmt) == GIMPLE_OMP_TASK
11099 && gimple_omp_task_taskwait_p (stmt))
11100 {
11101 if (dep_bind)
11102 {
11103 gsi_replace (gsi_p, dep_bind, true);
11104 gimple_bind_add_seq (dep_bind, dep_ilist);
11105 gimple_bind_add_stmt (dep_bind, stmt);
11106 gimple_bind_add_seq (dep_bind, dep_olist);
11107 pop_gimplify_context (dep_bind);
11108 }
11109 return;
11110 }
11111
11112 if (ctx->srecord_type)
11113 create_task_copyfn (as_a <gomp_task *> (stmt), ctx);
11114
11115 gimple_seq tskred_ilist = NULL;
11116 gimple_seq tskred_olist = NULL;
11117 if ((is_task_ctx (ctx)
11118 && gimple_omp_task_taskloop_p (ctx->stmt)
11119 && omp_find_clause (gimple_omp_task_clauses (ctx->stmt),
11120 OMP_CLAUSE_REDUCTION))
11121 || (is_parallel_ctx (ctx)
11122 && omp_find_clause (gimple_omp_parallel_clauses (stmt),
11123 OMP_CLAUSE__REDUCTEMP_)))
11124 {
11125 if (dep_bind == NULL)
11126 {
11127 push_gimplify_context ();
11128 dep_bind = gimple_build_bind (NULL, NULL, make_node (BLOCK));
11129 }
11130 lower_omp_task_reductions (ctx, is_task_ctx (ctx) ? OMP_TASKLOOP
11131 : OMP_PARALLEL,
11132 gimple_omp_taskreg_clauses (ctx->stmt),
11133 &tskred_ilist, &tskred_olist);
11134 }
11135
11136 push_gimplify_context ();
11137
11138 gimple_seq par_olist = NULL;
11139 gimple_seq par_ilist = NULL;
11140 gimple_seq par_rlist = NULL;
11141 bool phony_construct = gimple_code (stmt) == GIMPLE_OMP_PARALLEL
11142 && gimple_omp_parallel_grid_phony (as_a <gomp_parallel *> (stmt));
11143 if (phony_construct && ctx->record_type)
11144 {
11145 gcc_checking_assert (!ctx->receiver_decl);
11146 ctx->receiver_decl = create_tmp_var
11147 (build_reference_type (ctx->record_type), ".omp_rec");
11148 }
11149 lower_rec_input_clauses (clauses, &par_ilist, &par_olist, ctx, NULL);
11150 lower_omp (&par_body, ctx);
11151 if (gimple_code (stmt) == GIMPLE_OMP_PARALLEL)
11152 lower_reduction_clauses (clauses, &par_rlist, NULL, ctx);
11153
11154 /* Declare all the variables created by mapping and the variables
11155 declared in the scope of the parallel body. */
11156 record_vars_into (ctx->block_vars, child_fn);
11157 maybe_remove_omp_member_access_dummy_vars (par_bind);
11158 record_vars_into (gimple_bind_vars (par_bind), child_fn);
11159
11160 if (ctx->record_type)
11161 {
11162 ctx->sender_decl
11163 = create_tmp_var (ctx->srecord_type ? ctx->srecord_type
11164 : ctx->record_type, ".omp_data_o");
11165 DECL_NAMELESS (ctx->sender_decl) = 1;
11166 TREE_ADDRESSABLE (ctx->sender_decl) = 1;
11167 gimple_omp_taskreg_set_data_arg (stmt, ctx->sender_decl);
11168 }
11169
11170 gimple_seq olist = NULL;
11171 gimple_seq ilist = NULL;
11172 lower_send_clauses (clauses, &ilist, &olist, ctx);
11173 lower_send_shared_vars (&ilist, &olist, ctx);
11174
11175 if (ctx->record_type)
11176 {
11177 tree clobber = build_clobber (TREE_TYPE (ctx->sender_decl));
11178 gimple_seq_add_stmt (&olist, gimple_build_assign (ctx->sender_decl,
11179 clobber));
11180 }
11181
11182 /* Once all the expansions are done, sequence all the different
11183 fragments inside gimple_omp_body. */
11184
11185 gimple_seq new_body = NULL;
11186
11187 if (ctx->record_type)
11188 {
11189 t = build_fold_addr_expr_loc (loc, ctx->sender_decl);
11190 /* fixup_child_record_type might have changed receiver_decl's type. */
11191 t = fold_convert_loc (loc, TREE_TYPE (ctx->receiver_decl), t);
11192 gimple_seq_add_stmt (&new_body,
11193 gimple_build_assign (ctx->receiver_decl, t));
11194 }
11195
11196 gimple_seq_add_seq (&new_body, par_ilist);
11197 gimple_seq_add_seq (&new_body, par_body);
11198 gimple_seq_add_seq (&new_body, par_rlist);
11199 if (ctx->cancellable)
11200 gimple_seq_add_stmt (&new_body, gimple_build_label (ctx->cancel_label));
11201 gimple_seq_add_seq (&new_body, par_olist);
11202 new_body = maybe_catch_exception (new_body);
11203 if (gimple_code (stmt) == GIMPLE_OMP_TASK)
11204 gimple_seq_add_stmt (&new_body,
11205 gimple_build_omp_continue (integer_zero_node,
11206 integer_zero_node));
11207 if (!phony_construct)
11208 {
11209 gimple_seq_add_stmt (&new_body, gimple_build_omp_return (false));
11210 gimple_omp_set_body (stmt, new_body);
11211 }
11212
11213 if (dep_bind && gimple_bind_block (par_bind) == NULL_TREE)
11214 bind = gimple_build_bind (NULL, NULL, make_node (BLOCK));
11215 else
11216 bind = gimple_build_bind (NULL, NULL, gimple_bind_block (par_bind));
11217 gsi_replace (gsi_p, dep_bind ? dep_bind : bind, true);
11218 gimple_bind_add_seq (bind, ilist);
11219 if (!phony_construct)
11220 gimple_bind_add_stmt (bind, stmt);
11221 else
11222 gimple_bind_add_seq (bind, new_body);
11223 gimple_bind_add_seq (bind, olist);
11224
11225 pop_gimplify_context (NULL);
11226
11227 if (dep_bind)
11228 {
11229 gimple_bind_add_seq (dep_bind, dep_ilist);
11230 gimple_bind_add_seq (dep_bind, tskred_ilist);
11231 gimple_bind_add_stmt (dep_bind, bind);
11232 gimple_bind_add_seq (dep_bind, tskred_olist);
11233 gimple_bind_add_seq (dep_bind, dep_olist);
11234 pop_gimplify_context (dep_bind);
11235 }
11236 }
11237
11238 /* Lower the GIMPLE_OMP_TARGET in the current statement
11239 in GSI_P. CTX holds context information for the directive. */
11240
11241 static void
11242 lower_omp_target (gimple_stmt_iterator *gsi_p, omp_context *ctx)
11243 {
11244 tree clauses;
11245 tree child_fn, t, c;
11246 gomp_target *stmt = as_a <gomp_target *> (gsi_stmt (*gsi_p));
11247 gbind *tgt_bind, *bind, *dep_bind = NULL;
11248 gimple_seq tgt_body, olist, ilist, fplist, new_body;
11249 location_t loc = gimple_location (stmt);
11250 bool offloaded, data_region;
11251 unsigned int map_cnt = 0;
11252
11253 offloaded = is_gimple_omp_offloaded (stmt);
11254 switch (gimple_omp_target_kind (stmt))
11255 {
11256 case GF_OMP_TARGET_KIND_REGION:
11257 case GF_OMP_TARGET_KIND_UPDATE:
11258 case GF_OMP_TARGET_KIND_ENTER_DATA:
11259 case GF_OMP_TARGET_KIND_EXIT_DATA:
11260 case GF_OMP_TARGET_KIND_OACC_PARALLEL:
11261 case GF_OMP_TARGET_KIND_OACC_KERNELS:
11262 case GF_OMP_TARGET_KIND_OACC_UPDATE:
11263 case GF_OMP_TARGET_KIND_OACC_ENTER_EXIT_DATA:
11264 case GF_OMP_TARGET_KIND_OACC_DECLARE:
11265 data_region = false;
11266 break;
11267 case GF_OMP_TARGET_KIND_DATA:
11268 case GF_OMP_TARGET_KIND_OACC_DATA:
11269 case GF_OMP_TARGET_KIND_OACC_HOST_DATA:
11270 data_region = true;
11271 break;
11272 default:
11273 gcc_unreachable ();
11274 }
11275
11276 clauses = gimple_omp_target_clauses (stmt);
11277
11278 gimple_seq dep_ilist = NULL;
11279 gimple_seq dep_olist = NULL;
11280 if (omp_find_clause (clauses, OMP_CLAUSE_DEPEND))
11281 {
11282 push_gimplify_context ();
11283 dep_bind = gimple_build_bind (NULL, NULL, make_node (BLOCK));
11284 lower_depend_clauses (gimple_omp_target_clauses_ptr (stmt),
11285 &dep_ilist, &dep_olist);
11286 }
11287
11288 tgt_bind = NULL;
11289 tgt_body = NULL;
11290 if (offloaded)
11291 {
11292 tgt_bind = gimple_seq_first_stmt_as_a_bind (gimple_omp_body (stmt));
11293 tgt_body = gimple_bind_body (tgt_bind);
11294 }
11295 else if (data_region)
11296 tgt_body = gimple_omp_body (stmt);
11297 child_fn = ctx->cb.dst_fn;
11298
11299 push_gimplify_context ();
11300 fplist = NULL;
11301
11302 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
11303 switch (OMP_CLAUSE_CODE (c))
11304 {
11305 tree var, x;
11306
11307 default:
11308 break;
11309 case OMP_CLAUSE_MAP:
11310 #if CHECKING_P
11311 /* First check what we're prepared to handle in the following. */
11312 switch (OMP_CLAUSE_MAP_KIND (c))
11313 {
11314 case GOMP_MAP_ALLOC:
11315 case GOMP_MAP_TO:
11316 case GOMP_MAP_FROM:
11317 case GOMP_MAP_TOFROM:
11318 case GOMP_MAP_POINTER:
11319 case GOMP_MAP_TO_PSET:
11320 case GOMP_MAP_DELETE:
11321 case GOMP_MAP_RELEASE:
11322 case GOMP_MAP_ALWAYS_TO:
11323 case GOMP_MAP_ALWAYS_FROM:
11324 case GOMP_MAP_ALWAYS_TOFROM:
11325 case GOMP_MAP_FIRSTPRIVATE_POINTER:
11326 case GOMP_MAP_FIRSTPRIVATE_REFERENCE:
11327 case GOMP_MAP_STRUCT:
11328 case GOMP_MAP_ALWAYS_POINTER:
11329 break;
11330 case GOMP_MAP_FORCE_ALLOC:
11331 case GOMP_MAP_FORCE_TO:
11332 case GOMP_MAP_FORCE_FROM:
11333 case GOMP_MAP_FORCE_TOFROM:
11334 case GOMP_MAP_FORCE_PRESENT:
11335 case GOMP_MAP_FORCE_DEVICEPTR:
11336 case GOMP_MAP_DEVICE_RESIDENT:
11337 case GOMP_MAP_LINK:
11338 gcc_assert (is_gimple_omp_oacc (stmt));
11339 break;
11340 default:
11341 gcc_unreachable ();
11342 }
11343 #endif
11344 /* FALLTHRU */
11345 case OMP_CLAUSE_TO:
11346 case OMP_CLAUSE_FROM:
11347 oacc_firstprivate:
11348 var = OMP_CLAUSE_DECL (c);
11349 if (!DECL_P (var))
11350 {
11351 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_MAP
11352 || (!OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c)
11353 && (OMP_CLAUSE_MAP_KIND (c)
11354 != GOMP_MAP_FIRSTPRIVATE_POINTER)))
11355 map_cnt++;
11356 continue;
11357 }
11358
11359 if (DECL_SIZE (var)
11360 && TREE_CODE (DECL_SIZE (var)) != INTEGER_CST)
11361 {
11362 tree var2 = DECL_VALUE_EXPR (var);
11363 gcc_assert (TREE_CODE (var2) == INDIRECT_REF);
11364 var2 = TREE_OPERAND (var2, 0);
11365 gcc_assert (DECL_P (var2));
11366 var = var2;
11367 }
11368
11369 if (offloaded
11370 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
11371 && (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER
11372 || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_REFERENCE))
11373 {
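/* Only a pointer is received for GOMP_MAP_FIRSTPRIVATE_{POINTER,
   REFERENCE}; for an array, give the local copy a DECL_VALUE_EXPR
   dereferencing a pointer temporary, which the second pass over the
   clauses in the body handling below fills in.  */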
11374 if (TREE_CODE (TREE_TYPE (var)) == ARRAY_TYPE)
11375 {
11376 if (is_global_var (maybe_lookup_decl_in_outer_ctx (var, ctx))
11377 && varpool_node::get_create (var)->offloadable)
11378 continue;
11379
11380 tree type = build_pointer_type (TREE_TYPE (var));
11381 tree new_var = lookup_decl (var, ctx);
11382 x = create_tmp_var_raw (type, get_name (new_var));
11383 gimple_add_tmp_var (x);
11384 x = build_simple_mem_ref (x);
11385 SET_DECL_VALUE_EXPR (new_var, x);
11386 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
11387 }
11388 continue;
11389 }
11390
11391 if (!maybe_lookup_field (var, ctx))
11392 continue;
11393
11394 /* Don't remap oacc parallel reduction variables, because the
11395 intermediate result must be local to each gang. */
11396 if (offloaded && !(OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
11397 && OMP_CLAUSE_MAP_IN_REDUCTION (c)))
11398 {
11399 x = build_receiver_ref (var, true, ctx);
11400 tree new_var = lookup_decl (var, ctx);
11401
11402 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
11403 && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_POINTER
11404 && !OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c)
11405 && TREE_CODE (TREE_TYPE (var)) == ARRAY_TYPE)
11406 x = build_simple_mem_ref (x);
11407 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
11408 {
11409 gcc_assert (is_gimple_omp_oacc (ctx->stmt));
11410 if (omp_is_reference (new_var)
11411 && (TREE_CODE (TREE_TYPE (new_var)) != POINTER_TYPE
11412 || DECL_BY_REFERENCE (var)))
11413 {
11414 /* Create a local object to hold the instance
11415 value. */
11416 tree type = TREE_TYPE (TREE_TYPE (new_var));
11417 const char *id = IDENTIFIER_POINTER (DECL_NAME (new_var));
11418 tree inst = create_tmp_var (type, id);
11419 gimplify_assign (inst, fold_indirect_ref (x), &fplist);
11420 x = build_fold_addr_expr (inst);
11421 }
11422 gimplify_assign (new_var, x, &fplist);
11423 }
11424 else if (DECL_P (new_var))
11425 {
11426 SET_DECL_VALUE_EXPR (new_var, x);
11427 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
11428 }
11429 else
11430 gcc_unreachable ();
11431 }
11432 map_cnt++;
11433 break;
11434
11435 case OMP_CLAUSE_FIRSTPRIVATE:
11436 if (is_oacc_parallel (ctx))
11437 goto oacc_firstprivate;
11438 map_cnt++;
11439 var = OMP_CLAUSE_DECL (c);
11440 if (!omp_is_reference (var)
11441 && !is_gimple_reg_type (TREE_TYPE (var)))
11442 {
11443 tree new_var = lookup_decl (var, ctx);
11444 if (is_variable_sized (var))
11445 {
11446 tree pvar = DECL_VALUE_EXPR (var);
11447 gcc_assert (TREE_CODE (pvar) == INDIRECT_REF);
11448 pvar = TREE_OPERAND (pvar, 0);
11449 gcc_assert (DECL_P (pvar));
11450 tree new_pvar = lookup_decl (pvar, ctx);
11451 x = build_fold_indirect_ref (new_pvar);
11452 TREE_THIS_NOTRAP (x) = 1;
11453 }
11454 else
11455 x = build_receiver_ref (var, true, ctx);
11456 SET_DECL_VALUE_EXPR (new_var, x);
11457 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
11458 }
11459 break;
11460
11461 case OMP_CLAUSE_PRIVATE:
11462 if (is_gimple_omp_oacc (ctx->stmt))
11463 break;
11464 var = OMP_CLAUSE_DECL (c);
11465 if (is_variable_sized (var))
11466 {
11467 tree new_var = lookup_decl (var, ctx);
11468 tree pvar = DECL_VALUE_EXPR (var);
11469 gcc_assert (TREE_CODE (pvar) == INDIRECT_REF);
11470 pvar = TREE_OPERAND (pvar, 0);
11471 gcc_assert (DECL_P (pvar));
11472 tree new_pvar = lookup_decl (pvar, ctx);
11473 x = build_fold_indirect_ref (new_pvar);
11474 TREE_THIS_NOTRAP (x) = 1;
11475 SET_DECL_VALUE_EXPR (new_var, x);
11476 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
11477 }
11478 break;
11479
11480 case OMP_CLAUSE_USE_DEVICE_PTR:
11481 case OMP_CLAUSE_USE_DEVICE_ADDR:
11482 case OMP_CLAUSE_IS_DEVICE_PTR:
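/* For use_device_ptr/use_device_addr/is_device_ptr the variable is
   remapped (via DECL_VALUE_EXPR) to a fresh temporary that receives
   the translated device address when the body is built below.  */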
11483 var = OMP_CLAUSE_DECL (c);
11484 map_cnt++;
11485 if (is_variable_sized (var))
11486 {
11487 tree new_var = lookup_decl (var, ctx);
11488 tree pvar = DECL_VALUE_EXPR (var);
11489 gcc_assert (TREE_CODE (pvar) == INDIRECT_REF);
11490 pvar = TREE_OPERAND (pvar, 0);
11491 gcc_assert (DECL_P (pvar));
11492 tree new_pvar = lookup_decl (pvar, ctx);
11493 x = build_fold_indirect_ref (new_pvar);
11494 TREE_THIS_NOTRAP (x) = 1;
11495 SET_DECL_VALUE_EXPR (new_var, x);
11496 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
11497 }
11498 else if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_USE_DEVICE_ADDR
11499 && !omp_is_reference (var)
11500 && !omp_is_allocatable_or_ptr (var)
11501 && !lang_hooks.decls.omp_array_data (var, true))
11502 || TREE_CODE (TREE_TYPE (var)) == ARRAY_TYPE)
11503 {
11504 tree new_var = lookup_decl (var, ctx);
11505 tree type = build_pointer_type (TREE_TYPE (var));
11506 x = create_tmp_var_raw (type, get_name (new_var));
11507 gimple_add_tmp_var (x);
11508 x = build_simple_mem_ref (x);
11509 SET_DECL_VALUE_EXPR (new_var, x);
11510 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
11511 }
11512 else
11513 {
11514 tree new_var = lookup_decl (var, ctx);
11515 x = create_tmp_var_raw (TREE_TYPE (new_var), get_name (new_var));
11516 gimple_add_tmp_var (x);
11517 SET_DECL_VALUE_EXPR (new_var, x);
11518 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
11519 }
11520 break;
11521 }
11522
11523 if (offloaded)
11524 {
11525 target_nesting_level++;
11526 lower_omp (&tgt_body, ctx);
11527 target_nesting_level--;
11528 }
11529 else if (data_region)
11530 lower_omp (&tgt_body, ctx);
11531
11532 if (offloaded)
11533 {
11534 /* Declare all the variables created by mapping and the variables
11535 declared in the scope of the target body. */
11536 record_vars_into (ctx->block_vars, child_fn);
11537 maybe_remove_omp_member_access_dummy_vars (tgt_bind);
11538 record_vars_into (gimple_bind_vars (tgt_bind), child_fn);
11539 }
11540
11541 olist = NULL;
11542 ilist = NULL;
11543 if (ctx->record_type)
11544 {
11545 ctx->sender_decl
11546 = create_tmp_var (ctx->record_type, ".omp_data_arr");
11547 DECL_NAMELESS (ctx->sender_decl) = 1;
11548 TREE_ADDRESSABLE (ctx->sender_decl) = 1;
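/* The target data argument is a TREE_VEC of three artificial
   variables: the sender record with the mapped addresses/values
   (.omp_data_arr), an array of their sizes in bytes (.omp_data_sizes)
   and an array of map kinds (.omp_data_kinds), matching the
   hostaddrs/sizes/kinds arguments of libgomp entry points such as
   GOMP_target_ext.  */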
11549 t = make_tree_vec (3);
11550 TREE_VEC_ELT (t, 0) = ctx->sender_decl;
11551 TREE_VEC_ELT (t, 1)
11552 = create_tmp_var (build_array_type_nelts (size_type_node, map_cnt),
11553 ".omp_data_sizes");
11554 DECL_NAMELESS (TREE_VEC_ELT (t, 1)) = 1;
11555 TREE_ADDRESSABLE (TREE_VEC_ELT (t, 1)) = 1;
11556 TREE_STATIC (TREE_VEC_ELT (t, 1)) = 1;
11557 tree tkind_type = short_unsigned_type_node;
11558 int talign_shift = 8;
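/* Each .omp_data_kinds element packs the GOMP_MAP_* kind into the low
   TALIGN_SHIFT bits and ceil_log2 of the alignment into the bits
   above; see the tkind computations below.  */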
11559 TREE_VEC_ELT (t, 2)
11560 = create_tmp_var (build_array_type_nelts (tkind_type, map_cnt),
11561 ".omp_data_kinds");
11562 DECL_NAMELESS (TREE_VEC_ELT (t, 2)) = 1;
11563 TREE_ADDRESSABLE (TREE_VEC_ELT (t, 2)) = 1;
11564 TREE_STATIC (TREE_VEC_ELT (t, 2)) = 1;
11565 gimple_omp_target_set_data_arg (stmt, t);
11566
11567 vec<constructor_elt, va_gc> *vsize;
11568 vec<constructor_elt, va_gc> *vkind;
11569 vec_alloc (vsize, map_cnt);
11570 vec_alloc (vkind, map_cnt);
11571 unsigned int map_idx = 0;
11572
11573 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
11574 switch (OMP_CLAUSE_CODE (c))
11575 {
11576 tree ovar, nc, s, purpose, var, x, type;
11577 unsigned int talign;
11578
11579 default:
11580 break;
11581
11582 case OMP_CLAUSE_MAP:
11583 case OMP_CLAUSE_TO:
11584 case OMP_CLAUSE_FROM:
11585 oacc_firstprivate_map:
11586 nc = c;
11587 ovar = OMP_CLAUSE_DECL (c);
11588 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
11589 && (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER
11590 || (OMP_CLAUSE_MAP_KIND (c)
11591 == GOMP_MAP_FIRSTPRIVATE_REFERENCE)))
11592 break;
11593 if (!DECL_P (ovar))
11594 {
11595 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
11596 && OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c))
11597 {
11598 gcc_checking_assert (OMP_CLAUSE_DECL (OMP_CLAUSE_CHAIN (c))
11599 == get_base_address (ovar));
11600 nc = OMP_CLAUSE_CHAIN (c);
11601 ovar = OMP_CLAUSE_DECL (nc);
11602 }
11603 else
11604 {
11605 tree x = build_sender_ref (ovar, ctx);
11606 tree v
11607 = build_fold_addr_expr_with_type (ovar, ptr_type_node);
11608 gimplify_assign (x, v, &ilist);
11609 nc = NULL_TREE;
11610 }
11611 }
11612 else
11613 {
11614 if (DECL_SIZE (ovar)
11615 && TREE_CODE (DECL_SIZE (ovar)) != INTEGER_CST)
11616 {
11617 tree ovar2 = DECL_VALUE_EXPR (ovar);
11618 gcc_assert (TREE_CODE (ovar2) == INDIRECT_REF);
11619 ovar2 = TREE_OPERAND (ovar2, 0);
11620 gcc_assert (DECL_P (ovar2));
11621 ovar = ovar2;
11622 }
11623 if (!maybe_lookup_field (ovar, ctx))
11624 continue;
11625 }
11626
11627 talign = TYPE_ALIGN_UNIT (TREE_TYPE (ovar));
11628 if (DECL_P (ovar) && DECL_ALIGN_UNIT (ovar) > talign)
11629 talign = DECL_ALIGN_UNIT (ovar);
11630 if (nc)
11631 {
11632 var = lookup_decl_in_outer_ctx (ovar, ctx);
11633 x = build_sender_ref (ovar, ctx);
11634
11635 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
11636 && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_POINTER
11637 && !OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c)
11638 && TREE_CODE (TREE_TYPE (ovar)) == ARRAY_TYPE)
11639 {
11640 gcc_assert (offloaded);
11641 tree avar
11642 = create_tmp_var (TREE_TYPE (TREE_TYPE (x)));
11643 mark_addressable (avar);
11644 gimplify_assign (avar, build_fold_addr_expr (var), &ilist);
11645 talign = DECL_ALIGN_UNIT (avar);
11646 avar = build_fold_addr_expr (avar);
11647 gimplify_assign (x, avar, &ilist);
11648 }
11649 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
11650 {
11651 gcc_assert (is_gimple_omp_oacc (ctx->stmt));
11652 if (!omp_is_reference (var))
11653 {
11654 if (is_gimple_reg (var)
11655 && OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (c))
11656 TREE_NO_WARNING (var) = 1;
11657 var = build_fold_addr_expr (var);
11658 }
11659 else
11660 talign = TYPE_ALIGN_UNIT (TREE_TYPE (TREE_TYPE (ovar)));
11661 gimplify_assign (x, var, &ilist);
11662 }
11663 else if (is_gimple_reg (var))
11664 {
11665 gcc_assert (offloaded);
11666 tree avar = create_tmp_var (TREE_TYPE (var));
11667 mark_addressable (avar);
11668 enum gomp_map_kind map_kind = OMP_CLAUSE_MAP_KIND (c);
11669 if (GOMP_MAP_COPY_TO_P (map_kind)
11670 || map_kind == GOMP_MAP_POINTER
11671 || map_kind == GOMP_MAP_TO_PSET
11672 || map_kind == GOMP_MAP_FORCE_DEVICEPTR)
11673 {
11674 /* If we need to initialize a temporary
11675 with VAR because it is not addressable, and
11676 the variable hasn't been initialized yet, then
11677 we'll get a warning for the store to avar.
11678 Don't warn in that case; the mapping might
11679 be implicit. */
11680 TREE_NO_WARNING (var) = 1;
11681 gimplify_assign (avar, var, &ilist);
11682 }
11683 avar = build_fold_addr_expr (avar);
11684 gimplify_assign (x, avar, &ilist);
11685 if ((GOMP_MAP_COPY_FROM_P (map_kind)
11686 || map_kind == GOMP_MAP_FORCE_DEVICEPTR)
11687 && !TYPE_READONLY (TREE_TYPE (var)))
11688 {
11689 x = unshare_expr (x);
11690 x = build_simple_mem_ref (x);
11691 gimplify_assign (var, x, &olist);
11692 }
11693 }
11694 else
11695 {
11696 /* While MAP is handled explicitly by the FE,
11697 for 'target update', only the identifier is passed. */
11698 if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FROM
11699 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_TO)
11700 && (omp_is_allocatable_or_ptr (var)
11701 && omp_is_optional_argument (var)))
11702 var = build_fold_indirect_ref (var);
11703 else if ((OMP_CLAUSE_CODE (c) != OMP_CLAUSE_FROM
11704 && OMP_CLAUSE_CODE (c) != OMP_CLAUSE_TO)
11705 || (!omp_is_allocatable_or_ptr (var)
11706 && !omp_is_optional_argument (var)))
11707 var = build_fold_addr_expr (var);
11708 gimplify_assign (x, var, &ilist);
11709 }
11710 }
11711 s = NULL_TREE;
11712 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
11713 {
11714 gcc_checking_assert (is_gimple_omp_oacc (ctx->stmt));
11715 s = TREE_TYPE (ovar);
11716 if (TREE_CODE (s) == REFERENCE_TYPE)
11717 s = TREE_TYPE (s);
11718 s = TYPE_SIZE_UNIT (s);
11719 }
11720 else
11721 s = OMP_CLAUSE_SIZE (c);
11722 if (s == NULL_TREE)
11723 s = TYPE_SIZE_UNIT (TREE_TYPE (ovar));
11724 s = fold_convert (size_type_node, s);
11725 purpose = size_int (map_idx++);
11726 CONSTRUCTOR_APPEND_ELT (vsize, purpose, s);
11727 if (TREE_CODE (s) != INTEGER_CST)
11728 TREE_STATIC (TREE_VEC_ELT (t, 1)) = 0;
11729
11730 unsigned HOST_WIDE_INT tkind, tkind_zero;
11731 switch (OMP_CLAUSE_CODE (c))
11732 {
11733 case OMP_CLAUSE_MAP:
11734 tkind = OMP_CLAUSE_MAP_KIND (c);
11735 tkind_zero = tkind;
11736 if (OMP_CLAUSE_MAP_MAYBE_ZERO_LENGTH_ARRAY_SECTION (c))
11737 switch (tkind)
11738 {
11739 case GOMP_MAP_ALLOC:
11740 case GOMP_MAP_TO:
11741 case GOMP_MAP_FROM:
11742 case GOMP_MAP_TOFROM:
11743 case GOMP_MAP_ALWAYS_TO:
11744 case GOMP_MAP_ALWAYS_FROM:
11745 case GOMP_MAP_ALWAYS_TOFROM:
11746 case GOMP_MAP_RELEASE:
11747 case GOMP_MAP_FORCE_TO:
11748 case GOMP_MAP_FORCE_FROM:
11749 case GOMP_MAP_FORCE_TOFROM:
11750 case GOMP_MAP_FORCE_PRESENT:
11751 tkind_zero = GOMP_MAP_ZERO_LEN_ARRAY_SECTION;
11752 break;
11753 case GOMP_MAP_DELETE:
11754 tkind_zero = GOMP_MAP_DELETE_ZERO_LEN_ARRAY_SECTION;
break;
11755 default:
11756 break;
11757 }
11758 if (tkind_zero != tkind)
11759 {
11760 if (integer_zerop (s))
11761 tkind = tkind_zero;
11762 else if (integer_nonzerop (s))
11763 tkind_zero = tkind;
11764 }
11765 break;
11766 case OMP_CLAUSE_FIRSTPRIVATE:
11767 gcc_checking_assert (is_gimple_omp_oacc (ctx->stmt));
11768 tkind = GOMP_MAP_TO;
11769 tkind_zero = tkind;
11770 break;
11771 case OMP_CLAUSE_TO:
11772 tkind = GOMP_MAP_TO;
11773 tkind_zero = tkind;
11774 break;
11775 case OMP_CLAUSE_FROM:
11776 tkind = GOMP_MAP_FROM;
11777 tkind_zero = tkind;
11778 break;
11779 default:
11780 gcc_unreachable ();
11781 }
11782 gcc_checking_assert (tkind
11783 < (HOST_WIDE_INT_C (1U) << talign_shift));
11784 gcc_checking_assert (tkind_zero
11785 < (HOST_WIDE_INT_C (1U) << talign_shift));
11786 talign = ceil_log2 (talign);
11787 tkind |= talign << talign_shift;
11788 tkind_zero |= talign << talign_shift;
11789 gcc_checking_assert (tkind
11790 <= tree_to_uhwi (TYPE_MAX_VALUE (tkind_type)));
11791 gcc_checking_assert (tkind_zero
11792 <= tree_to_uhwi (TYPE_MAX_VALUE (tkind_type)));
11793 if (tkind == tkind_zero)
11794 x = build_int_cstu (tkind_type, tkind);
11795 else
11796 {
11797 TREE_STATIC (TREE_VEC_ELT (t, 2)) = 0;
11798 x = build3 (COND_EXPR, tkind_type,
11799 fold_build2 (EQ_EXPR, boolean_type_node,
11800 unshare_expr (s), size_zero_node),
11801 build_int_cstu (tkind_type, tkind_zero),
11802 build_int_cstu (tkind_type, tkind));
11803 }
11804 CONSTRUCTOR_APPEND_ELT (vkind, purpose, x);
11805 if (nc && nc != c)
11806 c = nc;
11807 break;
11808
11809 case OMP_CLAUSE_FIRSTPRIVATE:
11810 if (is_oacc_parallel (ctx))
11811 goto oacc_firstprivate_map;
11812 ovar = OMP_CLAUSE_DECL (c);
11813 if (omp_is_reference (ovar))
11814 talign = TYPE_ALIGN_UNIT (TREE_TYPE (TREE_TYPE (ovar)));
11815 else
11816 talign = DECL_ALIGN_UNIT (ovar);
11817 var = lookup_decl_in_outer_ctx (ovar, ctx);
11818 x = build_sender_ref (ovar, ctx);
11819 tkind = GOMP_MAP_FIRSTPRIVATE;
11820 type = TREE_TYPE (ovar);
11821 if (omp_is_reference (ovar))
11822 type = TREE_TYPE (type);
11823 if ((INTEGRAL_TYPE_P (type)
11824 && TYPE_PRECISION (type) <= POINTER_SIZE)
11825 || TREE_CODE (type) == POINTER_TYPE)
11826 {
11827 tkind = GOMP_MAP_FIRSTPRIVATE_INT;
11828 tree t = var;
11829 if (omp_is_reference (var))
11830 t = build_simple_mem_ref (var);
11831 else if (OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (c))
11832 TREE_NO_WARNING (var) = 1;
11833 if (TREE_CODE (type) != POINTER_TYPE)
11834 t = fold_convert (pointer_sized_int_node, t);
11835 t = fold_convert (TREE_TYPE (x), t);
11836 gimplify_assign (x, t, &ilist);
11837 }
11838 else if (omp_is_reference (var))
11839 gimplify_assign (x, var, &ilist);
11840 else if (is_gimple_reg (var))
11841 {
11842 tree avar = create_tmp_var (TREE_TYPE (var));
11843 mark_addressable (avar);
11844 if (OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (c))
11845 TREE_NO_WARNING (var) = 1;
11846 gimplify_assign (avar, var, &ilist);
11847 avar = build_fold_addr_expr (avar);
11848 gimplify_assign (x, avar, &ilist);
11849 }
11850 else
11851 {
11852 var = build_fold_addr_expr (var);
11853 gimplify_assign (x, var, &ilist);
11854 }
11855 if (tkind == GOMP_MAP_FIRSTPRIVATE_INT)
11856 s = size_int (0);
11857 else if (omp_is_reference (ovar))
11858 s = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ovar)));
11859 else
11860 s = TYPE_SIZE_UNIT (TREE_TYPE (ovar));
11861 s = fold_convert (size_type_node, s);
11862 purpose = size_int (map_idx++);
11863 CONSTRUCTOR_APPEND_ELT (vsize, purpose, s);
11864 if (TREE_CODE (s) != INTEGER_CST)
11865 TREE_STATIC (TREE_VEC_ELT (t, 1)) = 0;
11866
11867 gcc_checking_assert (tkind
11868 < (HOST_WIDE_INT_C (1U) << talign_shift));
11869 talign = ceil_log2 (talign);
11870 tkind |= talign << talign_shift;
11871 gcc_checking_assert (tkind
11872 <= tree_to_uhwi (TYPE_MAX_VALUE (tkind_type)));
11873 CONSTRUCTOR_APPEND_ELT (vkind, purpose,
11874 build_int_cstu (tkind_type, tkind));
11875 break;
11876
11877 case OMP_CLAUSE_USE_DEVICE_PTR:
11878 case OMP_CLAUSE_USE_DEVICE_ADDR:
11879 case OMP_CLAUSE_IS_DEVICE_PTR:
11880 ovar = OMP_CLAUSE_DECL (c);
11881 var = lookup_decl_in_outer_ctx (ovar, ctx);
11882
11883 if (lang_hooks.decls.omp_array_data (ovar, true))
11884 {
11885 tkind = (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_IS_DEVICE_PTR
11886 ? GOMP_MAP_USE_DEVICE_PTR : GOMP_MAP_FIRSTPRIVATE_INT);
11887 x = build_sender_ref ((splay_tree_key) &DECL_NAME (ovar), ctx);
11888 }
11889 else if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_IS_DEVICE_PTR)
11890 {
11891 tkind = GOMP_MAP_USE_DEVICE_PTR;
11892 x = build_sender_ref ((splay_tree_key) &DECL_UID (ovar), ctx);
11893 }
11894 else
11895 {
11896 tkind = GOMP_MAP_FIRSTPRIVATE_INT;
11897 x = build_sender_ref (ovar, ctx);
11898 }
11899 type = TREE_TYPE (ovar);
11900 if (lang_hooks.decls.omp_array_data (ovar, true))
11901 var = lang_hooks.decls.omp_array_data (ovar, false);
11902 else if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_USE_DEVICE_ADDR
11903 && !omp_is_reference (ovar)
11904 && !omp_is_allocatable_or_ptr (ovar))
11905 || TREE_CODE (type) == ARRAY_TYPE)
11906 var = build_fold_addr_expr (var);
11907 else
11908 {
11909 if (omp_is_reference (ovar)
11910 || omp_is_optional_argument (ovar)
11911 || omp_is_allocatable_or_ptr (ovar))
11912 {
11913 type = TREE_TYPE (type);
11914 if (TREE_CODE (type) != ARRAY_TYPE
11915 && ((OMP_CLAUSE_CODE (c) != OMP_CLAUSE_USE_DEVICE_ADDR
11916 && !omp_is_allocatable_or_ptr (ovar))
11917 || (omp_is_reference (ovar)
11918 && omp_is_allocatable_or_ptr (ovar))))
11919 var = build_simple_mem_ref (var);
11920 var = fold_convert (TREE_TYPE (x), var);
11921 }
11922 }
11923 gimplify_assign (x, var, &ilist);
11924 s = size_int (0);
11925 purpose = size_int (map_idx++);
11926 CONSTRUCTOR_APPEND_ELT (vsize, purpose, s);
11927 gcc_checking_assert (tkind
11928 < (HOST_WIDE_INT_C (1U) << talign_shift));
11929 gcc_checking_assert (tkind
11930 <= tree_to_uhwi (TYPE_MAX_VALUE (tkind_type)));
11931 CONSTRUCTOR_APPEND_ELT (vkind, purpose,
11932 build_int_cstu (tkind_type, tkind));
11933 break;
11934 }
11935
11936 gcc_assert (map_idx == map_cnt);
11937
11938 DECL_INITIAL (TREE_VEC_ELT (t, 1))
11939 = build_constructor (TREE_TYPE (TREE_VEC_ELT (t, 1)), vsize);
11940 DECL_INITIAL (TREE_VEC_ELT (t, 2))
11941 = build_constructor (TREE_TYPE (TREE_VEC_ELT (t, 2)), vkind);
11942 for (int i = 1; i <= 2; i++)
11943 if (!TREE_STATIC (TREE_VEC_ELT (t, i)))
11944 {
11945 gimple_seq initlist = NULL;
11946 force_gimple_operand (build1 (DECL_EXPR, void_type_node,
11947 TREE_VEC_ELT (t, i)),
11948 &initlist, true, NULL_TREE);
11949 gimple_seq_add_seq (&ilist, initlist);
11950
11951 tree clobber = build_clobber (TREE_TYPE (TREE_VEC_ELT (t, i)));
11952 gimple_seq_add_stmt (&olist,
11953 gimple_build_assign (TREE_VEC_ELT (t, i),
11954 clobber));
11955 }
11956
11957 tree clobber = build_clobber (ctx->record_type);
11958 gimple_seq_add_stmt (&olist, gimple_build_assign (ctx->sender_decl,
11959 clobber));
11960 }
11961
11962 /* Once all the expansions are done, sequence all the different
11963 fragments inside gimple_omp_body. */
11964
11965 new_body = NULL;
11966
11967 if (offloaded
11968 && ctx->record_type)
11969 {
11970 t = build_fold_addr_expr_loc (loc, ctx->sender_decl);
11971 /* fixup_child_record_type might have changed receiver_decl's type. */
11972 t = fold_convert_loc (loc, TREE_TYPE (ctx->receiver_decl), t);
11973 gimple_seq_add_stmt (&new_body,
11974 gimple_build_assign (ctx->receiver_decl, t));
11975 }
11976 gimple_seq_add_seq (&new_body, fplist);
11977
11978 if (offloaded || data_region)
11979 {
11980 tree prev = NULL_TREE;
11981 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
11982 switch (OMP_CLAUSE_CODE (c))
11983 {
11984 tree var, x;
11985 default:
11986 break;
11987 case OMP_CLAUSE_FIRSTPRIVATE:
11988 if (is_gimple_omp_oacc (ctx->stmt))
11989 break;
11990 var = OMP_CLAUSE_DECL (c);
11991 if (omp_is_reference (var)
11992 || is_gimple_reg_type (TREE_TYPE (var)))
11993 {
11994 tree new_var = lookup_decl (var, ctx);
11995 tree type;
11996 type = TREE_TYPE (var);
11997 if (omp_is_reference (var))
11998 type = TREE_TYPE (type);
11999 if ((INTEGRAL_TYPE_P (type)
12000 && TYPE_PRECISION (type) <= POINTER_SIZE)
12001 || TREE_CODE (type) == POINTER_TYPE)
12002 {
12003 x = build_receiver_ref (var, false, ctx);
12004 if (TREE_CODE (type) != POINTER_TYPE)
12005 x = fold_convert (pointer_sized_int_node, x);
12006 x = fold_convert (type, x);
12007 gimplify_expr (&x, &new_body, NULL, is_gimple_val,
12008 fb_rvalue);
12009 if (omp_is_reference (var))
12010 {
12011 tree v = create_tmp_var_raw (type, get_name (var));
12012 gimple_add_tmp_var (v);
12013 TREE_ADDRESSABLE (v) = 1;
12014 gimple_seq_add_stmt (&new_body,
12015 gimple_build_assign (v, x));
12016 x = build_fold_addr_expr (v);
12017 }
12018 gimple_seq_add_stmt (&new_body,
12019 gimple_build_assign (new_var, x));
12020 }
12021 else
12022 {
12023 x = build_receiver_ref (var, !omp_is_reference (var), ctx);
12024 gimplify_expr (&x, &new_body, NULL, is_gimple_val,
12025 fb_rvalue);
12026 gimple_seq_add_stmt (&new_body,
12027 gimple_build_assign (new_var, x));
12028 }
12029 }
12030 else if (is_variable_sized (var))
12031 {
12032 tree pvar = DECL_VALUE_EXPR (var);
12033 gcc_assert (TREE_CODE (pvar) == INDIRECT_REF);
12034 pvar = TREE_OPERAND (pvar, 0);
12035 gcc_assert (DECL_P (pvar));
12036 tree new_var = lookup_decl (pvar, ctx);
12037 x = build_receiver_ref (var, false, ctx);
12038 gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
12039 gimple_seq_add_stmt (&new_body,
12040 gimple_build_assign (new_var, x));
12041 }
12042 break;
12043 case OMP_CLAUSE_PRIVATE:
12044 if (is_gimple_omp_oacc (ctx->stmt))
12045 break;
12046 var = OMP_CLAUSE_DECL (c);
12047 if (omp_is_reference (var))
12048 {
12049 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
12050 tree new_var = lookup_decl (var, ctx);
12051 x = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (new_var)));
12052 if (TREE_CONSTANT (x))
12053 {
12054 x = create_tmp_var_raw (TREE_TYPE (TREE_TYPE (new_var)),
12055 get_name (var));
12056 gimple_add_tmp_var (x);
12057 TREE_ADDRESSABLE (x) = 1;
12058 x = build_fold_addr_expr_loc (clause_loc, x);
12059 }
12060 else
12061 break;
12062
12063 x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x);
12064 gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
12065 gimple_seq_add_stmt (&new_body,
12066 gimple_build_assign (new_var, x));
12067 }
12068 break;
12069 case OMP_CLAUSE_USE_DEVICE_PTR:
12070 case OMP_CLAUSE_USE_DEVICE_ADDR:
12071 case OMP_CLAUSE_IS_DEVICE_PTR:
12072 var = OMP_CLAUSE_DECL (c);
            bool is_array_data;
            is_array_data = lang_hooks.decls.omp_array_data (var, true) != NULL;

            if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_IS_DEVICE_PTR)
              x = build_sender_ref (is_array_data
                                    ? (splay_tree_key) &DECL_NAME (var)
                                    : (splay_tree_key) &DECL_UID (var), ctx);
            else
              x = build_receiver_ref (var, false, ctx);

            if (is_array_data)
              {
                bool is_ref = omp_is_reference (var);
                /* First, we copy the descriptor data from the host; then
                   we update its data to point to the target address.  */
                tree new_var = lookup_decl (var, ctx);
                new_var = DECL_VALUE_EXPR (new_var);
                tree v = new_var;

                if (is_ref)
                  {
                    var = build_fold_indirect_ref (var);
                    gimplify_expr (&var, &new_body, NULL, is_gimple_val,
                                   fb_rvalue);
                    v = create_tmp_var_raw (TREE_TYPE (var), get_name (var));
                    gimple_add_tmp_var (v);
                    TREE_ADDRESSABLE (v) = 1;
                    gimple_seq_add_stmt (&new_body,
                                         gimple_build_assign (v, var));
                    tree rhs = build_fold_addr_expr (v);
                    gimple_seq_add_stmt (&new_body,
                                         gimple_build_assign (new_var, rhs));
                  }
                else
                  gimple_seq_add_stmt (&new_body,
                                       gimple_build_assign (new_var, var));

                tree v2 = lang_hooks.decls.omp_array_data (unshare_expr (v), false);
                gcc_assert (v2);
                gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
                gimple_seq_add_stmt (&new_body,
                                     gimple_build_assign (v2, x));
              }
            else if (is_variable_sized (var))
              {
                tree pvar = DECL_VALUE_EXPR (var);
                gcc_assert (TREE_CODE (pvar) == INDIRECT_REF);
                pvar = TREE_OPERAND (pvar, 0);
                gcc_assert (DECL_P (pvar));
                tree new_var = lookup_decl (pvar, ctx);
                gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
                gimple_seq_add_stmt (&new_body,
                                     gimple_build_assign (new_var, x));
              }
            else if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_USE_DEVICE_ADDR
                      && !omp_is_reference (var)
                      && !omp_is_allocatable_or_ptr (var))
                     || TREE_CODE (TREE_TYPE (var)) == ARRAY_TYPE)
              {
                tree new_var = lookup_decl (var, ctx);
                new_var = DECL_VALUE_EXPR (new_var);
                gcc_assert (TREE_CODE (new_var) == MEM_REF);
                new_var = TREE_OPERAND (new_var, 0);
                gcc_assert (DECL_P (new_var));
                gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
                gimple_seq_add_stmt (&new_body,
                                     gimple_build_assign (new_var, x));
              }
            else
              {
                tree type = TREE_TYPE (var);
                tree new_var = lookup_decl (var, ctx);
                if (omp_is_reference (var))
                  {
                    type = TREE_TYPE (type);
                    if (TREE_CODE (type) != ARRAY_TYPE
                        && (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_USE_DEVICE_ADDR
                            || (omp_is_reference (var)
                                && omp_is_allocatable_or_ptr (var))))
                      {
                        tree v = create_tmp_var_raw (type, get_name (var));
                        gimple_add_tmp_var (v);
                        TREE_ADDRESSABLE (v) = 1;
                        x = fold_convert (type, x);
                        gimplify_expr (&x, &new_body, NULL, is_gimple_val,
                                       fb_rvalue);
                        gimple_seq_add_stmt (&new_body,
                                             gimple_build_assign (v, x));
                        x = build_fold_addr_expr (v);
                      }
                  }
                new_var = DECL_VALUE_EXPR (new_var);
                x = fold_convert (TREE_TYPE (new_var), x);
                gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
                gimple_seq_add_stmt (&new_body,
                                     gimple_build_assign (new_var, x));
              }
            break;
          }
      /* Handle GOMP_MAP_FIRSTPRIVATE_{POINTER,REFERENCE} in a second pass,
         so that any firstprivate variables holding an OMP_CLAUSE_SIZE, if
         needed, have already been handled.  Similarly OMP_CLAUSE_PRIVATE
         for VLAs or references to VLAs.  */
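      /* For example (an illustrative sketch): when the OMP_CLAUSE_SIZE of
         a GOMP_MAP_FIRSTPRIVATE_POINTER clause below is a DECL holding the
         pointer's bias, that DECL is itself received as a firstprivate
         variable, so the first pass above must already have assigned it
         before it is looked up here.  */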
      for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
        switch (OMP_CLAUSE_CODE (c))
          {
            tree var;
          default:
            break;
          case OMP_CLAUSE_MAP:
            if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER
                || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_REFERENCE)
              {
                location_t clause_loc = OMP_CLAUSE_LOCATION (c);
                poly_int64 offset = 0;
                gcc_assert (prev);
                var = OMP_CLAUSE_DECL (c);
                if (DECL_P (var)
                    && TREE_CODE (TREE_TYPE (var)) == ARRAY_TYPE
                    && is_global_var (maybe_lookup_decl_in_outer_ctx (var,
                                                                      ctx))
                    && varpool_node::get_create (var)->offloadable)
                  break;
                if (TREE_CODE (var) == INDIRECT_REF
                    && TREE_CODE (TREE_OPERAND (var, 0)) == COMPONENT_REF)
                  var = TREE_OPERAND (var, 0);
                if (TREE_CODE (var) == COMPONENT_REF)
                  {
                    var = get_addr_base_and_unit_offset (var, &offset);
                    gcc_assert (var != NULL_TREE && DECL_P (var));
                  }
                else if (DECL_SIZE (var)
                         && TREE_CODE (DECL_SIZE (var)) != INTEGER_CST)
                  {
                    tree var2 = DECL_VALUE_EXPR (var);
                    gcc_assert (TREE_CODE (var2) == INDIRECT_REF);
                    var2 = TREE_OPERAND (var2, 0);
                    gcc_assert (DECL_P (var2));
                    var = var2;
                  }
                tree new_var = lookup_decl (var, ctx), x;
                tree type = TREE_TYPE (new_var);
                bool is_ref;
                if (TREE_CODE (OMP_CLAUSE_DECL (c)) == INDIRECT_REF
                    && (TREE_CODE (TREE_OPERAND (OMP_CLAUSE_DECL (c), 0))
                        == COMPONENT_REF))
                  {
                    type = TREE_TYPE (TREE_OPERAND (OMP_CLAUSE_DECL (c), 0));
                    is_ref = true;
                    new_var = build2 (MEM_REF, type,
                                      build_fold_addr_expr (new_var),
                                      build_int_cst (build_pointer_type (type),
                                                     offset));
                  }
                else if (TREE_CODE (OMP_CLAUSE_DECL (c)) == COMPONENT_REF)
                  {
                    type = TREE_TYPE (OMP_CLAUSE_DECL (c));
                    is_ref = TREE_CODE (type) == REFERENCE_TYPE;
                    new_var = build2 (MEM_REF, type,
                                      build_fold_addr_expr (new_var),
                                      build_int_cst (build_pointer_type (type),
                                                     offset));
                  }
                else
                  is_ref = omp_is_reference (var);
                if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_REFERENCE)
                  is_ref = false;
                bool ref_to_array = false;
                if (is_ref)
                  {
                    type = TREE_TYPE (type);
                    if (TREE_CODE (type) == ARRAY_TYPE)
                      {
                        type = build_pointer_type (type);
                        ref_to_array = true;
                      }
                  }
                else if (TREE_CODE (type) == ARRAY_TYPE)
                  {
                    tree decl2 = DECL_VALUE_EXPR (new_var);
                    gcc_assert (TREE_CODE (decl2) == MEM_REF);
                    decl2 = TREE_OPERAND (decl2, 0);
                    gcc_assert (DECL_P (decl2));
                    new_var = decl2;
                    type = TREE_TYPE (new_var);
                  }
                x = build_receiver_ref (OMP_CLAUSE_DECL (prev), false, ctx);
                x = fold_convert_loc (clause_loc, type, x);
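                /* A non-zero OMP_CLAUSE_SIZE here holds the bias that was
                   folded into the pointer on the sender side; undo it by
                   adding the negated bias back.  */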
                if (!integer_zerop (OMP_CLAUSE_SIZE (c)))
                  {
                    tree bias = OMP_CLAUSE_SIZE (c);
                    if (DECL_P (bias))
                      bias = lookup_decl (bias, ctx);
                    bias = fold_convert_loc (clause_loc, sizetype, bias);
                    bias = fold_build1_loc (clause_loc, NEGATE_EXPR, sizetype,
                                            bias);
                    x = fold_build2_loc (clause_loc, POINTER_PLUS_EXPR,
                                         TREE_TYPE (x), x, bias);
                  }
                if (ref_to_array)
                  x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x);
                gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
                if (is_ref && !ref_to_array)
                  {
                    tree t = create_tmp_var_raw (type, get_name (var));
                    gimple_add_tmp_var (t);
                    TREE_ADDRESSABLE (t) = 1;
                    gimple_seq_add_stmt (&new_body,
                                         gimple_build_assign (t, x));
                    x = build_fold_addr_expr_loc (clause_loc, t);
                  }
                gimple_seq_add_stmt (&new_body,
                                     gimple_build_assign (new_var, x));
                prev = NULL_TREE;
              }
            else if (OMP_CLAUSE_CHAIN (c)
                     && OMP_CLAUSE_CODE (OMP_CLAUSE_CHAIN (c))
                        == OMP_CLAUSE_MAP
                     && (OMP_CLAUSE_MAP_KIND (OMP_CLAUSE_CHAIN (c))
                         == GOMP_MAP_FIRSTPRIVATE_POINTER
                         || (OMP_CLAUSE_MAP_KIND (OMP_CLAUSE_CHAIN (c))
                             == GOMP_MAP_FIRSTPRIVATE_REFERENCE)))
              prev = c;
            break;
          case OMP_CLAUSE_PRIVATE:
            var = OMP_CLAUSE_DECL (c);
            if (is_variable_sized (var))
              {
                location_t clause_loc = OMP_CLAUSE_LOCATION (c);
                tree new_var = lookup_decl (var, ctx);
                tree pvar = DECL_VALUE_EXPR (var);
                gcc_assert (TREE_CODE (pvar) == INDIRECT_REF);
                pvar = TREE_OPERAND (pvar, 0);
                gcc_assert (DECL_P (pvar));
                tree new_pvar = lookup_decl (pvar, ctx);
                tree atmp = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
                tree al = size_int (DECL_ALIGN (var));
                tree x = TYPE_SIZE_UNIT (TREE_TYPE (new_var));
                x = build_call_expr_loc (clause_loc, atmp, 2, x, al);
                x = fold_convert_loc (clause_loc, TREE_TYPE (new_pvar), x);
                gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
                gimple_seq_add_stmt (&new_body,
                                     gimple_build_assign (new_pvar, x));
              }
            else if (omp_is_reference (var) && !is_gimple_omp_oacc (ctx->stmt))
              {
                location_t clause_loc = OMP_CLAUSE_LOCATION (c);
                tree new_var = lookup_decl (var, ctx);
                tree x = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (new_var)));
                if (TREE_CONSTANT (x))
                  break;
                else
                  {
                    tree atmp
                      = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
                    tree rtype = TREE_TYPE (TREE_TYPE (new_var));
                    tree al = size_int (TYPE_ALIGN (rtype));
                    x = build_call_expr_loc (clause_loc, atmp, 2, x, al);
                  }

                x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x);
                gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
                gimple_seq_add_stmt (&new_body,
                                     gimple_build_assign (new_var, x));
              }
            break;
          }

      gimple_seq fork_seq = NULL;
      gimple_seq join_seq = NULL;

      if (is_oacc_parallel (ctx))
        {
          /* If there are reductions on the offloaded region itself, treat
             them as a dummy GANG loop.  */
          tree level = build_int_cst (integer_type_node, GOMP_DIM_GANG);

          lower_oacc_reductions (gimple_location (ctx->stmt), clauses, level,
                                 false, NULL, NULL, &fork_seq, &join_seq, ctx);
        }

      gimple_seq_add_seq (&new_body, fork_seq);
      gimple_seq_add_seq (&new_body, tgt_body);
      gimple_seq_add_seq (&new_body, join_seq);

      if (offloaded)
        new_body = maybe_catch_exception (new_body);

      gimple_seq_add_stmt (&new_body, gimple_build_omp_return (false));
      gimple_omp_set_body (stmt, new_body);
    }

  bind = gimple_build_bind (NULL, NULL,
                            tgt_bind ? gimple_bind_block (tgt_bind)
                                     : NULL_TREE);
  gsi_replace (gsi_p, dep_bind ? dep_bind : bind, true);
  gimple_bind_add_seq (bind, ilist);
  gimple_bind_add_stmt (bind, stmt);
  gimple_bind_add_seq (bind, olist);

  pop_gimplify_context (NULL);

  if (dep_bind)
    {
      gimple_bind_add_seq (dep_bind, dep_ilist);
      gimple_bind_add_stmt (dep_bind, bind);
      gimple_bind_add_seq (dep_bind, dep_olist);
      pop_gimplify_context (dep_bind);
    }
}

/* Lower code for an OpenMP teams directive.  */
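/* As an illustrative sketch (the user source below is hypothetical):

     #pragma omp teams num_teams (4) thread_limit (16)

   is lowered so that the teams body is preceded by a call to
   GOMP_teams (4, 16); an omitted clause is passed as 0, which lets the
   runtime choose a default.  */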

static void
lower_omp_teams (gimple_stmt_iterator *gsi_p, omp_context *ctx)
{
  gomp_teams *teams_stmt = as_a <gomp_teams *> (gsi_stmt (*gsi_p));
  push_gimplify_context ();

  tree block = make_node (BLOCK);
  gbind *bind = gimple_build_bind (NULL, NULL, block);
  gsi_replace (gsi_p, bind, true);
  gimple_seq bind_body = NULL;
  gimple_seq dlist = NULL;
  gimple_seq olist = NULL;

  tree num_teams = omp_find_clause (gimple_omp_teams_clauses (teams_stmt),
                                    OMP_CLAUSE_NUM_TEAMS);
  if (num_teams == NULL_TREE)
    num_teams = build_int_cst (unsigned_type_node, 0);
  else
    {
      num_teams = OMP_CLAUSE_NUM_TEAMS_EXPR (num_teams);
      num_teams = fold_convert (unsigned_type_node, num_teams);
      gimplify_expr (&num_teams, &bind_body, NULL, is_gimple_val, fb_rvalue);
    }
  tree thread_limit = omp_find_clause (gimple_omp_teams_clauses (teams_stmt),
                                       OMP_CLAUSE_THREAD_LIMIT);
  if (thread_limit == NULL_TREE)
    thread_limit = build_int_cst (unsigned_type_node, 0);
  else
    {
      thread_limit = OMP_CLAUSE_THREAD_LIMIT_EXPR (thread_limit);
      thread_limit = fold_convert (unsigned_type_node, thread_limit);
      gimplify_expr (&thread_limit, &bind_body, NULL, is_gimple_val,
                     fb_rvalue);
    }

  lower_rec_input_clauses (gimple_omp_teams_clauses (teams_stmt),
                           &bind_body, &dlist, ctx, NULL);
  lower_omp (gimple_omp_body_ptr (teams_stmt), ctx);
  lower_reduction_clauses (gimple_omp_teams_clauses (teams_stmt), &olist,
                           NULL, ctx);
  if (!gimple_omp_teams_grid_phony (teams_stmt))
    {
      gimple_seq_add_stmt (&bind_body, teams_stmt);
      location_t loc = gimple_location (teams_stmt);
      tree decl = builtin_decl_explicit (BUILT_IN_GOMP_TEAMS);
      gimple *call = gimple_build_call (decl, 2, num_teams, thread_limit);
      gimple_set_location (call, loc);
      gimple_seq_add_stmt (&bind_body, call);
    }

  gimple_seq_add_seq (&bind_body, gimple_omp_body (teams_stmt));
  gimple_omp_set_body (teams_stmt, NULL);
  gimple_seq_add_seq (&bind_body, olist);
  gimple_seq_add_seq (&bind_body, dlist);
  if (!gimple_omp_teams_grid_phony (teams_stmt))
    gimple_seq_add_stmt (&bind_body, gimple_build_omp_return (true));
  gimple_bind_set_body (bind, bind_body);

  pop_gimplify_context (bind);

  gimple_bind_append_vars (bind, ctx->block_vars);
  BLOCK_VARS (block) = ctx->block_vars;
  if (BLOCK_VARS (block))
    TREE_USED (block) = 1;
}

/* Lower code within an artificial GIMPLE_OMP_GRID_BODY OMP construct.  */

static void
lower_omp_grid_body (gimple_stmt_iterator *gsi_p, omp_context *ctx)
{
  gimple *stmt = gsi_stmt (*gsi_p);
  lower_omp (gimple_omp_body_ptr (stmt), ctx);
  gimple_seq_add_stmt (gimple_omp_body_ptr (stmt),
                       gimple_build_omp_return (false));
}


/* Callback for lower_omp_1.  Return non-NULL if *tp needs to be
   regimplified.  If DATA is non-NULL, lower_omp_1 is being called outside
   of an OMP context, but with task_shared_vars set.  */
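/* For instance (a hypothetical example), a variable-length array that has
   been privatized in a region carries a DECL_VALUE_EXPR dereferencing the
   pointer that holds its storage, so a statement still mentioning the
   original decl is no longer valid GIMPLE and must be regimplified.  */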

static tree
lower_omp_regimplify_p (tree *tp, int *walk_subtrees,
                        void *data)
{
  tree t = *tp;

  /* Any variable with DECL_VALUE_EXPR needs to be regimplified.  */
  if (VAR_P (t) && data == NULL && DECL_HAS_VALUE_EXPR_P (t))
    return t;

  if (task_shared_vars
      && DECL_P (t)
      && bitmap_bit_p (task_shared_vars, DECL_UID (t)))
    return t;

  /* If a global variable has been privatized, TREE_CONSTANT on
     ADDR_EXPR might be wrong.  */
  if (data == NULL && TREE_CODE (t) == ADDR_EXPR)
    recompute_tree_invariant_for_addr_expr (t);

  *walk_subtrees = !IS_TYPE_OR_DECL_P (t);
  return NULL_TREE;
}

/* Data to be communicated between lower_omp_regimplify_operands and
   lower_omp_regimplify_operands_p.  */

struct lower_omp_regimplify_operands_data
{
  omp_context *ctx;
  vec<tree> *decls;
};

/* Helper function for lower_omp_regimplify_operands.  Find
   omp_member_access_dummy_var vars and temporarily adjust their
   DECL_VALUE_EXPRs if needed.  */

static tree
lower_omp_regimplify_operands_p (tree *tp, int *walk_subtrees,
                                 void *data)
{
  tree t = omp_member_access_dummy_var (*tp);
  if (t)
    {
      struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
      lower_omp_regimplify_operands_data *ldata
        = (lower_omp_regimplify_operands_data *) wi->info;
      tree o = maybe_lookup_decl (t, ldata->ctx);
      if (o != t)
        {
          ldata->decls->safe_push (DECL_VALUE_EXPR (*tp));
          ldata->decls->safe_push (*tp);
          tree v = unshare_and_remap (DECL_VALUE_EXPR (*tp), t, o);
          SET_DECL_VALUE_EXPR (*tp, v);
        }
    }
  *walk_subtrees = !IS_TYPE_OR_DECL_P (*tp);
  return NULL_TREE;
}

/* Wrapper around gimple_regimplify_operands that adjusts DECL_VALUE_EXPRs
   of omp_member_access_dummy_var vars during regimplification.  */

static void
lower_omp_regimplify_operands (omp_context *ctx, gimple *stmt,
                               gimple_stmt_iterator *gsi_p)
{
  auto_vec<tree, 10> decls;
  if (ctx)
    {
      struct walk_stmt_info wi;
      memset (&wi, '\0', sizeof (wi));
      struct lower_omp_regimplify_operands_data data;
      data.ctx = ctx;
      data.decls = &decls;
      wi.info = &data;
      walk_gimple_op (stmt, lower_omp_regimplify_operands_p, &wi);
    }
  gimple_regimplify_operands (stmt, gsi_p);
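  /* Restore the DECL_VALUE_EXPRs that were temporarily replaced above.  */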
  while (!decls.is_empty ())
    {
      tree t = decls.pop ();
      tree v = decls.pop ();
      SET_DECL_VALUE_EXPR (t, v);
    }
}

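/* Lower the statement at *GSI_P.  CTX is the OMP context of the innermost
   enclosing construct, or NULL when the statement is outside of any OMP
   construct.  */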
static void
lower_omp_1 (gimple_stmt_iterator *gsi_p, omp_context *ctx)
{
  gimple *stmt = gsi_stmt (*gsi_p);
  struct walk_stmt_info wi;
  gcall *call_stmt;

  if (gimple_has_location (stmt))
    input_location = gimple_location (stmt);

  if (task_shared_vars)
    memset (&wi, '\0', sizeof (wi));

  /* If we have issued syntax errors, avoid doing any heavy lifting.
     Just replace the OMP directives with a NOP to avoid
     confusing RTL expansion.  */
  if (seen_error () && is_gimple_omp (stmt))
    {
      gsi_replace (gsi_p, gimple_build_nop (), true);
      return;
    }

  switch (gimple_code (stmt))
    {
    case GIMPLE_COND:
      {
        gcond *cond_stmt = as_a <gcond *> (stmt);
        if ((ctx || task_shared_vars)
            && (walk_tree (gimple_cond_lhs_ptr (cond_stmt),
                           lower_omp_regimplify_p,
                           ctx ? NULL : &wi, NULL)
                || walk_tree (gimple_cond_rhs_ptr (cond_stmt),
                              lower_omp_regimplify_p,
                              ctx ? NULL : &wi, NULL)))
          lower_omp_regimplify_operands (ctx, cond_stmt, gsi_p);
      }
      break;
    case GIMPLE_CATCH:
      lower_omp (gimple_catch_handler_ptr (as_a <gcatch *> (stmt)), ctx);
      break;
    case GIMPLE_EH_FILTER:
      lower_omp (gimple_eh_filter_failure_ptr (stmt), ctx);
      break;
    case GIMPLE_TRY:
      lower_omp (gimple_try_eval_ptr (stmt), ctx);
      lower_omp (gimple_try_cleanup_ptr (stmt), ctx);
      break;
    case GIMPLE_TRANSACTION:
      lower_omp (gimple_transaction_body_ptr (as_a <gtransaction *> (stmt)),
                 ctx);
      break;
    case GIMPLE_BIND:
      lower_omp (gimple_bind_body_ptr (as_a <gbind *> (stmt)), ctx);
      maybe_remove_omp_member_access_dummy_vars (as_a <gbind *> (stmt));
      break;
    case GIMPLE_OMP_PARALLEL:
    case GIMPLE_OMP_TASK:
      ctx = maybe_lookup_ctx (stmt);
      gcc_assert (ctx);
      if (ctx->cancellable)
        ctx->cancel_label = create_artificial_label (UNKNOWN_LOCATION);
      lower_omp_taskreg (gsi_p, ctx);
      break;
    case GIMPLE_OMP_FOR:
      ctx = maybe_lookup_ctx (stmt);
      gcc_assert (ctx);
      if (ctx->cancellable)
        ctx->cancel_label = create_artificial_label (UNKNOWN_LOCATION);
      lower_omp_for (gsi_p, ctx);
      break;
    case GIMPLE_OMP_SECTIONS:
      ctx = maybe_lookup_ctx (stmt);
      gcc_assert (ctx);
      if (ctx->cancellable)
        ctx->cancel_label = create_artificial_label (UNKNOWN_LOCATION);
      lower_omp_sections (gsi_p, ctx);
      break;
    case GIMPLE_OMP_SINGLE:
      ctx = maybe_lookup_ctx (stmt);
      gcc_assert (ctx);
      lower_omp_single (gsi_p, ctx);
      break;
    case GIMPLE_OMP_MASTER:
      ctx = maybe_lookup_ctx (stmt);
      gcc_assert (ctx);
      lower_omp_master (gsi_p, ctx);
      break;
    case GIMPLE_OMP_TASKGROUP:
      ctx = maybe_lookup_ctx (stmt);
      gcc_assert (ctx);
      lower_omp_taskgroup (gsi_p, ctx);
      break;
    case GIMPLE_OMP_ORDERED:
      ctx = maybe_lookup_ctx (stmt);
      gcc_assert (ctx);
      lower_omp_ordered (gsi_p, ctx);
      break;
    case GIMPLE_OMP_SCAN:
      ctx = maybe_lookup_ctx (stmt);
      gcc_assert (ctx);
      lower_omp_scan (gsi_p, ctx);
      break;
    case GIMPLE_OMP_CRITICAL:
      ctx = maybe_lookup_ctx (stmt);
      gcc_assert (ctx);
      lower_omp_critical (gsi_p, ctx);
      break;
    case GIMPLE_OMP_ATOMIC_LOAD:
      if ((ctx || task_shared_vars)
          && walk_tree (gimple_omp_atomic_load_rhs_ptr (
                          as_a <gomp_atomic_load *> (stmt)),
                        lower_omp_regimplify_p, ctx ? NULL : &wi, NULL))
        lower_omp_regimplify_operands (ctx, stmt, gsi_p);
      break;
    case GIMPLE_OMP_TARGET:
      ctx = maybe_lookup_ctx (stmt);
      gcc_assert (ctx);
      lower_omp_target (gsi_p, ctx);
      break;
    case GIMPLE_OMP_TEAMS:
      ctx = maybe_lookup_ctx (stmt);
      gcc_assert (ctx);
      if (gimple_omp_teams_host (as_a <gomp_teams *> (stmt)))
        lower_omp_taskreg (gsi_p, ctx);
      else
        lower_omp_teams (gsi_p, ctx);
      break;
    case GIMPLE_OMP_GRID_BODY:
      ctx = maybe_lookup_ctx (stmt);
      gcc_assert (ctx);
      lower_omp_grid_body (gsi_p, ctx);
      break;
    case GIMPLE_CALL:
      tree fndecl;
      call_stmt = as_a <gcall *> (stmt);
      fndecl = gimple_call_fndecl (call_stmt);
      if (fndecl
          && fndecl_built_in_p (fndecl, BUILT_IN_NORMAL))
        switch (DECL_FUNCTION_CODE (fndecl))
          {
          case BUILT_IN_GOMP_BARRIER:
            if (ctx == NULL)
              break;
            /* FALLTHRU */
          case BUILT_IN_GOMP_CANCEL:
          case BUILT_IN_GOMP_CANCELLATION_POINT:
            omp_context *cctx;
            cctx = ctx;
            if (gimple_code (cctx->stmt) == GIMPLE_OMP_SECTION)
              cctx = cctx->outer;
            gcc_assert (gimple_call_lhs (call_stmt) == NULL_TREE);
            if (!cctx->cancellable)
              {
                if (DECL_FUNCTION_CODE (fndecl)
                    == BUILT_IN_GOMP_CANCELLATION_POINT)
                  {
                    stmt = gimple_build_nop ();
                    gsi_replace (gsi_p, stmt, false);
                  }
                break;
              }
            if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_GOMP_BARRIER)
              {
                fndecl = builtin_decl_explicit (BUILT_IN_GOMP_BARRIER_CANCEL);
                gimple_call_set_fndecl (call_stmt, fndecl);
                gimple_call_set_fntype (call_stmt, TREE_TYPE (fndecl));
              }
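            /* The cancellation builtins return a bool; give the call a
               temporary LHS and branch to the enclosing construct's cancel
               label when the result is true, otherwise fall through.  */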
            tree lhs;
            lhs = create_tmp_var (TREE_TYPE (TREE_TYPE (fndecl)));
            gimple_call_set_lhs (call_stmt, lhs);
            tree fallthru_label;
            fallthru_label = create_artificial_label (UNKNOWN_LOCATION);
            gimple *g;
            g = gimple_build_label (fallthru_label);
            gsi_insert_after (gsi_p, g, GSI_SAME_STMT);
            g = gimple_build_cond (NE_EXPR, lhs,
                                   fold_convert (TREE_TYPE (lhs),
                                                 boolean_false_node),
                                   cctx->cancel_label, fallthru_label);
            gsi_insert_after (gsi_p, g, GSI_SAME_STMT);
            break;
          default:
            break;
          }
      goto regimplify;

    case GIMPLE_ASSIGN:
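      /* For stores whose LHS is tracked by an enclosing construct's
         lastprivate (conditional:) handling, record the store by copying
         the construct's iteration counter into the tracking temporary.  */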
      for (omp_context *up = ctx; up; up = up->outer)
        {
          if (gimple_code (up->stmt) == GIMPLE_OMP_ORDERED
              || gimple_code (up->stmt) == GIMPLE_OMP_CRITICAL
              || gimple_code (up->stmt) == GIMPLE_OMP_TASKGROUP
              || gimple_code (up->stmt) == GIMPLE_OMP_SECTION
              || gimple_code (up->stmt) == GIMPLE_OMP_SCAN
              || (gimple_code (up->stmt) == GIMPLE_OMP_TARGET
                  && (gimple_omp_target_kind (up->stmt)
                      == GF_OMP_TARGET_KIND_DATA)))
            continue;
          else if (!up->lastprivate_conditional_map)
            break;
          tree lhs = get_base_address (gimple_assign_lhs (stmt));
          if (TREE_CODE (lhs) == MEM_REF
              && DECL_P (TREE_OPERAND (lhs, 0))
              && TREE_CODE (TREE_TYPE (TREE_OPERAND (lhs,
                                                     0))) == REFERENCE_TYPE)
            lhs = TREE_OPERAND (lhs, 0);
          if (DECL_P (lhs))
            if (tree *v = up->lastprivate_conditional_map->get (lhs))
              {
                tree clauses;
                if (up->combined_into_simd_safelen1)
                  {
                    up = up->outer;
                    if (gimple_code (up->stmt) == GIMPLE_OMP_SCAN)
                      up = up->outer;
                  }
                if (gimple_code (up->stmt) == GIMPLE_OMP_FOR)
                  clauses = gimple_omp_for_clauses (up->stmt);
                else
                  clauses = gimple_omp_sections_clauses (up->stmt);
                tree c = omp_find_clause (clauses, OMP_CLAUSE__CONDTEMP_);
                if (!OMP_CLAUSE__CONDTEMP__ITER (c))
                  c = omp_find_clause (OMP_CLAUSE_CHAIN (c),
                                       OMP_CLAUSE__CONDTEMP_);
                gcc_assert (OMP_CLAUSE__CONDTEMP__ITER (c));
                gimple *g = gimple_build_assign (*v, OMP_CLAUSE_DECL (c));
                gsi_insert_after (gsi_p, g, GSI_SAME_STMT);
              }
        }
      /* FALLTHRU */

    default:
    regimplify:
      if ((ctx || task_shared_vars)
          && walk_gimple_op (stmt, lower_omp_regimplify_p,
                             ctx ? NULL : &wi))
        {
          /* Just remove clobbers; this should happen only if we have
             "privatized" local addressable variables in SIMD regions.
             The clobber isn't needed in that case, and gimplifying the
             address of the ARRAY_REF into a pointer and creating a
             MEM_REF based clobber would create worse code than we get
             with the clobber dropped.  */
          if (gimple_clobber_p (stmt))
            {
              gsi_replace (gsi_p, gimple_build_nop (), true);
              break;
            }
          lower_omp_regimplify_operands (ctx, stmt, gsi_p);
        }
      break;
    }
}

static void
lower_omp (gimple_seq *body, omp_context *ctx)
{
  location_t saved_location = input_location;
  gimple_stmt_iterator gsi;
  for (gsi = gsi_start (*body); !gsi_end_p (gsi); gsi_next (&gsi))
    lower_omp_1 (&gsi, ctx);
  /* During gimplification, we haven't folded statements inside offloading
     or taskreg regions (gimplify.c:maybe_fold_stmt); do that now.  */
  if (target_nesting_level || taskreg_nesting_level)
    for (gsi = gsi_start (*body); !gsi_end_p (gsi); gsi_next (&gsi))
      fold_stmt (&gsi);
  input_location = saved_location;
}

/* Main entry point.  */

static unsigned int
execute_lower_omp (void)
{
  gimple_seq body;
  int i;
  omp_context *ctx;

  /* This pass always runs, to provide PROP_gimple_lomp.
     But often, there is nothing to do.  */
  if (flag_openacc == 0 && flag_openmp == 0
      && flag_openmp_simd == 0)
    return 0;

  all_contexts = splay_tree_new (splay_tree_compare_pointers, 0,
                                 delete_omp_context);

  body = gimple_body (current_function_decl);

  if (hsa_gen_requested_p ())
    omp_grid_gridify_all_targets (&body);

  scan_omp (&body, NULL);
  gcc_assert (taskreg_nesting_level == 0);
  FOR_EACH_VEC_ELT (taskreg_contexts, i, ctx)
    finish_taskreg_scan (ctx);
  taskreg_contexts.release ();

  if (all_contexts->root)
    {
      if (task_shared_vars)
        push_gimplify_context ();
      lower_omp (&body, NULL);
      if (task_shared_vars)
        pop_gimplify_context (NULL);
    }

  if (all_contexts)
    {
      splay_tree_delete (all_contexts);
      all_contexts = NULL;
    }
  BITMAP_FREE (task_shared_vars);
  BITMAP_FREE (global_nonaddressable_vars);

  /* If the current function is a method, remove the artificial dummy
     VAR_DECLs created for non-static data member privatization: they
     aren't needed for debug info or anything else, have already been
     replaced everywhere in the IL, and cause problems with LTO.  */
  if (DECL_ARGUMENTS (current_function_decl)
      && DECL_ARTIFICIAL (DECL_ARGUMENTS (current_function_decl))
      && (TREE_CODE (TREE_TYPE (DECL_ARGUMENTS (current_function_decl)))
          == POINTER_TYPE))
    remove_member_access_dummy_vars (DECL_INITIAL (current_function_decl));
  return 0;
}

namespace {

const pass_data pass_data_lower_omp =
{
  GIMPLE_PASS, /* type */
  "omplower", /* name */
  OPTGROUP_OMP, /* optinfo_flags */
  TV_NONE, /* tv_id */
  PROP_gimple_any, /* properties_required */
  PROP_gimple_lomp | PROP_gimple_lomp_dev, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_lower_omp : public gimple_opt_pass
{
public:
  pass_lower_omp (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_lower_omp, ctxt)
  {}

  /* opt_pass methods: */
  virtual unsigned int execute (function *) { return execute_lower_omp (); }

}; // class pass_lower_omp

} // anon namespace

gimple_opt_pass *
make_pass_lower_omp (gcc::context *ctxt)
{
  return new pass_lower_omp (ctxt);
}
\f
/* The following is a utility to diagnose structured block violations.
   It is not part of the "omplower" pass, as that's invoked too late.  It
   should be invoked by the respective front ends after gimplification.  */
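/* For example (illustrative only), given

     #pragma omp parallel
     {
     lab: ;
     }
     goto lab;

   the goto is diagnosed as an invalid entry to an OpenMP structured
   block: the label's context is the parallel region, while the branch
   has none.  */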

static splay_tree all_labels;

/* Check for mismatched contexts and generate an error if needed.  Return
   true if an error is detected.  */

static bool
diagnose_sb_0 (gimple_stmt_iterator *gsi_p,
               gimple *branch_ctx, gimple *label_ctx)
{
  gcc_checking_assert (!branch_ctx || is_gimple_omp (branch_ctx));
  gcc_checking_assert (!label_ctx || is_gimple_omp (label_ctx));

  if (label_ctx == branch_ctx)
    return false;

  const char* kind = NULL;

  if (flag_openacc)
    {
      if ((branch_ctx && is_gimple_omp_oacc (branch_ctx))
          || (label_ctx && is_gimple_omp_oacc (label_ctx)))
        {
          gcc_checking_assert (kind == NULL);
          kind = "OpenACC";
        }
    }
  if (kind == NULL)
    {
      gcc_checking_assert (flag_openmp || flag_openmp_simd);
      kind = "OpenMP";
    }

  /* Previously we kept track of the label's entire context in diagnose_sb_[12]
     so we could traverse it and issue a correct "exit" or "enter" error
     message upon a structured block violation.

     We built the context by building a list with tree_cons'ing, but there is
     no easy counterpart in gimple tuples.  It seems like far too much work
     for issuing exit/enter error messages.  If someone really misses the
     distinct error message... patches welcome.  */

#if 0
  /* Try to avoid confusing the user by producing an error message
     with correct "exit" or "enter" verbiage.  We prefer "exit"
     unless we can show that LABEL_CTX is nested within BRANCH_CTX.  */
  if (branch_ctx == NULL)
    exit_p = false;
  else
    {
      while (label_ctx)
        {
          if (TREE_VALUE (label_ctx) == branch_ctx)
            {
              exit_p = false;
              break;
            }
          label_ctx = TREE_CHAIN (label_ctx);
        }
    }

  if (exit_p)
    error ("invalid exit from %s structured block", kind);
  else
    error ("invalid entry to %s structured block", kind);
#endif

  /* If it's obvious we have an invalid entry, be specific about the error.  */
  if (branch_ctx == NULL)
    error ("invalid entry to %s structured block", kind);
  else
    {
      /* Otherwise, be vague and lazy, but efficient.  */
      error ("invalid branch to/from %s structured block", kind);
    }

  gsi_replace (gsi_p, gimple_build_nop (), false);
  return true;
}

/* Pass 1: Create a minimal tree of structured blocks, and record
   where each label is found.  */
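/* E.g., for a label inside '#pragma omp single' that is itself nested in
   '#pragma omp parallel', the recorded context is the inner
   GIMPLE_OMP_SINGLE statement, i.e. the innermost enclosing construct.  */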

static tree
diagnose_sb_1 (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
               struct walk_stmt_info *wi)
{
  gimple *context = (gimple *) wi->info;
  gimple *inner_context;
  gimple *stmt = gsi_stmt (*gsi_p);

  *handled_ops_p = true;

  switch (gimple_code (stmt))
    {
    WALK_SUBSTMTS;

    case GIMPLE_OMP_PARALLEL:
    case GIMPLE_OMP_TASK:
    case GIMPLE_OMP_SECTIONS:
    case GIMPLE_OMP_SINGLE:
    case GIMPLE_OMP_SECTION:
    case GIMPLE_OMP_MASTER:
    case GIMPLE_OMP_ORDERED:
    case GIMPLE_OMP_SCAN:
    case GIMPLE_OMP_CRITICAL:
    case GIMPLE_OMP_TARGET:
    case GIMPLE_OMP_TEAMS:
    case GIMPLE_OMP_TASKGROUP:
      /* The minimal context here is just the current OMP construct.  */
      inner_context = stmt;
      wi->info = inner_context;
      walk_gimple_seq (gimple_omp_body (stmt), diagnose_sb_1, NULL, wi);
      wi->info = context;
      break;

    case GIMPLE_OMP_FOR:
      inner_context = stmt;
      wi->info = inner_context;
      /* gimple_omp_for_{index,initial,final} are all DECLs; no need to
         walk them.  */
      walk_gimple_seq (gimple_omp_for_pre_body (stmt),
                       diagnose_sb_1, NULL, wi);
      walk_gimple_seq (gimple_omp_body (stmt), diagnose_sb_1, NULL, wi);
      wi->info = context;
      break;

    case GIMPLE_LABEL:
      splay_tree_insert (all_labels,
                         (splay_tree_key) gimple_label_label (
                                            as_a <glabel *> (stmt)),
                         (splay_tree_value) context);
      break;

    default:
      break;
    }

  return NULL_TREE;
}

/* Pass 2: Check each branch and see if its context differs from that of
   the destination label's context.  */

static tree
diagnose_sb_2 (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
               struct walk_stmt_info *wi)
{
  gimple *context = (gimple *) wi->info;
  splay_tree_node n;
  gimple *stmt = gsi_stmt (*gsi_p);

  *handled_ops_p = true;

  switch (gimple_code (stmt))
    {
    WALK_SUBSTMTS;

    case GIMPLE_OMP_PARALLEL:
    case GIMPLE_OMP_TASK:
    case GIMPLE_OMP_SECTIONS:
    case GIMPLE_OMP_SINGLE:
    case GIMPLE_OMP_SECTION:
    case GIMPLE_OMP_MASTER:
    case GIMPLE_OMP_ORDERED:
    case GIMPLE_OMP_SCAN:
    case GIMPLE_OMP_CRITICAL:
    case GIMPLE_OMP_TARGET:
    case GIMPLE_OMP_TEAMS:
    case GIMPLE_OMP_TASKGROUP:
      wi->info = stmt;
      walk_gimple_seq_mod (gimple_omp_body_ptr (stmt), diagnose_sb_2, NULL, wi);
      wi->info = context;
      break;

    case GIMPLE_OMP_FOR:
      wi->info = stmt;
      /* gimple_omp_for_{index,initial,final} are all DECLs; no need to
         walk them.  */
      walk_gimple_seq_mod (gimple_omp_for_pre_body_ptr (stmt),
                           diagnose_sb_2, NULL, wi);
      walk_gimple_seq_mod (gimple_omp_body_ptr (stmt), diagnose_sb_2, NULL, wi);
      wi->info = context;
      break;

    case GIMPLE_COND:
      {
        gcond *cond_stmt = as_a <gcond *> (stmt);
        tree lab = gimple_cond_true_label (cond_stmt);
        if (lab)
          {
            n = splay_tree_lookup (all_labels,
                                   (splay_tree_key) lab);
            diagnose_sb_0 (gsi_p, context,
                           n ? (gimple *) n->value : NULL);
          }
        lab = gimple_cond_false_label (cond_stmt);
        if (lab)
          {
            n = splay_tree_lookup (all_labels,
                                   (splay_tree_key) lab);
            diagnose_sb_0 (gsi_p, context,
                           n ? (gimple *) n->value : NULL);
          }
      }
      break;

    case GIMPLE_GOTO:
      {
        tree lab = gimple_goto_dest (stmt);
        if (TREE_CODE (lab) != LABEL_DECL)
          break;

        n = splay_tree_lookup (all_labels, (splay_tree_key) lab);
        diagnose_sb_0 (gsi_p, context, n ? (gimple *) n->value : NULL);
      }
      break;

    case GIMPLE_SWITCH:
      {
        gswitch *switch_stmt = as_a <gswitch *> (stmt);
        unsigned int i;
        for (i = 0; i < gimple_switch_num_labels (switch_stmt); ++i)
          {
            tree lab = CASE_LABEL (gimple_switch_label (switch_stmt, i));
            n = splay_tree_lookup (all_labels, (splay_tree_key) lab);
            if (n && diagnose_sb_0 (gsi_p, context, (gimple *) n->value))
              break;
          }
      }
      break;

    case GIMPLE_RETURN:
      diagnose_sb_0 (gsi_p, context, NULL);
      break;

    default:
      break;
    }

  return NULL_TREE;
}

static unsigned int
diagnose_omp_structured_block_errors (void)
{
  struct walk_stmt_info wi;
  gimple_seq body = gimple_body (current_function_decl);

  all_labels = splay_tree_new (splay_tree_compare_pointers, 0, 0);

  memset (&wi, 0, sizeof (wi));
  walk_gimple_seq (body, diagnose_sb_1, NULL, &wi);

  memset (&wi, 0, sizeof (wi));
  wi.want_locations = true;
  walk_gimple_seq_mod (&body, diagnose_sb_2, NULL, &wi);

  gimple_set_body (current_function_decl, body);

  splay_tree_delete (all_labels);
  all_labels = NULL;

  return 0;
}

namespace {

const pass_data pass_data_diagnose_omp_blocks =
{
  GIMPLE_PASS, /* type */
  "*diagnose_omp_blocks", /* name */
  OPTGROUP_OMP, /* optinfo_flags */
  TV_NONE, /* tv_id */
  PROP_gimple_any, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_diagnose_omp_blocks : public gimple_opt_pass
{
public:
  pass_diagnose_omp_blocks (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_diagnose_omp_blocks, ctxt)
  {}

  /* opt_pass methods: */
  virtual bool gate (function *)
  {
    return flag_openacc || flag_openmp || flag_openmp_simd;
  }
  virtual unsigned int execute (function *)
  {
    return diagnose_omp_structured_block_errors ();
  }

}; // class pass_diagnose_omp_blocks

} // anon namespace

gimple_opt_pass *
make_pass_diagnose_omp_blocks (gcc::context *ctxt)
{
  return new pass_diagnose_omp_blocks (ctxt);
}
\f

#include "gt-omp-low.h"