5d1b88c76ed1e6708b22264c816ba90450f1522f
[gcc.git] / gcc / omp-low.c
1 /* Lowering pass for OMP directives. Converts OMP directives into explicit
2 calls to the runtime library (libgomp), data marshalling to implement data
3 sharing and copying clauses, offloading to accelerators, and more.
4
5 Contributed by Diego Novillo <dnovillo@redhat.com>
6
7 Copyright (C) 2005-2019 Free Software Foundation, Inc.
8
9 This file is part of GCC.
10
11 GCC is free software; you can redistribute it and/or modify it under
12 the terms of the GNU General Public License as published by the Free
13 Software Foundation; either version 3, or (at your option) any later
14 version.
15
16 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
17 WARRANTY; without even the implied warranty of MERCHANTABILITY or
18 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
19 for more details.
20
21 You should have received a copy of the GNU General Public License
22 along with GCC; see the file COPYING3. If not see
23 <http://www.gnu.org/licenses/>. */
24
25 #include "config.h"
26 #include "system.h"
27 #include "coretypes.h"
28 #include "backend.h"
29 #include "target.h"
30 #include "tree.h"
31 #include "gimple.h"
32 #include "tree-pass.h"
33 #include "ssa.h"
34 #include "cgraph.h"
35 #include "pretty-print.h"
36 #include "diagnostic-core.h"
37 #include "fold-const.h"
38 #include "stor-layout.h"
39 #include "internal-fn.h"
40 #include "gimple-fold.h"
41 #include "gimplify.h"
42 #include "gimple-iterator.h"
43 #include "gimplify-me.h"
44 #include "gimple-walk.h"
45 #include "tree-iterator.h"
46 #include "tree-inline.h"
47 #include "langhooks.h"
48 #include "tree-dfa.h"
49 #include "tree-ssa.h"
50 #include "splay-tree.h"
51 #include "omp-general.h"
52 #include "omp-low.h"
53 #include "omp-grid.h"
54 #include "gimple-low.h"
55 #include "symbol-summary.h"
56 #include "tree-nested.h"
57 #include "context.h"
58 #include "gomp-constants.h"
59 #include "gimple-pretty-print.h"
60 #include "hsa-common.h"
61 #include "stringpool.h"
62 #include "attribs.h"
63
64 /* Lowering of OMP parallel and workshare constructs proceeds in two
65 phases. The first phase scans the function looking for OMP statements
66 and then for variables that must be replaced to satisfy data sharing
67 clauses. The second phase expands code for the constructs, as well as
68 re-gimplifying things when variables have been replaced with complex
69 expressions.
70
71 Final code generation is done by pass_expand_omp. The flowgraph is
72 scanned for regions which are then moved to a new
73 function, to be invoked by the thread library, or offloaded. */
74
75 /* Context structure. Used to store information about each parallel
76 directive in the code. */
77
78 struct omp_context
79 {
80 /* This field must be at the beginning, as we do "inheritance": Some
81 callback functions for tree-inline.c (e.g., omp_copy_decl)
82 receive a copy_body_data pointer that is up-casted to an
83 omp_context pointer. */
84 copy_body_data cb;
85
86 /* The tree of contexts corresponding to the encountered constructs. */
87 struct omp_context *outer;
88 gimple *stmt;
89
90 /* Map variables to fields in a structure that allows communication
91 between sending and receiving threads. */
92 splay_tree field_map;
93 tree record_type;
94 tree sender_decl;
95 tree receiver_decl;
96
97 /* These are used just by task contexts, if task firstprivate fn is
98 needed. srecord_type is used to communicate from the thread
99 that encountered the task construct to task firstprivate fn,
100 record_type is allocated by GOMP_task, initialized by task firstprivate
101 fn and passed to the task body fn. */
102 splay_tree sfield_map;
103 tree srecord_type;
104
105 /* A chain of variables to add to the top-level block surrounding the
106 construct. In the case of a parallel, this is in the child function. */
107 tree block_vars;
108
109 /* Label to which GOMP_cancel{,llation_point} and explicit and implicit
110 barriers should jump to during omplower pass. */
111 tree cancel_label;
112
113 /* The sibling GIMPLE_OMP_FOR simd with _simt_ clause or NULL
114 otherwise. */
115 gimple *simt_stmt;
116
117 /* For task reductions registered in this context, a vector containing
118 the length of the private copies block (if constant, otherwise NULL)
119 and then offsets (if constant, otherwise NULL) for each entry. */
120 vec<tree> task_reductions;
121
122 /* A hash map from the reduction clauses to the registered array
123 elts. */
124 hash_map<tree, unsigned> *task_reduction_map;
125
126 /* And a hash map from the lastprivate(conditional:) variables to their
127 corresponding tracking loop iteration variables. */
128 hash_map<tree, tree> *lastprivate_conditional_map;
129
130 /* Nesting depth of this context. Used to beautify error messages re
131 invalid gotos. The outermost ctx is depth 1, with depth 0 being
132 reserved for the main body of the function. */
133 int depth;
134
135 /* True if this parallel directive is nested within another. */
136 bool is_nested;
137
138 /* True if this construct can be cancelled. */
139 bool cancellable;
140
141 /* True if lower_omp_1 should look up lastprivate conditional in parent
142 context. */
143 bool combined_into_simd_safelen1;
144
145 /* True if there is nested scan context with inclusive clause. */
146 bool scan_inclusive;
147
148 /* True if there is nested scan context with exclusive clause. */
149 bool scan_exclusive;
150
151 /* True in the second simd loop of for simd with inscan reductions. */
152 bool for_simd_scan_phase;
153
154 /* True if there is order(concurrent) clause on the construct. */
155 bool order_concurrent;
156
157 /* True if there is bind clause on the construct (i.e. a loop construct). */
158 bool loop_p;
159 };
160
161 static splay_tree all_contexts;
162 static int taskreg_nesting_level;
163 static int target_nesting_level;
164 static bitmap task_shared_vars;
165 static bitmap global_nonaddressable_vars;
166 static vec<omp_context *> taskreg_contexts;
167
168 static void scan_omp (gimple_seq *, omp_context *);
169 static tree scan_omp_1_op (tree *, int *, void *);
170
171 #define WALK_SUBSTMTS \
172 case GIMPLE_BIND: \
173 case GIMPLE_TRY: \
174 case GIMPLE_CATCH: \
175 case GIMPLE_EH_FILTER: \
176 case GIMPLE_TRANSACTION: \
177 /* The sub-statements for these should be walked. */ \
178 *handled_ops_p = false; \
179 break;
180
181 /* Return true if CTX corresponds to an oacc parallel region. */
182
183 static bool
184 is_oacc_parallel (omp_context *ctx)
185 {
186 enum gimple_code outer_type = gimple_code (ctx->stmt);
187 return ((outer_type == GIMPLE_OMP_TARGET)
188 && (gimple_omp_target_kind (ctx->stmt)
189 == GF_OMP_TARGET_KIND_OACC_PARALLEL));
190 }
191
192 /* Return true if CTX corresponds to an oacc kernels region. */
193
194 static bool
195 is_oacc_kernels (omp_context *ctx)
196 {
197 enum gimple_code outer_type = gimple_code (ctx->stmt);
198 return ((outer_type == GIMPLE_OMP_TARGET)
199 && (gimple_omp_target_kind (ctx->stmt)
200 == GF_OMP_TARGET_KIND_OACC_KERNELS));
201 }
202
203 /* If DECL is the artificial dummy VAR_DECL created for non-static
204 data member privatization, return the underlying "this" parameter,
205 otherwise return NULL. */
206
207 tree
208 omp_member_access_dummy_var (tree decl)
209 {
210 if (!VAR_P (decl)
211 || !DECL_ARTIFICIAL (decl)
212 || !DECL_IGNORED_P (decl)
213 || !DECL_HAS_VALUE_EXPR_P (decl)
214 || !lang_hooks.decls.omp_disregard_value_expr (decl, false))
215 return NULL_TREE;
216
217 tree v = DECL_VALUE_EXPR (decl);
218 if (TREE_CODE (v) != COMPONENT_REF)
219 return NULL_TREE;
220
221 while (1)
222 switch (TREE_CODE (v))
223 {
224 case COMPONENT_REF:
225 case MEM_REF:
226 case INDIRECT_REF:
227 CASE_CONVERT:
228 case POINTER_PLUS_EXPR:
229 v = TREE_OPERAND (v, 0);
230 continue;
231 case PARM_DECL:
232 if (DECL_CONTEXT (v) == current_function_decl
233 && DECL_ARTIFICIAL (v)
234 && TREE_CODE (TREE_TYPE (v)) == POINTER_TYPE)
235 return v;
236 return NULL_TREE;
237 default:
238 return NULL_TREE;
239 }
240 }
241
242 /* Helper for unshare_and_remap, called through walk_tree. */
243
244 static tree
245 unshare_and_remap_1 (tree *tp, int *walk_subtrees, void *data)
246 {
247 tree *pair = (tree *) data;
248 if (*tp == pair[0])
249 {
250 *tp = unshare_expr (pair[1]);
251 *walk_subtrees = 0;
252 }
253 else if (IS_TYPE_OR_DECL_P (*tp))
254 *walk_subtrees = 0;
255 return NULL_TREE;
256 }
257
258 /* Return unshare_expr (X) with all occurrences of FROM
259 replaced with TO. */
260
261 static tree
262 unshare_and_remap (tree x, tree from, tree to)
263 {
264 tree pair[2] = { from, to };
265 x = unshare_expr (x);
266 walk_tree (&x, unshare_and_remap_1, pair, NULL);
267 return x;
268 }
269
270 /* Convenience function for calling scan_omp_1_op on tree operands. */
271
272 static inline tree
273 scan_omp_op (tree *tp, omp_context *ctx)
274 {
275 struct walk_stmt_info wi;
276
277 memset (&wi, 0, sizeof (wi));
278 wi.info = ctx;
279 wi.want_locations = true;
280
281 return walk_tree (tp, scan_omp_1_op, &wi, NULL);
282 }
283
284 static void lower_omp (gimple_seq *, omp_context *);
285 static tree lookup_decl_in_outer_ctx (tree, omp_context *);
286 static tree maybe_lookup_decl_in_outer_ctx (tree, omp_context *);
287
288 /* Return true if CTX is for an omp parallel. */
289
290 static inline bool
291 is_parallel_ctx (omp_context *ctx)
292 {
293 return gimple_code (ctx->stmt) == GIMPLE_OMP_PARALLEL;
294 }
295
296
297 /* Return true if CTX is for an omp task. */
298
299 static inline bool
300 is_task_ctx (omp_context *ctx)
301 {
302 return gimple_code (ctx->stmt) == GIMPLE_OMP_TASK;
303 }
304
305
306 /* Return true if CTX is for an omp taskloop. */
307
308 static inline bool
309 is_taskloop_ctx (omp_context *ctx)
310 {
311 return gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
312 && gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_TASKLOOP;
313 }
314
315
316 /* Return true if CTX is for a host omp teams. */
317
318 static inline bool
319 is_host_teams_ctx (omp_context *ctx)
320 {
321 return gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS
322 && gimple_omp_teams_host (as_a <gomp_teams *> (ctx->stmt));
323 }
324
325 /* Return true if CTX is for an omp parallel or omp task or host omp teams
326 (the last one is strictly not a task region in OpenMP speak, but we
327 need to treat it similarly). */
328
329 static inline bool
330 is_taskreg_ctx (omp_context *ctx)
331 {
332 return is_parallel_ctx (ctx) || is_task_ctx (ctx) || is_host_teams_ctx (ctx);
333 }
334
335 /* Return true if EXPR is variable sized. */
336
337 static inline bool
338 is_variable_sized (const_tree expr)
339 {
340 return !TREE_CONSTANT (TYPE_SIZE_UNIT (TREE_TYPE (expr)));
341 }
342
343 /* Lookup variables. The "maybe" form
344 allows for the variable form to not have been entered, otherwise we
345 assert that the variable must have been entered. */
346
347 static inline tree
348 lookup_decl (tree var, omp_context *ctx)
349 {
350 tree *n = ctx->cb.decl_map->get (var);
351 return *n;
352 }
353
354 static inline tree
355 maybe_lookup_decl (const_tree var, omp_context *ctx)
356 {
357 tree *n = ctx->cb.decl_map->get (const_cast<tree> (var));
358 return n ? *n : NULL_TREE;
359 }
360
361 static inline tree
362 lookup_field (tree var, omp_context *ctx)
363 {
364 splay_tree_node n;
365 n = splay_tree_lookup (ctx->field_map, (splay_tree_key) var);
366 return (tree) n->value;
367 }
368
369 static inline tree
370 lookup_sfield (splay_tree_key key, omp_context *ctx)
371 {
372 splay_tree_node n;
373 n = splay_tree_lookup (ctx->sfield_map
374 ? ctx->sfield_map : ctx->field_map, key);
375 return (tree) n->value;
376 }
377
378 static inline tree
379 lookup_sfield (tree var, omp_context *ctx)
380 {
381 return lookup_sfield ((splay_tree_key) var, ctx);
382 }
383
384 static inline tree
385 maybe_lookup_field (splay_tree_key key, omp_context *ctx)
386 {
387 splay_tree_node n;
388 n = splay_tree_lookup (ctx->field_map, key);
389 return n ? (tree) n->value : NULL_TREE;
390 }
391
392 static inline tree
393 maybe_lookup_field (tree var, omp_context *ctx)
394 {
395 return maybe_lookup_field ((splay_tree_key) var, ctx);
396 }
397
398 /* Return true if DECL should be copied by pointer. SHARED_CTX is
399 the parallel context if DECL is to be shared. */
400
401 static bool
402 use_pointer_for_field (tree decl, omp_context *shared_ctx)
403 {
404 if (AGGREGATE_TYPE_P (TREE_TYPE (decl))
405 || TYPE_ATOMIC (TREE_TYPE (decl)))
406 return true;
407
408 /* We can only use copy-in/copy-out semantics for shared variables
409 when we know the value is not accessible from an outer scope. */
410 if (shared_ctx)
411 {
412 gcc_assert (!is_gimple_omp_oacc (shared_ctx->stmt));
413
414 /* ??? Trivially accessible from anywhere. But why would we even
415 be passing an address in this case? Should we simply assert
416 this to be false, or should we have a cleanup pass that removes
417 these from the list of mappings? */
418 if (is_global_var (maybe_lookup_decl_in_outer_ctx (decl, shared_ctx)))
419 return true;
420
421 /* For variables with DECL_HAS_VALUE_EXPR_P set, we cannot tell
422 without analyzing the expression whether or not its location
423 is accessible to anyone else. In the case of nested parallel
424 regions it certainly may be. */
425 if (TREE_CODE (decl) != RESULT_DECL && DECL_HAS_VALUE_EXPR_P (decl))
426 return true;
427
428 /* Do not use copy-in/copy-out for variables that have their
429 address taken. */
430 if (is_global_var (decl))
431 {
432 /* For file scope vars, track whether we've seen them as
433 non-addressable initially and in that case, keep the same
434 answer for the duration of the pass, even when they are made
435 addressable later on e.g. through reduction expansion. Global
436 variables which weren't addressable before the pass will not
437 have their privatized copies address taken. See PR91216. */
438 if (!TREE_ADDRESSABLE (decl))
439 {
440 if (!global_nonaddressable_vars)
441 global_nonaddressable_vars = BITMAP_ALLOC (NULL);
442 bitmap_set_bit (global_nonaddressable_vars, DECL_UID (decl));
443 }
444 else if (!global_nonaddressable_vars
445 || !bitmap_bit_p (global_nonaddressable_vars,
446 DECL_UID (decl)))
447 return true;
448 }
449 else if (TREE_ADDRESSABLE (decl))
450 return true;
451
452 /* lower_send_shared_vars only uses copy-in, but not copy-out
453 for these. */
454 if (TREE_READONLY (decl)
455 || ((TREE_CODE (decl) == RESULT_DECL
456 || TREE_CODE (decl) == PARM_DECL)
457 && DECL_BY_REFERENCE (decl)))
458 return false;
459
460 /* Disallow copy-in/out in nested parallel if
461 decl is shared in outer parallel, otherwise
462 each thread could store the shared variable
463 in its own copy-in location, making the
464 variable no longer really shared. */
465 if (shared_ctx->is_nested)
466 {
467 omp_context *up;
468
469 for (up = shared_ctx->outer; up; up = up->outer)
470 if (is_taskreg_ctx (up) && maybe_lookup_decl (decl, up))
471 break;
472
473 if (up)
474 {
475 tree c;
476
477 for (c = gimple_omp_taskreg_clauses (up->stmt);
478 c; c = OMP_CLAUSE_CHAIN (c))
479 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED
480 && OMP_CLAUSE_DECL (c) == decl)
481 break;
482
483 if (c)
484 goto maybe_mark_addressable_and_ret;
485 }
486 }
487
488 /* For tasks avoid using copy-in/out. As tasks can be
489 deferred or executed in different thread, when GOMP_task
490 returns, the task hasn't necessarily terminated. */
491 if (is_task_ctx (shared_ctx))
492 {
493 tree outer;
494 maybe_mark_addressable_and_ret:
495 outer = maybe_lookup_decl_in_outer_ctx (decl, shared_ctx);
496 if (is_gimple_reg (outer) && !omp_member_access_dummy_var (outer))
497 {
498 /* Taking address of OUTER in lower_send_shared_vars
499 might need regimplification of everything that uses the
500 variable. */
501 if (!task_shared_vars)
502 task_shared_vars = BITMAP_ALLOC (NULL);
503 bitmap_set_bit (task_shared_vars, DECL_UID (outer));
504 TREE_ADDRESSABLE (outer) = 1;
505 }
506 return true;
507 }
508 }
509
510 return false;
511 }
512
513 /* Construct a new automatic decl similar to VAR. */
514
515 static tree
516 omp_copy_decl_2 (tree var, tree name, tree type, omp_context *ctx)
517 {
518 tree copy = copy_var_decl (var, name, type);
519
520 DECL_CONTEXT (copy) = current_function_decl;
521 DECL_CHAIN (copy) = ctx->block_vars;
522 /* If VAR is listed in task_shared_vars, it means it wasn't
523 originally addressable and is just because task needs to take
524 it's address. But we don't need to take address of privatizations
525 from that var. */
526 if (TREE_ADDRESSABLE (var)
527 && ((task_shared_vars
528 && bitmap_bit_p (task_shared_vars, DECL_UID (var)))
529 || (global_nonaddressable_vars
530 && bitmap_bit_p (global_nonaddressable_vars, DECL_UID (var)))))
531 TREE_ADDRESSABLE (copy) = 0;
532 ctx->block_vars = copy;
533
534 return copy;
535 }
536
537 static tree
538 omp_copy_decl_1 (tree var, omp_context *ctx)
539 {
540 return omp_copy_decl_2 (var, DECL_NAME (var), TREE_TYPE (var), ctx);
541 }
542
543 /* Build COMPONENT_REF and set TREE_THIS_VOLATILE and TREE_READONLY on it
544 as appropriate. */
545 static tree
546 omp_build_component_ref (tree obj, tree field)
547 {
548 tree ret = build3 (COMPONENT_REF, TREE_TYPE (field), obj, field, NULL);
549 if (TREE_THIS_VOLATILE (field))
550 TREE_THIS_VOLATILE (ret) |= 1;
551 if (TREE_READONLY (field))
552 TREE_READONLY (ret) |= 1;
553 return ret;
554 }
555
556 /* Build tree nodes to access the field for VAR on the receiver side. */
557
558 static tree
559 build_receiver_ref (tree var, bool by_ref, omp_context *ctx)
560 {
561 tree x, field = lookup_field (var, ctx);
562
563 /* If the receiver record type was remapped in the child function,
564 remap the field into the new record type. */
565 x = maybe_lookup_field (field, ctx);
566 if (x != NULL)
567 field = x;
568
569 x = build_simple_mem_ref (ctx->receiver_decl);
570 TREE_THIS_NOTRAP (x) = 1;
571 x = omp_build_component_ref (x, field);
572 if (by_ref)
573 {
574 x = build_simple_mem_ref (x);
575 TREE_THIS_NOTRAP (x) = 1;
576 }
577
578 return x;
579 }
580
581 /* Build tree nodes to access VAR in the scope outer to CTX. In the case
582 of a parallel, this is a component reference; for workshare constructs
583 this is some variable. */
584
585 static tree
586 build_outer_var_ref (tree var, omp_context *ctx,
587 enum omp_clause_code code = OMP_CLAUSE_ERROR)
588 {
589 tree x;
590 omp_context *outer = ctx->outer;
591 while (outer && gimple_code (outer->stmt) == GIMPLE_OMP_TASKGROUP)
592 outer = outer->outer;
593
594 if (is_global_var (maybe_lookup_decl_in_outer_ctx (var, ctx)))
595 x = var;
596 else if (is_variable_sized (var))
597 {
598 x = TREE_OPERAND (DECL_VALUE_EXPR (var), 0);
599 x = build_outer_var_ref (x, ctx, code);
600 x = build_simple_mem_ref (x);
601 }
602 else if (is_taskreg_ctx (ctx))
603 {
604 bool by_ref = use_pointer_for_field (var, NULL);
605 x = build_receiver_ref (var, by_ref, ctx);
606 }
607 else if ((gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
608 && gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_SIMD)
609 || ctx->loop_p
610 || (code == OMP_CLAUSE_PRIVATE
611 && (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
612 || gimple_code (ctx->stmt) == GIMPLE_OMP_SECTIONS
613 || gimple_code (ctx->stmt) == GIMPLE_OMP_SINGLE)))
614 {
615 /* #pragma omp simd isn't a worksharing construct, and can reference
616 even private vars in its linear etc. clauses.
617 Similarly for OMP_CLAUSE_PRIVATE with outer ref, that can refer
618 to private vars in all worksharing constructs. */
619 x = NULL_TREE;
620 if (outer && is_taskreg_ctx (outer))
621 x = lookup_decl (var, outer);
622 else if (outer)
623 x = maybe_lookup_decl_in_outer_ctx (var, ctx);
624 if (x == NULL_TREE)
625 x = var;
626 }
627 else if (code == OMP_CLAUSE_LASTPRIVATE && is_taskloop_ctx (ctx))
628 {
629 gcc_assert (outer);
630 splay_tree_node n
631 = splay_tree_lookup (outer->field_map,
632 (splay_tree_key) &DECL_UID (var));
633 if (n == NULL)
634 {
635 if (is_global_var (maybe_lookup_decl_in_outer_ctx (var, outer)))
636 x = var;
637 else
638 x = lookup_decl (var, outer);
639 }
640 else
641 {
642 tree field = (tree) n->value;
643 /* If the receiver record type was remapped in the child function,
644 remap the field into the new record type. */
645 x = maybe_lookup_field (field, outer);
646 if (x != NULL)
647 field = x;
648
649 x = build_simple_mem_ref (outer->receiver_decl);
650 x = omp_build_component_ref (x, field);
651 if (use_pointer_for_field (var, outer))
652 x = build_simple_mem_ref (x);
653 }
654 }
655 else if (outer)
656 {
657 if (gimple_code (outer->stmt) == GIMPLE_OMP_GRID_BODY)
658 {
659 outer = outer->outer;
660 gcc_assert (outer
661 && gimple_code (outer->stmt) != GIMPLE_OMP_GRID_BODY);
662 }
663 x = lookup_decl (var, outer);
664 }
665 else if (omp_is_reference (var))
666 /* This can happen with orphaned constructs. If var is reference, it is
667 possible it is shared and as such valid. */
668 x = var;
669 else if (omp_member_access_dummy_var (var))
670 x = var;
671 else
672 gcc_unreachable ();
673
674 if (x == var)
675 {
676 tree t = omp_member_access_dummy_var (var);
677 if (t)
678 {
679 x = DECL_VALUE_EXPR (var);
680 tree o = maybe_lookup_decl_in_outer_ctx (t, ctx);
681 if (o != t)
682 x = unshare_and_remap (x, t, o);
683 else
684 x = unshare_expr (x);
685 }
686 }
687
688 if (omp_is_reference (var))
689 x = build_simple_mem_ref (x);
690
691 return x;
692 }
693
694 /* Build tree nodes to access the field for VAR on the sender side. */
695
696 static tree
697 build_sender_ref (splay_tree_key key, omp_context *ctx)
698 {
699 tree field = lookup_sfield (key, ctx);
700 return omp_build_component_ref (ctx->sender_decl, field);
701 }
702
703 static tree
704 build_sender_ref (tree var, omp_context *ctx)
705 {
706 return build_sender_ref ((splay_tree_key) var, ctx);
707 }
708
709 /* Add a new field for VAR inside the structure CTX->SENDER_DECL. If
710 BASE_POINTERS_RESTRICT, declare the field with restrict. */
711
712 static void
713 install_var_field (tree var, bool by_ref, int mask, omp_context *ctx)
714 {
715 tree field, type, sfield = NULL_TREE;
716 splay_tree_key key = (splay_tree_key) var;
717
718 if ((mask & 8) != 0)
719 {
720 key = (splay_tree_key) &DECL_UID (var);
721 gcc_checking_assert (key != (splay_tree_key) var);
722 }
723 gcc_assert ((mask & 1) == 0
724 || !splay_tree_lookup (ctx->field_map, key));
725 gcc_assert ((mask & 2) == 0 || !ctx->sfield_map
726 || !splay_tree_lookup (ctx->sfield_map, key));
727 gcc_assert ((mask & 3) == 3
728 || !is_gimple_omp_oacc (ctx->stmt));
729
730 type = TREE_TYPE (var);
731 /* Prevent redeclaring the var in the split-off function with a restrict
732 pointer type. Note that we only clear type itself, restrict qualifiers in
733 the pointed-to type will be ignored by points-to analysis. */
734 if (POINTER_TYPE_P (type)
735 && TYPE_RESTRICT (type))
736 type = build_qualified_type (type, TYPE_QUALS (type) & ~TYPE_QUAL_RESTRICT);
737
738 if (mask & 4)
739 {
740 gcc_assert (TREE_CODE (type) == ARRAY_TYPE);
741 type = build_pointer_type (build_pointer_type (type));
742 }
743 else if (by_ref)
744 type = build_pointer_type (type);
745 else if ((mask & 3) == 1 && omp_is_reference (var))
746 type = TREE_TYPE (type);
747
748 field = build_decl (DECL_SOURCE_LOCATION (var),
749 FIELD_DECL, DECL_NAME (var), type);
750
751 /* Remember what variable this field was created for. This does have a
752 side effect of making dwarf2out ignore this member, so for helpful
753 debugging we clear it later in delete_omp_context. */
754 DECL_ABSTRACT_ORIGIN (field) = var;
755 if (type == TREE_TYPE (var))
756 {
757 SET_DECL_ALIGN (field, DECL_ALIGN (var));
758 DECL_USER_ALIGN (field) = DECL_USER_ALIGN (var);
759 TREE_THIS_VOLATILE (field) = TREE_THIS_VOLATILE (var);
760 }
761 else
762 SET_DECL_ALIGN (field, TYPE_ALIGN (type));
763
764 if ((mask & 3) == 3)
765 {
766 insert_field_into_struct (ctx->record_type, field);
767 if (ctx->srecord_type)
768 {
769 sfield = build_decl (DECL_SOURCE_LOCATION (var),
770 FIELD_DECL, DECL_NAME (var), type);
771 DECL_ABSTRACT_ORIGIN (sfield) = var;
772 SET_DECL_ALIGN (sfield, DECL_ALIGN (field));
773 DECL_USER_ALIGN (sfield) = DECL_USER_ALIGN (field);
774 TREE_THIS_VOLATILE (sfield) = TREE_THIS_VOLATILE (field);
775 insert_field_into_struct (ctx->srecord_type, sfield);
776 }
777 }
778 else
779 {
780 if (ctx->srecord_type == NULL_TREE)
781 {
782 tree t;
783
784 ctx->srecord_type = lang_hooks.types.make_type (RECORD_TYPE);
785 ctx->sfield_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
786 for (t = TYPE_FIELDS (ctx->record_type); t ; t = TREE_CHAIN (t))
787 {
788 sfield = build_decl (DECL_SOURCE_LOCATION (t),
789 FIELD_DECL, DECL_NAME (t), TREE_TYPE (t));
790 DECL_ABSTRACT_ORIGIN (sfield) = DECL_ABSTRACT_ORIGIN (t);
791 insert_field_into_struct (ctx->srecord_type, sfield);
792 splay_tree_insert (ctx->sfield_map,
793 (splay_tree_key) DECL_ABSTRACT_ORIGIN (t),
794 (splay_tree_value) sfield);
795 }
796 }
797 sfield = field;
798 insert_field_into_struct ((mask & 1) ? ctx->record_type
799 : ctx->srecord_type, field);
800 }
801
802 if (mask & 1)
803 splay_tree_insert (ctx->field_map, key, (splay_tree_value) field);
804 if ((mask & 2) && ctx->sfield_map)
805 splay_tree_insert (ctx->sfield_map, key, (splay_tree_value) sfield);
806 }
807
808 static tree
809 install_var_local (tree var, omp_context *ctx)
810 {
811 tree new_var = omp_copy_decl_1 (var, ctx);
812 insert_decl_map (&ctx->cb, var, new_var);
813 return new_var;
814 }
815
816 /* Adjust the replacement for DECL in CTX for the new context. This means
817 copying the DECL_VALUE_EXPR, and fixing up the type. */
818
819 static void
820 fixup_remapped_decl (tree decl, omp_context *ctx, bool private_debug)
821 {
822 tree new_decl, size;
823
824 new_decl = lookup_decl (decl, ctx);
825
826 TREE_TYPE (new_decl) = remap_type (TREE_TYPE (decl), &ctx->cb);
827
828 if ((!TREE_CONSTANT (DECL_SIZE (new_decl)) || private_debug)
829 && DECL_HAS_VALUE_EXPR_P (decl))
830 {
831 tree ve = DECL_VALUE_EXPR (decl);
832 walk_tree (&ve, copy_tree_body_r, &ctx->cb, NULL);
833 SET_DECL_VALUE_EXPR (new_decl, ve);
834 DECL_HAS_VALUE_EXPR_P (new_decl) = 1;
835 }
836
837 if (!TREE_CONSTANT (DECL_SIZE (new_decl)))
838 {
839 size = remap_decl (DECL_SIZE (decl), &ctx->cb);
840 if (size == error_mark_node)
841 size = TYPE_SIZE (TREE_TYPE (new_decl));
842 DECL_SIZE (new_decl) = size;
843
844 size = remap_decl (DECL_SIZE_UNIT (decl), &ctx->cb);
845 if (size == error_mark_node)
846 size = TYPE_SIZE_UNIT (TREE_TYPE (new_decl));
847 DECL_SIZE_UNIT (new_decl) = size;
848 }
849 }
850
851 /* The callback for remap_decl. Search all containing contexts for a
852 mapping of the variable; this avoids having to duplicate the splay
853 tree ahead of time. We know a mapping doesn't already exist in the
854 given context. Create new mappings to implement default semantics. */
855
856 static tree
857 omp_copy_decl (tree var, copy_body_data *cb)
858 {
859 omp_context *ctx = (omp_context *) cb;
860 tree new_var;
861
862 if (TREE_CODE (var) == LABEL_DECL)
863 {
864 if (FORCED_LABEL (var) || DECL_NONLOCAL (var))
865 return var;
866 new_var = create_artificial_label (DECL_SOURCE_LOCATION (var));
867 DECL_CONTEXT (new_var) = current_function_decl;
868 insert_decl_map (&ctx->cb, var, new_var);
869 return new_var;
870 }
871
872 while (!is_taskreg_ctx (ctx))
873 {
874 ctx = ctx->outer;
875 if (ctx == NULL)
876 return var;
877 new_var = maybe_lookup_decl (var, ctx);
878 if (new_var)
879 return new_var;
880 }
881
882 if (is_global_var (var) || decl_function_context (var) != ctx->cb.src_fn)
883 return var;
884
885 return error_mark_node;
886 }
887
888 /* Create a new context, with OUTER_CTX being the surrounding context. */
889
890 static omp_context *
891 new_omp_context (gimple *stmt, omp_context *outer_ctx)
892 {
893 omp_context *ctx = XCNEW (omp_context);
894
895 splay_tree_insert (all_contexts, (splay_tree_key) stmt,
896 (splay_tree_value) ctx);
897 ctx->stmt = stmt;
898
899 if (outer_ctx)
900 {
901 ctx->outer = outer_ctx;
902 ctx->cb = outer_ctx->cb;
903 ctx->cb.block = NULL;
904 ctx->depth = outer_ctx->depth + 1;
905 }
906 else
907 {
908 ctx->cb.src_fn = current_function_decl;
909 ctx->cb.dst_fn = current_function_decl;
910 ctx->cb.src_node = cgraph_node::get (current_function_decl);
911 gcc_checking_assert (ctx->cb.src_node);
912 ctx->cb.dst_node = ctx->cb.src_node;
913 ctx->cb.src_cfun = cfun;
914 ctx->cb.copy_decl = omp_copy_decl;
915 ctx->cb.eh_lp_nr = 0;
916 ctx->cb.transform_call_graph_edges = CB_CGE_MOVE;
917 ctx->cb.adjust_array_error_bounds = true;
918 ctx->cb.dont_remap_vla_if_no_change = true;
919 ctx->depth = 1;
920 }
921
922 ctx->cb.decl_map = new hash_map<tree, tree>;
923
924 return ctx;
925 }
926
927 static gimple_seq maybe_catch_exception (gimple_seq);
928
929 /* Finalize task copyfn. */
930
931 static void
932 finalize_task_copyfn (gomp_task *task_stmt)
933 {
934 struct function *child_cfun;
935 tree child_fn;
936 gimple_seq seq = NULL, new_seq;
937 gbind *bind;
938
939 child_fn = gimple_omp_task_copy_fn (task_stmt);
940 if (child_fn == NULL_TREE)
941 return;
942
943 child_cfun = DECL_STRUCT_FUNCTION (child_fn);
944 DECL_STRUCT_FUNCTION (child_fn)->curr_properties = cfun->curr_properties;
945
946 push_cfun (child_cfun);
947 bind = gimplify_body (child_fn, false);
948 gimple_seq_add_stmt (&seq, bind);
949 new_seq = maybe_catch_exception (seq);
950 if (new_seq != seq)
951 {
952 bind = gimple_build_bind (NULL, new_seq, NULL);
953 seq = NULL;
954 gimple_seq_add_stmt (&seq, bind);
955 }
956 gimple_set_body (child_fn, seq);
957 pop_cfun ();
958
959 /* Inform the callgraph about the new function. */
960 cgraph_node *node = cgraph_node::get_create (child_fn);
961 node->parallelized_function = 1;
962 cgraph_node::add_new_function (child_fn, false);
963 }
964
965 /* Destroy a omp_context data structures. Called through the splay tree
966 value delete callback. */
967
968 static void
969 delete_omp_context (splay_tree_value value)
970 {
971 omp_context *ctx = (omp_context *) value;
972
973 delete ctx->cb.decl_map;
974
975 if (ctx->field_map)
976 splay_tree_delete (ctx->field_map);
977 if (ctx->sfield_map)
978 splay_tree_delete (ctx->sfield_map);
979
980 /* We hijacked DECL_ABSTRACT_ORIGIN earlier. We need to clear it before
981 it produces corrupt debug information. */
982 if (ctx->record_type)
983 {
984 tree t;
985 for (t = TYPE_FIELDS (ctx->record_type); t ; t = DECL_CHAIN (t))
986 DECL_ABSTRACT_ORIGIN (t) = NULL;
987 }
988 if (ctx->srecord_type)
989 {
990 tree t;
991 for (t = TYPE_FIELDS (ctx->srecord_type); t ; t = DECL_CHAIN (t))
992 DECL_ABSTRACT_ORIGIN (t) = NULL;
993 }
994
995 if (is_task_ctx (ctx))
996 finalize_task_copyfn (as_a <gomp_task *> (ctx->stmt));
997
998 if (ctx->task_reduction_map)
999 {
1000 ctx->task_reductions.release ();
1001 delete ctx->task_reduction_map;
1002 }
1003
1004 delete ctx->lastprivate_conditional_map;
1005
1006 XDELETE (ctx);
1007 }
1008
1009 /* Fix up RECEIVER_DECL with a type that has been remapped to the child
1010 context. */
1011
1012 static void
1013 fixup_child_record_type (omp_context *ctx)
1014 {
1015 tree f, type = ctx->record_type;
1016
1017 if (!ctx->receiver_decl)
1018 return;
1019 /* ??? It isn't sufficient to just call remap_type here, because
1020 variably_modified_type_p doesn't work the way we expect for
1021 record types. Testing each field for whether it needs remapping
1022 and creating a new record by hand works, however. */
1023 for (f = TYPE_FIELDS (type); f ; f = DECL_CHAIN (f))
1024 if (variably_modified_type_p (TREE_TYPE (f), ctx->cb.src_fn))
1025 break;
1026 if (f)
1027 {
1028 tree name, new_fields = NULL;
1029
1030 type = lang_hooks.types.make_type (RECORD_TYPE);
1031 name = DECL_NAME (TYPE_NAME (ctx->record_type));
1032 name = build_decl (DECL_SOURCE_LOCATION (ctx->receiver_decl),
1033 TYPE_DECL, name, type);
1034 TYPE_NAME (type) = name;
1035
1036 for (f = TYPE_FIELDS (ctx->record_type); f ; f = DECL_CHAIN (f))
1037 {
1038 tree new_f = copy_node (f);
1039 DECL_CONTEXT (new_f) = type;
1040 TREE_TYPE (new_f) = remap_type (TREE_TYPE (f), &ctx->cb);
1041 DECL_CHAIN (new_f) = new_fields;
1042 walk_tree (&DECL_SIZE (new_f), copy_tree_body_r, &ctx->cb, NULL);
1043 walk_tree (&DECL_SIZE_UNIT (new_f), copy_tree_body_r,
1044 &ctx->cb, NULL);
1045 walk_tree (&DECL_FIELD_OFFSET (new_f), copy_tree_body_r,
1046 &ctx->cb, NULL);
1047 new_fields = new_f;
1048
1049 /* Arrange to be able to look up the receiver field
1050 given the sender field. */
1051 splay_tree_insert (ctx->field_map, (splay_tree_key) f,
1052 (splay_tree_value) new_f);
1053 }
1054 TYPE_FIELDS (type) = nreverse (new_fields);
1055 layout_type (type);
1056 }
1057
1058 /* In a target region we never modify any of the pointers in *.omp_data_i,
1059 so attempt to help the optimizers. */
1060 if (is_gimple_omp_offloaded (ctx->stmt))
1061 type = build_qualified_type (type, TYPE_QUAL_CONST);
1062
1063 TREE_TYPE (ctx->receiver_decl)
1064 = build_qualified_type (build_reference_type (type), TYPE_QUAL_RESTRICT);
1065 }
1066
1067 /* Instantiate decls as necessary in CTX to satisfy the data sharing
1068 specified by CLAUSES. */
1069
1070 static void
1071 scan_sharing_clauses (tree clauses, omp_context *ctx)
1072 {
1073 tree c, decl;
1074 bool scan_array_reductions = false;
1075
1076 for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
1077 {
1078 bool by_ref;
1079
1080 switch (OMP_CLAUSE_CODE (c))
1081 {
1082 case OMP_CLAUSE_PRIVATE:
1083 decl = OMP_CLAUSE_DECL (c);
1084 if (OMP_CLAUSE_PRIVATE_OUTER_REF (c))
1085 goto do_private;
1086 else if (!is_variable_sized (decl))
1087 install_var_local (decl, ctx);
1088 break;
1089
1090 case OMP_CLAUSE_SHARED:
1091 decl = OMP_CLAUSE_DECL (c);
1092 /* Ignore shared directives in teams construct inside of
1093 target construct. */
1094 if (gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS
1095 && !is_host_teams_ctx (ctx))
1096 {
1097 /* Global variables don't need to be copied,
1098 the receiver side will use them directly. */
1099 tree odecl = maybe_lookup_decl_in_outer_ctx (decl, ctx);
1100 if (is_global_var (odecl))
1101 break;
1102 insert_decl_map (&ctx->cb, decl, odecl);
1103 break;
1104 }
1105 gcc_assert (is_taskreg_ctx (ctx));
1106 gcc_assert (!COMPLETE_TYPE_P (TREE_TYPE (decl))
1107 || !is_variable_sized (decl));
1108 /* Global variables don't need to be copied,
1109 the receiver side will use them directly. */
1110 if (is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx)))
1111 break;
1112 if (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
1113 {
1114 use_pointer_for_field (decl, ctx);
1115 break;
1116 }
1117 by_ref = use_pointer_for_field (decl, NULL);
1118 if ((! TREE_READONLY (decl) && !OMP_CLAUSE_SHARED_READONLY (c))
1119 || TREE_ADDRESSABLE (decl)
1120 || by_ref
1121 || omp_is_reference (decl))
1122 {
1123 by_ref = use_pointer_for_field (decl, ctx);
1124 install_var_field (decl, by_ref, 3, ctx);
1125 install_var_local (decl, ctx);
1126 break;
1127 }
1128 /* We don't need to copy const scalar vars back. */
1129 OMP_CLAUSE_SET_CODE (c, OMP_CLAUSE_FIRSTPRIVATE);
1130 goto do_private;
1131
1132 case OMP_CLAUSE_REDUCTION:
1133 case OMP_CLAUSE_IN_REDUCTION:
1134 decl = OMP_CLAUSE_DECL (c);
1135 if (TREE_CODE (decl) == MEM_REF)
1136 {
1137 tree t = TREE_OPERAND (decl, 0);
1138 if (TREE_CODE (t) == POINTER_PLUS_EXPR)
1139 t = TREE_OPERAND (t, 0);
1140 if (TREE_CODE (t) == INDIRECT_REF
1141 || TREE_CODE (t) == ADDR_EXPR)
1142 t = TREE_OPERAND (t, 0);
1143 install_var_local (t, ctx);
1144 if (is_taskreg_ctx (ctx)
1145 && (!is_global_var (maybe_lookup_decl_in_outer_ctx (t, ctx))
1146 || (is_task_ctx (ctx)
1147 && (TREE_CODE (TREE_TYPE (t)) == POINTER_TYPE
1148 || (TREE_CODE (TREE_TYPE (t)) == REFERENCE_TYPE
1149 && (TREE_CODE (TREE_TYPE (TREE_TYPE (t)))
1150 == POINTER_TYPE)))))
1151 && !is_variable_sized (t)
1152 && (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION
1153 || (!OMP_CLAUSE_REDUCTION_TASK (c)
1154 && !is_task_ctx (ctx))))
1155 {
1156 by_ref = use_pointer_for_field (t, NULL);
1157 if (is_task_ctx (ctx)
1158 && TREE_CODE (TREE_TYPE (t)) == REFERENCE_TYPE
1159 && TREE_CODE (TREE_TYPE (TREE_TYPE (t))) == POINTER_TYPE)
1160 {
1161 install_var_field (t, false, 1, ctx);
1162 install_var_field (t, by_ref, 2, ctx);
1163 }
1164 else
1165 install_var_field (t, by_ref, 3, ctx);
1166 }
1167 break;
1168 }
1169 if (is_task_ctx (ctx)
1170 || (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
1171 && OMP_CLAUSE_REDUCTION_TASK (c)
1172 && is_parallel_ctx (ctx)))
1173 {
1174 /* Global variables don't need to be copied,
1175 the receiver side will use them directly. */
1176 if (!is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx)))
1177 {
1178 by_ref = use_pointer_for_field (decl, ctx);
1179 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_IN_REDUCTION)
1180 install_var_field (decl, by_ref, 3, ctx);
1181 }
1182 install_var_local (decl, ctx);
1183 break;
1184 }
1185 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
1186 && OMP_CLAUSE_REDUCTION_TASK (c))
1187 {
1188 install_var_local (decl, ctx);
1189 break;
1190 }
1191 goto do_private;
1192
1193 case OMP_CLAUSE_LASTPRIVATE:
1194 /* Let the corresponding firstprivate clause create
1195 the variable. */
1196 if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
1197 break;
1198 /* FALLTHRU */
1199
1200 case OMP_CLAUSE_FIRSTPRIVATE:
1201 case OMP_CLAUSE_LINEAR:
1202 decl = OMP_CLAUSE_DECL (c);
1203 do_private:
1204 if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE
1205 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_IS_DEVICE_PTR)
1206 && is_gimple_omp_offloaded (ctx->stmt))
1207 {
1208 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
1209 install_var_field (decl, !omp_is_reference (decl), 3, ctx);
1210 else if (TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
1211 install_var_field (decl, true, 3, ctx);
1212 else
1213 install_var_field (decl, false, 3, ctx);
1214 }
1215 if (is_variable_sized (decl))
1216 {
1217 if (is_task_ctx (ctx))
1218 install_var_field (decl, false, 1, ctx);
1219 break;
1220 }
1221 else if (is_taskreg_ctx (ctx))
1222 {
1223 bool global
1224 = is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx));
1225 by_ref = use_pointer_for_field (decl, NULL);
1226
1227 if (is_task_ctx (ctx)
1228 && (global || by_ref || omp_is_reference (decl)))
1229 {
1230 install_var_field (decl, false, 1, ctx);
1231 if (!global)
1232 install_var_field (decl, by_ref, 2, ctx);
1233 }
1234 else if (!global)
1235 install_var_field (decl, by_ref, 3, ctx);
1236 }
1237 install_var_local (decl, ctx);
1238 break;
1239
1240 case OMP_CLAUSE_USE_DEVICE_PTR:
1241 decl = OMP_CLAUSE_DECL (c);
1242 if (TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
1243 install_var_field (decl, true, 3, ctx);
1244 else
1245 install_var_field (decl, false, 3, ctx);
1246 if (DECL_SIZE (decl)
1247 && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
1248 {
1249 tree decl2 = DECL_VALUE_EXPR (decl);
1250 gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
1251 decl2 = TREE_OPERAND (decl2, 0);
1252 gcc_assert (DECL_P (decl2));
1253 install_var_local (decl2, ctx);
1254 }
1255 install_var_local (decl, ctx);
1256 break;
1257
1258 case OMP_CLAUSE_IS_DEVICE_PTR:
1259 decl = OMP_CLAUSE_DECL (c);
1260 goto do_private;
1261
1262 case OMP_CLAUSE__LOOPTEMP_:
1263 case OMP_CLAUSE__REDUCTEMP_:
1264 gcc_assert (is_taskreg_ctx (ctx));
1265 decl = OMP_CLAUSE_DECL (c);
1266 install_var_field (decl, false, 3, ctx);
1267 install_var_local (decl, ctx);
1268 break;
1269
1270 case OMP_CLAUSE_COPYPRIVATE:
1271 case OMP_CLAUSE_COPYIN:
1272 decl = OMP_CLAUSE_DECL (c);
1273 by_ref = use_pointer_for_field (decl, NULL);
1274 install_var_field (decl, by_ref, 3, ctx);
1275 break;
1276
1277 case OMP_CLAUSE_FINAL:
1278 case OMP_CLAUSE_IF:
1279 case OMP_CLAUSE_NUM_THREADS:
1280 case OMP_CLAUSE_NUM_TEAMS:
1281 case OMP_CLAUSE_THREAD_LIMIT:
1282 case OMP_CLAUSE_DEVICE:
1283 case OMP_CLAUSE_SCHEDULE:
1284 case OMP_CLAUSE_DIST_SCHEDULE:
1285 case OMP_CLAUSE_DEPEND:
1286 case OMP_CLAUSE_PRIORITY:
1287 case OMP_CLAUSE_GRAINSIZE:
1288 case OMP_CLAUSE_NUM_TASKS:
1289 case OMP_CLAUSE_NUM_GANGS:
1290 case OMP_CLAUSE_NUM_WORKERS:
1291 case OMP_CLAUSE_VECTOR_LENGTH:
1292 if (ctx->outer)
1293 scan_omp_op (&OMP_CLAUSE_OPERAND (c, 0), ctx->outer);
1294 break;
1295
1296 case OMP_CLAUSE_TO:
1297 case OMP_CLAUSE_FROM:
1298 case OMP_CLAUSE_MAP:
1299 if (ctx->outer)
1300 scan_omp_op (&OMP_CLAUSE_SIZE (c), ctx->outer);
1301 decl = OMP_CLAUSE_DECL (c);
1302 /* Global variables with "omp declare target" attribute
1303 don't need to be copied, the receiver side will use them
1304 directly. However, global variables with "omp declare target link"
1305 attribute need to be copied. Or when ALWAYS modifier is used. */
1306 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
1307 && DECL_P (decl)
1308 && ((OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_FIRSTPRIVATE_POINTER
1309 && (OMP_CLAUSE_MAP_KIND (c)
1310 != GOMP_MAP_FIRSTPRIVATE_REFERENCE))
1311 || TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
1312 && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_ALWAYS_TO
1313 && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_ALWAYS_FROM
1314 && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_ALWAYS_TOFROM
1315 && is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx))
1316 && varpool_node::get_create (decl)->offloadable
1317 && !lookup_attribute ("omp declare target link",
1318 DECL_ATTRIBUTES (decl)))
1319 break;
1320 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
1321 && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_POINTER)
1322 {
1323 /* Ignore GOMP_MAP_POINTER kind for arrays in regions that are
1324 not offloaded; there is nothing to map for those. */
1325 if (!is_gimple_omp_offloaded (ctx->stmt)
1326 && !POINTER_TYPE_P (TREE_TYPE (decl))
1327 && !OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c))
1328 break;
1329 }
1330 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
1331 && (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER
1332 || (OMP_CLAUSE_MAP_KIND (c)
1333 == GOMP_MAP_FIRSTPRIVATE_REFERENCE)))
1334 {
1335 if (TREE_CODE (decl) == COMPONENT_REF
1336 || (TREE_CODE (decl) == INDIRECT_REF
1337 && TREE_CODE (TREE_OPERAND (decl, 0)) == COMPONENT_REF
1338 && (TREE_CODE (TREE_TYPE (TREE_OPERAND (decl, 0)))
1339 == REFERENCE_TYPE)))
1340 break;
1341 if (DECL_SIZE (decl)
1342 && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
1343 {
1344 tree decl2 = DECL_VALUE_EXPR (decl);
1345 gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
1346 decl2 = TREE_OPERAND (decl2, 0);
1347 gcc_assert (DECL_P (decl2));
1348 install_var_local (decl2, ctx);
1349 }
1350 install_var_local (decl, ctx);
1351 break;
1352 }
1353 if (DECL_P (decl))
1354 {
1355 if (DECL_SIZE (decl)
1356 && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
1357 {
1358 tree decl2 = DECL_VALUE_EXPR (decl);
1359 gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
1360 decl2 = TREE_OPERAND (decl2, 0);
1361 gcc_assert (DECL_P (decl2));
1362 install_var_field (decl2, true, 3, ctx);
1363 install_var_local (decl2, ctx);
1364 install_var_local (decl, ctx);
1365 }
1366 else
1367 {
1368 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
1369 && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_POINTER
1370 && !OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c)
1371 && TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
1372 install_var_field (decl, true, 7, ctx);
1373 else
1374 install_var_field (decl, true, 3, ctx);
1375 if (is_gimple_omp_offloaded (ctx->stmt)
1376 && !OMP_CLAUSE_MAP_IN_REDUCTION (c))
1377 install_var_local (decl, ctx);
1378 }
1379 }
1380 else
1381 {
1382 tree base = get_base_address (decl);
1383 tree nc = OMP_CLAUSE_CHAIN (c);
1384 if (DECL_P (base)
1385 && nc != NULL_TREE
1386 && OMP_CLAUSE_CODE (nc) == OMP_CLAUSE_MAP
1387 && OMP_CLAUSE_DECL (nc) == base
1388 && OMP_CLAUSE_MAP_KIND (nc) == GOMP_MAP_POINTER
1389 && integer_zerop (OMP_CLAUSE_SIZE (nc)))
1390 {
1391 OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c) = 1;
1392 OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (nc) = 1;
1393 }
1394 else
1395 {
1396 if (ctx->outer)
1397 {
1398 scan_omp_op (&OMP_CLAUSE_DECL (c), ctx->outer);
1399 decl = OMP_CLAUSE_DECL (c);
1400 }
1401 gcc_assert (!splay_tree_lookup (ctx->field_map,
1402 (splay_tree_key) decl));
1403 tree field
1404 = build_decl (OMP_CLAUSE_LOCATION (c),
1405 FIELD_DECL, NULL_TREE, ptr_type_node);
1406 SET_DECL_ALIGN (field, TYPE_ALIGN (ptr_type_node));
1407 insert_field_into_struct (ctx->record_type, field);
1408 splay_tree_insert (ctx->field_map, (splay_tree_key) decl,
1409 (splay_tree_value) field);
1410 }
1411 }
1412 break;
1413
1414 case OMP_CLAUSE__GRIDDIM_:
1415 if (ctx->outer)
1416 {
1417 scan_omp_op (&OMP_CLAUSE__GRIDDIM__SIZE (c), ctx->outer);
1418 scan_omp_op (&OMP_CLAUSE__GRIDDIM__GROUP (c), ctx->outer);
1419 }
1420 break;
1421
1422 case OMP_CLAUSE_ORDER:
1423 ctx->order_concurrent = true;
1424 break;
1425
1426 case OMP_CLAUSE_BIND:
1427 ctx->loop_p = true;
1428 break;
1429
1430 case OMP_CLAUSE_NOWAIT:
1431 case OMP_CLAUSE_ORDERED:
1432 case OMP_CLAUSE_COLLAPSE:
1433 case OMP_CLAUSE_UNTIED:
1434 case OMP_CLAUSE_MERGEABLE:
1435 case OMP_CLAUSE_PROC_BIND:
1436 case OMP_CLAUSE_SAFELEN:
1437 case OMP_CLAUSE_SIMDLEN:
1438 case OMP_CLAUSE_THREADS:
1439 case OMP_CLAUSE_SIMD:
1440 case OMP_CLAUSE_NOGROUP:
1441 case OMP_CLAUSE_DEFAULTMAP:
1442 case OMP_CLAUSE_ASYNC:
1443 case OMP_CLAUSE_WAIT:
1444 case OMP_CLAUSE_GANG:
1445 case OMP_CLAUSE_WORKER:
1446 case OMP_CLAUSE_VECTOR:
1447 case OMP_CLAUSE_INDEPENDENT:
1448 case OMP_CLAUSE_AUTO:
1449 case OMP_CLAUSE_SEQ:
1450 case OMP_CLAUSE_TILE:
1451 case OMP_CLAUSE__SIMT_:
1452 case OMP_CLAUSE_DEFAULT:
1453 case OMP_CLAUSE_NONTEMPORAL:
1454 case OMP_CLAUSE_IF_PRESENT:
1455 case OMP_CLAUSE_FINALIZE:
1456 case OMP_CLAUSE_TASK_REDUCTION:
1457 break;
1458
1459 case OMP_CLAUSE_ALIGNED:
1460 decl = OMP_CLAUSE_DECL (c);
1461 if (is_global_var (decl)
1462 && TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
1463 install_var_local (decl, ctx);
1464 break;
1465
1466 case OMP_CLAUSE__CONDTEMP_:
1467 decl = OMP_CLAUSE_DECL (c);
1468 if (is_parallel_ctx (ctx))
1469 {
1470 install_var_field (decl, false, 3, ctx);
1471 install_var_local (decl, ctx);
1472 }
1473 else if (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
1474 && gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_SIMD
1475 && !OMP_CLAUSE__CONDTEMP__ITER (c))
1476 install_var_local (decl, ctx);
1477 break;
1478
1479 case OMP_CLAUSE__CACHE_:
1480 default:
1481 gcc_unreachable ();
1482 }
1483 }
1484
1485 for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
1486 {
1487 switch (OMP_CLAUSE_CODE (c))
1488 {
1489 case OMP_CLAUSE_LASTPRIVATE:
1490 /* Let the corresponding firstprivate clause create
1491 the variable. */
1492 if (OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c))
1493 scan_array_reductions = true;
1494 if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
1495 break;
1496 /* FALLTHRU */
1497
1498 case OMP_CLAUSE_FIRSTPRIVATE:
1499 case OMP_CLAUSE_PRIVATE:
1500 case OMP_CLAUSE_LINEAR:
1501 case OMP_CLAUSE_IS_DEVICE_PTR:
1502 decl = OMP_CLAUSE_DECL (c);
1503 if (is_variable_sized (decl))
1504 {
1505 if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE
1506 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_IS_DEVICE_PTR)
1507 && is_gimple_omp_offloaded (ctx->stmt))
1508 {
1509 tree decl2 = DECL_VALUE_EXPR (decl);
1510 gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
1511 decl2 = TREE_OPERAND (decl2, 0);
1512 gcc_assert (DECL_P (decl2));
1513 install_var_local (decl2, ctx);
1514 fixup_remapped_decl (decl2, ctx, false);
1515 }
1516 install_var_local (decl, ctx);
1517 }
1518 fixup_remapped_decl (decl, ctx,
1519 OMP_CLAUSE_CODE (c) == OMP_CLAUSE_PRIVATE
1520 && OMP_CLAUSE_PRIVATE_DEBUG (c));
1521 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
1522 && OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c))
1523 scan_array_reductions = true;
1524 break;
1525
1526 case OMP_CLAUSE_REDUCTION:
1527 case OMP_CLAUSE_IN_REDUCTION:
1528 decl = OMP_CLAUSE_DECL (c);
1529 if (TREE_CODE (decl) != MEM_REF)
1530 {
1531 if (is_variable_sized (decl))
1532 install_var_local (decl, ctx);
1533 fixup_remapped_decl (decl, ctx, false);
1534 }
1535 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
1536 scan_array_reductions = true;
1537 break;
1538
1539 case OMP_CLAUSE_TASK_REDUCTION:
1540 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
1541 scan_array_reductions = true;
1542 break;
1543
1544 case OMP_CLAUSE_SHARED:
1545 /* Ignore shared directives in teams construct inside of
1546 target construct. */
1547 if (gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS
1548 && !is_host_teams_ctx (ctx))
1549 break;
1550 decl = OMP_CLAUSE_DECL (c);
1551 if (is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx)))
1552 break;
1553 if (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
1554 {
1555 if (is_global_var (maybe_lookup_decl_in_outer_ctx (decl,
1556 ctx->outer)))
1557 break;
1558 bool by_ref = use_pointer_for_field (decl, ctx);
1559 install_var_field (decl, by_ref, 11, ctx);
1560 break;
1561 }
1562 fixup_remapped_decl (decl, ctx, false);
1563 break;
1564
1565 case OMP_CLAUSE_MAP:
1566 if (!is_gimple_omp_offloaded (ctx->stmt))
1567 break;
1568 decl = OMP_CLAUSE_DECL (c);
1569 if (DECL_P (decl)
1570 && ((OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_FIRSTPRIVATE_POINTER
1571 && (OMP_CLAUSE_MAP_KIND (c)
1572 != GOMP_MAP_FIRSTPRIVATE_REFERENCE))
1573 || TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
1574 && is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx))
1575 && varpool_node::get_create (decl)->offloadable)
1576 break;
1577 if (DECL_P (decl))
1578 {
1579 if ((OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_POINTER
1580 || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER)
1581 && TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE
1582 && !COMPLETE_TYPE_P (TREE_TYPE (decl)))
1583 {
1584 tree new_decl = lookup_decl (decl, ctx);
1585 TREE_TYPE (new_decl)
1586 = remap_type (TREE_TYPE (decl), &ctx->cb);
1587 }
1588 else if (DECL_SIZE (decl)
1589 && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
1590 {
1591 tree decl2 = DECL_VALUE_EXPR (decl);
1592 gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
1593 decl2 = TREE_OPERAND (decl2, 0);
1594 gcc_assert (DECL_P (decl2));
1595 fixup_remapped_decl (decl2, ctx, false);
1596 fixup_remapped_decl (decl, ctx, true);
1597 }
1598 else
1599 fixup_remapped_decl (decl, ctx, false);
1600 }
1601 break;
1602
1603 case OMP_CLAUSE_COPYPRIVATE:
1604 case OMP_CLAUSE_COPYIN:
1605 case OMP_CLAUSE_DEFAULT:
1606 case OMP_CLAUSE_IF:
1607 case OMP_CLAUSE_NUM_THREADS:
1608 case OMP_CLAUSE_NUM_TEAMS:
1609 case OMP_CLAUSE_THREAD_LIMIT:
1610 case OMP_CLAUSE_DEVICE:
1611 case OMP_CLAUSE_SCHEDULE:
1612 case OMP_CLAUSE_DIST_SCHEDULE:
1613 case OMP_CLAUSE_NOWAIT:
1614 case OMP_CLAUSE_ORDERED:
1615 case OMP_CLAUSE_COLLAPSE:
1616 case OMP_CLAUSE_UNTIED:
1617 case OMP_CLAUSE_FINAL:
1618 case OMP_CLAUSE_MERGEABLE:
1619 case OMP_CLAUSE_PROC_BIND:
1620 case OMP_CLAUSE_SAFELEN:
1621 case OMP_CLAUSE_SIMDLEN:
1622 case OMP_CLAUSE_ALIGNED:
1623 case OMP_CLAUSE_DEPEND:
1624 case OMP_CLAUSE__LOOPTEMP_:
1625 case OMP_CLAUSE__REDUCTEMP_:
1626 case OMP_CLAUSE_TO:
1627 case OMP_CLAUSE_FROM:
1628 case OMP_CLAUSE_PRIORITY:
1629 case OMP_CLAUSE_GRAINSIZE:
1630 case OMP_CLAUSE_NUM_TASKS:
1631 case OMP_CLAUSE_THREADS:
1632 case OMP_CLAUSE_SIMD:
1633 case OMP_CLAUSE_NOGROUP:
1634 case OMP_CLAUSE_DEFAULTMAP:
1635 case OMP_CLAUSE_ORDER:
1636 case OMP_CLAUSE_BIND:
1637 case OMP_CLAUSE_USE_DEVICE_PTR:
1638 case OMP_CLAUSE_NONTEMPORAL:
1639 case OMP_CLAUSE_ASYNC:
1640 case OMP_CLAUSE_WAIT:
1641 case OMP_CLAUSE_NUM_GANGS:
1642 case OMP_CLAUSE_NUM_WORKERS:
1643 case OMP_CLAUSE_VECTOR_LENGTH:
1644 case OMP_CLAUSE_GANG:
1645 case OMP_CLAUSE_WORKER:
1646 case OMP_CLAUSE_VECTOR:
1647 case OMP_CLAUSE_INDEPENDENT:
1648 case OMP_CLAUSE_AUTO:
1649 case OMP_CLAUSE_SEQ:
1650 case OMP_CLAUSE_TILE:
1651 case OMP_CLAUSE__GRIDDIM_:
1652 case OMP_CLAUSE__SIMT_:
1653 case OMP_CLAUSE_IF_PRESENT:
1654 case OMP_CLAUSE_FINALIZE:
1655 case OMP_CLAUSE__CONDTEMP_:
1656 break;
1657
1658 case OMP_CLAUSE__CACHE_:
1659 default:
1660 gcc_unreachable ();
1661 }
1662 }
1663
1664 gcc_checking_assert (!scan_array_reductions
1665 || !is_gimple_omp_oacc (ctx->stmt));
1666 if (scan_array_reductions)
1667 {
1668 for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
1669 if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
1670 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_IN_REDUCTION
1671 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_TASK_REDUCTION)
1672 && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
1673 {
1674 scan_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c), ctx);
1675 scan_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c), ctx);
1676 }
1677 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
1678 && OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c))
1679 scan_omp (&OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c), ctx);
1680 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
1681 && OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c))
1682 scan_omp (&OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c), ctx);
1683 }
1684 }
1685
1686 /* Create a new name for omp child function. Returns an identifier. */
1687
1688 static tree
1689 create_omp_child_function_name (bool task_copy)
1690 {
1691 return clone_function_name_numbered (current_function_decl,
1692 task_copy ? "_omp_cpyfn" : "_omp_fn");
1693 }
1694
1695 /* Return true if CTX may belong to offloaded code: either if current function
1696 is offloaded, or any enclosing context corresponds to a target region. */
1697
1698 static bool
1699 omp_maybe_offloaded_ctx (omp_context *ctx)
1700 {
1701 if (cgraph_node::get (current_function_decl)->offloadable)
1702 return true;
1703 for (; ctx; ctx = ctx->outer)
1704 if (is_gimple_omp_offloaded (ctx->stmt))
1705 return true;
1706 return false;
1707 }
1708
1709 /* Build a decl for the omp child function. It'll not contain a body
1710 yet, just the bare decl. */
1711
1712 static void
1713 create_omp_child_function (omp_context *ctx, bool task_copy)
1714 {
1715 tree decl, type, name, t;
1716
1717 name = create_omp_child_function_name (task_copy);
1718 if (task_copy)
1719 type = build_function_type_list (void_type_node, ptr_type_node,
1720 ptr_type_node, NULL_TREE);
1721 else
1722 type = build_function_type_list (void_type_node, ptr_type_node, NULL_TREE);
1723
1724 decl = build_decl (gimple_location (ctx->stmt), FUNCTION_DECL, name, type);
1725
1726 gcc_checking_assert (!is_gimple_omp_oacc (ctx->stmt)
1727 || !task_copy);
1728 if (!task_copy)
1729 ctx->cb.dst_fn = decl;
1730 else
1731 gimple_omp_task_set_copy_fn (ctx->stmt, decl);
1732
1733 TREE_STATIC (decl) = 1;
1734 TREE_USED (decl) = 1;
1735 DECL_ARTIFICIAL (decl) = 1;
1736 DECL_IGNORED_P (decl) = 0;
1737 TREE_PUBLIC (decl) = 0;
1738 DECL_UNINLINABLE (decl) = 1;
1739 DECL_EXTERNAL (decl) = 0;
1740 DECL_CONTEXT (decl) = NULL_TREE;
1741 DECL_INITIAL (decl) = make_node (BLOCK);
1742 BLOCK_SUPERCONTEXT (DECL_INITIAL (decl)) = decl;
1743 DECL_ATTRIBUTES (decl) = DECL_ATTRIBUTES (current_function_decl);
1744 /* Remove omp declare simd attribute from the new attributes. */
1745 if (tree a = lookup_attribute ("omp declare simd", DECL_ATTRIBUTES (decl)))
1746 {
1747 while (tree a2 = lookup_attribute ("omp declare simd", TREE_CHAIN (a)))
1748 a = a2;
1749 a = TREE_CHAIN (a);
1750 for (tree *p = &DECL_ATTRIBUTES (decl); *p != a;)
1751 if (is_attribute_p ("omp declare simd", get_attribute_name (*p)))
1752 *p = TREE_CHAIN (*p);
1753 else
1754 {
1755 tree chain = TREE_CHAIN (*p);
1756 *p = copy_node (*p);
1757 p = &TREE_CHAIN (*p);
1758 *p = chain;
1759 }
1760 }
1761 DECL_FUNCTION_SPECIFIC_OPTIMIZATION (decl)
1762 = DECL_FUNCTION_SPECIFIC_OPTIMIZATION (current_function_decl);
1763 DECL_FUNCTION_SPECIFIC_TARGET (decl)
1764 = DECL_FUNCTION_SPECIFIC_TARGET (current_function_decl);
1765 DECL_FUNCTION_VERSIONED (decl)
1766 = DECL_FUNCTION_VERSIONED (current_function_decl);
1767
1768 if (omp_maybe_offloaded_ctx (ctx))
1769 {
1770 cgraph_node::get_create (decl)->offloadable = 1;
1771 if (ENABLE_OFFLOADING)
1772 g->have_offload = true;
1773 }
1774
1775 if (cgraph_node::get_create (decl)->offloadable
1776 && !lookup_attribute ("omp declare target",
1777 DECL_ATTRIBUTES (current_function_decl)))
1778 {
1779 const char *target_attr = (is_gimple_omp_offloaded (ctx->stmt)
1780 ? "omp target entrypoint"
1781 : "omp declare target");
1782 DECL_ATTRIBUTES (decl)
1783 = tree_cons (get_identifier (target_attr),
1784 NULL_TREE, DECL_ATTRIBUTES (decl));
1785 }
1786
1787 t = build_decl (DECL_SOURCE_LOCATION (decl),
1788 RESULT_DECL, NULL_TREE, void_type_node);
1789 DECL_ARTIFICIAL (t) = 1;
1790 DECL_IGNORED_P (t) = 1;
1791 DECL_CONTEXT (t) = decl;
1792 DECL_RESULT (decl) = t;
1793
1794 tree data_name = get_identifier (".omp_data_i");
1795 t = build_decl (DECL_SOURCE_LOCATION (decl), PARM_DECL, data_name,
1796 ptr_type_node);
1797 DECL_ARTIFICIAL (t) = 1;
1798 DECL_NAMELESS (t) = 1;
1799 DECL_ARG_TYPE (t) = ptr_type_node;
1800 DECL_CONTEXT (t) = current_function_decl;
1801 TREE_USED (t) = 1;
1802 TREE_READONLY (t) = 1;
1803 DECL_ARGUMENTS (decl) = t;
1804 if (!task_copy)
1805 ctx->receiver_decl = t;
1806 else
1807 {
1808 t = build_decl (DECL_SOURCE_LOCATION (decl),
1809 PARM_DECL, get_identifier (".omp_data_o"),
1810 ptr_type_node);
1811 DECL_ARTIFICIAL (t) = 1;
1812 DECL_NAMELESS (t) = 1;
1813 DECL_ARG_TYPE (t) = ptr_type_node;
1814 DECL_CONTEXT (t) = current_function_decl;
1815 TREE_USED (t) = 1;
1816 TREE_ADDRESSABLE (t) = 1;
1817 DECL_CHAIN (t) = DECL_ARGUMENTS (decl);
1818 DECL_ARGUMENTS (decl) = t;
1819 }
1820
1821 /* Allocate memory for the function structure. The call to
1822 allocate_struct_function clobbers CFUN, so we need to restore
1823 it afterward. */
1824 push_struct_function (decl);
1825 cfun->function_end_locus = gimple_location (ctx->stmt);
1826 init_tree_ssa (cfun);
1827 pop_cfun ();
1828 }
1829
1830 /* Callback for walk_gimple_seq. Check if combined parallel
1831 contains gimple_omp_for_combined_into_p OMP_FOR. */
1832
1833 tree
1834 omp_find_combined_for (gimple_stmt_iterator *gsi_p,
1835 bool *handled_ops_p,
1836 struct walk_stmt_info *wi)
1837 {
1838 gimple *stmt = gsi_stmt (*gsi_p);
1839
1840 *handled_ops_p = true;
1841 switch (gimple_code (stmt))
1842 {
1843 WALK_SUBSTMTS;
1844
1845 case GIMPLE_OMP_FOR:
1846 if (gimple_omp_for_combined_into_p (stmt)
1847 && gimple_omp_for_kind (stmt)
1848 == *(const enum gf_mask *) (wi->info))
1849 {
1850 wi->info = stmt;
1851 return integer_zero_node;
1852 }
1853 break;
1854 default:
1855 break;
1856 }
1857 return NULL;
1858 }
1859
1860 /* Add _LOOPTEMP_/_REDUCTEMP_ clauses on OpenMP parallel or task. */
1861
1862 static void
1863 add_taskreg_looptemp_clauses (enum gf_mask msk, gimple *stmt,
1864 omp_context *outer_ctx)
1865 {
1866 struct walk_stmt_info wi;
1867
1868 memset (&wi, 0, sizeof (wi));
1869 wi.val_only = true;
1870 wi.info = (void *) &msk;
1871 walk_gimple_seq (gimple_omp_body (stmt), omp_find_combined_for, NULL, &wi);
1872 if (wi.info != (void *) &msk)
1873 {
1874 gomp_for *for_stmt = as_a <gomp_for *> ((gimple *) wi.info);
1875 struct omp_for_data fd;
1876 omp_extract_for_data (for_stmt, &fd, NULL);
1877 /* We need two temporaries with fd.loop.v type (istart/iend)
1878 and then (fd.collapse - 1) temporaries with the same
1879 type for count2 ... countN-1 vars if not constant. */
1880 size_t count = 2, i;
1881 tree type = fd.iter_type;
1882 if (fd.collapse > 1
1883 && TREE_CODE (fd.loop.n2) != INTEGER_CST)
1884 {
1885 count += fd.collapse - 1;
1886 /* If there are lastprivate clauses on the inner
1887 GIMPLE_OMP_FOR, add one more temporaries for the total number
1888 of iterations (product of count1 ... countN-1). */
1889 if (omp_find_clause (gimple_omp_for_clauses (for_stmt),
1890 OMP_CLAUSE_LASTPRIVATE))
1891 count++;
1892 else if (msk == GF_OMP_FOR_KIND_FOR
1893 && omp_find_clause (gimple_omp_parallel_clauses (stmt),
1894 OMP_CLAUSE_LASTPRIVATE))
1895 count++;
1896 }
1897 for (i = 0; i < count; i++)
1898 {
1899 tree temp = create_tmp_var (type);
1900 tree c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__LOOPTEMP_);
1901 insert_decl_map (&outer_ctx->cb, temp, temp);
1902 OMP_CLAUSE_DECL (c) = temp;
1903 OMP_CLAUSE_CHAIN (c) = gimple_omp_taskreg_clauses (stmt);
1904 gimple_omp_taskreg_set_clauses (stmt, c);
1905 }
1906 }
1907 if (msk == GF_OMP_FOR_KIND_TASKLOOP
1908 && omp_find_clause (gimple_omp_task_clauses (stmt),
1909 OMP_CLAUSE_REDUCTION))
1910 {
1911 tree type = build_pointer_type (pointer_sized_int_node);
1912 tree temp = create_tmp_var (type);
1913 tree c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__REDUCTEMP_);
1914 insert_decl_map (&outer_ctx->cb, temp, temp);
1915 OMP_CLAUSE_DECL (c) = temp;
1916 OMP_CLAUSE_CHAIN (c) = gimple_omp_task_clauses (stmt);
1917 gimple_omp_task_set_clauses (stmt, c);
1918 }
1919 }
1920
1921 /* Scan an OpenMP parallel directive. */
1922
1923 static void
1924 scan_omp_parallel (gimple_stmt_iterator *gsi, omp_context *outer_ctx)
1925 {
1926 omp_context *ctx;
1927 tree name;
1928 gomp_parallel *stmt = as_a <gomp_parallel *> (gsi_stmt (*gsi));
1929
1930 /* Ignore parallel directives with empty bodies, unless there
1931 are copyin clauses. */
1932 if (optimize > 0
1933 && empty_body_p (gimple_omp_body (stmt))
1934 && omp_find_clause (gimple_omp_parallel_clauses (stmt),
1935 OMP_CLAUSE_COPYIN) == NULL)
1936 {
1937 gsi_replace (gsi, gimple_build_nop (), false);
1938 return;
1939 }
1940
1941 if (gimple_omp_parallel_combined_p (stmt))
1942 add_taskreg_looptemp_clauses (GF_OMP_FOR_KIND_FOR, stmt, outer_ctx);
1943 for (tree c = omp_find_clause (gimple_omp_parallel_clauses (stmt),
1944 OMP_CLAUSE_REDUCTION);
1945 c; c = omp_find_clause (OMP_CLAUSE_CHAIN (c), OMP_CLAUSE_REDUCTION))
1946 if (OMP_CLAUSE_REDUCTION_TASK (c))
1947 {
1948 tree type = build_pointer_type (pointer_sized_int_node);
1949 tree temp = create_tmp_var (type);
1950 tree c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__REDUCTEMP_);
1951 if (outer_ctx)
1952 insert_decl_map (&outer_ctx->cb, temp, temp);
1953 OMP_CLAUSE_DECL (c) = temp;
1954 OMP_CLAUSE_CHAIN (c) = gimple_omp_parallel_clauses (stmt);
1955 gimple_omp_parallel_set_clauses (stmt, c);
1956 break;
1957 }
1958 else if (OMP_CLAUSE_CHAIN (c) == NULL_TREE)
1959 break;
1960
1961 ctx = new_omp_context (stmt, outer_ctx);
1962 taskreg_contexts.safe_push (ctx);
1963 if (taskreg_nesting_level > 1)
1964 ctx->is_nested = true;
1965 ctx->field_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
1966 ctx->record_type = lang_hooks.types.make_type (RECORD_TYPE);
1967 name = create_tmp_var_name (".omp_data_s");
1968 name = build_decl (gimple_location (stmt),
1969 TYPE_DECL, name, ctx->record_type);
1970 DECL_ARTIFICIAL (name) = 1;
1971 DECL_NAMELESS (name) = 1;
1972 TYPE_NAME (ctx->record_type) = name;
1973 TYPE_ARTIFICIAL (ctx->record_type) = 1;
1974 if (!gimple_omp_parallel_grid_phony (stmt))
1975 {
1976 create_omp_child_function (ctx, false);
1977 gimple_omp_parallel_set_child_fn (stmt, ctx->cb.dst_fn);
1978 }
1979
1980 scan_sharing_clauses (gimple_omp_parallel_clauses (stmt), ctx);
1981 scan_omp (gimple_omp_body_ptr (stmt), ctx);
1982
1983 if (TYPE_FIELDS (ctx->record_type) == NULL)
1984 ctx->record_type = ctx->receiver_decl = NULL;
1985 }
1986
1987 /* Scan an OpenMP task directive. */
1988
1989 static void
1990 scan_omp_task (gimple_stmt_iterator *gsi, omp_context *outer_ctx)
1991 {
1992 omp_context *ctx;
1993 tree name, t;
1994 gomp_task *stmt = as_a <gomp_task *> (gsi_stmt (*gsi));
1995
1996 /* Ignore task directives with empty bodies, unless they have depend
1997 clause. */
1998 if (optimize > 0
1999 && gimple_omp_body (stmt)
2000 && empty_body_p (gimple_omp_body (stmt))
2001 && !omp_find_clause (gimple_omp_task_clauses (stmt), OMP_CLAUSE_DEPEND))
2002 {
2003 gsi_replace (gsi, gimple_build_nop (), false);
2004 return;
2005 }
2006
2007 if (gimple_omp_task_taskloop_p (stmt))
2008 add_taskreg_looptemp_clauses (GF_OMP_FOR_KIND_TASKLOOP, stmt, outer_ctx);
2009
2010 ctx = new_omp_context (stmt, outer_ctx);
2011
2012 if (gimple_omp_task_taskwait_p (stmt))
2013 {
2014 scan_sharing_clauses (gimple_omp_task_clauses (stmt), ctx);
2015 return;
2016 }
2017
2018 taskreg_contexts.safe_push (ctx);
2019 if (taskreg_nesting_level > 1)
2020 ctx->is_nested = true;
2021 ctx->field_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
2022 ctx->record_type = lang_hooks.types.make_type (RECORD_TYPE);
2023 name = create_tmp_var_name (".omp_data_s");
2024 name = build_decl (gimple_location (stmt),
2025 TYPE_DECL, name, ctx->record_type);
2026 DECL_ARTIFICIAL (name) = 1;
2027 DECL_NAMELESS (name) = 1;
2028 TYPE_NAME (ctx->record_type) = name;
2029 TYPE_ARTIFICIAL (ctx->record_type) = 1;
2030 create_omp_child_function (ctx, false);
2031 gimple_omp_task_set_child_fn (stmt, ctx->cb.dst_fn);
2032
2033 scan_sharing_clauses (gimple_omp_task_clauses (stmt), ctx);
2034
2035 if (ctx->srecord_type)
2036 {
2037 name = create_tmp_var_name (".omp_data_a");
2038 name = build_decl (gimple_location (stmt),
2039 TYPE_DECL, name, ctx->srecord_type);
2040 DECL_ARTIFICIAL (name) = 1;
2041 DECL_NAMELESS (name) = 1;
2042 TYPE_NAME (ctx->srecord_type) = name;
2043 TYPE_ARTIFICIAL (ctx->srecord_type) = 1;
2044 create_omp_child_function (ctx, true);
2045 }
2046
2047 scan_omp (gimple_omp_body_ptr (stmt), ctx);
2048
2049 if (TYPE_FIELDS (ctx->record_type) == NULL)
2050 {
2051 ctx->record_type = ctx->receiver_decl = NULL;
2052 t = build_int_cst (long_integer_type_node, 0);
2053 gimple_omp_task_set_arg_size (stmt, t);
2054 t = build_int_cst (long_integer_type_node, 1);
2055 gimple_omp_task_set_arg_align (stmt, t);
2056 }
2057 }
2058
2059 /* Helper function for finish_taskreg_scan, called through walk_tree.
2060 If maybe_lookup_decl_in_outer_context returns non-NULL for some
2061 tree, replace it in the expression. */
2062
2063 static tree
2064 finish_taskreg_remap (tree *tp, int *walk_subtrees, void *data)
2065 {
2066 if (VAR_P (*tp))
2067 {
2068 omp_context *ctx = (omp_context *) data;
2069 tree t = maybe_lookup_decl_in_outer_ctx (*tp, ctx);
2070 if (t != *tp)
2071 {
2072 if (DECL_HAS_VALUE_EXPR_P (t))
2073 t = unshare_expr (DECL_VALUE_EXPR (t));
2074 *tp = t;
2075 }
2076 *walk_subtrees = 0;
2077 }
2078 else if (IS_TYPE_OR_DECL_P (*tp))
2079 *walk_subtrees = 0;
2080 return NULL_TREE;
2081 }
2082
2083 /* If any decls have been made addressable during scan_omp,
2084 adjust their fields if needed, and layout record types
2085 of parallel/task constructs. */
2086
2087 static void
2088 finish_taskreg_scan (omp_context *ctx)
2089 {
2090 if (ctx->record_type == NULL_TREE)
2091 return;
2092
2093 /* If any task_shared_vars were needed, verify all
2094 OMP_CLAUSE_SHARED clauses on GIMPLE_OMP_{PARALLEL,TASK,TEAMS}
2095 statements if use_pointer_for_field hasn't changed
2096 because of that. If it did, update field types now. */
2097 if (task_shared_vars)
2098 {
2099 tree c;
2100
2101 for (c = gimple_omp_taskreg_clauses (ctx->stmt);
2102 c; c = OMP_CLAUSE_CHAIN (c))
2103 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED
2104 && !OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
2105 {
2106 tree decl = OMP_CLAUSE_DECL (c);
2107
2108 /* Global variables don't need to be copied,
2109 the receiver side will use them directly. */
2110 if (is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx)))
2111 continue;
2112 if (!bitmap_bit_p (task_shared_vars, DECL_UID (decl))
2113 || !use_pointer_for_field (decl, ctx))
2114 continue;
2115 tree field = lookup_field (decl, ctx);
2116 if (TREE_CODE (TREE_TYPE (field)) == POINTER_TYPE
2117 && TREE_TYPE (TREE_TYPE (field)) == TREE_TYPE (decl))
2118 continue;
2119 TREE_TYPE (field) = build_pointer_type (TREE_TYPE (decl));
2120 TREE_THIS_VOLATILE (field) = 0;
2121 DECL_USER_ALIGN (field) = 0;
2122 SET_DECL_ALIGN (field, TYPE_ALIGN (TREE_TYPE (field)));
2123 if (TYPE_ALIGN (ctx->record_type) < DECL_ALIGN (field))
2124 SET_TYPE_ALIGN (ctx->record_type, DECL_ALIGN (field));
2125 if (ctx->srecord_type)
2126 {
2127 tree sfield = lookup_sfield (decl, ctx);
2128 TREE_TYPE (sfield) = TREE_TYPE (field);
2129 TREE_THIS_VOLATILE (sfield) = 0;
2130 DECL_USER_ALIGN (sfield) = 0;
2131 SET_DECL_ALIGN (sfield, DECL_ALIGN (field));
2132 if (TYPE_ALIGN (ctx->srecord_type) < DECL_ALIGN (sfield))
2133 SET_TYPE_ALIGN (ctx->srecord_type, DECL_ALIGN (sfield));
2134 }
2135 }
2136 }
2137
2138 if (gimple_code (ctx->stmt) == GIMPLE_OMP_PARALLEL)
2139 {
2140 tree clauses = gimple_omp_parallel_clauses (ctx->stmt);
2141 tree c = omp_find_clause (clauses, OMP_CLAUSE__REDUCTEMP_);
2142 if (c)
2143 {
2144 /* Move the _reductemp_ clause first. GOMP_parallel_reductions
2145 expects to find it at the start of data. */
2146 tree f = lookup_field (OMP_CLAUSE_DECL (c), ctx);
2147 tree *p = &TYPE_FIELDS (ctx->record_type);
2148 while (*p)
2149 if (*p == f)
2150 {
2151 *p = DECL_CHAIN (*p);
2152 break;
2153 }
2154 else
2155 p = &DECL_CHAIN (*p);
2156 DECL_CHAIN (f) = TYPE_FIELDS (ctx->record_type);
2157 TYPE_FIELDS (ctx->record_type) = f;
2158 }
2159 layout_type (ctx->record_type);
2160 fixup_child_record_type (ctx);
2161 }
2162 else if (gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS)
2163 {
2164 layout_type (ctx->record_type);
2165 fixup_child_record_type (ctx);
2166 }
2167 else
2168 {
2169 location_t loc = gimple_location (ctx->stmt);
2170 tree *p, vla_fields = NULL_TREE, *q = &vla_fields;
2171 /* Move VLA fields to the end. */
2172 p = &TYPE_FIELDS (ctx->record_type);
2173 while (*p)
2174 if (!TYPE_SIZE_UNIT (TREE_TYPE (*p))
2175 || ! TREE_CONSTANT (TYPE_SIZE_UNIT (TREE_TYPE (*p))))
2176 {
2177 *q = *p;
2178 *p = TREE_CHAIN (*p);
2179 TREE_CHAIN (*q) = NULL_TREE;
2180 q = &TREE_CHAIN (*q);
2181 }
2182 else
2183 p = &DECL_CHAIN (*p);
2184 *p = vla_fields;
2185 if (gimple_omp_task_taskloop_p (ctx->stmt))
2186 {
2187 /* Move fields corresponding to first and second _looptemp_
2188 clause first. There are filled by GOMP_taskloop
2189 and thus need to be in specific positions. */
2190 tree clauses = gimple_omp_task_clauses (ctx->stmt);
2191 tree c1 = omp_find_clause (clauses, OMP_CLAUSE__LOOPTEMP_);
2192 tree c2 = omp_find_clause (OMP_CLAUSE_CHAIN (c1),
2193 OMP_CLAUSE__LOOPTEMP_);
2194 tree c3 = omp_find_clause (clauses, OMP_CLAUSE__REDUCTEMP_);
2195 tree f1 = lookup_field (OMP_CLAUSE_DECL (c1), ctx);
2196 tree f2 = lookup_field (OMP_CLAUSE_DECL (c2), ctx);
2197 tree f3 = c3 ? lookup_field (OMP_CLAUSE_DECL (c3), ctx) : NULL_TREE;
2198 p = &TYPE_FIELDS (ctx->record_type);
2199 while (*p)
2200 if (*p == f1 || *p == f2 || *p == f3)
2201 *p = DECL_CHAIN (*p);
2202 else
2203 p = &DECL_CHAIN (*p);
2204 DECL_CHAIN (f1) = f2;
2205 if (c3)
2206 {
2207 DECL_CHAIN (f2) = f3;
2208 DECL_CHAIN (f3) = TYPE_FIELDS (ctx->record_type);
2209 }
2210 else
2211 DECL_CHAIN (f2) = TYPE_FIELDS (ctx->record_type);
2212 TYPE_FIELDS (ctx->record_type) = f1;
2213 if (ctx->srecord_type)
2214 {
2215 f1 = lookup_sfield (OMP_CLAUSE_DECL (c1), ctx);
2216 f2 = lookup_sfield (OMP_CLAUSE_DECL (c2), ctx);
2217 if (c3)
2218 f3 = lookup_sfield (OMP_CLAUSE_DECL (c3), ctx);
2219 p = &TYPE_FIELDS (ctx->srecord_type);
2220 while (*p)
2221 if (*p == f1 || *p == f2 || *p == f3)
2222 *p = DECL_CHAIN (*p);
2223 else
2224 p = &DECL_CHAIN (*p);
2225 DECL_CHAIN (f1) = f2;
2226 DECL_CHAIN (f2) = TYPE_FIELDS (ctx->srecord_type);
2227 if (c3)
2228 {
2229 DECL_CHAIN (f2) = f3;
2230 DECL_CHAIN (f3) = TYPE_FIELDS (ctx->srecord_type);
2231 }
2232 else
2233 DECL_CHAIN (f2) = TYPE_FIELDS (ctx->srecord_type);
2234 TYPE_FIELDS (ctx->srecord_type) = f1;
2235 }
2236 }
2237 layout_type (ctx->record_type);
2238 fixup_child_record_type (ctx);
2239 if (ctx->srecord_type)
2240 layout_type (ctx->srecord_type);
2241 tree t = fold_convert_loc (loc, long_integer_type_node,
2242 TYPE_SIZE_UNIT (ctx->record_type));
2243 if (TREE_CODE (t) != INTEGER_CST)
2244 {
2245 t = unshare_expr (t);
2246 walk_tree (&t, finish_taskreg_remap, ctx, NULL);
2247 }
2248 gimple_omp_task_set_arg_size (ctx->stmt, t);
2249 t = build_int_cst (long_integer_type_node,
2250 TYPE_ALIGN_UNIT (ctx->record_type));
2251 gimple_omp_task_set_arg_align (ctx->stmt, t);
2252 }
2253 }
2254
2255 /* Find the enclosing offload context. */
2256
2257 static omp_context *
2258 enclosing_target_ctx (omp_context *ctx)
2259 {
2260 for (; ctx; ctx = ctx->outer)
2261 if (gimple_code (ctx->stmt) == GIMPLE_OMP_TARGET)
2262 break;
2263
2264 return ctx;
2265 }
2266
2267 /* Return true if ctx is part of an oacc kernels region. */
2268
2269 static bool
2270 ctx_in_oacc_kernels_region (omp_context *ctx)
2271 {
2272 for (;ctx != NULL; ctx = ctx->outer)
2273 {
2274 gimple *stmt = ctx->stmt;
2275 if (gimple_code (stmt) == GIMPLE_OMP_TARGET
2276 && gimple_omp_target_kind (stmt) == GF_OMP_TARGET_KIND_OACC_KERNELS)
2277 return true;
2278 }
2279
2280 return false;
2281 }
2282
2283 /* Check the parallelism clauses inside a kernels regions.
2284 Until kernels handling moves to use the same loop indirection
2285 scheme as parallel, we need to do this checking early. */
2286
2287 static unsigned
2288 check_oacc_kernel_gwv (gomp_for *stmt, omp_context *ctx)
2289 {
2290 bool checking = true;
2291 unsigned outer_mask = 0;
2292 unsigned this_mask = 0;
2293 bool has_seq = false, has_auto = false;
2294
2295 if (ctx->outer)
2296 outer_mask = check_oacc_kernel_gwv (NULL, ctx->outer);
2297 if (!stmt)
2298 {
2299 checking = false;
2300 if (gimple_code (ctx->stmt) != GIMPLE_OMP_FOR)
2301 return outer_mask;
2302 stmt = as_a <gomp_for *> (ctx->stmt);
2303 }
2304
2305 for (tree c = gimple_omp_for_clauses (stmt); c; c = OMP_CLAUSE_CHAIN (c))
2306 {
2307 switch (OMP_CLAUSE_CODE (c))
2308 {
2309 case OMP_CLAUSE_GANG:
2310 this_mask |= GOMP_DIM_MASK (GOMP_DIM_GANG);
2311 break;
2312 case OMP_CLAUSE_WORKER:
2313 this_mask |= GOMP_DIM_MASK (GOMP_DIM_WORKER);
2314 break;
2315 case OMP_CLAUSE_VECTOR:
2316 this_mask |= GOMP_DIM_MASK (GOMP_DIM_VECTOR);
2317 break;
2318 case OMP_CLAUSE_SEQ:
2319 has_seq = true;
2320 break;
2321 case OMP_CLAUSE_AUTO:
2322 has_auto = true;
2323 break;
2324 default:
2325 break;
2326 }
2327 }
2328
2329 if (checking)
2330 {
2331 if (has_seq && (this_mask || has_auto))
2332 error_at (gimple_location (stmt), "%<seq%> overrides other"
2333 " OpenACC loop specifiers");
2334 else if (has_auto && this_mask)
2335 error_at (gimple_location (stmt), "%<auto%> conflicts with other"
2336 " OpenACC loop specifiers");
2337
2338 if (this_mask & outer_mask)
2339 error_at (gimple_location (stmt), "inner loop uses same"
2340 " OpenACC parallelism as containing loop");
2341 }
2342
2343 return outer_mask | this_mask;
2344 }
2345
2346 /* Scan a GIMPLE_OMP_FOR. */
2347
2348 static omp_context *
2349 scan_omp_for (gomp_for *stmt, omp_context *outer_ctx)
2350 {
2351 omp_context *ctx;
2352 size_t i;
2353 tree clauses = gimple_omp_for_clauses (stmt);
2354
2355 ctx = new_omp_context (stmt, outer_ctx);
2356
2357 if (is_gimple_omp_oacc (stmt))
2358 {
2359 omp_context *tgt = enclosing_target_ctx (outer_ctx);
2360
2361 if (!tgt || is_oacc_parallel (tgt))
2362 for (tree c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
2363 {
2364 char const *check = NULL;
2365
2366 switch (OMP_CLAUSE_CODE (c))
2367 {
2368 case OMP_CLAUSE_GANG:
2369 check = "gang";
2370 break;
2371
2372 case OMP_CLAUSE_WORKER:
2373 check = "worker";
2374 break;
2375
2376 case OMP_CLAUSE_VECTOR:
2377 check = "vector";
2378 break;
2379
2380 default:
2381 break;
2382 }
2383
2384 if (check && OMP_CLAUSE_OPERAND (c, 0))
2385 error_at (gimple_location (stmt),
2386 "argument not permitted on %qs clause in"
2387 " OpenACC %<parallel%>", check);
2388 }
2389
2390 if (tgt && is_oacc_kernels (tgt))
2391 {
2392 /* Strip out reductions, as they are not handled yet. */
2393 tree *prev_ptr = &clauses;
2394
2395 while (tree probe = *prev_ptr)
2396 {
2397 tree *next_ptr = &OMP_CLAUSE_CHAIN (probe);
2398
2399 if (OMP_CLAUSE_CODE (probe) == OMP_CLAUSE_REDUCTION)
2400 *prev_ptr = *next_ptr;
2401 else
2402 prev_ptr = next_ptr;
2403 }
2404
2405 gimple_omp_for_set_clauses (stmt, clauses);
2406 check_oacc_kernel_gwv (stmt, ctx);
2407 }
2408 }
2409
2410 scan_sharing_clauses (clauses, ctx);
2411
2412 scan_omp (gimple_omp_for_pre_body_ptr (stmt), ctx);
2413 for (i = 0; i < gimple_omp_for_collapse (stmt); i++)
2414 {
2415 scan_omp_op (gimple_omp_for_index_ptr (stmt, i), ctx);
2416 scan_omp_op (gimple_omp_for_initial_ptr (stmt, i), ctx);
2417 scan_omp_op (gimple_omp_for_final_ptr (stmt, i), ctx);
2418 scan_omp_op (gimple_omp_for_incr_ptr (stmt, i), ctx);
2419 }
2420 scan_omp (gimple_omp_body_ptr (stmt), ctx);
2421 return ctx;
2422 }
2423
2424 /* Duplicate #pragma omp simd, one for SIMT, another one for SIMD. */
2425
2426 static void
2427 scan_omp_simd (gimple_stmt_iterator *gsi, gomp_for *stmt,
2428 omp_context *outer_ctx)
2429 {
2430 gbind *bind = gimple_build_bind (NULL, NULL, NULL);
2431 gsi_replace (gsi, bind, false);
2432 gimple_seq seq = NULL;
2433 gimple *g = gimple_build_call_internal (IFN_GOMP_USE_SIMT, 0);
2434 tree cond = create_tmp_var_raw (integer_type_node);
2435 DECL_CONTEXT (cond) = current_function_decl;
2436 DECL_SEEN_IN_BIND_EXPR_P (cond) = 1;
2437 gimple_bind_set_vars (bind, cond);
2438 gimple_call_set_lhs (g, cond);
2439 gimple_seq_add_stmt (&seq, g);
2440 tree lab1 = create_artificial_label (UNKNOWN_LOCATION);
2441 tree lab2 = create_artificial_label (UNKNOWN_LOCATION);
2442 tree lab3 = create_artificial_label (UNKNOWN_LOCATION);
2443 g = gimple_build_cond (NE_EXPR, cond, integer_zero_node, lab1, lab2);
2444 gimple_seq_add_stmt (&seq, g);
2445 g = gimple_build_label (lab1);
2446 gimple_seq_add_stmt (&seq, g);
2447 gimple_seq new_seq = copy_gimple_seq_and_replace_locals (stmt);
2448 gomp_for *new_stmt = as_a <gomp_for *> (new_seq);
2449 tree clause = build_omp_clause (gimple_location (stmt), OMP_CLAUSE__SIMT_);
2450 OMP_CLAUSE_CHAIN (clause) = gimple_omp_for_clauses (new_stmt);
2451 gimple_omp_for_set_clauses (new_stmt, clause);
2452 gimple_seq_add_stmt (&seq, new_stmt);
2453 g = gimple_build_goto (lab3);
2454 gimple_seq_add_stmt (&seq, g);
2455 g = gimple_build_label (lab2);
2456 gimple_seq_add_stmt (&seq, g);
2457 gimple_seq_add_stmt (&seq, stmt);
2458 g = gimple_build_label (lab3);
2459 gimple_seq_add_stmt (&seq, g);
2460 gimple_bind_set_body (bind, seq);
2461 update_stmt (bind);
2462 scan_omp_for (new_stmt, outer_ctx);
2463 scan_omp_for (stmt, outer_ctx)->simt_stmt = new_stmt;
2464 }
2465
2466 static tree omp_find_scan (gimple_stmt_iterator *, bool *,
2467 struct walk_stmt_info *);
2468 static omp_context *maybe_lookup_ctx (gimple *);
2469
2470 /* Duplicate #pragma omp simd, one for the scan input phase loop and one
2471 for scan phase loop. */
2472
2473 static void
2474 scan_omp_simd_scan (gimple_stmt_iterator *gsi, gomp_for *stmt,
2475 omp_context *outer_ctx)
2476 {
2477 /* The only change between inclusive and exclusive scan will be
2478 within the first simd loop, so just use inclusive in the
2479 worksharing loop. */
2480 outer_ctx->scan_inclusive = true;
2481 tree c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE_INCLUSIVE);
2482 OMP_CLAUSE_DECL (c) = integer_zero_node;
2483
2484 gomp_scan *input_stmt = gimple_build_omp_scan (NULL, NULL_TREE);
2485 gomp_scan *scan_stmt = gimple_build_omp_scan (NULL, c);
2486 gsi_replace (gsi, input_stmt, false);
2487 gimple_seq input_body = NULL;
2488 gimple_seq_add_stmt (&input_body, stmt);
2489 gsi_insert_after (gsi, scan_stmt, GSI_NEW_STMT);
2490
2491 gimple_stmt_iterator input1_gsi = gsi_none ();
2492 struct walk_stmt_info wi;
2493 memset (&wi, 0, sizeof (wi));
2494 wi.val_only = true;
2495 wi.info = (void *) &input1_gsi;
2496 walk_gimple_seq_mod (gimple_omp_body_ptr (stmt), omp_find_scan, NULL, &wi);
2497 gcc_assert (!gsi_end_p (input1_gsi));
2498
2499 gimple *input_stmt1 = gsi_stmt (input1_gsi);
2500 gsi_next (&input1_gsi);
2501 gimple *scan_stmt1 = gsi_stmt (input1_gsi);
2502 gcc_assert (scan_stmt1 && gimple_code (scan_stmt1) == GIMPLE_OMP_SCAN);
2503 c = gimple_omp_scan_clauses (as_a <gomp_scan *> (scan_stmt1));
2504 if (c && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_EXCLUSIVE)
2505 std::swap (input_stmt1, scan_stmt1);
2506
2507 gimple_seq input_body1 = gimple_omp_body (input_stmt1);
2508 gimple_omp_set_body (input_stmt1, NULL);
2509
2510 gimple_seq scan_body = copy_gimple_seq_and_replace_locals (stmt);
2511 gomp_for *new_stmt = as_a <gomp_for *> (scan_body);
2512
2513 gimple_omp_set_body (input_stmt1, input_body1);
2514 gimple_omp_set_body (scan_stmt1, NULL);
2515
2516 gimple_stmt_iterator input2_gsi = gsi_none ();
2517 memset (&wi, 0, sizeof (wi));
2518 wi.val_only = true;
2519 wi.info = (void *) &input2_gsi;
2520 walk_gimple_seq_mod (gimple_omp_body_ptr (new_stmt), omp_find_scan,
2521 NULL, &wi);
2522 gcc_assert (!gsi_end_p (input2_gsi));
2523
2524 gimple *input_stmt2 = gsi_stmt (input2_gsi);
2525 gsi_next (&input2_gsi);
2526 gimple *scan_stmt2 = gsi_stmt (input2_gsi);
2527 gcc_assert (scan_stmt2 && gimple_code (scan_stmt2) == GIMPLE_OMP_SCAN);
2528 if (c && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_EXCLUSIVE)
2529 std::swap (input_stmt2, scan_stmt2);
2530
2531 gimple_omp_set_body (input_stmt2, NULL);
2532
2533 gimple_omp_set_body (input_stmt, input_body);
2534 gimple_omp_set_body (scan_stmt, scan_body);
2535
2536 omp_context *ctx = new_omp_context (input_stmt, outer_ctx);
2537 scan_omp (gimple_omp_body_ptr (input_stmt), ctx);
2538
2539 ctx = new_omp_context (scan_stmt, outer_ctx);
2540 scan_omp (gimple_omp_body_ptr (scan_stmt), ctx);
2541
2542 maybe_lookup_ctx (new_stmt)->for_simd_scan_phase = true;
2543 }
2544
2545 /* Scan an OpenMP sections directive. */
2546
2547 static void
2548 scan_omp_sections (gomp_sections *stmt, omp_context *outer_ctx)
2549 {
2550 omp_context *ctx;
2551
2552 ctx = new_omp_context (stmt, outer_ctx);
2553 scan_sharing_clauses (gimple_omp_sections_clauses (stmt), ctx);
2554 scan_omp (gimple_omp_body_ptr (stmt), ctx);
2555 }
2556
2557 /* Scan an OpenMP single directive. */
2558
2559 static void
2560 scan_omp_single (gomp_single *stmt, omp_context *outer_ctx)
2561 {
2562 omp_context *ctx;
2563 tree name;
2564
2565 ctx = new_omp_context (stmt, outer_ctx);
2566 ctx->field_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
2567 ctx->record_type = lang_hooks.types.make_type (RECORD_TYPE);
2568 name = create_tmp_var_name (".omp_copy_s");
2569 name = build_decl (gimple_location (stmt),
2570 TYPE_DECL, name, ctx->record_type);
2571 TYPE_NAME (ctx->record_type) = name;
2572
2573 scan_sharing_clauses (gimple_omp_single_clauses (stmt), ctx);
2574 scan_omp (gimple_omp_body_ptr (stmt), ctx);
2575
2576 if (TYPE_FIELDS (ctx->record_type) == NULL)
2577 ctx->record_type = NULL;
2578 else
2579 layout_type (ctx->record_type);
2580 }
2581
2582 /* Scan a GIMPLE_OMP_TARGET. */
2583
2584 static void
2585 scan_omp_target (gomp_target *stmt, omp_context *outer_ctx)
2586 {
2587 omp_context *ctx;
2588 tree name;
2589 bool offloaded = is_gimple_omp_offloaded (stmt);
2590 tree clauses = gimple_omp_target_clauses (stmt);
2591
2592 ctx = new_omp_context (stmt, outer_ctx);
2593 ctx->field_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
2594 ctx->record_type = lang_hooks.types.make_type (RECORD_TYPE);
2595 name = create_tmp_var_name (".omp_data_t");
2596 name = build_decl (gimple_location (stmt),
2597 TYPE_DECL, name, ctx->record_type);
2598 DECL_ARTIFICIAL (name) = 1;
2599 DECL_NAMELESS (name) = 1;
2600 TYPE_NAME (ctx->record_type) = name;
2601 TYPE_ARTIFICIAL (ctx->record_type) = 1;
2602
2603 if (offloaded)
2604 {
2605 create_omp_child_function (ctx, false);
2606 gimple_omp_target_set_child_fn (stmt, ctx->cb.dst_fn);
2607 }
2608
2609 scan_sharing_clauses (clauses, ctx);
2610 scan_omp (gimple_omp_body_ptr (stmt), ctx);
2611
2612 if (TYPE_FIELDS (ctx->record_type) == NULL)
2613 ctx->record_type = ctx->receiver_decl = NULL;
2614 else
2615 {
2616 TYPE_FIELDS (ctx->record_type)
2617 = nreverse (TYPE_FIELDS (ctx->record_type));
2618 if (flag_checking)
2619 {
2620 unsigned int align = DECL_ALIGN (TYPE_FIELDS (ctx->record_type));
2621 for (tree field = TYPE_FIELDS (ctx->record_type);
2622 field;
2623 field = DECL_CHAIN (field))
2624 gcc_assert (DECL_ALIGN (field) == align);
2625 }
2626 layout_type (ctx->record_type);
2627 if (offloaded)
2628 fixup_child_record_type (ctx);
2629 }
2630 }
2631
2632 /* Scan an OpenMP teams directive. */
2633
2634 static void
2635 scan_omp_teams (gomp_teams *stmt, omp_context *outer_ctx)
2636 {
2637 omp_context *ctx = new_omp_context (stmt, outer_ctx);
2638
2639 if (!gimple_omp_teams_host (stmt))
2640 {
2641 scan_sharing_clauses (gimple_omp_teams_clauses (stmt), ctx);
2642 scan_omp (gimple_omp_body_ptr (stmt), ctx);
2643 return;
2644 }
2645 taskreg_contexts.safe_push (ctx);
2646 gcc_assert (taskreg_nesting_level == 1);
2647 ctx->field_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
2648 ctx->record_type = lang_hooks.types.make_type (RECORD_TYPE);
2649 tree name = create_tmp_var_name (".omp_data_s");
2650 name = build_decl (gimple_location (stmt),
2651 TYPE_DECL, name, ctx->record_type);
2652 DECL_ARTIFICIAL (name) = 1;
2653 DECL_NAMELESS (name) = 1;
2654 TYPE_NAME (ctx->record_type) = name;
2655 TYPE_ARTIFICIAL (ctx->record_type) = 1;
2656 create_omp_child_function (ctx, false);
2657 gimple_omp_teams_set_child_fn (stmt, ctx->cb.dst_fn);
2658
2659 scan_sharing_clauses (gimple_omp_teams_clauses (stmt), ctx);
2660 scan_omp (gimple_omp_body_ptr (stmt), ctx);
2661
2662 if (TYPE_FIELDS (ctx->record_type) == NULL)
2663 ctx->record_type = ctx->receiver_decl = NULL;
2664 }
2665
2666 /* Check nesting restrictions. */
2667 static bool
2668 check_omp_nesting_restrictions (gimple *stmt, omp_context *ctx)
2669 {
2670 tree c;
2671
2672 if (ctx && gimple_code (ctx->stmt) == GIMPLE_OMP_GRID_BODY)
2673 /* GRID_BODY is an artificial construct, nesting rules will be checked in
2674 the original copy of its contents. */
2675 return true;
2676
2677 /* No nesting of non-OpenACC STMT (that is, an OpenMP one, or a GOMP builtin)
2678 inside an OpenACC CTX. */
2679 if (!(is_gimple_omp (stmt)
2680 && is_gimple_omp_oacc (stmt))
2681 /* Except for atomic codes that we share with OpenMP. */
2682 && !(gimple_code (stmt) == GIMPLE_OMP_ATOMIC_LOAD
2683 || gimple_code (stmt) == GIMPLE_OMP_ATOMIC_STORE))
2684 {
2685 if (oacc_get_fn_attrib (cfun->decl) != NULL)
2686 {
2687 error_at (gimple_location (stmt),
2688 "non-OpenACC construct inside of OpenACC routine");
2689 return false;
2690 }
2691 else
2692 for (omp_context *octx = ctx; octx != NULL; octx = octx->outer)
2693 if (is_gimple_omp (octx->stmt)
2694 && is_gimple_omp_oacc (octx->stmt))
2695 {
2696 error_at (gimple_location (stmt),
2697 "non-OpenACC construct inside of OpenACC region");
2698 return false;
2699 }
2700 }
2701
2702 if (ctx != NULL)
2703 {
2704 if (gimple_code (ctx->stmt) == GIMPLE_OMP_SCAN
2705 && ctx->outer
2706 && gimple_code (ctx->outer->stmt) == GIMPLE_OMP_FOR)
2707 ctx = ctx->outer;
2708 if (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
2709 && gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_SIMD
2710 && !ctx->loop_p)
2711 {
2712 c = NULL_TREE;
2713 if (ctx->order_concurrent
2714 && (gimple_code (stmt) == GIMPLE_OMP_ORDERED
2715 || gimple_code (stmt) == GIMPLE_OMP_ATOMIC_LOAD
2716 || gimple_code (stmt) == GIMPLE_OMP_ATOMIC_STORE))
2717 {
2718 error_at (gimple_location (stmt),
2719 "OpenMP constructs other than %<parallel%>, %<loop%>"
2720 " or %<simd%> may not be nested inside a region with"
2721 " the %<order(concurrent)%> clause");
2722 return false;
2723 }
2724 if (gimple_code (stmt) == GIMPLE_OMP_ORDERED)
2725 {
2726 c = gimple_omp_ordered_clauses (as_a <gomp_ordered *> (stmt));
2727 if (omp_find_clause (c, OMP_CLAUSE_SIMD))
2728 {
2729 if (omp_find_clause (c, OMP_CLAUSE_THREADS)
2730 && (ctx->outer == NULL
2731 || !gimple_omp_for_combined_into_p (ctx->stmt)
2732 || gimple_code (ctx->outer->stmt) != GIMPLE_OMP_FOR
2733 || (gimple_omp_for_kind (ctx->outer->stmt)
2734 != GF_OMP_FOR_KIND_FOR)
2735 || !gimple_omp_for_combined_p (ctx->outer->stmt)))
2736 {
2737 error_at (gimple_location (stmt),
2738 "%<ordered simd threads%> must be closely "
2739 "nested inside of %<for simd%> region");
2740 return false;
2741 }
2742 return true;
2743 }
2744 }
2745 else if (gimple_code (stmt) == GIMPLE_OMP_ATOMIC_LOAD
2746 || gimple_code (stmt) == GIMPLE_OMP_ATOMIC_STORE
2747 || gimple_code (stmt) == GIMPLE_OMP_SCAN)
2748 return true;
2749 else if (gimple_code (stmt) == GIMPLE_OMP_FOR
2750 && gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_SIMD)
2751 return true;
2752 error_at (gimple_location (stmt),
2753 "OpenMP constructs other than "
2754 "%<ordered simd%>, %<simd%>, %<loop%> or %<atomic%> may "
2755 "not be nested inside %<simd%> region");
2756 return false;
2757 }
2758 else if (gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS)
2759 {
2760 if ((gimple_code (stmt) != GIMPLE_OMP_FOR
2761 || (gimple_omp_for_kind (stmt) != GF_OMP_FOR_KIND_DISTRIBUTE
2762 && gimple_omp_for_kind (stmt) != GF_OMP_FOR_KIND_GRID_LOOP
2763 && omp_find_clause (gimple_omp_for_clauses (stmt),
2764 OMP_CLAUSE_BIND) == NULL_TREE))
2765 && gimple_code (stmt) != GIMPLE_OMP_PARALLEL)
2766 {
2767 error_at (gimple_location (stmt),
2768 "only %<distribute%>, %<parallel%> or %<loop%> "
2769 "regions are allowed to be strictly nested inside "
2770 "%<teams%> region");
2771 return false;
2772 }
2773 }
2774 else if (ctx->order_concurrent
2775 && gimple_code (stmt) != GIMPLE_OMP_PARALLEL
2776 && (gimple_code (stmt) != GIMPLE_OMP_FOR
2777 || gimple_omp_for_kind (stmt) != GF_OMP_FOR_KIND_SIMD)
2778 && gimple_code (stmt) != GIMPLE_OMP_SCAN)
2779 {
2780 if (ctx->loop_p)
2781 error_at (gimple_location (stmt),
2782 "OpenMP constructs other than %<parallel%>, %<loop%> or "
2783 "%<simd%> may not be nested inside a %<loop%> region");
2784 else
2785 error_at (gimple_location (stmt),
2786 "OpenMP constructs other than %<parallel%>, %<loop%> or "
2787 "%<simd%> may not be nested inside a region with "
2788 "the %<order(concurrent)%> clause");
2789 return false;
2790 }
2791 }
2792 switch (gimple_code (stmt))
2793 {
2794 case GIMPLE_OMP_FOR:
2795 if (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_SIMD)
2796 return true;
2797 if (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_DISTRIBUTE)
2798 {
2799 if (ctx != NULL && gimple_code (ctx->stmt) != GIMPLE_OMP_TEAMS)
2800 {
2801 error_at (gimple_location (stmt),
2802 "%<distribute%> region must be strictly nested "
2803 "inside %<teams%> construct");
2804 return false;
2805 }
2806 return true;
2807 }
2808 /* We split taskloop into task and nested taskloop in it. */
2809 if (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_TASKLOOP)
2810 return true;
2811 /* For now, hope this will change and loop bind(parallel) will not
2812 be allowed in lots of contexts. */
2813 if (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_FOR
2814 && omp_find_clause (gimple_omp_for_clauses (stmt), OMP_CLAUSE_BIND))
2815 return true;
2816 if (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_OACC_LOOP)
2817 {
2818 bool ok = false;
2819
2820 if (ctx)
2821 switch (gimple_code (ctx->stmt))
2822 {
2823 case GIMPLE_OMP_FOR:
2824 ok = (gimple_omp_for_kind (ctx->stmt)
2825 == GF_OMP_FOR_KIND_OACC_LOOP);
2826 break;
2827
2828 case GIMPLE_OMP_TARGET:
2829 switch (gimple_omp_target_kind (ctx->stmt))
2830 {
2831 case GF_OMP_TARGET_KIND_OACC_PARALLEL:
2832 case GF_OMP_TARGET_KIND_OACC_KERNELS:
2833 ok = true;
2834 break;
2835
2836 default:
2837 break;
2838 }
2839
2840 default:
2841 break;
2842 }
2843 else if (oacc_get_fn_attrib (current_function_decl))
2844 ok = true;
2845 if (!ok)
2846 {
2847 error_at (gimple_location (stmt),
2848 "OpenACC loop directive must be associated with"
2849 " an OpenACC compute region");
2850 return false;
2851 }
2852 }
2853 /* FALLTHRU */
2854 case GIMPLE_CALL:
2855 if (is_gimple_call (stmt)
2856 && (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
2857 == BUILT_IN_GOMP_CANCEL
2858 || DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
2859 == BUILT_IN_GOMP_CANCELLATION_POINT))
2860 {
2861 const char *bad = NULL;
2862 const char *kind = NULL;
2863 const char *construct
2864 = (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
2865 == BUILT_IN_GOMP_CANCEL)
2866 ? "cancel"
2867 : "cancellation point";
2868 if (ctx == NULL)
2869 {
2870 error_at (gimple_location (stmt), "orphaned %qs construct",
2871 construct);
2872 return false;
2873 }
2874 switch (tree_fits_shwi_p (gimple_call_arg (stmt, 0))
2875 ? tree_to_shwi (gimple_call_arg (stmt, 0))
2876 : 0)
2877 {
2878 case 1:
2879 if (gimple_code (ctx->stmt) != GIMPLE_OMP_PARALLEL)
2880 bad = "parallel";
2881 else if (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
2882 == BUILT_IN_GOMP_CANCEL
2883 && !integer_zerop (gimple_call_arg (stmt, 1)))
2884 ctx->cancellable = true;
2885 kind = "parallel";
2886 break;
2887 case 2:
2888 if (gimple_code (ctx->stmt) != GIMPLE_OMP_FOR
2889 || gimple_omp_for_kind (ctx->stmt) != GF_OMP_FOR_KIND_FOR)
2890 bad = "for";
2891 else if (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
2892 == BUILT_IN_GOMP_CANCEL
2893 && !integer_zerop (gimple_call_arg (stmt, 1)))
2894 {
2895 ctx->cancellable = true;
2896 if (omp_find_clause (gimple_omp_for_clauses (ctx->stmt),
2897 OMP_CLAUSE_NOWAIT))
2898 warning_at (gimple_location (stmt), 0,
2899 "%<cancel for%> inside "
2900 "%<nowait%> for construct");
2901 if (omp_find_clause (gimple_omp_for_clauses (ctx->stmt),
2902 OMP_CLAUSE_ORDERED))
2903 warning_at (gimple_location (stmt), 0,
2904 "%<cancel for%> inside "
2905 "%<ordered%> for construct");
2906 }
2907 kind = "for";
2908 break;
2909 case 4:
2910 if (gimple_code (ctx->stmt) != GIMPLE_OMP_SECTIONS
2911 && gimple_code (ctx->stmt) != GIMPLE_OMP_SECTION)
2912 bad = "sections";
2913 else if (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
2914 == BUILT_IN_GOMP_CANCEL
2915 && !integer_zerop (gimple_call_arg (stmt, 1)))
2916 {
2917 if (gimple_code (ctx->stmt) == GIMPLE_OMP_SECTIONS)
2918 {
2919 ctx->cancellable = true;
2920 if (omp_find_clause (gimple_omp_sections_clauses
2921 (ctx->stmt),
2922 OMP_CLAUSE_NOWAIT))
2923 warning_at (gimple_location (stmt), 0,
2924 "%<cancel sections%> inside "
2925 "%<nowait%> sections construct");
2926 }
2927 else
2928 {
2929 gcc_assert (ctx->outer
2930 && gimple_code (ctx->outer->stmt)
2931 == GIMPLE_OMP_SECTIONS);
2932 ctx->outer->cancellable = true;
2933 if (omp_find_clause (gimple_omp_sections_clauses
2934 (ctx->outer->stmt),
2935 OMP_CLAUSE_NOWAIT))
2936 warning_at (gimple_location (stmt), 0,
2937 "%<cancel sections%> inside "
2938 "%<nowait%> sections construct");
2939 }
2940 }
2941 kind = "sections";
2942 break;
2943 case 8:
2944 if (!is_task_ctx (ctx)
2945 && (!is_taskloop_ctx (ctx)
2946 || ctx->outer == NULL
2947 || !is_task_ctx (ctx->outer)))
2948 bad = "task";
2949 else
2950 {
2951 for (omp_context *octx = ctx->outer;
2952 octx; octx = octx->outer)
2953 {
2954 switch (gimple_code (octx->stmt))
2955 {
2956 case GIMPLE_OMP_TASKGROUP:
2957 break;
2958 case GIMPLE_OMP_TARGET:
2959 if (gimple_omp_target_kind (octx->stmt)
2960 != GF_OMP_TARGET_KIND_REGION)
2961 continue;
2962 /* FALLTHRU */
2963 case GIMPLE_OMP_PARALLEL:
2964 case GIMPLE_OMP_TEAMS:
2965 error_at (gimple_location (stmt),
2966 "%<%s taskgroup%> construct not closely "
2967 "nested inside of %<taskgroup%> region",
2968 construct);
2969 return false;
2970 case GIMPLE_OMP_TASK:
2971 if (gimple_omp_task_taskloop_p (octx->stmt)
2972 && octx->outer
2973 && is_taskloop_ctx (octx->outer))
2974 {
2975 tree clauses
2976 = gimple_omp_for_clauses (octx->outer->stmt);
2977 if (!omp_find_clause (clauses, OMP_CLAUSE_NOGROUP))
2978 break;
2979 }
2980 continue;
2981 default:
2982 continue;
2983 }
2984 break;
2985 }
2986 ctx->cancellable = true;
2987 }
2988 kind = "taskgroup";
2989 break;
2990 default:
2991 error_at (gimple_location (stmt), "invalid arguments");
2992 return false;
2993 }
2994 if (bad)
2995 {
2996 error_at (gimple_location (stmt),
2997 "%<%s %s%> construct not closely nested inside of %qs",
2998 construct, kind, bad);
2999 return false;
3000 }
3001 }
3002 /* FALLTHRU */
3003 case GIMPLE_OMP_SECTIONS:
3004 case GIMPLE_OMP_SINGLE:
3005 for (; ctx != NULL; ctx = ctx->outer)
3006 switch (gimple_code (ctx->stmt))
3007 {
3008 case GIMPLE_OMP_FOR:
3009 if (gimple_omp_for_kind (ctx->stmt) != GF_OMP_FOR_KIND_FOR
3010 && gimple_omp_for_kind (ctx->stmt) != GF_OMP_FOR_KIND_TASKLOOP)
3011 break;
3012 /* FALLTHRU */
3013 case GIMPLE_OMP_SECTIONS:
3014 case GIMPLE_OMP_SINGLE:
3015 case GIMPLE_OMP_ORDERED:
3016 case GIMPLE_OMP_MASTER:
3017 case GIMPLE_OMP_TASK:
3018 case GIMPLE_OMP_CRITICAL:
3019 if (is_gimple_call (stmt))
3020 {
3021 if (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
3022 != BUILT_IN_GOMP_BARRIER)
3023 return true;
3024 error_at (gimple_location (stmt),
3025 "barrier region may not be closely nested inside "
3026 "of work-sharing, %<loop%>, %<critical%>, "
3027 "%<ordered%>, %<master%>, explicit %<task%> or "
3028 "%<taskloop%> region");
3029 return false;
3030 }
3031 error_at (gimple_location (stmt),
3032 "work-sharing region may not be closely nested inside "
3033 "of work-sharing, %<loop%>, %<critical%>, %<ordered%>, "
3034 "%<master%>, explicit %<task%> or %<taskloop%> region");
3035 return false;
3036 case GIMPLE_OMP_PARALLEL:
3037 case GIMPLE_OMP_TEAMS:
3038 return true;
3039 case GIMPLE_OMP_TARGET:
3040 if (gimple_omp_target_kind (ctx->stmt)
3041 == GF_OMP_TARGET_KIND_REGION)
3042 return true;
3043 break;
3044 default:
3045 break;
3046 }
3047 break;
3048 case GIMPLE_OMP_MASTER:
3049 for (; ctx != NULL; ctx = ctx->outer)
3050 switch (gimple_code (ctx->stmt))
3051 {
3052 case GIMPLE_OMP_FOR:
3053 if (gimple_omp_for_kind (ctx->stmt) != GF_OMP_FOR_KIND_FOR
3054 && gimple_omp_for_kind (ctx->stmt) != GF_OMP_FOR_KIND_TASKLOOP)
3055 break;
3056 /* FALLTHRU */
3057 case GIMPLE_OMP_SECTIONS:
3058 case GIMPLE_OMP_SINGLE:
3059 case GIMPLE_OMP_TASK:
3060 error_at (gimple_location (stmt),
3061 "%<master%> region may not be closely nested inside "
3062 "of work-sharing, %<loop%>, explicit %<task%> or "
3063 "%<taskloop%> region");
3064 return false;
3065 case GIMPLE_OMP_PARALLEL:
3066 case GIMPLE_OMP_TEAMS:
3067 return true;
3068 case GIMPLE_OMP_TARGET:
3069 if (gimple_omp_target_kind (ctx->stmt)
3070 == GF_OMP_TARGET_KIND_REGION)
3071 return true;
3072 break;
3073 default:
3074 break;
3075 }
3076 break;
3077 case GIMPLE_OMP_TASK:
3078 for (c = gimple_omp_task_clauses (stmt); c; c = OMP_CLAUSE_CHAIN (c))
3079 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND
3080 && (OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SOURCE
3081 || OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SINK))
3082 {
3083 enum omp_clause_depend_kind kind = OMP_CLAUSE_DEPEND_KIND (c);
3084 error_at (OMP_CLAUSE_LOCATION (c),
3085 "%<depend(%s)%> is only allowed in %<omp ordered%>",
3086 kind == OMP_CLAUSE_DEPEND_SOURCE ? "source" : "sink");
3087 return false;
3088 }
3089 break;
3090 case GIMPLE_OMP_ORDERED:
3091 for (c = gimple_omp_ordered_clauses (as_a <gomp_ordered *> (stmt));
3092 c; c = OMP_CLAUSE_CHAIN (c))
3093 {
3094 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_DEPEND)
3095 {
3096 gcc_assert (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_THREADS
3097 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SIMD);
3098 continue;
3099 }
3100 enum omp_clause_depend_kind kind = OMP_CLAUSE_DEPEND_KIND (c);
3101 if (kind == OMP_CLAUSE_DEPEND_SOURCE
3102 || kind == OMP_CLAUSE_DEPEND_SINK)
3103 {
3104 tree oclause;
3105 /* Look for containing ordered(N) loop. */
3106 if (ctx == NULL
3107 || gimple_code (ctx->stmt) != GIMPLE_OMP_FOR
3108 || (oclause
3109 = omp_find_clause (gimple_omp_for_clauses (ctx->stmt),
3110 OMP_CLAUSE_ORDERED)) == NULL_TREE)
3111 {
3112 error_at (OMP_CLAUSE_LOCATION (c),
3113 "%<ordered%> construct with %<depend%> clause "
3114 "must be closely nested inside an %<ordered%> "
3115 "loop");
3116 return false;
3117 }
3118 else if (OMP_CLAUSE_ORDERED_EXPR (oclause) == NULL_TREE)
3119 {
3120 error_at (OMP_CLAUSE_LOCATION (c),
3121 "%<ordered%> construct with %<depend%> clause "
3122 "must be closely nested inside a loop with "
3123 "%<ordered%> clause with a parameter");
3124 return false;
3125 }
3126 }
3127 else
3128 {
3129 error_at (OMP_CLAUSE_LOCATION (c),
3130 "invalid depend kind in omp %<ordered%> %<depend%>");
3131 return false;
3132 }
3133 }
3134 c = gimple_omp_ordered_clauses (as_a <gomp_ordered *> (stmt));
3135 if (omp_find_clause (c, OMP_CLAUSE_SIMD))
3136 {
3137 /* ordered simd must be closely nested inside of simd region,
3138 and simd region must not encounter constructs other than
3139 ordered simd, therefore ordered simd may be either orphaned,
3140 or ctx->stmt must be simd. The latter case is handled already
3141 earlier. */
3142 if (ctx != NULL)
3143 {
3144 error_at (gimple_location (stmt),
3145 "%<ordered%> %<simd%> must be closely nested inside "
3146 "%<simd%> region");
3147 return false;
3148 }
3149 }
3150 for (; ctx != NULL; ctx = ctx->outer)
3151 switch (gimple_code (ctx->stmt))
3152 {
3153 case GIMPLE_OMP_CRITICAL:
3154 case GIMPLE_OMP_TASK:
3155 case GIMPLE_OMP_ORDERED:
3156 ordered_in_taskloop:
3157 error_at (gimple_location (stmt),
3158 "%<ordered%> region may not be closely nested inside "
3159 "of %<critical%>, %<ordered%>, explicit %<task%> or "
3160 "%<taskloop%> region");
3161 return false;
3162 case GIMPLE_OMP_FOR:
3163 if (gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_TASKLOOP)
3164 goto ordered_in_taskloop;
3165 tree o;
3166 o = omp_find_clause (gimple_omp_for_clauses (ctx->stmt),
3167 OMP_CLAUSE_ORDERED);
3168 if (o == NULL)
3169 {
3170 error_at (gimple_location (stmt),
3171 "%<ordered%> region must be closely nested inside "
3172 "a loop region with an %<ordered%> clause");
3173 return false;
3174 }
3175 if (OMP_CLAUSE_ORDERED_EXPR (o) != NULL_TREE
3176 && omp_find_clause (c, OMP_CLAUSE_DEPEND) == NULL_TREE)
3177 {
3178 error_at (gimple_location (stmt),
3179 "%<ordered%> region without %<depend%> clause may "
3180 "not be closely nested inside a loop region with "
3181 "an %<ordered%> clause with a parameter");
3182 return false;
3183 }
3184 return true;
3185 case GIMPLE_OMP_TARGET:
3186 if (gimple_omp_target_kind (ctx->stmt)
3187 != GF_OMP_TARGET_KIND_REGION)
3188 break;
3189 /* FALLTHRU */
3190 case GIMPLE_OMP_PARALLEL:
3191 case GIMPLE_OMP_TEAMS:
3192 error_at (gimple_location (stmt),
3193 "%<ordered%> region must be closely nested inside "
3194 "a loop region with an %<ordered%> clause");
3195 return false;
3196 default:
3197 break;
3198 }
3199 break;
3200 case GIMPLE_OMP_CRITICAL:
3201 {
3202 tree this_stmt_name
3203 = gimple_omp_critical_name (as_a <gomp_critical *> (stmt));
3204 for (; ctx != NULL; ctx = ctx->outer)
3205 if (gomp_critical *other_crit
3206 = dyn_cast <gomp_critical *> (ctx->stmt))
3207 if (this_stmt_name == gimple_omp_critical_name (other_crit))
3208 {
3209 error_at (gimple_location (stmt),
3210 "%<critical%> region may not be nested inside "
3211 "a %<critical%> region with the same name");
3212 return false;
3213 }
3214 }
3215 break;
3216 case GIMPLE_OMP_TEAMS:
3217 if (ctx == NULL)
3218 break;
3219 else if (gimple_code (ctx->stmt) != GIMPLE_OMP_TARGET
3220 || (gimple_omp_target_kind (ctx->stmt)
3221 != GF_OMP_TARGET_KIND_REGION))
3222 {
3223 /* Teams construct can appear either strictly nested inside of
3224 target construct with no intervening stmts, or can be encountered
3225 only by initial task (so must not appear inside any OpenMP
3226 construct. */
3227 error_at (gimple_location (stmt),
3228 "%<teams%> construct must be closely nested inside of "
3229 "%<target%> construct or not nested in any OpenMP "
3230 "construct");
3231 return false;
3232 }
3233 break;
3234 case GIMPLE_OMP_TARGET:
3235 for (c = gimple_omp_target_clauses (stmt); c; c = OMP_CLAUSE_CHAIN (c))
3236 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND
3237 && (OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SOURCE
3238 || OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SINK))
3239 {
3240 enum omp_clause_depend_kind kind = OMP_CLAUSE_DEPEND_KIND (c);
3241 error_at (OMP_CLAUSE_LOCATION (c),
3242 "%<depend(%s)%> is only allowed in %<omp ordered%>",
3243 kind == OMP_CLAUSE_DEPEND_SOURCE ? "source" : "sink");
3244 return false;
3245 }
3246 if (is_gimple_omp_offloaded (stmt)
3247 && oacc_get_fn_attrib (cfun->decl) != NULL)
3248 {
3249 error_at (gimple_location (stmt),
3250 "OpenACC region inside of OpenACC routine, nested "
3251 "parallelism not supported yet");
3252 return false;
3253 }
3254 for (; ctx != NULL; ctx = ctx->outer)
3255 {
3256 if (gimple_code (ctx->stmt) != GIMPLE_OMP_TARGET)
3257 {
3258 if (is_gimple_omp (stmt)
3259 && is_gimple_omp_oacc (stmt)
3260 && is_gimple_omp (ctx->stmt))
3261 {
3262 error_at (gimple_location (stmt),
3263 "OpenACC construct inside of non-OpenACC region");
3264 return false;
3265 }
3266 continue;
3267 }
3268
3269 const char *stmt_name, *ctx_stmt_name;
3270 switch (gimple_omp_target_kind (stmt))
3271 {
3272 case GF_OMP_TARGET_KIND_REGION: stmt_name = "target"; break;
3273 case GF_OMP_TARGET_KIND_DATA: stmt_name = "target data"; break;
3274 case GF_OMP_TARGET_KIND_UPDATE: stmt_name = "target update"; break;
3275 case GF_OMP_TARGET_KIND_ENTER_DATA:
3276 stmt_name = "target enter data"; break;
3277 case GF_OMP_TARGET_KIND_EXIT_DATA:
3278 stmt_name = "target exit data"; break;
3279 case GF_OMP_TARGET_KIND_OACC_PARALLEL: stmt_name = "parallel"; break;
3280 case GF_OMP_TARGET_KIND_OACC_KERNELS: stmt_name = "kernels"; break;
3281 case GF_OMP_TARGET_KIND_OACC_DATA: stmt_name = "data"; break;
3282 case GF_OMP_TARGET_KIND_OACC_UPDATE: stmt_name = "update"; break;
3283 case GF_OMP_TARGET_KIND_OACC_ENTER_EXIT_DATA:
3284 stmt_name = "enter/exit data"; break;
3285 case GF_OMP_TARGET_KIND_OACC_DECLARE: stmt_name = "declare"; break;
3286 case GF_OMP_TARGET_KIND_OACC_HOST_DATA: stmt_name = "host_data";
3287 break;
3288 default: gcc_unreachable ();
3289 }
3290 switch (gimple_omp_target_kind (ctx->stmt))
3291 {
3292 case GF_OMP_TARGET_KIND_REGION: ctx_stmt_name = "target"; break;
3293 case GF_OMP_TARGET_KIND_DATA: ctx_stmt_name = "target data"; break;
3294 case GF_OMP_TARGET_KIND_OACC_PARALLEL:
3295 ctx_stmt_name = "parallel"; break;
3296 case GF_OMP_TARGET_KIND_OACC_KERNELS:
3297 ctx_stmt_name = "kernels"; break;
3298 case GF_OMP_TARGET_KIND_OACC_DATA: ctx_stmt_name = "data"; break;
3299 case GF_OMP_TARGET_KIND_OACC_HOST_DATA:
3300 ctx_stmt_name = "host_data"; break;
3301 default: gcc_unreachable ();
3302 }
3303
3304 /* OpenACC/OpenMP mismatch? */
3305 if (is_gimple_omp_oacc (stmt)
3306 != is_gimple_omp_oacc (ctx->stmt))
3307 {
3308 error_at (gimple_location (stmt),
3309 "%s %qs construct inside of %s %qs region",
3310 (is_gimple_omp_oacc (stmt)
3311 ? "OpenACC" : "OpenMP"), stmt_name,
3312 (is_gimple_omp_oacc (ctx->stmt)
3313 ? "OpenACC" : "OpenMP"), ctx_stmt_name);
3314 return false;
3315 }
3316 if (is_gimple_omp_offloaded (ctx->stmt))
3317 {
3318 /* No GIMPLE_OMP_TARGET inside offloaded OpenACC CTX. */
3319 if (is_gimple_omp_oacc (ctx->stmt))
3320 {
3321 error_at (gimple_location (stmt),
3322 "%qs construct inside of %qs region",
3323 stmt_name, ctx_stmt_name);
3324 return false;
3325 }
3326 else
3327 {
3328 warning_at (gimple_location (stmt), 0,
3329 "%qs construct inside of %qs region",
3330 stmt_name, ctx_stmt_name);
3331 }
3332 }
3333 }
3334 break;
3335 default:
3336 break;
3337 }
3338 return true;
3339 }
3340
3341
3342 /* Helper function scan_omp.
3343
3344 Callback for walk_tree or operators in walk_gimple_stmt used to
3345 scan for OMP directives in TP. */
3346
3347 static tree
3348 scan_omp_1_op (tree *tp, int *walk_subtrees, void *data)
3349 {
3350 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
3351 omp_context *ctx = (omp_context *) wi->info;
3352 tree t = *tp;
3353
3354 switch (TREE_CODE (t))
3355 {
3356 case VAR_DECL:
3357 case PARM_DECL:
3358 case LABEL_DECL:
3359 case RESULT_DECL:
3360 if (ctx)
3361 {
3362 tree repl = remap_decl (t, &ctx->cb);
3363 gcc_checking_assert (TREE_CODE (repl) != ERROR_MARK);
3364 *tp = repl;
3365 }
3366 break;
3367
3368 default:
3369 if (ctx && TYPE_P (t))
3370 *tp = remap_type (t, &ctx->cb);
3371 else if (!DECL_P (t))
3372 {
3373 *walk_subtrees = 1;
3374 if (ctx)
3375 {
3376 tree tem = remap_type (TREE_TYPE (t), &ctx->cb);
3377 if (tem != TREE_TYPE (t))
3378 {
3379 if (TREE_CODE (t) == INTEGER_CST)
3380 *tp = wide_int_to_tree (tem, wi::to_wide (t));
3381 else
3382 TREE_TYPE (t) = tem;
3383 }
3384 }
3385 }
3386 break;
3387 }
3388
3389 return NULL_TREE;
3390 }
3391
3392 /* Return true if FNDECL is a setjmp or a longjmp. */
3393
3394 static bool
3395 setjmp_or_longjmp_p (const_tree fndecl)
3396 {
3397 if (fndecl_built_in_p (fndecl, BUILT_IN_SETJMP)
3398 || fndecl_built_in_p (fndecl, BUILT_IN_LONGJMP))
3399 return true;
3400
3401 tree declname = DECL_NAME (fndecl);
3402 if (!declname
3403 || (DECL_CONTEXT (fndecl) != NULL_TREE
3404 && TREE_CODE (DECL_CONTEXT (fndecl)) != TRANSLATION_UNIT_DECL)
3405 || !TREE_PUBLIC (fndecl))
3406 return false;
3407
3408 const char *name = IDENTIFIER_POINTER (declname);
3409 return !strcmp (name, "setjmp") || !strcmp (name, "longjmp");
3410 }
3411
3412 /* Return true if FNDECL is an omp_* runtime API call. */
3413
3414 static bool
3415 omp_runtime_api_call (const_tree fndecl)
3416 {
3417 tree declname = DECL_NAME (fndecl);
3418 if (!declname
3419 || (DECL_CONTEXT (fndecl) != NULL_TREE
3420 && TREE_CODE (DECL_CONTEXT (fndecl)) != TRANSLATION_UNIT_DECL)
3421 || !TREE_PUBLIC (fndecl))
3422 return false;
3423
3424 const char *name = IDENTIFIER_POINTER (declname);
3425 if (strncmp (name, "omp_", 4) != 0)
3426 return false;
3427
3428 static const char *omp_runtime_apis[] =
3429 {
3430 /* This array has 3 sections. First omp_* calls that don't
3431 have any suffixes. */
3432 "target_alloc",
3433 "target_associate_ptr",
3434 "target_disassociate_ptr",
3435 "target_free",
3436 "target_is_present",
3437 "target_memcpy",
3438 "target_memcpy_rect",
3439 NULL,
3440 /* Now omp_* calls that are available as omp_* and omp_*_. */
3441 "capture_affinity",
3442 "destroy_lock",
3443 "destroy_nest_lock",
3444 "display_affinity",
3445 "get_active_level",
3446 "get_affinity_format",
3447 "get_cancellation",
3448 "get_default_device",
3449 "get_dynamic",
3450 "get_initial_device",
3451 "get_level",
3452 "get_max_active_levels",
3453 "get_max_task_priority",
3454 "get_max_threads",
3455 "get_nested",
3456 "get_num_devices",
3457 "get_num_places",
3458 "get_num_procs",
3459 "get_num_teams",
3460 "get_num_threads",
3461 "get_partition_num_places",
3462 "get_place_num",
3463 "get_proc_bind",
3464 "get_team_num",
3465 "get_thread_limit",
3466 "get_thread_num",
3467 "get_wtick",
3468 "get_wtime",
3469 "in_final",
3470 "in_parallel",
3471 "init_lock",
3472 "init_nest_lock",
3473 "is_initial_device",
3474 "pause_resource",
3475 "pause_resource_all",
3476 "set_affinity_format",
3477 "set_lock",
3478 "set_nest_lock",
3479 "test_lock",
3480 "test_nest_lock",
3481 "unset_lock",
3482 "unset_nest_lock",
3483 NULL,
3484 /* And finally calls available as omp_*, omp_*_ and omp_*_8_. */
3485 "get_ancestor_thread_num",
3486 "get_partition_place_nums",
3487 "get_place_num_procs",
3488 "get_place_proc_ids",
3489 "get_schedule",
3490 "get_team_size",
3491 "set_default_device",
3492 "set_dynamic",
3493 "set_max_active_levels",
3494 "set_nested",
3495 "set_num_threads",
3496 "set_schedule"
3497 };
3498
3499 int mode = 0;
3500 for (unsigned i = 0; i < ARRAY_SIZE (omp_runtime_apis); i++)
3501 {
3502 if (omp_runtime_apis[i] == NULL)
3503 {
3504 mode++;
3505 continue;
3506 }
3507 size_t len = strlen (omp_runtime_apis[i]);
3508 if (strncmp (name + 4, omp_runtime_apis[i], len) == 0
3509 && (name[4 + len] == '\0'
3510 || (mode > 0
3511 && name[4 + len] == '_'
3512 && (name[4 + len + 1] == '\0'
3513 || (mode > 1
3514 && strcmp (name + 4 + len + 1, "8_") == 0)))))
3515 return true;
3516 }
3517 return false;
3518 }
3519
3520 /* Helper function for scan_omp.
3521
3522 Callback for walk_gimple_stmt used to scan for OMP directives in
3523 the current statement in GSI. */
3524
3525 static tree
3526 scan_omp_1_stmt (gimple_stmt_iterator *gsi, bool *handled_ops_p,
3527 struct walk_stmt_info *wi)
3528 {
3529 gimple *stmt = gsi_stmt (*gsi);
3530 omp_context *ctx = (omp_context *) wi->info;
3531
3532 if (gimple_has_location (stmt))
3533 input_location = gimple_location (stmt);
3534
3535 /* Check the nesting restrictions. */
3536 bool remove = false;
3537 if (is_gimple_omp (stmt))
3538 remove = !check_omp_nesting_restrictions (stmt, ctx);
3539 else if (is_gimple_call (stmt))
3540 {
3541 tree fndecl = gimple_call_fndecl (stmt);
3542 if (fndecl)
3543 {
3544 if (ctx
3545 && gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
3546 && gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_SIMD
3547 && setjmp_or_longjmp_p (fndecl)
3548 && !ctx->loop_p)
3549 {
3550 remove = true;
3551 error_at (gimple_location (stmt),
3552 "setjmp/longjmp inside %<simd%> construct");
3553 }
3554 else if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
3555 switch (DECL_FUNCTION_CODE (fndecl))
3556 {
3557 case BUILT_IN_GOMP_BARRIER:
3558 case BUILT_IN_GOMP_CANCEL:
3559 case BUILT_IN_GOMP_CANCELLATION_POINT:
3560 case BUILT_IN_GOMP_TASKYIELD:
3561 case BUILT_IN_GOMP_TASKWAIT:
3562 case BUILT_IN_GOMP_TASKGROUP_START:
3563 case BUILT_IN_GOMP_TASKGROUP_END:
3564 remove = !check_omp_nesting_restrictions (stmt, ctx);
3565 break;
3566 default:
3567 break;
3568 }
3569 else if (ctx)
3570 {
3571 omp_context *octx = ctx;
3572 if (gimple_code (ctx->stmt) == GIMPLE_OMP_SCAN && ctx->outer)
3573 octx = ctx->outer;
3574 if (octx->order_concurrent && omp_runtime_api_call (fndecl))
3575 {
3576 remove = true;
3577 error_at (gimple_location (stmt),
3578 "OpenMP runtime API call %qD in a region with "
3579 "%<order(concurrent)%> clause", fndecl);
3580 }
3581 }
3582 }
3583 }
3584 if (remove)
3585 {
3586 stmt = gimple_build_nop ();
3587 gsi_replace (gsi, stmt, false);
3588 }
3589
3590 *handled_ops_p = true;
3591
3592 switch (gimple_code (stmt))
3593 {
3594 case GIMPLE_OMP_PARALLEL:
3595 taskreg_nesting_level++;
3596 scan_omp_parallel (gsi, ctx);
3597 taskreg_nesting_level--;
3598 break;
3599
3600 case GIMPLE_OMP_TASK:
3601 taskreg_nesting_level++;
3602 scan_omp_task (gsi, ctx);
3603 taskreg_nesting_level--;
3604 break;
3605
3606 case GIMPLE_OMP_FOR:
3607 if ((gimple_omp_for_kind (as_a <gomp_for *> (stmt))
3608 == GF_OMP_FOR_KIND_SIMD)
3609 && gimple_omp_for_combined_into_p (stmt)
3610 && gimple_code (ctx->stmt) != GIMPLE_OMP_SCAN)
3611 {
3612 tree clauses = gimple_omp_for_clauses (as_a <gomp_for *> (stmt));
3613 tree c = omp_find_clause (clauses, OMP_CLAUSE_REDUCTION);
3614 if (c && OMP_CLAUSE_REDUCTION_INSCAN (c) && !seen_error ())
3615 {
3616 scan_omp_simd_scan (gsi, as_a <gomp_for *> (stmt), ctx);
3617 break;
3618 }
3619 }
3620 if ((gimple_omp_for_kind (as_a <gomp_for *> (stmt))
3621 == GF_OMP_FOR_KIND_SIMD)
3622 && omp_maybe_offloaded_ctx (ctx)
3623 && omp_max_simt_vf ())
3624 scan_omp_simd (gsi, as_a <gomp_for *> (stmt), ctx);
3625 else
3626 scan_omp_for (as_a <gomp_for *> (stmt), ctx);
3627 break;
3628
3629 case GIMPLE_OMP_SECTIONS:
3630 scan_omp_sections (as_a <gomp_sections *> (stmt), ctx);
3631 break;
3632
3633 case GIMPLE_OMP_SINGLE:
3634 scan_omp_single (as_a <gomp_single *> (stmt), ctx);
3635 break;
3636
3637 case GIMPLE_OMP_SCAN:
3638 if (tree clauses = gimple_omp_scan_clauses (as_a <gomp_scan *> (stmt)))
3639 {
3640 if (OMP_CLAUSE_CODE (clauses) == OMP_CLAUSE_INCLUSIVE)
3641 ctx->scan_inclusive = true;
3642 else if (OMP_CLAUSE_CODE (clauses) == OMP_CLAUSE_EXCLUSIVE)
3643 ctx->scan_exclusive = true;
3644 }
3645 /* FALLTHRU */
3646 case GIMPLE_OMP_SECTION:
3647 case GIMPLE_OMP_MASTER:
3648 case GIMPLE_OMP_ORDERED:
3649 case GIMPLE_OMP_CRITICAL:
3650 case GIMPLE_OMP_GRID_BODY:
3651 ctx = new_omp_context (stmt, ctx);
3652 scan_omp (gimple_omp_body_ptr (stmt), ctx);
3653 break;
3654
3655 case GIMPLE_OMP_TASKGROUP:
3656 ctx = new_omp_context (stmt, ctx);
3657 scan_sharing_clauses (gimple_omp_taskgroup_clauses (stmt), ctx);
3658 scan_omp (gimple_omp_body_ptr (stmt), ctx);
3659 break;
3660
3661 case GIMPLE_OMP_TARGET:
3662 scan_omp_target (as_a <gomp_target *> (stmt), ctx);
3663 break;
3664
3665 case GIMPLE_OMP_TEAMS:
3666 if (gimple_omp_teams_host (as_a <gomp_teams *> (stmt)))
3667 {
3668 taskreg_nesting_level++;
3669 scan_omp_teams (as_a <gomp_teams *> (stmt), ctx);
3670 taskreg_nesting_level--;
3671 }
3672 else
3673 scan_omp_teams (as_a <gomp_teams *> (stmt), ctx);
3674 break;
3675
3676 case GIMPLE_BIND:
3677 {
3678 tree var;
3679
3680 *handled_ops_p = false;
3681 if (ctx)
3682 for (var = gimple_bind_vars (as_a <gbind *> (stmt));
3683 var ;
3684 var = DECL_CHAIN (var))
3685 insert_decl_map (&ctx->cb, var, var);
3686 }
3687 break;
3688 default:
3689 *handled_ops_p = false;
3690 break;
3691 }
3692
3693 return NULL_TREE;
3694 }
3695
3696
3697 /* Scan all the statements starting at the current statement. CTX
3698 contains context information about the OMP directives and
3699 clauses found during the scan. */
3700
3701 static void
3702 scan_omp (gimple_seq *body_p, omp_context *ctx)
3703 {
3704 location_t saved_location;
3705 struct walk_stmt_info wi;
3706
3707 memset (&wi, 0, sizeof (wi));
3708 wi.info = ctx;
3709 wi.want_locations = true;
3710
3711 saved_location = input_location;
3712 walk_gimple_seq_mod (body_p, scan_omp_1_stmt, scan_omp_1_op, &wi);
3713 input_location = saved_location;
3714 }
3715 \f
3716 /* Re-gimplification and code generation routines. */
3717
3718 /* Remove omp_member_access_dummy_var variables from gimple_bind_vars
3719 of BIND if in a method. */
3720
3721 static void
3722 maybe_remove_omp_member_access_dummy_vars (gbind *bind)
3723 {
3724 if (DECL_ARGUMENTS (current_function_decl)
3725 && DECL_ARTIFICIAL (DECL_ARGUMENTS (current_function_decl))
3726 && (TREE_CODE (TREE_TYPE (DECL_ARGUMENTS (current_function_decl)))
3727 == POINTER_TYPE))
3728 {
3729 tree vars = gimple_bind_vars (bind);
3730 for (tree *pvar = &vars; *pvar; )
3731 if (omp_member_access_dummy_var (*pvar))
3732 *pvar = DECL_CHAIN (*pvar);
3733 else
3734 pvar = &DECL_CHAIN (*pvar);
3735 gimple_bind_set_vars (bind, vars);
3736 }
3737 }
3738
3739 /* Remove omp_member_access_dummy_var variables from BLOCK_VARS of
3740 block and its subblocks. */
3741
3742 static void
3743 remove_member_access_dummy_vars (tree block)
3744 {
3745 for (tree *pvar = &BLOCK_VARS (block); *pvar; )
3746 if (omp_member_access_dummy_var (*pvar))
3747 *pvar = DECL_CHAIN (*pvar);
3748 else
3749 pvar = &DECL_CHAIN (*pvar);
3750
3751 for (block = BLOCK_SUBBLOCKS (block); block; block = BLOCK_CHAIN (block))
3752 remove_member_access_dummy_vars (block);
3753 }
3754
3755 /* If a context was created for STMT when it was scanned, return it. */
3756
3757 static omp_context *
3758 maybe_lookup_ctx (gimple *stmt)
3759 {
3760 splay_tree_node n;
3761 n = splay_tree_lookup (all_contexts, (splay_tree_key) stmt);
3762 return n ? (omp_context *) n->value : NULL;
3763 }
3764
3765
3766 /* Find the mapping for DECL in CTX or the immediately enclosing
3767 context that has a mapping for DECL.
3768
3769 If CTX is a nested parallel directive, we may have to use the decl
3770 mappings created in CTX's parent context. Suppose that we have the
3771 following parallel nesting (variable UIDs showed for clarity):
3772
3773 iD.1562 = 0;
3774 #omp parallel shared(iD.1562) -> outer parallel
3775 iD.1562 = iD.1562 + 1;
3776
3777 #omp parallel shared (iD.1562) -> inner parallel
3778 iD.1562 = iD.1562 - 1;
3779
3780 Each parallel structure will create a distinct .omp_data_s structure
3781 for copying iD.1562 in/out of the directive:
3782
3783 outer parallel .omp_data_s.1.i -> iD.1562
3784 inner parallel .omp_data_s.2.i -> iD.1562
3785
3786 A shared variable mapping will produce a copy-out operation before
3787 the parallel directive and a copy-in operation after it. So, in
3788 this case we would have:
3789
3790 iD.1562 = 0;
3791 .omp_data_o.1.i = iD.1562;
3792 #omp parallel shared(iD.1562) -> outer parallel
3793 .omp_data_i.1 = &.omp_data_o.1
3794 .omp_data_i.1->i = .omp_data_i.1->i + 1;
3795
3796 .omp_data_o.2.i = iD.1562; -> **
3797 #omp parallel shared(iD.1562) -> inner parallel
3798 .omp_data_i.2 = &.omp_data_o.2
3799 .omp_data_i.2->i = .omp_data_i.2->i - 1;
3800
3801
3802 ** This is a problem. The symbol iD.1562 cannot be referenced
3803 inside the body of the outer parallel region. But since we are
3804 emitting this copy operation while expanding the inner parallel
3805 directive, we need to access the CTX structure of the outer
3806 parallel directive to get the correct mapping:
3807
3808 .omp_data_o.2.i = .omp_data_i.1->i
3809
3810 Since there may be other workshare or parallel directives enclosing
3811 the parallel directive, it may be necessary to walk up the context
3812 parent chain. This is not a problem in general because nested
3813 parallelism happens only rarely. */
3814
3815 static tree
3816 lookup_decl_in_outer_ctx (tree decl, omp_context *ctx)
3817 {
3818 tree t;
3819 omp_context *up;
3820
3821 for (up = ctx->outer, t = NULL; up && t == NULL; up = up->outer)
3822 t = maybe_lookup_decl (decl, up);
3823
3824 gcc_assert (!ctx->is_nested || t || is_global_var (decl));
3825
3826 return t ? t : decl;
3827 }
3828
3829
3830 /* Similar to lookup_decl_in_outer_ctx, but return DECL if not found
3831 in outer contexts. */
3832
3833 static tree
3834 maybe_lookup_decl_in_outer_ctx (tree decl, omp_context *ctx)
3835 {
3836 tree t = NULL;
3837 omp_context *up;
3838
3839 for (up = ctx->outer, t = NULL; up && t == NULL; up = up->outer)
3840 t = maybe_lookup_decl (decl, up);
3841
3842 return t ? t : decl;
3843 }
3844
3845
3846 /* Construct the initialization value for reduction operation OP. */
3847
3848 tree
3849 omp_reduction_init_op (location_t loc, enum tree_code op, tree type)
3850 {
3851 switch (op)
3852 {
3853 case PLUS_EXPR:
3854 case MINUS_EXPR:
3855 case BIT_IOR_EXPR:
3856 case BIT_XOR_EXPR:
3857 case TRUTH_OR_EXPR:
3858 case TRUTH_ORIF_EXPR:
3859 case TRUTH_XOR_EXPR:
3860 case NE_EXPR:
3861 return build_zero_cst (type);
3862
3863 case MULT_EXPR:
3864 case TRUTH_AND_EXPR:
3865 case TRUTH_ANDIF_EXPR:
3866 case EQ_EXPR:
3867 return fold_convert_loc (loc, type, integer_one_node);
3868
3869 case BIT_AND_EXPR:
3870 return fold_convert_loc (loc, type, integer_minus_one_node);
3871
3872 case MAX_EXPR:
3873 if (SCALAR_FLOAT_TYPE_P (type))
3874 {
3875 REAL_VALUE_TYPE max, min;
3876 if (HONOR_INFINITIES (type))
3877 {
3878 real_inf (&max);
3879 real_arithmetic (&min, NEGATE_EXPR, &max, NULL);
3880 }
3881 else
3882 real_maxval (&min, 1, TYPE_MODE (type));
3883 return build_real (type, min);
3884 }
3885 else if (POINTER_TYPE_P (type))
3886 {
3887 wide_int min
3888 = wi::min_value (TYPE_PRECISION (type), TYPE_SIGN (type));
3889 return wide_int_to_tree (type, min);
3890 }
3891 else
3892 {
3893 gcc_assert (INTEGRAL_TYPE_P (type));
3894 return TYPE_MIN_VALUE (type);
3895 }
3896
3897 case MIN_EXPR:
3898 if (SCALAR_FLOAT_TYPE_P (type))
3899 {
3900 REAL_VALUE_TYPE max;
3901 if (HONOR_INFINITIES (type))
3902 real_inf (&max);
3903 else
3904 real_maxval (&max, 0, TYPE_MODE (type));
3905 return build_real (type, max);
3906 }
3907 else if (POINTER_TYPE_P (type))
3908 {
3909 wide_int max
3910 = wi::max_value (TYPE_PRECISION (type), TYPE_SIGN (type));
3911 return wide_int_to_tree (type, max);
3912 }
3913 else
3914 {
3915 gcc_assert (INTEGRAL_TYPE_P (type));
3916 return TYPE_MAX_VALUE (type);
3917 }
3918
3919 default:
3920 gcc_unreachable ();
3921 }
3922 }
3923
3924 /* Construct the initialization value for reduction CLAUSE. */
3925
3926 tree
3927 omp_reduction_init (tree clause, tree type)
3928 {
3929 return omp_reduction_init_op (OMP_CLAUSE_LOCATION (clause),
3930 OMP_CLAUSE_REDUCTION_CODE (clause), type);
3931 }
3932
3933 /* Return alignment to be assumed for var in CLAUSE, which should be
3934 OMP_CLAUSE_ALIGNED. */
3935
3936 static tree
3937 omp_clause_aligned_alignment (tree clause)
3938 {
3939 if (OMP_CLAUSE_ALIGNED_ALIGNMENT (clause))
3940 return OMP_CLAUSE_ALIGNED_ALIGNMENT (clause);
3941
3942 /* Otherwise return implementation defined alignment. */
3943 unsigned int al = 1;
3944 opt_scalar_mode mode_iter;
3945 auto_vector_sizes sizes;
3946 targetm.vectorize.autovectorize_vector_sizes (&sizes, true);
3947 poly_uint64 vs = 0;
3948 for (unsigned int i = 0; i < sizes.length (); ++i)
3949 vs = ordered_max (vs, sizes[i]);
3950 static enum mode_class classes[]
3951 = { MODE_INT, MODE_VECTOR_INT, MODE_FLOAT, MODE_VECTOR_FLOAT };
3952 for (int i = 0; i < 4; i += 2)
3953 /* The for loop above dictates that we only walk through scalar classes. */
3954 FOR_EACH_MODE_IN_CLASS (mode_iter, classes[i])
3955 {
3956 scalar_mode mode = mode_iter.require ();
3957 machine_mode vmode = targetm.vectorize.preferred_simd_mode (mode);
3958 if (GET_MODE_CLASS (vmode) != classes[i + 1])
3959 continue;
3960 while (maybe_ne (vs, 0U)
3961 && known_lt (GET_MODE_SIZE (vmode), vs)
3962 && GET_MODE_2XWIDER_MODE (vmode).exists ())
3963 vmode = GET_MODE_2XWIDER_MODE (vmode).require ();
3964
3965 tree type = lang_hooks.types.type_for_mode (mode, 1);
3966 if (type == NULL_TREE || TYPE_MODE (type) != mode)
3967 continue;
3968 poly_uint64 nelts = exact_div (GET_MODE_SIZE (vmode),
3969 GET_MODE_SIZE (mode));
3970 type = build_vector_type (type, nelts);
3971 if (TYPE_MODE (type) != vmode)
3972 continue;
3973 if (TYPE_ALIGN_UNIT (type) > al)
3974 al = TYPE_ALIGN_UNIT (type);
3975 }
3976 return build_int_cst (integer_type_node, al);
3977 }
3978
3979
3980 /* This structure is part of the interface between lower_rec_simd_input_clauses
3981 and lower_rec_input_clauses. */
3982
3983 class omplow_simd_context {
3984 public:
3985 omplow_simd_context () { memset (this, 0, sizeof (*this)); }
3986 tree idx;
3987 tree lane;
3988 tree lastlane;
3989 vec<tree, va_heap> simt_eargs;
3990 gimple_seq simt_dlist;
3991 poly_uint64_pod max_vf;
3992 bool is_simt;
3993 };
3994
3995 /* Helper function of lower_rec_input_clauses, used for #pragma omp simd
3996 privatization. */
3997
3998 static bool
3999 lower_rec_simd_input_clauses (tree new_var, omp_context *ctx,
4000 omplow_simd_context *sctx, tree &ivar,
4001 tree &lvar, tree *rvar = NULL,
4002 tree *rvar2 = NULL)
4003 {
4004 if (known_eq (sctx->max_vf, 0U))
4005 {
4006 sctx->max_vf = sctx->is_simt ? omp_max_simt_vf () : omp_max_vf ();
4007 if (maybe_gt (sctx->max_vf, 1U))
4008 {
4009 tree c = omp_find_clause (gimple_omp_for_clauses (ctx->stmt),
4010 OMP_CLAUSE_SAFELEN);
4011 if (c)
4012 {
4013 poly_uint64 safe_len;
4014 if (!poly_int_tree_p (OMP_CLAUSE_SAFELEN_EXPR (c), &safe_len)
4015 || maybe_lt (safe_len, 1U))
4016 sctx->max_vf = 1;
4017 else
4018 sctx->max_vf = lower_bound (sctx->max_vf, safe_len);
4019 }
4020 }
4021 if (maybe_gt (sctx->max_vf, 1U))
4022 {
4023 sctx->idx = create_tmp_var (unsigned_type_node);
4024 sctx->lane = create_tmp_var (unsigned_type_node);
4025 }
4026 }
4027 if (known_eq (sctx->max_vf, 1U))
4028 return false;
4029
4030 if (sctx->is_simt)
4031 {
4032 if (is_gimple_reg (new_var))
4033 {
4034 ivar = lvar = new_var;
4035 return true;
4036 }
4037 tree type = TREE_TYPE (new_var), ptype = build_pointer_type (type);
4038 ivar = lvar = create_tmp_var (type);
4039 TREE_ADDRESSABLE (ivar) = 1;
4040 DECL_ATTRIBUTES (ivar) = tree_cons (get_identifier ("omp simt private"),
4041 NULL, DECL_ATTRIBUTES (ivar));
4042 sctx->simt_eargs.safe_push (build1 (ADDR_EXPR, ptype, ivar));
4043 tree clobber = build_constructor (type, NULL);
4044 TREE_THIS_VOLATILE (clobber) = 1;
4045 gimple *g = gimple_build_assign (ivar, clobber);
4046 gimple_seq_add_stmt (&sctx->simt_dlist, g);
4047 }
4048 else
4049 {
4050 tree atype = build_array_type_nelts (TREE_TYPE (new_var), sctx->max_vf);
4051 tree avar = create_tmp_var_raw (atype);
4052 if (TREE_ADDRESSABLE (new_var))
4053 TREE_ADDRESSABLE (avar) = 1;
4054 DECL_ATTRIBUTES (avar)
4055 = tree_cons (get_identifier ("omp simd array"), NULL,
4056 DECL_ATTRIBUTES (avar));
4057 gimple_add_tmp_var (avar);
4058 tree iavar = avar;
4059 if (rvar && !ctx->for_simd_scan_phase)
4060 {
4061 /* For inscan reductions, create another array temporary,
4062 which will hold the reduced value. */
4063 iavar = create_tmp_var_raw (atype);
4064 if (TREE_ADDRESSABLE (new_var))
4065 TREE_ADDRESSABLE (iavar) = 1;
4066 DECL_ATTRIBUTES (iavar)
4067 = tree_cons (get_identifier ("omp simd array"), NULL,
4068 tree_cons (get_identifier ("omp simd inscan"), NULL,
4069 DECL_ATTRIBUTES (iavar)));
4070 gimple_add_tmp_var (iavar);
4071 ctx->cb.decl_map->put (avar, iavar);
4072 if (sctx->lastlane == NULL_TREE)
4073 sctx->lastlane = create_tmp_var (unsigned_type_node);
4074 *rvar = build4 (ARRAY_REF, TREE_TYPE (new_var), iavar,
4075 sctx->lastlane, NULL_TREE, NULL_TREE);
4076 TREE_THIS_NOTRAP (*rvar) = 1;
4077
4078 if (ctx->scan_exclusive)
4079 {
4080 /* And for exclusive scan yet another one, which will
4081 hold the value during the scan phase. */
4082 tree savar = create_tmp_var_raw (atype);
4083 if (TREE_ADDRESSABLE (new_var))
4084 TREE_ADDRESSABLE (savar) = 1;
4085 DECL_ATTRIBUTES (savar)
4086 = tree_cons (get_identifier ("omp simd array"), NULL,
4087 tree_cons (get_identifier ("omp simd inscan "
4088 "exclusive"), NULL,
4089 DECL_ATTRIBUTES (savar)));
4090 gimple_add_tmp_var (savar);
4091 ctx->cb.decl_map->put (iavar, savar);
4092 *rvar2 = build4 (ARRAY_REF, TREE_TYPE (new_var), savar,
4093 sctx->idx, NULL_TREE, NULL_TREE);
4094 TREE_THIS_NOTRAP (*rvar2) = 1;
4095 }
4096 }
4097 ivar = build4 (ARRAY_REF, TREE_TYPE (new_var), iavar, sctx->idx,
4098 NULL_TREE, NULL_TREE);
4099 lvar = build4 (ARRAY_REF, TREE_TYPE (new_var), avar, sctx->lane,
4100 NULL_TREE, NULL_TREE);
4101 TREE_THIS_NOTRAP (ivar) = 1;
4102 TREE_THIS_NOTRAP (lvar) = 1;
4103 }
4104 if (DECL_P (new_var))
4105 {
4106 SET_DECL_VALUE_EXPR (new_var, lvar);
4107 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
4108 }
4109 return true;
4110 }
4111
4112 /* Helper function of lower_rec_input_clauses. For a reference
4113 in simd reduction, add an underlying variable it will reference. */
4114
4115 static void
4116 handle_simd_reference (location_t loc, tree new_vard, gimple_seq *ilist)
4117 {
4118 tree z = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (new_vard)));
4119 if (TREE_CONSTANT (z))
4120 {
4121 z = create_tmp_var_raw (TREE_TYPE (TREE_TYPE (new_vard)),
4122 get_name (new_vard));
4123 gimple_add_tmp_var (z);
4124 TREE_ADDRESSABLE (z) = 1;
4125 z = build_fold_addr_expr_loc (loc, z);
4126 gimplify_assign (new_vard, z, ilist);
4127 }
4128 }
4129
4130 /* Helper function for lower_rec_input_clauses. Emit into ilist sequence
4131 code to emit (type) (tskred_temp[idx]). */
4132
4133 static tree
4134 task_reduction_read (gimple_seq *ilist, tree tskred_temp, tree type,
4135 unsigned idx)
4136 {
4137 unsigned HOST_WIDE_INT sz
4138 = tree_to_uhwi (TYPE_SIZE_UNIT (pointer_sized_int_node));
4139 tree r = build2 (MEM_REF, pointer_sized_int_node,
4140 tskred_temp, build_int_cst (TREE_TYPE (tskred_temp),
4141 idx * sz));
4142 tree v = create_tmp_var (pointer_sized_int_node);
4143 gimple *g = gimple_build_assign (v, r);
4144 gimple_seq_add_stmt (ilist, g);
4145 if (!useless_type_conversion_p (type, pointer_sized_int_node))
4146 {
4147 v = create_tmp_var (type);
4148 g = gimple_build_assign (v, NOP_EXPR, gimple_assign_lhs (g));
4149 gimple_seq_add_stmt (ilist, g);
4150 }
4151 return v;
4152 }
4153
4154 /* Generate code to implement the input clauses, FIRSTPRIVATE and COPYIN,
4155 from the receiver (aka child) side and initializers for REFERENCE_TYPE
4156 private variables. Initialization statements go in ILIST, while calls
4157 to destructors go in DLIST. */
4158
4159 static void
4160 lower_rec_input_clauses (tree clauses, gimple_seq *ilist, gimple_seq *dlist,
4161 omp_context *ctx, struct omp_for_data *fd)
4162 {
4163 tree c, copyin_seq, x, ptr;
4164 bool copyin_by_ref = false;
4165 bool lastprivate_firstprivate = false;
4166 bool reduction_omp_orig_ref = false;
4167 int pass;
4168 bool is_simd = (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
4169 && gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_SIMD);
4170 omplow_simd_context sctx = omplow_simd_context ();
4171 tree simt_lane = NULL_TREE, simtrec = NULL_TREE;
4172 tree ivar = NULL_TREE, lvar = NULL_TREE, uid = NULL_TREE;
4173 gimple_seq llist[4] = { };
4174 tree nonconst_simd_if = NULL_TREE;
4175
4176 copyin_seq = NULL;
4177 sctx.is_simt = is_simd && omp_find_clause (clauses, OMP_CLAUSE__SIMT_);
4178
4179 /* Set max_vf=1 (which will later enforce safelen=1) in simd loops
4180 with data sharing clauses referencing variable sized vars. That
4181 is unnecessarily hard to support and very unlikely to result in
4182 vectorized code anyway. */
4183 if (is_simd)
4184 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
4185 switch (OMP_CLAUSE_CODE (c))
4186 {
4187 case OMP_CLAUSE_LINEAR:
4188 if (OMP_CLAUSE_LINEAR_ARRAY (c))
4189 sctx.max_vf = 1;
4190 /* FALLTHRU */
4191 case OMP_CLAUSE_PRIVATE:
4192 case OMP_CLAUSE_FIRSTPRIVATE:
4193 case OMP_CLAUSE_LASTPRIVATE:
4194 if (is_variable_sized (OMP_CLAUSE_DECL (c)))
4195 sctx.max_vf = 1;
4196 else if (omp_is_reference (OMP_CLAUSE_DECL (c)))
4197 {
4198 tree rtype = TREE_TYPE (TREE_TYPE (OMP_CLAUSE_DECL (c)));
4199 if (!TREE_CONSTANT (TYPE_SIZE_UNIT (rtype)))
4200 sctx.max_vf = 1;
4201 }
4202 break;
4203 case OMP_CLAUSE_REDUCTION:
4204 case OMP_CLAUSE_IN_REDUCTION:
4205 if (TREE_CODE (OMP_CLAUSE_DECL (c)) == MEM_REF
4206 || is_variable_sized (OMP_CLAUSE_DECL (c)))
4207 sctx.max_vf = 1;
4208 else if (omp_is_reference (OMP_CLAUSE_DECL (c)))
4209 {
4210 tree rtype = TREE_TYPE (TREE_TYPE (OMP_CLAUSE_DECL (c)));
4211 if (!TREE_CONSTANT (TYPE_SIZE_UNIT (rtype)))
4212 sctx.max_vf = 1;
4213 }
4214 break;
4215 case OMP_CLAUSE_IF:
4216 if (integer_zerop (OMP_CLAUSE_IF_EXPR (c)))
4217 sctx.max_vf = 1;
4218 else if (TREE_CODE (OMP_CLAUSE_IF_EXPR (c)) != INTEGER_CST)
4219 nonconst_simd_if = OMP_CLAUSE_IF_EXPR (c);
4220 break;
4221 case OMP_CLAUSE_SIMDLEN:
4222 if (integer_onep (OMP_CLAUSE_SIMDLEN_EXPR (c)))
4223 sctx.max_vf = 1;
4224 break;
4225 case OMP_CLAUSE__CONDTEMP_:
4226 /* FIXME: lastprivate(conditional:) not handled for SIMT yet. */
4227 if (sctx.is_simt)
4228 sctx.max_vf = 1;
4229 break;
4230 default:
4231 continue;
4232 }
4233
4234 /* Add a placeholder for simduid. */
4235 if (sctx.is_simt && maybe_ne (sctx.max_vf, 1U))
4236 sctx.simt_eargs.safe_push (NULL_TREE);
4237
4238 unsigned task_reduction_cnt = 0;
4239 unsigned task_reduction_cntorig = 0;
4240 unsigned task_reduction_cnt_full = 0;
4241 unsigned task_reduction_cntorig_full = 0;
4242 unsigned task_reduction_other_cnt = 0;
4243 tree tskred_atype = NULL_TREE, tskred_avar = NULL_TREE;
4244 tree tskred_base = NULL_TREE, tskred_temp = NULL_TREE;
4245 /* Do all the fixed sized types in the first pass, and the variable sized
4246 types in the second pass. This makes sure that the scalar arguments to
4247 the variable sized types are processed before we use them in the
4248 variable sized operations. For task reductions we use 4 passes, in the
4249 first two we ignore them, in the third one gather arguments for
4250 GOMP_task_reduction_remap call and in the last pass actually handle
4251 the task reductions. */
4252 for (pass = 0; pass < ((task_reduction_cnt || task_reduction_other_cnt)
4253 ? 4 : 2); ++pass)
4254 {
4255 if (pass == 2 && task_reduction_cnt)
4256 {
4257 tskred_atype
4258 = build_array_type_nelts (ptr_type_node, task_reduction_cnt
4259 + task_reduction_cntorig);
4260 tskred_avar = create_tmp_var_raw (tskred_atype);
4261 gimple_add_tmp_var (tskred_avar);
4262 TREE_ADDRESSABLE (tskred_avar) = 1;
4263 task_reduction_cnt_full = task_reduction_cnt;
4264 task_reduction_cntorig_full = task_reduction_cntorig;
4265 }
4266 else if (pass == 3 && task_reduction_cnt)
4267 {
4268 x = builtin_decl_explicit (BUILT_IN_GOMP_TASK_REDUCTION_REMAP);
4269 gimple *g
4270 = gimple_build_call (x, 3, size_int (task_reduction_cnt),
4271 size_int (task_reduction_cntorig),
4272 build_fold_addr_expr (tskred_avar));
4273 gimple_seq_add_stmt (ilist, g);
4274 }
4275 if (pass == 3 && task_reduction_other_cnt)
4276 {
4277 /* For reduction clauses, build
4278 tskred_base = (void *) tskred_temp[2]
4279 + omp_get_thread_num () * tskred_temp[1]
4280 or if tskred_temp[1] is known to be constant, that constant
4281 directly. This is the start of the private reduction copy block
4282 for the current thread. */
4283 tree v = create_tmp_var (integer_type_node);
4284 x = builtin_decl_explicit (BUILT_IN_OMP_GET_THREAD_NUM);
4285 gimple *g = gimple_build_call (x, 0);
4286 gimple_call_set_lhs (g, v);
4287 gimple_seq_add_stmt (ilist, g);
4288 c = omp_find_clause (clauses, OMP_CLAUSE__REDUCTEMP_);
4289 tskred_temp = OMP_CLAUSE_DECL (c);
4290 if (is_taskreg_ctx (ctx))
4291 tskred_temp = lookup_decl (tskred_temp, ctx);
4292 tree v2 = create_tmp_var (sizetype);
4293 g = gimple_build_assign (v2, NOP_EXPR, v);
4294 gimple_seq_add_stmt (ilist, g);
4295 if (ctx->task_reductions[0])
4296 v = fold_convert (sizetype, ctx->task_reductions[0]);
4297 else
4298 v = task_reduction_read (ilist, tskred_temp, sizetype, 1);
4299 tree v3 = create_tmp_var (sizetype);
4300 g = gimple_build_assign (v3, MULT_EXPR, v2, v);
4301 gimple_seq_add_stmt (ilist, g);
4302 v = task_reduction_read (ilist, tskred_temp, ptr_type_node, 2);
4303 tskred_base = create_tmp_var (ptr_type_node);
4304 g = gimple_build_assign (tskred_base, POINTER_PLUS_EXPR, v, v3);
4305 gimple_seq_add_stmt (ilist, g);
4306 }
4307 task_reduction_cnt = 0;
4308 task_reduction_cntorig = 0;
4309 task_reduction_other_cnt = 0;
4310 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
4311 {
4312 enum omp_clause_code c_kind = OMP_CLAUSE_CODE (c);
4313 tree var, new_var;
4314 bool by_ref;
4315 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
4316 bool task_reduction_p = false;
4317 bool task_reduction_needs_orig_p = false;
4318 tree cond = NULL_TREE;
4319
4320 switch (c_kind)
4321 {
4322 case OMP_CLAUSE_PRIVATE:
4323 if (OMP_CLAUSE_PRIVATE_DEBUG (c))
4324 continue;
4325 break;
4326 case OMP_CLAUSE_SHARED:
4327 /* Ignore shared directives in teams construct inside
4328 of target construct. */
4329 if (gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS
4330 && !is_host_teams_ctx (ctx))
4331 continue;
4332 if (maybe_lookup_decl (OMP_CLAUSE_DECL (c), ctx) == NULL)
4333 {
4334 gcc_assert (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c)
4335 || is_global_var (OMP_CLAUSE_DECL (c)));
4336 continue;
4337 }
4338 case OMP_CLAUSE_FIRSTPRIVATE:
4339 case OMP_CLAUSE_COPYIN:
4340 break;
4341 case OMP_CLAUSE_LINEAR:
4342 if (!OMP_CLAUSE_LINEAR_NO_COPYIN (c)
4343 && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c))
4344 lastprivate_firstprivate = true;
4345 break;
4346 case OMP_CLAUSE_REDUCTION:
4347 case OMP_CLAUSE_IN_REDUCTION:
4348 if (is_task_ctx (ctx) || OMP_CLAUSE_REDUCTION_TASK (c))
4349 {
4350 task_reduction_p = true;
4351 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION)
4352 {
4353 task_reduction_other_cnt++;
4354 if (pass == 2)
4355 continue;
4356 }
4357 else
4358 task_reduction_cnt++;
4359 if (OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c))
4360 {
4361 var = OMP_CLAUSE_DECL (c);
4362 /* If var is a global variable that isn't privatized
4363 in outer contexts, we don't need to look up the
4364 original address, it is always the address of the
4365 global variable itself. */
4366 if (!DECL_P (var)
4367 || omp_is_reference (var)
4368 || !is_global_var
4369 (maybe_lookup_decl_in_outer_ctx (var, ctx)))
4370 {
4371 task_reduction_needs_orig_p = true;
4372 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION)
4373 task_reduction_cntorig++;
4374 }
4375 }
4376 }
4377 else if (OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c))
4378 reduction_omp_orig_ref = true;
4379 break;
4380 case OMP_CLAUSE__REDUCTEMP_:
4381 if (!is_taskreg_ctx (ctx))
4382 continue;
4383 /* FALLTHRU */
4384 case OMP_CLAUSE__LOOPTEMP_:
4385 /* Handle _looptemp_/_reductemp_ clauses only on
4386 parallel/task. */
4387 if (fd)
4388 continue;
4389 break;
4390 case OMP_CLAUSE_LASTPRIVATE:
4391 if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
4392 {
4393 lastprivate_firstprivate = true;
4394 if (pass != 0 || is_taskloop_ctx (ctx))
4395 continue;
4396 }
4397 /* Even without corresponding firstprivate, if
4398 decl is Fortran allocatable, it needs outer var
4399 reference. */
4400 else if (pass == 0
4401 && lang_hooks.decls.omp_private_outer_ref
4402 (OMP_CLAUSE_DECL (c)))
4403 lastprivate_firstprivate = true;
4404 break;
4405 case OMP_CLAUSE_ALIGNED:
4406 if (pass != 1)
4407 continue;
4408 var = OMP_CLAUSE_DECL (c);
4409 if (TREE_CODE (TREE_TYPE (var)) == POINTER_TYPE
4410 && !is_global_var (var))
4411 {
4412 new_var = maybe_lookup_decl (var, ctx);
4413 if (new_var == NULL_TREE)
4414 new_var = maybe_lookup_decl_in_outer_ctx (var, ctx);
4415 x = builtin_decl_explicit (BUILT_IN_ASSUME_ALIGNED);
4416 tree alarg = omp_clause_aligned_alignment (c);
4417 alarg = fold_convert_loc (clause_loc, size_type_node, alarg);
4418 x = build_call_expr_loc (clause_loc, x, 2, new_var, alarg);
4419 x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x);
4420 x = build2 (MODIFY_EXPR, TREE_TYPE (new_var), new_var, x);
4421 gimplify_and_add (x, ilist);
4422 }
4423 else if (TREE_CODE (TREE_TYPE (var)) == ARRAY_TYPE
4424 && is_global_var (var))
4425 {
4426 tree ptype = build_pointer_type (TREE_TYPE (var)), t, t2;
4427 new_var = lookup_decl (var, ctx);
4428 t = maybe_lookup_decl_in_outer_ctx (var, ctx);
4429 t = build_fold_addr_expr_loc (clause_loc, t);
4430 t2 = builtin_decl_explicit (BUILT_IN_ASSUME_ALIGNED);
4431 tree alarg = omp_clause_aligned_alignment (c);
4432 alarg = fold_convert_loc (clause_loc, size_type_node, alarg);
4433 t = build_call_expr_loc (clause_loc, t2, 2, t, alarg);
4434 t = fold_convert_loc (clause_loc, ptype, t);
4435 x = create_tmp_var (ptype);
4436 t = build2 (MODIFY_EXPR, ptype, x, t);
4437 gimplify_and_add (t, ilist);
4438 t = build_simple_mem_ref_loc (clause_loc, x);
4439 SET_DECL_VALUE_EXPR (new_var, t);
4440 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
4441 }
4442 continue;
4443 case OMP_CLAUSE__CONDTEMP_:
4444 if (is_parallel_ctx (ctx)
4445 || (is_simd && !OMP_CLAUSE__CONDTEMP__ITER (c)))
4446 break;
4447 continue;
4448 default:
4449 continue;
4450 }
4451
4452 if (task_reduction_p != (pass >= 2))
4453 continue;
4454
4455 new_var = var = OMP_CLAUSE_DECL (c);
4456 if ((c_kind == OMP_CLAUSE_REDUCTION
4457 || c_kind == OMP_CLAUSE_IN_REDUCTION)
4458 && TREE_CODE (var) == MEM_REF)
4459 {
4460 var = TREE_OPERAND (var, 0);
4461 if (TREE_CODE (var) == POINTER_PLUS_EXPR)
4462 var = TREE_OPERAND (var, 0);
4463 if (TREE_CODE (var) == INDIRECT_REF
4464 || TREE_CODE (var) == ADDR_EXPR)
4465 var = TREE_OPERAND (var, 0);
4466 if (is_variable_sized (var))
4467 {
4468 gcc_assert (DECL_HAS_VALUE_EXPR_P (var));
4469 var = DECL_VALUE_EXPR (var);
4470 gcc_assert (TREE_CODE (var) == INDIRECT_REF);
4471 var = TREE_OPERAND (var, 0);
4472 gcc_assert (DECL_P (var));
4473 }
4474 new_var = var;
4475 }
4476 if (c_kind != OMP_CLAUSE_COPYIN)
4477 new_var = lookup_decl (var, ctx);
4478
4479 if (c_kind == OMP_CLAUSE_SHARED || c_kind == OMP_CLAUSE_COPYIN)
4480 {
4481 if (pass != 0)
4482 continue;
4483 }
4484 /* C/C++ array section reductions. */
4485 else if ((c_kind == OMP_CLAUSE_REDUCTION
4486 || c_kind == OMP_CLAUSE_IN_REDUCTION)
4487 && var != OMP_CLAUSE_DECL (c))
4488 {
4489 if (pass == 0)
4490 continue;
4491
4492 tree bias = TREE_OPERAND (OMP_CLAUSE_DECL (c), 1);
4493 tree orig_var = TREE_OPERAND (OMP_CLAUSE_DECL (c), 0);
4494
4495 if (TREE_CODE (orig_var) == POINTER_PLUS_EXPR)
4496 {
4497 tree b = TREE_OPERAND (orig_var, 1);
4498 b = maybe_lookup_decl (b, ctx);
4499 if (b == NULL)
4500 {
4501 b = TREE_OPERAND (orig_var, 1);
4502 b = maybe_lookup_decl_in_outer_ctx (b, ctx);
4503 }
4504 if (integer_zerop (bias))
4505 bias = b;
4506 else
4507 {
4508 bias = fold_convert_loc (clause_loc,
4509 TREE_TYPE (b), bias);
4510 bias = fold_build2_loc (clause_loc, PLUS_EXPR,
4511 TREE_TYPE (b), b, bias);
4512 }
4513 orig_var = TREE_OPERAND (orig_var, 0);
4514 }
4515 if (pass == 2)
4516 {
4517 tree out = maybe_lookup_decl_in_outer_ctx (var, ctx);
4518 if (is_global_var (out)
4519 && TREE_CODE (TREE_TYPE (out)) != POINTER_TYPE
4520 && (TREE_CODE (TREE_TYPE (out)) != REFERENCE_TYPE
4521 || (TREE_CODE (TREE_TYPE (TREE_TYPE (out)))
4522 != POINTER_TYPE)))
4523 x = var;
4524 else
4525 {
4526 bool by_ref = use_pointer_for_field (var, NULL);
4527 x = build_receiver_ref (var, by_ref, ctx);
4528 if (TREE_CODE (TREE_TYPE (var)) == REFERENCE_TYPE
4529 && (TREE_CODE (TREE_TYPE (TREE_TYPE (var)))
4530 == POINTER_TYPE))
4531 x = build_fold_addr_expr (x);
4532 }
4533 if (TREE_CODE (orig_var) == INDIRECT_REF)
4534 x = build_simple_mem_ref (x);
4535 else if (TREE_CODE (orig_var) == ADDR_EXPR)
4536 {
4537 if (var == TREE_OPERAND (orig_var, 0))
4538 x = build_fold_addr_expr (x);
4539 }
4540 bias = fold_convert (sizetype, bias);
4541 x = fold_convert (ptr_type_node, x);
4542 x = fold_build2_loc (clause_loc, POINTER_PLUS_EXPR,
4543 TREE_TYPE (x), x, bias);
4544 unsigned cnt = task_reduction_cnt - 1;
4545 if (!task_reduction_needs_orig_p)
4546 cnt += (task_reduction_cntorig_full
4547 - task_reduction_cntorig);
4548 else
4549 cnt = task_reduction_cntorig - 1;
4550 tree r = build4 (ARRAY_REF, ptr_type_node, tskred_avar,
4551 size_int (cnt), NULL_TREE, NULL_TREE);
4552 gimplify_assign (r, x, ilist);
4553 continue;
4554 }
4555
4556 if (TREE_CODE (orig_var) == INDIRECT_REF
4557 || TREE_CODE (orig_var) == ADDR_EXPR)
4558 orig_var = TREE_OPERAND (orig_var, 0);
4559 tree d = OMP_CLAUSE_DECL (c);
4560 tree type = TREE_TYPE (d);
4561 gcc_assert (TREE_CODE (type) == ARRAY_TYPE);
4562 tree v = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
4563 const char *name = get_name (orig_var);
4564 if (pass == 3)
4565 {
4566 tree xv = create_tmp_var (ptr_type_node);
4567 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION)
4568 {
4569 unsigned cnt = task_reduction_cnt - 1;
4570 if (!task_reduction_needs_orig_p)
4571 cnt += (task_reduction_cntorig_full
4572 - task_reduction_cntorig);
4573 else
4574 cnt = task_reduction_cntorig - 1;
4575 x = build4 (ARRAY_REF, ptr_type_node, tskred_avar,
4576 size_int (cnt), NULL_TREE, NULL_TREE);
4577
4578 gimple *g = gimple_build_assign (xv, x);
4579 gimple_seq_add_stmt (ilist, g);
4580 }
4581 else
4582 {
4583 unsigned int idx = *ctx->task_reduction_map->get (c);
4584 tree off;
4585 if (ctx->task_reductions[1 + idx])
4586 off = fold_convert (sizetype,
4587 ctx->task_reductions[1 + idx]);
4588 else
4589 off = task_reduction_read (ilist, tskred_temp, sizetype,
4590 7 + 3 * idx + 1);
4591 gimple *g = gimple_build_assign (xv, POINTER_PLUS_EXPR,
4592 tskred_base, off);
4593 gimple_seq_add_stmt (ilist, g);
4594 }
4595 x = fold_convert (build_pointer_type (boolean_type_node),
4596 xv);
4597 if (TREE_CONSTANT (v))
4598 x = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (x), x,
4599 TYPE_SIZE_UNIT (type));
4600 else
4601 {
4602 tree t = maybe_lookup_decl (v, ctx);
4603 if (t)
4604 v = t;
4605 else
4606 v = maybe_lookup_decl_in_outer_ctx (v, ctx);
4607 gimplify_expr (&v, ilist, NULL, is_gimple_val,
4608 fb_rvalue);
4609 t = fold_build2_loc (clause_loc, PLUS_EXPR,
4610 TREE_TYPE (v), v,
4611 build_int_cst (TREE_TYPE (v), 1));
4612 t = fold_build2_loc (clause_loc, MULT_EXPR,
4613 TREE_TYPE (v), t,
4614 TYPE_SIZE_UNIT (TREE_TYPE (type)));
4615 x = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (x), x, t);
4616 }
4617 cond = create_tmp_var (TREE_TYPE (x));
4618 gimplify_assign (cond, x, ilist);
4619 x = xv;
4620 }
4621 else if (TREE_CONSTANT (v))
4622 {
4623 x = create_tmp_var_raw (type, name);
4624 gimple_add_tmp_var (x);
4625 TREE_ADDRESSABLE (x) = 1;
4626 x = build_fold_addr_expr_loc (clause_loc, x);
4627 }
4628 else
4629 {
4630 tree atmp
4631 = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
4632 tree t = maybe_lookup_decl (v, ctx);
4633 if (t)
4634 v = t;
4635 else
4636 v = maybe_lookup_decl_in_outer_ctx (v, ctx);
4637 gimplify_expr (&v, ilist, NULL, is_gimple_val, fb_rvalue);
4638 t = fold_build2_loc (clause_loc, PLUS_EXPR,
4639 TREE_TYPE (v), v,
4640 build_int_cst (TREE_TYPE (v), 1));
4641 t = fold_build2_loc (clause_loc, MULT_EXPR,
4642 TREE_TYPE (v), t,
4643 TYPE_SIZE_UNIT (TREE_TYPE (type)));
4644 tree al = size_int (TYPE_ALIGN (TREE_TYPE (type)));
4645 x = build_call_expr_loc (clause_loc, atmp, 2, t, al);
4646 }
4647
4648 tree ptype = build_pointer_type (TREE_TYPE (type));
4649 x = fold_convert_loc (clause_loc, ptype, x);
4650 tree y = create_tmp_var (ptype, name);
4651 gimplify_assign (y, x, ilist);
4652 x = y;
4653 tree yb = y;
4654
4655 if (!integer_zerop (bias))
4656 {
4657 bias = fold_convert_loc (clause_loc, pointer_sized_int_node,
4658 bias);
4659 yb = fold_convert_loc (clause_loc, pointer_sized_int_node,
4660 x);
4661 yb = fold_build2_loc (clause_loc, MINUS_EXPR,
4662 pointer_sized_int_node, yb, bias);
4663 x = fold_convert_loc (clause_loc, TREE_TYPE (x), yb);
4664 yb = create_tmp_var (ptype, name);
4665 gimplify_assign (yb, x, ilist);
4666 x = yb;
4667 }
4668
4669 d = TREE_OPERAND (d, 0);
4670 if (TREE_CODE (d) == POINTER_PLUS_EXPR)
4671 d = TREE_OPERAND (d, 0);
4672 if (TREE_CODE (d) == ADDR_EXPR)
4673 {
4674 if (orig_var != var)
4675 {
4676 gcc_assert (is_variable_sized (orig_var));
4677 x = fold_convert_loc (clause_loc, TREE_TYPE (new_var),
4678 x);
4679 gimplify_assign (new_var, x, ilist);
4680 tree new_orig_var = lookup_decl (orig_var, ctx);
4681 tree t = build_fold_indirect_ref (new_var);
4682 DECL_IGNORED_P (new_var) = 0;
4683 TREE_THIS_NOTRAP (t) = 1;
4684 SET_DECL_VALUE_EXPR (new_orig_var, t);
4685 DECL_HAS_VALUE_EXPR_P (new_orig_var) = 1;
4686 }
4687 else
4688 {
4689 x = build2 (MEM_REF, TREE_TYPE (new_var), x,
4690 build_int_cst (ptype, 0));
4691 SET_DECL_VALUE_EXPR (new_var, x);
4692 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
4693 }
4694 }
4695 else
4696 {
4697 gcc_assert (orig_var == var);
4698 if (TREE_CODE (d) == INDIRECT_REF)
4699 {
4700 x = create_tmp_var (ptype, name);
4701 TREE_ADDRESSABLE (x) = 1;
4702 gimplify_assign (x, yb, ilist);
4703 x = build_fold_addr_expr_loc (clause_loc, x);
4704 }
4705 x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x);
4706 gimplify_assign (new_var, x, ilist);
4707 }
4708 /* GOMP_taskgroup_reduction_register memsets the whole
4709 array to zero. If the initializer is zero, we don't
4710 need to initialize it again, just mark it as ever
4711 used unconditionally, i.e. cond = true. */
4712 if (cond
4713 && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) == NULL_TREE
4714 && initializer_zerop (omp_reduction_init (c,
4715 TREE_TYPE (type))))
4716 {
4717 gimple *g = gimple_build_assign (build_simple_mem_ref (cond),
4718 boolean_true_node);
4719 gimple_seq_add_stmt (ilist, g);
4720 continue;
4721 }
4722 tree end = create_artificial_label (UNKNOWN_LOCATION);
4723 if (cond)
4724 {
4725 gimple *g;
4726 if (!is_parallel_ctx (ctx))
4727 {
4728 tree condv = create_tmp_var (boolean_type_node);
4729 g = gimple_build_assign (condv,
4730 build_simple_mem_ref (cond));
4731 gimple_seq_add_stmt (ilist, g);
4732 tree lab1 = create_artificial_label (UNKNOWN_LOCATION);
4733 g = gimple_build_cond (NE_EXPR, condv,
4734 boolean_false_node, end, lab1);
4735 gimple_seq_add_stmt (ilist, g);
4736 gimple_seq_add_stmt (ilist, gimple_build_label (lab1));
4737 }
4738 g = gimple_build_assign (build_simple_mem_ref (cond),
4739 boolean_true_node);
4740 gimple_seq_add_stmt (ilist, g);
4741 }
4742
4743 tree y1 = create_tmp_var (ptype);
4744 gimplify_assign (y1, y, ilist);
4745 tree i2 = NULL_TREE, y2 = NULL_TREE;
4746 tree body2 = NULL_TREE, end2 = NULL_TREE;
4747 tree y3 = NULL_TREE, y4 = NULL_TREE;
4748 if (task_reduction_needs_orig_p)
4749 {
4750 y3 = create_tmp_var (ptype);
4751 tree ref;
4752 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION)
4753 ref = build4 (ARRAY_REF, ptr_type_node, tskred_avar,
4754 size_int (task_reduction_cnt_full
4755 + task_reduction_cntorig - 1),
4756 NULL_TREE, NULL_TREE);
4757 else
4758 {
4759 unsigned int idx = *ctx->task_reduction_map->get (c);
4760 ref = task_reduction_read (ilist, tskred_temp, ptype,
4761 7 + 3 * idx);
4762 }
4763 gimplify_assign (y3, ref, ilist);
4764 }
4765 else if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) || is_simd)
4766 {
4767 if (pass != 3)
4768 {
4769 y2 = create_tmp_var (ptype);
4770 gimplify_assign (y2, y, ilist);
4771 }
4772 if (is_simd || OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c))
4773 {
4774 tree ref = build_outer_var_ref (var, ctx);
4775 /* For ref build_outer_var_ref already performs this. */
4776 if (TREE_CODE (d) == INDIRECT_REF)
4777 gcc_assert (omp_is_reference (var));
4778 else if (TREE_CODE (d) == ADDR_EXPR)
4779 ref = build_fold_addr_expr (ref);
4780 else if (omp_is_reference (var))
4781 ref = build_fold_addr_expr (ref);
4782 ref = fold_convert_loc (clause_loc, ptype, ref);
4783 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c)
4784 && OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c))
4785 {
4786 y3 = create_tmp_var (ptype);
4787 gimplify_assign (y3, unshare_expr (ref), ilist);
4788 }
4789 if (is_simd)
4790 {
4791 y4 = create_tmp_var (ptype);
4792 gimplify_assign (y4, ref, dlist);
4793 }
4794 }
4795 }
4796 tree i = create_tmp_var (TREE_TYPE (v));
4797 gimplify_assign (i, build_int_cst (TREE_TYPE (v), 0), ilist);
4798 tree body = create_artificial_label (UNKNOWN_LOCATION);
4799 gimple_seq_add_stmt (ilist, gimple_build_label (body));
4800 if (y2)
4801 {
4802 i2 = create_tmp_var (TREE_TYPE (v));
4803 gimplify_assign (i2, build_int_cst (TREE_TYPE (v), 0), dlist);
4804 body2 = create_artificial_label (UNKNOWN_LOCATION);
4805 end2 = create_artificial_label (UNKNOWN_LOCATION);
4806 gimple_seq_add_stmt (dlist, gimple_build_label (body2));
4807 }
4808 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
4809 {
4810 tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
4811 tree decl_placeholder
4812 = OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c);
4813 SET_DECL_VALUE_EXPR (decl_placeholder,
4814 build_simple_mem_ref (y1));
4815 DECL_HAS_VALUE_EXPR_P (decl_placeholder) = 1;
4816 SET_DECL_VALUE_EXPR (placeholder,
4817 y3 ? build_simple_mem_ref (y3)
4818 : error_mark_node);
4819 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
4820 x = lang_hooks.decls.omp_clause_default_ctor
4821 (c, build_simple_mem_ref (y1),
4822 y3 ? build_simple_mem_ref (y3) : NULL_TREE);
4823 if (x)
4824 gimplify_and_add (x, ilist);
4825 if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c))
4826 {
4827 gimple_seq tseq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c);
4828 lower_omp (&tseq, ctx);
4829 gimple_seq_add_seq (ilist, tseq);
4830 }
4831 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL;
4832 if (is_simd)
4833 {
4834 SET_DECL_VALUE_EXPR (decl_placeholder,
4835 build_simple_mem_ref (y2));
4836 SET_DECL_VALUE_EXPR (placeholder,
4837 build_simple_mem_ref (y4));
4838 gimple_seq tseq = OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c);
4839 lower_omp (&tseq, ctx);
4840 gimple_seq_add_seq (dlist, tseq);
4841 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
4842 }
4843 DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
4844 DECL_HAS_VALUE_EXPR_P (decl_placeholder) = 0;
4845 if (y2)
4846 {
4847 x = lang_hooks.decls.omp_clause_dtor
4848 (c, build_simple_mem_ref (y2));
4849 if (x)
4850 gimplify_and_add (x, dlist);
4851 }
4852 }
4853 else
4854 {
4855 x = omp_reduction_init (c, TREE_TYPE (type));
4856 enum tree_code code = OMP_CLAUSE_REDUCTION_CODE (c);
4857
4858 /* reduction(-:var) sums up the partial results, so it
4859 acts identically to reduction(+:var). */
4860 if (code == MINUS_EXPR)
4861 code = PLUS_EXPR;
4862
4863 gimplify_assign (build_simple_mem_ref (y1), x, ilist);
4864 if (is_simd)
4865 {
4866 x = build2 (code, TREE_TYPE (type),
4867 build_simple_mem_ref (y4),
4868 build_simple_mem_ref (y2));
4869 gimplify_assign (build_simple_mem_ref (y4), x, dlist);
4870 }
4871 }
4872 gimple *g
4873 = gimple_build_assign (y1, POINTER_PLUS_EXPR, y1,
4874 TYPE_SIZE_UNIT (TREE_TYPE (type)));
4875 gimple_seq_add_stmt (ilist, g);
4876 if (y3)
4877 {
4878 g = gimple_build_assign (y3, POINTER_PLUS_EXPR, y3,
4879 TYPE_SIZE_UNIT (TREE_TYPE (type)));
4880 gimple_seq_add_stmt (ilist, g);
4881 }
4882 g = gimple_build_assign (i, PLUS_EXPR, i,
4883 build_int_cst (TREE_TYPE (i), 1));
4884 gimple_seq_add_stmt (ilist, g);
4885 g = gimple_build_cond (LE_EXPR, i, v, body, end);
4886 gimple_seq_add_stmt (ilist, g);
4887 gimple_seq_add_stmt (ilist, gimple_build_label (end));
4888 if (y2)
4889 {
4890 g = gimple_build_assign (y2, POINTER_PLUS_EXPR, y2,
4891 TYPE_SIZE_UNIT (TREE_TYPE (type)));
4892 gimple_seq_add_stmt (dlist, g);
4893 if (y4)
4894 {
4895 g = gimple_build_assign
4896 (y4, POINTER_PLUS_EXPR, y4,
4897 TYPE_SIZE_UNIT (TREE_TYPE (type)));
4898 gimple_seq_add_stmt (dlist, g);
4899 }
4900 g = gimple_build_assign (i2, PLUS_EXPR, i2,
4901 build_int_cst (TREE_TYPE (i2), 1));
4902 gimple_seq_add_stmt (dlist, g);
4903 g = gimple_build_cond (LE_EXPR, i2, v, body2, end2);
4904 gimple_seq_add_stmt (dlist, g);
4905 gimple_seq_add_stmt (dlist, gimple_build_label (end2));
4906 }
4907 continue;
4908 }
4909 else if (pass == 2)
4910 {
4911 if (is_global_var (maybe_lookup_decl_in_outer_ctx (var, ctx)))
4912 x = var;
4913 else
4914 {
4915 bool by_ref = use_pointer_for_field (var, ctx);
4916 x = build_receiver_ref (var, by_ref, ctx);
4917 }
4918 if (!omp_is_reference (var))
4919 x = build_fold_addr_expr (x);
4920 x = fold_convert (ptr_type_node, x);
4921 unsigned cnt = task_reduction_cnt - 1;
4922 if (!task_reduction_needs_orig_p)
4923 cnt += task_reduction_cntorig_full - task_reduction_cntorig;
4924 else
4925 cnt = task_reduction_cntorig - 1;
4926 tree r = build4 (ARRAY_REF, ptr_type_node, tskred_avar,
4927 size_int (cnt), NULL_TREE, NULL_TREE);
4928 gimplify_assign (r, x, ilist);
4929 continue;
4930 }
4931 else if (pass == 3)
4932 {
4933 tree type = TREE_TYPE (new_var);
4934 if (!omp_is_reference (var))
4935 type = build_pointer_type (type);
4936 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION)
4937 {
4938 unsigned cnt = task_reduction_cnt - 1;
4939 if (!task_reduction_needs_orig_p)
4940 cnt += (task_reduction_cntorig_full
4941 - task_reduction_cntorig);
4942 else
4943 cnt = task_reduction_cntorig - 1;
4944 x = build4 (ARRAY_REF, ptr_type_node, tskred_avar,
4945 size_int (cnt), NULL_TREE, NULL_TREE);
4946 }
4947 else
4948 {
4949 unsigned int idx = *ctx->task_reduction_map->get (c);
4950 tree off;
4951 if (ctx->task_reductions[1 + idx])
4952 off = fold_convert (sizetype,
4953 ctx->task_reductions[1 + idx]);
4954 else
4955 off = task_reduction_read (ilist, tskred_temp, sizetype,
4956 7 + 3 * idx + 1);
4957 x = fold_build2 (POINTER_PLUS_EXPR, ptr_type_node,
4958 tskred_base, off);
4959 }
4960 x = fold_convert (type, x);
4961 tree t;
4962 if (omp_is_reference (var))
4963 {
4964 gimplify_assign (new_var, x, ilist);
4965 t = new_var;
4966 new_var = build_simple_mem_ref (new_var);
4967 }
4968 else
4969 {
4970 t = create_tmp_var (type);
4971 gimplify_assign (t, x, ilist);
4972 SET_DECL_VALUE_EXPR (new_var, build_simple_mem_ref (t));
4973 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
4974 }
4975 t = fold_convert (build_pointer_type (boolean_type_node), t);
4976 t = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (t), t,
4977 TYPE_SIZE_UNIT (TREE_TYPE (type)));
4978 cond = create_tmp_var (TREE_TYPE (t));
4979 gimplify_assign (cond, t, ilist);
4980 }
4981 else if (is_variable_sized (var))
4982 {
4983 /* For variable sized types, we need to allocate the
4984 actual storage here. Call alloca and store the
4985 result in the pointer decl that we created elsewhere. */
4986 if (pass == 0)
4987 continue;
4988
4989 if (c_kind != OMP_CLAUSE_FIRSTPRIVATE || !is_task_ctx (ctx))
4990 {
4991 gcall *stmt;
4992 tree tmp, atmp;
4993
4994 ptr = DECL_VALUE_EXPR (new_var);
4995 gcc_assert (TREE_CODE (ptr) == INDIRECT_REF);
4996 ptr = TREE_OPERAND (ptr, 0);
4997 gcc_assert (DECL_P (ptr));
4998 x = TYPE_SIZE_UNIT (TREE_TYPE (new_var));
4999
5000 /* void *tmp = __builtin_alloca */
5001 atmp = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
5002 stmt = gimple_build_call (atmp, 2, x,
5003 size_int (DECL_ALIGN (var)));
5004 tmp = create_tmp_var_raw (ptr_type_node);
5005 gimple_add_tmp_var (tmp);
5006 gimple_call_set_lhs (stmt, tmp);
5007
5008 gimple_seq_add_stmt (ilist, stmt);
5009
5010 x = fold_convert_loc (clause_loc, TREE_TYPE (ptr), tmp);
5011 gimplify_assign (ptr, x, ilist);
5012 }
5013 }
5014 else if (omp_is_reference (var)
5015 && (c_kind != OMP_CLAUSE_FIRSTPRIVATE
5016 || !OMP_CLAUSE_FIRSTPRIVATE_NO_REFERENCE (c)))
5017 {
5018 /* For references that are being privatized for Fortran,
5019 allocate new backing storage for the new pointer
5020 variable. This allows us to avoid changing all the
5021 code that expects a pointer to something that expects
5022 a direct variable. */
5023 if (pass == 0)
5024 continue;
5025
5026 x = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (new_var)));
5027 if (c_kind == OMP_CLAUSE_FIRSTPRIVATE && is_task_ctx (ctx))
5028 {
5029 x = build_receiver_ref (var, false, ctx);
5030 x = build_fold_addr_expr_loc (clause_loc, x);
5031 }
5032 else if (TREE_CONSTANT (x))
5033 {
5034 /* For reduction in SIMD loop, defer adding the
5035 initialization of the reference, because if we decide
5036 to use SIMD array for it, the initilization could cause
5037 expansion ICE. Ditto for other privatization clauses. */
5038 if (is_simd)
5039 x = NULL_TREE;
5040 else
5041 {
5042 x = create_tmp_var_raw (TREE_TYPE (TREE_TYPE (new_var)),
5043 get_name (var));
5044 gimple_add_tmp_var (x);
5045 TREE_ADDRESSABLE (x) = 1;
5046 x = build_fold_addr_expr_loc (clause_loc, x);
5047 }
5048 }
5049 else
5050 {
5051 tree atmp
5052 = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
5053 tree rtype = TREE_TYPE (TREE_TYPE (new_var));
5054 tree al = size_int (TYPE_ALIGN (rtype));
5055 x = build_call_expr_loc (clause_loc, atmp, 2, x, al);
5056 }
5057
5058 if (x)
5059 {
5060 x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x);
5061 gimplify_assign (new_var, x, ilist);
5062 }
5063
5064 new_var = build_simple_mem_ref_loc (clause_loc, new_var);
5065 }
5066 else if ((c_kind == OMP_CLAUSE_REDUCTION
5067 || c_kind == OMP_CLAUSE_IN_REDUCTION)
5068 && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
5069 {
5070 if (pass == 0)
5071 continue;
5072 }
5073 else if (pass != 0)
5074 continue;
5075
5076 switch (OMP_CLAUSE_CODE (c))
5077 {
5078 case OMP_CLAUSE_SHARED:
5079 /* Ignore shared directives in teams construct inside
5080 target construct. */
5081 if (gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS
5082 && !is_host_teams_ctx (ctx))
5083 continue;
5084 /* Shared global vars are just accessed directly. */
5085 if (is_global_var (new_var))
5086 break;
5087 /* For taskloop firstprivate/lastprivate, represented
5088 as firstprivate and shared clause on the task, new_var
5089 is the firstprivate var. */
5090 if (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
5091 break;
5092 /* Set up the DECL_VALUE_EXPR for shared variables now. This
5093 needs to be delayed until after fixup_child_record_type so
5094 that we get the correct type during the dereference. */
5095 by_ref = use_pointer_for_field (var, ctx);
5096 x = build_receiver_ref (var, by_ref, ctx);
5097 SET_DECL_VALUE_EXPR (new_var, x);
5098 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
5099
5100 /* ??? If VAR is not passed by reference, and the variable
5101 hasn't been initialized yet, then we'll get a warning for
5102 the store into the omp_data_s structure. Ideally, we'd be
5103 able to notice this and not store anything at all, but
5104 we're generating code too early. Suppress the warning. */
5105 if (!by_ref)
5106 TREE_NO_WARNING (var) = 1;
5107 break;
5108
5109 case OMP_CLAUSE__CONDTEMP_:
5110 if (is_parallel_ctx (ctx))
5111 {
5112 x = build_receiver_ref (var, false, ctx);
5113 SET_DECL_VALUE_EXPR (new_var, x);
5114 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
5115 }
5116 else if (is_simd && !OMP_CLAUSE__CONDTEMP__ITER (c))
5117 {
5118 x = build_zero_cst (TREE_TYPE (var));
5119 goto do_private;
5120 }
5121 break;
5122
5123 case OMP_CLAUSE_LASTPRIVATE:
5124 if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
5125 break;
5126 /* FALLTHRU */
5127
5128 case OMP_CLAUSE_PRIVATE:
5129 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_PRIVATE)
5130 x = build_outer_var_ref (var, ctx);
5131 else if (OMP_CLAUSE_PRIVATE_OUTER_REF (c))
5132 {
5133 if (is_task_ctx (ctx))
5134 x = build_receiver_ref (var, false, ctx);
5135 else
5136 x = build_outer_var_ref (var, ctx, OMP_CLAUSE_PRIVATE);
5137 }
5138 else
5139 x = NULL;
5140 do_private:
5141 tree nx;
5142 nx = lang_hooks.decls.omp_clause_default_ctor
5143 (c, unshare_expr (new_var), x);
5144 if (is_simd)
5145 {
5146 tree y = lang_hooks.decls.omp_clause_dtor (c, new_var);
5147 if ((TREE_ADDRESSABLE (new_var) || nx || y
5148 || (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
5149 && (gimple_omp_for_collapse (ctx->stmt) != 1
5150 || (gimple_omp_for_index (ctx->stmt, 0)
5151 != new_var)))
5152 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE__CONDTEMP_
5153 || omp_is_reference (var))
5154 && lower_rec_simd_input_clauses (new_var, ctx, &sctx,
5155 ivar, lvar))
5156 {
5157 if (omp_is_reference (var))
5158 {
5159 gcc_assert (TREE_CODE (new_var) == MEM_REF);
5160 tree new_vard = TREE_OPERAND (new_var, 0);
5161 gcc_assert (DECL_P (new_vard));
5162 SET_DECL_VALUE_EXPR (new_vard,
5163 build_fold_addr_expr (lvar));
5164 DECL_HAS_VALUE_EXPR_P (new_vard) = 1;
5165 }
5166
5167 if (nx)
5168 x = lang_hooks.decls.omp_clause_default_ctor
5169 (c, unshare_expr (ivar), x);
5170 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE__CONDTEMP_)
5171 {
5172 x = build2 (MODIFY_EXPR, TREE_TYPE (ivar),
5173 unshare_expr (ivar), x);
5174 nx = x;
5175 }
5176 if (nx && x)
5177 gimplify_and_add (x, &llist[0]);
5178 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
5179 && OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c))
5180 {
5181 tree v = new_var;
5182 if (!DECL_P (v))
5183 {
5184 gcc_assert (TREE_CODE (v) == MEM_REF);
5185 v = TREE_OPERAND (v, 0);
5186 gcc_assert (DECL_P (v));
5187 }
5188 v = *ctx->lastprivate_conditional_map->get (v);
5189 tree t = create_tmp_var (TREE_TYPE (v));
5190 tree z = build_zero_cst (TREE_TYPE (v));
5191 tree orig_v
5192 = build_outer_var_ref (var, ctx,
5193 OMP_CLAUSE_LASTPRIVATE);
5194 gimple_seq_add_stmt (dlist,
5195 gimple_build_assign (t, z));
5196 gcc_assert (DECL_HAS_VALUE_EXPR_P (v));
5197 tree civar = DECL_VALUE_EXPR (v);
5198 gcc_assert (TREE_CODE (civar) == ARRAY_REF);
5199 civar = unshare_expr (civar);
5200 TREE_OPERAND (civar, 1) = sctx.idx;
5201 x = build2 (MODIFY_EXPR, TREE_TYPE (t), t,
5202 unshare_expr (civar));
5203 x = build2 (COMPOUND_EXPR, TREE_TYPE (orig_v), x,
5204 build2 (MODIFY_EXPR, TREE_TYPE (orig_v),
5205 orig_v, unshare_expr (ivar)));
5206 tree cond = build2 (LT_EXPR, boolean_type_node, t,
5207 civar);
5208 x = build3 (COND_EXPR, void_type_node, cond, x,
5209 void_node);
5210 gimple_seq tseq = NULL;
5211 gimplify_and_add (x, &tseq);
5212 if (ctx->outer)
5213 lower_omp (&tseq, ctx->outer);
5214 gimple_seq_add_seq (&llist[1], tseq);
5215 }
5216 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
5217 && ctx->for_simd_scan_phase)
5218 {
5219 x = unshare_expr (ivar);
5220 tree orig_v
5221 = build_outer_var_ref (var, ctx,
5222 OMP_CLAUSE_LASTPRIVATE);
5223 x = lang_hooks.decls.omp_clause_assign_op (c, x,
5224 orig_v);
5225 gimplify_and_add (x, &llist[0]);
5226 }
5227 if (y)
5228 {
5229 y = lang_hooks.decls.omp_clause_dtor (c, ivar);
5230 if (y)
5231 gimplify_and_add (y, &llist[1]);
5232 }
5233 break;
5234 }
5235 if (omp_is_reference (var))
5236 {
5237 gcc_assert (TREE_CODE (new_var) == MEM_REF);
5238 tree new_vard = TREE_OPERAND (new_var, 0);
5239 gcc_assert (DECL_P (new_vard));
5240 tree type = TREE_TYPE (TREE_TYPE (new_vard));
5241 x = TYPE_SIZE_UNIT (type);
5242 if (TREE_CONSTANT (x))
5243 {
5244 x = create_tmp_var_raw (type, get_name (var));
5245 gimple_add_tmp_var (x);
5246 TREE_ADDRESSABLE (x) = 1;
5247 x = build_fold_addr_expr_loc (clause_loc, x);
5248 x = fold_convert_loc (clause_loc,
5249 TREE_TYPE (new_vard), x);
5250 gimplify_assign (new_vard, x, ilist);
5251 }
5252 }
5253 }
5254 if (nx)
5255 gimplify_and_add (nx, ilist);
5256 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
5257 && is_simd
5258 && ctx->for_simd_scan_phase)
5259 {
5260 tree orig_v = build_outer_var_ref (var, ctx,
5261 OMP_CLAUSE_LASTPRIVATE);
5262 x = lang_hooks.decls.omp_clause_assign_op (c, new_var,
5263 orig_v);
5264 gimplify_and_add (x, ilist);
5265 }
5266 /* FALLTHRU */
5267
5268 do_dtor:
5269 x = lang_hooks.decls.omp_clause_dtor (c, new_var);
5270 if (x)
5271 gimplify_and_add (x, dlist);
5272 break;
5273
5274 case OMP_CLAUSE_LINEAR:
5275 if (!OMP_CLAUSE_LINEAR_NO_COPYIN (c))
5276 goto do_firstprivate;
5277 if (OMP_CLAUSE_LINEAR_NO_COPYOUT (c))
5278 x = NULL;
5279 else
5280 x = build_outer_var_ref (var, ctx);
5281 goto do_private;
5282
5283 case OMP_CLAUSE_FIRSTPRIVATE:
5284 if (is_task_ctx (ctx))
5285 {
5286 if ((omp_is_reference (var)
5287 && !OMP_CLAUSE_FIRSTPRIVATE_NO_REFERENCE (c))
5288 || is_variable_sized (var))
5289 goto do_dtor;
5290 else if (is_global_var (maybe_lookup_decl_in_outer_ctx (var,
5291 ctx))
5292 || use_pointer_for_field (var, NULL))
5293 {
5294 x = build_receiver_ref (var, false, ctx);
5295 SET_DECL_VALUE_EXPR (new_var, x);
5296 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
5297 goto do_dtor;
5298 }
5299 }
5300 if (OMP_CLAUSE_FIRSTPRIVATE_NO_REFERENCE (c)
5301 && omp_is_reference (var))
5302 {
5303 x = build_outer_var_ref (var, ctx);
5304 gcc_assert (TREE_CODE (x) == MEM_REF
5305 && integer_zerop (TREE_OPERAND (x, 1)));
5306 x = TREE_OPERAND (x, 0);
5307 x = lang_hooks.decls.omp_clause_copy_ctor
5308 (c, unshare_expr (new_var), x);
5309 gimplify_and_add (x, ilist);
5310 goto do_dtor;
5311 }
5312 do_firstprivate:
5313 x = build_outer_var_ref (var, ctx);
5314 if (is_simd)
5315 {
5316 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
5317 && gimple_omp_for_combined_into_p (ctx->stmt))
5318 {
5319 tree t = OMP_CLAUSE_LINEAR_STEP (c);
5320 tree stept = TREE_TYPE (t);
5321 tree ct = omp_find_clause (clauses,
5322 OMP_CLAUSE__LOOPTEMP_);
5323 gcc_assert (ct);
5324 tree l = OMP_CLAUSE_DECL (ct);
5325 tree n1 = fd->loop.n1;
5326 tree step = fd->loop.step;
5327 tree itype = TREE_TYPE (l);
5328 if (POINTER_TYPE_P (itype))
5329 itype = signed_type_for (itype);
5330 l = fold_build2 (MINUS_EXPR, itype, l, n1);
5331 if (TYPE_UNSIGNED (itype)
5332 && fd->loop.cond_code == GT_EXPR)
5333 l = fold_build2 (TRUNC_DIV_EXPR, itype,
5334 fold_build1 (NEGATE_EXPR, itype, l),
5335 fold_build1 (NEGATE_EXPR,
5336 itype, step));
5337 else
5338 l = fold_build2 (TRUNC_DIV_EXPR, itype, l, step);
5339 t = fold_build2 (MULT_EXPR, stept,
5340 fold_convert (stept, l), t);
5341
5342 if (OMP_CLAUSE_LINEAR_ARRAY (c))
5343 {
5344 if (omp_is_reference (var))
5345 {
5346 gcc_assert (TREE_CODE (new_var) == MEM_REF);
5347 tree new_vard = TREE_OPERAND (new_var, 0);
5348 gcc_assert (DECL_P (new_vard));
5349 tree type = TREE_TYPE (TREE_TYPE (new_vard));
5350 nx = TYPE_SIZE_UNIT (type);
5351 if (TREE_CONSTANT (nx))
5352 {
5353 nx = create_tmp_var_raw (type,
5354 get_name (var));
5355 gimple_add_tmp_var (nx);
5356 TREE_ADDRESSABLE (nx) = 1;
5357 nx = build_fold_addr_expr_loc (clause_loc,
5358 nx);
5359 nx = fold_convert_loc (clause_loc,
5360 TREE_TYPE (new_vard),
5361 nx);
5362 gimplify_assign (new_vard, nx, ilist);
5363 }
5364 }
5365
5366 x = lang_hooks.decls.omp_clause_linear_ctor
5367 (c, new_var, x, t);
5368 gimplify_and_add (x, ilist);
5369 goto do_dtor;
5370 }
5371
5372 if (POINTER_TYPE_P (TREE_TYPE (x)))
5373 x = fold_build2 (POINTER_PLUS_EXPR,
5374 TREE_TYPE (x), x, t);
5375 else
5376 x = fold_build2 (PLUS_EXPR, TREE_TYPE (x), x, t);
5377 }
5378
5379 if ((OMP_CLAUSE_CODE (c) != OMP_CLAUSE_LINEAR
5380 || TREE_ADDRESSABLE (new_var)
5381 || omp_is_reference (var))
5382 && lower_rec_simd_input_clauses (new_var, ctx, &sctx,
5383 ivar, lvar))
5384 {
5385 if (omp_is_reference (var))
5386 {
5387 gcc_assert (TREE_CODE (new_var) == MEM_REF);
5388 tree new_vard = TREE_OPERAND (new_var, 0);
5389 gcc_assert (DECL_P (new_vard));
5390 SET_DECL_VALUE_EXPR (new_vard,
5391 build_fold_addr_expr (lvar));
5392 DECL_HAS_VALUE_EXPR_P (new_vard) = 1;
5393 }
5394 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR)
5395 {
5396 tree iv = create_tmp_var (TREE_TYPE (new_var));
5397 x = lang_hooks.decls.omp_clause_copy_ctor (c, iv, x);
5398 gimplify_and_add (x, ilist);
5399 gimple_stmt_iterator gsi
5400 = gsi_start_1 (gimple_omp_body_ptr (ctx->stmt));
5401 gassign *g
5402 = gimple_build_assign (unshare_expr (lvar), iv);
5403 gsi_insert_before_without_update (&gsi, g,
5404 GSI_SAME_STMT);
5405 tree t = OMP_CLAUSE_LINEAR_STEP (c);
5406 enum tree_code code = PLUS_EXPR;
5407 if (POINTER_TYPE_P (TREE_TYPE (new_var)))
5408 code = POINTER_PLUS_EXPR;
5409 g = gimple_build_assign (iv, code, iv, t);
5410 gsi_insert_before_without_update (&gsi, g,
5411 GSI_SAME_STMT);
5412 break;
5413 }
5414 x = lang_hooks.decls.omp_clause_copy_ctor
5415 (c, unshare_expr (ivar), x);
5416 gimplify_and_add (x, &llist[0]);
5417 x = lang_hooks.decls.omp_clause_dtor (c, ivar);
5418 if (x)
5419 gimplify_and_add (x, &llist[1]);
5420 break;
5421 }
5422 if (omp_is_reference (var))
5423 {
5424 gcc_assert (TREE_CODE (new_var) == MEM_REF);
5425 tree new_vard = TREE_OPERAND (new_var, 0);
5426 gcc_assert (DECL_P (new_vard));
5427 tree type = TREE_TYPE (TREE_TYPE (new_vard));
5428 nx = TYPE_SIZE_UNIT (type);
5429 if (TREE_CONSTANT (nx))
5430 {
5431 nx = create_tmp_var_raw (type, get_name (var));
5432 gimple_add_tmp_var (nx);
5433 TREE_ADDRESSABLE (nx) = 1;
5434 nx = build_fold_addr_expr_loc (clause_loc, nx);
5435 nx = fold_convert_loc (clause_loc,
5436 TREE_TYPE (new_vard), nx);
5437 gimplify_assign (new_vard, nx, ilist);
5438 }
5439 }
5440 }
5441 x = lang_hooks.decls.omp_clause_copy_ctor
5442 (c, unshare_expr (new_var), x);
5443 gimplify_and_add (x, ilist);
5444 goto do_dtor;
5445
5446 case OMP_CLAUSE__LOOPTEMP_:
5447 case OMP_CLAUSE__REDUCTEMP_:
5448 gcc_assert (is_taskreg_ctx (ctx));
5449 x = build_outer_var_ref (var, ctx);
5450 x = build2 (MODIFY_EXPR, TREE_TYPE (new_var), new_var, x);
5451 gimplify_and_add (x, ilist);
5452 break;
5453
5454 case OMP_CLAUSE_COPYIN:
5455 by_ref = use_pointer_for_field (var, NULL);
5456 x = build_receiver_ref (var, by_ref, ctx);
5457 x = lang_hooks.decls.omp_clause_assign_op (c, new_var, x);
5458 append_to_statement_list (x, &copyin_seq);
5459 copyin_by_ref |= by_ref;
5460 break;
5461
5462 case OMP_CLAUSE_REDUCTION:
5463 case OMP_CLAUSE_IN_REDUCTION:
5464 /* OpenACC reductions are initialized using the
5465 GOACC_REDUCTION internal function. */
5466 if (is_gimple_omp_oacc (ctx->stmt))
5467 break;
5468 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
5469 {
5470 tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
5471 gimple *tseq;
5472 tree ptype = TREE_TYPE (placeholder);
5473 if (cond)
5474 {
5475 x = error_mark_node;
5476 if (OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c)
5477 && !task_reduction_needs_orig_p)
5478 x = var;
5479 else if (OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c))
5480 {
5481 tree pptype = build_pointer_type (ptype);
5482 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION)
5483 x = build4 (ARRAY_REF, ptr_type_node, tskred_avar,
5484 size_int (task_reduction_cnt_full
5485 + task_reduction_cntorig - 1),
5486 NULL_TREE, NULL_TREE);
5487 else
5488 {
5489 unsigned int idx
5490 = *ctx->task_reduction_map->get (c);
5491 x = task_reduction_read (ilist, tskred_temp,
5492 pptype, 7 + 3 * idx);
5493 }
5494 x = fold_convert (pptype, x);
5495 x = build_simple_mem_ref (x);
5496 }
5497 }
5498 else
5499 {
5500 x = build_outer_var_ref (var, ctx);
5501
5502 if (omp_is_reference (var)
5503 && !useless_type_conversion_p (ptype, TREE_TYPE (x)))
5504 x = build_fold_addr_expr_loc (clause_loc, x);
5505 }
5506 SET_DECL_VALUE_EXPR (placeholder, x);
5507 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
5508 tree new_vard = new_var;
5509 if (omp_is_reference (var))
5510 {
5511 gcc_assert (TREE_CODE (new_var) == MEM_REF);
5512 new_vard = TREE_OPERAND (new_var, 0);
5513 gcc_assert (DECL_P (new_vard));
5514 }
5515 tree rvar = NULL_TREE, *rvarp = NULL, rvar2 = NULL_TREE;
5516 if (is_simd
5517 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
5518 && OMP_CLAUSE_REDUCTION_INSCAN (c))
5519 rvarp = &rvar;
5520 if (is_simd
5521 && lower_rec_simd_input_clauses (new_var, ctx, &sctx,
5522 ivar, lvar, rvarp,
5523 &rvar2))
5524 {
5525 if (new_vard == new_var)
5526 {
5527 gcc_assert (DECL_VALUE_EXPR (new_var) == lvar);
5528 SET_DECL_VALUE_EXPR (new_var, ivar);
5529 }
5530 else
5531 {
5532 SET_DECL_VALUE_EXPR (new_vard,
5533 build_fold_addr_expr (ivar));
5534 DECL_HAS_VALUE_EXPR_P (new_vard) = 1;
5535 }
5536 x = lang_hooks.decls.omp_clause_default_ctor
5537 (c, unshare_expr (ivar),
5538 build_outer_var_ref (var, ctx));
5539 if (rvarp && ctx->for_simd_scan_phase)
5540 {
5541 if (x)
5542 gimplify_and_add (x, &llist[0]);
5543 x = lang_hooks.decls.omp_clause_dtor (c, ivar);
5544 if (x)
5545 gimplify_and_add (x, &llist[1]);
5546 break;
5547 }
5548 else if (rvarp)
5549 {
5550 if (x)
5551 {
5552 gimplify_and_add (x, &llist[0]);
5553
5554 tree ivar2 = unshare_expr (lvar);
5555 TREE_OPERAND (ivar2, 1) = sctx.idx;
5556 x = lang_hooks.decls.omp_clause_default_ctor
5557 (c, ivar2, build_outer_var_ref (var, ctx));
5558 gimplify_and_add (x, &llist[0]);
5559
5560 if (rvar2)
5561 {
5562 x = lang_hooks.decls.omp_clause_default_ctor
5563 (c, unshare_expr (rvar2),
5564 build_outer_var_ref (var, ctx));
5565 gimplify_and_add (x, &llist[0]);
5566 }
5567
5568 /* For types that need construction, add another
5569 private var which will be default constructed
5570 and optionally initialized with
5571 OMP_CLAUSE_REDUCTION_GIMPLE_INIT, as in the
5572 loop we want to assign this value instead of
5573 constructing and destructing it in each
5574 iteration. */
5575 tree nv = create_tmp_var_raw (TREE_TYPE (ivar));
5576 gimple_add_tmp_var (nv);
5577 ctx->cb.decl_map->put (TREE_OPERAND (rvar2
5578 ? rvar2
5579 : ivar, 0),
5580 nv);
5581 x = lang_hooks.decls.omp_clause_default_ctor
5582 (c, nv, build_outer_var_ref (var, ctx));
5583 gimplify_and_add (x, ilist);
5584
5585 if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c))
5586 {
5587 tseq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c);
5588 x = DECL_VALUE_EXPR (new_vard);
5589 tree vexpr = nv;
5590 if (new_vard != new_var)
5591 vexpr = build_fold_addr_expr (nv);
5592 SET_DECL_VALUE_EXPR (new_vard, vexpr);
5593 lower_omp (&tseq, ctx);
5594 SET_DECL_VALUE_EXPR (new_vard, x);
5595 gimple_seq_add_seq (ilist, tseq);
5596 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL;
5597 }
5598
5599 x = lang_hooks.decls.omp_clause_dtor (c, nv);
5600 if (x)
5601 gimplify_and_add (x, dlist);
5602 }
5603
5604 tree ref = build_outer_var_ref (var, ctx);
5605 x = unshare_expr (ivar);
5606 x = lang_hooks.decls.omp_clause_assign_op (c, x,
5607 ref);
5608 gimplify_and_add (x, &llist[0]);
5609
5610 ref = build_outer_var_ref (var, ctx);
5611 x = lang_hooks.decls.omp_clause_assign_op (c, ref,
5612 rvar);
5613 gimplify_and_add (x, &llist[3]);
5614
5615 DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
5616 if (new_vard == new_var)
5617 SET_DECL_VALUE_EXPR (new_var, lvar);
5618 else
5619 SET_DECL_VALUE_EXPR (new_vard,
5620 build_fold_addr_expr (lvar));
5621
5622 x = lang_hooks.decls.omp_clause_dtor (c, ivar);
5623 if (x)
5624 gimplify_and_add (x, &llist[1]);
5625
5626 tree ivar2 = unshare_expr (lvar);
5627 TREE_OPERAND (ivar2, 1) = sctx.idx;
5628 x = lang_hooks.decls.omp_clause_dtor (c, ivar2);
5629 if (x)
5630 gimplify_and_add (x, &llist[1]);
5631
5632 if (rvar2)
5633 {
5634 x = lang_hooks.decls.omp_clause_dtor (c, rvar2);
5635 if (x)
5636 gimplify_and_add (x, &llist[1]);
5637 }
5638 break;
5639 }
5640 if (x)
5641 gimplify_and_add (x, &llist[0]);
5642 if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c))
5643 {
5644 tseq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c);
5645 lower_omp (&tseq, ctx);
5646 gimple_seq_add_seq (&llist[0], tseq);
5647 }
5648 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL;
5649 tseq = OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c);
5650 lower_omp (&tseq, ctx);
5651 gimple_seq_add_seq (&llist[1], tseq);
5652 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
5653 DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
5654 if (new_vard == new_var)
5655 SET_DECL_VALUE_EXPR (new_var, lvar);
5656 else
5657 SET_DECL_VALUE_EXPR (new_vard,
5658 build_fold_addr_expr (lvar));
5659 x = lang_hooks.decls.omp_clause_dtor (c, ivar);
5660 if (x)
5661 gimplify_and_add (x, &llist[1]);
5662 break;
5663 }
5664 /* If this is a reference to constant size reduction var
5665 with placeholder, we haven't emitted the initializer
5666 for it because it is undesirable if SIMD arrays are used.
5667 But if they aren't used, we need to emit the deferred
5668 initialization now. */
5669 else if (omp_is_reference (var) && is_simd)
5670 handle_simd_reference (clause_loc, new_vard, ilist);
5671
5672 tree lab2 = NULL_TREE;
5673 if (cond)
5674 {
5675 gimple *g;
5676 if (!is_parallel_ctx (ctx))
5677 {
5678 tree condv = create_tmp_var (boolean_type_node);
5679 tree m = build_simple_mem_ref (cond);
5680 g = gimple_build_assign (condv, m);
5681 gimple_seq_add_stmt (ilist, g);
5682 tree lab1
5683 = create_artificial_label (UNKNOWN_LOCATION);
5684 lab2 = create_artificial_label (UNKNOWN_LOCATION);
5685 g = gimple_build_cond (NE_EXPR, condv,
5686 boolean_false_node,
5687 lab2, lab1);
5688 gimple_seq_add_stmt (ilist, g);
5689 gimple_seq_add_stmt (ilist,
5690 gimple_build_label (lab1));
5691 }
5692 g = gimple_build_assign (build_simple_mem_ref (cond),
5693 boolean_true_node);
5694 gimple_seq_add_stmt (ilist, g);
5695 }
5696 x = lang_hooks.decls.omp_clause_default_ctor
5697 (c, unshare_expr (new_var),
5698 cond ? NULL_TREE
5699 : build_outer_var_ref (var, ctx));
5700 if (x)
5701 gimplify_and_add (x, ilist);
5702
5703 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
5704 && OMP_CLAUSE_REDUCTION_INSCAN (c))
5705 {
5706 if (ctx->for_simd_scan_phase)
5707 goto do_dtor;
5708 if (x || (!is_simd
5709 && OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c)))
5710 {
5711 tree nv = create_tmp_var_raw (TREE_TYPE (new_var));
5712 gimple_add_tmp_var (nv);
5713 ctx->cb.decl_map->put (new_vard, nv);
5714 x = lang_hooks.decls.omp_clause_default_ctor
5715 (c, nv, build_outer_var_ref (var, ctx));
5716 if (x)
5717 gimplify_and_add (x, ilist);
5718 if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c))
5719 {
5720 tseq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c);
5721 tree vexpr = nv;
5722 if (new_vard != new_var)
5723 vexpr = build_fold_addr_expr (nv);
5724 SET_DECL_VALUE_EXPR (new_vard, vexpr);
5725 DECL_HAS_VALUE_EXPR_P (new_vard) = 1;
5726 lower_omp (&tseq, ctx);
5727 SET_DECL_VALUE_EXPR (new_vard, NULL_TREE);
5728 DECL_HAS_VALUE_EXPR_P (new_vard) = 0;
5729 gimple_seq_add_seq (ilist, tseq);
5730 }
5731 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL;
5732 if (is_simd && ctx->scan_exclusive)
5733 {
5734 tree nv2
5735 = create_tmp_var_raw (TREE_TYPE (new_var));
5736 gimple_add_tmp_var (nv2);
5737 ctx->cb.decl_map->put (nv, nv2);
5738 x = lang_hooks.decls.omp_clause_default_ctor
5739 (c, nv2, build_outer_var_ref (var, ctx));
5740 gimplify_and_add (x, ilist);
5741 x = lang_hooks.decls.omp_clause_dtor (c, nv2);
5742 if (x)
5743 gimplify_and_add (x, dlist);
5744 }
5745 x = lang_hooks.decls.omp_clause_dtor (c, nv);
5746 if (x)
5747 gimplify_and_add (x, dlist);
5748 }
5749 else if (is_simd
5750 && ctx->scan_exclusive
5751 && TREE_ADDRESSABLE (TREE_TYPE (new_var)))
5752 {
5753 tree nv2 = create_tmp_var_raw (TREE_TYPE (new_var));
5754 gimple_add_tmp_var (nv2);
5755 ctx->cb.decl_map->put (new_vard, nv2);
5756 x = lang_hooks.decls.omp_clause_dtor (c, nv2);
5757 if (x)
5758 gimplify_and_add (x, dlist);
5759 }
5760 DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
5761 goto do_dtor;
5762 }
5763
5764 if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c))
5765 {
5766 tseq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c);
5767 lower_omp (&tseq, ctx);
5768 gimple_seq_add_seq (ilist, tseq);
5769 }
5770 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL;
5771 if (is_simd)
5772 {
5773 tseq = OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c);
5774 lower_omp (&tseq, ctx);
5775 gimple_seq_add_seq (dlist, tseq);
5776 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
5777 }
5778 DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
5779 if (cond)
5780 {
5781 if (lab2)
5782 gimple_seq_add_stmt (ilist, gimple_build_label (lab2));
5783 break;
5784 }
5785 goto do_dtor;
5786 }
5787 else
5788 {
5789 x = omp_reduction_init (c, TREE_TYPE (new_var));
5790 gcc_assert (TREE_CODE (TREE_TYPE (new_var)) != ARRAY_TYPE);
5791 enum tree_code code = OMP_CLAUSE_REDUCTION_CODE (c);
5792
5793 if (cond)
5794 {
5795 gimple *g;
5796 tree lab2 = NULL_TREE;
5797 /* GOMP_taskgroup_reduction_register memsets the whole
5798 array to zero. If the initializer is zero, we don't
5799 need to initialize it again, just mark it as ever
5800 used unconditionally, i.e. cond = true. */
5801 if (initializer_zerop (x))
5802 {
5803 g = gimple_build_assign (build_simple_mem_ref (cond),
5804 boolean_true_node);
5805 gimple_seq_add_stmt (ilist, g);
5806 break;
5807 }
5808
5809 /* Otherwise, emit
5810 if (!cond) { cond = true; new_var = x; } */
5811 if (!is_parallel_ctx (ctx))
5812 {
5813 tree condv = create_tmp_var (boolean_type_node);
5814 tree m = build_simple_mem_ref (cond);
5815 g = gimple_build_assign (condv, m);
5816 gimple_seq_add_stmt (ilist, g);
5817 tree lab1
5818 = create_artificial_label (UNKNOWN_LOCATION);
5819 lab2 = create_artificial_label (UNKNOWN_LOCATION);
5820 g = gimple_build_cond (NE_EXPR, condv,
5821 boolean_false_node,
5822 lab2, lab1);
5823 gimple_seq_add_stmt (ilist, g);
5824 gimple_seq_add_stmt (ilist,
5825 gimple_build_label (lab1));
5826 }
5827 g = gimple_build_assign (build_simple_mem_ref (cond),
5828 boolean_true_node);
5829 gimple_seq_add_stmt (ilist, g);
5830 gimplify_assign (new_var, x, ilist);
5831 if (lab2)
5832 gimple_seq_add_stmt (ilist, gimple_build_label (lab2));
5833 break;
5834 }
5835
5836 /* reduction(-:var) sums up the partial results, so it
5837 acts identically to reduction(+:var). */
5838 if (code == MINUS_EXPR)
5839 code = PLUS_EXPR;
5840
5841 tree new_vard = new_var;
5842 if (is_simd && omp_is_reference (var))
5843 {
5844 gcc_assert (TREE_CODE (new_var) == MEM_REF);
5845 new_vard = TREE_OPERAND (new_var, 0);
5846 gcc_assert (DECL_P (new_vard));
5847 }
5848 tree rvar = NULL_TREE, *rvarp = NULL, rvar2 = NULL_TREE;
5849 if (is_simd
5850 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
5851 && OMP_CLAUSE_REDUCTION_INSCAN (c))
5852 rvarp = &rvar;
5853 if (is_simd
5854 && lower_rec_simd_input_clauses (new_var, ctx, &sctx,
5855 ivar, lvar, rvarp,
5856 &rvar2))
5857 {
5858 if (new_vard != new_var)
5859 {
5860 SET_DECL_VALUE_EXPR (new_vard,
5861 build_fold_addr_expr (lvar));
5862 DECL_HAS_VALUE_EXPR_P (new_vard) = 1;
5863 }
5864
5865 tree ref = build_outer_var_ref (var, ctx);
5866
5867 if (rvarp)
5868 {
5869 if (ctx->for_simd_scan_phase)
5870 break;
5871 gimplify_assign (ivar, ref, &llist[0]);
5872 ref = build_outer_var_ref (var, ctx);
5873 gimplify_assign (ref, rvar, &llist[3]);
5874 break;
5875 }
5876
5877 gimplify_assign (unshare_expr (ivar), x, &llist[0]);
5878
5879 if (sctx.is_simt)
5880 {
5881 if (!simt_lane)
5882 simt_lane = create_tmp_var (unsigned_type_node);
5883 x = build_call_expr_internal_loc
5884 (UNKNOWN_LOCATION, IFN_GOMP_SIMT_XCHG_BFLY,
5885 TREE_TYPE (ivar), 2, ivar, simt_lane);
5886 x = build2 (code, TREE_TYPE (ivar), ivar, x);
5887 gimplify_assign (ivar, x, &llist[2]);
5888 }
5889 x = build2 (code, TREE_TYPE (ref), ref, ivar);
5890 ref = build_outer_var_ref (var, ctx);
5891 gimplify_assign (ref, x, &llist[1]);
5892
5893 }
5894 else
5895 {
5896 if (omp_is_reference (var) && is_simd)
5897 handle_simd_reference (clause_loc, new_vard, ilist);
5898 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
5899 && OMP_CLAUSE_REDUCTION_INSCAN (c))
5900 break;
5901 gimplify_assign (new_var, x, ilist);
5902 if (is_simd)
5903 {
5904 tree ref = build_outer_var_ref (var, ctx);
5905
5906 x = build2 (code, TREE_TYPE (ref), ref, new_var);
5907 ref = build_outer_var_ref (var, ctx);
5908 gimplify_assign (ref, x, dlist);
5909 }
5910 }
5911 }
5912 break;
5913
5914 default:
5915 gcc_unreachable ();
5916 }
5917 }
5918 }
5919 if (tskred_avar)
5920 {
5921 tree clobber = build_constructor (TREE_TYPE (tskred_avar), NULL);
5922 TREE_THIS_VOLATILE (clobber) = 1;
5923 gimple_seq_add_stmt (ilist, gimple_build_assign (tskred_avar, clobber));
5924 }
5925
5926 if (known_eq (sctx.max_vf, 1U))
5927 {
5928 sctx.is_simt = false;
5929 if (ctx->lastprivate_conditional_map)
5930 {
5931 if (gimple_omp_for_combined_into_p (ctx->stmt))
5932 {
5933 /* Signal to lower_omp_1 that it should use parent context. */
5934 ctx->combined_into_simd_safelen1 = true;
5935 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
5936 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
5937 && OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c))
5938 {
5939 tree o = lookup_decl (OMP_CLAUSE_DECL (c), ctx);
5940 omp_context *outer = ctx->outer;
5941 if (gimple_code (outer->stmt) == GIMPLE_OMP_SCAN)
5942 outer = outer->outer;
5943 tree *v = ctx->lastprivate_conditional_map->get (o);
5944 tree po = lookup_decl (OMP_CLAUSE_DECL (c), outer);
5945 tree *pv = outer->lastprivate_conditional_map->get (po);
5946 *v = *pv;
5947 }
5948 }
5949 else
5950 {
5951 /* When not vectorized, treat lastprivate(conditional:) like
5952 normal lastprivate, as there will be just one simd lane
5953 writing the privatized variable. */
5954 delete ctx->lastprivate_conditional_map;
5955 ctx->lastprivate_conditional_map = NULL;
5956 }
5957 }
5958 }
5959
5960 if (nonconst_simd_if)
5961 {
5962 if (sctx.lane == NULL_TREE)
5963 {
5964 sctx.idx = create_tmp_var (unsigned_type_node);
5965 sctx.lane = create_tmp_var (unsigned_type_node);
5966 }
5967 /* FIXME: For now. */
5968 sctx.is_simt = false;
5969 }
5970
5971 if (sctx.lane || sctx.is_simt)
5972 {
5973 uid = create_tmp_var (ptr_type_node, "simduid");
5974 /* Don't want uninit warnings on simduid, it is always uninitialized,
5975 but we use it not for the value, but for the DECL_UID only. */
5976 TREE_NO_WARNING (uid) = 1;
5977 c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__SIMDUID_);
5978 OMP_CLAUSE__SIMDUID__DECL (c) = uid;
5979 OMP_CLAUSE_CHAIN (c) = gimple_omp_for_clauses (ctx->stmt);
5980 gimple_omp_for_set_clauses (ctx->stmt, c);
5981 }
5982 /* Emit calls denoting privatized variables and initializing a pointer to
5983 structure that holds private variables as fields after ompdevlow pass. */
5984 if (sctx.is_simt)
5985 {
5986 sctx.simt_eargs[0] = uid;
5987 gimple *g
5988 = gimple_build_call_internal_vec (IFN_GOMP_SIMT_ENTER, sctx.simt_eargs);
5989 gimple_call_set_lhs (g, uid);
5990 gimple_seq_add_stmt (ilist, g);
5991 sctx.simt_eargs.release ();
5992
5993 simtrec = create_tmp_var (ptr_type_node, ".omp_simt");
5994 g = gimple_build_call_internal (IFN_GOMP_SIMT_ENTER_ALLOC, 1, uid);
5995 gimple_call_set_lhs (g, simtrec);
5996 gimple_seq_add_stmt (ilist, g);
5997 }
5998 if (sctx.lane)
5999 {
6000 gimple *g = gimple_build_call_internal (IFN_GOMP_SIMD_LANE,
6001 2 + (nonconst_simd_if != NULL),
6002 uid, integer_zero_node,
6003 nonconst_simd_if);
6004 gimple_call_set_lhs (g, sctx.lane);
6005 gimple_stmt_iterator gsi = gsi_start_1 (gimple_omp_body_ptr (ctx->stmt));
6006 gsi_insert_before_without_update (&gsi, g, GSI_SAME_STMT);
6007 g = gimple_build_assign (sctx.lane, INTEGER_CST,
6008 build_int_cst (unsigned_type_node, 0));
6009 gimple_seq_add_stmt (ilist, g);
6010 if (sctx.lastlane)
6011 {
6012 g = gimple_build_call_internal (IFN_GOMP_SIMD_LAST_LANE,
6013 2, uid, sctx.lane);
6014 gimple_call_set_lhs (g, sctx.lastlane);
6015 gimple_seq_add_stmt (dlist, g);
6016 gimple_seq_add_seq (dlist, llist[3]);
6017 }
6018 /* Emit reductions across SIMT lanes in log_2(simt_vf) steps. */
6019 if (llist[2])
6020 {
6021 tree simt_vf = create_tmp_var (unsigned_type_node);
6022 g = gimple_build_call_internal (IFN_GOMP_SIMT_VF, 0);
6023 gimple_call_set_lhs (g, simt_vf);
6024 gimple_seq_add_stmt (dlist, g);
6025
6026 tree t = build_int_cst (unsigned_type_node, 1);
6027 g = gimple_build_assign (simt_lane, INTEGER_CST, t);
6028 gimple_seq_add_stmt (dlist, g);
6029
6030 t = build_int_cst (unsigned_type_node, 0);
6031 g = gimple_build_assign (sctx.idx, INTEGER_CST, t);
6032 gimple_seq_add_stmt (dlist, g);
6033
6034 tree body = create_artificial_label (UNKNOWN_LOCATION);
6035 tree header = create_artificial_label (UNKNOWN_LOCATION);
6036 tree end = create_artificial_label (UNKNOWN_LOCATION);
6037 gimple_seq_add_stmt (dlist, gimple_build_goto (header));
6038 gimple_seq_add_stmt (dlist, gimple_build_label (body));
6039
6040 gimple_seq_add_seq (dlist, llist[2]);
6041
6042 g = gimple_build_assign (simt_lane, LSHIFT_EXPR, simt_lane, integer_one_node);
6043 gimple_seq_add_stmt (dlist, g);
6044
6045 gimple_seq_add_stmt (dlist, gimple_build_label (header));
6046 g = gimple_build_cond (LT_EXPR, simt_lane, simt_vf, body, end);
6047 gimple_seq_add_stmt (dlist, g);
6048
6049 gimple_seq_add_stmt (dlist, gimple_build_label (end));
6050 }
6051 for (int i = 0; i < 2; i++)
6052 if (llist[i])
6053 {
6054 tree vf = create_tmp_var (unsigned_type_node);
6055 g = gimple_build_call_internal (IFN_GOMP_SIMD_VF, 1, uid);
6056 gimple_call_set_lhs (g, vf);
6057 gimple_seq *seq = i == 0 ? ilist : dlist;
6058 gimple_seq_add_stmt (seq, g);
6059 tree t = build_int_cst (unsigned_type_node, 0);
6060 g = gimple_build_assign (sctx.idx, INTEGER_CST, t);
6061 gimple_seq_add_stmt (seq, g);
6062 tree body = create_artificial_label (UNKNOWN_LOCATION);
6063 tree header = create_artificial_label (UNKNOWN_LOCATION);
6064 tree end = create_artificial_label (UNKNOWN_LOCATION);
6065 gimple_seq_add_stmt (seq, gimple_build_goto (header));
6066 gimple_seq_add_stmt (seq, gimple_build_label (body));
6067 gimple_seq_add_seq (seq, llist[i]);
6068 t = build_int_cst (unsigned_type_node, 1);
6069 g = gimple_build_assign (sctx.idx, PLUS_EXPR, sctx.idx, t);
6070 gimple_seq_add_stmt (seq, g);
6071 gimple_seq_add_stmt (seq, gimple_build_label (header));
6072 g = gimple_build_cond (LT_EXPR, sctx.idx, vf, body, end);
6073 gimple_seq_add_stmt (seq, g);
6074 gimple_seq_add_stmt (seq, gimple_build_label (end));
6075 }
6076 }
6077 if (sctx.is_simt)
6078 {
6079 gimple_seq_add_seq (dlist, sctx.simt_dlist);
6080 gimple *g
6081 = gimple_build_call_internal (IFN_GOMP_SIMT_EXIT, 1, simtrec);
6082 gimple_seq_add_stmt (dlist, g);
6083 }
6084
6085 /* The copyin sequence is not to be executed by the main thread, since
6086 that would result in self-copies. Perhaps not visible to scalars,
6087 but it certainly is to C++ operator=. */
6088 if (copyin_seq)
6089 {
6090 x = build_call_expr (builtin_decl_explicit (BUILT_IN_OMP_GET_THREAD_NUM),
6091 0);
6092 x = build2 (NE_EXPR, boolean_type_node, x,
6093 build_int_cst (TREE_TYPE (x), 0));
6094 x = build3 (COND_EXPR, void_type_node, x, copyin_seq, NULL);
6095 gimplify_and_add (x, ilist);
6096 }
6097
6098 /* If any copyin variable is passed by reference, we must ensure the
6099 master thread doesn't modify it before it is copied over in all
6100 threads. Similarly for variables in both firstprivate and
6101 lastprivate clauses we need to ensure the lastprivate copying
6102 happens after firstprivate copying in all threads. And similarly
6103 for UDRs if initializer expression refers to omp_orig. */
6104 if (copyin_by_ref || lastprivate_firstprivate
6105 || (reduction_omp_orig_ref
6106 && !ctx->scan_inclusive
6107 && !ctx->scan_exclusive))
6108 {
6109 /* Don't add any barrier for #pragma omp simd or
6110 #pragma omp distribute. */
6111 if (!is_task_ctx (ctx)
6112 && (gimple_code (ctx->stmt) != GIMPLE_OMP_FOR
6113 || gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_FOR))
6114 gimple_seq_add_stmt (ilist, omp_build_barrier (NULL_TREE));
6115 }
6116
6117 /* If max_vf is non-zero, then we can use only a vectorization factor
6118 up to the max_vf we chose. So stick it into the safelen clause. */
6119 if (maybe_ne (sctx.max_vf, 0U))
6120 {
6121 tree c = omp_find_clause (gimple_omp_for_clauses (ctx->stmt),
6122 OMP_CLAUSE_SAFELEN);
6123 poly_uint64 safe_len;
6124 if (c == NULL_TREE
6125 || (poly_int_tree_p (OMP_CLAUSE_SAFELEN_EXPR (c), &safe_len)
6126 && maybe_gt (safe_len, sctx.max_vf)))
6127 {
6128 c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE_SAFELEN);
6129 OMP_CLAUSE_SAFELEN_EXPR (c) = build_int_cst (integer_type_node,
6130 sctx.max_vf);
6131 OMP_CLAUSE_CHAIN (c) = gimple_omp_for_clauses (ctx->stmt);
6132 gimple_omp_for_set_clauses (ctx->stmt, c);
6133 }
6134 }
6135 }
6136
6137 /* Create temporary variables for lastprivate(conditional:) implementation
6138 in context CTX with CLAUSES. */
6139
6140 static void
6141 lower_lastprivate_conditional_clauses (tree *clauses, omp_context *ctx)
6142 {
6143 tree iter_type = NULL_TREE;
6144 tree cond_ptr = NULL_TREE;
6145 tree iter_var = NULL_TREE;
6146 bool is_simd = (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
6147 && gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_SIMD);
6148 tree next = *clauses;
6149 for (tree c = *clauses; c; c = OMP_CLAUSE_CHAIN (c))
6150 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
6151 && OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c))
6152 {
6153 if (is_simd)
6154 {
6155 tree cc = omp_find_clause (next, OMP_CLAUSE__CONDTEMP_);
6156 gcc_assert (cc);
6157 if (iter_type == NULL_TREE)
6158 {
6159 iter_type = TREE_TYPE (OMP_CLAUSE_DECL (cc));
6160 iter_var = create_tmp_var_raw (iter_type);
6161 DECL_CONTEXT (iter_var) = current_function_decl;
6162 DECL_SEEN_IN_BIND_EXPR_P (iter_var) = 1;
6163 DECL_CHAIN (iter_var) = ctx->block_vars;
6164 ctx->block_vars = iter_var;
6165 tree c3
6166 = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__CONDTEMP_);
6167 OMP_CLAUSE__CONDTEMP__ITER (c3) = 1;
6168 OMP_CLAUSE_DECL (c3) = iter_var;
6169 OMP_CLAUSE_CHAIN (c3) = *clauses;
6170 *clauses = c3;
6171 ctx->lastprivate_conditional_map = new hash_map<tree, tree>;
6172 }
6173 next = OMP_CLAUSE_CHAIN (cc);
6174 tree o = lookup_decl (OMP_CLAUSE_DECL (c), ctx);
6175 tree v = lookup_decl (OMP_CLAUSE_DECL (cc), ctx);
6176 ctx->lastprivate_conditional_map->put (o, v);
6177 continue;
6178 }
6179 if (iter_type == NULL)
6180 {
6181 if (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR)
6182 {
6183 struct omp_for_data fd;
6184 omp_extract_for_data (as_a <gomp_for *> (ctx->stmt), &fd,
6185 NULL);
6186 iter_type = unsigned_type_for (fd.iter_type);
6187 }
6188 else if (gimple_code (ctx->stmt) == GIMPLE_OMP_SECTIONS)
6189 iter_type = unsigned_type_node;
6190 tree c2 = omp_find_clause (*clauses, OMP_CLAUSE__CONDTEMP_);
6191 if (c2)
6192 {
6193 cond_ptr
6194 = lookup_decl_in_outer_ctx (OMP_CLAUSE_DECL (c2), ctx);
6195 OMP_CLAUSE_DECL (c2) = cond_ptr;
6196 }
6197 else
6198 {
6199 cond_ptr = create_tmp_var_raw (build_pointer_type (iter_type));
6200 DECL_CONTEXT (cond_ptr) = current_function_decl;
6201 DECL_SEEN_IN_BIND_EXPR_P (cond_ptr) = 1;
6202 DECL_CHAIN (cond_ptr) = ctx->block_vars;
6203 ctx->block_vars = cond_ptr;
6204 c2 = build_omp_clause (UNKNOWN_LOCATION,
6205 OMP_CLAUSE__CONDTEMP_);
6206 OMP_CLAUSE_DECL (c2) = cond_ptr;
6207 OMP_CLAUSE_CHAIN (c2) = *clauses;
6208 *clauses = c2;
6209 }
6210 iter_var = create_tmp_var_raw (iter_type);
6211 DECL_CONTEXT (iter_var) = current_function_decl;
6212 DECL_SEEN_IN_BIND_EXPR_P (iter_var) = 1;
6213 DECL_CHAIN (iter_var) = ctx->block_vars;
6214 ctx->block_vars = iter_var;
6215 tree c3
6216 = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__CONDTEMP_);
6217 OMP_CLAUSE__CONDTEMP__ITER (c3) = 1;
6218 OMP_CLAUSE_DECL (c3) = iter_var;
6219 OMP_CLAUSE_CHAIN (c3) = OMP_CLAUSE_CHAIN (c2);
6220 OMP_CLAUSE_CHAIN (c2) = c3;
6221 ctx->lastprivate_conditional_map = new hash_map<tree, tree>;
6222 }
6223 tree v = create_tmp_var_raw (iter_type);
6224 DECL_CONTEXT (v) = current_function_decl;
6225 DECL_SEEN_IN_BIND_EXPR_P (v) = 1;
6226 DECL_CHAIN (v) = ctx->block_vars;
6227 ctx->block_vars = v;
6228 tree o = lookup_decl (OMP_CLAUSE_DECL (c), ctx);
6229 ctx->lastprivate_conditional_map->put (o, v);
6230 }
6231 }
6232
6233
6234 /* Generate code to implement the LASTPRIVATE clauses. This is used for
6235 both parallel and workshare constructs. PREDICATE may be NULL if it's
6236 always true. BODY_P is the sequence to insert early initialization
6237 if needed, STMT_LIST is where the non-conditional lastprivate handling
6238 goes into and CSTMT_LIST is a sequence that needs to be run in a critical
6239 section. */
6240
6241 static void
6242 lower_lastprivate_clauses (tree clauses, tree predicate, gimple_seq *body_p,
6243 gimple_seq *stmt_list, gimple_seq *cstmt_list,
6244 omp_context *ctx)
6245 {
6246 tree x, c, label = NULL, orig_clauses = clauses;
6247 bool par_clauses = false;
6248 tree simduid = NULL, lastlane = NULL, simtcond = NULL, simtlast = NULL;
6249 unsigned HOST_WIDE_INT conditional_off = 0;
6250 gimple_seq post_stmt_list = NULL;
6251
6252 /* Early exit if there are no lastprivate or linear clauses. */
6253 for (; clauses ; clauses = OMP_CLAUSE_CHAIN (clauses))
6254 if (OMP_CLAUSE_CODE (clauses) == OMP_CLAUSE_LASTPRIVATE
6255 || (OMP_CLAUSE_CODE (clauses) == OMP_CLAUSE_LINEAR
6256 && !OMP_CLAUSE_LINEAR_NO_COPYOUT (clauses)))
6257 break;
6258 if (clauses == NULL)
6259 {
6260 /* If this was a workshare clause, see if it had been combined
6261 with its parallel. In that case, look for the clauses on the
6262 parallel statement itself. */
6263 if (is_parallel_ctx (ctx))
6264 return;
6265
6266 ctx = ctx->outer;
6267 if (ctx == NULL || !is_parallel_ctx (ctx))
6268 return;
6269
6270 clauses = omp_find_clause (gimple_omp_parallel_clauses (ctx->stmt),
6271 OMP_CLAUSE_LASTPRIVATE);
6272 if (clauses == NULL)
6273 return;
6274 par_clauses = true;
6275 }
6276
6277 bool maybe_simt = false;
6278 if (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
6279 && gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_SIMD)
6280 {
6281 maybe_simt = omp_find_clause (orig_clauses, OMP_CLAUSE__SIMT_);
6282 simduid = omp_find_clause (orig_clauses, OMP_CLAUSE__SIMDUID_);
6283 if (simduid)
6284 simduid = OMP_CLAUSE__SIMDUID__DECL (simduid);
6285 }
6286
6287 if (predicate)
6288 {
6289 gcond *stmt;
6290 tree label_true, arm1, arm2;
6291 enum tree_code pred_code = TREE_CODE (predicate);
6292
6293 label = create_artificial_label (UNKNOWN_LOCATION);
6294 label_true = create_artificial_label (UNKNOWN_LOCATION);
6295 if (TREE_CODE_CLASS (pred_code) == tcc_comparison)
6296 {
6297 arm1 = TREE_OPERAND (predicate, 0);
6298 arm2 = TREE_OPERAND (predicate, 1);
6299 gimplify_expr (&arm1, stmt_list, NULL, is_gimple_val, fb_rvalue);
6300 gimplify_expr (&arm2, stmt_list, NULL, is_gimple_val, fb_rvalue);
6301 }
6302 else
6303 {
6304 arm1 = predicate;
6305 gimplify_expr (&arm1, stmt_list, NULL, is_gimple_val, fb_rvalue);
6306 arm2 = boolean_false_node;
6307 pred_code = NE_EXPR;
6308 }
6309 if (maybe_simt)
6310 {
6311 c = build2 (pred_code, boolean_type_node, arm1, arm2);
6312 c = fold_convert (integer_type_node, c);
6313 simtcond = create_tmp_var (integer_type_node);
6314 gimplify_assign (simtcond, c, stmt_list);
6315 gcall *g = gimple_build_call_internal (IFN_GOMP_SIMT_VOTE_ANY,
6316 1, simtcond);
6317 c = create_tmp_var (integer_type_node);
6318 gimple_call_set_lhs (g, c);
6319 gimple_seq_add_stmt (stmt_list, g);
6320 stmt = gimple_build_cond (NE_EXPR, c, integer_zero_node,
6321 label_true, label);
6322 }
6323 else
6324 stmt = gimple_build_cond (pred_code, arm1, arm2, label_true, label);
6325 gimple_seq_add_stmt (stmt_list, stmt);
6326 gimple_seq_add_stmt (stmt_list, gimple_build_label (label_true));
6327 }
6328
6329 tree cond_ptr = NULL_TREE;
6330 for (c = clauses; c ;)
6331 {
6332 tree var, new_var;
6333 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
6334 gimple_seq *this_stmt_list = stmt_list;
6335 tree lab2 = NULL_TREE;
6336
6337 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
6338 && OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c)
6339 && ctx->lastprivate_conditional_map
6340 && !ctx->combined_into_simd_safelen1)
6341 {
6342 gcc_assert (body_p);
6343 if (simduid)
6344 goto next;
6345 if (cond_ptr == NULL_TREE)
6346 {
6347 cond_ptr = omp_find_clause (orig_clauses, OMP_CLAUSE__CONDTEMP_);
6348 cond_ptr = OMP_CLAUSE_DECL (cond_ptr);
6349 }
6350 tree type = TREE_TYPE (TREE_TYPE (cond_ptr));
6351 tree o = lookup_decl (OMP_CLAUSE_DECL (c), ctx);
6352 tree v = *ctx->lastprivate_conditional_map->get (o);
6353 gimplify_assign (v, build_zero_cst (type), body_p);
6354 this_stmt_list = cstmt_list;
6355 tree mem;
6356 if (POINTER_TYPE_P (TREE_TYPE (cond_ptr)))
6357 {
6358 mem = build2 (MEM_REF, type, cond_ptr,
6359 build_int_cst (TREE_TYPE (cond_ptr),
6360 conditional_off));
6361 conditional_off += tree_to_uhwi (TYPE_SIZE_UNIT (type));
6362 }
6363 else
6364 mem = build4 (ARRAY_REF, type, cond_ptr,
6365 size_int (conditional_off++), NULL_TREE, NULL_TREE);
6366 tree mem2 = copy_node (mem);
6367 gimple_seq seq = NULL;
6368 mem = force_gimple_operand (mem, &seq, true, NULL_TREE);
6369 gimple_seq_add_seq (this_stmt_list, seq);
6370 tree lab1 = create_artificial_label (UNKNOWN_LOCATION);
6371 lab2 = create_artificial_label (UNKNOWN_LOCATION);
6372 gimple *g = gimple_build_cond (GT_EXPR, v, mem, lab1, lab2);
6373 gimple_seq_add_stmt (this_stmt_list, g);
6374 gimple_seq_add_stmt (this_stmt_list, gimple_build_label (lab1));
6375 gimplify_assign (mem2, v, this_stmt_list);
6376 }
6377 else if (predicate
6378 && ctx->combined_into_simd_safelen1
6379 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
6380 && OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c)
6381 && ctx->lastprivate_conditional_map)
6382 this_stmt_list = &post_stmt_list;
6383
6384 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
6385 || (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
6386 && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c)))
6387 {
6388 var = OMP_CLAUSE_DECL (c);
6389 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
6390 && OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c)
6391 && is_taskloop_ctx (ctx))
6392 {
6393 gcc_checking_assert (ctx->outer && is_task_ctx (ctx->outer));
6394 new_var = lookup_decl (var, ctx->outer);
6395 }
6396 else
6397 {
6398 new_var = lookup_decl (var, ctx);
6399 /* Avoid uninitialized warnings for lastprivate and
6400 for linear iterators. */
6401 if (predicate
6402 && (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
6403 || OMP_CLAUSE_LINEAR_NO_COPYIN (c)))
6404 TREE_NO_WARNING (new_var) = 1;
6405 }
6406
6407 if (!maybe_simt && simduid && DECL_HAS_VALUE_EXPR_P (new_var))
6408 {
6409 tree val = DECL_VALUE_EXPR (new_var);
6410 if (TREE_CODE (val) == ARRAY_REF
6411 && VAR_P (TREE_OPERAND (val, 0))
6412 && lookup_attribute ("omp simd array",
6413 DECL_ATTRIBUTES (TREE_OPERAND (val,
6414 0))))
6415 {
6416 if (lastlane == NULL)
6417 {
6418 lastlane = create_tmp_var (unsigned_type_node);
6419 gcall *g
6420 = gimple_build_call_internal (IFN_GOMP_SIMD_LAST_LANE,
6421 2, simduid,
6422 TREE_OPERAND (val, 1));
6423 gimple_call_set_lhs (g, lastlane);
6424 gimple_seq_add_stmt (this_stmt_list, g);
6425 }
6426 new_var = build4 (ARRAY_REF, TREE_TYPE (val),
6427 TREE_OPERAND (val, 0), lastlane,
6428 NULL_TREE, NULL_TREE);
6429 TREE_THIS_NOTRAP (new_var) = 1;
6430 }
6431 }
6432 else if (maybe_simt)
6433 {
6434 tree val = (DECL_HAS_VALUE_EXPR_P (new_var)
6435 ? DECL_VALUE_EXPR (new_var)
6436 : new_var);
6437 if (simtlast == NULL)
6438 {
6439 simtlast = create_tmp_var (unsigned_type_node);
6440 gcall *g = gimple_build_call_internal
6441 (IFN_GOMP_SIMT_LAST_LANE, 1, simtcond);
6442 gimple_call_set_lhs (g, simtlast);
6443 gimple_seq_add_stmt (this_stmt_list, g);
6444 }
6445 x = build_call_expr_internal_loc
6446 (UNKNOWN_LOCATION, IFN_GOMP_SIMT_XCHG_IDX,
6447 TREE_TYPE (val), 2, val, simtlast);
6448 new_var = unshare_expr (new_var);
6449 gimplify_assign (new_var, x, this_stmt_list);
6450 new_var = unshare_expr (new_var);
6451 }
6452
6453 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
6454 && OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c))
6455 {
6456 lower_omp (&OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c), ctx);
6457 gimple_seq_add_seq (this_stmt_list,
6458 OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c));
6459 OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c) = NULL;
6460 }
6461 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
6462 && OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c))
6463 {
6464 lower_omp (&OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c), ctx);
6465 gimple_seq_add_seq (this_stmt_list,
6466 OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c));
6467 OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c) = NULL;
6468 }
6469
6470 x = NULL_TREE;
6471 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
6472 && OMP_CLAUSE_LASTPRIVATE_TASKLOOP_IV (c))
6473 {
6474 gcc_checking_assert (is_taskloop_ctx (ctx));
6475 tree ovar = maybe_lookup_decl_in_outer_ctx (var,
6476 ctx->outer->outer);
6477 if (is_global_var (ovar))
6478 x = ovar;
6479 }
6480 if (!x)
6481 x = build_outer_var_ref (var, ctx, OMP_CLAUSE_LASTPRIVATE);
6482 if (omp_is_reference (var))
6483 new_var = build_simple_mem_ref_loc (clause_loc, new_var);
6484 x = lang_hooks.decls.omp_clause_assign_op (c, x, new_var);
6485 gimplify_and_add (x, this_stmt_list);
6486
6487 if (lab2)
6488 gimple_seq_add_stmt (this_stmt_list, gimple_build_label (lab2));
6489 }
6490
6491 next:
6492 c = OMP_CLAUSE_CHAIN (c);
6493 if (c == NULL && !par_clauses)
6494 {
6495 /* If this was a workshare clause, see if it had been combined
6496 with its parallel. In that case, continue looking for the
6497 clauses also on the parallel statement itself. */
6498 if (is_parallel_ctx (ctx))
6499 break;
6500
6501 ctx = ctx->outer;
6502 if (ctx == NULL || !is_parallel_ctx (ctx))
6503 break;
6504
6505 c = omp_find_clause (gimple_omp_parallel_clauses (ctx->stmt),
6506 OMP_CLAUSE_LASTPRIVATE);
6507 par_clauses = true;
6508 }
6509 }
6510
6511 if (label)
6512 gimple_seq_add_stmt (stmt_list, gimple_build_label (label));
6513 gimple_seq_add_seq (stmt_list, post_stmt_list);
6514 }
6515
6516 /* Lower the OpenACC reductions of CLAUSES for compute axis LEVEL
6517 (which might be a placeholder). INNER is true if this is an inner
6518 axis of a multi-axis loop. FORK and JOIN are (optional) fork and
6519 join markers. Generate the before-loop forking sequence in
6520 FORK_SEQ and the after-loop joining sequence to JOIN_SEQ. The
6521 general form of these sequences is
6522
6523 GOACC_REDUCTION_SETUP
6524 GOACC_FORK
6525 GOACC_REDUCTION_INIT
6526 ...
6527 GOACC_REDUCTION_FINI
6528 GOACC_JOIN
6529 GOACC_REDUCTION_TEARDOWN. */
6530
6531 static void
6532 lower_oacc_reductions (location_t loc, tree clauses, tree level, bool inner,
6533 gcall *fork, gcall *join, gimple_seq *fork_seq,
6534 gimple_seq *join_seq, omp_context *ctx)
6535 {
6536 gimple_seq before_fork = NULL;
6537 gimple_seq after_fork = NULL;
6538 gimple_seq before_join = NULL;
6539 gimple_seq after_join = NULL;
6540 tree init_code = NULL_TREE, fini_code = NULL_TREE,
6541 setup_code = NULL_TREE, teardown_code = NULL_TREE;
6542 unsigned offset = 0;
6543
6544 for (tree c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
6545 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION)
6546 {
6547 tree orig = OMP_CLAUSE_DECL (c);
6548 tree var = maybe_lookup_decl (orig, ctx);
6549 tree ref_to_res = NULL_TREE;
6550 tree incoming, outgoing, v1, v2, v3;
6551 bool is_private = false;
6552
6553 enum tree_code rcode = OMP_CLAUSE_REDUCTION_CODE (c);
6554 if (rcode == MINUS_EXPR)
6555 rcode = PLUS_EXPR;
6556 else if (rcode == TRUTH_ANDIF_EXPR)
6557 rcode = BIT_AND_EXPR;
6558 else if (rcode == TRUTH_ORIF_EXPR)
6559 rcode = BIT_IOR_EXPR;
6560 tree op = build_int_cst (unsigned_type_node, rcode);
6561
6562 if (!var)
6563 var = orig;
6564
6565 incoming = outgoing = var;
6566
6567 if (!inner)
6568 {
6569 /* See if an outer construct also reduces this variable. */
6570 omp_context *outer = ctx;
6571
6572 while (omp_context *probe = outer->outer)
6573 {
6574 enum gimple_code type = gimple_code (probe->stmt);
6575 tree cls;
6576
6577 switch (type)
6578 {
6579 case GIMPLE_OMP_FOR:
6580 cls = gimple_omp_for_clauses (probe->stmt);
6581 break;
6582
6583 case GIMPLE_OMP_TARGET:
6584 if (gimple_omp_target_kind (probe->stmt)
6585 != GF_OMP_TARGET_KIND_OACC_PARALLEL)
6586 goto do_lookup;
6587
6588 cls = gimple_omp_target_clauses (probe->stmt);
6589 break;
6590
6591 default:
6592 goto do_lookup;
6593 }
6594
6595 outer = probe;
6596 for (; cls; cls = OMP_CLAUSE_CHAIN (cls))
6597 if (OMP_CLAUSE_CODE (cls) == OMP_CLAUSE_REDUCTION
6598 && orig == OMP_CLAUSE_DECL (cls))
6599 {
6600 incoming = outgoing = lookup_decl (orig, probe);
6601 goto has_outer_reduction;
6602 }
6603 else if ((OMP_CLAUSE_CODE (cls) == OMP_CLAUSE_FIRSTPRIVATE
6604 || OMP_CLAUSE_CODE (cls) == OMP_CLAUSE_PRIVATE)
6605 && orig == OMP_CLAUSE_DECL (cls))
6606 {
6607 is_private = true;
6608 goto do_lookup;
6609 }
6610 }
6611
6612 do_lookup:
6613 /* This is the outermost construct with this reduction,
6614 see if there's a mapping for it. */
6615 if (gimple_code (outer->stmt) == GIMPLE_OMP_TARGET
6616 && maybe_lookup_field (orig, outer) && !is_private)
6617 {
6618 ref_to_res = build_receiver_ref (orig, false, outer);
6619 if (omp_is_reference (orig))
6620 ref_to_res = build_simple_mem_ref (ref_to_res);
6621
6622 tree type = TREE_TYPE (var);
6623 if (POINTER_TYPE_P (type))
6624 type = TREE_TYPE (type);
6625
6626 outgoing = var;
6627 incoming = omp_reduction_init_op (loc, rcode, type);
6628 }
6629 else
6630 {
6631 /* Try to look at enclosing contexts for reduction var,
6632 use original if no mapping found. */
6633 tree t = NULL_TREE;
6634 omp_context *c = ctx->outer;
6635 while (c && !t)
6636 {
6637 t = maybe_lookup_decl (orig, c);
6638 c = c->outer;
6639 }
6640 incoming = outgoing = (t ? t : orig);
6641 }
6642
6643 has_outer_reduction:;
6644 }
6645
6646 if (!ref_to_res)
6647 ref_to_res = integer_zero_node;
6648
6649 if (omp_is_reference (orig))
6650 {
6651 tree type = TREE_TYPE (var);
6652 const char *id = IDENTIFIER_POINTER (DECL_NAME (var));
6653
6654 if (!inner)
6655 {
6656 tree x = create_tmp_var (TREE_TYPE (type), id);
6657 gimplify_assign (var, build_fold_addr_expr (x), fork_seq);
6658 }
6659
6660 v1 = create_tmp_var (type, id);
6661 v2 = create_tmp_var (type, id);
6662 v3 = create_tmp_var (type, id);
6663
6664 gimplify_assign (v1, var, fork_seq);
6665 gimplify_assign (v2, var, fork_seq);
6666 gimplify_assign (v3, var, fork_seq);
6667
6668 var = build_simple_mem_ref (var);
6669 v1 = build_simple_mem_ref (v1);
6670 v2 = build_simple_mem_ref (v2);
6671 v3 = build_simple_mem_ref (v3);
6672 outgoing = build_simple_mem_ref (outgoing);
6673
6674 if (!TREE_CONSTANT (incoming))
6675 incoming = build_simple_mem_ref (incoming);
6676 }
6677 else
6678 v1 = v2 = v3 = var;
6679
6680 /* Determine position in reduction buffer, which may be used
6681 by target. The parser has ensured that this is not a
6682 variable-sized type. */
6683 fixed_size_mode mode
6684 = as_a <fixed_size_mode> (TYPE_MODE (TREE_TYPE (var)));
6685 unsigned align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
6686 offset = (offset + align - 1) & ~(align - 1);
6687 tree off = build_int_cst (sizetype, offset);
6688 offset += GET_MODE_SIZE (mode);
6689
6690 if (!init_code)
6691 {
6692 init_code = build_int_cst (integer_type_node,
6693 IFN_GOACC_REDUCTION_INIT);
6694 fini_code = build_int_cst (integer_type_node,
6695 IFN_GOACC_REDUCTION_FINI);
6696 setup_code = build_int_cst (integer_type_node,
6697 IFN_GOACC_REDUCTION_SETUP);
6698 teardown_code = build_int_cst (integer_type_node,
6699 IFN_GOACC_REDUCTION_TEARDOWN);
6700 }
6701
6702 tree setup_call
6703 = build_call_expr_internal_loc (loc, IFN_GOACC_REDUCTION,
6704 TREE_TYPE (var), 6, setup_code,
6705 unshare_expr (ref_to_res),
6706 incoming, level, op, off);
6707 tree init_call
6708 = build_call_expr_internal_loc (loc, IFN_GOACC_REDUCTION,
6709 TREE_TYPE (var), 6, init_code,
6710 unshare_expr (ref_to_res),
6711 v1, level, op, off);
6712 tree fini_call
6713 = build_call_expr_internal_loc (loc, IFN_GOACC_REDUCTION,
6714 TREE_TYPE (var), 6, fini_code,
6715 unshare_expr (ref_to_res),
6716 v2, level, op, off);
6717 tree teardown_call
6718 = build_call_expr_internal_loc (loc, IFN_GOACC_REDUCTION,
6719 TREE_TYPE (var), 6, teardown_code,
6720 ref_to_res, v3, level, op, off);
6721
6722 gimplify_assign (v1, setup_call, &before_fork);
6723 gimplify_assign (v2, init_call, &after_fork);
6724 gimplify_assign (v3, fini_call, &before_join);
6725 gimplify_assign (outgoing, teardown_call, &after_join);
6726 }
6727
6728 /* Now stitch things together. */
6729 gimple_seq_add_seq (fork_seq, before_fork);
6730 if (fork)
6731 gimple_seq_add_stmt (fork_seq, fork);
6732 gimple_seq_add_seq (fork_seq, after_fork);
6733
6734 gimple_seq_add_seq (join_seq, before_join);
6735 if (join)
6736 gimple_seq_add_stmt (join_seq, join);
6737 gimple_seq_add_seq (join_seq, after_join);
6738 }
6739
6740 /* Generate code to implement the REDUCTION clauses, append it
6741 to STMT_SEQP. CLIST if non-NULL is a pointer to a sequence
6742 that should be emitted also inside of the critical section,
6743 in that case clear *CLIST afterwards, otherwise leave it as is
6744 and let the caller emit it itself. */
6745
6746 static void
6747 lower_reduction_clauses (tree clauses, gimple_seq *stmt_seqp,
6748 gimple_seq *clist, omp_context *ctx)
6749 {
6750 gimple_seq sub_seq = NULL;
6751 gimple *stmt;
6752 tree x, c;
6753 int count = 0;
6754
6755 /* OpenACC loop reductions are handled elsewhere. */
6756 if (is_gimple_omp_oacc (ctx->stmt))
6757 return;
6758
6759 /* SIMD reductions are handled in lower_rec_input_clauses. */
6760 if (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
6761 && gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_SIMD)
6762 return;
6763
6764 /* inscan reductions are handled elsewhere. */
6765 if (ctx->scan_inclusive || ctx->scan_exclusive)
6766 return;
6767
6768 /* First see if there is exactly one reduction clause. Use OMP_ATOMIC
6769 update in that case, otherwise use a lock. */
6770 for (c = clauses; c && count < 2; c = OMP_CLAUSE_CHAIN (c))
6771 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
6772 && !OMP_CLAUSE_REDUCTION_TASK (c))
6773 {
6774 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c)
6775 || TREE_CODE (OMP_CLAUSE_DECL (c)) == MEM_REF)
6776 {
6777 /* Never use OMP_ATOMIC for array reductions or UDRs. */
6778 count = -1;
6779 break;
6780 }
6781 count++;
6782 }
6783
6784 if (count == 0)
6785 return;
6786
6787 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
6788 {
6789 tree var, ref, new_var, orig_var;
6790 enum tree_code code;
6791 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
6792
6793 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION
6794 || OMP_CLAUSE_REDUCTION_TASK (c))
6795 continue;
6796
6797 enum omp_clause_code ccode = OMP_CLAUSE_REDUCTION;
6798 orig_var = var = OMP_CLAUSE_DECL (c);
6799 if (TREE_CODE (var) == MEM_REF)
6800 {
6801 var = TREE_OPERAND (var, 0);
6802 if (TREE_CODE (var) == POINTER_PLUS_EXPR)
6803 var = TREE_OPERAND (var, 0);
6804 if (TREE_CODE (var) == ADDR_EXPR)
6805 var = TREE_OPERAND (var, 0);
6806 else
6807 {
6808 /* If this is a pointer or referenced based array
6809 section, the var could be private in the outer
6810 context e.g. on orphaned loop construct. Pretend this
6811 is private variable's outer reference. */
6812 ccode = OMP_CLAUSE_PRIVATE;
6813 if (TREE_CODE (var) == INDIRECT_REF)
6814 var = TREE_OPERAND (var, 0);
6815 }
6816 orig_var = var;
6817 if (is_variable_sized (var))
6818 {
6819 gcc_assert (DECL_HAS_VALUE_EXPR_P (var));
6820 var = DECL_VALUE_EXPR (var);
6821 gcc_assert (TREE_CODE (var) == INDIRECT_REF);
6822 var = TREE_OPERAND (var, 0);
6823 gcc_assert (DECL_P (var));
6824 }
6825 }
6826 new_var = lookup_decl (var, ctx);
6827 if (var == OMP_CLAUSE_DECL (c) && omp_is_reference (var))
6828 new_var = build_simple_mem_ref_loc (clause_loc, new_var);
6829 ref = build_outer_var_ref (var, ctx, ccode);
6830 code = OMP_CLAUSE_REDUCTION_CODE (c);
6831
6832 /* reduction(-:var) sums up the partial results, so it acts
6833 identically to reduction(+:var). */
6834 if (code == MINUS_EXPR)
6835 code = PLUS_EXPR;
6836
6837 if (count == 1)
6838 {
6839 tree addr = build_fold_addr_expr_loc (clause_loc, ref);
6840
6841 addr = save_expr (addr);
6842 ref = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (addr)), addr);
6843 x = fold_build2_loc (clause_loc, code, TREE_TYPE (ref), ref, new_var);
6844 x = build2 (OMP_ATOMIC, void_type_node, addr, x);
6845 OMP_ATOMIC_MEMORY_ORDER (x) = OMP_MEMORY_ORDER_RELAXED;
6846 gimplify_and_add (x, stmt_seqp);
6847 return;
6848 }
6849 else if (TREE_CODE (OMP_CLAUSE_DECL (c)) == MEM_REF)
6850 {
6851 tree d = OMP_CLAUSE_DECL (c);
6852 tree type = TREE_TYPE (d);
6853 tree v = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
6854 tree i = create_tmp_var (TREE_TYPE (v));
6855 tree ptype = build_pointer_type (TREE_TYPE (type));
6856 tree bias = TREE_OPERAND (d, 1);
6857 d = TREE_OPERAND (d, 0);
6858 if (TREE_CODE (d) == POINTER_PLUS_EXPR)
6859 {
6860 tree b = TREE_OPERAND (d, 1);
6861 b = maybe_lookup_decl (b, ctx);
6862 if (b == NULL)
6863 {
6864 b = TREE_OPERAND (d, 1);
6865 b = maybe_lookup_decl_in_outer_ctx (b, ctx);
6866 }
6867 if (integer_zerop (bias))
6868 bias = b;
6869 else
6870 {
6871 bias = fold_convert_loc (clause_loc, TREE_TYPE (b), bias);
6872 bias = fold_build2_loc (clause_loc, PLUS_EXPR,
6873 TREE_TYPE (b), b, bias);
6874 }
6875 d = TREE_OPERAND (d, 0);
6876 }
6877 /* For ref build_outer_var_ref already performs this, so
6878 only new_var needs a dereference. */
6879 if (TREE_CODE (d) == INDIRECT_REF)
6880 {
6881 new_var = build_simple_mem_ref_loc (clause_loc, new_var);
6882 gcc_assert (omp_is_reference (var) && var == orig_var);
6883 }
6884 else if (TREE_CODE (d) == ADDR_EXPR)
6885 {
6886 if (orig_var == var)
6887 {
6888 new_var = build_fold_addr_expr (new_var);
6889 ref = build_fold_addr_expr (ref);
6890 }
6891 }
6892 else
6893 {
6894 gcc_assert (orig_var == var);
6895 if (omp_is_reference (var))
6896 ref = build_fold_addr_expr (ref);
6897 }
6898 if (DECL_P (v))
6899 {
6900 tree t = maybe_lookup_decl (v, ctx);
6901 if (t)
6902 v = t;
6903 else
6904 v = maybe_lookup_decl_in_outer_ctx (v, ctx);
6905 gimplify_expr (&v, stmt_seqp, NULL, is_gimple_val, fb_rvalue);
6906 }
6907 if (!integer_zerop (bias))
6908 {
6909 bias = fold_convert_loc (clause_loc, sizetype, bias);
6910 new_var = fold_build2_loc (clause_loc, POINTER_PLUS_EXPR,
6911 TREE_TYPE (new_var), new_var,
6912 unshare_expr (bias));
6913 ref = fold_build2_loc (clause_loc, POINTER_PLUS_EXPR,
6914 TREE_TYPE (ref), ref, bias);
6915 }
6916 new_var = fold_convert_loc (clause_loc, ptype, new_var);
6917 ref = fold_convert_loc (clause_loc, ptype, ref);
6918 tree m = create_tmp_var (ptype);
6919 gimplify_assign (m, new_var, stmt_seqp);
6920 new_var = m;
6921 m = create_tmp_var (ptype);
6922 gimplify_assign (m, ref, stmt_seqp);
6923 ref = m;
6924 gimplify_assign (i, build_int_cst (TREE_TYPE (v), 0), stmt_seqp);
6925 tree body = create_artificial_label (UNKNOWN_LOCATION);
6926 tree end = create_artificial_label (UNKNOWN_LOCATION);
6927 gimple_seq_add_stmt (&sub_seq, gimple_build_label (body));
6928 tree priv = build_simple_mem_ref_loc (clause_loc, new_var);
6929 tree out = build_simple_mem_ref_loc (clause_loc, ref);
6930 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
6931 {
6932 tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
6933 tree decl_placeholder
6934 = OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c);
6935 SET_DECL_VALUE_EXPR (placeholder, out);
6936 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
6937 SET_DECL_VALUE_EXPR (decl_placeholder, priv);
6938 DECL_HAS_VALUE_EXPR_P (decl_placeholder) = 1;
6939 lower_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c), ctx);
6940 gimple_seq_add_seq (&sub_seq,
6941 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c));
6942 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
6943 OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) = NULL;
6944 OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c) = NULL;
6945 }
6946 else
6947 {
6948 x = build2 (code, TREE_TYPE (out), out, priv);
6949 out = unshare_expr (out);
6950 gimplify_assign (out, x, &sub_seq);
6951 }
6952 gimple *g = gimple_build_assign (new_var, POINTER_PLUS_EXPR, new_var,
6953 TYPE_SIZE_UNIT (TREE_TYPE (type)));
6954 gimple_seq_add_stmt (&sub_seq, g);
6955 g = gimple_build_assign (ref, POINTER_PLUS_EXPR, ref,
6956 TYPE_SIZE_UNIT (TREE_TYPE (type)));
6957 gimple_seq_add_stmt (&sub_seq, g);
6958 g = gimple_build_assign (i, PLUS_EXPR, i,
6959 build_int_cst (TREE_TYPE (i), 1));
6960 gimple_seq_add_stmt (&sub_seq, g);
6961 g = gimple_build_cond (LE_EXPR, i, v, body, end);
6962 gimple_seq_add_stmt (&sub_seq, g);
6963 gimple_seq_add_stmt (&sub_seq, gimple_build_label (end));
6964 }
6965 else if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
6966 {
6967 tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
6968
6969 if (omp_is_reference (var)
6970 && !useless_type_conversion_p (TREE_TYPE (placeholder),
6971 TREE_TYPE (ref)))
6972 ref = build_fold_addr_expr_loc (clause_loc, ref);
6973 SET_DECL_VALUE_EXPR (placeholder, ref);
6974 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
6975 lower_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c), ctx);
6976 gimple_seq_add_seq (&sub_seq, OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c));
6977 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
6978 OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) = NULL;
6979 }
6980 else
6981 {
6982 x = build2 (code, TREE_TYPE (ref), ref, new_var);
6983 ref = build_outer_var_ref (var, ctx);
6984 gimplify_assign (ref, x, &sub_seq);
6985 }
6986 }
6987
6988 stmt = gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_ATOMIC_START),
6989 0);
6990 gimple_seq_add_stmt (stmt_seqp, stmt);
6991
6992 gimple_seq_add_seq (stmt_seqp, sub_seq);
6993
6994 if (clist)
6995 {
6996 gimple_seq_add_seq (stmt_seqp, *clist);
6997 *clist = NULL;
6998 }
6999
7000 stmt = gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_ATOMIC_END),
7001 0);
7002 gimple_seq_add_stmt (stmt_seqp, stmt);
7003 }
7004
7005
7006 /* Generate code to implement the COPYPRIVATE clauses. */
7007
7008 static void
7009 lower_copyprivate_clauses (tree clauses, gimple_seq *slist, gimple_seq *rlist,
7010 omp_context *ctx)
7011 {
7012 tree c;
7013
7014 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
7015 {
7016 tree var, new_var, ref, x;
7017 bool by_ref;
7018 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
7019
7020 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_COPYPRIVATE)
7021 continue;
7022
7023 var = OMP_CLAUSE_DECL (c);
7024 by_ref = use_pointer_for_field (var, NULL);
7025
7026 ref = build_sender_ref (var, ctx);
7027 x = new_var = lookup_decl_in_outer_ctx (var, ctx);
7028 if (by_ref)
7029 {
7030 x = build_fold_addr_expr_loc (clause_loc, new_var);
7031 x = fold_convert_loc (clause_loc, TREE_TYPE (ref), x);
7032 }
7033 gimplify_assign (ref, x, slist);
7034
7035 ref = build_receiver_ref (var, false, ctx);
7036 if (by_ref)
7037 {
7038 ref = fold_convert_loc (clause_loc,
7039 build_pointer_type (TREE_TYPE (new_var)),
7040 ref);
7041 ref = build_fold_indirect_ref_loc (clause_loc, ref);
7042 }
7043 if (omp_is_reference (var))
7044 {
7045 ref = fold_convert_loc (clause_loc, TREE_TYPE (new_var), ref);
7046 ref = build_simple_mem_ref_loc (clause_loc, ref);
7047 new_var = build_simple_mem_ref_loc (clause_loc, new_var);
7048 }
7049 x = lang_hooks.decls.omp_clause_assign_op (c, new_var, ref);
7050 gimplify_and_add (x, rlist);
7051 }
7052 }
7053
7054
7055 /* Generate code to implement the clauses, FIRSTPRIVATE, COPYIN, LASTPRIVATE,
7056 and REDUCTION from the sender (aka parent) side. */
7057
7058 static void
7059 lower_send_clauses (tree clauses, gimple_seq *ilist, gimple_seq *olist,
7060 omp_context *ctx)
7061 {
7062 tree c, t;
7063 int ignored_looptemp = 0;
7064 bool is_taskloop = false;
7065
7066 /* For taskloop, ignore first two _looptemp_ clauses, those are initialized
7067 by GOMP_taskloop. */
7068 if (is_task_ctx (ctx) && gimple_omp_task_taskloop_p (ctx->stmt))
7069 {
7070 ignored_looptemp = 2;
7071 is_taskloop = true;
7072 }
7073
7074 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
7075 {
7076 tree val, ref, x, var;
7077 bool by_ref, do_in = false, do_out = false;
7078 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
7079
7080 switch (OMP_CLAUSE_CODE (c))
7081 {
7082 case OMP_CLAUSE_PRIVATE:
7083 if (OMP_CLAUSE_PRIVATE_OUTER_REF (c))
7084 break;
7085 continue;
7086 case OMP_CLAUSE_FIRSTPRIVATE:
7087 case OMP_CLAUSE_COPYIN:
7088 case OMP_CLAUSE_LASTPRIVATE:
7089 case OMP_CLAUSE_IN_REDUCTION:
7090 case OMP_CLAUSE__REDUCTEMP_:
7091 break;
7092 case OMP_CLAUSE_REDUCTION:
7093 if (is_task_ctx (ctx) || OMP_CLAUSE_REDUCTION_TASK (c))
7094 continue;
7095 break;
7096 case OMP_CLAUSE_SHARED:
7097 if (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
7098 break;
7099 continue;
7100 case OMP_CLAUSE__LOOPTEMP_:
7101 if (ignored_looptemp)
7102 {
7103 ignored_looptemp--;
7104 continue;
7105 }
7106 break;
7107 default:
7108 continue;
7109 }
7110
7111 val = OMP_CLAUSE_DECL (c);
7112 if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
7113 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_IN_REDUCTION)
7114 && TREE_CODE (val) == MEM_REF)
7115 {
7116 val = TREE_OPERAND (val, 0);
7117 if (TREE_CODE (val) == POINTER_PLUS_EXPR)
7118 val = TREE_OPERAND (val, 0);
7119 if (TREE_CODE (val) == INDIRECT_REF
7120 || TREE_CODE (val) == ADDR_EXPR)
7121 val = TREE_OPERAND (val, 0);
7122 if (is_variable_sized (val))
7123 continue;
7124 }
7125
7126 /* For OMP_CLAUSE_SHARED_FIRSTPRIVATE, look beyond the
7127 outer taskloop region. */
7128 omp_context *ctx_for_o = ctx;
7129 if (is_taskloop
7130 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED
7131 && OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
7132 ctx_for_o = ctx->outer;
7133
7134 var = lookup_decl_in_outer_ctx (val, ctx_for_o);
7135
7136 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_COPYIN
7137 && is_global_var (var)
7138 && (val == OMP_CLAUSE_DECL (c)
7139 || !is_task_ctx (ctx)
7140 || (TREE_CODE (TREE_TYPE (val)) != POINTER_TYPE
7141 && (TREE_CODE (TREE_TYPE (val)) != REFERENCE_TYPE
7142 || (TREE_CODE (TREE_TYPE (TREE_TYPE (val)))
7143 != POINTER_TYPE)))))
7144 continue;
7145
7146 t = omp_member_access_dummy_var (var);
7147 if (t)
7148 {
7149 var = DECL_VALUE_EXPR (var);
7150 tree o = maybe_lookup_decl_in_outer_ctx (t, ctx_for_o);
7151 if (o != t)
7152 var = unshare_and_remap (var, t, o);
7153 else
7154 var = unshare_expr (var);
7155 }
7156
7157 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED)
7158 {
7159 /* Handle taskloop firstprivate/lastprivate, where the
7160 lastprivate on GIMPLE_OMP_TASK is represented as
7161 OMP_CLAUSE_SHARED_FIRSTPRIVATE. */
7162 tree f = lookup_sfield ((splay_tree_key) &DECL_UID (val), ctx);
7163 x = omp_build_component_ref (ctx->sender_decl, f);
7164 if (use_pointer_for_field (val, ctx))
7165 var = build_fold_addr_expr (var);
7166 gimplify_assign (x, var, ilist);
7167 DECL_ABSTRACT_ORIGIN (f) = NULL;
7168 continue;
7169 }
7170
7171 if (((OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION
7172 && OMP_CLAUSE_CODE (c) != OMP_CLAUSE_IN_REDUCTION)
7173 || val == OMP_CLAUSE_DECL (c))
7174 && is_variable_sized (val))
7175 continue;
7176 by_ref = use_pointer_for_field (val, NULL);
7177
7178 switch (OMP_CLAUSE_CODE (c))
7179 {
7180 case OMP_CLAUSE_FIRSTPRIVATE:
7181 if (OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (c)
7182 && !by_ref
7183 && is_task_ctx (ctx))
7184 TREE_NO_WARNING (var) = 1;
7185 do_in = true;
7186 break;
7187
7188 case OMP_CLAUSE_PRIVATE:
7189 case OMP_CLAUSE_COPYIN:
7190 case OMP_CLAUSE__LOOPTEMP_:
7191 case OMP_CLAUSE__REDUCTEMP_:
7192 do_in = true;
7193 break;
7194
7195 case OMP_CLAUSE_LASTPRIVATE:
7196 if (by_ref || omp_is_reference (val))
7197 {
7198 if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
7199 continue;
7200 do_in = true;
7201 }
7202 else
7203 {
7204 do_out = true;
7205 if (lang_hooks.decls.omp_private_outer_ref (val))
7206 do_in = true;
7207 }
7208 break;
7209
7210 case OMP_CLAUSE_REDUCTION:
7211 case OMP_CLAUSE_IN_REDUCTION:
7212 do_in = true;
7213 if (val == OMP_CLAUSE_DECL (c))
7214 {
7215 if (is_task_ctx (ctx))
7216 by_ref = use_pointer_for_field (val, ctx);
7217 else
7218 do_out = !(by_ref || omp_is_reference (val));
7219 }
7220 else
7221 by_ref = TREE_CODE (TREE_TYPE (val)) == ARRAY_TYPE;
7222 break;
7223
7224 default:
7225 gcc_unreachable ();
7226 }
7227
7228 if (do_in)
7229 {
7230 ref = build_sender_ref (val, ctx);
7231 x = by_ref ? build_fold_addr_expr_loc (clause_loc, var) : var;
7232 gimplify_assign (ref, x, ilist);
7233 if (is_task_ctx (ctx))
7234 DECL_ABSTRACT_ORIGIN (TREE_OPERAND (ref, 1)) = NULL;
7235 }
7236
7237 if (do_out)
7238 {
7239 ref = build_sender_ref (val, ctx);
7240 gimplify_assign (var, ref, olist);
7241 }
7242 }
7243 }
7244
7245 /* Generate code to implement SHARED from the sender (aka parent)
7246 side. This is trickier, since GIMPLE_OMP_PARALLEL_CLAUSES doesn't
7247 list things that got automatically shared. */
7248
7249 static void
7250 lower_send_shared_vars (gimple_seq *ilist, gimple_seq *olist, omp_context *ctx)
7251 {
7252 tree var, ovar, nvar, t, f, x, record_type;
7253
7254 if (ctx->record_type == NULL)
7255 return;
7256
7257 record_type = ctx->srecord_type ? ctx->srecord_type : ctx->record_type;
7258 for (f = TYPE_FIELDS (record_type); f ; f = DECL_CHAIN (f))
7259 {
7260 ovar = DECL_ABSTRACT_ORIGIN (f);
7261 if (!ovar || TREE_CODE (ovar) == FIELD_DECL)
7262 continue;
7263
7264 nvar = maybe_lookup_decl (ovar, ctx);
7265 if (!nvar || !DECL_HAS_VALUE_EXPR_P (nvar))
7266 continue;
7267
7268 /* If CTX is a nested parallel directive. Find the immediately
7269 enclosing parallel or workshare construct that contains a
7270 mapping for OVAR. */
7271 var = lookup_decl_in_outer_ctx (ovar, ctx);
7272
7273 t = omp_member_access_dummy_var (var);
7274 if (t)
7275 {
7276 var = DECL_VALUE_EXPR (var);
7277 tree o = maybe_lookup_decl_in_outer_ctx (t, ctx);
7278 if (o != t)
7279 var = unshare_and_remap (var, t, o);
7280 else
7281 var = unshare_expr (var);
7282 }
7283
7284 if (use_pointer_for_field (ovar, ctx))
7285 {
7286 x = build_sender_ref (ovar, ctx);
7287 if (TREE_CODE (TREE_TYPE (f)) == ARRAY_TYPE
7288 && TREE_TYPE (f) == TREE_TYPE (ovar))
7289 {
7290 gcc_assert (is_parallel_ctx (ctx)
7291 && DECL_ARTIFICIAL (ovar));
7292 /* _condtemp_ clause. */
7293 var = build_constructor (TREE_TYPE (x), NULL);
7294 }
7295 else
7296 var = build_fold_addr_expr (var);
7297 gimplify_assign (x, var, ilist);
7298 }
7299 else
7300 {
7301 x = build_sender_ref (ovar, ctx);
7302 gimplify_assign (x, var, ilist);
7303
7304 if (!TREE_READONLY (var)
7305 /* We don't need to receive a new reference to a result
7306 or parm decl. In fact we may not store to it as we will
7307 invalidate any pending RSO and generate wrong gimple
7308 during inlining. */
7309 && !((TREE_CODE (var) == RESULT_DECL
7310 || TREE_CODE (var) == PARM_DECL)
7311 && DECL_BY_REFERENCE (var)))
7312 {
7313 x = build_sender_ref (ovar, ctx);
7314 gimplify_assign (var, x, olist);
7315 }
7316 }
7317 }
7318 }
7319
7320 /* Emit an OpenACC head marker call, encapulating the partitioning and
7321 other information that must be processed by the target compiler.
7322 Return the maximum number of dimensions the associated loop might
7323 be partitioned over. */
7324
7325 static unsigned
7326 lower_oacc_head_mark (location_t loc, tree ddvar, tree clauses,
7327 gimple_seq *seq, omp_context *ctx)
7328 {
7329 unsigned levels = 0;
7330 unsigned tag = 0;
7331 tree gang_static = NULL_TREE;
7332 auto_vec<tree, 5> args;
7333
7334 args.quick_push (build_int_cst
7335 (integer_type_node, IFN_UNIQUE_OACC_HEAD_MARK));
7336 args.quick_push (ddvar);
7337 for (tree c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
7338 {
7339 switch (OMP_CLAUSE_CODE (c))
7340 {
7341 case OMP_CLAUSE_GANG:
7342 tag |= OLF_DIM_GANG;
7343 gang_static = OMP_CLAUSE_GANG_STATIC_EXPR (c);
7344 /* static:* is represented by -1, and we can ignore it, as
7345 scheduling is always static. */
7346 if (gang_static && integer_minus_onep (gang_static))
7347 gang_static = NULL_TREE;
7348 levels++;
7349 break;
7350
7351 case OMP_CLAUSE_WORKER:
7352 tag |= OLF_DIM_WORKER;
7353 levels++;
7354 break;
7355
7356 case OMP_CLAUSE_VECTOR:
7357 tag |= OLF_DIM_VECTOR;
7358 levels++;
7359 break;
7360
7361 case OMP_CLAUSE_SEQ:
7362 tag |= OLF_SEQ;
7363 break;
7364
7365 case OMP_CLAUSE_AUTO:
7366 tag |= OLF_AUTO;
7367 break;
7368
7369 case OMP_CLAUSE_INDEPENDENT:
7370 tag |= OLF_INDEPENDENT;
7371 break;
7372
7373 case OMP_CLAUSE_TILE:
7374 tag |= OLF_TILE;
7375 break;
7376
7377 default:
7378 continue;
7379 }
7380 }
7381
7382 if (gang_static)
7383 {
7384 if (DECL_P (gang_static))
7385 gang_static = build_outer_var_ref (gang_static, ctx);
7386 tag |= OLF_GANG_STATIC;
7387 }
7388
7389 /* In a parallel region, loops are implicitly INDEPENDENT. */
7390 omp_context *tgt = enclosing_target_ctx (ctx);
7391 if (!tgt || is_oacc_parallel (tgt))
7392 tag |= OLF_INDEPENDENT;
7393
7394 if (tag & OLF_TILE)
7395 /* Tiling could use all 3 levels. */
7396 levels = 3;
7397 else
7398 {
7399 /* A loop lacking SEQ, GANG, WORKER and/or VECTOR could be AUTO.
7400 Ensure at least one level, or 2 for possible auto
7401 partitioning */
7402 bool maybe_auto = !(tag & (((GOMP_DIM_MASK (GOMP_DIM_MAX) - 1)
7403 << OLF_DIM_BASE) | OLF_SEQ));
7404
7405 if (levels < 1u + maybe_auto)
7406 levels = 1u + maybe_auto;
7407 }
7408
7409 args.quick_push (build_int_cst (integer_type_node, levels));
7410 args.quick_push (build_int_cst (integer_type_node, tag));
7411 if (gang_static)
7412 args.quick_push (gang_static);
7413
7414 gcall *call = gimple_build_call_internal_vec (IFN_UNIQUE, args);
7415 gimple_set_location (call, loc);
7416 gimple_set_lhs (call, ddvar);
7417 gimple_seq_add_stmt (seq, call);
7418
7419 return levels;
7420 }
7421
7422 /* Emit an OpenACC lopp head or tail marker to SEQ. LEVEL is the
7423 partitioning level of the enclosed region. */
7424
7425 static void
7426 lower_oacc_loop_marker (location_t loc, tree ddvar, bool head,
7427 tree tofollow, gimple_seq *seq)
7428 {
7429 int marker_kind = (head ? IFN_UNIQUE_OACC_HEAD_MARK
7430 : IFN_UNIQUE_OACC_TAIL_MARK);
7431 tree marker = build_int_cst (integer_type_node, marker_kind);
7432 int nargs = 2 + (tofollow != NULL_TREE);
7433 gcall *call = gimple_build_call_internal (IFN_UNIQUE, nargs,
7434 marker, ddvar, tofollow);
7435 gimple_set_location (call, loc);
7436 gimple_set_lhs (call, ddvar);
7437 gimple_seq_add_stmt (seq, call);
7438 }
7439
7440 /* Generate the before and after OpenACC loop sequences. CLAUSES are
7441 the loop clauses, from which we extract reductions. Initialize
7442 HEAD and TAIL. */
7443
7444 static void
7445 lower_oacc_head_tail (location_t loc, tree clauses,
7446 gimple_seq *head, gimple_seq *tail, omp_context *ctx)
7447 {
7448 bool inner = false;
7449 tree ddvar = create_tmp_var (integer_type_node, ".data_dep");
7450 gimple_seq_add_stmt (head, gimple_build_assign (ddvar, integer_zero_node));
7451
7452 unsigned count = lower_oacc_head_mark (loc, ddvar, clauses, head, ctx);
7453 tree fork_kind = build_int_cst (unsigned_type_node, IFN_UNIQUE_OACC_FORK);
7454 tree join_kind = build_int_cst (unsigned_type_node, IFN_UNIQUE_OACC_JOIN);
7455
7456 gcc_assert (count);
7457 for (unsigned done = 1; count; count--, done++)
7458 {
7459 gimple_seq fork_seq = NULL;
7460 gimple_seq join_seq = NULL;
7461
7462 tree place = build_int_cst (integer_type_node, -1);
7463 gcall *fork = gimple_build_call_internal (IFN_UNIQUE, 3,
7464 fork_kind, ddvar, place);
7465 gimple_set_location (fork, loc);
7466 gimple_set_lhs (fork, ddvar);
7467
7468 gcall *join = gimple_build_call_internal (IFN_UNIQUE, 3,
7469 join_kind, ddvar, place);
7470 gimple_set_location (join, loc);
7471 gimple_set_lhs (join, ddvar);
7472
7473 /* Mark the beginning of this level sequence. */
7474 if (inner)
7475 lower_oacc_loop_marker (loc, ddvar, true,
7476 build_int_cst (integer_type_node, count),
7477 &fork_seq);
7478 lower_oacc_loop_marker (loc, ddvar, false,
7479 build_int_cst (integer_type_node, done),
7480 &join_seq);
7481
7482 lower_oacc_reductions (loc, clauses, place, inner,
7483 fork, join, &fork_seq, &join_seq, ctx);
7484
7485 /* Append this level to head. */
7486 gimple_seq_add_seq (head, fork_seq);
7487 /* Prepend it to tail. */
7488 gimple_seq_add_seq (&join_seq, *tail);
7489 *tail = join_seq;
7490
7491 inner = true;
7492 }
7493
7494 /* Mark the end of the sequence. */
7495 lower_oacc_loop_marker (loc, ddvar, true, NULL_TREE, head);
7496 lower_oacc_loop_marker (loc, ddvar, false, NULL_TREE, tail);
7497 }
7498
7499 /* If exceptions are enabled, wrap the statements in BODY in a MUST_NOT_THROW
7500 catch handler and return it. This prevents programs from violating the
7501 structured block semantics with throws. */
7502
7503 static gimple_seq
7504 maybe_catch_exception (gimple_seq body)
7505 {
7506 gimple *g;
7507 tree decl;
7508
7509 if (!flag_exceptions)
7510 return body;
7511
7512 if (lang_hooks.eh_protect_cleanup_actions != NULL)
7513 decl = lang_hooks.eh_protect_cleanup_actions ();
7514 else
7515 decl = builtin_decl_explicit (BUILT_IN_TRAP);
7516
7517 g = gimple_build_eh_must_not_throw (decl);
7518 g = gimple_build_try (body, gimple_seq_alloc_with_stmt (g),
7519 GIMPLE_TRY_CATCH);
7520
7521 return gimple_seq_alloc_with_stmt (g);
7522 }
7523
7524 \f
7525 /* Routines to lower OMP directives into OMP-GIMPLE. */
7526
7527 /* If ctx is a worksharing context inside of a cancellable parallel
7528 region and it isn't nowait, add lhs to its GIMPLE_OMP_RETURN
7529 and conditional branch to parallel's cancel_label to handle
7530 cancellation in the implicit barrier. */
7531
7532 static void
7533 maybe_add_implicit_barrier_cancel (omp_context *ctx, gimple *omp_return,
7534 gimple_seq *body)
7535 {
7536 gcc_assert (gimple_code (omp_return) == GIMPLE_OMP_RETURN);
7537 if (gimple_omp_return_nowait_p (omp_return))
7538 return;
7539 for (omp_context *outer = ctx->outer; outer; outer = outer->outer)
7540 if (gimple_code (outer->stmt) == GIMPLE_OMP_PARALLEL
7541 && outer->cancellable)
7542 {
7543 tree fndecl = builtin_decl_explicit (BUILT_IN_GOMP_CANCEL);
7544 tree c_bool_type = TREE_TYPE (TREE_TYPE (fndecl));
7545 tree lhs = create_tmp_var (c_bool_type);
7546 gimple_omp_return_set_lhs (omp_return, lhs);
7547 tree fallthru_label = create_artificial_label (UNKNOWN_LOCATION);
7548 gimple *g = gimple_build_cond (NE_EXPR, lhs,
7549 fold_convert (c_bool_type,
7550 boolean_false_node),
7551 outer->cancel_label, fallthru_label);
7552 gimple_seq_add_stmt (body, g);
7553 gimple_seq_add_stmt (body, gimple_build_label (fallthru_label));
7554 }
7555 else if (gimple_code (outer->stmt) != GIMPLE_OMP_TASKGROUP)
7556 return;
7557 }
7558
7559 /* Find the first task_reduction or reduction clause or return NULL
7560 if there are none. */
7561
7562 static inline tree
7563 omp_task_reductions_find_first (tree clauses, enum tree_code code,
7564 enum omp_clause_code ccode)
7565 {
7566 while (1)
7567 {
7568 clauses = omp_find_clause (clauses, ccode);
7569 if (clauses == NULL_TREE)
7570 return NULL_TREE;
7571 if (ccode != OMP_CLAUSE_REDUCTION
7572 || code == OMP_TASKLOOP
7573 || OMP_CLAUSE_REDUCTION_TASK (clauses))
7574 return clauses;
7575 clauses = OMP_CLAUSE_CHAIN (clauses);
7576 }
7577 }
7578
7579 static void lower_omp_task_reductions (omp_context *, enum tree_code, tree,
7580 gimple_seq *, gimple_seq *);
7581
7582 /* Lower the OpenMP sections directive in the current statement in GSI_P.
7583 CTX is the enclosing OMP context for the current statement. */
7584
7585 static void
7586 lower_omp_sections (gimple_stmt_iterator *gsi_p, omp_context *ctx)
7587 {
7588 tree block, control;
7589 gimple_stmt_iterator tgsi;
7590 gomp_sections *stmt;
7591 gimple *t;
7592 gbind *new_stmt, *bind;
7593 gimple_seq ilist, dlist, olist, tred_dlist = NULL, clist = NULL, new_body;
7594
7595 stmt = as_a <gomp_sections *> (gsi_stmt (*gsi_p));
7596
7597 push_gimplify_context ();
7598
7599 dlist = NULL;
7600 ilist = NULL;
7601
7602 tree rclauses
7603 = omp_task_reductions_find_first (gimple_omp_sections_clauses (stmt),
7604 OMP_SECTIONS, OMP_CLAUSE_REDUCTION);
7605 tree rtmp = NULL_TREE;
7606 if (rclauses)
7607 {
7608 tree type = build_pointer_type (pointer_sized_int_node);
7609 tree temp = create_tmp_var (type);
7610 tree c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__REDUCTEMP_);
7611 OMP_CLAUSE_DECL (c) = temp;
7612 OMP_CLAUSE_CHAIN (c) = gimple_omp_sections_clauses (stmt);
7613 gimple_omp_sections_set_clauses (stmt, c);
7614 lower_omp_task_reductions (ctx, OMP_SECTIONS,
7615 gimple_omp_sections_clauses (stmt),
7616 &ilist, &tred_dlist);
7617 rclauses = c;
7618 rtmp = make_ssa_name (type);
7619 gimple_seq_add_stmt (&ilist, gimple_build_assign (rtmp, temp));
7620 }
7621
7622 tree *clauses_ptr = gimple_omp_sections_clauses_ptr (stmt);
7623 lower_lastprivate_conditional_clauses (clauses_ptr, ctx);
7624
7625 lower_rec_input_clauses (gimple_omp_sections_clauses (stmt),
7626 &ilist, &dlist, ctx, NULL);
7627
7628 control = create_tmp_var (unsigned_type_node, ".section");
7629 gimple_omp_sections_set_control (stmt, control);
7630
7631 new_body = gimple_omp_body (stmt);
7632 gimple_omp_set_body (stmt, NULL);
7633 tgsi = gsi_start (new_body);
7634 for (; !gsi_end_p (tgsi); gsi_next (&tgsi))
7635 {
7636 omp_context *sctx;
7637 gimple *sec_start;
7638
7639 sec_start = gsi_stmt (tgsi);
7640 sctx = maybe_lookup_ctx (sec_start);
7641 gcc_assert (sctx);
7642
7643 lower_omp (gimple_omp_body_ptr (sec_start), sctx);
7644 gsi_insert_seq_after (&tgsi, gimple_omp_body (sec_start),
7645 GSI_CONTINUE_LINKING);
7646 gimple_omp_set_body (sec_start, NULL);
7647
7648 if (gsi_one_before_end_p (tgsi))
7649 {
7650 gimple_seq l = NULL;
7651 lower_lastprivate_clauses (gimple_omp_sections_clauses (stmt), NULL,
7652 &ilist, &l, &clist, ctx);
7653 gsi_insert_seq_after (&tgsi, l, GSI_CONTINUE_LINKING);
7654 gimple_omp_section_set_last (sec_start);
7655 }
7656
7657 gsi_insert_after (&tgsi, gimple_build_omp_return (false),
7658 GSI_CONTINUE_LINKING);
7659 }
7660
7661 block = make_node (BLOCK);
7662 bind = gimple_build_bind (NULL, new_body, block);
7663
7664 olist = NULL;
7665 lower_reduction_clauses (gimple_omp_sections_clauses (stmt), &olist,
7666 &clist, ctx);
7667 if (clist)
7668 {
7669 tree fndecl = builtin_decl_explicit (BUILT_IN_GOMP_ATOMIC_START);
7670 gcall *g = gimple_build_call (fndecl, 0);
7671 gimple_seq_add_stmt (&olist, g);
7672 gimple_seq_add_seq (&olist, clist);
7673 fndecl = builtin_decl_explicit (BUILT_IN_GOMP_ATOMIC_END);
7674 g = gimple_build_call (fndecl, 0);
7675 gimple_seq_add_stmt (&olist, g);
7676 }
7677
7678 block = make_node (BLOCK);
7679 new_stmt = gimple_build_bind (NULL, NULL, block);
7680 gsi_replace (gsi_p, new_stmt, true);
7681
7682 pop_gimplify_context (new_stmt);
7683 gimple_bind_append_vars (new_stmt, ctx->block_vars);
7684 BLOCK_VARS (block) = gimple_bind_vars (bind);
7685 if (BLOCK_VARS (block))
7686 TREE_USED (block) = 1;
7687
7688 new_body = NULL;
7689 gimple_seq_add_seq (&new_body, ilist);
7690 gimple_seq_add_stmt (&new_body, stmt);
7691 gimple_seq_add_stmt (&new_body, gimple_build_omp_sections_switch ());
7692 gimple_seq_add_stmt (&new_body, bind);
7693
7694 t = gimple_build_omp_continue (control, control);
7695 gimple_seq_add_stmt (&new_body, t);
7696
7697 gimple_seq_add_seq (&new_body, olist);
7698 if (ctx->cancellable)
7699 gimple_seq_add_stmt (&new_body, gimple_build_label (ctx->cancel_label));
7700 gimple_seq_add_seq (&new_body, dlist);
7701
7702 new_body = maybe_catch_exception (new_body);
7703
7704 bool nowait = omp_find_clause (gimple_omp_sections_clauses (stmt),
7705 OMP_CLAUSE_NOWAIT) != NULL_TREE;
7706 t = gimple_build_omp_return (nowait);
7707 gimple_seq_add_stmt (&new_body, t);
7708 gimple_seq_add_seq (&new_body, tred_dlist);
7709 maybe_add_implicit_barrier_cancel (ctx, t, &new_body);
7710
7711 if (rclauses)
7712 OMP_CLAUSE_DECL (rclauses) = rtmp;
7713
7714 gimple_bind_set_body (new_stmt, new_body);
7715 }
7716
7717
7718 /* A subroutine of lower_omp_single. Expand the simple form of
7719 a GIMPLE_OMP_SINGLE, without a copyprivate clause:
7720
7721 if (GOMP_single_start ())
7722 BODY;
7723 [ GOMP_barrier (); ] -> unless 'nowait' is present.
7724
7725 FIXME. It may be better to delay expanding the logic of this until
7726 pass_expand_omp. The expanded logic may make the job more difficult
7727 to a synchronization analysis pass. */
7728
7729 static void
7730 lower_omp_single_simple (gomp_single *single_stmt, gimple_seq *pre_p)
7731 {
7732 location_t loc = gimple_location (single_stmt);
7733 tree tlabel = create_artificial_label (loc);
7734 tree flabel = create_artificial_label (loc);
7735 gimple *call, *cond;
7736 tree lhs, decl;
7737
7738 decl = builtin_decl_explicit (BUILT_IN_GOMP_SINGLE_START);
7739 lhs = create_tmp_var (TREE_TYPE (TREE_TYPE (decl)));
7740 call = gimple_build_call (decl, 0);
7741 gimple_call_set_lhs (call, lhs);
7742 gimple_seq_add_stmt (pre_p, call);
7743
7744 cond = gimple_build_cond (EQ_EXPR, lhs,
7745 fold_convert_loc (loc, TREE_TYPE (lhs),
7746 boolean_true_node),
7747 tlabel, flabel);
7748 gimple_seq_add_stmt (pre_p, cond);
7749 gimple_seq_add_stmt (pre_p, gimple_build_label (tlabel));
7750 gimple_seq_add_seq (pre_p, gimple_omp_body (single_stmt));
7751 gimple_seq_add_stmt (pre_p, gimple_build_label (flabel));
7752 }
7753
7754
7755 /* A subroutine of lower_omp_single. Expand the simple form of
7756 a GIMPLE_OMP_SINGLE, with a copyprivate clause:
7757
7758 #pragma omp single copyprivate (a, b, c)
7759
7760 Create a new structure to hold copies of 'a', 'b' and 'c' and emit:
7761
7762 {
7763 if ((copyout_p = GOMP_single_copy_start ()) == NULL)
7764 {
7765 BODY;
7766 copyout.a = a;
7767 copyout.b = b;
7768 copyout.c = c;
7769 GOMP_single_copy_end (&copyout);
7770 }
7771 else
7772 {
7773 a = copyout_p->a;
7774 b = copyout_p->b;
7775 c = copyout_p->c;
7776 }
7777 GOMP_barrier ();
7778 }
7779
7780 FIXME. It may be better to delay expanding the logic of this until
7781 pass_expand_omp. The expanded logic may make the job more difficult
7782 to a synchronization analysis pass. */
7783
7784 static void
7785 lower_omp_single_copy (gomp_single *single_stmt, gimple_seq *pre_p,
7786 omp_context *ctx)
7787 {
7788 tree ptr_type, t, l0, l1, l2, bfn_decl;
7789 gimple_seq copyin_seq;
7790 location_t loc = gimple_location (single_stmt);
7791
7792 ctx->sender_decl = create_tmp_var (ctx->record_type, ".omp_copy_o");
7793
7794 ptr_type = build_pointer_type (ctx->record_type);
7795 ctx->receiver_decl = create_tmp_var (ptr_type, ".omp_copy_i");
7796
7797 l0 = create_artificial_label (loc);
7798 l1 = create_artificial_label (loc);
7799 l2 = create_artificial_label (loc);
7800
7801 bfn_decl = builtin_decl_explicit (BUILT_IN_GOMP_SINGLE_COPY_START);
7802 t = build_call_expr_loc (loc, bfn_decl, 0);
7803 t = fold_convert_loc (loc, ptr_type, t);
7804 gimplify_assign (ctx->receiver_decl, t, pre_p);
7805
7806 t = build2 (EQ_EXPR, boolean_type_node, ctx->receiver_decl,
7807 build_int_cst (ptr_type, 0));
7808 t = build3 (COND_EXPR, void_type_node, t,
7809 build_and_jump (&l0), build_and_jump (&l1));
7810 gimplify_and_add (t, pre_p);
7811
7812 gimple_seq_add_stmt (pre_p, gimple_build_label (l0));
7813
7814 gimple_seq_add_seq (pre_p, gimple_omp_body (single_stmt));
7815
7816 copyin_seq = NULL;
7817 lower_copyprivate_clauses (gimple_omp_single_clauses (single_stmt), pre_p,
7818 &copyin_seq, ctx);
7819
7820 t = build_fold_addr_expr_loc (loc, ctx->sender_decl);
7821 bfn_decl = builtin_decl_explicit (BUILT_IN_GOMP_SINGLE_COPY_END);
7822 t = build_call_expr_loc (loc, bfn_decl, 1, t);
7823 gimplify_and_add (t, pre_p);
7824
7825 t = build_and_jump (&l2);
7826 gimplify_and_add (t, pre_p);
7827
7828 gimple_seq_add_stmt (pre_p, gimple_build_label (l1));
7829
7830 gimple_seq_add_seq (pre_p, copyin_seq);
7831
7832 gimple_seq_add_stmt (pre_p, gimple_build_label (l2));
7833 }
7834
7835
7836 /* Expand code for an OpenMP single directive. */
7837
7838 static void
7839 lower_omp_single (gimple_stmt_iterator *gsi_p, omp_context *ctx)
7840 {
7841 tree block;
7842 gomp_single *single_stmt = as_a <gomp_single *> (gsi_stmt (*gsi_p));
7843 gbind *bind;
7844 gimple_seq bind_body, bind_body_tail = NULL, dlist;
7845
7846 push_gimplify_context ();
7847
7848 block = make_node (BLOCK);
7849 bind = gimple_build_bind (NULL, NULL, block);
7850 gsi_replace (gsi_p, bind, true);
7851 bind_body = NULL;
7852 dlist = NULL;
7853 lower_rec_input_clauses (gimple_omp_single_clauses (single_stmt),
7854 &bind_body, &dlist, ctx, NULL);
7855 lower_omp (gimple_omp_body_ptr (single_stmt), ctx);
7856
7857 gimple_seq_add_stmt (&bind_body, single_stmt);
7858
7859 if (ctx->record_type)
7860 lower_omp_single_copy (single_stmt, &bind_body, ctx);
7861 else
7862 lower_omp_single_simple (single_stmt, &bind_body);
7863
7864 gimple_omp_set_body (single_stmt, NULL);
7865
7866 gimple_seq_add_seq (&bind_body, dlist);
7867
7868 bind_body = maybe_catch_exception (bind_body);
7869
7870 bool nowait = omp_find_clause (gimple_omp_single_clauses (single_stmt),
7871 OMP_CLAUSE_NOWAIT) != NULL_TREE;
7872 gimple *g = gimple_build_omp_return (nowait);
7873 gimple_seq_add_stmt (&bind_body_tail, g);
7874 maybe_add_implicit_barrier_cancel (ctx, g, &bind_body_tail);
7875 if (ctx->record_type)
7876 {
7877 gimple_stmt_iterator gsi = gsi_start (bind_body_tail);
7878 tree clobber = build_constructor (ctx->record_type, NULL);
7879 TREE_THIS_VOLATILE (clobber) = 1;
7880 gsi_insert_after (&gsi, gimple_build_assign (ctx->sender_decl,
7881 clobber), GSI_SAME_STMT);
7882 }
7883 gimple_seq_add_seq (&bind_body, bind_body_tail);
7884 gimple_bind_set_body (bind, bind_body);
7885
7886 pop_gimplify_context (bind);
7887
7888 gimple_bind_append_vars (bind, ctx->block_vars);
7889 BLOCK_VARS (block) = ctx->block_vars;
7890 if (BLOCK_VARS (block))
7891 TREE_USED (block) = 1;
7892 }
7893
7894
7895 /* Expand code for an OpenMP master directive. */
7896
7897 static void
7898 lower_omp_master (gimple_stmt_iterator *gsi_p, omp_context *ctx)
7899 {
7900 tree block, lab = NULL, x, bfn_decl;
7901 gimple *stmt = gsi_stmt (*gsi_p);
7902 gbind *bind;
7903 location_t loc = gimple_location (stmt);
7904 gimple_seq tseq;
7905
7906 push_gimplify_context ();
7907
7908 block = make_node (BLOCK);
7909 bind = gimple_build_bind (NULL, NULL, block);
7910 gsi_replace (gsi_p, bind, true);
7911 gimple_bind_add_stmt (bind, stmt);
7912
7913 bfn_decl = builtin_decl_explicit (BUILT_IN_OMP_GET_THREAD_NUM);
7914 x = build_call_expr_loc (loc, bfn_decl, 0);
7915 x = build2 (EQ_EXPR, boolean_type_node, x, integer_zero_node);
7916 x = build3 (COND_EXPR, void_type_node, x, NULL, build_and_jump (&lab));
7917 tseq = NULL;
7918 gimplify_and_add (x, &tseq);
7919 gimple_bind_add_seq (bind, tseq);
7920
7921 lower_omp (gimple_omp_body_ptr (stmt), ctx);
7922 gimple_omp_set_body (stmt, maybe_catch_exception (gimple_omp_body (stmt)));
7923 gimple_bind_add_seq (bind, gimple_omp_body (stmt));
7924 gimple_omp_set_body (stmt, NULL);
7925
7926 gimple_bind_add_stmt (bind, gimple_build_label (lab));
7927
7928 gimple_bind_add_stmt (bind, gimple_build_omp_return (true));
7929
7930 pop_gimplify_context (bind);
7931
7932 gimple_bind_append_vars (bind, ctx->block_vars);
7933 BLOCK_VARS (block) = ctx->block_vars;
7934 }
7935
7936 /* Helper function for lower_omp_task_reductions. For a specific PASS
7937 find out the current clause it should be processed, or return false
7938 if all have been processed already. */
7939
7940 static inline bool
7941 omp_task_reduction_iterate (int pass, enum tree_code code,
7942 enum omp_clause_code ccode, tree *c, tree *decl,
7943 tree *type, tree *next)
7944 {
7945 for (; *c; *c = omp_find_clause (OMP_CLAUSE_CHAIN (*c), ccode))
7946 {
7947 if (ccode == OMP_CLAUSE_REDUCTION
7948 && code != OMP_TASKLOOP
7949 && !OMP_CLAUSE_REDUCTION_TASK (*c))
7950 continue;
7951 *decl = OMP_CLAUSE_DECL (*c);
7952 *type = TREE_TYPE (*decl);
7953 if (TREE_CODE (*decl) == MEM_REF)
7954 {
7955 if (pass != 1)
7956 continue;
7957 }
7958 else
7959 {
7960 if (omp_is_reference (*decl))
7961 *type = TREE_TYPE (*type);
7962 if (pass != (!TREE_CONSTANT (TYPE_SIZE_UNIT (*type))))
7963 continue;
7964 }
7965 *next = omp_find_clause (OMP_CLAUSE_CHAIN (*c), ccode);
7966 return true;
7967 }
7968 *decl = NULL_TREE;
7969 *type = NULL_TREE;
7970 *next = NULL_TREE;
7971 return false;
7972 }
7973
7974 /* Lower task_reduction and reduction clauses (the latter unless CODE is
7975 OMP_TASKGROUP only with task modifier). Register mapping of those in
7976 START sequence and reducing them and unregister them in the END sequence. */
7977
7978 static void
7979 lower_omp_task_reductions (omp_context *ctx, enum tree_code code, tree clauses,
7980 gimple_seq *start, gimple_seq *end)
7981 {
7982 enum omp_clause_code ccode
7983 = (code == OMP_TASKGROUP
7984 ? OMP_CLAUSE_TASK_REDUCTION : OMP_CLAUSE_REDUCTION);
7985 tree cancellable = NULL_TREE;
7986 clauses = omp_task_reductions_find_first (clauses, code, ccode);
7987 if (clauses == NULL_TREE)
7988 return;
7989 if (code == OMP_FOR || code == OMP_SECTIONS)
7990 {
7991 for (omp_context *outer = ctx->outer; outer; outer = outer->outer)
7992 if (gimple_code (outer->stmt) == GIMPLE_OMP_PARALLEL
7993 && outer->cancellable)
7994 {
7995 cancellable = error_mark_node;
7996 break;
7997 }
7998 else if (gimple_code (outer->stmt) != GIMPLE_OMP_TASKGROUP)
7999 break;
8000 }
8001 tree record_type = lang_hooks.types.make_type (RECORD_TYPE);
8002 tree *last = &TYPE_FIELDS (record_type);
8003 unsigned cnt = 0;
8004 if (cancellable)
8005 {
8006 tree field = build_decl (UNKNOWN_LOCATION, FIELD_DECL, NULL_TREE,
8007 ptr_type_node);
8008 tree ifield = build_decl (UNKNOWN_LOCATION, FIELD_DECL, NULL_TREE,
8009 integer_type_node);
8010 *last = field;
8011 DECL_CHAIN (field) = ifield;
8012 last = &DECL_CHAIN (ifield);
8013 DECL_CONTEXT (field) = record_type;
8014 if (TYPE_ALIGN (record_type) < DECL_ALIGN (field))
8015 SET_TYPE_ALIGN (record_type, DECL_ALIGN (field));
8016 DECL_CONTEXT (ifield) = record_type;
8017 if (TYPE_ALIGN (record_type) < DECL_ALIGN (ifield))
8018 SET_TYPE_ALIGN (record_type, DECL_ALIGN (ifield));
8019 }
8020 for (int pass = 0; pass < 2; pass++)
8021 {
8022 tree decl, type, next;
8023 for (tree c = clauses;
8024 omp_task_reduction_iterate (pass, code, ccode,
8025 &c, &decl, &type, &next); c = next)
8026 {
8027 ++cnt;
8028 tree new_type = type;
8029 if (ctx->outer)
8030 new_type = remap_type (type, &ctx->outer->cb);
8031 tree field
8032 = build_decl (OMP_CLAUSE_LOCATION (c), FIELD_DECL,
8033 DECL_P (decl) ? DECL_NAME (decl) : NULL_TREE,
8034 new_type);
8035 if (DECL_P (decl) && type == TREE_TYPE (decl))
8036 {
8037 SET_DECL_ALIGN (field, DECL_ALIGN (decl));
8038 DECL_USER_ALIGN (field) = DECL_USER_ALIGN (decl);
8039 TREE_THIS_VOLATILE (field) = TREE_THIS_VOLATILE (decl);
8040 }
8041 else
8042 SET_DECL_ALIGN (field, TYPE_ALIGN (type));
8043 DECL_CONTEXT (field) = record_type;
8044 if (TYPE_ALIGN (record_type) < DECL_ALIGN (field))
8045 SET_TYPE_ALIGN (record_type, DECL_ALIGN (field));
8046 *last = field;
8047 last = &DECL_CHAIN (field);
8048 tree bfield
8049 = build_decl (OMP_CLAUSE_LOCATION (c), FIELD_DECL, NULL_TREE,
8050 boolean_type_node);
8051 DECL_CONTEXT (bfield) = record_type;
8052 if (TYPE_ALIGN (record_type) < DECL_ALIGN (bfield))
8053 SET_TYPE_ALIGN (record_type, DECL_ALIGN (bfield));
8054 *last = bfield;
8055 last = &DECL_CHAIN (bfield);
8056 }
8057 }
8058 *last = NULL_TREE;
8059 layout_type (record_type);
8060
8061 /* Build up an array which registers with the runtime all the reductions
8062 and deregisters them at the end. Format documented in libgomp/task.c. */
8063 tree atype = build_array_type_nelts (pointer_sized_int_node, 7 + cnt * 3);
8064 tree avar = create_tmp_var_raw (atype);
8065 gimple_add_tmp_var (avar);
8066 TREE_ADDRESSABLE (avar) = 1;
8067 tree r = build4 (ARRAY_REF, pointer_sized_int_node, avar, size_zero_node,
8068 NULL_TREE, NULL_TREE);
8069 tree t = build_int_cst (pointer_sized_int_node, cnt);
8070 gimple_seq_add_stmt (start, gimple_build_assign (r, t));
8071 gimple_seq seq = NULL;
8072 tree sz = fold_convert (pointer_sized_int_node,
8073 TYPE_SIZE_UNIT (record_type));
8074 int cachesz = 64;
8075 sz = fold_build2 (PLUS_EXPR, pointer_sized_int_node, sz,
8076 build_int_cst (pointer_sized_int_node, cachesz - 1));
8077 sz = fold_build2 (BIT_AND_EXPR, pointer_sized_int_node, sz,
8078 build_int_cst (pointer_sized_int_node, ~(cachesz - 1)));
8079 ctx->task_reductions.create (1 + cnt);
8080 ctx->task_reduction_map = new hash_map<tree, unsigned>;
8081 ctx->task_reductions.quick_push (TREE_CODE (sz) == INTEGER_CST
8082 ? sz : NULL_TREE);
8083 sz = force_gimple_operand (sz, &seq, true, NULL_TREE);
8084 gimple_seq_add_seq (start, seq);
8085 r = build4 (ARRAY_REF, pointer_sized_int_node, avar, size_one_node,
8086 NULL_TREE, NULL_TREE);
8087 gimple_seq_add_stmt (start, gimple_build_assign (r, sz));
8088 r = build4 (ARRAY_REF, pointer_sized_int_node, avar, size_int (2),
8089 NULL_TREE, NULL_TREE);
8090 t = build_int_cst (pointer_sized_int_node,
8091 MAX (TYPE_ALIGN_UNIT (record_type), (unsigned) cachesz));
8092 gimple_seq_add_stmt (start, gimple_build_assign (r, t));
8093 r = build4 (ARRAY_REF, pointer_sized_int_node, avar, size_int (3),
8094 NULL_TREE, NULL_TREE);
8095 t = build_int_cst (pointer_sized_int_node, -1);
8096 gimple_seq_add_stmt (start, gimple_build_assign (r, t));
8097 r = build4 (ARRAY_REF, pointer_sized_int_node, avar, size_int (4),
8098 NULL_TREE, NULL_TREE);
8099 t = build_int_cst (pointer_sized_int_node, 0);
8100 gimple_seq_add_stmt (start, gimple_build_assign (r, t));
8101
8102 /* In end, build a loop that iterates from 0 to < omp_get_num_threads ()
8103 and for each task reduction checks a bool right after the private variable
8104 within that thread's chunk; if the bool is clear, it hasn't been
8105 initialized and thus isn't going to be reduced nor destructed, otherwise
8106 reduce and destruct it. */
8107 tree idx = create_tmp_var (size_type_node);
8108 gimple_seq_add_stmt (end, gimple_build_assign (idx, size_zero_node));
8109 tree num_thr_sz = create_tmp_var (size_type_node);
8110 tree lab1 = create_artificial_label (UNKNOWN_LOCATION);
8111 tree lab2 = create_artificial_label (UNKNOWN_LOCATION);
8112 tree lab3 = NULL_TREE;
8113 gimple *g;
8114 if (code == OMP_FOR || code == OMP_SECTIONS)
8115 {
8116 /* For worksharing constructs, only perform it in the master thread,
8117 with the exception of cancelled implicit barriers - then only handle
8118 the current thread. */
8119 tree lab4 = create_artificial_label (UNKNOWN_LOCATION);
8120 t = builtin_decl_explicit (BUILT_IN_OMP_GET_THREAD_NUM);
8121 tree thr_num = create_tmp_var (integer_type_node);
8122 g = gimple_build_call (t, 0);
8123 gimple_call_set_lhs (g, thr_num);
8124 gimple_seq_add_stmt (end, g);
8125 if (cancellable)
8126 {
8127 tree c;
8128 tree lab5 = create_artificial_label (UNKNOWN_LOCATION);
8129 tree lab6 = create_artificial_label (UNKNOWN_LOCATION);
8130 lab3 = create_artificial_label (UNKNOWN_LOCATION);
8131 if (code == OMP_FOR)
8132 c = gimple_omp_for_clauses (ctx->stmt);
8133 else /* if (code == OMP_SECTIONS) */
8134 c = gimple_omp_sections_clauses (ctx->stmt);
8135 c = OMP_CLAUSE_DECL (omp_find_clause (c, OMP_CLAUSE__REDUCTEMP_));
8136 cancellable = c;
8137 g = gimple_build_cond (NE_EXPR, c, build_zero_cst (TREE_TYPE (c)),
8138 lab5, lab6);
8139 gimple_seq_add_stmt (end, g);
8140 gimple_seq_add_stmt (end, gimple_build_label (lab5));
8141 g = gimple_build_assign (idx, NOP_EXPR, thr_num);
8142 gimple_seq_add_stmt (end, g);
8143 g = gimple_build_assign (num_thr_sz, PLUS_EXPR, idx,
8144 build_one_cst (TREE_TYPE (idx)));
8145 gimple_seq_add_stmt (end, g);
8146 gimple_seq_add_stmt (end, gimple_build_goto (lab3));
8147 gimple_seq_add_stmt (end, gimple_build_label (lab6));
8148 }
8149 g = gimple_build_cond (NE_EXPR, thr_num, integer_zero_node, lab2, lab4);
8150 gimple_seq_add_stmt (end, g);
8151 gimple_seq_add_stmt (end, gimple_build_label (lab4));
8152 }
8153 if (code != OMP_PARALLEL)
8154 {
8155 t = builtin_decl_explicit (BUILT_IN_OMP_GET_NUM_THREADS);
8156 tree num_thr = create_tmp_var (integer_type_node);
8157 g = gimple_build_call (t, 0);
8158 gimple_call_set_lhs (g, num_thr);
8159 gimple_seq_add_stmt (end, g);
8160 g = gimple_build_assign (num_thr_sz, NOP_EXPR, num_thr);
8161 gimple_seq_add_stmt (end, g);
8162 if (cancellable)
8163 gimple_seq_add_stmt (end, gimple_build_label (lab3));
8164 }
8165 else
8166 {
8167 tree c = omp_find_clause (gimple_omp_parallel_clauses (ctx->stmt),
8168 OMP_CLAUSE__REDUCTEMP_);
8169 t = fold_convert (pointer_sized_int_node, OMP_CLAUSE_DECL (c));
8170 t = fold_convert (size_type_node, t);
8171 gimplify_assign (num_thr_sz, t, end);
8172 }
8173 t = build4 (ARRAY_REF, pointer_sized_int_node, avar, size_int (2),
8174 NULL_TREE, NULL_TREE);
8175 tree data = create_tmp_var (pointer_sized_int_node);
8176 gimple_seq_add_stmt (end, gimple_build_assign (data, t));
8177 gimple_seq_add_stmt (end, gimple_build_label (lab1));
8178 tree ptr;
8179 if (TREE_CODE (TYPE_SIZE_UNIT (record_type)) == INTEGER_CST)
8180 ptr = create_tmp_var (build_pointer_type (record_type));
8181 else
8182 ptr = create_tmp_var (ptr_type_node);
8183 gimple_seq_add_stmt (end, gimple_build_assign (ptr, NOP_EXPR, data));
8184
8185 tree field = TYPE_FIELDS (record_type);
8186 cnt = 0;
8187 if (cancellable)
8188 field = DECL_CHAIN (DECL_CHAIN (field));
8189 for (int pass = 0; pass < 2; pass++)
8190 {
8191 tree decl, type, next;
8192 for (tree c = clauses;
8193 omp_task_reduction_iterate (pass, code, ccode,
8194 &c, &decl, &type, &next); c = next)
8195 {
8196 tree var = decl, ref;
8197 if (TREE_CODE (decl) == MEM_REF)
8198 {
8199 var = TREE_OPERAND (var, 0);
8200 if (TREE_CODE (var) == POINTER_PLUS_EXPR)
8201 var = TREE_OPERAND (var, 0);
8202 tree v = var;
8203 if (TREE_CODE (var) == ADDR_EXPR)
8204 var = TREE_OPERAND (var, 0);
8205 else if (TREE_CODE (var) == INDIRECT_REF)
8206 var = TREE_OPERAND (var, 0);
8207 tree orig_var = var;
8208 if (is_variable_sized (var))
8209 {
8210 gcc_assert (DECL_HAS_VALUE_EXPR_P (var));
8211 var = DECL_VALUE_EXPR (var);
8212 gcc_assert (TREE_CODE (var) == INDIRECT_REF);
8213 var = TREE_OPERAND (var, 0);
8214 gcc_assert (DECL_P (var));
8215 }
8216 t = ref = maybe_lookup_decl_in_outer_ctx (var, ctx);
8217 if (orig_var != var)
8218 gcc_assert (TREE_CODE (v) == ADDR_EXPR);
8219 else if (TREE_CODE (v) == ADDR_EXPR)
8220 t = build_fold_addr_expr (t);
8221 else if (TREE_CODE (v) == INDIRECT_REF)
8222 t = build_fold_indirect_ref (t);
8223 if (TREE_CODE (TREE_OPERAND (decl, 0)) == POINTER_PLUS_EXPR)
8224 {
8225 tree b = TREE_OPERAND (TREE_OPERAND (decl, 0), 1);
8226 b = maybe_lookup_decl_in_outer_ctx (b, ctx);
8227 t = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (t), t, b);
8228 }
8229 if (!integer_zerop (TREE_OPERAND (decl, 1)))
8230 t = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (t), t,
8231 fold_convert (size_type_node,
8232 TREE_OPERAND (decl, 1)));
8233 }
8234 else
8235 {
8236 t = ref = maybe_lookup_decl_in_outer_ctx (var, ctx);
8237 if (!omp_is_reference (decl))
8238 t = build_fold_addr_expr (t);
8239 }
8240 t = fold_convert (pointer_sized_int_node, t);
8241 seq = NULL;
8242 t = force_gimple_operand (t, &seq, true, NULL_TREE);
8243 gimple_seq_add_seq (start, seq);
8244 r = build4 (ARRAY_REF, pointer_sized_int_node, avar,
8245 size_int (7 + cnt * 3), NULL_TREE, NULL_TREE);
8246 gimple_seq_add_stmt (start, gimple_build_assign (r, t));
8247 t = unshare_expr (byte_position (field));
8248 t = fold_convert (pointer_sized_int_node, t);
8249 ctx->task_reduction_map->put (c, cnt);
8250 ctx->task_reductions.quick_push (TREE_CODE (t) == INTEGER_CST
8251 ? t : NULL_TREE);
8252 seq = NULL;
8253 t = force_gimple_operand (t, &seq, true, NULL_TREE);
8254 gimple_seq_add_seq (start, seq);
8255 r = build4 (ARRAY_REF, pointer_sized_int_node, avar,
8256 size_int (7 + cnt * 3 + 1), NULL_TREE, NULL_TREE);
8257 gimple_seq_add_stmt (start, gimple_build_assign (r, t));
8258
8259 tree bfield = DECL_CHAIN (field);
8260 tree cond;
8261 if (code == OMP_PARALLEL || code == OMP_FOR || code == OMP_SECTIONS)
8262 /* In parallel or worksharing all threads unconditionally
8263 initialize all their task reduction private variables. */
8264 cond = boolean_true_node;
8265 else if (TREE_TYPE (ptr) == ptr_type_node)
8266 {
8267 cond = build2 (POINTER_PLUS_EXPR, ptr_type_node, ptr,
8268 unshare_expr (byte_position (bfield)));
8269 seq = NULL;
8270 cond = force_gimple_operand (cond, &seq, true, NULL_TREE);
8271 gimple_seq_add_seq (end, seq);
8272 tree pbool = build_pointer_type (TREE_TYPE (bfield));
8273 cond = build2 (MEM_REF, TREE_TYPE (bfield), cond,
8274 build_int_cst (pbool, 0));
8275 }
8276 else
8277 cond = build3 (COMPONENT_REF, TREE_TYPE (bfield),
8278 build_simple_mem_ref (ptr), bfield, NULL_TREE);
8279 tree lab3 = create_artificial_label (UNKNOWN_LOCATION);
8280 tree lab4 = create_artificial_label (UNKNOWN_LOCATION);
8281 tree condv = create_tmp_var (boolean_type_node);
8282 gimple_seq_add_stmt (end, gimple_build_assign (condv, cond));
8283 g = gimple_build_cond (NE_EXPR, condv, boolean_false_node,
8284 lab3, lab4);
8285 gimple_seq_add_stmt (end, g);
8286 gimple_seq_add_stmt (end, gimple_build_label (lab3));
8287 if (cancellable && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) == NULL_TREE)
8288 {
8289 /* If this reduction doesn't need destruction and parallel
8290 has been cancelled, there is nothing to do for this
8291 reduction, so jump around the merge operation. */
8292 tree lab5 = create_artificial_label (UNKNOWN_LOCATION);
8293 g = gimple_build_cond (NE_EXPR, cancellable,
8294 build_zero_cst (TREE_TYPE (cancellable)),
8295 lab4, lab5);
8296 gimple_seq_add_stmt (end, g);
8297 gimple_seq_add_stmt (end, gimple_build_label (lab5));
8298 }
8299
8300 tree new_var;
8301 if (TREE_TYPE (ptr) == ptr_type_node)
8302 {
8303 new_var = build2 (POINTER_PLUS_EXPR, ptr_type_node, ptr,
8304 unshare_expr (byte_position (field)));
8305 seq = NULL;
8306 new_var = force_gimple_operand (new_var, &seq, true, NULL_TREE);
8307 gimple_seq_add_seq (end, seq);
8308 tree pbool = build_pointer_type (TREE_TYPE (field));
8309 new_var = build2 (MEM_REF, TREE_TYPE (field), new_var,
8310 build_int_cst (pbool, 0));
8311 }
8312 else
8313 new_var = build3 (COMPONENT_REF, TREE_TYPE (field),
8314 build_simple_mem_ref (ptr), field, NULL_TREE);
8315
8316 enum tree_code rcode = OMP_CLAUSE_REDUCTION_CODE (c);
8317 if (TREE_CODE (decl) != MEM_REF && omp_is_reference (decl))
8318 ref = build_simple_mem_ref (ref);
8319 /* reduction(-:var) sums up the partial results, so it acts
8320 identically to reduction(+:var). */
8321 if (rcode == MINUS_EXPR)
8322 rcode = PLUS_EXPR;
8323 if (TREE_CODE (decl) == MEM_REF)
8324 {
8325 tree type = TREE_TYPE (new_var);
8326 tree v = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
8327 tree i = create_tmp_var (TREE_TYPE (v));
8328 tree ptype = build_pointer_type (TREE_TYPE (type));
8329 if (DECL_P (v))
8330 {
8331 v = maybe_lookup_decl_in_outer_ctx (v, ctx);
8332 tree vv = create_tmp_var (TREE_TYPE (v));
8333 gimplify_assign (vv, v, start);
8334 v = vv;
8335 }
8336 ref = build4 (ARRAY_REF, pointer_sized_int_node, avar,
8337 size_int (7 + cnt * 3), NULL_TREE, NULL_TREE);
8338 new_var = build_fold_addr_expr (new_var);
8339 new_var = fold_convert (ptype, new_var);
8340 ref = fold_convert (ptype, ref);
8341 tree m = create_tmp_var (ptype);
8342 gimplify_assign (m, new_var, end);
8343 new_var = m;
8344 m = create_tmp_var (ptype);
8345 gimplify_assign (m, ref, end);
8346 ref = m;
8347 gimplify_assign (i, build_int_cst (TREE_TYPE (v), 0), end);
8348 tree body = create_artificial_label (UNKNOWN_LOCATION);
8349 tree endl = create_artificial_label (UNKNOWN_LOCATION);
8350 gimple_seq_add_stmt (end, gimple_build_label (body));
8351 tree priv = build_simple_mem_ref (new_var);
8352 tree out = build_simple_mem_ref (ref);
8353 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
8354 {
8355 tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
8356 tree decl_placeholder
8357 = OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c);
8358 tree lab6 = NULL_TREE;
8359 if (cancellable)
8360 {
8361 /* If this reduction needs destruction and parallel
8362 has been cancelled, jump around the merge operation
8363 to the destruction. */
8364 tree lab5 = create_artificial_label (UNKNOWN_LOCATION);
8365 lab6 = create_artificial_label (UNKNOWN_LOCATION);
8366 tree zero = build_zero_cst (TREE_TYPE (cancellable));
8367 g = gimple_build_cond (NE_EXPR, cancellable, zero,
8368 lab6, lab5);
8369 gimple_seq_add_stmt (end, g);
8370 gimple_seq_add_stmt (end, gimple_build_label (lab5));
8371 }
8372 SET_DECL_VALUE_EXPR (placeholder, out);
8373 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
8374 SET_DECL_VALUE_EXPR (decl_placeholder, priv);
8375 DECL_HAS_VALUE_EXPR_P (decl_placeholder) = 1;
8376 lower_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c), ctx);
8377 gimple_seq_add_seq (end,
8378 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c));
8379 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
8380 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_TASK_REDUCTION)
8381 {
8382 OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) = NULL;
8383 OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c) = NULL;
8384 }
8385 if (cancellable)
8386 gimple_seq_add_stmt (end, gimple_build_label (lab6));
8387 tree x = lang_hooks.decls.omp_clause_dtor (c, priv);
8388 if (x)
8389 {
8390 gimple_seq tseq = NULL;
8391 gimplify_stmt (&x, &tseq);
8392 gimple_seq_add_seq (end, tseq);
8393 }
8394 }
8395 else
8396 {
8397 tree x = build2 (rcode, TREE_TYPE (out), out, priv);
8398 out = unshare_expr (out);
8399 gimplify_assign (out, x, end);
8400 }
8401 gimple *g
8402 = gimple_build_assign (new_var, POINTER_PLUS_EXPR, new_var,
8403 TYPE_SIZE_UNIT (TREE_TYPE (type)));
8404 gimple_seq_add_stmt (end, g);
8405 g = gimple_build_assign (ref, POINTER_PLUS_EXPR, ref,
8406 TYPE_SIZE_UNIT (TREE_TYPE (type)));
8407 gimple_seq_add_stmt (end, g);
8408 g = gimple_build_assign (i, PLUS_EXPR, i,
8409 build_int_cst (TREE_TYPE (i), 1));
8410 gimple_seq_add_stmt (end, g);
8411 g = gimple_build_cond (LE_EXPR, i, v, body, endl);
8412 gimple_seq_add_stmt (end, g);
8413 gimple_seq_add_stmt (end, gimple_build_label (endl));
8414 }
8415 else if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
8416 {
8417 tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
8418 tree oldv = NULL_TREE;
8419 tree lab6 = NULL_TREE;
8420 if (cancellable)
8421 {
8422 /* If this reduction needs destruction and parallel
8423 has been cancelled, jump around the merge operation
8424 to the destruction. */
8425 tree lab5 = create_artificial_label (UNKNOWN_LOCATION);
8426 lab6 = create_artificial_label (UNKNOWN_LOCATION);
8427 tree zero = build_zero_cst (TREE_TYPE (cancellable));
8428 g = gimple_build_cond (NE_EXPR, cancellable, zero,
8429 lab6, lab5);
8430 gimple_seq_add_stmt (end, g);
8431 gimple_seq_add_stmt (end, gimple_build_label (lab5));
8432 }
8433 if (omp_is_reference (decl)
8434 && !useless_type_conversion_p (TREE_TYPE (placeholder),
8435 TREE_TYPE (ref)))
8436 ref = build_fold_addr_expr_loc (OMP_CLAUSE_LOCATION (c), ref);
8437 ref = build_fold_addr_expr_loc (OMP_CLAUSE_LOCATION (c), ref);
8438 tree refv = create_tmp_var (TREE_TYPE (ref));
8439 gimplify_assign (refv, ref, end);
8440 ref = build_simple_mem_ref_loc (OMP_CLAUSE_LOCATION (c), refv);
8441 SET_DECL_VALUE_EXPR (placeholder, ref);
8442 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
8443 tree d = maybe_lookup_decl (decl, ctx);
8444 gcc_assert (d);
8445 if (DECL_HAS_VALUE_EXPR_P (d))
8446 oldv = DECL_VALUE_EXPR (d);
8447 if (omp_is_reference (var))
8448 {
8449 tree v = fold_convert (TREE_TYPE (d),
8450 build_fold_addr_expr (new_var));
8451 SET_DECL_VALUE_EXPR (d, v);
8452 }
8453 else
8454 SET_DECL_VALUE_EXPR (d, new_var);
8455 DECL_HAS_VALUE_EXPR_P (d) = 1;
8456 lower_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c), ctx);
8457 if (oldv)
8458 SET_DECL_VALUE_EXPR (d, oldv);
8459 else
8460 {
8461 SET_DECL_VALUE_EXPR (d, NULL_TREE);
8462 DECL_HAS_VALUE_EXPR_P (d) = 0;
8463 }
8464 gimple_seq_add_seq (end, OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c));
8465 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
8466 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_TASK_REDUCTION)
8467 OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) = NULL;
8468 if (cancellable)
8469 gimple_seq_add_stmt (end, gimple_build_label (lab6));
8470 tree x = lang_hooks.decls.omp_clause_dtor (c, new_var);
8471 if (x)
8472 {
8473 gimple_seq tseq = NULL;
8474 gimplify_stmt (&x, &tseq);
8475 gimple_seq_add_seq (end, tseq);
8476 }
8477 }
8478 else
8479 {
8480 tree x = build2 (rcode, TREE_TYPE (ref), ref, new_var);
8481 ref = unshare_expr (ref);
8482 gimplify_assign (ref, x, end);
8483 }
8484 gimple_seq_add_stmt (end, gimple_build_label (lab4));
8485 ++cnt;
8486 field = DECL_CHAIN (bfield);
8487 }
8488 }
8489
8490 if (code == OMP_TASKGROUP)
8491 {
8492 t = builtin_decl_explicit (BUILT_IN_GOMP_TASKGROUP_REDUCTION_REGISTER);
8493 g = gimple_build_call (t, 1, build_fold_addr_expr (avar));
8494 gimple_seq_add_stmt (start, g);
8495 }
8496 else
8497 {
8498 tree c;
8499 if (code == OMP_FOR)
8500 c = gimple_omp_for_clauses (ctx->stmt);
8501 else if (code == OMP_SECTIONS)
8502 c = gimple_omp_sections_clauses (ctx->stmt);
8503 else
8504 c = gimple_omp_taskreg_clauses (ctx->stmt);
8505 c = omp_find_clause (c, OMP_CLAUSE__REDUCTEMP_);
8506 t = fold_convert (TREE_TYPE (OMP_CLAUSE_DECL (c)),
8507 build_fold_addr_expr (avar));
8508 gimplify_assign (OMP_CLAUSE_DECL (c), t, start);
8509 }
8510
8511 gimple_seq_add_stmt (end, gimple_build_assign (data, PLUS_EXPR, data, sz));
8512 gimple_seq_add_stmt (end, gimple_build_assign (idx, PLUS_EXPR, idx,
8513 size_one_node));
8514 g = gimple_build_cond (NE_EXPR, idx, num_thr_sz, lab1, lab2);
8515 gimple_seq_add_stmt (end, g);
8516 gimple_seq_add_stmt (end, gimple_build_label (lab2));
8517 if (code == OMP_FOR || code == OMP_SECTIONS)
8518 {
8519 enum built_in_function bfn
8520 = BUILT_IN_GOMP_WORKSHARE_TASK_REDUCTION_UNREGISTER;
8521 t = builtin_decl_explicit (bfn);
8522 tree c_bool_type = TREE_VALUE (TYPE_ARG_TYPES (TREE_TYPE (t)));
8523 tree arg;
8524 if (cancellable)
8525 {
8526 arg = create_tmp_var (c_bool_type);
8527 gimple_seq_add_stmt (end, gimple_build_assign (arg, NOP_EXPR,
8528 cancellable));
8529 }
8530 else
8531 arg = build_int_cst (c_bool_type, 0);
8532 g = gimple_build_call (t, 1, arg);
8533 }
8534 else
8535 {
8536 t = builtin_decl_explicit (BUILT_IN_GOMP_TASKGROUP_REDUCTION_UNREGISTER);
8537 g = gimple_build_call (t, 1, build_fold_addr_expr (avar));
8538 }
8539 gimple_seq_add_stmt (end, g);
8540 t = build_constructor (atype, NULL);
8541 TREE_THIS_VOLATILE (t) = 1;
8542 gimple_seq_add_stmt (end, gimple_build_assign (avar, t));
8543 }
8544
8545 /* Expand code for an OpenMP taskgroup directive. */
8546
8547 static void
8548 lower_omp_taskgroup (gimple_stmt_iterator *gsi_p, omp_context *ctx)
8549 {
8550 gimple *stmt = gsi_stmt (*gsi_p);
8551 gcall *x;
8552 gbind *bind;
8553 gimple_seq dseq = NULL;
8554 tree block = make_node (BLOCK);
8555
8556 bind = gimple_build_bind (NULL, NULL, block);
8557 gsi_replace (gsi_p, bind, true);
8558 gimple_bind_add_stmt (bind, stmt);
8559
8560 push_gimplify_context ();
8561
8562 x = gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_TASKGROUP_START),
8563 0);
8564 gimple_bind_add_stmt (bind, x);
8565
8566 lower_omp_task_reductions (ctx, OMP_TASKGROUP,
8567 gimple_omp_taskgroup_clauses (stmt),
8568 gimple_bind_body_ptr (bind), &dseq);
8569
8570 lower_omp (gimple_omp_body_ptr (stmt), ctx);
8571 gimple_bind_add_seq (bind, gimple_omp_body (stmt));
8572 gimple_omp_set_body (stmt, NULL);
8573
8574 gimple_bind_add_stmt (bind, gimple_build_omp_return (true));
8575 gimple_bind_add_seq (bind, dseq);
8576
8577 pop_gimplify_context (bind);
8578
8579 gimple_bind_append_vars (bind, ctx->block_vars);
8580 BLOCK_VARS (block) = ctx->block_vars;
8581 }
8582
8583
8584 /* Fold the OMP_ORDERED_CLAUSES for the OMP_ORDERED in STMT if possible. */
8585
8586 static void
8587 lower_omp_ordered_clauses (gimple_stmt_iterator *gsi_p, gomp_ordered *ord_stmt,
8588 omp_context *ctx)
8589 {
8590 struct omp_for_data fd;
8591 if (!ctx->outer || gimple_code (ctx->outer->stmt) != GIMPLE_OMP_FOR)
8592 return;
8593
8594 unsigned int len = gimple_omp_for_collapse (ctx->outer->stmt);
8595 struct omp_for_data_loop *loops = XALLOCAVEC (struct omp_for_data_loop, len);
8596 omp_extract_for_data (as_a <gomp_for *> (ctx->outer->stmt), &fd, loops);
8597 if (!fd.ordered)
8598 return;
8599
8600 tree *list_p = gimple_omp_ordered_clauses_ptr (ord_stmt);
8601 tree c = gimple_omp_ordered_clauses (ord_stmt);
8602 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND
8603 && OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SINK)
8604 {
8605 /* Merge depend clauses from multiple adjacent
8606 #pragma omp ordered depend(sink:...) constructs
8607 into one #pragma omp ordered depend(sink:...), so that
8608 we can optimize them together. */
8609 gimple_stmt_iterator gsi = *gsi_p;
8610 gsi_next (&gsi);
8611 while (!gsi_end_p (gsi))
8612 {
8613 gimple *stmt = gsi_stmt (gsi);
8614 if (is_gimple_debug (stmt)
8615 || gimple_code (stmt) == GIMPLE_NOP)
8616 {
8617 gsi_next (&gsi);
8618 continue;
8619 }
8620 if (gimple_code (stmt) != GIMPLE_OMP_ORDERED)
8621 break;
8622 gomp_ordered *ord_stmt2 = as_a <gomp_ordered *> (stmt);
8623 c = gimple_omp_ordered_clauses (ord_stmt2);
8624 if (c == NULL_TREE
8625 || OMP_CLAUSE_CODE (c) != OMP_CLAUSE_DEPEND
8626 || OMP_CLAUSE_DEPEND_KIND (c) != OMP_CLAUSE_DEPEND_SINK)
8627 break;
8628 while (*list_p)
8629 list_p = &OMP_CLAUSE_CHAIN (*list_p);
8630 *list_p = c;
8631 gsi_remove (&gsi, true);
8632 }
8633 }
8634
8635 /* Canonicalize sink dependence clauses into one folded clause if
8636 possible.
8637
8638 The basic algorithm is to create a sink vector whose first
8639 element is the GCD of all the first elements, and whose remaining
8640 elements are the minimum of the subsequent columns.
8641
8642 We ignore dependence vectors whose first element is zero because
8643 such dependencies are known to be executed by the same thread.
8644
8645 We take into account the direction of the loop, so a minimum
8646 becomes a maximum if the loop is iterating forwards. We also
8647 ignore sink clauses where the loop direction is unknown, or where
8648 the offsets are clearly invalid because they are not a multiple
8649 of the loop increment.
8650
8651 For example:
8652
8653 #pragma omp for ordered(2)
8654 for (i=0; i < N; ++i)
8655 for (j=0; j < M; ++j)
8656 {
8657 #pragma omp ordered \
8658 depend(sink:i-8,j-2) \
8659 depend(sink:i,j-1) \ // Completely ignored because i+0.
8660 depend(sink:i-4,j-3) \
8661 depend(sink:i-6,j-4)
8662 #pragma omp ordered depend(source)
8663 }
8664
8665 Folded clause is:
8666
8667 depend(sink:-gcd(8,4,6),-min(2,3,4))
8668 -or-
8669 depend(sink:-2,-2)
8670 */
8671
8672 /* FIXME: Computing GCD's where the first element is zero is
8673 non-trivial in the presence of collapsed loops. Do this later. */
8674 if (fd.collapse > 1)
8675 return;
8676
8677 wide_int *folded_deps = XALLOCAVEC (wide_int, 2 * len - 1);
8678
8679 /* wide_int is not a POD so it must be default-constructed. */
8680 for (unsigned i = 0; i != 2 * len - 1; ++i)
8681 new (static_cast<void*>(folded_deps + i)) wide_int ();
8682
8683 tree folded_dep = NULL_TREE;
8684 /* TRUE if the first dimension's offset is negative. */
8685 bool neg_offset_p = false;
8686
8687 list_p = gimple_omp_ordered_clauses_ptr (ord_stmt);
8688 unsigned int i;
8689 while ((c = *list_p) != NULL)
8690 {
8691 bool remove = false;
8692
8693 gcc_assert (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND);
8694 if (OMP_CLAUSE_DEPEND_KIND (c) != OMP_CLAUSE_DEPEND_SINK)
8695 goto next_ordered_clause;
8696
8697 tree vec;
8698 for (vec = OMP_CLAUSE_DECL (c), i = 0;
8699 vec && TREE_CODE (vec) == TREE_LIST;
8700 vec = TREE_CHAIN (vec), ++i)
8701 {
8702 gcc_assert (i < len);
8703
8704 /* omp_extract_for_data has canonicalized the condition. */
8705 gcc_assert (fd.loops[i].cond_code == LT_EXPR
8706 || fd.loops[i].cond_code == GT_EXPR);
8707 bool forward = fd.loops[i].cond_code == LT_EXPR;
8708 bool maybe_lexically_later = true;
8709
8710 /* While the committee makes up its mind, bail if we have any
8711 non-constant steps. */
8712 if (TREE_CODE (fd.loops[i].step) != INTEGER_CST)
8713 goto lower_omp_ordered_ret;
8714
8715 tree itype = TREE_TYPE (TREE_VALUE (vec));
8716 if (POINTER_TYPE_P (itype))
8717 itype = sizetype;
8718 wide_int offset = wide_int::from (wi::to_wide (TREE_PURPOSE (vec)),
8719 TYPE_PRECISION (itype),
8720 TYPE_SIGN (itype));
8721
8722 /* Ignore invalid offsets that are not multiples of the step. */
8723 if (!wi::multiple_of_p (wi::abs (offset),
8724 wi::abs (wi::to_wide (fd.loops[i].step)),
8725 UNSIGNED))
8726 {
8727 warning_at (OMP_CLAUSE_LOCATION (c), 0,
8728 "ignoring sink clause with offset that is not "
8729 "a multiple of the loop step");
8730 remove = true;
8731 goto next_ordered_clause;
8732 }
8733
8734 /* Calculate the first dimension. The first dimension of
8735 the folded dependency vector is the GCD of the first
8736 elements, while ignoring any first elements whose offset
8737 is 0. */
8738 if (i == 0)
8739 {
8740 /* Ignore dependence vectors whose first dimension is 0. */
8741 if (offset == 0)
8742 {
8743 remove = true;
8744 goto next_ordered_clause;
8745 }
8746 else
8747 {
8748 if (!TYPE_UNSIGNED (itype) && (forward ^ wi::neg_p (offset)))
8749 {
8750 error_at (OMP_CLAUSE_LOCATION (c),
8751 "first offset must be in opposite direction "
8752 "of loop iterations");
8753 goto lower_omp_ordered_ret;
8754 }
8755 if (forward)
8756 offset = -offset;
8757 neg_offset_p = forward;
8758 /* Initialize the first time around. */
8759 if (folded_dep == NULL_TREE)
8760 {
8761 folded_dep = c;
8762 folded_deps[0] = offset;
8763 }
8764 else
8765 folded_deps[0] = wi::gcd (folded_deps[0],
8766 offset, UNSIGNED);
8767 }
8768 }
8769 /* Calculate minimum for the remaining dimensions. */
8770 else
8771 {
8772 folded_deps[len + i - 1] = offset;
8773 if (folded_dep == c)
8774 folded_deps[i] = offset;
8775 else if (maybe_lexically_later
8776 && !wi::eq_p (folded_deps[i], offset))
8777 {
8778 if (forward ^ wi::gts_p (folded_deps[i], offset))
8779 {
8780 unsigned int j;
8781 folded_dep = c;
8782 for (j = 1; j <= i; j++)
8783 folded_deps[j] = folded_deps[len + j - 1];
8784 }
8785 else
8786 maybe_lexically_later = false;
8787 }
8788 }
8789 }
8790 gcc_assert (i == len);
8791
8792 remove = true;
8793
8794 next_ordered_clause:
8795 if (remove)
8796 *list_p = OMP_CLAUSE_CHAIN (c);
8797 else
8798 list_p = &OMP_CLAUSE_CHAIN (c);
8799 }
8800
8801 if (folded_dep)
8802 {
8803 if (neg_offset_p)
8804 folded_deps[0] = -folded_deps[0];
8805
8806 tree itype = TREE_TYPE (TREE_VALUE (OMP_CLAUSE_DECL (folded_dep)));
8807 if (POINTER_TYPE_P (itype))
8808 itype = sizetype;
8809
8810 TREE_PURPOSE (OMP_CLAUSE_DECL (folded_dep))
8811 = wide_int_to_tree (itype, folded_deps[0]);
8812 OMP_CLAUSE_CHAIN (folded_dep) = gimple_omp_ordered_clauses (ord_stmt);
8813 *gimple_omp_ordered_clauses_ptr (ord_stmt) = folded_dep;
8814 }
8815
8816 lower_omp_ordered_ret:
8817
8818 /* Ordered without clauses is #pragma omp threads, while we want
8819 a nop instead if we remove all clauses. */
8820 if (gimple_omp_ordered_clauses (ord_stmt) == NULL_TREE)
8821 gsi_replace (gsi_p, gimple_build_nop (), true);
8822 }
8823
8824
8825 /* Expand code for an OpenMP ordered directive. */
8826
8827 static void
8828 lower_omp_ordered (gimple_stmt_iterator *gsi_p, omp_context *ctx)
8829 {
8830 tree block;
8831 gimple *stmt = gsi_stmt (*gsi_p), *g;
8832 gomp_ordered *ord_stmt = as_a <gomp_ordered *> (stmt);
8833 gcall *x;
8834 gbind *bind;
8835 bool simd = omp_find_clause (gimple_omp_ordered_clauses (ord_stmt),
8836 OMP_CLAUSE_SIMD);
8837 /* FIXME: this should check presence of OMP_CLAUSE__SIMT_ on the enclosing
8838 loop. */
8839 bool maybe_simt
8840 = simd && omp_maybe_offloaded_ctx (ctx) && omp_max_simt_vf () > 1;
8841 bool threads = omp_find_clause (gimple_omp_ordered_clauses (ord_stmt),
8842 OMP_CLAUSE_THREADS);
8843
8844 if (omp_find_clause (gimple_omp_ordered_clauses (ord_stmt),
8845 OMP_CLAUSE_DEPEND))
8846 {
8847 /* FIXME: This is needs to be moved to the expansion to verify various
8848 conditions only testable on cfg with dominators computed, and also
8849 all the depend clauses to be merged still might need to be available
8850 for the runtime checks. */
8851 if (0)
8852 lower_omp_ordered_clauses (gsi_p, ord_stmt, ctx);
8853 return;
8854 }
8855
8856 push_gimplify_context ();
8857
8858 block = make_node (BLOCK);
8859 bind = gimple_build_bind (NULL, NULL, block);
8860 gsi_replace (gsi_p, bind, true);
8861 gimple_bind_add_stmt (bind, stmt);
8862
8863 if (simd)
8864 {
8865 x = gimple_build_call_internal (IFN_GOMP_SIMD_ORDERED_START, 1,
8866 build_int_cst (NULL_TREE, threads));
8867 cfun->has_simduid_loops = true;
8868 }
8869 else
8870 x = gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_ORDERED_START),
8871 0);
8872 gimple_bind_add_stmt (bind, x);
8873
8874 tree counter = NULL_TREE, test = NULL_TREE, body = NULL_TREE;
8875 if (maybe_simt)
8876 {
8877 counter = create_tmp_var (integer_type_node);
8878 g = gimple_build_call_internal (IFN_GOMP_SIMT_LANE, 0);
8879 gimple_call_set_lhs (g, counter);
8880 gimple_bind_add_stmt (bind, g);
8881
8882 body = create_artificial_label (UNKNOWN_LOCATION);
8883 test = create_artificial_label (UNKNOWN_LOCATION);
8884 gimple_bind_add_stmt (bind, gimple_build_label (body));
8885
8886 tree simt_pred = create_tmp_var (integer_type_node);
8887 g = gimple_build_call_internal (IFN_GOMP_SIMT_ORDERED_PRED, 1, counter);
8888 gimple_call_set_lhs (g, simt_pred);
8889 gimple_bind_add_stmt (bind, g);
8890
8891 tree t = create_artificial_label (UNKNOWN_LOCATION);
8892 g = gimple_build_cond (EQ_EXPR, simt_pred, integer_zero_node, t, test);
8893 gimple_bind_add_stmt (bind, g);
8894
8895 gimple_bind_add_stmt (bind, gimple_build_label (t));
8896 }
8897 lower_omp (gimple_omp_body_ptr (stmt), ctx);
8898 gimple_omp_set_body (stmt, maybe_catch_exception (gimple_omp_body (stmt)));
8899 gimple_bind_add_seq (bind, gimple_omp_body (stmt));
8900 gimple_omp_set_body (stmt, NULL);
8901
8902 if (maybe_simt)
8903 {
8904 gimple_bind_add_stmt (bind, gimple_build_label (test));
8905 g = gimple_build_assign (counter, MINUS_EXPR, counter, integer_one_node);
8906 gimple_bind_add_stmt (bind, g);
8907
8908 tree c = build2 (GE_EXPR, boolean_type_node, counter, integer_zero_node);
8909 tree nonneg = create_tmp_var (integer_type_node);
8910 gimple_seq tseq = NULL;
8911 gimplify_assign (nonneg, fold_convert (integer_type_node, c), &tseq);
8912 gimple_bind_add_seq (bind, tseq);
8913
8914 g = gimple_build_call_internal (IFN_GOMP_SIMT_VOTE_ANY, 1, nonneg);
8915 gimple_call_set_lhs (g, nonneg);
8916 gimple_bind_add_stmt (bind, g);
8917
8918 tree end = create_artificial_label (UNKNOWN_LOCATION);
8919 g = gimple_build_cond (NE_EXPR, nonneg, integer_zero_node, body, end);
8920 gimple_bind_add_stmt (bind, g);
8921
8922 gimple_bind_add_stmt (bind, gimple_build_label (end));
8923 }
8924 if (simd)
8925 x = gimple_build_call_internal (IFN_GOMP_SIMD_ORDERED_END, 1,
8926 build_int_cst (NULL_TREE, threads));
8927 else
8928 x = gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_ORDERED_END),
8929 0);
8930 gimple_bind_add_stmt (bind, x);
8931
8932 gimple_bind_add_stmt (bind, gimple_build_omp_return (true));
8933
8934 pop_gimplify_context (bind);
8935
8936 gimple_bind_append_vars (bind, ctx->block_vars);
8937 BLOCK_VARS (block) = gimple_bind_vars (bind);
8938 }
8939
8940
8941 /* Expand code for an OpenMP scan directive and the structured block
8942 before the scan directive. */
8943
8944 static void
8945 lower_omp_scan (gimple_stmt_iterator *gsi_p, omp_context *ctx)
8946 {
8947 gimple *stmt = gsi_stmt (*gsi_p);
8948 bool has_clauses
8949 = gimple_omp_scan_clauses (as_a <gomp_scan *> (stmt)) != NULL;
8950 tree lane = NULL_TREE;
8951 gimple_seq before = NULL;
8952 omp_context *octx = ctx->outer;
8953 gcc_assert (octx);
8954 if (octx->scan_exclusive && !has_clauses)
8955 {
8956 gimple_stmt_iterator gsi2 = *gsi_p;
8957 gsi_next (&gsi2);
8958 gimple *stmt2 = gsi_stmt (gsi2);
8959 /* For exclusive scan, swap GIMPLE_OMP_SCAN without clauses
8960 with following GIMPLE_OMP_SCAN with clauses, so that input_phase,
8961 the one with exclusive clause(s), comes first. */
8962 if (stmt2
8963 && gimple_code (stmt2) == GIMPLE_OMP_SCAN
8964 && gimple_omp_scan_clauses (as_a <gomp_scan *> (stmt2)) != NULL)
8965 {
8966 gsi_remove (gsi_p, false);
8967 gsi_insert_after (gsi_p, stmt, GSI_SAME_STMT);
8968 ctx = maybe_lookup_ctx (stmt2);
8969 gcc_assert (ctx);
8970 lower_omp_scan (gsi_p, ctx);
8971 return;
8972 }
8973 }
8974
8975 bool input_phase = has_clauses ^ octx->scan_inclusive;
8976 bool is_simd = (gimple_code (octx->stmt) == GIMPLE_OMP_FOR
8977 && gimple_omp_for_kind (octx->stmt) == GF_OMP_FOR_KIND_SIMD);
8978 bool is_for = (gimple_code (octx->stmt) == GIMPLE_OMP_FOR
8979 && gimple_omp_for_kind (octx->stmt) == GF_OMP_FOR_KIND_FOR
8980 && !gimple_omp_for_combined_p (octx->stmt));
8981 bool is_for_simd = is_simd && gimple_omp_for_combined_into_p (octx->stmt);
8982 if (is_for_simd && octx->for_simd_scan_phase)
8983 is_simd = false;
8984 if (is_simd)
8985 if (tree c = omp_find_clause (gimple_omp_for_clauses (octx->stmt),
8986 OMP_CLAUSE__SIMDUID_))
8987 {
8988 tree uid = OMP_CLAUSE__SIMDUID__DECL (c);
8989 lane = create_tmp_var (unsigned_type_node);
8990 tree t = build_int_cst (integer_type_node,
8991 input_phase ? 1
8992 : octx->scan_inclusive ? 2 : 3);
8993 gimple *g
8994 = gimple_build_call_internal (IFN_GOMP_SIMD_LANE, 2, uid, t);
8995 gimple_call_set_lhs (g, lane);
8996 gimple_seq_add_stmt (&before, g);
8997 }
8998
8999 if (is_simd || is_for)
9000 {
9001 for (tree c = gimple_omp_for_clauses (octx->stmt);
9002 c; c = OMP_CLAUSE_CHAIN (c))
9003 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
9004 && OMP_CLAUSE_REDUCTION_INSCAN (c))
9005 {
9006 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
9007 tree var = OMP_CLAUSE_DECL (c);
9008 tree new_var = lookup_decl (var, octx);
9009 tree val = new_var;
9010 tree var2 = NULL_TREE;
9011 tree var3 = NULL_TREE;
9012 tree var4 = NULL_TREE;
9013 tree lane0 = NULL_TREE;
9014 tree new_vard = new_var;
9015 if (omp_is_reference (var))
9016 {
9017 new_var = build_simple_mem_ref_loc (clause_loc, new_var);
9018 val = new_var;
9019 }
9020 if (DECL_HAS_VALUE_EXPR_P (new_vard))
9021 {
9022 val = DECL_VALUE_EXPR (new_vard);
9023 if (new_vard != new_var)
9024 {
9025 gcc_assert (TREE_CODE (val) == ADDR_EXPR);
9026 val = TREE_OPERAND (val, 0);
9027 }
9028 if (TREE_CODE (val) == ARRAY_REF
9029 && VAR_P (TREE_OPERAND (val, 0)))
9030 {
9031 tree v = TREE_OPERAND (val, 0);
9032 if (lookup_attribute ("omp simd array",
9033 DECL_ATTRIBUTES (v)))
9034 {
9035 val = unshare_expr (val);
9036 lane0 = TREE_OPERAND (val, 1);
9037 TREE_OPERAND (val, 1) = lane;
9038 var2 = lookup_decl (v, octx);
9039 if (octx->scan_exclusive)
9040 var4 = lookup_decl (var2, octx);
9041 if (input_phase
9042 && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
9043 var3 = maybe_lookup_decl (var4 ? var4 : var2, octx);
9044 if (!input_phase)
9045 {
9046 var2 = build4 (ARRAY_REF, TREE_TYPE (val),
9047 var2, lane, NULL_TREE, NULL_TREE);
9048 TREE_THIS_NOTRAP (var2) = 1;
9049 if (octx->scan_exclusive)
9050 {
9051 var4 = build4 (ARRAY_REF, TREE_TYPE (val),
9052 var4, lane, NULL_TREE,
9053 NULL_TREE);
9054 TREE_THIS_NOTRAP (var4) = 1;
9055 }
9056 }
9057 else
9058 var2 = val;
9059 }
9060 }
9061 gcc_assert (var2);
9062 }
9063 else
9064 {
9065 var2 = build_outer_var_ref (var, octx);
9066 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
9067 {
9068 var3 = maybe_lookup_decl (new_vard, octx);
9069 if (var3 == new_vard || var3 == NULL_TREE)
9070 var3 = NULL_TREE;
9071 else if (is_simd && octx->scan_exclusive && !input_phase)
9072 {
9073 var4 = maybe_lookup_decl (var3, octx);
9074 if (var4 == var3 || var4 == NULL_TREE)
9075 {
9076 if (TREE_ADDRESSABLE (TREE_TYPE (new_var)))
9077 {
9078 var4 = var3;
9079 var3 = NULL_TREE;
9080 }
9081 else
9082 var4 = NULL_TREE;
9083 }
9084 }
9085 }
9086 if (is_simd
9087 && octx->scan_exclusive
9088 && !input_phase
9089 && var4 == NULL_TREE)
9090 var4 = create_tmp_var (TREE_TYPE (val));
9091 }
9092 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
9093 {
9094 tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
9095 if (input_phase)
9096 {
9097 if (var3)
9098 {
9099 /* If we've added a separate identity element
9100 variable, copy it over into val. */
9101 tree x = lang_hooks.decls.omp_clause_assign_op (c, val,
9102 var3);
9103 gimplify_and_add (x, &before);
9104 }
9105 else if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c))
9106 {
9107 /* Otherwise, assign to it the identity element. */
9108 gimple_seq tseq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c);
9109 if (is_for)
9110 tseq = copy_gimple_seq_and_replace_locals (tseq);
9111 tree ref = build_outer_var_ref (var, octx);
9112 tree x = (DECL_HAS_VALUE_EXPR_P (new_vard)
9113 ? DECL_VALUE_EXPR (new_vard) : NULL_TREE);
9114 if (x)
9115 {
9116 if (new_vard != new_var)
9117 val = build_fold_addr_expr_loc (clause_loc, val);
9118 SET_DECL_VALUE_EXPR (new_vard, val);
9119 }
9120 SET_DECL_VALUE_EXPR (placeholder, ref);
9121 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
9122 lower_omp (&tseq, octx);
9123 if (x)
9124 SET_DECL_VALUE_EXPR (new_vard, x);
9125 SET_DECL_VALUE_EXPR (placeholder, NULL_TREE);
9126 DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
9127 gimple_seq_add_seq (&before, tseq);
9128 if (is_simd)
9129 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL;
9130 }
9131 }
9132 else if (is_simd)
9133 {
9134 tree x;
9135 if (octx->scan_exclusive)
9136 {
9137 tree v4 = unshare_expr (var4);
9138 tree v2 = unshare_expr (var2);
9139 x = lang_hooks.decls.omp_clause_assign_op (c, v4, v2);
9140 gimplify_and_add (x, &before);
9141 }
9142 gimple_seq tseq = OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c);
9143 x = (DECL_HAS_VALUE_EXPR_P (new_vard)
9144 ? DECL_VALUE_EXPR (new_vard) : NULL_TREE);
9145 tree vexpr = val;
9146 if (x && new_vard != new_var)
9147 vexpr = build_fold_addr_expr_loc (clause_loc, val);
9148 if (x)
9149 SET_DECL_VALUE_EXPR (new_vard, vexpr);
9150 SET_DECL_VALUE_EXPR (placeholder, var2);
9151 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
9152 lower_omp (&tseq, octx);
9153 gimple_seq_add_seq (&before, tseq);
9154 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
9155 if (x)
9156 SET_DECL_VALUE_EXPR (new_vard, x);
9157 SET_DECL_VALUE_EXPR (placeholder, NULL_TREE);
9158 DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
9159 if (octx->scan_inclusive)
9160 {
9161 x = lang_hooks.decls.omp_clause_assign_op (c, val,
9162 var2);
9163 gimplify_and_add (x, &before);
9164 }
9165 else if (lane0 == NULL_TREE)
9166 {
9167 x = lang_hooks.decls.omp_clause_assign_op (c, val,
9168 var4);
9169 gimplify_and_add (x, &before);
9170 }
9171 }
9172 }
9173 else
9174 {
9175 if (input_phase)
9176 {
9177 /* input phase. Set val to initializer before
9178 the body. */
9179 tree x = omp_reduction_init (c, TREE_TYPE (new_var));
9180 gimplify_assign (val, x, &before);
9181 }
9182 else if (is_simd)
9183 {
9184 /* scan phase. */
9185 enum tree_code code = OMP_CLAUSE_REDUCTION_CODE (c);
9186 if (code == MINUS_EXPR)
9187 code = PLUS_EXPR;
9188
9189 tree x = build2 (code, TREE_TYPE (var2),
9190 unshare_expr (var2), unshare_expr (val));
9191 if (octx->scan_inclusive)
9192 {
9193 gimplify_assign (unshare_expr (var2), x, &before);
9194 gimplify_assign (val, var2, &before);
9195 }
9196 else
9197 {
9198 gimplify_assign (unshare_expr (var4),
9199 unshare_expr (var2), &before);
9200 gimplify_assign (var2, x, &before);
9201 if (lane0 == NULL_TREE)
9202 gimplify_assign (val, var4, &before);
9203 }
9204 }
9205 }
9206 if (octx->scan_exclusive && !input_phase && lane0)
9207 {
9208 tree vexpr = unshare_expr (var4);
9209 TREE_OPERAND (vexpr, 1) = lane0;
9210 if (new_vard != new_var)
9211 vexpr = build_fold_addr_expr_loc (clause_loc, vexpr);
9212 SET_DECL_VALUE_EXPR (new_vard, vexpr);
9213 }
9214 }
9215 }
9216 if (is_simd && !is_for_simd)
9217 {
9218 gsi_insert_seq_after (gsi_p, gimple_omp_body (stmt), GSI_SAME_STMT);
9219 gsi_insert_seq_after (gsi_p, before, GSI_SAME_STMT);
9220 gsi_replace (gsi_p, gimple_build_nop (), true);
9221 return;
9222 }
9223 lower_omp (gimple_omp_body_ptr (stmt), octx);
9224 if (before)
9225 {
9226 gimple_stmt_iterator gsi = gsi_start_1 (gimple_omp_body_ptr (stmt));
9227 gsi_insert_seq_before (&gsi, before, GSI_SAME_STMT);
9228 }
9229 }
9230
9231
9232 /* Gimplify a GIMPLE_OMP_CRITICAL statement. This is a relatively simple
9233 substitution of a couple of function calls. But in the NAMED case,
9234 requires that languages coordinate a symbol name. It is therefore
9235 best put here in common code. */
9236
9237 static GTY(()) hash_map<tree, tree> *critical_name_mutexes;
9238
9239 static void
9240 lower_omp_critical (gimple_stmt_iterator *gsi_p, omp_context *ctx)
9241 {
9242 tree block;
9243 tree name, lock, unlock;
9244 gomp_critical *stmt = as_a <gomp_critical *> (gsi_stmt (*gsi_p));
9245 gbind *bind;
9246 location_t loc = gimple_location (stmt);
9247 gimple_seq tbody;
9248
9249 name = gimple_omp_critical_name (stmt);
9250 if (name)
9251 {
9252 tree decl;
9253
9254 if (!critical_name_mutexes)
9255 critical_name_mutexes = hash_map<tree, tree>::create_ggc (10);
9256
9257 tree *n = critical_name_mutexes->get (name);
9258 if (n == NULL)
9259 {
9260 char *new_str;
9261
9262 decl = create_tmp_var_raw (ptr_type_node);
9263
9264 new_str = ACONCAT ((".gomp_critical_user_",
9265 IDENTIFIER_POINTER (name), NULL));
9266 DECL_NAME (decl) = get_identifier (new_str);
9267 TREE_PUBLIC (decl) = 1;
9268 TREE_STATIC (decl) = 1;
9269 DECL_COMMON (decl) = 1;
9270 DECL_ARTIFICIAL (decl) = 1;
9271 DECL_IGNORED_P (decl) = 1;
9272
9273 varpool_node::finalize_decl (decl);
9274
9275 critical_name_mutexes->put (name, decl);
9276 }
9277 else
9278 decl = *n;
9279
9280 /* If '#pragma omp critical' is inside offloaded region or
9281 inside function marked as offloadable, the symbol must be
9282 marked as offloadable too. */
9283 omp_context *octx;
9284 if (cgraph_node::get (current_function_decl)->offloadable)
9285 varpool_node::get_create (decl)->offloadable = 1;
9286 else
9287 for (octx = ctx->outer; octx; octx = octx->outer)
9288 if (is_gimple_omp_offloaded (octx->stmt))
9289 {
9290 varpool_node::get_create (decl)->offloadable = 1;
9291 break;
9292 }
9293
9294 lock = builtin_decl_explicit (BUILT_IN_GOMP_CRITICAL_NAME_START);
9295 lock = build_call_expr_loc (loc, lock, 1,
9296 build_fold_addr_expr_loc (loc, decl));
9297
9298 unlock = builtin_decl_explicit (BUILT_IN_GOMP_CRITICAL_NAME_END);
9299 unlock = build_call_expr_loc (loc, unlock, 1,
9300 build_fold_addr_expr_loc (loc, decl));
9301 }
9302 else
9303 {
9304 lock = builtin_decl_explicit (BUILT_IN_GOMP_CRITICAL_START);
9305 lock = build_call_expr_loc (loc, lock, 0);
9306
9307 unlock = builtin_decl_explicit (BUILT_IN_GOMP_CRITICAL_END);
9308 unlock = build_call_expr_loc (loc, unlock, 0);
9309 }
9310
9311 push_gimplify_context ();
9312
9313 block = make_node (BLOCK);
9314 bind = gimple_build_bind (NULL, NULL, block);
9315 gsi_replace (gsi_p, bind, true);
9316 gimple_bind_add_stmt (bind, stmt);
9317
9318 tbody = gimple_bind_body (bind);
9319 gimplify_and_add (lock, &tbody);
9320 gimple_bind_set_body (bind, tbody);
9321
9322 lower_omp (gimple_omp_body_ptr (stmt), ctx);
9323 gimple_omp_set_body (stmt, maybe_catch_exception (gimple_omp_body (stmt)));
9324 gimple_bind_add_seq (bind, gimple_omp_body (stmt));
9325 gimple_omp_set_body (stmt, NULL);
9326
9327 tbody = gimple_bind_body (bind);
9328 gimplify_and_add (unlock, &tbody);
9329 gimple_bind_set_body (bind, tbody);
9330
9331 gimple_bind_add_stmt (bind, gimple_build_omp_return (true));
9332
9333 pop_gimplify_context (bind);
9334 gimple_bind_append_vars (bind, ctx->block_vars);
9335 BLOCK_VARS (block) = gimple_bind_vars (bind);
9336 }
9337
9338 /* A subroutine of lower_omp_for. Generate code to emit the predicate
9339 for a lastprivate clause. Given a loop control predicate of (V
9340 cond N2), we gate the clause on (!(V cond N2)). The lowered form
9341 is appended to *DLIST, iterator initialization is appended to
9342 *BODY_P. *CLIST is for lastprivate(conditional:) code that needs
9343 to be emitted in a critical section. */
9344
9345 static void
9346 lower_omp_for_lastprivate (struct omp_for_data *fd, gimple_seq *body_p,
9347 gimple_seq *dlist, gimple_seq *clist,
9348 struct omp_context *ctx)
9349 {
9350 tree clauses, cond, vinit;
9351 enum tree_code cond_code;
9352 gimple_seq stmts;
9353
9354 cond_code = fd->loop.cond_code;
9355 cond_code = cond_code == LT_EXPR ? GE_EXPR : LE_EXPR;
9356
9357 /* When possible, use a strict equality expression. This can let VRP
9358 type optimizations deduce the value and remove a copy. */
9359 if (tree_fits_shwi_p (fd->loop.step))
9360 {
9361 HOST_WIDE_INT step = tree_to_shwi (fd->loop.step);
9362 if (step == 1 || step == -1)
9363 cond_code = EQ_EXPR;
9364 }
9365
9366 if (gimple_omp_for_kind (fd->for_stmt) == GF_OMP_FOR_KIND_GRID_LOOP
9367 || gimple_omp_for_grid_phony (fd->for_stmt))
9368 cond = omp_grid_lastprivate_predicate (fd);
9369 else
9370 {
9371 tree n2 = fd->loop.n2;
9372 if (fd->collapse > 1
9373 && TREE_CODE (n2) != INTEGER_CST
9374 && gimple_omp_for_combined_into_p (fd->for_stmt))
9375 {
9376 struct omp_context *taskreg_ctx = NULL;
9377 if (gimple_code (ctx->outer->stmt) == GIMPLE_OMP_FOR)
9378 {
9379 gomp_for *gfor = as_a <gomp_for *> (ctx->outer->stmt);
9380 if (gimple_omp_for_kind (gfor) == GF_OMP_FOR_KIND_FOR
9381 || gimple_omp_for_kind (gfor) == GF_OMP_FOR_KIND_DISTRIBUTE)
9382 {
9383 if (gimple_omp_for_combined_into_p (gfor))
9384 {
9385 gcc_assert (ctx->outer->outer
9386 && is_parallel_ctx (ctx->outer->outer));
9387 taskreg_ctx = ctx->outer->outer;
9388 }
9389 else
9390 {
9391 struct omp_for_data outer_fd;
9392 omp_extract_for_data (gfor, &outer_fd, NULL);
9393 n2 = fold_convert (TREE_TYPE (n2), outer_fd.loop.n2);
9394 }
9395 }
9396 else if (gimple_omp_for_kind (gfor) == GF_OMP_FOR_KIND_TASKLOOP)
9397 taskreg_ctx = ctx->outer->outer;
9398 }
9399 else if (is_taskreg_ctx (ctx->outer))
9400 taskreg_ctx = ctx->outer;
9401 if (taskreg_ctx)
9402 {
9403 int i;
9404 tree taskreg_clauses
9405 = gimple_omp_taskreg_clauses (taskreg_ctx->stmt);
9406 tree innerc = omp_find_clause (taskreg_clauses,
9407 OMP_CLAUSE__LOOPTEMP_);
9408 gcc_assert (innerc);
9409 for (i = 0; i < fd->collapse; i++)
9410 {
9411 innerc = omp_find_clause (OMP_CLAUSE_CHAIN (innerc),
9412 OMP_CLAUSE__LOOPTEMP_);
9413 gcc_assert (innerc);
9414 }
9415 innerc = omp_find_clause (OMP_CLAUSE_CHAIN (innerc),
9416 OMP_CLAUSE__LOOPTEMP_);
9417 if (innerc)
9418 n2 = fold_convert (TREE_TYPE (n2),
9419 lookup_decl (OMP_CLAUSE_DECL (innerc),
9420 taskreg_ctx));
9421 }
9422 }
9423 cond = build2 (cond_code, boolean_type_node, fd->loop.v, n2);
9424 }
9425
9426 clauses = gimple_omp_for_clauses (fd->for_stmt);
9427 stmts = NULL;
9428 lower_lastprivate_clauses (clauses, cond, body_p, &stmts, clist, ctx);
9429 if (!gimple_seq_empty_p (stmts))
9430 {
9431 gimple_seq_add_seq (&stmts, *dlist);
9432 *dlist = stmts;
9433
9434 /* Optimize: v = 0; is usually cheaper than v = some_other_constant. */
9435 vinit = fd->loop.n1;
9436 if (cond_code == EQ_EXPR
9437 && tree_fits_shwi_p (fd->loop.n2)
9438 && ! integer_zerop (fd->loop.n2))
9439 vinit = build_int_cst (TREE_TYPE (fd->loop.v), 0);
9440 else
9441 vinit = unshare_expr (vinit);
9442
9443 /* Initialize the iterator variable, so that threads that don't execute
9444 any iterations don't execute the lastprivate clauses by accident. */
9445 gimplify_assign (fd->loop.v, vinit, body_p);
9446 }
9447 }
9448
9449 /* Callback for walk_gimple_seq. Find #pragma omp scan statement. */
9450
9451 static tree
9452 omp_find_scan (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
9453 struct walk_stmt_info *wi)
9454 {
9455 gimple *stmt = gsi_stmt (*gsi_p);
9456
9457 *handled_ops_p = true;
9458 switch (gimple_code (stmt))
9459 {
9460 WALK_SUBSTMTS;
9461
9462 case GIMPLE_OMP_FOR:
9463 if (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_SIMD
9464 && gimple_omp_for_combined_into_p (stmt))
9465 *handled_ops_p = false;
9466 break;
9467
9468 case GIMPLE_OMP_SCAN:
9469 *(gimple_stmt_iterator *) (wi->info) = *gsi_p;
9470 return integer_zero_node;
9471 default:
9472 break;
9473 }
9474 return NULL;
9475 }
9476
9477 /* Helper function for lower_omp_for, add transformations for a worksharing
9478 loop with scan directives inside of it.
9479 For worksharing loop not combined with simd, transform:
9480 #pragma omp for reduction(inscan,+:r) private(i)
9481 for (i = 0; i < n; i = i + 1)
9482 {
9483 {
9484 update (r);
9485 }
9486 #pragma omp scan inclusive(r)
9487 {
9488 use (r);
9489 }
9490 }
9491
9492 into two worksharing loops + code to merge results:
9493
9494 num_threads = omp_get_num_threads ();
9495 thread_num = omp_get_thread_num ();
9496 if (thread_num == 0) goto <D.2099>; else goto <D.2100>;
9497 <D.2099>:
9498 var2 = r;
9499 goto <D.2101>;
9500 <D.2100>:
9501 // For UDRs this is UDR init, or if ctors are needed, copy from
9502 // var3 that has been constructed to contain the neutral element.
9503 var2 = 0;
9504 <D.2101>:
9505 ivar = 0;
9506 // The _scantemp_ clauses will arrange for rpriva to be initialized to
9507 // a shared array with num_threads elements and rprivb to a local array
9508 // number of elements equal to the number of (contiguous) iterations the
9509 // current thread will perform. controlb and controlp variables are
9510 // temporaries to handle deallocation of rprivb at the end of second
9511 // GOMP_FOR.
9512 #pragma omp for _scantemp_(rpriva) _scantemp_(rprivb) _scantemp_(controlb) \
9513 _scantemp_(controlp) reduction(inscan,+:r) private(i) nowait
9514 for (i = 0; i < n; i = i + 1)
9515 {
9516 {
9517 // For UDRs this is UDR init or copy from var3.
9518 r = 0;
9519 // This is the input phase from user code.
9520 update (r);
9521 }
9522 {
9523 // For UDRs this is UDR merge.
9524 var2 = var2 + r;
9525 // Rather than handing it over to the user, save to local thread's
9526 // array.
9527 rprivb[ivar] = var2;
9528 // For exclusive scan, the above two statements are swapped.
9529 ivar = ivar + 1;
9530 }
9531 }
9532 // And remember the final value from this thread's into the shared
9533 // rpriva array.
9534 rpriva[(sizetype) thread_num] = var2;
9535 // If more than one thread, compute using Work-Efficient prefix sum
9536 // the inclusive parallel scan of the rpriva array.
9537 if (num_threads > 1) goto <D.2102>; else goto <D.2103>;
9538 <D.2102>:
9539 GOMP_barrier ();
9540 down = 0;
9541 k = 1;
9542 num_threadsu = (unsigned int) num_threads;
9543 thread_numup1 = (unsigned int) thread_num + 1;
9544 <D.2108>:
9545 twok = k << 1;
9546 if (twok > num_threadsu) goto <D.2110>; else goto <D.2111>;
9547 <D.2110>:
9548 down = 4294967295;
9549 k = k >> 1;
9550 if (k == num_threadsu) goto <D.2112>; else goto <D.2111>;
9551 <D.2112>:
9552 k = k >> 1;
9553 <D.2111>:
9554 twok = k << 1;
9555 cplx = .MUL_OVERFLOW (thread_nump1, twok);
9556 mul = REALPART_EXPR <cplx>;
9557 ovf = IMAGPART_EXPR <cplx>;
9558 if (ovf == 0) goto <D.2116>; else goto <D.2117>;
9559 <D.2116>:
9560 andv = k & down;
9561 andvm1 = andv + 4294967295;
9562 l = mul + andvm1;
9563 if (l < num_threadsu) goto <D.2120>; else goto <D.2117>;
9564 <D.2120>:
9565 // For UDRs this is UDR merge, performed using var2 variable as temporary,
9566 // i.e. var2 = rpriva[l - k]; UDR merge (var2, rpriva[l]); rpriva[l] = var2;
9567 rpriva[l] = rpriva[l - k] + rpriva[l];
9568 <D.2117>:
9569 if (down == 0) goto <D.2121>; else goto <D.2122>;
9570 <D.2121>:
9571 k = k << 1;
9572 goto <D.2123>;
9573 <D.2122>:
9574 k = k >> 1;
9575 <D.2123>:
9576 GOMP_barrier ();
9577 if (k != 0) goto <D.2108>; else goto <D.2103>;
9578 <D.2103>:
9579 if (thread_num == 0) goto <D.2124>; else goto <D.2125>;
9580 <D.2124>:
9581 // For UDRs this is UDR init or copy from var3.
9582 var2 = 0;
9583 goto <D.2126>;
9584 <D.2125>:
9585 var2 = rpriva[thread_num - 1];
9586 <D.2126>:
9587 ivar = 0;
9588 #pragma omp for _scantemp_(controlb) _scantemp_(controlp) \
9589 reduction(inscan,+:r) private(i)
9590 for (i = 0; i < n; i = i + 1)
9591 {
9592 {
9593 // For UDRs, this is r = var2; UDR merge (r, rprivb[ivar]);
9594 r = var2 + rprivb[ivar];
9595 }
9596 {
9597 // This is the scan phase from user code.
9598 use (r);
9599 // Plus a bump of the iterator.
9600 ivar = ivar + 1;
9601 }
9602 } */
9603
9604 static void
9605 lower_omp_for_scan (gimple_seq *body_p, gimple_seq *dlist, gomp_for *stmt,
9606 struct omp_for_data *fd, omp_context *ctx)
9607 {
9608 bool is_for_simd = gimple_omp_for_combined_p (stmt);
9609 gcc_assert (ctx->scan_inclusive || ctx->scan_exclusive);
9610
9611 gimple_seq body = gimple_omp_body (stmt);
9612 gimple_stmt_iterator input1_gsi = gsi_none ();
9613 struct walk_stmt_info wi;
9614 memset (&wi, 0, sizeof (wi));
9615 wi.val_only = true;
9616 wi.info = (void *) &input1_gsi;
9617 walk_gimple_seq_mod (&body, omp_find_scan, NULL, &wi);
9618 gcc_assert (!gsi_end_p (input1_gsi));
9619
9620 gimple *input_stmt1 = gsi_stmt (input1_gsi);
9621 gimple_stmt_iterator gsi = input1_gsi;
9622 gsi_next (&gsi);
9623 gimple_stmt_iterator scan1_gsi = gsi;
9624 gimple *scan_stmt1 = gsi_stmt (gsi);
9625 gcc_assert (scan_stmt1 && gimple_code (scan_stmt1) == GIMPLE_OMP_SCAN);
9626
9627 gimple_seq input_body = gimple_omp_body (input_stmt1);
9628 gimple_seq scan_body = gimple_omp_body (scan_stmt1);
9629 gimple_omp_set_body (input_stmt1, NULL);
9630 gimple_omp_set_body (scan_stmt1, NULL);
9631 gimple_omp_set_body (stmt, NULL);
9632
9633 gomp_for *new_stmt = as_a <gomp_for *> (gimple_copy (stmt));
9634 gimple_seq new_body = copy_gimple_seq_and_replace_locals (body);
9635 gimple_omp_set_body (stmt, body);
9636 gimple_omp_set_body (input_stmt1, input_body);
9637
9638 gimple_stmt_iterator input2_gsi = gsi_none ();
9639 memset (&wi, 0, sizeof (wi));
9640 wi.val_only = true;
9641 wi.info = (void *) &input2_gsi;
9642 walk_gimple_seq_mod (&new_body, omp_find_scan, NULL, &wi);
9643 gcc_assert (!gsi_end_p (input2_gsi));
9644
9645 gimple *input_stmt2 = gsi_stmt (input2_gsi);
9646 gsi = input2_gsi;
9647 gsi_next (&gsi);
9648 gimple_stmt_iterator scan2_gsi = gsi;
9649 gimple *scan_stmt2 = gsi_stmt (gsi);
9650 gcc_assert (scan_stmt2 && gimple_code (scan_stmt2) == GIMPLE_OMP_SCAN);
9651 gimple_omp_set_body (scan_stmt2, scan_body);
9652
9653 gimple_stmt_iterator input3_gsi = gsi_none ();
9654 gimple_stmt_iterator scan3_gsi = gsi_none ();
9655 gimple_stmt_iterator input4_gsi = gsi_none ();
9656 gimple_stmt_iterator scan4_gsi = gsi_none ();
9657 gimple *input_stmt3 = NULL, *scan_stmt3 = NULL;
9658 gimple *input_stmt4 = NULL, *scan_stmt4 = NULL;
9659 omp_context *input_simd_ctx = NULL, *scan_simd_ctx = NULL;
9660 if (is_for_simd)
9661 {
9662 memset (&wi, 0, sizeof (wi));
9663 wi.val_only = true;
9664 wi.info = (void *) &input3_gsi;
9665 walk_gimple_seq_mod (&input_body, omp_find_scan, NULL, &wi);
9666 gcc_assert (!gsi_end_p (input3_gsi));
9667
9668 input_stmt3 = gsi_stmt (input3_gsi);
9669 gsi = input3_gsi;
9670 gsi_next (&gsi);
9671 scan3_gsi = gsi;
9672 scan_stmt3 = gsi_stmt (gsi);
9673 gcc_assert (scan_stmt3 && gimple_code (scan_stmt3) == GIMPLE_OMP_SCAN);
9674
9675 memset (&wi, 0, sizeof (wi));
9676 wi.val_only = true;
9677 wi.info = (void *) &input4_gsi;
9678 walk_gimple_seq_mod (&scan_body, omp_find_scan, NULL, &wi);
9679 gcc_assert (!gsi_end_p (input4_gsi));
9680
9681 input_stmt4 = gsi_stmt (input4_gsi);
9682 gsi = input4_gsi;
9683 gsi_next (&gsi);
9684 scan4_gsi = gsi;
9685 scan_stmt4 = gsi_stmt (gsi);
9686 gcc_assert (scan_stmt4 && gimple_code (scan_stmt4) == GIMPLE_OMP_SCAN);
9687
9688 input_simd_ctx = maybe_lookup_ctx (input_stmt3)->outer;
9689 scan_simd_ctx = maybe_lookup_ctx (input_stmt4)->outer;
9690 }
9691
9692 tree num_threads = create_tmp_var (integer_type_node);
9693 tree thread_num = create_tmp_var (integer_type_node);
9694 tree nthreads_decl = builtin_decl_explicit (BUILT_IN_OMP_GET_NUM_THREADS);
9695 tree threadnum_decl = builtin_decl_explicit (BUILT_IN_OMP_GET_THREAD_NUM);
9696 gimple *g = gimple_build_call (nthreads_decl, 0);
9697 gimple_call_set_lhs (g, num_threads);
9698 gimple_seq_add_stmt (body_p, g);
9699 g = gimple_build_call (threadnum_decl, 0);
9700 gimple_call_set_lhs (g, thread_num);
9701 gimple_seq_add_stmt (body_p, g);
9702
9703 tree ivar = create_tmp_var (sizetype);
9704 tree new_clauses1 = NULL_TREE, new_clauses2 = NULL_TREE;
9705 tree *cp1 = &new_clauses1, *cp2 = &new_clauses2;
9706 tree k = create_tmp_var (unsigned_type_node);
9707 tree l = create_tmp_var (unsigned_type_node);
9708
9709 gimple_seq clist = NULL, mdlist = NULL;
9710 gimple_seq thr01_list = NULL, thrn1_list = NULL;
9711 gimple_seq thr02_list = NULL, thrn2_list = NULL;
9712 gimple_seq scan1_list = NULL, input2_list = NULL;
9713 gimple_seq last_list = NULL, reduc_list = NULL;
9714 for (tree c = gimple_omp_for_clauses (stmt); c; c = OMP_CLAUSE_CHAIN (c))
9715 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
9716 && OMP_CLAUSE_REDUCTION_INSCAN (c))
9717 {
9718 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
9719 tree var = OMP_CLAUSE_DECL (c);
9720 tree new_var = lookup_decl (var, ctx);
9721 tree var3 = NULL_TREE;
9722 tree new_vard = new_var;
9723 if (omp_is_reference (var))
9724 new_var = build_simple_mem_ref_loc (clause_loc, new_var);
9725 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
9726 {
9727 var3 = maybe_lookup_decl (new_vard, ctx);
9728 if (var3 == new_vard)
9729 var3 = NULL_TREE;
9730 }
9731
9732 tree ptype = build_pointer_type (TREE_TYPE (new_var));
9733 tree rpriva = create_tmp_var (ptype);
9734 tree nc = build_omp_clause (clause_loc, OMP_CLAUSE__SCANTEMP_);
9735 OMP_CLAUSE_DECL (nc) = rpriva;
9736 *cp1 = nc;
9737 cp1 = &OMP_CLAUSE_CHAIN (nc);
9738
9739 tree rprivb = create_tmp_var (ptype);
9740 nc = build_omp_clause (clause_loc, OMP_CLAUSE__SCANTEMP_);
9741 OMP_CLAUSE_DECL (nc) = rprivb;
9742 OMP_CLAUSE__SCANTEMP__ALLOC (nc) = 1;
9743 *cp1 = nc;
9744 cp1 = &OMP_CLAUSE_CHAIN (nc);
9745
9746 tree var2 = create_tmp_var_raw (TREE_TYPE (new_var));
9747 if (new_vard != new_var)
9748 TREE_ADDRESSABLE (var2) = 1;
9749 gimple_add_tmp_var (var2);
9750
9751 tree x = fold_convert_loc (clause_loc, sizetype, thread_num);
9752 x = fold_build2_loc (clause_loc, MULT_EXPR, sizetype, x,
9753 TYPE_SIZE_UNIT (TREE_TYPE (ptype)));
9754 x = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (rpriva), rpriva, x);
9755 tree rpriva_ref = build_simple_mem_ref_loc (clause_loc, x);
9756
9757 x = fold_build2_loc (clause_loc, PLUS_EXPR, integer_type_node,
9758 thread_num, integer_minus_one_node);
9759 x = fold_convert_loc (clause_loc, sizetype, x);
9760 x = fold_build2_loc (clause_loc, MULT_EXPR, sizetype, x,
9761 TYPE_SIZE_UNIT (TREE_TYPE (ptype)));
9762 x = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (rpriva), rpriva, x);
9763 tree rprivam1_ref = build_simple_mem_ref_loc (clause_loc, x);
9764
9765 x = fold_convert_loc (clause_loc, sizetype, l);
9766 x = fold_build2_loc (clause_loc, MULT_EXPR, sizetype, x,
9767 TYPE_SIZE_UNIT (TREE_TYPE (ptype)));
9768 x = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (rpriva), rpriva, x);
9769 tree rprival_ref = build_simple_mem_ref_loc (clause_loc, x);
9770
9771 x = fold_build2_loc (clause_loc, MINUS_EXPR, unsigned_type_node, l, k);
9772 x = fold_convert_loc (clause_loc, sizetype, x);
9773 x = fold_build2_loc (clause_loc, MULT_EXPR, sizetype, x,
9774 TYPE_SIZE_UNIT (TREE_TYPE (ptype)));
9775 x = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (rpriva), rpriva, x);
9776 tree rprivalmk_ref = build_simple_mem_ref_loc (clause_loc, x);
9777
9778 x = fold_build2_loc (clause_loc, MULT_EXPR, sizetype, ivar,
9779 TYPE_SIZE_UNIT (TREE_TYPE (ptype)));
9780 x = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (rprivb), rprivb, x);
9781 tree rprivb_ref = build_simple_mem_ref_loc (clause_loc, x);
9782
9783 tree var4 = is_for_simd ? new_var : var2;
9784 tree var5 = NULL_TREE, var6 = NULL_TREE;
9785 if (is_for_simd)
9786 {
9787 var5 = lookup_decl (var, input_simd_ctx);
9788 var6 = lookup_decl (var, scan_simd_ctx);
9789 if (new_vard != new_var)
9790 {
9791 var5 = build_simple_mem_ref_loc (clause_loc, var5);
9792 var6 = build_simple_mem_ref_loc (clause_loc, var6);
9793 }
9794 }
9795 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
9796 {
9797 tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
9798 tree val = var2;
9799
9800 x = lang_hooks.decls.omp_clause_default_ctor
9801 (c, var2, build_outer_var_ref (var, ctx));
9802 if (x)
9803 gimplify_and_add (x, &clist);
9804
9805 x = build_outer_var_ref (var, ctx);
9806 x = lang_hooks.decls.omp_clause_assign_op (c, unshare_expr (var4),
9807 x);
9808 gimplify_and_add (x, &thr01_list);
9809
9810 tree y = (DECL_HAS_VALUE_EXPR_P (new_vard)
9811 ? DECL_VALUE_EXPR (new_vard) : NULL_TREE);
9812 if (var3)
9813 {
9814 x = unshare_expr (var4);
9815 x = lang_hooks.decls.omp_clause_assign_op (c, x, var3);
9816 gimplify_and_add (x, &thrn1_list);
9817 x = unshare_expr (var4);
9818 x = lang_hooks.decls.omp_clause_assign_op (c, x, var3);
9819 gimplify_and_add (x, &thr02_list);
9820 }
9821 else if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c))
9822 {
9823 /* Otherwise, assign to it the identity element. */
9824 gimple_seq tseq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c);
9825 tseq = copy_gimple_seq_and_replace_locals (tseq);
9826 if (!is_for_simd)
9827 {
9828 if (new_vard != new_var)
9829 val = build_fold_addr_expr_loc (clause_loc, val);
9830 SET_DECL_VALUE_EXPR (new_vard, val);
9831 DECL_HAS_VALUE_EXPR_P (new_vard) = 1;
9832 }
9833 SET_DECL_VALUE_EXPR (placeholder, error_mark_node);
9834 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
9835 lower_omp (&tseq, ctx);
9836 gimple_seq_add_seq (&thrn1_list, tseq);
9837 tseq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c);
9838 lower_omp (&tseq, ctx);
9839 gimple_seq_add_seq (&thr02_list, tseq);
9840 SET_DECL_VALUE_EXPR (placeholder, NULL_TREE);
9841 DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
9842 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL;
9843 if (y)
9844 SET_DECL_VALUE_EXPR (new_vard, y);
9845 else
9846 {
9847 DECL_HAS_VALUE_EXPR_P (new_vard) = 0;
9848 SET_DECL_VALUE_EXPR (new_vard, NULL_TREE);
9849 }
9850 }
9851
9852 x = unshare_expr (var4);
9853 x = lang_hooks.decls.omp_clause_assign_op (c, x, rprivam1_ref);
9854 gimplify_and_add (x, &thrn2_list);
9855
9856 if (is_for_simd)
9857 {
9858 x = unshare_expr (rprivb_ref);
9859 x = lang_hooks.decls.omp_clause_assign_op (c, x, var5);
9860 gimplify_and_add (x, &scan1_list);
9861 }
9862 else
9863 {
9864 if (ctx->scan_exclusive)
9865 {
9866 x = unshare_expr (rprivb_ref);
9867 x = lang_hooks.decls.omp_clause_assign_op (c, x, var2);
9868 gimplify_and_add (x, &scan1_list);
9869 }
9870
9871 gimple_seq tseq = OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c);
9872 tseq = copy_gimple_seq_and_replace_locals (tseq);
9873 SET_DECL_VALUE_EXPR (placeholder, var2);
9874 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
9875 lower_omp (&tseq, ctx);
9876 gimple_seq_add_seq (&scan1_list, tseq);
9877
9878 if (ctx->scan_inclusive)
9879 {
9880 x = unshare_expr (rprivb_ref);
9881 x = lang_hooks.decls.omp_clause_assign_op (c, x, var2);
9882 gimplify_and_add (x, &scan1_list);
9883 }
9884 }
9885
9886 x = unshare_expr (rpriva_ref);
9887 x = lang_hooks.decls.omp_clause_assign_op (c, x,
9888 unshare_expr (var4));
9889 gimplify_and_add (x, &mdlist);
9890
9891 x = unshare_expr (is_for_simd ? var6 : new_var);
9892 x = lang_hooks.decls.omp_clause_assign_op (c, x, var4);
9893 gimplify_and_add (x, &input2_list);
9894
9895 val = rprivb_ref;
9896 if (new_vard != new_var)
9897 val = build_fold_addr_expr_loc (clause_loc, val);
9898
9899 gimple_seq tseq = OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c);
9900 tseq = copy_gimple_seq_and_replace_locals (tseq);
9901 SET_DECL_VALUE_EXPR (new_vard, val);
9902 DECL_HAS_VALUE_EXPR_P (new_vard) = 1;
9903 if (is_for_simd)
9904 {
9905 SET_DECL_VALUE_EXPR (placeholder, var6);
9906 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
9907 }
9908 else
9909 DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
9910 lower_omp (&tseq, ctx);
9911 if (y)
9912 SET_DECL_VALUE_EXPR (new_vard, y);
9913 else
9914 {
9915 DECL_HAS_VALUE_EXPR_P (new_vard) = 0;
9916 SET_DECL_VALUE_EXPR (new_vard, NULL_TREE);
9917 }
9918 if (!is_for_simd)
9919 {
9920 SET_DECL_VALUE_EXPR (placeholder, new_var);
9921 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
9922 lower_omp (&tseq, ctx);
9923 }
9924 gimple_seq_add_seq (&input2_list, tseq);
9925
9926 x = build_outer_var_ref (var, ctx);
9927 x = lang_hooks.decls.omp_clause_assign_op (c, x, rpriva_ref);
9928 gimplify_and_add (x, &last_list);
9929
9930 x = lang_hooks.decls.omp_clause_assign_op (c, var2, rprivalmk_ref);
9931 gimplify_and_add (x, &reduc_list);
9932 tseq = OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c);
9933 tseq = copy_gimple_seq_and_replace_locals (tseq);
9934 val = rprival_ref;
9935 if (new_vard != new_var)
9936 val = build_fold_addr_expr_loc (clause_loc, val);
9937 SET_DECL_VALUE_EXPR (new_vard, val);
9938 DECL_HAS_VALUE_EXPR_P (new_vard) = 1;
9939 SET_DECL_VALUE_EXPR (placeholder, var2);
9940 lower_omp (&tseq, ctx);
9941 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
9942 SET_DECL_VALUE_EXPR (placeholder, NULL_TREE);
9943 DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
9944 if (y)
9945 SET_DECL_VALUE_EXPR (new_vard, y);
9946 else
9947 {
9948 DECL_HAS_VALUE_EXPR_P (new_vard) = 0;
9949 SET_DECL_VALUE_EXPR (new_vard, NULL_TREE);
9950 }
9951 gimple_seq_add_seq (&reduc_list, tseq);
9952 x = lang_hooks.decls.omp_clause_assign_op (c, rprival_ref, var2);
9953 gimplify_and_add (x, &reduc_list);
9954
9955 x = lang_hooks.decls.omp_clause_dtor (c, var2);
9956 if (x)
9957 gimplify_and_add (x, dlist);
9958 }
9959 else
9960 {
9961 x = build_outer_var_ref (var, ctx);
9962 gimplify_assign (unshare_expr (var4), x, &thr01_list);
9963
9964 x = omp_reduction_init (c, TREE_TYPE (new_var));
9965 gimplify_assign (unshare_expr (var4), unshare_expr (x),
9966 &thrn1_list);
9967 gimplify_assign (unshare_expr (var4), x, &thr02_list);
9968
9969 gimplify_assign (unshare_expr (var4), rprivam1_ref, &thrn2_list);
9970
9971 enum tree_code code = OMP_CLAUSE_REDUCTION_CODE (c);
9972 if (code == MINUS_EXPR)
9973 code = PLUS_EXPR;
9974
9975 if (is_for_simd)
9976 gimplify_assign (unshare_expr (rprivb_ref), var5, &scan1_list);
9977 else
9978 {
9979 if (ctx->scan_exclusive)
9980 gimplify_assign (unshare_expr (rprivb_ref), var2,
9981 &scan1_list);
9982 x = build2 (code, TREE_TYPE (new_var), var2, new_var);
9983 gimplify_assign (var2, x, &scan1_list);
9984 if (ctx->scan_inclusive)
9985 gimplify_assign (unshare_expr (rprivb_ref), var2,
9986 &scan1_list);
9987 }
9988
9989 gimplify_assign (unshare_expr (rpriva_ref), unshare_expr (var4),
9990 &mdlist);
9991
9992 x = build2 (code, TREE_TYPE (new_var), var4, rprivb_ref);
9993 gimplify_assign (is_for_simd ? var6 : new_var, x, &input2_list);
9994
9995 gimplify_assign (build_outer_var_ref (var, ctx), rpriva_ref,
9996 &last_list);
9997
9998 x = build2 (code, TREE_TYPE (new_var), rprivalmk_ref,
9999 unshare_expr (rprival_ref));
10000 gimplify_assign (rprival_ref, x, &reduc_list);
10001 }
10002 }
10003
10004 g = gimple_build_assign (ivar, PLUS_EXPR, ivar, size_one_node);
10005 gimple_seq_add_stmt (&scan1_list, g);
10006 g = gimple_build_assign (ivar, PLUS_EXPR, ivar, size_one_node);
10007 gimple_seq_add_stmt (gimple_omp_body_ptr (is_for_simd
10008 ? scan_stmt4 : scan_stmt2), g);
10009
10010 tree controlb = create_tmp_var (boolean_type_node);
10011 tree controlp = create_tmp_var (ptr_type_node);
10012 tree nc = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__SCANTEMP_);
10013 OMP_CLAUSE_DECL (nc) = controlb;
10014 OMP_CLAUSE__SCANTEMP__CONTROL (nc) = 1;
10015 *cp1 = nc;
10016 cp1 = &OMP_CLAUSE_CHAIN (nc);
10017 nc = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__SCANTEMP_);
10018 OMP_CLAUSE_DECL (nc) = controlp;
10019 OMP_CLAUSE__SCANTEMP__CONTROL (nc) = 1;
10020 *cp1 = nc;
10021 cp1 = &OMP_CLAUSE_CHAIN (nc);
10022 nc = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__SCANTEMP_);
10023 OMP_CLAUSE_DECL (nc) = controlb;
10024 OMP_CLAUSE__SCANTEMP__CONTROL (nc) = 1;
10025 *cp2 = nc;
10026 cp2 = &OMP_CLAUSE_CHAIN (nc);
10027 nc = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__SCANTEMP_);
10028 OMP_CLAUSE_DECL (nc) = controlp;
10029 OMP_CLAUSE__SCANTEMP__CONTROL (nc) = 1;
10030 *cp2 = nc;
10031 cp2 = &OMP_CLAUSE_CHAIN (nc);
10032
10033 *cp1 = gimple_omp_for_clauses (stmt);
10034 gimple_omp_for_set_clauses (stmt, new_clauses1);
10035 *cp2 = gimple_omp_for_clauses (new_stmt);
10036 gimple_omp_for_set_clauses (new_stmt, new_clauses2);
10037
10038 if (is_for_simd)
10039 {
10040 gimple_seq_add_seq (gimple_omp_body_ptr (scan_stmt3), scan1_list);
10041 gimple_seq_add_seq (gimple_omp_body_ptr (input_stmt4), input2_list);
10042
10043 gsi_insert_seq_after (&input3_gsi, gimple_omp_body (input_stmt3),
10044 GSI_SAME_STMT);
10045 gsi_remove (&input3_gsi, true);
10046 gsi_insert_seq_after (&scan3_gsi, gimple_omp_body (scan_stmt3),
10047 GSI_SAME_STMT);
10048 gsi_remove (&scan3_gsi, true);
10049 gsi_insert_seq_after (&input4_gsi, gimple_omp_body (input_stmt4),
10050 GSI_SAME_STMT);
10051 gsi_remove (&input4_gsi, true);
10052 gsi_insert_seq_after (&scan4_gsi, gimple_omp_body (scan_stmt4),
10053 GSI_SAME_STMT);
10054 gsi_remove (&scan4_gsi, true);
10055 }
10056 else
10057 {
10058 gimple_omp_set_body (scan_stmt1, scan1_list);
10059 gimple_omp_set_body (input_stmt2, input2_list);
10060 }
10061
10062 gsi_insert_seq_after (&input1_gsi, gimple_omp_body (input_stmt1),
10063 GSI_SAME_STMT);
10064 gsi_remove (&input1_gsi, true);
10065 gsi_insert_seq_after (&scan1_gsi, gimple_omp_body (scan_stmt1),
10066 GSI_SAME_STMT);
10067 gsi_remove (&scan1_gsi, true);
10068 gsi_insert_seq_after (&input2_gsi, gimple_omp_body (input_stmt2),
10069 GSI_SAME_STMT);
10070 gsi_remove (&input2_gsi, true);
10071 gsi_insert_seq_after (&scan2_gsi, gimple_omp_body (scan_stmt2),
10072 GSI_SAME_STMT);
10073 gsi_remove (&scan2_gsi, true);
10074
10075 gimple_seq_add_seq (body_p, clist);
10076
10077 tree lab1 = create_artificial_label (UNKNOWN_LOCATION);
10078 tree lab2 = create_artificial_label (UNKNOWN_LOCATION);
10079 tree lab3 = create_artificial_label (UNKNOWN_LOCATION);
10080 g = gimple_build_cond (EQ_EXPR, thread_num, integer_zero_node, lab1, lab2);
10081 gimple_seq_add_stmt (body_p, g);
10082 g = gimple_build_label (lab1);
10083 gimple_seq_add_stmt (body_p, g);
10084 gimple_seq_add_seq (body_p, thr01_list);
10085 g = gimple_build_goto (lab3);
10086 gimple_seq_add_stmt (body_p, g);
10087 g = gimple_build_label (lab2);
10088 gimple_seq_add_stmt (body_p, g);
10089 gimple_seq_add_seq (body_p, thrn1_list);
10090 g = gimple_build_label (lab3);
10091 gimple_seq_add_stmt (body_p, g);
10092
10093 g = gimple_build_assign (ivar, size_zero_node);
10094 gimple_seq_add_stmt (body_p, g);
10095
10096 gimple_seq_add_stmt (body_p, stmt);
10097 gimple_seq_add_seq (body_p, body);
10098 gimple_seq_add_stmt (body_p, gimple_build_omp_continue (fd->loop.v,
10099 fd->loop.v));
10100
10101 g = gimple_build_omp_return (true);
10102 gimple_seq_add_stmt (body_p, g);
10103 gimple_seq_add_seq (body_p, mdlist);
10104
10105 lab1 = create_artificial_label (UNKNOWN_LOCATION);
10106 lab2 = create_artificial_label (UNKNOWN_LOCATION);
10107 g = gimple_build_cond (GT_EXPR, num_threads, integer_one_node, lab1, lab2);
10108 gimple_seq_add_stmt (body_p, g);
10109 g = gimple_build_label (lab1);
10110 gimple_seq_add_stmt (body_p, g);
10111
10112 g = omp_build_barrier (NULL);
10113 gimple_seq_add_stmt (body_p, g);
10114
10115 tree down = create_tmp_var (unsigned_type_node);
10116 g = gimple_build_assign (down, build_zero_cst (unsigned_type_node));
10117 gimple_seq_add_stmt (body_p, g);
10118
10119 g = gimple_build_assign (k, build_one_cst (unsigned_type_node));
10120 gimple_seq_add_stmt (body_p, g);
10121
10122 tree num_threadsu = create_tmp_var (unsigned_type_node);
10123 g = gimple_build_assign (num_threadsu, NOP_EXPR, num_threads);
10124 gimple_seq_add_stmt (body_p, g);
10125
10126 tree thread_numu = create_tmp_var (unsigned_type_node);
10127 g = gimple_build_assign (thread_numu, NOP_EXPR, thread_num);
10128 gimple_seq_add_stmt (body_p, g);
10129
10130 tree thread_nump1 = create_tmp_var (unsigned_type_node);
10131 g = gimple_build_assign (thread_nump1, PLUS_EXPR, thread_numu,
10132 build_int_cst (unsigned_type_node, 1));
10133 gimple_seq_add_stmt (body_p, g);
10134
10135 lab3 = create_artificial_label (UNKNOWN_LOCATION);
10136 g = gimple_build_label (lab3);
10137 gimple_seq_add_stmt (body_p, g);
10138
10139 tree twok = create_tmp_var (unsigned_type_node);
10140 g = gimple_build_assign (twok, LSHIFT_EXPR, k, integer_one_node);
10141 gimple_seq_add_stmt (body_p, g);
10142
10143 tree lab4 = create_artificial_label (UNKNOWN_LOCATION);
10144 tree lab5 = create_artificial_label (UNKNOWN_LOCATION);
10145 tree lab6 = create_artificial_label (UNKNOWN_LOCATION);
10146 g = gimple_build_cond (GT_EXPR, twok, num_threadsu, lab4, lab5);
10147 gimple_seq_add_stmt (body_p, g);
10148 g = gimple_build_label (lab4);
10149 gimple_seq_add_stmt (body_p, g);
10150 g = gimple_build_assign (down, build_all_ones_cst (unsigned_type_node));
10151 gimple_seq_add_stmt (body_p, g);
10152 g = gimple_build_assign (k, RSHIFT_EXPR, k, integer_one_node);
10153 gimple_seq_add_stmt (body_p, g);
10154
10155 g = gimple_build_cond (EQ_EXPR, k, num_threadsu, lab6, lab5);
10156 gimple_seq_add_stmt (body_p, g);
10157 g = gimple_build_label (lab6);
10158 gimple_seq_add_stmt (body_p, g);
10159
10160 g = gimple_build_assign (k, RSHIFT_EXPR, k, integer_one_node);
10161 gimple_seq_add_stmt (body_p, g);
10162
10163 g = gimple_build_label (lab5);
10164 gimple_seq_add_stmt (body_p, g);
10165
10166 g = gimple_build_assign (twok, LSHIFT_EXPR, k, integer_one_node);
10167 gimple_seq_add_stmt (body_p, g);
10168
10169 tree cplx = create_tmp_var (build_complex_type (unsigned_type_node, false));
10170 DECL_GIMPLE_REG_P (cplx) = 1;
10171 g = gimple_build_call_internal (IFN_MUL_OVERFLOW, 2, thread_nump1, twok);
10172 gimple_call_set_lhs (g, cplx);
10173 gimple_seq_add_stmt (body_p, g);
10174 tree mul = create_tmp_var (unsigned_type_node);
10175 g = gimple_build_assign (mul, REALPART_EXPR,
10176 build1 (REALPART_EXPR, unsigned_type_node, cplx));
10177 gimple_seq_add_stmt (body_p, g);
10178 tree ovf = create_tmp_var (unsigned_type_node);
10179 g = gimple_build_assign (ovf, IMAGPART_EXPR,
10180 build1 (IMAGPART_EXPR, unsigned_type_node, cplx));
10181 gimple_seq_add_stmt (body_p, g);
10182
10183 tree lab7 = create_artificial_label (UNKNOWN_LOCATION);
10184 tree lab8 = create_artificial_label (UNKNOWN_LOCATION);
10185 g = gimple_build_cond (EQ_EXPR, ovf, build_zero_cst (unsigned_type_node),
10186 lab7, lab8);
10187 gimple_seq_add_stmt (body_p, g);
10188 g = gimple_build_label (lab7);
10189 gimple_seq_add_stmt (body_p, g);
10190
10191 tree andv = create_tmp_var (unsigned_type_node);
10192 g = gimple_build_assign (andv, BIT_AND_EXPR, k, down);
10193 gimple_seq_add_stmt (body_p, g);
10194 tree andvm1 = create_tmp_var (unsigned_type_node);
10195 g = gimple_build_assign (andvm1, PLUS_EXPR, andv,
10196 build_minus_one_cst (unsigned_type_node));
10197 gimple_seq_add_stmt (body_p, g);
10198
10199 g = gimple_build_assign (l, PLUS_EXPR, mul, andvm1);
10200 gimple_seq_add_stmt (body_p, g);
10201
10202 tree lab9 = create_artificial_label (UNKNOWN_LOCATION);
10203 g = gimple_build_cond (LT_EXPR, l, num_threadsu, lab9, lab8);
10204 gimple_seq_add_stmt (body_p, g);
10205 g = gimple_build_label (lab9);
10206 gimple_seq_add_stmt (body_p, g);
10207 gimple_seq_add_seq (body_p, reduc_list);
10208 g = gimple_build_label (lab8);
10209 gimple_seq_add_stmt (body_p, g);
10210
10211 tree lab10 = create_artificial_label (UNKNOWN_LOCATION);
10212 tree lab11 = create_artificial_label (UNKNOWN_LOCATION);
10213 tree lab12 = create_artificial_label (UNKNOWN_LOCATION);
10214 g = gimple_build_cond (EQ_EXPR, down, build_zero_cst (unsigned_type_node),
10215 lab10, lab11);
10216 gimple_seq_add_stmt (body_p, g);
10217 g = gimple_build_label (lab10);
10218 gimple_seq_add_stmt (body_p, g);
10219 g = gimple_build_assign (k, LSHIFT_EXPR, k, integer_one_node);
10220 gimple_seq_add_stmt (body_p, g);
10221 g = gimple_build_goto (lab12);
10222 gimple_seq_add_stmt (body_p, g);
10223 g = gimple_build_label (lab11);
10224 gimple_seq_add_stmt (body_p, g);
10225 g = gimple_build_assign (k, RSHIFT_EXPR, k, integer_one_node);
10226 gimple_seq_add_stmt (body_p, g);
10227 g = gimple_build_label (lab12);
10228 gimple_seq_add_stmt (body_p, g);
10229
10230 g = omp_build_barrier (NULL);
10231 gimple_seq_add_stmt (body_p, g);
10232
10233 g = gimple_build_cond (NE_EXPR, k, build_zero_cst (unsigned_type_node),
10234 lab3, lab2);
10235 gimple_seq_add_stmt (body_p, g);
10236
10237 g = gimple_build_label (lab2);
10238 gimple_seq_add_stmt (body_p, g);
10239
10240 lab1 = create_artificial_label (UNKNOWN_LOCATION);
10241 lab2 = create_artificial_label (UNKNOWN_LOCATION);
10242 lab3 = create_artificial_label (UNKNOWN_LOCATION);
10243 g = gimple_build_cond (EQ_EXPR, thread_num, integer_zero_node, lab1, lab2);
10244 gimple_seq_add_stmt (body_p, g);
10245 g = gimple_build_label (lab1);
10246 gimple_seq_add_stmt (body_p, g);
10247 gimple_seq_add_seq (body_p, thr02_list);
10248 g = gimple_build_goto (lab3);
10249 gimple_seq_add_stmt (body_p, g);
10250 g = gimple_build_label (lab2);
10251 gimple_seq_add_stmt (body_p, g);
10252 gimple_seq_add_seq (body_p, thrn2_list);
10253 g = gimple_build_label (lab3);
10254 gimple_seq_add_stmt (body_p, g);
10255
10256 g = gimple_build_assign (ivar, size_zero_node);
10257 gimple_seq_add_stmt (body_p, g);
10258 gimple_seq_add_stmt (body_p, new_stmt);
10259 gimple_seq_add_seq (body_p, new_body);
10260
10261 gimple_seq new_dlist = NULL;
10262 lab1 = create_artificial_label (UNKNOWN_LOCATION);
10263 lab2 = create_artificial_label (UNKNOWN_LOCATION);
10264 tree num_threadsm1 = create_tmp_var (integer_type_node);
10265 g = gimple_build_assign (num_threadsm1, PLUS_EXPR, num_threads,
10266 integer_minus_one_node);
10267 gimple_seq_add_stmt (&new_dlist, g);
10268 g = gimple_build_cond (EQ_EXPR, thread_num, num_threadsm1, lab1, lab2);
10269 gimple_seq_add_stmt (&new_dlist, g);
10270 g = gimple_build_label (lab1);
10271 gimple_seq_add_stmt (&new_dlist, g);
10272 gimple_seq_add_seq (&new_dlist, last_list);
10273 g = gimple_build_label (lab2);
10274 gimple_seq_add_stmt (&new_dlist, g);
10275 gimple_seq_add_seq (&new_dlist, *dlist);
10276 *dlist = new_dlist;
10277 }
10278
10279 /* Lower code for an OMP loop directive. */
10280
10281 static void
10282 lower_omp_for (gimple_stmt_iterator *gsi_p, omp_context *ctx)
10283 {
10284 tree *rhs_p, block;
10285 struct omp_for_data fd, *fdp = NULL;
10286 gomp_for *stmt = as_a <gomp_for *> (gsi_stmt (*gsi_p));
10287 gbind *new_stmt;
10288 gimple_seq omp_for_body, body, dlist, tred_ilist = NULL, tred_dlist = NULL;
10289 gimple_seq cnt_list = NULL, clist = NULL;
10290 gimple_seq oacc_head = NULL, oacc_tail = NULL;
10291 size_t i;
10292
10293 push_gimplify_context ();
10294
10295 lower_omp (gimple_omp_for_pre_body_ptr (stmt), ctx);
10296
10297 block = make_node (BLOCK);
10298 new_stmt = gimple_build_bind (NULL, NULL, block);
10299 /* Replace at gsi right away, so that 'stmt' is no member
10300 of a sequence anymore as we're going to add to a different
10301 one below. */
10302 gsi_replace (gsi_p, new_stmt, true);
10303
10304 /* Move declaration of temporaries in the loop body before we make
10305 it go away. */
10306 omp_for_body = gimple_omp_body (stmt);
10307 if (!gimple_seq_empty_p (omp_for_body)
10308 && gimple_code (gimple_seq_first_stmt (omp_for_body)) == GIMPLE_BIND)
10309 {
10310 gbind *inner_bind
10311 = as_a <gbind *> (gimple_seq_first_stmt (omp_for_body));
10312 tree vars = gimple_bind_vars (inner_bind);
10313 gimple_bind_append_vars (new_stmt, vars);
10314 /* bind_vars/BLOCK_VARS are being moved to new_stmt/block, don't
10315 keep them on the inner_bind and it's block. */
10316 gimple_bind_set_vars (inner_bind, NULL_TREE);
10317 if (gimple_bind_block (inner_bind))
10318 BLOCK_VARS (gimple_bind_block (inner_bind)) = NULL_TREE;
10319 }
10320
10321 if (gimple_omp_for_combined_into_p (stmt))
10322 {
10323 omp_extract_for_data (stmt, &fd, NULL);
10324 fdp = &fd;
10325
10326 /* We need two temporaries with fd.loop.v type (istart/iend)
10327 and then (fd.collapse - 1) temporaries with the same
10328 type for count2 ... countN-1 vars if not constant. */
10329 size_t count = 2;
10330 tree type = fd.iter_type;
10331 if (fd.collapse > 1
10332 && TREE_CODE (fd.loop.n2) != INTEGER_CST)
10333 count += fd.collapse - 1;
10334 bool taskreg_for
10335 = (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_FOR
10336 || gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_TASKLOOP);
10337 tree outerc = NULL, *pc = gimple_omp_for_clauses_ptr (stmt);
10338 tree simtc = NULL;
10339 tree clauses = *pc;
10340 if (taskreg_for)
10341 outerc
10342 = omp_find_clause (gimple_omp_taskreg_clauses (ctx->outer->stmt),
10343 OMP_CLAUSE__LOOPTEMP_);
10344 if (ctx->simt_stmt)
10345 simtc = omp_find_clause (gimple_omp_for_clauses (ctx->simt_stmt),
10346 OMP_CLAUSE__LOOPTEMP_);
10347 for (i = 0; i < count; i++)
10348 {
10349 tree temp;
10350 if (taskreg_for)
10351 {
10352 gcc_assert (outerc);
10353 temp = lookup_decl (OMP_CLAUSE_DECL (outerc), ctx->outer);
10354 outerc = omp_find_clause (OMP_CLAUSE_CHAIN (outerc),
10355 OMP_CLAUSE__LOOPTEMP_);
10356 }
10357 else
10358 {
10359 /* If there are 2 adjacent SIMD stmts, one with _simt_
10360 clause, another without, make sure they have the same
10361 decls in _looptemp_ clauses, because the outer stmt
10362 they are combined into will look up just one inner_stmt. */
10363 if (ctx->simt_stmt)
10364 temp = OMP_CLAUSE_DECL (simtc);
10365 else
10366 temp = create_tmp_var (type);
10367 insert_decl_map (&ctx->outer->cb, temp, temp);
10368 }
10369 *pc = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__LOOPTEMP_);
10370 OMP_CLAUSE_DECL (*pc) = temp;
10371 pc = &OMP_CLAUSE_CHAIN (*pc);
10372 if (ctx->simt_stmt)
10373 simtc = omp_find_clause (OMP_CLAUSE_CHAIN (simtc),
10374 OMP_CLAUSE__LOOPTEMP_);
10375 }
10376 *pc = clauses;
10377 }
10378
10379 /* The pre-body and input clauses go before the lowered GIMPLE_OMP_FOR. */
10380 dlist = NULL;
10381 body = NULL;
10382 tree rclauses
10383 = omp_task_reductions_find_first (gimple_omp_for_clauses (stmt), OMP_FOR,
10384 OMP_CLAUSE_REDUCTION);
10385 tree rtmp = NULL_TREE;
10386 if (rclauses)
10387 {
10388 tree type = build_pointer_type (pointer_sized_int_node);
10389 tree temp = create_tmp_var (type);
10390 tree c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__REDUCTEMP_);
10391 OMP_CLAUSE_DECL (c) = temp;
10392 OMP_CLAUSE_CHAIN (c) = gimple_omp_for_clauses (stmt);
10393 gimple_omp_for_set_clauses (stmt, c);
10394 lower_omp_task_reductions (ctx, OMP_FOR,
10395 gimple_omp_for_clauses (stmt),
10396 &tred_ilist, &tred_dlist);
10397 rclauses = c;
10398 rtmp = make_ssa_name (type);
10399 gimple_seq_add_stmt (&body, gimple_build_assign (rtmp, temp));
10400 }
10401
10402 lower_lastprivate_conditional_clauses (gimple_omp_for_clauses_ptr (stmt),
10403 ctx);
10404
10405 lower_rec_input_clauses (gimple_omp_for_clauses (stmt), &body, &dlist, ctx,
10406 fdp);
10407 gimple_seq_add_seq (rclauses ? &tred_ilist : &body,
10408 gimple_omp_for_pre_body (stmt));
10409
10410 lower_omp (gimple_omp_body_ptr (stmt), ctx);
10411
10412 /* Lower the header expressions. At this point, we can assume that
10413 the header is of the form:
10414
10415 #pragma omp for (V = VAL1; V {<|>|<=|>=} VAL2; V = V [+-] VAL3)
10416
10417 We just need to make sure that VAL1, VAL2 and VAL3 are lowered
10418 using the .omp_data_s mapping, if needed. */
10419 for (i = 0; i < gimple_omp_for_collapse (stmt); i++)
10420 {
10421 rhs_p = gimple_omp_for_initial_ptr (stmt, i);
10422 if (!is_gimple_min_invariant (*rhs_p))
10423 *rhs_p = get_formal_tmp_var (*rhs_p, &cnt_list);
10424 else if (TREE_CODE (*rhs_p) == ADDR_EXPR)
10425 recompute_tree_invariant_for_addr_expr (*rhs_p);
10426
10427 rhs_p = gimple_omp_for_final_ptr (stmt, i);
10428 if (!is_gimple_min_invariant (*rhs_p))
10429 *rhs_p = get_formal_tmp_var (*rhs_p, &cnt_list);
10430 else if (TREE_CODE (*rhs_p) == ADDR_EXPR)
10431 recompute_tree_invariant_for_addr_expr (*rhs_p);
10432
10433 rhs_p = &TREE_OPERAND (gimple_omp_for_incr (stmt, i), 1);
10434 if (!is_gimple_min_invariant (*rhs_p))
10435 *rhs_p = get_formal_tmp_var (*rhs_p, &cnt_list);
10436 }
10437 if (rclauses)
10438 gimple_seq_add_seq (&tred_ilist, cnt_list);
10439 else
10440 gimple_seq_add_seq (&body, cnt_list);
10441
10442 /* Once lowered, extract the bounds and clauses. */
10443 omp_extract_for_data (stmt, &fd, NULL);
10444
10445 if (is_gimple_omp_oacc (ctx->stmt)
10446 && !ctx_in_oacc_kernels_region (ctx))
10447 lower_oacc_head_tail (gimple_location (stmt),
10448 gimple_omp_for_clauses (stmt),
10449 &oacc_head, &oacc_tail, ctx);
10450
10451 /* Add OpenACC partitioning and reduction markers just before the loop. */
10452 if (oacc_head)
10453 gimple_seq_add_seq (&body, oacc_head);
10454
10455 lower_omp_for_lastprivate (&fd, &body, &dlist, &clist, ctx);
10456
10457 if (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_FOR)
10458 for (tree c = gimple_omp_for_clauses (stmt); c; c = OMP_CLAUSE_CHAIN (c))
10459 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
10460 && !OMP_CLAUSE_LINEAR_NO_COPYIN (c))
10461 {
10462 OMP_CLAUSE_DECL (c) = lookup_decl (OMP_CLAUSE_DECL (c), ctx);
10463 if (DECL_P (OMP_CLAUSE_LINEAR_STEP (c)))
10464 OMP_CLAUSE_LINEAR_STEP (c)
10465 = maybe_lookup_decl_in_outer_ctx (OMP_CLAUSE_LINEAR_STEP (c),
10466 ctx);
10467 }
10468
10469 bool phony_loop = (gimple_omp_for_kind (stmt) != GF_OMP_FOR_KIND_GRID_LOOP
10470 && gimple_omp_for_grid_phony (stmt));
10471 if ((ctx->scan_inclusive || ctx->scan_exclusive)
10472 && gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_FOR)
10473 {
10474 gcc_assert (!phony_loop);
10475 lower_omp_for_scan (&body, &dlist, stmt, &fd, ctx);
10476 }
10477 else
10478 {
10479 if (!phony_loop)
10480 gimple_seq_add_stmt (&body, stmt);
10481 gimple_seq_add_seq (&body, gimple_omp_body (stmt));
10482 }
10483
10484 if (!phony_loop)
10485 gimple_seq_add_stmt (&body, gimple_build_omp_continue (fd.loop.v,
10486 fd.loop.v));
10487
10488 /* After the loop, add exit clauses. */
10489 lower_reduction_clauses (gimple_omp_for_clauses (stmt), &body, &clist, ctx);
10490
10491 if (clist)
10492 {
10493 tree fndecl = builtin_decl_explicit (BUILT_IN_GOMP_ATOMIC_START);
10494 gcall *g = gimple_build_call (fndecl, 0);
10495 gimple_seq_add_stmt (&body, g);
10496 gimple_seq_add_seq (&body, clist);
10497 fndecl = builtin_decl_explicit (BUILT_IN_GOMP_ATOMIC_END);
10498 g = gimple_build_call (fndecl, 0);
10499 gimple_seq_add_stmt (&body, g);
10500 }
10501
10502 if (ctx->cancellable)
10503 gimple_seq_add_stmt (&body, gimple_build_label (ctx->cancel_label));
10504
10505 gimple_seq_add_seq (&body, dlist);
10506
10507 if (rclauses)
10508 {
10509 gimple_seq_add_seq (&tred_ilist, body);
10510 body = tred_ilist;
10511 }
10512
10513 body = maybe_catch_exception (body);
10514
10515 if (!phony_loop)
10516 {
10517 /* Region exit marker goes at the end of the loop body. */
10518 gimple *g = gimple_build_omp_return (fd.have_nowait);
10519 gimple_seq_add_stmt (&body, g);
10520
10521 gimple_seq_add_seq (&body, tred_dlist);
10522
10523 maybe_add_implicit_barrier_cancel (ctx, g, &body);
10524
10525 if (rclauses)
10526 OMP_CLAUSE_DECL (rclauses) = rtmp;
10527 }
10528
10529 /* Add OpenACC joining and reduction markers just after the loop. */
10530 if (oacc_tail)
10531 gimple_seq_add_seq (&body, oacc_tail);
10532
10533 pop_gimplify_context (new_stmt);
10534
10535 gimple_bind_append_vars (new_stmt, ctx->block_vars);
10536 maybe_remove_omp_member_access_dummy_vars (new_stmt);
10537 BLOCK_VARS (block) = gimple_bind_vars (new_stmt);
10538 if (BLOCK_VARS (block))
10539 TREE_USED (block) = 1;
10540
10541 gimple_bind_set_body (new_stmt, body);
10542 gimple_omp_set_body (stmt, NULL);
10543 gimple_omp_for_set_pre_body (stmt, NULL);
10544 }
10545
10546 /* Callback for walk_stmts. Check if the current statement only contains
10547 GIMPLE_OMP_FOR or GIMPLE_OMP_SECTIONS. */
10548
10549 static tree
10550 check_combined_parallel (gimple_stmt_iterator *gsi_p,
10551 bool *handled_ops_p,
10552 struct walk_stmt_info *wi)
10553 {
10554 int *info = (int *) wi->info;
10555 gimple *stmt = gsi_stmt (*gsi_p);
10556
10557 *handled_ops_p = true;
10558 switch (gimple_code (stmt))
10559 {
10560 WALK_SUBSTMTS;
10561
10562 case GIMPLE_DEBUG:
10563 break;
10564 case GIMPLE_OMP_FOR:
10565 case GIMPLE_OMP_SECTIONS:
10566 *info = *info == 0 ? 1 : -1;
10567 break;
10568 default:
10569 *info = -1;
10570 break;
10571 }
10572 return NULL;
10573 }
10574
10575 struct omp_taskcopy_context
10576 {
10577 /* This field must be at the beginning, as we do "inheritance": Some
10578 callback functions for tree-inline.c (e.g., omp_copy_decl)
10579 receive a copy_body_data pointer that is up-casted to an
10580 omp_context pointer. */
10581 copy_body_data cb;
10582 omp_context *ctx;
10583 };
10584
10585 static tree
10586 task_copyfn_copy_decl (tree var, copy_body_data *cb)
10587 {
10588 struct omp_taskcopy_context *tcctx = (struct omp_taskcopy_context *) cb;
10589
10590 if (splay_tree_lookup (tcctx->ctx->sfield_map, (splay_tree_key) var))
10591 return create_tmp_var (TREE_TYPE (var));
10592
10593 return var;
10594 }
10595
10596 static tree
10597 task_copyfn_remap_type (struct omp_taskcopy_context *tcctx, tree orig_type)
10598 {
10599 tree name, new_fields = NULL, type, f;
10600
10601 type = lang_hooks.types.make_type (RECORD_TYPE);
10602 name = DECL_NAME (TYPE_NAME (orig_type));
10603 name = build_decl (gimple_location (tcctx->ctx->stmt),
10604 TYPE_DECL, name, type);
10605 TYPE_NAME (type) = name;
10606
10607 for (f = TYPE_FIELDS (orig_type); f ; f = TREE_CHAIN (f))
10608 {
10609 tree new_f = copy_node (f);
10610 DECL_CONTEXT (new_f) = type;
10611 TREE_TYPE (new_f) = remap_type (TREE_TYPE (f), &tcctx->cb);
10612 TREE_CHAIN (new_f) = new_fields;
10613 walk_tree (&DECL_SIZE (new_f), copy_tree_body_r, &tcctx->cb, NULL);
10614 walk_tree (&DECL_SIZE_UNIT (new_f), copy_tree_body_r, &tcctx->cb, NULL);
10615 walk_tree (&DECL_FIELD_OFFSET (new_f), copy_tree_body_r,
10616 &tcctx->cb, NULL);
10617 new_fields = new_f;
10618 tcctx->cb.decl_map->put (f, new_f);
10619 }
10620 TYPE_FIELDS (type) = nreverse (new_fields);
10621 layout_type (type);
10622 return type;
10623 }
10624
10625 /* Create task copyfn. */
10626
10627 static void
10628 create_task_copyfn (gomp_task *task_stmt, omp_context *ctx)
10629 {
10630 struct function *child_cfun;
10631 tree child_fn, t, c, src, dst, f, sf, arg, sarg, decl;
10632 tree record_type, srecord_type, bind, list;
10633 bool record_needs_remap = false, srecord_needs_remap = false;
10634 splay_tree_node n;
10635 struct omp_taskcopy_context tcctx;
10636 location_t loc = gimple_location (task_stmt);
10637 size_t looptempno = 0;
10638
10639 child_fn = gimple_omp_task_copy_fn (task_stmt);
10640 child_cfun = DECL_STRUCT_FUNCTION (child_fn);
10641 gcc_assert (child_cfun->cfg == NULL);
10642 DECL_SAVED_TREE (child_fn) = alloc_stmt_list ();
10643
10644 /* Reset DECL_CONTEXT on function arguments. */
10645 for (t = DECL_ARGUMENTS (child_fn); t; t = DECL_CHAIN (t))
10646 DECL_CONTEXT (t) = child_fn;
10647
10648 /* Populate the function. */
10649 push_gimplify_context ();
10650 push_cfun (child_cfun);
10651
10652 bind = build3 (BIND_EXPR, void_type_node, NULL, NULL, NULL);
10653 TREE_SIDE_EFFECTS (bind) = 1;
10654 list = NULL;
10655 DECL_SAVED_TREE (child_fn) = bind;
10656 DECL_SOURCE_LOCATION (child_fn) = gimple_location (task_stmt);
10657
10658 /* Remap src and dst argument types if needed. */
10659 record_type = ctx->record_type;
10660 srecord_type = ctx->srecord_type;
10661 for (f = TYPE_FIELDS (record_type); f ; f = DECL_CHAIN (f))
10662 if (variably_modified_type_p (TREE_TYPE (f), ctx->cb.src_fn))
10663 {
10664 record_needs_remap = true;
10665 break;
10666 }
10667 for (f = TYPE_FIELDS (srecord_type); f ; f = DECL_CHAIN (f))
10668 if (variably_modified_type_p (TREE_TYPE (f), ctx->cb.src_fn))
10669 {
10670 srecord_needs_remap = true;
10671 break;
10672 }
10673
10674 if (record_needs_remap || srecord_needs_remap)
10675 {
10676 memset (&tcctx, '\0', sizeof (tcctx));
10677 tcctx.cb.src_fn = ctx->cb.src_fn;
10678 tcctx.cb.dst_fn = child_fn;
10679 tcctx.cb.src_node = cgraph_node::get (tcctx.cb.src_fn);
10680 gcc_checking_assert (tcctx.cb.src_node);
10681 tcctx.cb.dst_node = tcctx.cb.src_node;
10682 tcctx.cb.src_cfun = ctx->cb.src_cfun;
10683 tcctx.cb.copy_decl = task_copyfn_copy_decl;
10684 tcctx.cb.eh_lp_nr = 0;
10685 tcctx.cb.transform_call_graph_edges = CB_CGE_MOVE;
10686 tcctx.cb.decl_map = new hash_map<tree, tree>;
10687 tcctx.ctx = ctx;
10688
10689 if (record_needs_remap)
10690 record_type = task_copyfn_remap_type (&tcctx, record_type);
10691 if (srecord_needs_remap)
10692 srecord_type = task_copyfn_remap_type (&tcctx, srecord_type);
10693 }
10694 else
10695 tcctx.cb.decl_map = NULL;
10696
10697 arg = DECL_ARGUMENTS (child_fn);
10698 TREE_TYPE (arg) = build_pointer_type (record_type);
10699 sarg = DECL_CHAIN (arg);
10700 TREE_TYPE (sarg) = build_pointer_type (srecord_type);
10701
10702 /* First pass: initialize temporaries used in record_type and srecord_type
10703 sizes and field offsets. */
10704 if (tcctx.cb.decl_map)
10705 for (c = gimple_omp_task_clauses (task_stmt); c; c = OMP_CLAUSE_CHAIN (c))
10706 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
10707 {
10708 tree *p;
10709
10710 decl = OMP_CLAUSE_DECL (c);
10711 p = tcctx.cb.decl_map->get (decl);
10712 if (p == NULL)
10713 continue;
10714 n = splay_tree_lookup (ctx->sfield_map, (splay_tree_key) decl);
10715 sf = (tree) n->value;
10716 sf = *tcctx.cb.decl_map->get (sf);
10717 src = build_simple_mem_ref_loc (loc, sarg);
10718 src = omp_build_component_ref (src, sf);
10719 t = build2 (MODIFY_EXPR, TREE_TYPE (*p), *p, src);
10720 append_to_statement_list (t, &list);
10721 }
10722
10723 /* Second pass: copy shared var pointers and copy construct non-VLA
10724 firstprivate vars. */
10725 for (c = gimple_omp_task_clauses (task_stmt); c; c = OMP_CLAUSE_CHAIN (c))
10726 switch (OMP_CLAUSE_CODE (c))
10727 {
10728 splay_tree_key key;
10729 case OMP_CLAUSE_SHARED:
10730 decl = OMP_CLAUSE_DECL (c);
10731 key = (splay_tree_key) decl;
10732 if (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
10733 key = (splay_tree_key) &DECL_UID (decl);
10734 n = splay_tree_lookup (ctx->field_map, key);
10735 if (n == NULL)
10736 break;
10737 f = (tree) n->value;
10738 if (tcctx.cb.decl_map)
10739 f = *tcctx.cb.decl_map->get (f);
10740 n = splay_tree_lookup (ctx->sfield_map, key);
10741 sf = (tree) n->value;
10742 if (tcctx.cb.decl_map)
10743 sf = *tcctx.cb.decl_map->get (sf);
10744 src = build_simple_mem_ref_loc (loc, sarg);
10745 src = omp_build_component_ref (src, sf);
10746 dst = build_simple_mem_ref_loc (loc, arg);
10747 dst = omp_build_component_ref (dst, f);
10748 t = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
10749 append_to_statement_list (t, &list);
10750 break;
10751 case OMP_CLAUSE_REDUCTION:
10752 case OMP_CLAUSE_IN_REDUCTION:
10753 decl = OMP_CLAUSE_DECL (c);
10754 if (TREE_CODE (decl) == MEM_REF)
10755 {
10756 decl = TREE_OPERAND (decl, 0);
10757 if (TREE_CODE (decl) == POINTER_PLUS_EXPR)
10758 decl = TREE_OPERAND (decl, 0);
10759 if (TREE_CODE (decl) == INDIRECT_REF
10760 || TREE_CODE (decl) == ADDR_EXPR)
10761 decl = TREE_OPERAND (decl, 0);
10762 }
10763 key = (splay_tree_key) decl;
10764 n = splay_tree_lookup (ctx->field_map, key);
10765 if (n == NULL)
10766 break;
10767 f = (tree) n->value;
10768 if (tcctx.cb.decl_map)
10769 f = *tcctx.cb.decl_map->get (f);
10770 n = splay_tree_lookup (ctx->sfield_map, key);
10771 sf = (tree) n->value;
10772 if (tcctx.cb.decl_map)
10773 sf = *tcctx.cb.decl_map->get (sf);
10774 src = build_simple_mem_ref_loc (loc, sarg);
10775 src = omp_build_component_ref (src, sf);
10776 if (decl != OMP_CLAUSE_DECL (c)
10777 && TREE_CODE (TREE_TYPE (decl)) == REFERENCE_TYPE
10778 && TREE_CODE (TREE_TYPE (TREE_TYPE (decl))) == POINTER_TYPE)
10779 src = build_simple_mem_ref_loc (loc, src);
10780 dst = build_simple_mem_ref_loc (loc, arg);
10781 dst = omp_build_component_ref (dst, f);
10782 t = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
10783 append_to_statement_list (t, &list);
10784 break;
10785 case OMP_CLAUSE__LOOPTEMP_:
10786 /* Fields for first two _looptemp_ clauses are initialized by
10787 GOMP_taskloop*, the rest are handled like firstprivate. */
10788 if (looptempno < 2)
10789 {
10790 looptempno++;
10791 break;
10792 }
10793 /* FALLTHRU */
10794 case OMP_CLAUSE__REDUCTEMP_:
10795 case OMP_CLAUSE_FIRSTPRIVATE:
10796 decl = OMP_CLAUSE_DECL (c);
10797 if (is_variable_sized (decl))
10798 break;
10799 n = splay_tree_lookup (ctx->field_map, (splay_tree_key) decl);
10800 if (n == NULL)
10801 break;
10802 f = (tree) n->value;
10803 if (tcctx.cb.decl_map)
10804 f = *tcctx.cb.decl_map->get (f);
10805 n = splay_tree_lookup (ctx->sfield_map, (splay_tree_key) decl);
10806 if (n != NULL)
10807 {
10808 sf = (tree) n->value;
10809 if (tcctx.cb.decl_map)
10810 sf = *tcctx.cb.decl_map->get (sf);
10811 src = build_simple_mem_ref_loc (loc, sarg);
10812 src = omp_build_component_ref (src, sf);
10813 if (use_pointer_for_field (decl, NULL) || omp_is_reference (decl))
10814 src = build_simple_mem_ref_loc (loc, src);
10815 }
10816 else
10817 src = decl;
10818 dst = build_simple_mem_ref_loc (loc, arg);
10819 dst = omp_build_component_ref (dst, f);
10820 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_FIRSTPRIVATE)
10821 t = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
10822 else
10823 t = lang_hooks.decls.omp_clause_copy_ctor (c, dst, src);
10824 append_to_statement_list (t, &list);
10825 break;
10826 case OMP_CLAUSE_PRIVATE:
10827 if (! OMP_CLAUSE_PRIVATE_OUTER_REF (c))
10828 break;
10829 decl = OMP_CLAUSE_DECL (c);
10830 n = splay_tree_lookup (ctx->field_map, (splay_tree_key) decl);
10831 f = (tree) n->value;
10832 if (tcctx.cb.decl_map)
10833 f = *tcctx.cb.decl_map->get (f);
10834 n = splay_tree_lookup (ctx->sfield_map, (splay_tree_key) decl);
10835 if (n != NULL)
10836 {
10837 sf = (tree) n->value;
10838 if (tcctx.cb.decl_map)
10839 sf = *tcctx.cb.decl_map->get (sf);
10840 src = build_simple_mem_ref_loc (loc, sarg);
10841 src = omp_build_component_ref (src, sf);
10842 if (use_pointer_for_field (decl, NULL))
10843 src = build_simple_mem_ref_loc (loc, src);
10844 }
10845 else
10846 src = decl;
10847 dst = build_simple_mem_ref_loc (loc, arg);
10848 dst = omp_build_component_ref (dst, f);
10849 t = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
10850 append_to_statement_list (t, &list);
10851 break;
10852 default:
10853 break;
10854 }
10855
10856 /* Last pass: handle VLA firstprivates. */
10857 if (tcctx.cb.decl_map)
10858 for (c = gimple_omp_task_clauses (task_stmt); c; c = OMP_CLAUSE_CHAIN (c))
10859 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
10860 {
10861 tree ind, ptr, df;
10862
10863 decl = OMP_CLAUSE_DECL (c);
10864 if (!is_variable_sized (decl))
10865 continue;
10866 n = splay_tree_lookup (ctx->field_map, (splay_tree_key) decl);
10867 if (n == NULL)
10868 continue;
10869 f = (tree) n->value;
10870 f = *tcctx.cb.decl_map->get (f);
10871 gcc_assert (DECL_HAS_VALUE_EXPR_P (decl));
10872 ind = DECL_VALUE_EXPR (decl);
10873 gcc_assert (TREE_CODE (ind) == INDIRECT_REF);
10874 gcc_assert (DECL_P (TREE_OPERAND (ind, 0)));
10875 n = splay_tree_lookup (ctx->sfield_map,
10876 (splay_tree_key) TREE_OPERAND (ind, 0));
10877 sf = (tree) n->value;
10878 sf = *tcctx.cb.decl_map->get (sf);
10879 src = build_simple_mem_ref_loc (loc, sarg);
10880 src = omp_build_component_ref (src, sf);
10881 src = build_simple_mem_ref_loc (loc, src);
10882 dst = build_simple_mem_ref_loc (loc, arg);
10883 dst = omp_build_component_ref (dst, f);
10884 t = lang_hooks.decls.omp_clause_copy_ctor (c, dst, src);
10885 append_to_statement_list (t, &list);
10886 n = splay_tree_lookup (ctx->field_map,
10887 (splay_tree_key) TREE_OPERAND (ind, 0));
10888 df = (tree) n->value;
10889 df = *tcctx.cb.decl_map->get (df);
10890 ptr = build_simple_mem_ref_loc (loc, arg);
10891 ptr = omp_build_component_ref (ptr, df);
10892 t = build2 (MODIFY_EXPR, TREE_TYPE (ptr), ptr,
10893 build_fold_addr_expr_loc (loc, dst));
10894 append_to_statement_list (t, &list);
10895 }
10896
10897 t = build1 (RETURN_EXPR, void_type_node, NULL);
10898 append_to_statement_list (t, &list);
10899
10900 if (tcctx.cb.decl_map)
10901 delete tcctx.cb.decl_map;
10902 pop_gimplify_context (NULL);
10903 BIND_EXPR_BODY (bind) = list;
10904 pop_cfun ();
10905 }
10906
10907 static void
10908 lower_depend_clauses (tree *pclauses, gimple_seq *iseq, gimple_seq *oseq)
10909 {
10910 tree c, clauses;
10911 gimple *g;
10912 size_t cnt[4] = { 0, 0, 0, 0 }, idx = 2, i;
10913
10914 clauses = omp_find_clause (*pclauses, OMP_CLAUSE_DEPEND);
10915 gcc_assert (clauses);
10916 for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
10917 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND)
10918 switch (OMP_CLAUSE_DEPEND_KIND (c))
10919 {
10920 case OMP_CLAUSE_DEPEND_LAST:
10921 /* Lowering already done at gimplification. */
10922 return;
10923 case OMP_CLAUSE_DEPEND_IN:
10924 cnt[2]++;
10925 break;
10926 case OMP_CLAUSE_DEPEND_OUT:
10927 case OMP_CLAUSE_DEPEND_INOUT:
10928 cnt[0]++;
10929 break;
10930 case OMP_CLAUSE_DEPEND_MUTEXINOUTSET:
10931 cnt[1]++;
10932 break;
10933 case OMP_CLAUSE_DEPEND_DEPOBJ:
10934 cnt[3]++;
10935 break;
10936 case OMP_CLAUSE_DEPEND_SOURCE:
10937 case OMP_CLAUSE_DEPEND_SINK:
10938 /* FALLTHRU */
10939 default:
10940 gcc_unreachable ();
10941 }
10942 if (cnt[1] || cnt[3])
10943 idx = 5;
10944 size_t total = cnt[0] + cnt[1] + cnt[2] + cnt[3];
10945 tree type = build_array_type_nelts (ptr_type_node, total + idx);
10946 tree array = create_tmp_var (type);
10947 TREE_ADDRESSABLE (array) = 1;
10948 tree r = build4 (ARRAY_REF, ptr_type_node, array, size_int (0), NULL_TREE,
10949 NULL_TREE);
10950 if (idx == 5)
10951 {
10952 g = gimple_build_assign (r, build_int_cst (ptr_type_node, 0));
10953 gimple_seq_add_stmt (iseq, g);
10954 r = build4 (ARRAY_REF, ptr_type_node, array, size_int (1), NULL_TREE,
10955 NULL_TREE);
10956 }
10957 g = gimple_build_assign (r, build_int_cst (ptr_type_node, total));
10958 gimple_seq_add_stmt (iseq, g);
10959 for (i = 0; i < (idx == 5 ? 3 : 1); i++)
10960 {
10961 r = build4 (ARRAY_REF, ptr_type_node, array,
10962 size_int (i + 1 + (idx == 5)), NULL_TREE, NULL_TREE);
10963 g = gimple_build_assign (r, build_int_cst (ptr_type_node, cnt[i]));
10964 gimple_seq_add_stmt (iseq, g);
10965 }
10966 for (i = 0; i < 4; i++)
10967 {
10968 if (cnt[i] == 0)
10969 continue;
10970 for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
10971 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_DEPEND)
10972 continue;
10973 else
10974 {
10975 switch (OMP_CLAUSE_DEPEND_KIND (c))
10976 {
10977 case OMP_CLAUSE_DEPEND_IN:
10978 if (i != 2)
10979 continue;
10980 break;
10981 case OMP_CLAUSE_DEPEND_OUT:
10982 case OMP_CLAUSE_DEPEND_INOUT:
10983 if (i != 0)
10984 continue;
10985 break;
10986 case OMP_CLAUSE_DEPEND_MUTEXINOUTSET:
10987 if (i != 1)
10988 continue;
10989 break;
10990 case OMP_CLAUSE_DEPEND_DEPOBJ:
10991 if (i != 3)
10992 continue;
10993 break;
10994 default:
10995 gcc_unreachable ();
10996 }
10997 tree t = OMP_CLAUSE_DECL (c);
10998 t = fold_convert (ptr_type_node, t);
10999 gimplify_expr (&t, iseq, NULL, is_gimple_val, fb_rvalue);
11000 r = build4 (ARRAY_REF, ptr_type_node, array, size_int (idx++),
11001 NULL_TREE, NULL_TREE);
11002 g = gimple_build_assign (r, t);
11003 gimple_seq_add_stmt (iseq, g);
11004 }
11005 }
11006 c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE_DEPEND);
11007 OMP_CLAUSE_DEPEND_KIND (c) = OMP_CLAUSE_DEPEND_LAST;
11008 OMP_CLAUSE_DECL (c) = build_fold_addr_expr (array);
11009 OMP_CLAUSE_CHAIN (c) = *pclauses;
11010 *pclauses = c;
11011 tree clobber = build_constructor (type, NULL);
11012 TREE_THIS_VOLATILE (clobber) = 1;
11013 g = gimple_build_assign (array, clobber);
11014 gimple_seq_add_stmt (oseq, g);
11015 }
11016
11017 /* Lower the OpenMP parallel or task directive in the current statement
11018 in GSI_P. CTX holds context information for the directive. */
11019
11020 static void
11021 lower_omp_taskreg (gimple_stmt_iterator *gsi_p, omp_context *ctx)
11022 {
11023 tree clauses;
11024 tree child_fn, t;
11025 gimple *stmt = gsi_stmt (*gsi_p);
11026 gbind *par_bind, *bind, *dep_bind = NULL;
11027 gimple_seq par_body;
11028 location_t loc = gimple_location (stmt);
11029
11030 clauses = gimple_omp_taskreg_clauses (stmt);
11031 if (gimple_code (stmt) == GIMPLE_OMP_TASK
11032 && gimple_omp_task_taskwait_p (stmt))
11033 {
11034 par_bind = NULL;
11035 par_body = NULL;
11036 }
11037 else
11038 {
11039 par_bind
11040 = as_a <gbind *> (gimple_seq_first_stmt (gimple_omp_body (stmt)));
11041 par_body = gimple_bind_body (par_bind);
11042 }
11043 child_fn = ctx->cb.dst_fn;
11044 if (gimple_code (stmt) == GIMPLE_OMP_PARALLEL
11045 && !gimple_omp_parallel_combined_p (stmt))
11046 {
11047 struct walk_stmt_info wi;
11048 int ws_num = 0;
11049
11050 memset (&wi, 0, sizeof (wi));
11051 wi.info = &ws_num;
11052 wi.val_only = true;
11053 walk_gimple_seq (par_body, check_combined_parallel, NULL, &wi);
11054 if (ws_num == 1)
11055 gimple_omp_parallel_set_combined_p (stmt, true);
11056 }
11057 gimple_seq dep_ilist = NULL;
11058 gimple_seq dep_olist = NULL;
11059 if (gimple_code (stmt) == GIMPLE_OMP_TASK
11060 && omp_find_clause (clauses, OMP_CLAUSE_DEPEND))
11061 {
11062 push_gimplify_context ();
11063 dep_bind = gimple_build_bind (NULL, NULL, make_node (BLOCK));
11064 lower_depend_clauses (gimple_omp_task_clauses_ptr (stmt),
11065 &dep_ilist, &dep_olist);
11066 }
11067
11068 if (gimple_code (stmt) == GIMPLE_OMP_TASK
11069 && gimple_omp_task_taskwait_p (stmt))
11070 {
11071 if (dep_bind)
11072 {
11073 gsi_replace (gsi_p, dep_bind, true);
11074 gimple_bind_add_seq (dep_bind, dep_ilist);
11075 gimple_bind_add_stmt (dep_bind, stmt);
11076 gimple_bind_add_seq (dep_bind, dep_olist);
11077 pop_gimplify_context (dep_bind);
11078 }
11079 return;
11080 }
11081
11082 if (ctx->srecord_type)
11083 create_task_copyfn (as_a <gomp_task *> (stmt), ctx);
11084
11085 gimple_seq tskred_ilist = NULL;
11086 gimple_seq tskred_olist = NULL;
11087 if ((is_task_ctx (ctx)
11088 && gimple_omp_task_taskloop_p (ctx->stmt)
11089 && omp_find_clause (gimple_omp_task_clauses (ctx->stmt),
11090 OMP_CLAUSE_REDUCTION))
11091 || (is_parallel_ctx (ctx)
11092 && omp_find_clause (gimple_omp_parallel_clauses (stmt),
11093 OMP_CLAUSE__REDUCTEMP_)))
11094 {
11095 if (dep_bind == NULL)
11096 {
11097 push_gimplify_context ();
11098 dep_bind = gimple_build_bind (NULL, NULL, make_node (BLOCK));
11099 }
11100 lower_omp_task_reductions (ctx, is_task_ctx (ctx) ? OMP_TASKLOOP
11101 : OMP_PARALLEL,
11102 gimple_omp_taskreg_clauses (ctx->stmt),
11103 &tskred_ilist, &tskred_olist);
11104 }
11105
11106 push_gimplify_context ();
11107
11108 gimple_seq par_olist = NULL;
11109 gimple_seq par_ilist = NULL;
11110 gimple_seq par_rlist = NULL;
11111 bool phony_construct = gimple_code (stmt) == GIMPLE_OMP_PARALLEL
11112 && gimple_omp_parallel_grid_phony (as_a <gomp_parallel *> (stmt));
11113 if (phony_construct && ctx->record_type)
11114 {
11115 gcc_checking_assert (!ctx->receiver_decl);
11116 ctx->receiver_decl = create_tmp_var
11117 (build_reference_type (ctx->record_type), ".omp_rec");
11118 }
11119 lower_rec_input_clauses (clauses, &par_ilist, &par_olist, ctx, NULL);
11120 lower_omp (&par_body, ctx);
11121 if (gimple_code (stmt) == GIMPLE_OMP_PARALLEL)
11122 lower_reduction_clauses (clauses, &par_rlist, NULL, ctx);
11123
11124 /* Declare all the variables created by mapping and the variables
11125 declared in the scope of the parallel body. */
11126 record_vars_into (ctx->block_vars, child_fn);
11127 maybe_remove_omp_member_access_dummy_vars (par_bind);
11128 record_vars_into (gimple_bind_vars (par_bind), child_fn);
11129
11130 if (ctx->record_type)
11131 {
11132 ctx->sender_decl
11133 = create_tmp_var (ctx->srecord_type ? ctx->srecord_type
11134 : ctx->record_type, ".omp_data_o");
11135 DECL_NAMELESS (ctx->sender_decl) = 1;
11136 TREE_ADDRESSABLE (ctx->sender_decl) = 1;
11137 gimple_omp_taskreg_set_data_arg (stmt, ctx->sender_decl);
11138 }
11139
11140 gimple_seq olist = NULL;
11141 gimple_seq ilist = NULL;
11142 lower_send_clauses (clauses, &ilist, &olist, ctx);
11143 lower_send_shared_vars (&ilist, &olist, ctx);
11144
11145 if (ctx->record_type)
11146 {
11147 tree clobber = build_constructor (TREE_TYPE (ctx->sender_decl), NULL);
11148 TREE_THIS_VOLATILE (clobber) = 1;
11149 gimple_seq_add_stmt (&olist, gimple_build_assign (ctx->sender_decl,
11150 clobber));
11151 }
11152
11153 /* Once all the expansions are done, sequence all the different
11154 fragments inside gimple_omp_body. */
11155
11156 gimple_seq new_body = NULL;
11157
11158 if (ctx->record_type)
11159 {
11160 t = build_fold_addr_expr_loc (loc, ctx->sender_decl);
11161 /* fixup_child_record_type might have changed receiver_decl's type. */
11162 t = fold_convert_loc (loc, TREE_TYPE (ctx->receiver_decl), t);
11163 gimple_seq_add_stmt (&new_body,
11164 gimple_build_assign (ctx->receiver_decl, t));
11165 }
11166
11167 gimple_seq_add_seq (&new_body, par_ilist);
11168 gimple_seq_add_seq (&new_body, par_body);
11169 gimple_seq_add_seq (&new_body, par_rlist);
11170 if (ctx->cancellable)
11171 gimple_seq_add_stmt (&new_body, gimple_build_label (ctx->cancel_label));
11172 gimple_seq_add_seq (&new_body, par_olist);
11173 new_body = maybe_catch_exception (new_body);
11174 if (gimple_code (stmt) == GIMPLE_OMP_TASK)
11175 gimple_seq_add_stmt (&new_body,
11176 gimple_build_omp_continue (integer_zero_node,
11177 integer_zero_node));
11178 if (!phony_construct)
11179 {
11180 gimple_seq_add_stmt (&new_body, gimple_build_omp_return (false));
11181 gimple_omp_set_body (stmt, new_body);
11182 }
11183
11184 if (dep_bind && gimple_bind_block (par_bind) == NULL_TREE)
11185 bind = gimple_build_bind (NULL, NULL, make_node (BLOCK));
11186 else
11187 bind = gimple_build_bind (NULL, NULL, gimple_bind_block (par_bind));
11188 gsi_replace (gsi_p, dep_bind ? dep_bind : bind, true);
11189 gimple_bind_add_seq (bind, ilist);
11190 if (!phony_construct)
11191 gimple_bind_add_stmt (bind, stmt);
11192 else
11193 gimple_bind_add_seq (bind, new_body);
11194 gimple_bind_add_seq (bind, olist);
11195
11196 pop_gimplify_context (NULL);
11197
11198 if (dep_bind)
11199 {
11200 gimple_bind_add_seq (dep_bind, dep_ilist);
11201 gimple_bind_add_seq (dep_bind, tskred_ilist);
11202 gimple_bind_add_stmt (dep_bind, bind);
11203 gimple_bind_add_seq (dep_bind, tskred_olist);
11204 gimple_bind_add_seq (dep_bind, dep_olist);
11205 pop_gimplify_context (dep_bind);
11206 }
11207 }
11208
11209 /* Lower the GIMPLE_OMP_TARGET in the current statement
11210 in GSI_P. CTX holds context information for the directive. */
11211
11212 static void
11213 lower_omp_target (gimple_stmt_iterator *gsi_p, omp_context *ctx)
11214 {
11215 tree clauses;
11216 tree child_fn, t, c;
11217 gomp_target *stmt = as_a <gomp_target *> (gsi_stmt (*gsi_p));
11218 gbind *tgt_bind, *bind, *dep_bind = NULL;
11219 gimple_seq tgt_body, olist, ilist, fplist, new_body;
11220 location_t loc = gimple_location (stmt);
11221 bool offloaded, data_region;
11222 unsigned int map_cnt = 0;
11223
11224 offloaded = is_gimple_omp_offloaded (stmt);
11225 switch (gimple_omp_target_kind (stmt))
11226 {
11227 case GF_OMP_TARGET_KIND_REGION:
11228 case GF_OMP_TARGET_KIND_UPDATE:
11229 case GF_OMP_TARGET_KIND_ENTER_DATA:
11230 case GF_OMP_TARGET_KIND_EXIT_DATA:
11231 case GF_OMP_TARGET_KIND_OACC_PARALLEL:
11232 case GF_OMP_TARGET_KIND_OACC_KERNELS:
11233 case GF_OMP_TARGET_KIND_OACC_UPDATE:
11234 case GF_OMP_TARGET_KIND_OACC_ENTER_EXIT_DATA:
11235 case GF_OMP_TARGET_KIND_OACC_DECLARE:
11236 data_region = false;
11237 break;
11238 case GF_OMP_TARGET_KIND_DATA:
11239 case GF_OMP_TARGET_KIND_OACC_DATA:
11240 case GF_OMP_TARGET_KIND_OACC_HOST_DATA:
11241 data_region = true;
11242 break;
11243 default:
11244 gcc_unreachable ();
11245 }
11246
11247 clauses = gimple_omp_target_clauses (stmt);
11248
11249 gimple_seq dep_ilist = NULL;
11250 gimple_seq dep_olist = NULL;
11251 if (omp_find_clause (clauses, OMP_CLAUSE_DEPEND))
11252 {
11253 push_gimplify_context ();
11254 dep_bind = gimple_build_bind (NULL, NULL, make_node (BLOCK));
11255 lower_depend_clauses (gimple_omp_target_clauses_ptr (stmt),
11256 &dep_ilist, &dep_olist);
11257 }
11258
11259 tgt_bind = NULL;
11260 tgt_body = NULL;
11261 if (offloaded)
11262 {
11263 tgt_bind = gimple_seq_first_stmt_as_a_bind (gimple_omp_body (stmt));
11264 tgt_body = gimple_bind_body (tgt_bind);
11265 }
11266 else if (data_region)
11267 tgt_body = gimple_omp_body (stmt);
11268 child_fn = ctx->cb.dst_fn;
11269
11270 push_gimplify_context ();
11271 fplist = NULL;
11272
11273 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
11274 switch (OMP_CLAUSE_CODE (c))
11275 {
11276 tree var, x;
11277
11278 default:
11279 break;
11280 case OMP_CLAUSE_MAP:
11281 #if CHECKING_P
11282 /* First check what we're prepared to handle in the following. */
11283 switch (OMP_CLAUSE_MAP_KIND (c))
11284 {
11285 case GOMP_MAP_ALLOC:
11286 case GOMP_MAP_TO:
11287 case GOMP_MAP_FROM:
11288 case GOMP_MAP_TOFROM:
11289 case GOMP_MAP_POINTER:
11290 case GOMP_MAP_TO_PSET:
11291 case GOMP_MAP_DELETE:
11292 case GOMP_MAP_RELEASE:
11293 case GOMP_MAP_ALWAYS_TO:
11294 case GOMP_MAP_ALWAYS_FROM:
11295 case GOMP_MAP_ALWAYS_TOFROM:
11296 case GOMP_MAP_FIRSTPRIVATE_POINTER:
11297 case GOMP_MAP_FIRSTPRIVATE_REFERENCE:
11298 case GOMP_MAP_STRUCT:
11299 case GOMP_MAP_ALWAYS_POINTER:
11300 break;
11301 case GOMP_MAP_FORCE_ALLOC:
11302 case GOMP_MAP_FORCE_TO:
11303 case GOMP_MAP_FORCE_FROM:
11304 case GOMP_MAP_FORCE_TOFROM:
11305 case GOMP_MAP_FORCE_PRESENT:
11306 case GOMP_MAP_FORCE_DEVICEPTR:
11307 case GOMP_MAP_DEVICE_RESIDENT:
11308 case GOMP_MAP_LINK:
11309 gcc_assert (is_gimple_omp_oacc (stmt));
11310 break;
11311 default:
11312 gcc_unreachable ();
11313 }
11314 #endif
11315 /* FALLTHRU */
11316 case OMP_CLAUSE_TO:
11317 case OMP_CLAUSE_FROM:
11318 oacc_firstprivate:
11319 var = OMP_CLAUSE_DECL (c);
11320 if (!DECL_P (var))
11321 {
11322 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_MAP
11323 || (!OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c)
11324 && (OMP_CLAUSE_MAP_KIND (c)
11325 != GOMP_MAP_FIRSTPRIVATE_POINTER)))
11326 map_cnt++;
11327 continue;
11328 }
11329
11330 if (DECL_SIZE (var)
11331 && TREE_CODE (DECL_SIZE (var)) != INTEGER_CST)
11332 {
11333 tree var2 = DECL_VALUE_EXPR (var);
11334 gcc_assert (TREE_CODE (var2) == INDIRECT_REF);
11335 var2 = TREE_OPERAND (var2, 0);
11336 gcc_assert (DECL_P (var2));
11337 var = var2;
11338 }
11339
11340 if (offloaded
11341 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
11342 && (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER
11343 || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_REFERENCE))
11344 {
11345 if (TREE_CODE (TREE_TYPE (var)) == ARRAY_TYPE)
11346 {
11347 if (is_global_var (maybe_lookup_decl_in_outer_ctx (var, ctx))
11348 && varpool_node::get_create (var)->offloadable)
11349 continue;
11350
11351 tree type = build_pointer_type (TREE_TYPE (var));
11352 tree new_var = lookup_decl (var, ctx);
11353 x = create_tmp_var_raw (type, get_name (new_var));
11354 gimple_add_tmp_var (x);
11355 x = build_simple_mem_ref (x);
11356 SET_DECL_VALUE_EXPR (new_var, x);
11357 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
11358 }
11359 continue;
11360 }
11361
11362 if (!maybe_lookup_field (var, ctx))
11363 continue;
11364
11365 /* Don't remap oacc parallel reduction variables, because the
11366 intermediate result must be local to each gang. */
11367 if (offloaded && !(OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
11368 && OMP_CLAUSE_MAP_IN_REDUCTION (c)))
11369 {
11370 x = build_receiver_ref (var, true, ctx);
11371 tree new_var = lookup_decl (var, ctx);
11372
11373 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
11374 && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_POINTER
11375 && !OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c)
11376 && TREE_CODE (TREE_TYPE (var)) == ARRAY_TYPE)
11377 x = build_simple_mem_ref (x);
11378 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
11379 {
11380 gcc_assert (is_gimple_omp_oacc (ctx->stmt));
11381 if (omp_is_reference (new_var)
11382 && TREE_CODE (TREE_TYPE (new_var)) != POINTER_TYPE)
11383 {
11384 /* Create a local object to hold the instance
11385 value. */
11386 tree type = TREE_TYPE (TREE_TYPE (new_var));
11387 const char *id = IDENTIFIER_POINTER (DECL_NAME (new_var));
11388 tree inst = create_tmp_var (type, id);
11389 gimplify_assign (inst, fold_indirect_ref (x), &fplist);
11390 x = build_fold_addr_expr (inst);
11391 }
11392 gimplify_assign (new_var, x, &fplist);
11393 }
11394 else if (DECL_P (new_var))
11395 {
11396 SET_DECL_VALUE_EXPR (new_var, x);
11397 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
11398 }
11399 else
11400 gcc_unreachable ();
11401 }
11402 map_cnt++;
11403 break;
11404
11405 case OMP_CLAUSE_FIRSTPRIVATE:
11406 if (is_oacc_parallel (ctx))
11407 goto oacc_firstprivate;
11408 map_cnt++;
11409 var = OMP_CLAUSE_DECL (c);
11410 if (!omp_is_reference (var)
11411 && !is_gimple_reg_type (TREE_TYPE (var)))
11412 {
11413 tree new_var = lookup_decl (var, ctx);
11414 if (is_variable_sized (var))
11415 {
11416 tree pvar = DECL_VALUE_EXPR (var);
11417 gcc_assert (TREE_CODE (pvar) == INDIRECT_REF);
11418 pvar = TREE_OPERAND (pvar, 0);
11419 gcc_assert (DECL_P (pvar));
11420 tree new_pvar = lookup_decl (pvar, ctx);
11421 x = build_fold_indirect_ref (new_pvar);
11422 TREE_THIS_NOTRAP (x) = 1;
11423 }
11424 else
11425 x = build_receiver_ref (var, true, ctx);
11426 SET_DECL_VALUE_EXPR (new_var, x);
11427 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
11428 }
11429 break;
11430
11431 case OMP_CLAUSE_PRIVATE:
11432 if (is_gimple_omp_oacc (ctx->stmt))
11433 break;
11434 var = OMP_CLAUSE_DECL (c);
11435 if (is_variable_sized (var))
11436 {
11437 tree new_var = lookup_decl (var, ctx);
11438 tree pvar = DECL_VALUE_EXPR (var);
11439 gcc_assert (TREE_CODE (pvar) == INDIRECT_REF);
11440 pvar = TREE_OPERAND (pvar, 0);
11441 gcc_assert (DECL_P (pvar));
11442 tree new_pvar = lookup_decl (pvar, ctx);
11443 x = build_fold_indirect_ref (new_pvar);
11444 TREE_THIS_NOTRAP (x) = 1;
11445 SET_DECL_VALUE_EXPR (new_var, x);
11446 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
11447 }
11448 break;
11449
11450 case OMP_CLAUSE_USE_DEVICE_PTR:
11451 case OMP_CLAUSE_IS_DEVICE_PTR:
11452 var = OMP_CLAUSE_DECL (c);
11453 map_cnt++;
11454 if (is_variable_sized (var))
11455 {
11456 tree new_var = lookup_decl (var, ctx);
11457 tree pvar = DECL_VALUE_EXPR (var);
11458 gcc_assert (TREE_CODE (pvar) == INDIRECT_REF);
11459 pvar = TREE_OPERAND (pvar, 0);
11460 gcc_assert (DECL_P (pvar));
11461 tree new_pvar = lookup_decl (pvar, ctx);
11462 x = build_fold_indirect_ref (new_pvar);
11463 TREE_THIS_NOTRAP (x) = 1;
11464 SET_DECL_VALUE_EXPR (new_var, x);
11465 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
11466 }
11467 else if (TREE_CODE (TREE_TYPE (var)) == ARRAY_TYPE)
11468 {
11469 tree new_var = lookup_decl (var, ctx);
11470 tree type = build_pointer_type (TREE_TYPE (var));
11471 x = create_tmp_var_raw (type, get_name (new_var));
11472 gimple_add_tmp_var (x);
11473 x = build_simple_mem_ref (x);
11474 SET_DECL_VALUE_EXPR (new_var, x);
11475 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
11476 }
11477 else
11478 {
11479 tree new_var = lookup_decl (var, ctx);
11480 x = create_tmp_var_raw (TREE_TYPE (new_var), get_name (new_var));
11481 gimple_add_tmp_var (x);
11482 SET_DECL_VALUE_EXPR (new_var, x);
11483 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
11484 }
11485 break;
11486 }
11487
11488 if (offloaded)
11489 {
11490 target_nesting_level++;
11491 lower_omp (&tgt_body, ctx);
11492 target_nesting_level--;
11493 }
11494 else if (data_region)
11495 lower_omp (&tgt_body, ctx);
11496
11497 if (offloaded)
11498 {
11499 /* Declare all the variables created by mapping and the variables
11500 declared in the scope of the target body. */
11501 record_vars_into (ctx->block_vars, child_fn);
11502 maybe_remove_omp_member_access_dummy_vars (tgt_bind);
11503 record_vars_into (gimple_bind_vars (tgt_bind), child_fn);
11504 }
11505
11506 olist = NULL;
11507 ilist = NULL;
11508 if (ctx->record_type)
11509 {
11510 ctx->sender_decl
11511 = create_tmp_var (ctx->record_type, ".omp_data_arr");
11512 DECL_NAMELESS (ctx->sender_decl) = 1;
11513 TREE_ADDRESSABLE (ctx->sender_decl) = 1;
11514 t = make_tree_vec (3);
11515 TREE_VEC_ELT (t, 0) = ctx->sender_decl;
11516 TREE_VEC_ELT (t, 1)
11517 = create_tmp_var (build_array_type_nelts (size_type_node, map_cnt),
11518 ".omp_data_sizes");
11519 DECL_NAMELESS (TREE_VEC_ELT (t, 1)) = 1;
11520 TREE_ADDRESSABLE (TREE_VEC_ELT (t, 1)) = 1;
11521 TREE_STATIC (TREE_VEC_ELT (t, 1)) = 1;
11522 tree tkind_type = short_unsigned_type_node;
11523 int talign_shift = 8;
11524 TREE_VEC_ELT (t, 2)
11525 = create_tmp_var (build_array_type_nelts (tkind_type, map_cnt),
11526 ".omp_data_kinds");
11527 DECL_NAMELESS (TREE_VEC_ELT (t, 2)) = 1;
11528 TREE_ADDRESSABLE (TREE_VEC_ELT (t, 2)) = 1;
11529 TREE_STATIC (TREE_VEC_ELT (t, 2)) = 1;
11530 gimple_omp_target_set_data_arg (stmt, t);
11531
11532 vec<constructor_elt, va_gc> *vsize;
11533 vec<constructor_elt, va_gc> *vkind;
11534 vec_alloc (vsize, map_cnt);
11535 vec_alloc (vkind, map_cnt);
11536 unsigned int map_idx = 0;
11537
11538 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
11539 switch (OMP_CLAUSE_CODE (c))
11540 {
11541 tree ovar, nc, s, purpose, var, x, type;
11542 unsigned int talign;
11543
11544 default:
11545 break;
11546
11547 case OMP_CLAUSE_MAP:
11548 case OMP_CLAUSE_TO:
11549 case OMP_CLAUSE_FROM:
11550 oacc_firstprivate_map:
11551 nc = c;
11552 ovar = OMP_CLAUSE_DECL (c);
11553 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
11554 && (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER
11555 || (OMP_CLAUSE_MAP_KIND (c)
11556 == GOMP_MAP_FIRSTPRIVATE_REFERENCE)))
11557 break;
11558 if (!DECL_P (ovar))
11559 {
11560 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
11561 && OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c))
11562 {
11563 gcc_checking_assert (OMP_CLAUSE_DECL (OMP_CLAUSE_CHAIN (c))
11564 == get_base_address (ovar));
11565 nc = OMP_CLAUSE_CHAIN (c);
11566 ovar = OMP_CLAUSE_DECL (nc);
11567 }
11568 else
11569 {
11570 tree x = build_sender_ref (ovar, ctx);
11571 tree v
11572 = build_fold_addr_expr_with_type (ovar, ptr_type_node);
11573 gimplify_assign (x, v, &ilist);
11574 nc = NULL_TREE;
11575 }
11576 }
11577 else
11578 {
11579 if (DECL_SIZE (ovar)
11580 && TREE_CODE (DECL_SIZE (ovar)) != INTEGER_CST)
11581 {
11582 tree ovar2 = DECL_VALUE_EXPR (ovar);
11583 gcc_assert (TREE_CODE (ovar2) == INDIRECT_REF);
11584 ovar2 = TREE_OPERAND (ovar2, 0);
11585 gcc_assert (DECL_P (ovar2));
11586 ovar = ovar2;
11587 }
11588 if (!maybe_lookup_field (ovar, ctx))
11589 continue;
11590 }
11591
11592 talign = TYPE_ALIGN_UNIT (TREE_TYPE (ovar));
11593 if (DECL_P (ovar) && DECL_ALIGN_UNIT (ovar) > talign)
11594 talign = DECL_ALIGN_UNIT (ovar);
11595 if (nc)
11596 {
11597 var = lookup_decl_in_outer_ctx (ovar, ctx);
11598 x = build_sender_ref (ovar, ctx);
11599
11600 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
11601 && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_POINTER
11602 && !OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c)
11603 && TREE_CODE (TREE_TYPE (ovar)) == ARRAY_TYPE)
11604 {
11605 gcc_assert (offloaded);
11606 tree avar
11607 = create_tmp_var (TREE_TYPE (TREE_TYPE (x)));
11608 mark_addressable (avar);
11609 gimplify_assign (avar, build_fold_addr_expr (var), &ilist);
11610 talign = DECL_ALIGN_UNIT (avar);
11611 avar = build_fold_addr_expr (avar);
11612 gimplify_assign (x, avar, &ilist);
11613 }
11614 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
11615 {
11616 gcc_assert (is_gimple_omp_oacc (ctx->stmt));
11617 if (!omp_is_reference (var))
11618 {
11619 if (is_gimple_reg (var)
11620 && OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (c))
11621 TREE_NO_WARNING (var) = 1;
11622 var = build_fold_addr_expr (var);
11623 }
11624 else
11625 talign = TYPE_ALIGN_UNIT (TREE_TYPE (TREE_TYPE (ovar)));
11626 gimplify_assign (x, var, &ilist);
11627 }
11628 else if (is_gimple_reg (var))
11629 {
11630 gcc_assert (offloaded);
11631 tree avar = create_tmp_var (TREE_TYPE (var));
11632 mark_addressable (avar);
11633 enum gomp_map_kind map_kind = OMP_CLAUSE_MAP_KIND (c);
11634 if (GOMP_MAP_COPY_TO_P (map_kind)
11635 || map_kind == GOMP_MAP_POINTER
11636 || map_kind == GOMP_MAP_TO_PSET
11637 || map_kind == GOMP_MAP_FORCE_DEVICEPTR)
11638 {
11639 /* If we need to initialize a temporary
11640 with VAR because it is not addressable, and
11641 the variable hasn't been initialized yet, then
11642 we'll get a warning for the store to avar.
11643 Don't warn in that case, the mapping might
11644 be implicit. */
11645 TREE_NO_WARNING (var) = 1;
11646 gimplify_assign (avar, var, &ilist);
11647 }
11648 avar = build_fold_addr_expr (avar);
11649 gimplify_assign (x, avar, &ilist);
11650 if ((GOMP_MAP_COPY_FROM_P (map_kind)
11651 || map_kind == GOMP_MAP_FORCE_DEVICEPTR)
11652 && !TYPE_READONLY (TREE_TYPE (var)))
11653 {
11654 x = unshare_expr (x);
11655 x = build_simple_mem_ref (x);
11656 gimplify_assign (var, x, &olist);
11657 }
11658 }
11659 else
11660 {
11661 var = build_fold_addr_expr (var);
11662 gimplify_assign (x, var, &ilist);
11663 }
11664 }
11665 s = NULL_TREE;
11666 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
11667 {
11668 gcc_checking_assert (is_gimple_omp_oacc (ctx->stmt));
11669 s = TREE_TYPE (ovar);
11670 if (TREE_CODE (s) == REFERENCE_TYPE)
11671 s = TREE_TYPE (s);
11672 s = TYPE_SIZE_UNIT (s);
11673 }
11674 else
11675 s = OMP_CLAUSE_SIZE (c);
11676 if (s == NULL_TREE)
11677 s = TYPE_SIZE_UNIT (TREE_TYPE (ovar));
11678 s = fold_convert (size_type_node, s);
11679 purpose = size_int (map_idx++);
11680 CONSTRUCTOR_APPEND_ELT (vsize, purpose, s);
11681 if (TREE_CODE (s) != INTEGER_CST)
11682 TREE_STATIC (TREE_VEC_ELT (t, 1)) = 0;
11683
11684 unsigned HOST_WIDE_INT tkind, tkind_zero;
11685 switch (OMP_CLAUSE_CODE (c))
11686 {
11687 case OMP_CLAUSE_MAP:
11688 tkind = OMP_CLAUSE_MAP_KIND (c);
11689 tkind_zero = tkind;
11690 if (OMP_CLAUSE_MAP_MAYBE_ZERO_LENGTH_ARRAY_SECTION (c))
11691 switch (tkind)
11692 {
11693 case GOMP_MAP_ALLOC:
11694 case GOMP_MAP_TO:
11695 case GOMP_MAP_FROM:
11696 case GOMP_MAP_TOFROM:
11697 case GOMP_MAP_ALWAYS_TO:
11698 case GOMP_MAP_ALWAYS_FROM:
11699 case GOMP_MAP_ALWAYS_TOFROM:
11700 case GOMP_MAP_RELEASE:
11701 case GOMP_MAP_FORCE_TO:
11702 case GOMP_MAP_FORCE_FROM:
11703 case GOMP_MAP_FORCE_TOFROM:
11704 case GOMP_MAP_FORCE_PRESENT:
11705 tkind_zero = GOMP_MAP_ZERO_LEN_ARRAY_SECTION;
11706 break;
11707 case GOMP_MAP_DELETE:
11708 tkind_zero = GOMP_MAP_DELETE_ZERO_LEN_ARRAY_SECTION;
11709 default:
11710 break;
11711 }
11712 if (tkind_zero != tkind)
11713 {
11714 if (integer_zerop (s))
11715 tkind = tkind_zero;
11716 else if (integer_nonzerop (s))
11717 tkind_zero = tkind;
11718 }
11719 break;
11720 case OMP_CLAUSE_FIRSTPRIVATE:
11721 gcc_checking_assert (is_gimple_omp_oacc (ctx->stmt));
11722 tkind = GOMP_MAP_TO;
11723 tkind_zero = tkind;
11724 break;
11725 case OMP_CLAUSE_TO:
11726 tkind = GOMP_MAP_TO;
11727 tkind_zero = tkind;
11728 break;
11729 case OMP_CLAUSE_FROM:
11730 tkind = GOMP_MAP_FROM;
11731 tkind_zero = tkind;
11732 break;
11733 default:
11734 gcc_unreachable ();
11735 }
11736 gcc_checking_assert (tkind
11737 < (HOST_WIDE_INT_C (1U) << talign_shift));
11738 gcc_checking_assert (tkind_zero
11739 < (HOST_WIDE_INT_C (1U) << talign_shift));
11740 talign = ceil_log2 (talign);
11741 tkind |= talign << talign_shift;
11742 tkind_zero |= talign << talign_shift;
11743 gcc_checking_assert (tkind
11744 <= tree_to_uhwi (TYPE_MAX_VALUE (tkind_type)));
11745 gcc_checking_assert (tkind_zero
11746 <= tree_to_uhwi (TYPE_MAX_VALUE (tkind_type)));
11747 if (tkind == tkind_zero)
11748 x = build_int_cstu (tkind_type, tkind);
11749 else
11750 {
11751 TREE_STATIC (TREE_VEC_ELT (t, 2)) = 0;
11752 x = build3 (COND_EXPR, tkind_type,
11753 fold_build2 (EQ_EXPR, boolean_type_node,
11754 unshare_expr (s), size_zero_node),
11755 build_int_cstu (tkind_type, tkind_zero),
11756 build_int_cstu (tkind_type, tkind));
11757 }
11758 CONSTRUCTOR_APPEND_ELT (vkind, purpose, x);
11759 if (nc && nc != c)
11760 c = nc;
11761 break;
11762
11763 case OMP_CLAUSE_FIRSTPRIVATE:
11764 if (is_oacc_parallel (ctx))
11765 goto oacc_firstprivate_map;
11766 ovar = OMP_CLAUSE_DECL (c);
11767 if (omp_is_reference (ovar))
11768 talign = TYPE_ALIGN_UNIT (TREE_TYPE (TREE_TYPE (ovar)));
11769 else
11770 talign = DECL_ALIGN_UNIT (ovar);
11771 var = lookup_decl_in_outer_ctx (ovar, ctx);
11772 x = build_sender_ref (ovar, ctx);
11773 tkind = GOMP_MAP_FIRSTPRIVATE;
11774 type = TREE_TYPE (ovar);
11775 if (omp_is_reference (ovar))
11776 type = TREE_TYPE (type);
11777 if ((INTEGRAL_TYPE_P (type)
11778 && TYPE_PRECISION (type) <= POINTER_SIZE)
11779 || TREE_CODE (type) == POINTER_TYPE)
11780 {
11781 tkind = GOMP_MAP_FIRSTPRIVATE_INT;
11782 tree t = var;
11783 if (omp_is_reference (var))
11784 t = build_simple_mem_ref (var);
11785 else if (OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (c))
11786 TREE_NO_WARNING (var) = 1;
11787 if (TREE_CODE (type) != POINTER_TYPE)
11788 t = fold_convert (pointer_sized_int_node, t);
11789 t = fold_convert (TREE_TYPE (x), t);
11790 gimplify_assign (x, t, &ilist);
11791 }
11792 else if (omp_is_reference (var))
11793 gimplify_assign (x, var, &ilist);
11794 else if (is_gimple_reg (var))
11795 {
11796 tree avar = create_tmp_var (TREE_TYPE (var));
11797 mark_addressable (avar);
11798 if (OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (c))
11799 TREE_NO_WARNING (var) = 1;
11800 gimplify_assign (avar, var, &ilist);
11801 avar = build_fold_addr_expr (avar);
11802 gimplify_assign (x, avar, &ilist);
11803 }
11804 else
11805 {
11806 var = build_fold_addr_expr (var);
11807 gimplify_assign (x, var, &ilist);
11808 }
11809 if (tkind == GOMP_MAP_FIRSTPRIVATE_INT)
11810 s = size_int (0);
11811 else if (omp_is_reference (ovar))
11812 s = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ovar)));
11813 else
11814 s = TYPE_SIZE_UNIT (TREE_TYPE (ovar));
11815 s = fold_convert (size_type_node, s);
11816 purpose = size_int (map_idx++);
11817 CONSTRUCTOR_APPEND_ELT (vsize, purpose, s);
11818 if (TREE_CODE (s) != INTEGER_CST)
11819 TREE_STATIC (TREE_VEC_ELT (t, 1)) = 0;
11820
11821 gcc_checking_assert (tkind
11822 < (HOST_WIDE_INT_C (1U) << talign_shift));
11823 talign = ceil_log2 (talign);
11824 tkind |= talign << talign_shift;
11825 gcc_checking_assert (tkind
11826 <= tree_to_uhwi (TYPE_MAX_VALUE (tkind_type)));
11827 CONSTRUCTOR_APPEND_ELT (vkind, purpose,
11828 build_int_cstu (tkind_type, tkind));
11829 break;
11830
11831 case OMP_CLAUSE_USE_DEVICE_PTR:
11832 case OMP_CLAUSE_IS_DEVICE_PTR:
11833 ovar = OMP_CLAUSE_DECL (c);
11834 var = lookup_decl_in_outer_ctx (ovar, ctx);
11835 x = build_sender_ref (ovar, ctx);
11836 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_USE_DEVICE_PTR)
11837 tkind = GOMP_MAP_USE_DEVICE_PTR;
11838 else
11839 tkind = GOMP_MAP_FIRSTPRIVATE_INT;
11840 type = TREE_TYPE (ovar);
11841 if (TREE_CODE (type) == ARRAY_TYPE)
11842 var = build_fold_addr_expr (var);
11843 else
11844 {
11845 if (omp_is_reference (ovar))
11846 {
11847 type = TREE_TYPE (type);
11848 if (TREE_CODE (type) != ARRAY_TYPE)
11849 var = build_simple_mem_ref (var);
11850 var = fold_convert (TREE_TYPE (x), var);
11851 }
11852 }
11853 gimplify_assign (x, var, &ilist);
11854 s = size_int (0);
11855 purpose = size_int (map_idx++);
11856 CONSTRUCTOR_APPEND_ELT (vsize, purpose, s);
11857 gcc_checking_assert (tkind
11858 < (HOST_WIDE_INT_C (1U) << talign_shift));
11859 gcc_checking_assert (tkind
11860 <= tree_to_uhwi (TYPE_MAX_VALUE (tkind_type)));
11861 CONSTRUCTOR_APPEND_ELT (vkind, purpose,
11862 build_int_cstu (tkind_type, tkind));
11863 break;
11864 }
11865
11866 gcc_assert (map_idx == map_cnt);
11867
11868 DECL_INITIAL (TREE_VEC_ELT (t, 1))
11869 = build_constructor (TREE_TYPE (TREE_VEC_ELT (t, 1)), vsize);
11870 DECL_INITIAL (TREE_VEC_ELT (t, 2))
11871 = build_constructor (TREE_TYPE (TREE_VEC_ELT (t, 2)), vkind);
11872 for (int i = 1; i <= 2; i++)
11873 if (!TREE_STATIC (TREE_VEC_ELT (t, i)))
11874 {
11875 gimple_seq initlist = NULL;
11876 force_gimple_operand (build1 (DECL_EXPR, void_type_node,
11877 TREE_VEC_ELT (t, i)),
11878 &initlist, true, NULL_TREE);
11879 gimple_seq_add_seq (&ilist, initlist);
11880
11881 tree clobber = build_constructor (TREE_TYPE (TREE_VEC_ELT (t, i)),
11882 NULL);
11883 TREE_THIS_VOLATILE (clobber) = 1;
11884 gimple_seq_add_stmt (&olist,
11885 gimple_build_assign (TREE_VEC_ELT (t, i),
11886 clobber));
11887 }
11888
11889 tree clobber = build_constructor (ctx->record_type, NULL);
11890 TREE_THIS_VOLATILE (clobber) = 1;
11891 gimple_seq_add_stmt (&olist, gimple_build_assign (ctx->sender_decl,
11892 clobber));
11893 }
11894
11895 /* Once all the expansions are done, sequence all the different
11896 fragments inside gimple_omp_body. */
11897
11898 new_body = NULL;
11899
11900 if (offloaded
11901 && ctx->record_type)
11902 {
11903 t = build_fold_addr_expr_loc (loc, ctx->sender_decl);
11904 /* fixup_child_record_type might have changed receiver_decl's type. */
11905 t = fold_convert_loc (loc, TREE_TYPE (ctx->receiver_decl), t);
11906 gimple_seq_add_stmt (&new_body,
11907 gimple_build_assign (ctx->receiver_decl, t));
11908 }
11909 gimple_seq_add_seq (&new_body, fplist);
11910
11911 if (offloaded || data_region)
11912 {
11913 tree prev = NULL_TREE;
11914 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
11915 switch (OMP_CLAUSE_CODE (c))
11916 {
11917 tree var, x;
11918 default:
11919 break;
11920 case OMP_CLAUSE_FIRSTPRIVATE:
11921 if (is_gimple_omp_oacc (ctx->stmt))
11922 break;
11923 var = OMP_CLAUSE_DECL (c);
11924 if (omp_is_reference (var)
11925 || is_gimple_reg_type (TREE_TYPE (var)))
11926 {
11927 tree new_var = lookup_decl (var, ctx);
11928 tree type;
11929 type = TREE_TYPE (var);
11930 if (omp_is_reference (var))
11931 type = TREE_TYPE (type);
11932 if ((INTEGRAL_TYPE_P (type)
11933 && TYPE_PRECISION (type) <= POINTER_SIZE)
11934 || TREE_CODE (type) == POINTER_TYPE)
11935 {
11936 x = build_receiver_ref (var, false, ctx);
11937 if (TREE_CODE (type) != POINTER_TYPE)
11938 x = fold_convert (pointer_sized_int_node, x);
11939 x = fold_convert (type, x);
11940 gimplify_expr (&x, &new_body, NULL, is_gimple_val,
11941 fb_rvalue);
11942 if (omp_is_reference (var))
11943 {
11944 tree v = create_tmp_var_raw (type, get_name (var));
11945 gimple_add_tmp_var (v);
11946 TREE_ADDRESSABLE (v) = 1;
11947 gimple_seq_add_stmt (&new_body,
11948 gimple_build_assign (v, x));
11949 x = build_fold_addr_expr (v);
11950 }
11951 gimple_seq_add_stmt (&new_body,
11952 gimple_build_assign (new_var, x));
11953 }
11954 else
11955 {
11956 x = build_receiver_ref (var, !omp_is_reference (var), ctx);
11957 gimplify_expr (&x, &new_body, NULL, is_gimple_val,
11958 fb_rvalue);
11959 gimple_seq_add_stmt (&new_body,
11960 gimple_build_assign (new_var, x));
11961 }
11962 }
11963 else if (is_variable_sized (var))
11964 {
11965 tree pvar = DECL_VALUE_EXPR (var);
11966 gcc_assert (TREE_CODE (pvar) == INDIRECT_REF);
11967 pvar = TREE_OPERAND (pvar, 0);
11968 gcc_assert (DECL_P (pvar));
11969 tree new_var = lookup_decl (pvar, ctx);
11970 x = build_receiver_ref (var, false, ctx);
11971 gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
11972 gimple_seq_add_stmt (&new_body,
11973 gimple_build_assign (new_var, x));
11974 }
11975 break;
11976 case OMP_CLAUSE_PRIVATE:
11977 if (is_gimple_omp_oacc (ctx->stmt))
11978 break;
11979 var = OMP_CLAUSE_DECL (c);
11980 if (omp_is_reference (var))
11981 {
11982 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
11983 tree new_var = lookup_decl (var, ctx);
11984 x = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (new_var)));
11985 if (TREE_CONSTANT (x))
11986 {
11987 x = create_tmp_var_raw (TREE_TYPE (TREE_TYPE (new_var)),
11988 get_name (var));
11989 gimple_add_tmp_var (x);
11990 TREE_ADDRESSABLE (x) = 1;
11991 x = build_fold_addr_expr_loc (clause_loc, x);
11992 }
11993 else
11994 break;
11995
11996 x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x);
11997 gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
11998 gimple_seq_add_stmt (&new_body,
11999 gimple_build_assign (new_var, x));
12000 }
12001 break;
12002 case OMP_CLAUSE_USE_DEVICE_PTR:
12003 case OMP_CLAUSE_IS_DEVICE_PTR:
12004 var = OMP_CLAUSE_DECL (c);
12005 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_USE_DEVICE_PTR)
12006 x = build_sender_ref (var, ctx);
12007 else
12008 x = build_receiver_ref (var, false, ctx);
12009 if (is_variable_sized (var))
12010 {
12011 tree pvar = DECL_VALUE_EXPR (var);
12012 gcc_assert (TREE_CODE (pvar) == INDIRECT_REF);
12013 pvar = TREE_OPERAND (pvar, 0);
12014 gcc_assert (DECL_P (pvar));
12015 tree new_var = lookup_decl (pvar, ctx);
12016 gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
12017 gimple_seq_add_stmt (&new_body,
12018 gimple_build_assign (new_var, x));
12019 }
12020 else if (TREE_CODE (TREE_TYPE (var)) == ARRAY_TYPE)
12021 {
12022 tree new_var = lookup_decl (var, ctx);
12023 new_var = DECL_VALUE_EXPR (new_var);
12024 gcc_assert (TREE_CODE (new_var) == MEM_REF);
12025 new_var = TREE_OPERAND (new_var, 0);
12026 gcc_assert (DECL_P (new_var));
12027 gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
12028 gimple_seq_add_stmt (&new_body,
12029 gimple_build_assign (new_var, x));
12030 }
12031 else
12032 {
12033 tree type = TREE_TYPE (var);
12034 tree new_var = lookup_decl (var, ctx);
12035 if (omp_is_reference (var))
12036 {
12037 type = TREE_TYPE (type);
12038 if (TREE_CODE (type) != ARRAY_TYPE)
12039 {
12040 tree v = create_tmp_var_raw (type, get_name (var));
12041 gimple_add_tmp_var (v);
12042 TREE_ADDRESSABLE (v) = 1;
12043 x = fold_convert (type, x);
12044 gimplify_expr (&x, &new_body, NULL, is_gimple_val,
12045 fb_rvalue);
12046 gimple_seq_add_stmt (&new_body,
12047 gimple_build_assign (v, x));
12048 x = build_fold_addr_expr (v);
12049 }
12050 }
12051 new_var = DECL_VALUE_EXPR (new_var);
12052 x = fold_convert (TREE_TYPE (new_var), x);
12053 gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
12054 gimple_seq_add_stmt (&new_body,
12055 gimple_build_assign (new_var, x));
12056 }
12057 break;
12058 }
12059 /* Handle GOMP_MAP_FIRSTPRIVATE_{POINTER,REFERENCE} in second pass,
12060 so that firstprivate vars holding OMP_CLAUSE_SIZE if needed
12061 are already handled. Similarly OMP_CLAUSE_PRIVATE for VLAs
12062 or references to VLAs. */
12063 for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
12064 switch (OMP_CLAUSE_CODE (c))
12065 {
12066 tree var;
12067 default:
12068 break;
12069 case OMP_CLAUSE_MAP:
12070 if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER
12071 || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_REFERENCE)
12072 {
12073 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
12074 poly_int64 offset = 0;
12075 gcc_assert (prev);
12076 var = OMP_CLAUSE_DECL (c);
12077 if (DECL_P (var)
12078 && TREE_CODE (TREE_TYPE (var)) == ARRAY_TYPE
12079 && is_global_var (maybe_lookup_decl_in_outer_ctx (var,
12080 ctx))
12081 && varpool_node::get_create (var)->offloadable)
12082 break;
12083 if (TREE_CODE (var) == INDIRECT_REF
12084 && TREE_CODE (TREE_OPERAND (var, 0)) == COMPONENT_REF)
12085 var = TREE_OPERAND (var, 0);
12086 if (TREE_CODE (var) == COMPONENT_REF)
12087 {
12088 var = get_addr_base_and_unit_offset (var, &offset);
12089 gcc_assert (var != NULL_TREE && DECL_P (var));
12090 }
12091 else if (DECL_SIZE (var)
12092 && TREE_CODE (DECL_SIZE (var)) != INTEGER_CST)
12093 {
12094 tree var2 = DECL_VALUE_EXPR (var);
12095 gcc_assert (TREE_CODE (var2) == INDIRECT_REF);
12096 var2 = TREE_OPERAND (var2, 0);
12097 gcc_assert (DECL_P (var2));
12098 var = var2;
12099 }
12100 tree new_var = lookup_decl (var, ctx), x;
12101 tree type = TREE_TYPE (new_var);
12102 bool is_ref;
12103 if (TREE_CODE (OMP_CLAUSE_DECL (c)) == INDIRECT_REF
12104 && (TREE_CODE (TREE_OPERAND (OMP_CLAUSE_DECL (c), 0))
12105 == COMPONENT_REF))
12106 {
12107 type = TREE_TYPE (TREE_OPERAND (OMP_CLAUSE_DECL (c), 0));
12108 is_ref = true;
12109 new_var = build2 (MEM_REF, type,
12110 build_fold_addr_expr (new_var),
12111 build_int_cst (build_pointer_type (type),
12112 offset));
12113 }
12114 else if (TREE_CODE (OMP_CLAUSE_DECL (c)) == COMPONENT_REF)
12115 {
12116 type = TREE_TYPE (OMP_CLAUSE_DECL (c));
12117 is_ref = TREE_CODE (type) == REFERENCE_TYPE;
12118 new_var = build2 (MEM_REF, type,
12119 build_fold_addr_expr (new_var),
12120 build_int_cst (build_pointer_type (type),
12121 offset));
12122 }
12123 else
12124 is_ref = omp_is_reference (var);
12125 if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_REFERENCE)
12126 is_ref = false;
12127 bool ref_to_array = false;
12128 if (is_ref)
12129 {
12130 type = TREE_TYPE (type);
12131 if (TREE_CODE (type) == ARRAY_TYPE)
12132 {
12133 type = build_pointer_type (type);
12134 ref_to_array = true;
12135 }
12136 }
12137 else if (TREE_CODE (type) == ARRAY_TYPE)
12138 {
12139 tree decl2 = DECL_VALUE_EXPR (new_var);
12140 gcc_assert (TREE_CODE (decl2) == MEM_REF);
12141 decl2 = TREE_OPERAND (decl2, 0);
12142 gcc_assert (DECL_P (decl2));
12143 new_var = decl2;
12144 type = TREE_TYPE (new_var);
12145 }
12146 x = build_receiver_ref (OMP_CLAUSE_DECL (prev), false, ctx);
12147 x = fold_convert_loc (clause_loc, type, x);
12148 if (!integer_zerop (OMP_CLAUSE_SIZE (c)))
12149 {
12150 tree bias = OMP_CLAUSE_SIZE (c);
12151 if (DECL_P (bias))
12152 bias = lookup_decl (bias, ctx);
12153 bias = fold_convert_loc (clause_loc, sizetype, bias);
12154 bias = fold_build1_loc (clause_loc, NEGATE_EXPR, sizetype,
12155 bias);
12156 x = fold_build2_loc (clause_loc, POINTER_PLUS_EXPR,
12157 TREE_TYPE (x), x, bias);
12158 }
12159 if (ref_to_array)
12160 x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x);
12161 gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
12162 if (is_ref && !ref_to_array)
12163 {
12164 tree t = create_tmp_var_raw (type, get_name (var));
12165 gimple_add_tmp_var (t);
12166 TREE_ADDRESSABLE (t) = 1;
12167 gimple_seq_add_stmt (&new_body,
12168 gimple_build_assign (t, x));
12169 x = build_fold_addr_expr_loc (clause_loc, t);
12170 }
12171 gimple_seq_add_stmt (&new_body,
12172 gimple_build_assign (new_var, x));
12173 prev = NULL_TREE;
12174 }
12175 else if (OMP_CLAUSE_CHAIN (c)
12176 && OMP_CLAUSE_CODE (OMP_CLAUSE_CHAIN (c))
12177 == OMP_CLAUSE_MAP
12178 && (OMP_CLAUSE_MAP_KIND (OMP_CLAUSE_CHAIN (c))
12179 == GOMP_MAP_FIRSTPRIVATE_POINTER
12180 || (OMP_CLAUSE_MAP_KIND (OMP_CLAUSE_CHAIN (c))
12181 == GOMP_MAP_FIRSTPRIVATE_REFERENCE)))
12182 prev = c;
12183 break;
12184 case OMP_CLAUSE_PRIVATE:
12185 var = OMP_CLAUSE_DECL (c);
12186 if (is_variable_sized (var))
12187 {
12188 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
12189 tree new_var = lookup_decl (var, ctx);
12190 tree pvar = DECL_VALUE_EXPR (var);
12191 gcc_assert (TREE_CODE (pvar) == INDIRECT_REF);
12192 pvar = TREE_OPERAND (pvar, 0);
12193 gcc_assert (DECL_P (pvar));
12194 tree new_pvar = lookup_decl (pvar, ctx);
12195 tree atmp = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
12196 tree al = size_int (DECL_ALIGN (var));
12197 tree x = TYPE_SIZE_UNIT (TREE_TYPE (new_var));
12198 x = build_call_expr_loc (clause_loc, atmp, 2, x, al);
12199 x = fold_convert_loc (clause_loc, TREE_TYPE (new_pvar), x);
12200 gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
12201 gimple_seq_add_stmt (&new_body,
12202 gimple_build_assign (new_pvar, x));
12203 }
12204 else if (omp_is_reference (var) && !is_gimple_omp_oacc (ctx->stmt))
12205 {
12206 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
12207 tree new_var = lookup_decl (var, ctx);
12208 tree x = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (new_var)));
12209 if (TREE_CONSTANT (x))
12210 break;
12211 else
12212 {
12213 tree atmp
12214 = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
12215 tree rtype = TREE_TYPE (TREE_TYPE (new_var));
12216 tree al = size_int (TYPE_ALIGN (rtype));
12217 x = build_call_expr_loc (clause_loc, atmp, 2, x, al);
12218 }
12219
12220 x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x);
12221 gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
12222 gimple_seq_add_stmt (&new_body,
12223 gimple_build_assign (new_var, x));
12224 }
12225 break;
12226 }
12227
12228 gimple_seq fork_seq = NULL;
12229 gimple_seq join_seq = NULL;
12230
12231 if (is_oacc_parallel (ctx))
12232 {
12233 /* If there are reductions on the offloaded region itself, treat
12234 them as a dummy GANG loop. */
12235 tree level = build_int_cst (integer_type_node, GOMP_DIM_GANG);
12236
12237 lower_oacc_reductions (gimple_location (ctx->stmt), clauses, level,
12238 false, NULL, NULL, &fork_seq, &join_seq, ctx);
12239 }
12240
12241 gimple_seq_add_seq (&new_body, fork_seq);
12242 gimple_seq_add_seq (&new_body, tgt_body);
12243 gimple_seq_add_seq (&new_body, join_seq);
12244
12245 if (offloaded)
12246 new_body = maybe_catch_exception (new_body);
12247
12248 gimple_seq_add_stmt (&new_body, gimple_build_omp_return (false));
12249 gimple_omp_set_body (stmt, new_body);
12250 }
12251
12252 bind = gimple_build_bind (NULL, NULL,
12253 tgt_bind ? gimple_bind_block (tgt_bind)
12254 : NULL_TREE);
12255 gsi_replace (gsi_p, dep_bind ? dep_bind : bind, true);
12256 gimple_bind_add_seq (bind, ilist);
12257 gimple_bind_add_stmt (bind, stmt);
12258 gimple_bind_add_seq (bind, olist);
12259
12260 pop_gimplify_context (NULL);
12261
12262 if (dep_bind)
12263 {
12264 gimple_bind_add_seq (dep_bind, dep_ilist);
12265 gimple_bind_add_stmt (dep_bind, bind);
12266 gimple_bind_add_seq (dep_bind, dep_olist);
12267 pop_gimplify_context (dep_bind);
12268 }
12269 }
12270
12271 /* Expand code for an OpenMP teams directive. */
12272
12273 static void
12274 lower_omp_teams (gimple_stmt_iterator *gsi_p, omp_context *ctx)
12275 {
12276 gomp_teams *teams_stmt = as_a <gomp_teams *> (gsi_stmt (*gsi_p));
12277 push_gimplify_context ();
12278
12279 tree block = make_node (BLOCK);
12280 gbind *bind = gimple_build_bind (NULL, NULL, block);
12281 gsi_replace (gsi_p, bind, true);
12282 gimple_seq bind_body = NULL;
12283 gimple_seq dlist = NULL;
12284 gimple_seq olist = NULL;
12285
12286 tree num_teams = omp_find_clause (gimple_omp_teams_clauses (teams_stmt),
12287 OMP_CLAUSE_NUM_TEAMS);
12288 if (num_teams == NULL_TREE)
12289 num_teams = build_int_cst (unsigned_type_node, 0);
12290 else
12291 {
12292 num_teams = OMP_CLAUSE_NUM_TEAMS_EXPR (num_teams);
12293 num_teams = fold_convert (unsigned_type_node, num_teams);
12294 gimplify_expr (&num_teams, &bind_body, NULL, is_gimple_val, fb_rvalue);
12295 }
12296 tree thread_limit = omp_find_clause (gimple_omp_teams_clauses (teams_stmt),
12297 OMP_CLAUSE_THREAD_LIMIT);
12298 if (thread_limit == NULL_TREE)
12299 thread_limit = build_int_cst (unsigned_type_node, 0);
12300 else
12301 {
12302 thread_limit = OMP_CLAUSE_THREAD_LIMIT_EXPR (thread_limit);
12303 thread_limit = fold_convert (unsigned_type_node, thread_limit);
12304 gimplify_expr (&thread_limit, &bind_body, NULL, is_gimple_val,
12305 fb_rvalue);
12306 }
12307
12308 lower_rec_input_clauses (gimple_omp_teams_clauses (teams_stmt),
12309 &bind_body, &dlist, ctx, NULL);
12310 lower_omp (gimple_omp_body_ptr (teams_stmt), ctx);
12311 lower_reduction_clauses (gimple_omp_teams_clauses (teams_stmt), &olist,
12312 NULL, ctx);
12313 if (!gimple_omp_teams_grid_phony (teams_stmt))
12314 {
12315 gimple_seq_add_stmt (&bind_body, teams_stmt);
12316 location_t loc = gimple_location (teams_stmt);
12317 tree decl = builtin_decl_explicit (BUILT_IN_GOMP_TEAMS);
12318 gimple *call = gimple_build_call (decl, 2, num_teams, thread_limit);
12319 gimple_set_location (call, loc);
12320 gimple_seq_add_stmt (&bind_body, call);
12321 }
12322
12323 gimple_seq_add_seq (&bind_body, gimple_omp_body (teams_stmt));
12324 gimple_omp_set_body (teams_stmt, NULL);
12325 gimple_seq_add_seq (&bind_body, olist);
12326 gimple_seq_add_seq (&bind_body, dlist);
12327 if (!gimple_omp_teams_grid_phony (teams_stmt))
12328 gimple_seq_add_stmt (&bind_body, gimple_build_omp_return (true));
12329 gimple_bind_set_body (bind, bind_body);
12330
12331 pop_gimplify_context (bind);
12332
12333 gimple_bind_append_vars (bind, ctx->block_vars);
12334 BLOCK_VARS (block) = ctx->block_vars;
12335 if (BLOCK_VARS (block))
12336 TREE_USED (block) = 1;
12337 }
12338
12339 /* Expand code within an artificial GIMPLE_OMP_GRID_BODY OMP construct. */
12340
12341 static void
12342 lower_omp_grid_body (gimple_stmt_iterator *gsi_p, omp_context *ctx)
12343 {
12344 gimple *stmt = gsi_stmt (*gsi_p);
12345 lower_omp (gimple_omp_body_ptr (stmt), ctx);
12346 gimple_seq_add_stmt (gimple_omp_body_ptr (stmt),
12347 gimple_build_omp_return (false));
12348 }
12349
12350
12351 /* Callback for lower_omp_1. Return non-NULL if *tp needs to be
12352 regimplified. If DATA is non-NULL, lower_omp_1 is outside
12353 of OMP context, but with task_shared_vars set. */
12354
12355 static tree
12356 lower_omp_regimplify_p (tree *tp, int *walk_subtrees,
12357 void *data)
12358 {
12359 tree t = *tp;
12360
12361 /* Any variable with DECL_VALUE_EXPR needs to be regimplified. */
12362 if (VAR_P (t) && data == NULL && DECL_HAS_VALUE_EXPR_P (t))
12363 return t;
12364
12365 if (task_shared_vars
12366 && DECL_P (t)
12367 && bitmap_bit_p (task_shared_vars, DECL_UID (t)))
12368 return t;
12369
12370 /* If a global variable has been privatized, TREE_CONSTANT on
12371 ADDR_EXPR might be wrong. */
12372 if (data == NULL && TREE_CODE (t) == ADDR_EXPR)
12373 recompute_tree_invariant_for_addr_expr (t);
12374
12375 *walk_subtrees = !IS_TYPE_OR_DECL_P (t);
12376 return NULL_TREE;
12377 }
12378
12379 /* Data to be communicated between lower_omp_regimplify_operands and
12380 lower_omp_regimplify_operands_p. */
12381
12382 struct lower_omp_regimplify_operands_data
12383 {
12384 omp_context *ctx;
12385 vec<tree> *decls;
12386 };
12387
12388 /* Helper function for lower_omp_regimplify_operands. Find
12389 omp_member_access_dummy_var vars and adjust temporarily their
12390 DECL_VALUE_EXPRs if needed. */
12391
12392 static tree
12393 lower_omp_regimplify_operands_p (tree *tp, int *walk_subtrees,
12394 void *data)
12395 {
12396 tree t = omp_member_access_dummy_var (*tp);
12397 if (t)
12398 {
12399 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
12400 lower_omp_regimplify_operands_data *ldata
12401 = (lower_omp_regimplify_operands_data *) wi->info;
12402 tree o = maybe_lookup_decl (t, ldata->ctx);
12403 if (o != t)
12404 {
12405 ldata->decls->safe_push (DECL_VALUE_EXPR (*tp));
12406 ldata->decls->safe_push (*tp);
12407 tree v = unshare_and_remap (DECL_VALUE_EXPR (*tp), t, o);
12408 SET_DECL_VALUE_EXPR (*tp, v);
12409 }
12410 }
12411 *walk_subtrees = !IS_TYPE_OR_DECL_P (*tp);
12412 return NULL_TREE;
12413 }
12414
12415 /* Wrapper around gimple_regimplify_operands that adjusts DECL_VALUE_EXPRs
12416 of omp_member_access_dummy_var vars during regimplification. */
12417
12418 static void
12419 lower_omp_regimplify_operands (omp_context *ctx, gimple *stmt,
12420 gimple_stmt_iterator *gsi_p)
12421 {
12422 auto_vec<tree, 10> decls;
12423 if (ctx)
12424 {
12425 struct walk_stmt_info wi;
12426 memset (&wi, '\0', sizeof (wi));
12427 struct lower_omp_regimplify_operands_data data;
12428 data.ctx = ctx;
12429 data.decls = &decls;
12430 wi.info = &data;
12431 walk_gimple_op (stmt, lower_omp_regimplify_operands_p, &wi);
12432 }
12433 gimple_regimplify_operands (stmt, gsi_p);
12434 while (!decls.is_empty ())
12435 {
12436 tree t = decls.pop ();
12437 tree v = decls.pop ();
12438 SET_DECL_VALUE_EXPR (t, v);
12439 }
12440 }
12441
12442 static void
12443 lower_omp_1 (gimple_stmt_iterator *gsi_p, omp_context *ctx)
12444 {
12445 gimple *stmt = gsi_stmt (*gsi_p);
12446 struct walk_stmt_info wi;
12447 gcall *call_stmt;
12448
12449 if (gimple_has_location (stmt))
12450 input_location = gimple_location (stmt);
12451
12452 if (task_shared_vars)
12453 memset (&wi, '\0', sizeof (wi));
12454
12455 /* If we have issued syntax errors, avoid doing any heavy lifting.
12456 Just replace the OMP directives with a NOP to avoid
12457 confusing RTL expansion. */
12458 if (seen_error () && is_gimple_omp (stmt))
12459 {
12460 gsi_replace (gsi_p, gimple_build_nop (), true);
12461 return;
12462 }
12463
12464 switch (gimple_code (stmt))
12465 {
12466 case GIMPLE_COND:
12467 {
12468 gcond *cond_stmt = as_a <gcond *> (stmt);
12469 if ((ctx || task_shared_vars)
12470 && (walk_tree (gimple_cond_lhs_ptr (cond_stmt),
12471 lower_omp_regimplify_p,
12472 ctx ? NULL : &wi, NULL)
12473 || walk_tree (gimple_cond_rhs_ptr (cond_stmt),
12474 lower_omp_regimplify_p,
12475 ctx ? NULL : &wi, NULL)))
12476 lower_omp_regimplify_operands (ctx, cond_stmt, gsi_p);
12477 }
12478 break;
12479 case GIMPLE_CATCH:
12480 lower_omp (gimple_catch_handler_ptr (as_a <gcatch *> (stmt)), ctx);
12481 break;
12482 case GIMPLE_EH_FILTER:
12483 lower_omp (gimple_eh_filter_failure_ptr (stmt), ctx);
12484 break;
12485 case GIMPLE_TRY:
12486 lower_omp (gimple_try_eval_ptr (stmt), ctx);
12487 lower_omp (gimple_try_cleanup_ptr (stmt), ctx);
12488 break;
12489 case GIMPLE_TRANSACTION:
12490 lower_omp (gimple_transaction_body_ptr (as_a <gtransaction *> (stmt)),
12491 ctx);
12492 break;
12493 case GIMPLE_BIND:
12494 lower_omp (gimple_bind_body_ptr (as_a <gbind *> (stmt)), ctx);
12495 maybe_remove_omp_member_access_dummy_vars (as_a <gbind *> (stmt));
12496 break;
12497 case GIMPLE_OMP_PARALLEL:
12498 case GIMPLE_OMP_TASK:
12499 ctx = maybe_lookup_ctx (stmt);
12500 gcc_assert (ctx);
12501 if (ctx->cancellable)
12502 ctx->cancel_label = create_artificial_label (UNKNOWN_LOCATION);
12503 lower_omp_taskreg (gsi_p, ctx);
12504 break;
12505 case GIMPLE_OMP_FOR:
12506 ctx = maybe_lookup_ctx (stmt);
12507 gcc_assert (ctx);
12508 if (ctx->cancellable)
12509 ctx->cancel_label = create_artificial_label (UNKNOWN_LOCATION);
12510 lower_omp_for (gsi_p, ctx);
12511 break;
12512 case GIMPLE_OMP_SECTIONS:
12513 ctx = maybe_lookup_ctx (stmt);
12514 gcc_assert (ctx);
12515 if (ctx->cancellable)
12516 ctx->cancel_label = create_artificial_label (UNKNOWN_LOCATION);
12517 lower_omp_sections (gsi_p, ctx);
12518 break;
12519 case GIMPLE_OMP_SINGLE:
12520 ctx = maybe_lookup_ctx (stmt);
12521 gcc_assert (ctx);
12522 lower_omp_single (gsi_p, ctx);
12523 break;
12524 case GIMPLE_OMP_MASTER:
12525 ctx = maybe_lookup_ctx (stmt);
12526 gcc_assert (ctx);
12527 lower_omp_master (gsi_p, ctx);
12528 break;
12529 case GIMPLE_OMP_TASKGROUP:
12530 ctx = maybe_lookup_ctx (stmt);
12531 gcc_assert (ctx);
12532 lower_omp_taskgroup (gsi_p, ctx);
12533 break;
12534 case GIMPLE_OMP_ORDERED:
12535 ctx = maybe_lookup_ctx (stmt);
12536 gcc_assert (ctx);
12537 lower_omp_ordered (gsi_p, ctx);
12538 break;
12539 case GIMPLE_OMP_SCAN:
12540 ctx = maybe_lookup_ctx (stmt);
12541 gcc_assert (ctx);
12542 lower_omp_scan (gsi_p, ctx);
12543 break;
12544 case GIMPLE_OMP_CRITICAL:
12545 ctx = maybe_lookup_ctx (stmt);
12546 gcc_assert (ctx);
12547 lower_omp_critical (gsi_p, ctx);
12548 break;
12549 case GIMPLE_OMP_ATOMIC_LOAD:
12550 if ((ctx || task_shared_vars)
12551 && walk_tree (gimple_omp_atomic_load_rhs_ptr (
12552 as_a <gomp_atomic_load *> (stmt)),
12553 lower_omp_regimplify_p, ctx ? NULL : &wi, NULL))
12554 lower_omp_regimplify_operands (ctx, stmt, gsi_p);
12555 break;
12556 case GIMPLE_OMP_TARGET:
12557 ctx = maybe_lookup_ctx (stmt);
12558 gcc_assert (ctx);
12559 lower_omp_target (gsi_p, ctx);
12560 break;
12561 case GIMPLE_OMP_TEAMS:
12562 ctx = maybe_lookup_ctx (stmt);
12563 gcc_assert (ctx);
12564 if (gimple_omp_teams_host (as_a <gomp_teams *> (stmt)))
12565 lower_omp_taskreg (gsi_p, ctx);
12566 else
12567 lower_omp_teams (gsi_p, ctx);
12568 break;
12569 case GIMPLE_OMP_GRID_BODY:
12570 ctx = maybe_lookup_ctx (stmt);
12571 gcc_assert (ctx);
12572 lower_omp_grid_body (gsi_p, ctx);
12573 break;
12574 case GIMPLE_CALL:
12575 tree fndecl;
12576 call_stmt = as_a <gcall *> (stmt);
12577 fndecl = gimple_call_fndecl (call_stmt);
12578 if (fndecl
12579 && fndecl_built_in_p (fndecl, BUILT_IN_NORMAL))
12580 switch (DECL_FUNCTION_CODE (fndecl))
12581 {
12582 case BUILT_IN_GOMP_BARRIER:
12583 if (ctx == NULL)
12584 break;
12585 /* FALLTHRU */
12586 case BUILT_IN_GOMP_CANCEL:
12587 case BUILT_IN_GOMP_CANCELLATION_POINT:
12588 omp_context *cctx;
12589 cctx = ctx;
12590 if (gimple_code (cctx->stmt) == GIMPLE_OMP_SECTION)
12591 cctx = cctx->outer;
12592 gcc_assert (gimple_call_lhs (call_stmt) == NULL_TREE);
12593 if (!cctx->cancellable)
12594 {
12595 if (DECL_FUNCTION_CODE (fndecl)
12596 == BUILT_IN_GOMP_CANCELLATION_POINT)
12597 {
12598 stmt = gimple_build_nop ();
12599 gsi_replace (gsi_p, stmt, false);
12600 }
12601 break;
12602 }
12603 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_GOMP_BARRIER)
12604 {
12605 fndecl = builtin_decl_explicit (BUILT_IN_GOMP_BARRIER_CANCEL);
12606 gimple_call_set_fndecl (call_stmt, fndecl);
12607 gimple_call_set_fntype (call_stmt, TREE_TYPE (fndecl));
12608 }
12609 tree lhs;
12610 lhs = create_tmp_var (TREE_TYPE (TREE_TYPE (fndecl)));
12611 gimple_call_set_lhs (call_stmt, lhs);
12612 tree fallthru_label;
12613 fallthru_label = create_artificial_label (UNKNOWN_LOCATION);
12614 gimple *g;
12615 g = gimple_build_label (fallthru_label);
12616 gsi_insert_after (gsi_p, g, GSI_SAME_STMT);
12617 g = gimple_build_cond (NE_EXPR, lhs,
12618 fold_convert (TREE_TYPE (lhs),
12619 boolean_false_node),
12620 cctx->cancel_label, fallthru_label);
12621 gsi_insert_after (gsi_p, g, GSI_SAME_STMT);
12622 break;
12623 default:
12624 break;
12625 }
12626 goto regimplify;
12627
12628 case GIMPLE_ASSIGN:
12629 for (omp_context *up = ctx; up; up = up->outer)
12630 {
12631 if (gimple_code (up->stmt) == GIMPLE_OMP_ORDERED
12632 || gimple_code (up->stmt) == GIMPLE_OMP_CRITICAL
12633 || gimple_code (up->stmt) == GIMPLE_OMP_TASKGROUP
12634 || gimple_code (up->stmt) == GIMPLE_OMP_SECTION
12635 || gimple_code (up->stmt) == GIMPLE_OMP_SCAN
12636 || (gimple_code (up->stmt) == GIMPLE_OMP_TARGET
12637 && (gimple_omp_target_kind (up->stmt)
12638 == GF_OMP_TARGET_KIND_DATA)))
12639 continue;
12640 else if (!up->lastprivate_conditional_map)
12641 break;
12642 tree lhs = get_base_address (gimple_assign_lhs (stmt));
12643 if (TREE_CODE (lhs) == MEM_REF
12644 && DECL_P (TREE_OPERAND (lhs, 0))
12645 && TREE_CODE (TREE_TYPE (TREE_OPERAND (lhs,
12646 0))) == REFERENCE_TYPE)
12647 lhs = TREE_OPERAND (lhs, 0);
12648 if (DECL_P (lhs))
12649 if (tree *v = up->lastprivate_conditional_map->get (lhs))
12650 {
12651 tree clauses;
12652 if (up->combined_into_simd_safelen1)
12653 {
12654 up = up->outer;
12655 if (gimple_code (up->stmt) == GIMPLE_OMP_SCAN)
12656 up = up->outer;
12657 }
12658 if (gimple_code (up->stmt) == GIMPLE_OMP_FOR)
12659 clauses = gimple_omp_for_clauses (up->stmt);
12660 else
12661 clauses = gimple_omp_sections_clauses (up->stmt);
12662 tree c = omp_find_clause (clauses, OMP_CLAUSE__CONDTEMP_);
12663 if (!OMP_CLAUSE__CONDTEMP__ITER (c))
12664 c = omp_find_clause (OMP_CLAUSE_CHAIN (c),
12665 OMP_CLAUSE__CONDTEMP_);
12666 gcc_assert (OMP_CLAUSE__CONDTEMP__ITER (c));
12667 gimple *g = gimple_build_assign (*v, OMP_CLAUSE_DECL (c));
12668 gsi_insert_after (gsi_p, g, GSI_SAME_STMT);
12669 }
12670 }
12671 /* FALLTHRU */
12672
12673 default:
12674 regimplify:
12675 if ((ctx || task_shared_vars)
12676 && walk_gimple_op (stmt, lower_omp_regimplify_p,
12677 ctx ? NULL : &wi))
12678 {
12679 /* Just remove clobbers, this should happen only if we have
12680 "privatized" local addressable variables in SIMD regions,
12681 the clobber isn't needed in that case and gimplifying address
12682 of the ARRAY_REF into a pointer and creating MEM_REF based
12683 clobber would create worse code than we get with the clobber
12684 dropped. */
12685 if (gimple_clobber_p (stmt))
12686 {
12687 gsi_replace (gsi_p, gimple_build_nop (), true);
12688 break;
12689 }
12690 lower_omp_regimplify_operands (ctx, stmt, gsi_p);
12691 }
12692 break;
12693 }
12694 }
12695
12696 static void
12697 lower_omp (gimple_seq *body, omp_context *ctx)
12698 {
12699 location_t saved_location = input_location;
12700 gimple_stmt_iterator gsi;
12701 for (gsi = gsi_start (*body); !gsi_end_p (gsi); gsi_next (&gsi))
12702 lower_omp_1 (&gsi, ctx);
12703 /* During gimplification, we haven't folded statments inside offloading
12704 or taskreg regions (gimplify.c:maybe_fold_stmt); do that now. */
12705 if (target_nesting_level || taskreg_nesting_level)
12706 for (gsi = gsi_start (*body); !gsi_end_p (gsi); gsi_next (&gsi))
12707 fold_stmt (&gsi);
12708 input_location = saved_location;
12709 }
12710
12711 /* Main entry point. */
12712
12713 static unsigned int
12714 execute_lower_omp (void)
12715 {
12716 gimple_seq body;
12717 int i;
12718 omp_context *ctx;
12719
12720 /* This pass always runs, to provide PROP_gimple_lomp.
12721 But often, there is nothing to do. */
12722 if (flag_openacc == 0 && flag_openmp == 0
12723 && flag_openmp_simd == 0)
12724 return 0;
12725
12726 all_contexts = splay_tree_new (splay_tree_compare_pointers, 0,
12727 delete_omp_context);
12728
12729 body = gimple_body (current_function_decl);
12730
12731 if (hsa_gen_requested_p ())
12732 omp_grid_gridify_all_targets (&body);
12733
12734 scan_omp (&body, NULL);
12735 gcc_assert (taskreg_nesting_level == 0);
12736 FOR_EACH_VEC_ELT (taskreg_contexts, i, ctx)
12737 finish_taskreg_scan (ctx);
12738 taskreg_contexts.release ();
12739
12740 if (all_contexts->root)
12741 {
12742 if (task_shared_vars)
12743 push_gimplify_context ();
12744 lower_omp (&body, NULL);
12745 if (task_shared_vars)
12746 pop_gimplify_context (NULL);
12747 }
12748
12749 if (all_contexts)
12750 {
12751 splay_tree_delete (all_contexts);
12752 all_contexts = NULL;
12753 }
12754 BITMAP_FREE (task_shared_vars);
12755 BITMAP_FREE (global_nonaddressable_vars);
12756
12757 /* If current function is a method, remove artificial dummy VAR_DECL created
12758 for non-static data member privatization, they aren't needed for
12759 debuginfo nor anything else, have been already replaced everywhere in the
12760 IL and cause problems with LTO. */
12761 if (DECL_ARGUMENTS (current_function_decl)
12762 && DECL_ARTIFICIAL (DECL_ARGUMENTS (current_function_decl))
12763 && (TREE_CODE (TREE_TYPE (DECL_ARGUMENTS (current_function_decl)))
12764 == POINTER_TYPE))
12765 remove_member_access_dummy_vars (DECL_INITIAL (current_function_decl));
12766 return 0;
12767 }
12768
12769 namespace {
12770
12771 const pass_data pass_data_lower_omp =
12772 {
12773 GIMPLE_PASS, /* type */
12774 "omplower", /* name */
12775 OPTGROUP_OMP, /* optinfo_flags */
12776 TV_NONE, /* tv_id */
12777 PROP_gimple_any, /* properties_required */
12778 PROP_gimple_lomp | PROP_gimple_lomp_dev, /* properties_provided */
12779 0, /* properties_destroyed */
12780 0, /* todo_flags_start */
12781 0, /* todo_flags_finish */
12782 };
12783
12784 class pass_lower_omp : public gimple_opt_pass
12785 {
12786 public:
12787 pass_lower_omp (gcc::context *ctxt)
12788 : gimple_opt_pass (pass_data_lower_omp, ctxt)
12789 {}
12790
12791 /* opt_pass methods: */
12792 virtual unsigned int execute (function *) { return execute_lower_omp (); }
12793
12794 }; // class pass_lower_omp
12795
12796 } // anon namespace
12797
12798 gimple_opt_pass *
12799 make_pass_lower_omp (gcc::context *ctxt)
12800 {
12801 return new pass_lower_omp (ctxt);
12802 }
12803 \f
12804 /* The following is a utility to diagnose structured block violations.
12805 It is not part of the "omplower" pass, as that's invoked too late. It
12806 should be invoked by the respective front ends after gimplification. */
12807
12808 static splay_tree all_labels;
12809
12810 /* Check for mismatched contexts and generate an error if needed. Return
12811 true if an error is detected. */
12812
12813 static bool
12814 diagnose_sb_0 (gimple_stmt_iterator *gsi_p,
12815 gimple *branch_ctx, gimple *label_ctx)
12816 {
12817 gcc_checking_assert (!branch_ctx || is_gimple_omp (branch_ctx));
12818 gcc_checking_assert (!label_ctx || is_gimple_omp (label_ctx));
12819
12820 if (label_ctx == branch_ctx)
12821 return false;
12822
12823 const char* kind = NULL;
12824
12825 if (flag_openacc)
12826 {
12827 if ((branch_ctx && is_gimple_omp_oacc (branch_ctx))
12828 || (label_ctx && is_gimple_omp_oacc (label_ctx)))
12829 {
12830 gcc_checking_assert (kind == NULL);
12831 kind = "OpenACC";
12832 }
12833 }
12834 if (kind == NULL)
12835 {
12836 gcc_checking_assert (flag_openmp || flag_openmp_simd);
12837 kind = "OpenMP";
12838 }
12839
12840 /* Previously we kept track of the label's entire context in diagnose_sb_[12]
12841 so we could traverse it and issue a correct "exit" or "enter" error
12842 message upon a structured block violation.
12843
12844 We built the context by building a list with tree_cons'ing, but there is
12845 no easy counterpart in gimple tuples. It seems like far too much work
12846 for issuing exit/enter error messages. If someone really misses the
12847 distinct error message... patches welcome. */
12848
12849 #if 0
12850 /* Try to avoid confusing the user by producing and error message
12851 with correct "exit" or "enter" verbiage. We prefer "exit"
12852 unless we can show that LABEL_CTX is nested within BRANCH_CTX. */
12853 if (branch_ctx == NULL)
12854 exit_p = false;
12855 else
12856 {
12857 while (label_ctx)
12858 {
12859 if (TREE_VALUE (label_ctx) == branch_ctx)
12860 {
12861 exit_p = false;
12862 break;
12863 }
12864 label_ctx = TREE_CHAIN (label_ctx);
12865 }
12866 }
12867
12868 if (exit_p)
12869 error ("invalid exit from %s structured block", kind);
12870 else
12871 error ("invalid entry to %s structured block", kind);
12872 #endif
12873
12874 /* If it's obvious we have an invalid entry, be specific about the error. */
12875 if (branch_ctx == NULL)
12876 error ("invalid entry to %s structured block", kind);
12877 else
12878 {
12879 /* Otherwise, be vague and lazy, but efficient. */
12880 error ("invalid branch to/from %s structured block", kind);
12881 }
12882
12883 gsi_replace (gsi_p, gimple_build_nop (), false);
12884 return true;
12885 }
12886
12887 /* Pass 1: Create a minimal tree of structured blocks, and record
12888 where each label is found. */
12889
12890 static tree
12891 diagnose_sb_1 (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
12892 struct walk_stmt_info *wi)
12893 {
12894 gimple *context = (gimple *) wi->info;
12895 gimple *inner_context;
12896 gimple *stmt = gsi_stmt (*gsi_p);
12897
12898 *handled_ops_p = true;
12899
12900 switch (gimple_code (stmt))
12901 {
12902 WALK_SUBSTMTS;
12903
12904 case GIMPLE_OMP_PARALLEL:
12905 case GIMPLE_OMP_TASK:
12906 case GIMPLE_OMP_SECTIONS:
12907 case GIMPLE_OMP_SINGLE:
12908 case GIMPLE_OMP_SECTION:
12909 case GIMPLE_OMP_MASTER:
12910 case GIMPLE_OMP_ORDERED:
12911 case GIMPLE_OMP_SCAN:
12912 case GIMPLE_OMP_CRITICAL:
12913 case GIMPLE_OMP_TARGET:
12914 case GIMPLE_OMP_TEAMS:
12915 case GIMPLE_OMP_TASKGROUP:
12916 /* The minimal context here is just the current OMP construct. */
12917 inner_context = stmt;
12918 wi->info = inner_context;
12919 walk_gimple_seq (gimple_omp_body (stmt), diagnose_sb_1, NULL, wi);
12920 wi->info = context;
12921 break;
12922
12923 case GIMPLE_OMP_FOR:
12924 inner_context = stmt;
12925 wi->info = inner_context;
12926 /* gimple_omp_for_{index,initial,final} are all DECLs; no need to
12927 walk them. */
12928 walk_gimple_seq (gimple_omp_for_pre_body (stmt),
12929 diagnose_sb_1, NULL, wi);
12930 walk_gimple_seq (gimple_omp_body (stmt), diagnose_sb_1, NULL, wi);
12931 wi->info = context;
12932 break;
12933
12934 case GIMPLE_LABEL:
12935 splay_tree_insert (all_labels,
12936 (splay_tree_key) gimple_label_label (
12937 as_a <glabel *> (stmt)),
12938 (splay_tree_value) context);
12939 break;
12940
12941 default:
12942 break;
12943 }
12944
12945 return NULL_TREE;
12946 }
12947
12948 /* Pass 2: Check each branch and see if its context differs from that of
12949 the destination label's context. */
12950
12951 static tree
12952 diagnose_sb_2 (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
12953 struct walk_stmt_info *wi)
12954 {
12955 gimple *context = (gimple *) wi->info;
12956 splay_tree_node n;
12957 gimple *stmt = gsi_stmt (*gsi_p);
12958
12959 *handled_ops_p = true;
12960
12961 switch (gimple_code (stmt))
12962 {
12963 WALK_SUBSTMTS;
12964
12965 case GIMPLE_OMP_PARALLEL:
12966 case GIMPLE_OMP_TASK:
12967 case GIMPLE_OMP_SECTIONS:
12968 case GIMPLE_OMP_SINGLE:
12969 case GIMPLE_OMP_SECTION:
12970 case GIMPLE_OMP_MASTER:
12971 case GIMPLE_OMP_ORDERED:
12972 case GIMPLE_OMP_SCAN:
12973 case GIMPLE_OMP_CRITICAL:
12974 case GIMPLE_OMP_TARGET:
12975 case GIMPLE_OMP_TEAMS:
12976 case GIMPLE_OMP_TASKGROUP:
12977 wi->info = stmt;
12978 walk_gimple_seq_mod (gimple_omp_body_ptr (stmt), diagnose_sb_2, NULL, wi);
12979 wi->info = context;
12980 break;
12981
12982 case GIMPLE_OMP_FOR:
12983 wi->info = stmt;
12984 /* gimple_omp_for_{index,initial,final} are all DECLs; no need to
12985 walk them. */
12986 walk_gimple_seq_mod (gimple_omp_for_pre_body_ptr (stmt),
12987 diagnose_sb_2, NULL, wi);
12988 walk_gimple_seq_mod (gimple_omp_body_ptr (stmt), diagnose_sb_2, NULL, wi);
12989 wi->info = context;
12990 break;
12991
12992 case GIMPLE_COND:
12993 {
12994 gcond *cond_stmt = as_a <gcond *> (stmt);
12995 tree lab = gimple_cond_true_label (cond_stmt);
12996 if (lab)
12997 {
12998 n = splay_tree_lookup (all_labels,
12999 (splay_tree_key) lab);
13000 diagnose_sb_0 (gsi_p, context,
13001 n ? (gimple *) n->value : NULL);
13002 }
13003 lab = gimple_cond_false_label (cond_stmt);
13004 if (lab)
13005 {
13006 n = splay_tree_lookup (all_labels,
13007 (splay_tree_key) lab);
13008 diagnose_sb_0 (gsi_p, context,
13009 n ? (gimple *) n->value : NULL);
13010 }
13011 }
13012 break;
13013
13014 case GIMPLE_GOTO:
13015 {
13016 tree lab = gimple_goto_dest (stmt);
13017 if (TREE_CODE (lab) != LABEL_DECL)
13018 break;
13019
13020 n = splay_tree_lookup (all_labels, (splay_tree_key) lab);
13021 diagnose_sb_0 (gsi_p, context, n ? (gimple *) n->value : NULL);
13022 }
13023 break;
13024
13025 case GIMPLE_SWITCH:
13026 {
13027 gswitch *switch_stmt = as_a <gswitch *> (stmt);
13028 unsigned int i;
13029 for (i = 0; i < gimple_switch_num_labels (switch_stmt); ++i)
13030 {
13031 tree lab = CASE_LABEL (gimple_switch_label (switch_stmt, i));
13032 n = splay_tree_lookup (all_labels, (splay_tree_key) lab);
13033 if (n && diagnose_sb_0 (gsi_p, context, (gimple *) n->value))
13034 break;
13035 }
13036 }
13037 break;
13038
13039 case GIMPLE_RETURN:
13040 diagnose_sb_0 (gsi_p, context, NULL);
13041 break;
13042
13043 default:
13044 break;
13045 }
13046
13047 return NULL_TREE;
13048 }
13049
13050 static unsigned int
13051 diagnose_omp_structured_block_errors (void)
13052 {
13053 struct walk_stmt_info wi;
13054 gimple_seq body = gimple_body (current_function_decl);
13055
13056 all_labels = splay_tree_new (splay_tree_compare_pointers, 0, 0);
13057
13058 memset (&wi, 0, sizeof (wi));
13059 walk_gimple_seq (body, diagnose_sb_1, NULL, &wi);
13060
13061 memset (&wi, 0, sizeof (wi));
13062 wi.want_locations = true;
13063 walk_gimple_seq_mod (&body, diagnose_sb_2, NULL, &wi);
13064
13065 gimple_set_body (current_function_decl, body);
13066
13067 splay_tree_delete (all_labels);
13068 all_labels = NULL;
13069
13070 return 0;
13071 }
13072
13073 namespace {
13074
13075 const pass_data pass_data_diagnose_omp_blocks =
13076 {
13077 GIMPLE_PASS, /* type */
13078 "*diagnose_omp_blocks", /* name */
13079 OPTGROUP_OMP, /* optinfo_flags */
13080 TV_NONE, /* tv_id */
13081 PROP_gimple_any, /* properties_required */
13082 0, /* properties_provided */
13083 0, /* properties_destroyed */
13084 0, /* todo_flags_start */
13085 0, /* todo_flags_finish */
13086 };
13087
13088 class pass_diagnose_omp_blocks : public gimple_opt_pass
13089 {
13090 public:
13091 pass_diagnose_omp_blocks (gcc::context *ctxt)
13092 : gimple_opt_pass (pass_data_diagnose_omp_blocks, ctxt)
13093 {}
13094
13095 /* opt_pass methods: */
13096 virtual bool gate (function *)
13097 {
13098 return flag_openacc || flag_openmp || flag_openmp_simd;
13099 }
13100 virtual unsigned int execute (function *)
13101 {
13102 return diagnose_omp_structured_block_errors ();
13103 }
13104
13105 }; // class pass_diagnose_omp_blocks
13106
13107 } // anon namespace
13108
13109 gimple_opt_pass *
13110 make_pass_diagnose_omp_blocks (gcc::context *ctxt)
13111 {
13112 return new pass_diagnose_omp_blocks (ctxt);
13113 }
13114 \f
13115
13116 #include "gt-omp-low.h"