omp-low.c (omp_find_scan): Make static.
[gcc.git] / gcc / omp-low.c
1 /* Lowering pass for OMP directives. Converts OMP directives into explicit
2 calls to the runtime library (libgomp), data marshalling to implement data
3 sharing and copying clauses, offloading to accelerators, and more.
4
5 Contributed by Diego Novillo <dnovillo@redhat.com>
6
7 Copyright (C) 2005-2019 Free Software Foundation, Inc.
8
9 This file is part of GCC.
10
11 GCC is free software; you can redistribute it and/or modify it under
12 the terms of the GNU General Public License as published by the Free
13 Software Foundation; either version 3, or (at your option) any later
14 version.
15
16 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
17 WARRANTY; without even the implied warranty of MERCHANTABILITY or
18 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
19 for more details.
20
21 You should have received a copy of the GNU General Public License
22 along with GCC; see the file COPYING3. If not see
23 <http://www.gnu.org/licenses/>. */
24
25 #include "config.h"
26 #include "system.h"
27 #include "coretypes.h"
28 #include "backend.h"
29 #include "target.h"
30 #include "tree.h"
31 #include "gimple.h"
32 #include "tree-pass.h"
33 #include "ssa.h"
34 #include "cgraph.h"
35 #include "pretty-print.h"
36 #include "diagnostic-core.h"
37 #include "fold-const.h"
38 #include "stor-layout.h"
39 #include "internal-fn.h"
40 #include "gimple-fold.h"
41 #include "gimplify.h"
42 #include "gimple-iterator.h"
43 #include "gimplify-me.h"
44 #include "gimple-walk.h"
45 #include "tree-iterator.h"
46 #include "tree-inline.h"
47 #include "langhooks.h"
48 #include "tree-dfa.h"
49 #include "tree-ssa.h"
50 #include "splay-tree.h"
51 #include "omp-general.h"
52 #include "omp-low.h"
53 #include "omp-grid.h"
54 #include "gimple-low.h"
55 #include "symbol-summary.h"
56 #include "tree-nested.h"
57 #include "context.h"
58 #include "gomp-constants.h"
59 #include "gimple-pretty-print.h"
60 #include "hsa-common.h"
61 #include "stringpool.h"
62 #include "attribs.h"
63
64 /* Lowering of OMP parallel and workshare constructs proceeds in two
65 phases. The first phase scans the function looking for OMP statements
66 and then for variables that must be replaced to satisfy data sharing
67 clauses. The second phase expands code for the constructs, as well as
68 re-gimplifying things when variables have been replaced with complex
69 expressions.
70
71 Final code generation is done by pass_expand_omp. The flowgraph is
72 scanned for regions which are then moved to a new
73 function, to be invoked by the thread library, or offloaded. */
74
75 /* Context structure. Used to store information about each parallel
76 directive in the code. */
77
struct omp_context
{
  /* This field must be at the beginning, as we do "inheritance":  Some
     callback functions for tree-inline.c (e.g., omp_copy_decl)
     receive a copy_body_data pointer that is up-casted to an
     omp_context pointer.  */
  copy_body_data cb;

  /* The tree of contexts corresponding to the encountered constructs.  */
  struct omp_context *outer;
  /* The GIMPLE_OMP_* statement this context was created for.  */
  gimple *stmt;

  /* Map variables to fields in a structure that allows communication
     between sending and receiving threads.  */
  splay_tree field_map;
  tree record_type;
  tree sender_decl;
  tree receiver_decl;

  /* These are used just by task contexts, if task firstprivate fn is
     needed.  srecord_type is used to communicate from the thread
     that encountered the task construct to task firstprivate fn,
     record_type is allocated by GOMP_task, initialized by task firstprivate
     fn and passed to the task body fn.  */
  splay_tree sfield_map;
  tree srecord_type;

  /* A chain of variables to add to the top-level block surrounding the
     construct.  In the case of a parallel, this is in the child function.  */
  tree block_vars;

  /* Label to which GOMP_cancel{,llation_point} and explicit and implicit
     barriers should jump to during omplower pass.  */
  tree cancel_label;

  /* The sibling GIMPLE_OMP_FOR simd with _simt_ clause or NULL
     otherwise.  */
  gimple *simt_stmt;

  /* For task reductions registered in this context, a vector containing
     the length of the private copies block (if constant, otherwise NULL)
     and then offsets (if constant, otherwise NULL) for each entry.  */
  vec<tree> task_reductions;

  /* A hash map from the reduction clauses to the registered array
     elts.  */
  hash_map<tree, unsigned> *task_reduction_map;

  /* And a hash map from the lastprivate(conditional:) variables to their
     corresponding tracking loop iteration variables.  */
  hash_map<tree, tree> *lastprivate_conditional_map;

  /* Nesting depth of this context.  Used to beautify error messages re
     invalid gotos.  The outermost ctx is depth 1, with depth 0 being
     reserved for the main body of the function.  */
  int depth;

  /* True if this parallel directive is nested within another.  */
  bool is_nested;

  /* True if this construct can be cancelled.  */
  bool cancellable;

  /* True if lower_omp_1 should look up lastprivate conditional in parent
     context.  */
  bool combined_into_simd_safelen0;

  /* True if there is nested scan context with inclusive clause.  */
  bool scan_inclusive;

  /* True if there is nested scan context with exclusive clause.  */
  bool scan_exclusive;
};
151
/* Map from each scanned OMP statement to its omp_context (see
   new_omp_context, which inserts into this tree).  */
static splay_tree all_contexts;
/* Nesting counters, presumably maintained while scanning parallel/task
   and target regions respectively — usage is outside this chunk.  */
static int taskreg_nesting_level;
static int target_nesting_level;
/* DECL_UIDs of variables marked TREE_ADDRESSABLE on behalf of task
   constructs (see use_pointer_for_field).  */
static bitmap task_shared_vars;
/* Contexts of parallel/task regions collected during scanning.  */
static vec<omp_context *> taskreg_contexts;

static void scan_omp (gimple_seq *, omp_context *);
static tree scan_omp_1_op (tree *, int *, void *);
160
/* Shared switch cases for gimple-walk callbacks: these statement kinds
   are containers whose sub-statements should be visited, so tell the
   walker we did not handle them ourselves.  Expects *HANDLED_OPS_P in
   scope at the expansion site.  */
#define WALK_SUBSTMTS \
  case GIMPLE_BIND: \
  case GIMPLE_TRY: \
  case GIMPLE_CATCH: \
  case GIMPLE_EH_FILTER: \
  case GIMPLE_TRANSACTION: \
    /* The sub-statements for these should be walked.  */ \
    *handled_ops_p = false; \
    break;
170
171 /* Return true if CTX corresponds to an oacc parallel region. */
172
173 static bool
174 is_oacc_parallel (omp_context *ctx)
175 {
176 enum gimple_code outer_type = gimple_code (ctx->stmt);
177 return ((outer_type == GIMPLE_OMP_TARGET)
178 && (gimple_omp_target_kind (ctx->stmt)
179 == GF_OMP_TARGET_KIND_OACC_PARALLEL));
180 }
181
182 /* Return true if CTX corresponds to an oacc kernels region. */
183
184 static bool
185 is_oacc_kernels (omp_context *ctx)
186 {
187 enum gimple_code outer_type = gimple_code (ctx->stmt);
188 return ((outer_type == GIMPLE_OMP_TARGET)
189 && (gimple_omp_target_kind (ctx->stmt)
190 == GF_OMP_TARGET_KIND_OACC_KERNELS));
191 }
192
193 /* If DECL is the artificial dummy VAR_DECL created for non-static
194 data member privatization, return the underlying "this" parameter,
195 otherwise return NULL. */
196
tree
omp_member_access_dummy_var (tree decl)
{
  /* The dummy VAR_DECL is artificial, ignored for debug, and has a
     DECL_VALUE_EXPR that the frontend wants disregarded for OMP.  */
  if (!VAR_P (decl)
      || !DECL_ARTIFICIAL (decl)
      || !DECL_IGNORED_P (decl)
      || !DECL_HAS_VALUE_EXPR_P (decl)
      || !lang_hooks.decls.omp_disregard_value_expr (decl, false))
    return NULL_TREE;

  tree v = DECL_VALUE_EXPR (decl);
  if (TREE_CODE (v) != COMPONENT_REF)
    return NULL_TREE;

  /* Strip the member-access path (component refs, dereferences,
     conversions, pointer arithmetic) down to its base.  Succeed only if
     that base is an artificial pointer PARM_DECL of the current
     function, i.e. the "this" parameter.  */
  while (1)
    switch (TREE_CODE (v))
      {
      case COMPONENT_REF:
      case MEM_REF:
      case INDIRECT_REF:
      CASE_CONVERT:
      case POINTER_PLUS_EXPR:
	v = TREE_OPERAND (v, 0);
	continue;
      case PARM_DECL:
	if (DECL_CONTEXT (v) == current_function_decl
	    && DECL_ARTIFICIAL (v)
	    && TREE_CODE (TREE_TYPE (v)) == POINTER_TYPE)
	  return v;
	return NULL_TREE;
      default:
	return NULL_TREE;
      }
}
231
232 /* Helper for unshare_and_remap, called through walk_tree. */
233
234 static tree
235 unshare_and_remap_1 (tree *tp, int *walk_subtrees, void *data)
236 {
237 tree *pair = (tree *) data;
238 if (*tp == pair[0])
239 {
240 *tp = unshare_expr (pair[1]);
241 *walk_subtrees = 0;
242 }
243 else if (IS_TYPE_OR_DECL_P (*tp))
244 *walk_subtrees = 0;
245 return NULL_TREE;
246 }
247
248 /* Return unshare_expr (X) with all occurrences of FROM
249 replaced with TO. */
250
251 static tree
252 unshare_and_remap (tree x, tree from, tree to)
253 {
254 tree pair[2] = { from, to };
255 x = unshare_expr (x);
256 walk_tree (&x, unshare_and_remap_1, pair, NULL);
257 return x;
258 }
259
260 /* Convenience function for calling scan_omp_1_op on tree operands. */
261
262 static inline tree
263 scan_omp_op (tree *tp, omp_context *ctx)
264 {
265 struct walk_stmt_info wi;
266
267 memset (&wi, 0, sizeof (wi));
268 wi.info = ctx;
269 wi.want_locations = true;
270
271 return walk_tree (tp, scan_omp_1_op, &wi, NULL);
272 }
273
274 static void lower_omp (gimple_seq *, omp_context *);
275 static tree lookup_decl_in_outer_ctx (tree, omp_context *);
276 static tree maybe_lookup_decl_in_outer_ctx (tree, omp_context *);
277
278 /* Return true if CTX is for an omp parallel. */
279
280 static inline bool
281 is_parallel_ctx (omp_context *ctx)
282 {
283 return gimple_code (ctx->stmt) == GIMPLE_OMP_PARALLEL;
284 }
285
286
287 /* Return true if CTX is for an omp task. */
288
289 static inline bool
290 is_task_ctx (omp_context *ctx)
291 {
292 return gimple_code (ctx->stmt) == GIMPLE_OMP_TASK;
293 }
294
295
296 /* Return true if CTX is for an omp taskloop. */
297
298 static inline bool
299 is_taskloop_ctx (omp_context *ctx)
300 {
301 return gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
302 && gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_TASKLOOP;
303 }
304
305
306 /* Return true if CTX is for a host omp teams. */
307
308 static inline bool
309 is_host_teams_ctx (omp_context *ctx)
310 {
311 return gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS
312 && gimple_omp_teams_host (as_a <gomp_teams *> (ctx->stmt));
313 }
314
315 /* Return true if CTX is for an omp parallel or omp task or host omp teams
316 (the last one is strictly not a task region in OpenMP speak, but we
317 need to treat it similarly). */
318
319 static inline bool
320 is_taskreg_ctx (omp_context *ctx)
321 {
322 return is_parallel_ctx (ctx) || is_task_ctx (ctx) || is_host_teams_ctx (ctx);
323 }
324
325 /* Return true if EXPR is variable sized. */
326
327 static inline bool
328 is_variable_sized (const_tree expr)
329 {
330 return !TREE_CONSTANT (TYPE_SIZE_UNIT (TREE_TYPE (expr)));
331 }
332
333 /* Lookup variables. The "maybe" form
334 allows for the variable form to not have been entered, otherwise we
335 assert that the variable must have been entered. */
336
337 static inline tree
338 lookup_decl (tree var, omp_context *ctx)
339 {
340 tree *n = ctx->cb.decl_map->get (var);
341 return *n;
342 }
343
344 static inline tree
345 maybe_lookup_decl (const_tree var, omp_context *ctx)
346 {
347 tree *n = ctx->cb.decl_map->get (const_cast<tree> (var));
348 return n ? *n : NULL_TREE;
349 }
350
351 static inline tree
352 lookup_field (tree var, omp_context *ctx)
353 {
354 splay_tree_node n;
355 n = splay_tree_lookup (ctx->field_map, (splay_tree_key) var);
356 return (tree) n->value;
357 }
358
359 static inline tree
360 lookup_sfield (splay_tree_key key, omp_context *ctx)
361 {
362 splay_tree_node n;
363 n = splay_tree_lookup (ctx->sfield_map
364 ? ctx->sfield_map : ctx->field_map, key);
365 return (tree) n->value;
366 }
367
/* Convenience overload keying off the VAR_DECL itself.  */
static inline tree
lookup_sfield (tree var, omp_context *ctx)
{
  return lookup_sfield ((splay_tree_key) var, ctx);
}
373
374 static inline tree
375 maybe_lookup_field (splay_tree_key key, omp_context *ctx)
376 {
377 splay_tree_node n;
378 n = splay_tree_lookup (ctx->field_map, key);
379 return n ? (tree) n->value : NULL_TREE;
380 }
381
/* Convenience overload keying off the VAR_DECL itself.  */
static inline tree
maybe_lookup_field (tree var, omp_context *ctx)
{
  return maybe_lookup_field ((splay_tree_key) var, ctx);
}
387
388 /* Return true if DECL should be copied by pointer. SHARED_CTX is
389 the parallel context if DECL is to be shared. */
390
static bool
use_pointer_for_field (tree decl, omp_context *shared_ctx)
{
  /* Aggregates and atomics are always passed by reference.  */
  if (AGGREGATE_TYPE_P (TREE_TYPE (decl))
      || TYPE_ATOMIC (TREE_TYPE (decl)))
    return true;

  /* We can only use copy-in/copy-out semantics for shared variables
     when we know the value is not accessible from an outer scope.  */
  if (shared_ctx)
    {
      gcc_assert (!is_gimple_omp_oacc (shared_ctx->stmt));

      /* ??? Trivially accessible from anywhere.  But why would we even
	 be passing an address in this case?  Should we simply assert
	 this to be false, or should we have a cleanup pass that removes
	 these from the list of mappings?  */
      if (is_global_var (maybe_lookup_decl_in_outer_ctx (decl, shared_ctx)))
	return true;

      /* For variables with DECL_HAS_VALUE_EXPR_P set, we cannot tell
	 without analyzing the expression whether or not its location
	 is accessible to anyone else.  In the case of nested parallel
	 regions it certainly may be.  */
      if (TREE_CODE (decl) != RESULT_DECL && DECL_HAS_VALUE_EXPR_P (decl))
	return true;

      /* Do not use copy-in/copy-out for variables that have their
	 address taken.  */
      if (TREE_ADDRESSABLE (decl))
	return true;

      /* lower_send_shared_vars only uses copy-in, but not copy-out
	 for these.  */
      if (TREE_READONLY (decl)
	  || ((TREE_CODE (decl) == RESULT_DECL
	       || TREE_CODE (decl) == PARM_DECL)
	      && DECL_BY_REFERENCE (decl)))
	return false;

      /* Disallow copy-in/out in nested parallel if
	 decl is shared in outer parallel, otherwise
	 each thread could store the shared variable
	 in its own copy-in location, making the
	 variable no longer really shared.  */
      if (shared_ctx->is_nested)
	{
	  omp_context *up;

	  /* Find the nearest enclosing taskreg context that already
	     maps DECL.  */
	  for (up = shared_ctx->outer; up; up = up->outer)
	    if (is_taskreg_ctx (up) && maybe_lookup_decl (decl, up))
	      break;

	  if (up)
	    {
	      tree c;

	      for (c = gimple_omp_taskreg_clauses (up->stmt);
		   c; c = OMP_CLAUSE_CHAIN (c))
		if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED
		    && OMP_CLAUSE_DECL (c) == decl)
		  break;

	      if (c)
		goto maybe_mark_addressable_and_ret;
	    }
	}

      /* For tasks avoid using copy-in/out.  As tasks can be
	 deferred or executed in different thread, when GOMP_task
	 returns, the task hasn't necessarily terminated.  */
      if (is_task_ctx (shared_ctx))
	{
	  tree outer;
	maybe_mark_addressable_and_ret:
	  outer = maybe_lookup_decl_in_outer_ctx (decl, shared_ctx);
	  if (is_gimple_reg (outer) && !omp_member_access_dummy_var (outer))
	    {
	      /* Taking address of OUTER in lower_send_shared_vars
		 might need regimplification of everything that uses the
		 variable.  */
	      if (!task_shared_vars)
		task_shared_vars = BITMAP_ALLOC (NULL);
	      bitmap_set_bit (task_shared_vars, DECL_UID (outer));
	      TREE_ADDRESSABLE (outer) = 1;
	    }
	  return true;
	}
    }

  return false;
}
483
484 /* Construct a new automatic decl similar to VAR. */
485
static tree
omp_copy_decl_2 (tree var, tree name, tree type, omp_context *ctx)
{
  tree copy = copy_var_decl (var, name, type);

  /* The copy lives in the current (possibly split-off) function and is
     chained onto CTX's block-local variables.  */
  DECL_CONTEXT (copy) = current_function_decl;
  DECL_CHAIN (copy) = ctx->block_vars;
  /* If VAR is listed in task_shared_vars, it means it wasn't
     originally addressable and is just because task needs to take
     its address.  But we don't need to take address of privatizations
     from that var.  */
  if (TREE_ADDRESSABLE (var)
      && task_shared_vars
      && bitmap_bit_p (task_shared_vars, DECL_UID (var)))
    TREE_ADDRESSABLE (copy) = 0;
  ctx->block_vars = copy;

  return copy;
}
505
/* Construct a new automatic decl with the same name and type as VAR.  */
static tree
omp_copy_decl_1 (tree var, omp_context *ctx)
{
  return omp_copy_decl_2 (var, DECL_NAME (var), TREE_TYPE (var), ctx);
}
511
512 /* Build COMPONENT_REF and set TREE_THIS_VOLATILE and TREE_READONLY on it
513 as appropriate. */
514 static tree
515 omp_build_component_ref (tree obj, tree field)
516 {
517 tree ret = build3 (COMPONENT_REF, TREE_TYPE (field), obj, field, NULL);
518 if (TREE_THIS_VOLATILE (field))
519 TREE_THIS_VOLATILE (ret) |= 1;
520 if (TREE_READONLY (field))
521 TREE_READONLY (ret) |= 1;
522 return ret;
523 }
524
525 /* Build tree nodes to access the field for VAR on the receiver side. */
526
527 static tree
528 build_receiver_ref (tree var, bool by_ref, omp_context *ctx)
529 {
530 tree x, field = lookup_field (var, ctx);
531
532 /* If the receiver record type was remapped in the child function,
533 remap the field into the new record type. */
534 x = maybe_lookup_field (field, ctx);
535 if (x != NULL)
536 field = x;
537
538 x = build_simple_mem_ref (ctx->receiver_decl);
539 TREE_THIS_NOTRAP (x) = 1;
540 x = omp_build_component_ref (x, field);
541 if (by_ref)
542 {
543 x = build_simple_mem_ref (x);
544 TREE_THIS_NOTRAP (x) = 1;
545 }
546
547 return x;
548 }
549
550 /* Build tree nodes to access VAR in the scope outer to CTX. In the case
551 of a parallel, this is a component reference; for workshare constructs
552 this is some variable. */
553
static tree
build_outer_var_ref (tree var, omp_context *ctx,
		     enum omp_clause_code code = OMP_CLAUSE_ERROR)
{
  tree x;
  /* Taskgroup contexts introduce no privatization; skip over them.  */
  omp_context *outer = ctx->outer;
  while (outer && gimple_code (outer->stmt) == GIMPLE_OMP_TASKGROUP)
    outer = outer->outer;

  if (is_global_var (maybe_lookup_decl_in_outer_ctx (var, ctx)))
    x = var;
  else if (is_variable_sized (var))
    {
      /* Variable sized decls are accessed through the pointer stored in
	 their DECL_VALUE_EXPR; remap that pointer recursively.  */
      x = TREE_OPERAND (DECL_VALUE_EXPR (var), 0);
      x = build_outer_var_ref (x, ctx, code);
      x = build_simple_mem_ref (x);
    }
  else if (is_taskreg_ctx (ctx))
    {
      bool by_ref = use_pointer_for_field (var, NULL);
      x = build_receiver_ref (var, by_ref, ctx);
    }
  else if ((gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
	    && gimple_omp_for_kind (ctx->stmt) & GF_OMP_FOR_SIMD)
	   || (code == OMP_CLAUSE_PRIVATE
	       && (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
		   || gimple_code (ctx->stmt) == GIMPLE_OMP_SECTIONS
		   || gimple_code (ctx->stmt) == GIMPLE_OMP_SINGLE)))
    {
      /* #pragma omp simd isn't a worksharing construct, and can reference
	 even private vars in its linear etc. clauses.
	 Similarly for OMP_CLAUSE_PRIVATE with outer ref, that can refer
	 to private vars in all worksharing constructs.  */
      x = NULL_TREE;
      if (outer && is_taskreg_ctx (outer))
	x = lookup_decl (var, outer);
      else if (outer)
	x = maybe_lookup_decl_in_outer_ctx (var, ctx);
      if (x == NULL_TREE)
	x = var;
    }
  else if (code == OMP_CLAUSE_LASTPRIVATE && is_taskloop_ctx (ctx))
    {
      gcc_assert (outer);
      /* Taskloop fields are keyed off &DECL_UID; see install_var_field's
	 mask bit 3 handling.  */
      splay_tree_node n
	= splay_tree_lookup (outer->field_map,
			     (splay_tree_key) &DECL_UID (var));
      if (n == NULL)
	{
	  if (is_global_var (maybe_lookup_decl_in_outer_ctx (var, outer)))
	    x = var;
	  else
	    x = lookup_decl (var, outer);
	}
      else
	{
	  tree field = (tree) n->value;
	  /* If the receiver record type was remapped in the child function,
	     remap the field into the new record type.  */
	  x = maybe_lookup_field (field, outer);
	  if (x != NULL)
	    field = x;

	  x = build_simple_mem_ref (outer->receiver_decl);
	  x = omp_build_component_ref (x, field);
	  if (use_pointer_for_field (var, outer))
	    x = build_simple_mem_ref (x);
	}
    }
  else if (outer)
    {
      /* Grid-body contexts hold no decl mappings; use their parent.  */
      if (gimple_code (outer->stmt) == GIMPLE_OMP_GRID_BODY)
	{
	  outer = outer->outer;
	  gcc_assert (outer
		      && gimple_code (outer->stmt) != GIMPLE_OMP_GRID_BODY);
	}
      x = lookup_decl (var, outer);
    }
  else if (omp_is_reference (var))
    /* This can happen with orphaned constructs.  If var is reference, it is
       possible it is shared and as such valid.  */
    x = var;
  else if (omp_member_access_dummy_var (var))
    x = var;
  else
    gcc_unreachable ();

  if (x == var)
    {
      /* For member-access dummy vars, substitute the outer "this"
	 parameter into a copy of the DECL_VALUE_EXPR.  */
      tree t = omp_member_access_dummy_var (var);
      if (t)
	{
	  x = DECL_VALUE_EXPR (var);
	  tree o = maybe_lookup_decl_in_outer_ctx (t, ctx);
	  if (o != t)
	    x = unshare_and_remap (x, t, o);
	  else
	    x = unshare_expr (x);
	}
    }

  if (omp_is_reference (var))
    x = build_simple_mem_ref (x);

  return x;
}
661
662 /* Build tree nodes to access the field for VAR on the sender side. */
663
664 static tree
665 build_sender_ref (splay_tree_key key, omp_context *ctx)
666 {
667 tree field = lookup_sfield (key, ctx);
668 return omp_build_component_ref (ctx->sender_decl, field);
669 }
670
/* Convenience overload keying off the VAR_DECL itself.  */
static tree
build_sender_ref (tree var, omp_context *ctx)
{
  return build_sender_ref ((splay_tree_key) var, ctx);
}
676
/* Add a new field for VAR inside the structure CTX->SENDER_DECL.  MASK
   selects placement: bit 0 = record_type/field_map, bit 1 =
   srecord_type/sfield_map, bit 2 = extra level of indirection for array
   types, bit 3 = key the maps off &DECL_UID (VAR) instead of VAR.  */
679
static void
install_var_field (tree var, bool by_ref, int mask, omp_context *ctx)
{
  tree field, type, sfield = NULL_TREE;
  splay_tree_key key = (splay_tree_key) var;

  /* Mask bit 3: key off the address of VAR's DECL_UID so the same VAR
     can have a second, independent field entry.  */
  if ((mask & 8) != 0)
    {
      key = (splay_tree_key) &DECL_UID (var);
      gcc_checking_assert (key != (splay_tree_key) var);
    }
  gcc_assert ((mask & 1) == 0
	      || !splay_tree_lookup (ctx->field_map, key));
  gcc_assert ((mask & 2) == 0 || !ctx->sfield_map
	      || !splay_tree_lookup (ctx->sfield_map, key));
  gcc_assert ((mask & 3) == 3
	      || !is_gimple_omp_oacc (ctx->stmt));

  type = TREE_TYPE (var);
  /* Prevent redeclaring the var in the split-off function with a restrict
     pointer type.  Note that we only clear type itself, restrict qualifiers in
     the pointed-to type will be ignored by points-to analysis.  */
  if (POINTER_TYPE_P (type)
      && TYPE_RESTRICT (type))
    type = build_qualified_type (type, TYPE_QUALS (type) & ~TYPE_QUAL_RESTRICT);

  if (mask & 4)
    {
      /* Mask bit 2: arrays are communicated via a double pointer.  */
      gcc_assert (TREE_CODE (type) == ARRAY_TYPE);
      type = build_pointer_type (build_pointer_type (type));
    }
  else if (by_ref)
    type = build_pointer_type (type);
  else if ((mask & 3) == 1 && omp_is_reference (var))
    type = TREE_TYPE (type);

  field = build_decl (DECL_SOURCE_LOCATION (var),
		      FIELD_DECL, DECL_NAME (var), type);

  /* Remember what variable this field was created for.  This does have a
     side effect of making dwarf2out ignore this member, so for helpful
     debugging we clear it later in delete_omp_context.  */
  DECL_ABSTRACT_ORIGIN (field) = var;
  if (type == TREE_TYPE (var))
    {
      SET_DECL_ALIGN (field, DECL_ALIGN (var));
      DECL_USER_ALIGN (field) = DECL_USER_ALIGN (var);
      TREE_THIS_VOLATILE (field) = TREE_THIS_VOLATILE (var);
    }
  else
    SET_DECL_ALIGN (field, TYPE_ALIGN (type));

  if ((mask & 3) == 3)
    {
      /* Field goes into both the record and (if present) the srecord.  */
      insert_field_into_struct (ctx->record_type, field);
      if (ctx->srecord_type)
	{
	  sfield = build_decl (DECL_SOURCE_LOCATION (var),
			       FIELD_DECL, DECL_NAME (var), type);
	  DECL_ABSTRACT_ORIGIN (sfield) = var;
	  SET_DECL_ALIGN (sfield, DECL_ALIGN (field));
	  DECL_USER_ALIGN (sfield) = DECL_USER_ALIGN (field);
	  TREE_THIS_VOLATILE (sfield) = TREE_THIS_VOLATILE (field);
	  insert_field_into_struct (ctx->srecord_type, sfield);
	}
    }
  else
    {
      if (ctx->srecord_type == NULL_TREE)
	{
	  tree t;

	  /* Lazily create srecord_type, mirroring every field already
	     present in record_type.  */
	  ctx->srecord_type = lang_hooks.types.make_type (RECORD_TYPE);
	  ctx->sfield_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
	  for (t = TYPE_FIELDS (ctx->record_type); t ; t = TREE_CHAIN (t))
	    {
	      sfield = build_decl (DECL_SOURCE_LOCATION (t),
				   FIELD_DECL, DECL_NAME (t), TREE_TYPE (t));
	      DECL_ABSTRACT_ORIGIN (sfield) = DECL_ABSTRACT_ORIGIN (t);
	      insert_field_into_struct (ctx->srecord_type, sfield);
	      splay_tree_insert (ctx->sfield_map,
				 (splay_tree_key) DECL_ABSTRACT_ORIGIN (t),
				 (splay_tree_value) sfield);
	    }
	}
      sfield = field;
      insert_field_into_struct ((mask & 1) ? ctx->record_type
				: ctx->srecord_type, field);
    }

  if (mask & 1)
    splay_tree_insert (ctx->field_map, key, (splay_tree_value) field);
  if ((mask & 2) && ctx->sfield_map)
    splay_tree_insert (ctx->sfield_map, key, (splay_tree_value) sfield);
}
775
776 static tree
777 install_var_local (tree var, omp_context *ctx)
778 {
779 tree new_var = omp_copy_decl_1 (var, ctx);
780 insert_decl_map (&ctx->cb, var, new_var);
781 return new_var;
782 }
783
784 /* Adjust the replacement for DECL in CTX for the new context. This means
785 copying the DECL_VALUE_EXPR, and fixing up the type. */
786
static void
fixup_remapped_decl (tree decl, omp_context *ctx, bool private_debug)
{
  tree new_decl, size;

  new_decl = lookup_decl (decl, ctx);

  TREE_TYPE (new_decl) = remap_type (TREE_TYPE (decl), &ctx->cb);

  /* Carry over a remapped DECL_VALUE_EXPR when the decl is variable
     sized or is a debug-only private copy.  */
  if ((!TREE_CONSTANT (DECL_SIZE (new_decl)) || private_debug)
      && DECL_HAS_VALUE_EXPR_P (decl))
    {
      tree ve = DECL_VALUE_EXPR (decl);
      walk_tree (&ve, copy_tree_body_r, &ctx->cb, NULL);
      SET_DECL_VALUE_EXPR (new_decl, ve);
      DECL_HAS_VALUE_EXPR_P (new_decl) = 1;
    }

  if (!TREE_CONSTANT (DECL_SIZE (new_decl)))
    {
      /* Variable sized decl: remap its size trees too, falling back to
	 the type's size on error.  */
      size = remap_decl (DECL_SIZE (decl), &ctx->cb);
      if (size == error_mark_node)
	size = TYPE_SIZE (TREE_TYPE (new_decl));
      DECL_SIZE (new_decl) = size;

      size = remap_decl (DECL_SIZE_UNIT (decl), &ctx->cb);
      if (size == error_mark_node)
	size = TYPE_SIZE_UNIT (TREE_TYPE (new_decl));
      DECL_SIZE_UNIT (new_decl) = size;
    }
}
818
819 /* The callback for remap_decl. Search all containing contexts for a
820 mapping of the variable; this avoids having to duplicate the splay
821 tree ahead of time. We know a mapping doesn't already exist in the
822 given context. Create new mappings to implement default semantics. */
823
static tree
omp_copy_decl (tree var, copy_body_data *cb)
{
  /* CB is really an omp_context; see the comment on omp_context::cb.  */
  omp_context *ctx = (omp_context *) cb;
  tree new_var;

  if (TREE_CODE (var) == LABEL_DECL)
    {
      /* Forced and non-local labels must not be duplicated.  */
      if (FORCED_LABEL (var) || DECL_NONLOCAL (var))
	return var;
      new_var = create_artificial_label (DECL_SOURCE_LOCATION (var));
      DECL_CONTEXT (new_var) = current_function_decl;
      insert_decl_map (&ctx->cb, var, new_var);
      return new_var;
    }

  /* Walk outward to the nearest parallel/task/teams context, returning
     any existing mapping found along the way.  */
  while (!is_taskreg_ctx (ctx))
    {
      ctx = ctx->outer;
      if (ctx == NULL)
	return var;
      new_var = maybe_lookup_decl (var, ctx);
      if (new_var)
	return new_var;
    }

  if (is_global_var (var) || decl_function_context (var) != ctx->cb.src_fn)
    return var;

  /* No mapping exists and one should have been created earlier.  */
  return error_mark_node;
}
855
856 /* Create a new context, with OUTER_CTX being the surrounding context. */
857
static omp_context *
new_omp_context (gimple *stmt, omp_context *outer_ctx)
{
  omp_context *ctx = XCNEW (omp_context);

  /* Register the context so it can be found from its statement.  */
  splay_tree_insert (all_contexts, (splay_tree_key) stmt,
		     (splay_tree_value) ctx);
  ctx->stmt = stmt;

  if (outer_ctx)
    {
      /* Inherit the enclosing context's copy_body_data.  */
      ctx->outer = outer_ctx;
      ctx->cb = outer_ctx->cb;
      ctx->cb.block = NULL;
      ctx->depth = outer_ctx->depth + 1;
    }
  else
    {
      /* Outermost context: an identity remapping within the current
	 function.  */
      ctx->cb.src_fn = current_function_decl;
      ctx->cb.dst_fn = current_function_decl;
      ctx->cb.src_node = cgraph_node::get (current_function_decl);
      gcc_checking_assert (ctx->cb.src_node);
      ctx->cb.dst_node = ctx->cb.src_node;
      ctx->cb.src_cfun = cfun;
      ctx->cb.copy_decl = omp_copy_decl;
      ctx->cb.eh_lp_nr = 0;
      ctx->cb.transform_call_graph_edges = CB_CGE_MOVE;
      ctx->cb.adjust_array_error_bounds = true;
      ctx->cb.dont_remap_vla_if_no_change = true;
      ctx->depth = 1;
    }

  ctx->cb.decl_map = new hash_map<tree, tree>;

  return ctx;
}
894
895 static gimple_seq maybe_catch_exception (gimple_seq);
896
897 /* Finalize task copyfn. */
898
static void
finalize_task_copyfn (gomp_task *task_stmt)
{
  struct function *child_cfun;
  tree child_fn;
  gimple_seq seq = NULL, new_seq;
  gbind *bind;

  /* Nothing to do if the task has no copy function.  */
  child_fn = gimple_omp_task_copy_fn (task_stmt);
  if (child_fn == NULL_TREE)
    return;

  child_cfun = DECL_STRUCT_FUNCTION (child_fn);
  DECL_STRUCT_FUNCTION (child_fn)->curr_properties = cfun->curr_properties;

  /* Gimplify the copy function's body in its own cfun.  */
  push_cfun (child_cfun);
  bind = gimplify_body (child_fn, false);
  gimple_seq_add_stmt (&seq, bind);
  new_seq = maybe_catch_exception (seq);
  if (new_seq != seq)
    {
      /* Exception handling was added; rewrap in a fresh bind.  */
      bind = gimple_build_bind (NULL, new_seq, NULL);
      seq = NULL;
      gimple_seq_add_stmt (&seq, bind);
    }
  gimple_set_body (child_fn, seq);
  pop_cfun ();

  /* Inform the callgraph about the new function.  */
  cgraph_node *node = cgraph_node::get_create (child_fn);
  node->parallelized_function = 1;
  cgraph_node::add_new_function (child_fn, false);
}
932
933 /* Destroy a omp_context data structures. Called through the splay tree
934 value delete callback. */
935
static void
delete_omp_context (splay_tree_value value)
{
  omp_context *ctx = (omp_context *) value;

  delete ctx->cb.decl_map;

  if (ctx->field_map)
    splay_tree_delete (ctx->field_map);
  if (ctx->sfield_map)
    splay_tree_delete (ctx->sfield_map);

  /* We hijacked DECL_ABSTRACT_ORIGIN earlier.  We need to clear it before
     it produces corrupt debug information.  */
  if (ctx->record_type)
    {
      tree t;
      for (t = TYPE_FIELDS (ctx->record_type); t ; t = DECL_CHAIN (t))
	DECL_ABSTRACT_ORIGIN (t) = NULL;
    }
  if (ctx->srecord_type)
    {
      tree t;
      for (t = TYPE_FIELDS (ctx->srecord_type); t ; t = DECL_CHAIN (t))
	DECL_ABSTRACT_ORIGIN (t) = NULL;
    }

  /* Task contexts may still own an ungimplified copy function.  */
  if (is_task_ctx (ctx))
    finalize_task_copyfn (as_a <gomp_task *> (ctx->stmt));

  if (ctx->task_reduction_map)
    {
      ctx->task_reductions.release ();
      delete ctx->task_reduction_map;
    }

  delete ctx->lastprivate_conditional_map;

  XDELETE (ctx);
}
976
977 /* Fix up RECEIVER_DECL with a type that has been remapped to the child
978 context. */
979
static void
fixup_child_record_type (omp_context *ctx)
{
  tree f, type = ctx->record_type;

  if (!ctx->receiver_decl)
    return;
  /* ??? It isn't sufficient to just call remap_type here, because
     variably_modified_type_p doesn't work the way we expect for
     record types.  Testing each field for whether it needs remapping
     and creating a new record by hand works, however.  */
  for (f = TYPE_FIELDS (type); f ; f = DECL_CHAIN (f))
    if (variably_modified_type_p (TREE_TYPE (f), ctx->cb.src_fn))
      break;
  if (f)
    {
      /* At least one field needs remapping: build a fresh record type
	 with copied, remapped fields.  */
      tree name, new_fields = NULL;

      type = lang_hooks.types.make_type (RECORD_TYPE);
      name = DECL_NAME (TYPE_NAME (ctx->record_type));
      name = build_decl (DECL_SOURCE_LOCATION (ctx->receiver_decl),
			 TYPE_DECL, name, type);
      TYPE_NAME (type) = name;

      for (f = TYPE_FIELDS (ctx->record_type); f ; f = DECL_CHAIN (f))
	{
	  tree new_f = copy_node (f);
	  DECL_CONTEXT (new_f) = type;
	  TREE_TYPE (new_f) = remap_type (TREE_TYPE (f), &ctx->cb);
	  DECL_CHAIN (new_f) = new_fields;
	  walk_tree (&DECL_SIZE (new_f), copy_tree_body_r, &ctx->cb, NULL);
	  walk_tree (&DECL_SIZE_UNIT (new_f), copy_tree_body_r,
		     &ctx->cb, NULL);
	  walk_tree (&DECL_FIELD_OFFSET (new_f), copy_tree_body_r,
		     &ctx->cb, NULL);
	  new_fields = new_f;

	  /* Arrange to be able to look up the receiver field
	     given the sender field.  */
	  splay_tree_insert (ctx->field_map, (splay_tree_key) f,
			     (splay_tree_value) new_f);
	}
      TYPE_FIELDS (type) = nreverse (new_fields);
      layout_type (type);
    }

  /* In a target region we never modify any of the pointers in *.omp_data_i,
     so attempt to help the optimizers.  */
  if (is_gimple_omp_offloaded (ctx->stmt))
    type = build_qualified_type (type, TYPE_QUAL_CONST);

  TREE_TYPE (ctx->receiver_decl)
    = build_qualified_type (build_reference_type (type), TYPE_QUAL_RESTRICT);
}
1034
1035 /* Instantiate decls as necessary in CTX to satisfy the data sharing
1036 specified by CLAUSES. */
1037
static void
scan_sharing_clauses (tree clauses, omp_context *ctx)
{
  tree c, decl;
  bool scan_array_reductions = false;

  /* First pass: for each clause, create the receiver-record fields and
     context-local variable copies that the data-sharing semantics require.
     The second pass below must not run until all fields exist.  */
  for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
    {
      bool by_ref;

      switch (OMP_CLAUSE_CODE (c))
	{
	case OMP_CLAUSE_PRIVATE:
	  decl = OMP_CLAUSE_DECL (c);
	  if (OMP_CLAUSE_PRIVATE_OUTER_REF (c))
	    /* Needs the outer value to construct/destruct the private copy,
	       so treat it like firstprivate below.  */
	    goto do_private;
	  else if (!is_variable_sized (decl))
	    install_var_local (decl, ctx);
	  break;

	case OMP_CLAUSE_SHARED:
	  decl = OMP_CLAUSE_DECL (c);
	  /* Ignore shared directives in teams construct inside of
	     target construct.  */
	  if (gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS
	      && !is_host_teams_ctx (ctx))
	    {
	      /* Global variables don't need to be copied,
		 the receiver side will use them directly.  */
	      tree odecl = maybe_lookup_decl_in_outer_ctx (decl, ctx);
	      if (is_global_var (odecl))
		break;
	      insert_decl_map (&ctx->cb, decl, odecl);
	      break;
	    }
	  gcc_assert (is_taskreg_ctx (ctx));
	  gcc_assert (!COMPLETE_TYPE_P (TREE_TYPE (decl))
		      || !is_variable_sized (decl));
	  /* Global variables don't need to be copied,
	     the receiver side will use them directly.  */
	  if (is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx)))
	    break;
	  if (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
	    {
	      /* Called for its side effect of possibly marking the decl
	         addressable; the field itself is installed in pass 2.  */
	      use_pointer_for_field (decl, ctx);
	      break;
	    }
	  by_ref = use_pointer_for_field (decl, NULL);
	  if ((! TREE_READONLY (decl) && !OMP_CLAUSE_SHARED_READONLY (c))
	      || TREE_ADDRESSABLE (decl)
	      || by_ref
	      || omp_is_reference (decl))
	    {
	      by_ref = use_pointer_for_field (decl, ctx);
	      install_var_field (decl, by_ref, 3, ctx);
	      install_var_local (decl, ctx);
	      break;
	    }
	  /* We don't need to copy const scalar vars back.  */
	  OMP_CLAUSE_SET_CODE (c, OMP_CLAUSE_FIRSTPRIVATE);
	  goto do_private;

	case OMP_CLAUSE_REDUCTION:
	case OMP_CLAUSE_IN_REDUCTION:
	  decl = OMP_CLAUSE_DECL (c);
	  if (TREE_CODE (decl) == MEM_REF)
	    {
	      /* Array-section reduction: dig out the base decl from under
		 the MEM_REF / POINTER_PLUS_EXPR wrapping.  */
	      tree t = TREE_OPERAND (decl, 0);
	      if (TREE_CODE (t) == POINTER_PLUS_EXPR)
		t = TREE_OPERAND (t, 0);
	      if (TREE_CODE (t) == INDIRECT_REF
		  || TREE_CODE (t) == ADDR_EXPR)
		t = TREE_OPERAND (t, 0);
	      install_var_local (t, ctx);
	      if (is_taskreg_ctx (ctx)
		  && (!is_global_var (maybe_lookup_decl_in_outer_ctx (t, ctx))
		      || (is_task_ctx (ctx)
			  && (TREE_CODE (TREE_TYPE (t)) == POINTER_TYPE
			      || (TREE_CODE (TREE_TYPE (t)) == REFERENCE_TYPE
				  && (TREE_CODE (TREE_TYPE (TREE_TYPE (t)))
				      == POINTER_TYPE)))))
		  && !is_variable_sized (t)
		  && (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION
		      || (!OMP_CLAUSE_REDUCTION_TASK (c)
			  && !is_task_ctx (ctx))))
		{
		  by_ref = use_pointer_for_field (t, NULL);
		  if (is_task_ctx (ctx)
		      && TREE_CODE (TREE_TYPE (t)) == REFERENCE_TYPE
		      && TREE_CODE (TREE_TYPE (TREE_TYPE (t))) == POINTER_TYPE)
		    {
		      /* Reference-to-pointer on a task needs both the
			 by-value (mask 1) and copy-back (mask 2) fields.  */
		      install_var_field (t, false, 1, ctx);
		      install_var_field (t, by_ref, 2, ctx);
		    }
		  else
		    install_var_field (t, by_ref, 3, ctx);
		}
	      break;
	    }
	  if (is_task_ctx (ctx)
	      || (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
		  && OMP_CLAUSE_REDUCTION_TASK (c)
		  && is_parallel_ctx (ctx)))
	    {
	      /* Global variables don't need to be copied,
		 the receiver side will use them directly.  */
	      if (!is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx)))
		{
		  by_ref = use_pointer_for_field (decl, ctx);
		  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_IN_REDUCTION)
		    install_var_field (decl, by_ref, 3, ctx);
		}
	      install_var_local (decl, ctx);
	      break;
	    }
	  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
	      && OMP_CLAUSE_REDUCTION_TASK (c))
	    {
	      install_var_local (decl, ctx);
	      break;
	    }
	  goto do_private;

	case OMP_CLAUSE_LASTPRIVATE:
	  /* Let the corresponding firstprivate clause create
	     the variable.  */
	  if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
	    break;
	  /* FALLTHRU */

	case OMP_CLAUSE_FIRSTPRIVATE:
	case OMP_CLAUSE_LINEAR:
	  decl = OMP_CLAUSE_DECL (c);
	do_private:
	  if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE
	       || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_IS_DEVICE_PTR)
	      && is_gimple_omp_offloaded (ctx->stmt))
	    {
	      if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
		install_var_field (decl, !omp_is_reference (decl), 3, ctx);
	      else if (TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
		install_var_field (decl, true, 3, ctx);
	      else
		install_var_field (decl, false, 3, ctx);
	    }
	  if (is_variable_sized (decl))
	    {
	      if (is_task_ctx (ctx))
		install_var_field (decl, false, 1, ctx);
	      break;
	    }
	  else if (is_taskreg_ctx (ctx))
	    {
	      bool global
		= is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx));
	      by_ref = use_pointer_for_field (decl, NULL);

	      if (is_task_ctx (ctx)
		  && (global || by_ref || omp_is_reference (decl)))
		{
		  install_var_field (decl, false, 1, ctx);
		  if (!global)
		    install_var_field (decl, by_ref, 2, ctx);
		}
	      else if (!global)
		install_var_field (decl, by_ref, 3, ctx);
	    }
	  install_var_local (decl, ctx);
	  break;

	case OMP_CLAUSE_USE_DEVICE_PTR:
	  decl = OMP_CLAUSE_DECL (c);
	  if (TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
	    install_var_field (decl, true, 3, ctx);
	  else
	    install_var_field (decl, false, 3, ctx);
	  if (DECL_SIZE (decl)
	      && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
	    {
	      /* Variable-sized decl: its DECL_VALUE_EXPR is *ptr, and the
		 underlying pointer must be available locally too.  */
	      tree decl2 = DECL_VALUE_EXPR (decl);
	      gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
	      decl2 = TREE_OPERAND (decl2, 0);
	      gcc_assert (DECL_P (decl2));
	      install_var_local (decl2, ctx);
	    }
	  install_var_local (decl, ctx);
	  break;

	case OMP_CLAUSE_IS_DEVICE_PTR:
	  decl = OMP_CLAUSE_DECL (c);
	  goto do_private;

	case OMP_CLAUSE__LOOPTEMP_:
	case OMP_CLAUSE__REDUCTEMP_:
	  gcc_assert (is_taskreg_ctx (ctx));
	  decl = OMP_CLAUSE_DECL (c);
	  install_var_field (decl, false, 3, ctx);
	  install_var_local (decl, ctx);
	  break;

	case OMP_CLAUSE_COPYPRIVATE:
	case OMP_CLAUSE_COPYIN:
	  decl = OMP_CLAUSE_DECL (c);
	  by_ref = use_pointer_for_field (decl, NULL);
	  install_var_field (decl, by_ref, 3, ctx);
	  break;

	case OMP_CLAUSE_FINAL:
	case OMP_CLAUSE_IF:
	case OMP_CLAUSE_NUM_THREADS:
	case OMP_CLAUSE_NUM_TEAMS:
	case OMP_CLAUSE_THREAD_LIMIT:
	case OMP_CLAUSE_DEVICE:
	case OMP_CLAUSE_SCHEDULE:
	case OMP_CLAUSE_DIST_SCHEDULE:
	case OMP_CLAUSE_DEPEND:
	case OMP_CLAUSE_PRIORITY:
	case OMP_CLAUSE_GRAINSIZE:
	case OMP_CLAUSE_NUM_TASKS:
	case OMP_CLAUSE_NUM_GANGS:
	case OMP_CLAUSE_NUM_WORKERS:
	case OMP_CLAUSE_VECTOR_LENGTH:
	  /* These clauses carry an expression operand evaluated in the
	     enclosing context, not in this region.  */
	  if (ctx->outer)
	    scan_omp_op (&OMP_CLAUSE_OPERAND (c, 0), ctx->outer);
	  break;

	case OMP_CLAUSE_TO:
	case OMP_CLAUSE_FROM:
	case OMP_CLAUSE_MAP:
	  if (ctx->outer)
	    scan_omp_op (&OMP_CLAUSE_SIZE (c), ctx->outer);
	  decl = OMP_CLAUSE_DECL (c);
	  /* Global variables with "omp declare target" attribute
	     don't need to be copied, the receiver side will use them
	     directly.  However, global variables with "omp declare target link"
	     attribute need to be copied.  Or when ALWAYS modifier is used.  */
	  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
	      && DECL_P (decl)
	      && ((OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_FIRSTPRIVATE_POINTER
		   && (OMP_CLAUSE_MAP_KIND (c)
		       != GOMP_MAP_FIRSTPRIVATE_REFERENCE))
		  || TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
	      && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_ALWAYS_TO
	      && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_ALWAYS_FROM
	      && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_ALWAYS_TOFROM
	      && is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx))
	      && varpool_node::get_create (decl)->offloadable
	      && !lookup_attribute ("omp declare target link",
				    DECL_ATTRIBUTES (decl)))
	    break;
	  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
	      && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_POINTER)
	    {
	      /* Ignore GOMP_MAP_POINTER kind for arrays in regions that are
		 not offloaded; there is nothing to map for those.  */
	      if (!is_gimple_omp_offloaded (ctx->stmt)
		  && !POINTER_TYPE_P (TREE_TYPE (decl))
		  && !OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c))
		break;
	    }
	  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
	      && (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER
		  || (OMP_CLAUSE_MAP_KIND (c)
		      == GOMP_MAP_FIRSTPRIVATE_REFERENCE)))
	    {
	      /* Component references are handled through their base;
		 nothing to install for the reference itself.  */
	      if (TREE_CODE (decl) == COMPONENT_REF
		  || (TREE_CODE (decl) == INDIRECT_REF
		      && TREE_CODE (TREE_OPERAND (decl, 0)) == COMPONENT_REF
		      && (TREE_CODE (TREE_TYPE (TREE_OPERAND (decl, 0)))
			  == REFERENCE_TYPE)))
		break;
	      if (DECL_SIZE (decl)
		  && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
		{
		  tree decl2 = DECL_VALUE_EXPR (decl);
		  gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
		  decl2 = TREE_OPERAND (decl2, 0);
		  gcc_assert (DECL_P (decl2));
		  install_var_local (decl2, ctx);
		}
	      install_var_local (decl, ctx);
	      break;
	    }
	  if (DECL_P (decl))
	    {
	      if (DECL_SIZE (decl)
		  && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
		{
		  tree decl2 = DECL_VALUE_EXPR (decl);
		  gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
		  decl2 = TREE_OPERAND (decl2, 0);
		  gcc_assert (DECL_P (decl2));
		  install_var_field (decl2, true, 3, ctx);
		  install_var_local (decl2, ctx);
		  install_var_local (decl, ctx);
		}
	      else
		{
		  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
		      && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_POINTER
		      && !OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c)
		      && TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
		    install_var_field (decl, true, 7, ctx);
		  else
		    install_var_field (decl, true, 3, ctx);
		  if (is_gimple_omp_offloaded (ctx->stmt)
		      && !OMP_CLAUSE_MAP_IN_REDUCTION (c))
		    install_var_local (decl, ctx);
		}
	    }
	  else
	    {
	      /* Non-decl map (e.g. an array section expression).  If it is
		 immediately followed by a zero-sized GOMP_MAP_POINTER of its
		 base decl, mark the pair as a zero-bias array section;
		 otherwise give it its own anonymous pointer field.  */
	      tree base = get_base_address (decl);
	      tree nc = OMP_CLAUSE_CHAIN (c);
	      if (DECL_P (base)
		  && nc != NULL_TREE
		  && OMP_CLAUSE_CODE (nc) == OMP_CLAUSE_MAP
		  && OMP_CLAUSE_DECL (nc) == base
		  && OMP_CLAUSE_MAP_KIND (nc) == GOMP_MAP_POINTER
		  && integer_zerop (OMP_CLAUSE_SIZE (nc)))
		{
		  OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c) = 1;
		  OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (nc) = 1;
		}
	      else
		{
		  if (ctx->outer)
		    {
		      scan_omp_op (&OMP_CLAUSE_DECL (c), ctx->outer);
		      decl = OMP_CLAUSE_DECL (c);
		    }
		  gcc_assert (!splay_tree_lookup (ctx->field_map,
						  (splay_tree_key) decl));
		  tree field
		    = build_decl (OMP_CLAUSE_LOCATION (c),
				  FIELD_DECL, NULL_TREE, ptr_type_node);
		  SET_DECL_ALIGN (field, TYPE_ALIGN (ptr_type_node));
		  insert_field_into_struct (ctx->record_type, field);
		  splay_tree_insert (ctx->field_map, (splay_tree_key) decl,
				     (splay_tree_value) field);
		}
	    }
	  break;

	case OMP_CLAUSE__GRIDDIM_:
	  if (ctx->outer)
	    {
	      scan_omp_op (&OMP_CLAUSE__GRIDDIM__SIZE (c), ctx->outer);
	      scan_omp_op (&OMP_CLAUSE__GRIDDIM__GROUP (c), ctx->outer);
	    }
	  break;

	case OMP_CLAUSE_NOWAIT:
	case OMP_CLAUSE_ORDERED:
	case OMP_CLAUSE_COLLAPSE:
	case OMP_CLAUSE_UNTIED:
	case OMP_CLAUSE_MERGEABLE:
	case OMP_CLAUSE_PROC_BIND:
	case OMP_CLAUSE_SAFELEN:
	case OMP_CLAUSE_SIMDLEN:
	case OMP_CLAUSE_THREADS:
	case OMP_CLAUSE_SIMD:
	case OMP_CLAUSE_NOGROUP:
	case OMP_CLAUSE_DEFAULTMAP:
	case OMP_CLAUSE_ASYNC:
	case OMP_CLAUSE_WAIT:
	case OMP_CLAUSE_GANG:
	case OMP_CLAUSE_WORKER:
	case OMP_CLAUSE_VECTOR:
	case OMP_CLAUSE_INDEPENDENT:
	case OMP_CLAUSE_AUTO:
	case OMP_CLAUSE_SEQ:
	case OMP_CLAUSE_TILE:
	case OMP_CLAUSE__SIMT_:
	case OMP_CLAUSE_DEFAULT:
	case OMP_CLAUSE_NONTEMPORAL:
	case OMP_CLAUSE_IF_PRESENT:
	case OMP_CLAUSE_FINALIZE:
	case OMP_CLAUSE_TASK_REDUCTION:
	  /* No data-sharing setup needed in the first pass.  */
	  break;

	case OMP_CLAUSE_ALIGNED:
	  decl = OMP_CLAUSE_DECL (c);
	  if (is_global_var (decl)
	      && TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
	    install_var_local (decl, ctx);
	  break;

	case OMP_CLAUSE__CONDTEMP_:
	  decl = OMP_CLAUSE_DECL (c);
	  if (is_parallel_ctx (ctx))
	    {
	      install_var_field (decl, false, 3, ctx);
	      install_var_local (decl, ctx);
	    }
	  else if (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
		   && (gimple_omp_for_kind (ctx->stmt) & GF_OMP_FOR_SIMD)
		   && !OMP_CLAUSE__CONDTEMP__ITER (c))
	    install_var_local (decl, ctx);
	  break;

	case OMP_CLAUSE__CACHE_:
	default:
	  gcc_unreachable ();
	}
    }

  /* Second pass: fix up remapped decls now that all fields/locals exist,
     and note clauses carrying GIMPLE sequences to scan afterwards.  */
  for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
    {
      switch (OMP_CLAUSE_CODE (c))
	{
	case OMP_CLAUSE_LASTPRIVATE:
	  /* Let the corresponding firstprivate clause create
	     the variable.  */
	  if (OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c))
	    scan_array_reductions = true;
	  if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
	    break;
	  /* FALLTHRU */

	case OMP_CLAUSE_FIRSTPRIVATE:
	case OMP_CLAUSE_PRIVATE:
	case OMP_CLAUSE_LINEAR:
	case OMP_CLAUSE_IS_DEVICE_PTR:
	  decl = OMP_CLAUSE_DECL (c);
	  if (is_variable_sized (decl))
	    {
	      if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE
		   || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_IS_DEVICE_PTR)
		  && is_gimple_omp_offloaded (ctx->stmt))
		{
		  tree decl2 = DECL_VALUE_EXPR (decl);
		  gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
		  decl2 = TREE_OPERAND (decl2, 0);
		  gcc_assert (DECL_P (decl2));
		  install_var_local (decl2, ctx);
		  fixup_remapped_decl (decl2, ctx, false);
		}
	      install_var_local (decl, ctx);
	    }
	  fixup_remapped_decl (decl, ctx,
			       OMP_CLAUSE_CODE (c) == OMP_CLAUSE_PRIVATE
			       && OMP_CLAUSE_PRIVATE_DEBUG (c));
	  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
	      && OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c))
	    scan_array_reductions = true;
	  break;

	case OMP_CLAUSE_REDUCTION:
	case OMP_CLAUSE_IN_REDUCTION:
	  decl = OMP_CLAUSE_DECL (c);
	  if (TREE_CODE (decl) != MEM_REF)
	    {
	      if (is_variable_sized (decl))
		install_var_local (decl, ctx);
	      fixup_remapped_decl (decl, ctx, false);
	    }
	  if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
	    scan_array_reductions = true;
	  break;

	case OMP_CLAUSE_TASK_REDUCTION:
	  if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
	    scan_array_reductions = true;
	  break;

	case OMP_CLAUSE_SHARED:
	  /* Ignore shared directives in teams construct inside of
	     target construct.  */
	  if (gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS
	      && !is_host_teams_ctx (ctx))
	    break;
	  decl = OMP_CLAUSE_DECL (c);
	  if (is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx)))
	    break;
	  if (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
	    {
	      if (is_global_var (maybe_lookup_decl_in_outer_ctx (decl,
								 ctx->outer)))
		break;
	      bool by_ref = use_pointer_for_field (decl, ctx);
	      install_var_field (decl, by_ref, 11, ctx);
	      break;
	    }
	  fixup_remapped_decl (decl, ctx, false);
	  break;

	case OMP_CLAUSE_MAP:
	  if (!is_gimple_omp_offloaded (ctx->stmt))
	    break;
	  decl = OMP_CLAUSE_DECL (c);
	  if (DECL_P (decl)
	      && ((OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_FIRSTPRIVATE_POINTER
		   && (OMP_CLAUSE_MAP_KIND (c)
		       != GOMP_MAP_FIRSTPRIVATE_REFERENCE))
		  || TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
	      && is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx))
	      && varpool_node::get_create (decl)->offloadable)
	    break;
	  if (DECL_P (decl))
	    {
	      if ((OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_POINTER
		   || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER)
		  && TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE
		  && !COMPLETE_TYPE_P (TREE_TYPE (decl)))
		{
		  /* Incomplete array mapped via pointer: remap its type so
		     the child function sees the adjusted array type.  */
		  tree new_decl = lookup_decl (decl, ctx);
		  TREE_TYPE (new_decl)
		    = remap_type (TREE_TYPE (decl), &ctx->cb);
		}
	      else if (DECL_SIZE (decl)
		       && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
		{
		  tree decl2 = DECL_VALUE_EXPR (decl);
		  gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
		  decl2 = TREE_OPERAND (decl2, 0);
		  gcc_assert (DECL_P (decl2));
		  fixup_remapped_decl (decl2, ctx, false);
		  fixup_remapped_decl (decl, ctx, true);
		}
	      else
		fixup_remapped_decl (decl, ctx, false);
	    }
	  break;

	case OMP_CLAUSE_COPYPRIVATE:
	case OMP_CLAUSE_COPYIN:
	case OMP_CLAUSE_DEFAULT:
	case OMP_CLAUSE_IF:
	case OMP_CLAUSE_NUM_THREADS:
	case OMP_CLAUSE_NUM_TEAMS:
	case OMP_CLAUSE_THREAD_LIMIT:
	case OMP_CLAUSE_DEVICE:
	case OMP_CLAUSE_SCHEDULE:
	case OMP_CLAUSE_DIST_SCHEDULE:
	case OMP_CLAUSE_NOWAIT:
	case OMP_CLAUSE_ORDERED:
	case OMP_CLAUSE_COLLAPSE:
	case OMP_CLAUSE_UNTIED:
	case OMP_CLAUSE_FINAL:
	case OMP_CLAUSE_MERGEABLE:
	case OMP_CLAUSE_PROC_BIND:
	case OMP_CLAUSE_SAFELEN:
	case OMP_CLAUSE_SIMDLEN:
	case OMP_CLAUSE_ALIGNED:
	case OMP_CLAUSE_DEPEND:
	case OMP_CLAUSE__LOOPTEMP_:
	case OMP_CLAUSE__REDUCTEMP_:
	case OMP_CLAUSE_TO:
	case OMP_CLAUSE_FROM:
	case OMP_CLAUSE_PRIORITY:
	case OMP_CLAUSE_GRAINSIZE:
	case OMP_CLAUSE_NUM_TASKS:
	case OMP_CLAUSE_THREADS:
	case OMP_CLAUSE_SIMD:
	case OMP_CLAUSE_NOGROUP:
	case OMP_CLAUSE_DEFAULTMAP:
	case OMP_CLAUSE_USE_DEVICE_PTR:
	case OMP_CLAUSE_NONTEMPORAL:
	case OMP_CLAUSE_ASYNC:
	case OMP_CLAUSE_WAIT:
	case OMP_CLAUSE_NUM_GANGS:
	case OMP_CLAUSE_NUM_WORKERS:
	case OMP_CLAUSE_VECTOR_LENGTH:
	case OMP_CLAUSE_GANG:
	case OMP_CLAUSE_WORKER:
	case OMP_CLAUSE_VECTOR:
	case OMP_CLAUSE_INDEPENDENT:
	case OMP_CLAUSE_AUTO:
	case OMP_CLAUSE_SEQ:
	case OMP_CLAUSE_TILE:
	case OMP_CLAUSE__GRIDDIM_:
	case OMP_CLAUSE__SIMT_:
	case OMP_CLAUSE_IF_PRESENT:
	case OMP_CLAUSE_FINALIZE:
	case OMP_CLAUSE__CONDTEMP_:
	  /* Nothing to fix up in the second pass.  */
	  break;

	case OMP_CLAUSE__CACHE_:
	default:
	  gcc_unreachable ();
	}
    }

  /* Finally scan any GIMPLE sequences hanging off reduction/lastprivate/
     linear clauses; OpenACC constructs must never have any.  */
  gcc_checking_assert (!scan_array_reductions
		       || !is_gimple_omp_oacc (ctx->stmt));
  if (scan_array_reductions)
    {
      for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
	if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
	     || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_IN_REDUCTION
	     || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_TASK_REDUCTION)
	    && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
	  {
	    scan_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c), ctx);
	    scan_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c), ctx);
	  }
	else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
		 && OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c))
	  scan_omp (&OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c), ctx);
	else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
		 && OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c))
	  scan_omp (&OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c), ctx);
    }
}
1643
1644 /* Create a new name for omp child function. Returns an identifier. */
1645
1646 static tree
1647 create_omp_child_function_name (bool task_copy)
1648 {
1649 return clone_function_name_numbered (current_function_decl,
1650 task_copy ? "_omp_cpyfn" : "_omp_fn");
1651 }
1652
1653 /* Return true if CTX may belong to offloaded code: either if current function
1654 is offloaded, or any enclosing context corresponds to a target region. */
1655
1656 static bool
1657 omp_maybe_offloaded_ctx (omp_context *ctx)
1658 {
1659 if (cgraph_node::get (current_function_decl)->offloadable)
1660 return true;
1661 for (; ctx; ctx = ctx->outer)
1662 if (is_gimple_omp_offloaded (ctx->stmt))
1663 return true;
1664 return false;
1665 }
1666
1667 /* Build a decl for the omp child function. It'll not contain a body
1668 yet, just the bare decl. */
1669
static void
create_omp_child_function (omp_context *ctx, bool task_copy)
{
  tree decl, type, name, t;

  name = create_omp_child_function_name (task_copy);
  /* Task copy functions take (dst, src) pointers; region bodies take a
     single pointer to the .omp_data_s block.  */
  if (task_copy)
    type = build_function_type_list (void_type_node, ptr_type_node,
				    ptr_type_node, NULL_TREE);
  else
    type = build_function_type_list (void_type_node, ptr_type_node, NULL_TREE);

  decl = build_decl (gimple_location (ctx->stmt), FUNCTION_DECL, name, type);

  /* OpenACC constructs never need a task copy function.  */
  gcc_checking_assert (!is_gimple_omp_oacc (ctx->stmt)
		       || !task_copy);
  if (!task_copy)
    ctx->cb.dst_fn = decl;
  else
    gimple_omp_task_set_copy_fn (ctx->stmt, decl);

  TREE_STATIC (decl) = 1;
  TREE_USED (decl) = 1;
  DECL_ARTIFICIAL (decl) = 1;
  DECL_IGNORED_P (decl) = 0;
  TREE_PUBLIC (decl) = 0;
  DECL_UNINLINABLE (decl) = 1;
  DECL_EXTERNAL (decl) = 0;
  DECL_CONTEXT (decl) = NULL_TREE;
  DECL_INITIAL (decl) = make_node (BLOCK);
  BLOCK_SUPERCONTEXT (DECL_INITIAL (decl)) = decl;
  /* Inherit the host function's attributes ...  */
  DECL_ATTRIBUTES (decl) = DECL_ATTRIBUTES (current_function_decl);
  /* Remove omp declare simd attribute from the new attributes.  */
  if (tree a = lookup_attribute ("omp declare simd", DECL_ATTRIBUTES (decl)))
    {
      /* Find the last "omp declare simd" attribute, then rebuild the
	 chain up to it, dropping every such attribute; nodes that are
	 kept but precede a dropped one must be copied since the chain
	 is shared with current_function_decl.  */
      while (tree a2 = lookup_attribute ("omp declare simd", TREE_CHAIN (a)))
	a = a2;
      a = TREE_CHAIN (a);
      for (tree *p = &DECL_ATTRIBUTES (decl); *p != a;)
	if (is_attribute_p ("omp declare simd", get_attribute_name (*p)))
	  *p = TREE_CHAIN (*p);
	else
	  {
	    tree chain = TREE_CHAIN (*p);
	    *p = copy_node (*p);
	    p = &TREE_CHAIN (*p);
	    *p = chain;
	  }
    }
  /* ... and its optimization/target option nodes and versioning flag.  */
  DECL_FUNCTION_SPECIFIC_OPTIMIZATION (decl)
    = DECL_FUNCTION_SPECIFIC_OPTIMIZATION (current_function_decl);
  DECL_FUNCTION_SPECIFIC_TARGET (decl)
    = DECL_FUNCTION_SPECIFIC_TARGET (current_function_decl);
  DECL_FUNCTION_VERSIONED (decl)
    = DECL_FUNCTION_VERSIONED (current_function_decl);

  if (omp_maybe_offloaded_ctx (ctx))
    {
      cgraph_node::get_create (decl)->offloadable = 1;
      if (ENABLE_OFFLOADING)
	g->have_offload = true;
    }

  if (cgraph_node::get_create (decl)->offloadable
      && !lookup_attribute ("omp declare target",
			    DECL_ATTRIBUTES (current_function_decl)))
    {
      /* Target region entry points get "omp target entrypoint";
	 anything else reachable from offloaded code gets
	 "omp declare target".  */
      const char *target_attr = (is_gimple_omp_offloaded (ctx->stmt)
				 ? "omp target entrypoint"
				 : "omp declare target");
      DECL_ATTRIBUTES (decl)
	= tree_cons (get_identifier (target_attr),
		     NULL_TREE, DECL_ATTRIBUTES (decl));
    }

  /* The child function returns void.  */
  t = build_decl (DECL_SOURCE_LOCATION (decl),
		  RESULT_DECL, NULL_TREE, void_type_node);
  DECL_ARTIFICIAL (t) = 1;
  DECL_IGNORED_P (t) = 1;
  DECL_CONTEXT (t) = decl;
  DECL_RESULT (decl) = t;

  /* First (or only) parameter: the incoming data block pointer.  */
  tree data_name = get_identifier (".omp_data_i");
  t = build_decl (DECL_SOURCE_LOCATION (decl), PARM_DECL, data_name,
		  ptr_type_node);
  DECL_ARTIFICIAL (t) = 1;
  DECL_NAMELESS (t) = 1;
  DECL_ARG_TYPE (t) = ptr_type_node;
  DECL_CONTEXT (t) = current_function_decl;
  TREE_USED (t) = 1;
  TREE_READONLY (t) = 1;
  DECL_ARGUMENTS (decl) = t;
  if (!task_copy)
    ctx->receiver_decl = t;
  else
    {
      /* Task copy functions additionally receive the source block;
	 chain it in front so argument order is (.omp_data_o, .omp_data_i)
	 in DECL_ARGUMENTS.  */
      t = build_decl (DECL_SOURCE_LOCATION (decl),
		      PARM_DECL, get_identifier (".omp_data_o"),
		      ptr_type_node);
      DECL_ARTIFICIAL (t) = 1;
      DECL_NAMELESS (t) = 1;
      DECL_ARG_TYPE (t) = ptr_type_node;
      DECL_CONTEXT (t) = current_function_decl;
      TREE_USED (t) = 1;
      TREE_ADDRESSABLE (t) = 1;
      DECL_CHAIN (t) = DECL_ARGUMENTS (decl);
      DECL_ARGUMENTS (decl) = t;
    }

  /* Allocate memory for the function structure.  The call to
     allocate_struct_function clobbers CFUN, so we need to restore
     it afterward.  */
  push_struct_function (decl);
  cfun->function_end_locus = gimple_location (ctx->stmt);
  init_tree_ssa (cfun);
  pop_cfun ();
}
1787
1788 /* Callback for walk_gimple_seq. Check if combined parallel
1789 contains gimple_omp_for_combined_into_p OMP_FOR. */
1790
tree
omp_find_combined_for (gimple_stmt_iterator *gsi_p,
		       bool *handled_ops_p,
		       struct walk_stmt_info *wi)
{
  gimple *stmt = gsi_stmt (*gsi_p);

  *handled_ops_p = true;
  switch (gimple_code (stmt))
    {
    WALK_SUBSTMTS;

    case GIMPLE_OMP_FOR:
      /* On entry wi->info points at the gf_mask kind we are searching
	 for; on a match it is overwritten with the statement itself and
	 the walk is stopped by returning non-NULL.  */
      if (gimple_omp_for_combined_into_p (stmt)
	  && gimple_omp_for_kind (stmt)
	     == *(const enum gf_mask *) (wi->info))
	{
	  wi->info = stmt;
	  return integer_zero_node;
	}
      break;
    default:
      break;
    }
  return NULL;
}
1817
1818 /* Add _LOOPTEMP_/_REDUCTEMP_ clauses on OpenMP parallel or task. */
1819
static void
add_taskreg_looptemp_clauses (enum gf_mask msk, gimple *stmt,
			      omp_context *outer_ctx)
{
  struct walk_stmt_info wi;

  /* Search the body for the combined-into GIMPLE_OMP_FOR of kind MSK;
     omp_find_combined_for replaces wi.info with the statement on a hit,
     so wi.info still pointing at &msk means "not found".  */
  memset (&wi, 0, sizeof (wi));
  wi.val_only = true;
  wi.info = (void *) &msk;
  walk_gimple_seq (gimple_omp_body (stmt), omp_find_combined_for, NULL, &wi);
  if (wi.info != (void *) &msk)
    {
      gomp_for *for_stmt = as_a <gomp_for *> ((gimple *) wi.info);
      struct omp_for_data fd;
      omp_extract_for_data (for_stmt, &fd, NULL);
      /* We need two temporaries with fd.loop.v type (istart/iend)
	 and then (fd.collapse - 1) temporaries with the same
	 type for count2 ... countN-1 vars if not constant.  */
      size_t count = 2, i;
      tree type = fd.iter_type;
      if (fd.collapse > 1
	  && TREE_CODE (fd.loop.n2) != INTEGER_CST)
	{
	  count += fd.collapse - 1;
	  /* If there are lastprivate clauses on the inner
	     GIMPLE_OMP_FOR, add one more temporaries for the total number
	     of iterations (product of count1 ... countN-1).  */
	  if (omp_find_clause (gimple_omp_for_clauses (for_stmt),
			       OMP_CLAUSE_LASTPRIVATE))
	    count++;
	  else if (msk == GF_OMP_FOR_KIND_FOR
		   && omp_find_clause (gimple_omp_parallel_clauses (stmt),
				       OMP_CLAUSE_LASTPRIVATE))
	    count++;
	}
      /* Prepend one _LOOPTEMP_ clause per needed temporary.  */
      for (i = 0; i < count; i++)
	{
	  tree temp = create_tmp_var (type);
	  tree c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__LOOPTEMP_);
	  insert_decl_map (&outer_ctx->cb, temp, temp);
	  OMP_CLAUSE_DECL (c) = temp;
	  OMP_CLAUSE_CHAIN (c) = gimple_omp_taskreg_clauses (stmt);
	  gimple_omp_taskreg_set_clauses (stmt, c);
	}
    }
  /* A taskloop with reductions additionally needs a _REDUCTEMP_
     pointer temporary.  */
  if (msk == GF_OMP_FOR_KIND_TASKLOOP
      && omp_find_clause (gimple_omp_task_clauses (stmt),
			  OMP_CLAUSE_REDUCTION))
    {
      tree type = build_pointer_type (pointer_sized_int_node);
      tree temp = create_tmp_var (type);
      tree c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__REDUCTEMP_);
      insert_decl_map (&outer_ctx->cb, temp, temp);
      OMP_CLAUSE_DECL (c) = temp;
      OMP_CLAUSE_CHAIN (c) = gimple_omp_task_clauses (stmt);
      gimple_omp_task_set_clauses (stmt, c);
    }
}
1878
1879 /* Scan an OpenMP parallel directive. */
1880
static void
scan_omp_parallel (gimple_stmt_iterator *gsi, omp_context *outer_ctx)
{
  omp_context *ctx;
  tree name;
  gomp_parallel *stmt = as_a <gomp_parallel *> (gsi_stmt (*gsi));

  /* Ignore parallel directives with empty bodies, unless there
     are copyin clauses.  */
  if (optimize > 0
      && empty_body_p (gimple_omp_body (stmt))
      && omp_find_clause (gimple_omp_parallel_clauses (stmt),
			  OMP_CLAUSE_COPYIN) == NULL)
    {
      gsi_replace (gsi, gimple_build_nop (), false);
      return;
    }

  /* A combined parallel-for needs _LOOPTEMP_ clauses for istart/iend.  */
  if (gimple_omp_parallel_combined_p (stmt))
    add_taskreg_looptemp_clauses (GF_OMP_FOR_KIND_FOR, stmt, outer_ctx);
  /* If any reduction clause has the task modifier, prepend a single
     _REDUCTEMP_ clause; one suffices for all such reductions.  */
  for (tree c = omp_find_clause (gimple_omp_parallel_clauses (stmt),
				 OMP_CLAUSE_REDUCTION);
       c; c = omp_find_clause (OMP_CLAUSE_CHAIN (c), OMP_CLAUSE_REDUCTION))
    if (OMP_CLAUSE_REDUCTION_TASK (c))
      {
	tree type = build_pointer_type (pointer_sized_int_node);
	tree temp = create_tmp_var (type);
	tree c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__REDUCTEMP_);
	if (outer_ctx)
	  insert_decl_map (&outer_ctx->cb, temp, temp);
	OMP_CLAUSE_DECL (c) = temp;
	OMP_CLAUSE_CHAIN (c) = gimple_omp_parallel_clauses (stmt);
	gimple_omp_parallel_set_clauses (stmt, c);
	break;
      }
    else if (OMP_CLAUSE_CHAIN (c) == NULL_TREE)
      break;

  /* Build the context and the .omp_data_s record type that will carry
     the shared data into the outlined child function.  */
  ctx = new_omp_context (stmt, outer_ctx);
  taskreg_contexts.safe_push (ctx);
  if (taskreg_nesting_level > 1)
    ctx->is_nested = true;
  ctx->field_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
  ctx->record_type = lang_hooks.types.make_type (RECORD_TYPE);
  name = create_tmp_var_name (".omp_data_s");
  name = build_decl (gimple_location (stmt),
		     TYPE_DECL, name, ctx->record_type);
  DECL_ARTIFICIAL (name) = 1;
  DECL_NAMELESS (name) = 1;
  TYPE_NAME (ctx->record_type) = name;
  TYPE_ARTIFICIAL (ctx->record_type) = 1;
  /* Phony parallels from grid lowering get no child function.  */
  if (!gimple_omp_parallel_grid_phony (stmt))
    {
      create_omp_child_function (ctx, false);
      gimple_omp_parallel_set_child_fn (stmt, ctx->cb.dst_fn);
    }

  scan_sharing_clauses (gimple_omp_parallel_clauses (stmt), ctx);
  scan_omp (gimple_omp_body_ptr (stmt), ctx);

  /* No shared data at all: drop the record type and receiver decl.  */
  if (TYPE_FIELDS (ctx->record_type) == NULL)
    ctx->record_type = ctx->receiver_decl = NULL;
}
1944
1945 /* Scan an OpenMP task directive. */
1946
static void
scan_omp_task (gimple_stmt_iterator *gsi, omp_context *outer_ctx)
{
  omp_context *ctx;
  tree name, t;
  gomp_task *stmt = as_a <gomp_task *> (gsi_stmt (*gsi));

  /* Ignore task directives with empty bodies, unless they have depend
     clause.  */
  if (optimize > 0
      && gimple_omp_body (stmt)
      && empty_body_p (gimple_omp_body (stmt))
      && !omp_find_clause (gimple_omp_task_clauses (stmt), OMP_CLAUSE_DEPEND))
    {
      gsi_replace (gsi, gimple_build_nop (), false);
      return;
    }

  /* Taskloops need _LOOPTEMP_/_REDUCTEMP_ clauses for the inner for.  */
  if (gimple_omp_task_taskloop_p (stmt))
    add_taskreg_looptemp_clauses (GF_OMP_FOR_KIND_TASKLOOP, stmt, outer_ctx);

  ctx = new_omp_context (stmt, outer_ctx);

  /* A bodyless taskwait-with-depend only needs its clauses scanned.  */
  if (gimple_omp_task_taskwait_p (stmt))
    {
      scan_sharing_clauses (gimple_omp_task_clauses (stmt), ctx);
      return;
    }

  /* Build the .omp_data_s record carrying data into the child task.  */
  taskreg_contexts.safe_push (ctx);
  if (taskreg_nesting_level > 1)
    ctx->is_nested = true;
  ctx->field_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
  ctx->record_type = lang_hooks.types.make_type (RECORD_TYPE);
  name = create_tmp_var_name (".omp_data_s");
  name = build_decl (gimple_location (stmt),
		     TYPE_DECL, name, ctx->record_type);
  DECL_ARTIFICIAL (name) = 1;
  DECL_NAMELESS (name) = 1;
  TYPE_NAME (ctx->record_type) = name;
  TYPE_ARTIFICIAL (ctx->record_type) = 1;
  create_omp_child_function (ctx, false);
  gimple_omp_task_set_child_fn (stmt, ctx->cb.dst_fn);

  scan_sharing_clauses (gimple_omp_task_clauses (stmt), ctx);

  /* If scanning created an srecord_type (the sender-side layout differs
     from the receiver side), a task copy function is also needed.  */
  if (ctx->srecord_type)
    {
      name = create_tmp_var_name (".omp_data_a");
      name = build_decl (gimple_location (stmt),
			 TYPE_DECL, name, ctx->srecord_type);
      DECL_ARTIFICIAL (name) = 1;
      DECL_NAMELESS (name) = 1;
      TYPE_NAME (ctx->srecord_type) = name;
      TYPE_ARTIFICIAL (ctx->srecord_type) = 1;
      create_omp_child_function (ctx, true);
    }

  scan_omp (gimple_omp_body_ptr (stmt), ctx);

  /* No shared data at all: drop the record type and tell the runtime
     the argument block has size 0 and alignment 1.  */
  if (TYPE_FIELDS (ctx->record_type) == NULL)
    {
      ctx->record_type = ctx->receiver_decl = NULL;
      t = build_int_cst (long_integer_type_node, 0);
      gimple_omp_task_set_arg_size (stmt, t);
      t = build_int_cst (long_integer_type_node, 1);
      gimple_omp_task_set_arg_align (stmt, t);
    }
}
2016
2017 /* Helper function for finish_taskreg_scan, called through walk_tree.
2018 If maybe_lookup_decl_in_outer_context returns non-NULL for some
2019 tree, replace it in the expression. */
2020
2021 static tree
2022 finish_taskreg_remap (tree *tp, int *walk_subtrees, void *data)
2023 {
2024 if (VAR_P (*tp))
2025 {
2026 omp_context *ctx = (omp_context *) data;
2027 tree t = maybe_lookup_decl_in_outer_ctx (*tp, ctx);
2028 if (t != *tp)
2029 {
2030 if (DECL_HAS_VALUE_EXPR_P (t))
2031 t = unshare_expr (DECL_VALUE_EXPR (t));
2032 *tp = t;
2033 }
2034 *walk_subtrees = 0;
2035 }
2036 else if (IS_TYPE_OR_DECL_P (*tp))
2037 *walk_subtrees = 0;
2038 return NULL_TREE;
2039 }
2040
/* If any decls have been made addressable during scan_omp,
   adjust their fields if needed, and layout record types
   of parallel/task constructs.  Called once per parallel/task/teams
   context after the whole function body has been scanned.  */

static void
finish_taskreg_scan (omp_context *ctx)
{
  /* Nothing to do if the context needs no data-sharing record.  */
  if (ctx->record_type == NULL_TREE)
    return;

  /* If any task_shared_vars were needed, verify all
     OMP_CLAUSE_SHARED clauses on GIMPLE_OMP_{PARALLEL,TASK,TEAMS}
     statements if use_pointer_for_field hasn't changed
     because of that.  If it did, update field types now.  */
  if (task_shared_vars)
    {
      tree c;

      for (c = gimple_omp_taskreg_clauses (ctx->stmt);
	   c; c = OMP_CLAUSE_CHAIN (c))
	if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED
	    && !OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
	  {
	    tree decl = OMP_CLAUSE_DECL (c);

	    /* Global variables don't need to be copied,
	       the receiver side will use them directly.  */
	    if (is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx)))
	      continue;
	    /* Skip decls that were never made addressable, or for
	       which a by-reference field isn't needed anyway.  */
	    if (!bitmap_bit_p (task_shared_vars, DECL_UID (decl))
		|| !use_pointer_for_field (decl, ctx))
	      continue;
	    tree field = lookup_field (decl, ctx);
	    /* Already a pointer to the decl's type: nothing to fix.  */
	    if (TREE_CODE (TREE_TYPE (field)) == POINTER_TYPE
		&& TREE_TYPE (TREE_TYPE (field)) == TREE_TYPE (decl))
	      continue;
	    /* Retype the field as a pointer and reset alignment and
	       volatility, which no longer apply to the pointer.  */
	    TREE_TYPE (field) = build_pointer_type (TREE_TYPE (decl));
	    TREE_THIS_VOLATILE (field) = 0;
	    DECL_USER_ALIGN (field) = 0;
	    SET_DECL_ALIGN (field, TYPE_ALIGN (TREE_TYPE (field)));
	    if (TYPE_ALIGN (ctx->record_type) < DECL_ALIGN (field))
	      SET_TYPE_ALIGN (ctx->record_type, DECL_ALIGN (field));
	    if (ctx->srecord_type)
	      {
		/* Keep the sender-side record's field in sync.  */
		tree sfield = lookup_sfield (decl, ctx);
		TREE_TYPE (sfield) = TREE_TYPE (field);
		TREE_THIS_VOLATILE (sfield) = 0;
		DECL_USER_ALIGN (sfield) = 0;
		SET_DECL_ALIGN (sfield, DECL_ALIGN (field));
		if (TYPE_ALIGN (ctx->srecord_type) < DECL_ALIGN (sfield))
		  SET_TYPE_ALIGN (ctx->srecord_type, DECL_ALIGN (sfield));
	      }
	  }
    }

  if (gimple_code (ctx->stmt) == GIMPLE_OMP_PARALLEL)
    {
      tree clauses = gimple_omp_parallel_clauses (ctx->stmt);
      tree c = omp_find_clause (clauses, OMP_CLAUSE__REDUCTEMP_);
      if (c)
	{
	  /* Move the _reductemp_ clause first.  GOMP_parallel_reductions
	     expects to find it at the start of data.  */
	  tree f = lookup_field (OMP_CLAUSE_DECL (c), ctx);
	  tree *p = &TYPE_FIELDS (ctx->record_type);
	  /* Unlink F from wherever it sits in the field chain...  */
	  while (*p)
	    if (*p == f)
	      {
		*p = DECL_CHAIN (*p);
		break;
	      }
	    else
	      p = &DECL_CHAIN (*p);
	  /* ...and relink it at the front.  */
	  DECL_CHAIN (f) = TYPE_FIELDS (ctx->record_type);
	  TYPE_FIELDS (ctx->record_type) = f;
	}
      layout_type (ctx->record_type);
      fixup_child_record_type (ctx);
    }
  else if (gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS)
    {
      layout_type (ctx->record_type);
      fixup_child_record_type (ctx);
    }
  else
    {
      /* Task (including taskloop) contexts.  */
      location_t loc = gimple_location (ctx->stmt);
      tree *p, vla_fields = NULL_TREE, *q = &vla_fields;
      /* Move VLA fields to the end.  */
      p = &TYPE_FIELDS (ctx->record_type);
      while (*p)
	if (!TYPE_SIZE_UNIT (TREE_TYPE (*p))
	    || ! TREE_CONSTANT (TYPE_SIZE_UNIT (TREE_TYPE (*p))))
	  {
	    /* Variable-sized field: detach it onto the VLA list,
	       preserving relative order via the tail pointer Q.  */
	    *q = *p;
	    *p = TREE_CHAIN (*p);
	    TREE_CHAIN (*q) = NULL_TREE;
	    q = &TREE_CHAIN (*q);
	  }
	else
	  p = &DECL_CHAIN (*p);
      /* Append the collected VLA fields after the fixed-size ones.  */
      *p = vla_fields;
      if (gimple_omp_task_taskloop_p (ctx->stmt))
	{
	  /* Move fields corresponding to first and second _looptemp_
	     clause first.  These are filled by GOMP_taskloop
	     and thus need to be in specific positions.  */
	  tree clauses = gimple_omp_task_clauses (ctx->stmt);
	  tree c1 = omp_find_clause (clauses, OMP_CLAUSE__LOOPTEMP_);
	  tree c2 = omp_find_clause (OMP_CLAUSE_CHAIN (c1),
				     OMP_CLAUSE__LOOPTEMP_);
	  tree c3 = omp_find_clause (clauses, OMP_CLAUSE__REDUCTEMP_);
	  tree f1 = lookup_field (OMP_CLAUSE_DECL (c1), ctx);
	  tree f2 = lookup_field (OMP_CLAUSE_DECL (c2), ctx);
	  tree f3 = c3 ? lookup_field (OMP_CLAUSE_DECL (c3), ctx) : NULL_TREE;
	  /* Unlink f1/f2 (and f3 if present) from the chain.  */
	  p = &TYPE_FIELDS (ctx->record_type);
	  while (*p)
	    if (*p == f1 || *p == f2 || *p == f3)
	      *p = DECL_CHAIN (*p);
	    else
	      p = &DECL_CHAIN (*p);
	  /* Relink them at the head: f1, f2, [f3], rest.  */
	  DECL_CHAIN (f1) = f2;
	  if (c3)
	    {
	      DECL_CHAIN (f2) = f3;
	      DECL_CHAIN (f3) = TYPE_FIELDS (ctx->record_type);
	    }
	  else
	    DECL_CHAIN (f2) = TYPE_FIELDS (ctx->record_type);
	  TYPE_FIELDS (ctx->record_type) = f1;
	  if (ctx->srecord_type)
	    {
	      /* Mirror the same reordering in the sender record.  */
	      f1 = lookup_sfield (OMP_CLAUSE_DECL (c1), ctx);
	      f2 = lookup_sfield (OMP_CLAUSE_DECL (c2), ctx);
	      if (c3)
		f3 = lookup_sfield (OMP_CLAUSE_DECL (c3), ctx);
	      p = &TYPE_FIELDS (ctx->srecord_type);
	      while (*p)
		if (*p == f1 || *p == f2 || *p == f3)
		  *p = DECL_CHAIN (*p);
		else
		  p = &DECL_CHAIN (*p);
	      DECL_CHAIN (f1) = f2;
	      /* NOTE(review): this assignment is immediately overwritten
		 in the c3 branch below; it only matters when !c3.  */
	      DECL_CHAIN (f2) = TYPE_FIELDS (ctx->srecord_type);
	      if (c3)
		{
		  DECL_CHAIN (f2) = f3;
		  DECL_CHAIN (f3) = TYPE_FIELDS (ctx->srecord_type);
		}
	      else
		DECL_CHAIN (f2) = TYPE_FIELDS (ctx->srecord_type);
	      TYPE_FIELDS (ctx->srecord_type) = f1;
	    }
	}
      layout_type (ctx->record_type);
      fixup_child_record_type (ctx);
      if (ctx->srecord_type)
	layout_type (ctx->srecord_type);
      /* Record the size/alignment arguments the task runtime needs.
	 The size may be non-constant for VLAs; remap any decls in it
	 to the enclosing context.  */
      tree t = fold_convert_loc (loc, long_integer_type_node,
				 TYPE_SIZE_UNIT (ctx->record_type));
      if (TREE_CODE (t) != INTEGER_CST)
	{
	  t = unshare_expr (t);
	  walk_tree (&t, finish_taskreg_remap, ctx, NULL);
	}
      gimple_omp_task_set_arg_size (ctx->stmt, t);
      t = build_int_cst (long_integer_type_node,
			 TYPE_ALIGN_UNIT (ctx->record_type));
      gimple_omp_task_set_arg_align (ctx->stmt, t);
    }
}
2212
2213 /* Find the enclosing offload context. */
2214
2215 static omp_context *
2216 enclosing_target_ctx (omp_context *ctx)
2217 {
2218 for (; ctx; ctx = ctx->outer)
2219 if (gimple_code (ctx->stmt) == GIMPLE_OMP_TARGET)
2220 break;
2221
2222 return ctx;
2223 }
2224
2225 /* Return true if ctx is part of an oacc kernels region. */
2226
2227 static bool
2228 ctx_in_oacc_kernels_region (omp_context *ctx)
2229 {
2230 for (;ctx != NULL; ctx = ctx->outer)
2231 {
2232 gimple *stmt = ctx->stmt;
2233 if (gimple_code (stmt) == GIMPLE_OMP_TARGET
2234 && gimple_omp_target_kind (stmt) == GF_OMP_TARGET_KIND_OACC_KERNELS)
2235 return true;
2236 }
2237
2238 return false;
2239 }
2240
2241 /* Check the parallelism clauses inside a kernels regions.
2242 Until kernels handling moves to use the same loop indirection
2243 scheme as parallel, we need to do this checking early. */
2244
2245 static unsigned
2246 check_oacc_kernel_gwv (gomp_for *stmt, omp_context *ctx)
2247 {
2248 bool checking = true;
2249 unsigned outer_mask = 0;
2250 unsigned this_mask = 0;
2251 bool has_seq = false, has_auto = false;
2252
2253 if (ctx->outer)
2254 outer_mask = check_oacc_kernel_gwv (NULL, ctx->outer);
2255 if (!stmt)
2256 {
2257 checking = false;
2258 if (gimple_code (ctx->stmt) != GIMPLE_OMP_FOR)
2259 return outer_mask;
2260 stmt = as_a <gomp_for *> (ctx->stmt);
2261 }
2262
2263 for (tree c = gimple_omp_for_clauses (stmt); c; c = OMP_CLAUSE_CHAIN (c))
2264 {
2265 switch (OMP_CLAUSE_CODE (c))
2266 {
2267 case OMP_CLAUSE_GANG:
2268 this_mask |= GOMP_DIM_MASK (GOMP_DIM_GANG);
2269 break;
2270 case OMP_CLAUSE_WORKER:
2271 this_mask |= GOMP_DIM_MASK (GOMP_DIM_WORKER);
2272 break;
2273 case OMP_CLAUSE_VECTOR:
2274 this_mask |= GOMP_DIM_MASK (GOMP_DIM_VECTOR);
2275 break;
2276 case OMP_CLAUSE_SEQ:
2277 has_seq = true;
2278 break;
2279 case OMP_CLAUSE_AUTO:
2280 has_auto = true;
2281 break;
2282 default:
2283 break;
2284 }
2285 }
2286
2287 if (checking)
2288 {
2289 if (has_seq && (this_mask || has_auto))
2290 error_at (gimple_location (stmt), "%<seq%> overrides other"
2291 " OpenACC loop specifiers");
2292 else if (has_auto && this_mask)
2293 error_at (gimple_location (stmt), "%<auto%> conflicts with other"
2294 " OpenACC loop specifiers");
2295
2296 if (this_mask & outer_mask)
2297 error_at (gimple_location (stmt), "inner loop uses same"
2298 " OpenACC parallelism as containing loop");
2299 }
2300
2301 return outer_mask | this_mask;
2302 }
2303
/* Scan a GIMPLE_OMP_FOR.  Creates a new omp_context for the loop,
   performs OpenACC-specific clause diagnostics and stripping, then
   scans the clauses, pre-body, loop bounds and body.  Returns the
   new context.  */

static omp_context *
scan_omp_for (gomp_for *stmt, omp_context *outer_ctx)
{
  omp_context *ctx;
  size_t i;
  tree clauses = gimple_omp_for_clauses (stmt);

  ctx = new_omp_context (stmt, outer_ctx);

  if (is_gimple_omp_oacc (stmt))
    {
      omp_context *tgt = enclosing_target_ctx (outer_ctx);

      /* Inside an OpenACC parallel region (or an orphaned loop),
	 gang/worker/vector clauses may not carry arguments.  */
      if (!tgt || is_oacc_parallel (tgt))
	for (tree c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
	  {
	    char const *check = NULL;

	    switch (OMP_CLAUSE_CODE (c))
	      {
	      case OMP_CLAUSE_GANG:
		check = "gang";
		break;

	      case OMP_CLAUSE_WORKER:
		check = "worker";
		break;

	      case OMP_CLAUSE_VECTOR:
		check = "vector";
		break;

	      default:
		break;
	      }

	    if (check && OMP_CLAUSE_OPERAND (c, 0))
	      error_at (gimple_location (stmt),
			"argument not permitted on %qs clause in"
			" OpenACC %<parallel%>", check);
	  }

      if (tgt && is_oacc_kernels (tgt))
	{
	  /* Strip out reductions, as they are not handled yet.  */
	  tree *prev_ptr = &clauses;

	  /* Classic unlink-in-place walk over the clause chain.  */
	  while (tree probe = *prev_ptr)
	    {
	      tree *next_ptr = &OMP_CLAUSE_CHAIN (probe);

	      if (OMP_CLAUSE_CODE (probe) == OMP_CLAUSE_REDUCTION)
		*prev_ptr = *next_ptr;
	      else
		prev_ptr = next_ptr;
	    }

	  /* The chain head may have changed; store it back.  */
	  gimple_omp_for_set_clauses (stmt, clauses);
	  check_oacc_kernel_gwv (stmt, ctx);
	}
    }

  scan_sharing_clauses (clauses, ctx);

  scan_omp (gimple_omp_for_pre_body_ptr (stmt), ctx);
  /* Scan index, bounds and increment of each collapsed dimension.  */
  for (i = 0; i < gimple_omp_for_collapse (stmt); i++)
    {
      scan_omp_op (gimple_omp_for_index_ptr (stmt, i), ctx);
      scan_omp_op (gimple_omp_for_initial_ptr (stmt, i), ctx);
      scan_omp_op (gimple_omp_for_final_ptr (stmt, i), ctx);
      scan_omp_op (gimple_omp_for_incr_ptr (stmt, i), ctx);
    }
  scan_omp (gimple_omp_body_ptr (stmt), ctx);
  return ctx;
}
2381
/* Duplicate #pragma omp simd, one for SIMT, another one for SIMD.
   Replaces the statement at *GSI with a GIMPLE_BIND of the form

     cond = GOMP_USE_SIMT ();
     if (cond != 0) goto lab1; else goto lab2;
     lab1: <copy of STMT with an _simt_ clause>  goto lab3;
     lab2: <original STMT>
     lab3:

   so the choice between the SIMT and SIMD variant is made at run
   time, and scans both loop copies.  */

static void
scan_omp_simd (gimple_stmt_iterator *gsi, gomp_for *stmt,
	       omp_context *outer_ctx)
{
  gbind *bind = gimple_build_bind (NULL, NULL, NULL);
  gsi_replace (gsi, bind, false);
  gimple_seq seq = NULL;
  /* cond = GOMP_USE_SIMT ();  */
  gimple *g = gimple_build_call_internal (IFN_GOMP_USE_SIMT, 0);
  tree cond = create_tmp_var_raw (integer_type_node);
  DECL_CONTEXT (cond) = current_function_decl;
  DECL_SEEN_IN_BIND_EXPR_P (cond) = 1;
  gimple_bind_set_vars (bind, cond);
  gimple_call_set_lhs (g, cond);
  gimple_seq_add_stmt (&seq, g);
  tree lab1 = create_artificial_label (UNKNOWN_LOCATION);
  tree lab2 = create_artificial_label (UNKNOWN_LOCATION);
  tree lab3 = create_artificial_label (UNKNOWN_LOCATION);
  g = gimple_build_cond (NE_EXPR, cond, integer_zero_node, lab1, lab2);
  gimple_seq_add_stmt (&seq, g);
  g = gimple_build_label (lab1);
  gimple_seq_add_stmt (&seq, g);
  /* Deep-copy the loop for the SIMT branch, with fresh locals.  */
  gimple_seq new_seq = copy_gimple_seq_and_replace_locals (stmt);
  gomp_for *new_stmt = as_a <gomp_for *> (new_seq);
  /* Prepend an _simt_ clause so later passes know this is the SIMT
     variant.  */
  tree clause = build_omp_clause (gimple_location (stmt), OMP_CLAUSE__SIMT_);
  OMP_CLAUSE_CHAIN (clause) = gimple_omp_for_clauses (new_stmt);
  gimple_omp_for_set_clauses (new_stmt, clause);
  gimple_seq_add_stmt (&seq, new_stmt);
  g = gimple_build_goto (lab3);
  gimple_seq_add_stmt (&seq, g);
  g = gimple_build_label (lab2);
  gimple_seq_add_stmt (&seq, g);
  /* The original loop becomes the SIMD (else) branch.  */
  gimple_seq_add_stmt (&seq, stmt);
  g = gimple_build_label (lab3);
  gimple_seq_add_stmt (&seq, g);
  gimple_bind_set_body (bind, seq);
  update_stmt (bind);
  /* Scan both copies; link the SIMD copy to its SIMT sibling.  */
  scan_omp_for (new_stmt, outer_ctx);
  scan_omp_for (stmt, outer_ctx)->simt_stmt = new_stmt;
}
2423
2424 /* Scan an OpenMP sections directive. */
2425
2426 static void
2427 scan_omp_sections (gomp_sections *stmt, omp_context *outer_ctx)
2428 {
2429 omp_context *ctx;
2430
2431 ctx = new_omp_context (stmt, outer_ctx);
2432 scan_sharing_clauses (gimple_omp_sections_clauses (stmt), ctx);
2433 scan_omp (gimple_omp_body_ptr (stmt), ctx);
2434 }
2435
2436 /* Scan an OpenMP single directive. */
2437
2438 static void
2439 scan_omp_single (gomp_single *stmt, omp_context *outer_ctx)
2440 {
2441 omp_context *ctx;
2442 tree name;
2443
2444 ctx = new_omp_context (stmt, outer_ctx);
2445 ctx->field_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
2446 ctx->record_type = lang_hooks.types.make_type (RECORD_TYPE);
2447 name = create_tmp_var_name (".omp_copy_s");
2448 name = build_decl (gimple_location (stmt),
2449 TYPE_DECL, name, ctx->record_type);
2450 TYPE_NAME (ctx->record_type) = name;
2451
2452 scan_sharing_clauses (gimple_omp_single_clauses (stmt), ctx);
2453 scan_omp (gimple_omp_body_ptr (stmt), ctx);
2454
2455 if (TYPE_FIELDS (ctx->record_type) == NULL)
2456 ctx->record_type = NULL;
2457 else
2458 layout_type (ctx->record_type);
2459 }
2460
/* Scan a GIMPLE_OMP_TARGET.  Builds the data-mapping record type,
   creates the offload child function when STMT is an offloaded
   construct, and scans clauses and body.  */

static void
scan_omp_target (gomp_target *stmt, omp_context *outer_ctx)
{
  omp_context *ctx;
  tree name;
  /* True for constructs that produce a separate offloaded function
     (target region, OpenACC parallel/kernels), false for pure data
     constructs.  */
  bool offloaded = is_gimple_omp_offloaded (stmt);
  tree clauses = gimple_omp_target_clauses (stmt);

  ctx = new_omp_context (stmt, outer_ctx);
  ctx->field_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
  ctx->record_type = lang_hooks.types.make_type (RECORD_TYPE);
  name = create_tmp_var_name (".omp_data_t");
  name = build_decl (gimple_location (stmt),
		     TYPE_DECL, name, ctx->record_type);
  DECL_ARTIFICIAL (name) = 1;
  DECL_NAMELESS (name) = 1;
  TYPE_NAME (ctx->record_type) = name;
  TYPE_ARTIFICIAL (ctx->record_type) = 1;

  if (offloaded)
    {
      create_omp_child_function (ctx, false);
      gimple_omp_target_set_child_fn (stmt, ctx->cb.dst_fn);
    }

  scan_sharing_clauses (clauses, ctx);
  scan_omp (gimple_omp_body_ptr (stmt), ctx);

  if (TYPE_FIELDS (ctx->record_type) == NULL)
    /* No mapped data: drop the record and receiver decl.  */
    ctx->record_type = ctx->receiver_decl = NULL;
  else
    {
      /* Fields were pushed in reverse; restore declaration order.  */
      TYPE_FIELDS (ctx->record_type)
	= nreverse (TYPE_FIELDS (ctx->record_type));
      if (flag_checking)
	{
	  /* All mapped fields are expected to share one alignment;
	     verify that in checking builds.  */
	  unsigned int align = DECL_ALIGN (TYPE_FIELDS (ctx->record_type));
	  for (tree field = TYPE_FIELDS (ctx->record_type);
	       field;
	       field = DECL_CHAIN (field))
	    gcc_assert (DECL_ALIGN (field) == align);
	}
      layout_type (ctx->record_type);
      if (offloaded)
	fixup_child_record_type (ctx);
    }
}
2510
/* Scan an OpenMP teams directive.  A teams construct nested in a
   target is scanned like any other construct; a host teams construct
   gets its own child function and data-sharing record, similar to
   parallel/task.  */

static void
scan_omp_teams (gomp_teams *stmt, omp_context *outer_ctx)
{
  omp_context *ctx = new_omp_context (stmt, outer_ctx);

  if (!gimple_omp_teams_host (stmt))
    {
      /* Non-host teams need no outlining here; just scan through.  */
      scan_sharing_clauses (gimple_omp_teams_clauses (stmt), ctx);
      scan_omp (gimple_omp_body_ptr (stmt), ctx);
      return;
    }
  /* Host teams: record the context for finish_taskreg_scan.  */
  taskreg_contexts.safe_push (ctx);
  /* Host teams can only appear outside any other OpenMP construct.  */
  gcc_assert (taskreg_nesting_level == 1);
  ctx->field_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
  ctx->record_type = lang_hooks.types.make_type (RECORD_TYPE);
  tree name = create_tmp_var_name (".omp_data_s");
  name = build_decl (gimple_location (stmt),
		     TYPE_DECL, name, ctx->record_type);
  DECL_ARTIFICIAL (name) = 1;
  DECL_NAMELESS (name) = 1;
  TYPE_NAME (ctx->record_type) = name;
  TYPE_ARTIFICIAL (ctx->record_type) = 1;
  create_omp_child_function (ctx, false);
  gimple_omp_teams_set_child_fn (stmt, ctx->cb.dst_fn);

  scan_sharing_clauses (gimple_omp_teams_clauses (stmt), ctx);
  scan_omp (gimple_omp_body_ptr (stmt), ctx);

  /* Drop the record if nothing needs to be passed to the child.  */
  if (TYPE_FIELDS (ctx->record_type) == NULL)
    ctx->record_type = ctx->receiver_decl = NULL;
}
2544
2545 /* Check nesting restrictions. */
2546 static bool
2547 check_omp_nesting_restrictions (gimple *stmt, omp_context *ctx)
2548 {
2549 tree c;
2550
2551 if (ctx && gimple_code (ctx->stmt) == GIMPLE_OMP_GRID_BODY)
2552 /* GRID_BODY is an artificial construct, nesting rules will be checked in
2553 the original copy of its contents. */
2554 return true;
2555
2556 /* No nesting of non-OpenACC STMT (that is, an OpenMP one, or a GOMP builtin)
2557 inside an OpenACC CTX. */
2558 if (!(is_gimple_omp (stmt)
2559 && is_gimple_omp_oacc (stmt))
2560 /* Except for atomic codes that we share with OpenMP. */
2561 && !(gimple_code (stmt) == GIMPLE_OMP_ATOMIC_LOAD
2562 || gimple_code (stmt) == GIMPLE_OMP_ATOMIC_STORE))
2563 {
2564 if (oacc_get_fn_attrib (cfun->decl) != NULL)
2565 {
2566 error_at (gimple_location (stmt),
2567 "non-OpenACC construct inside of OpenACC routine");
2568 return false;
2569 }
2570 else
2571 for (omp_context *octx = ctx; octx != NULL; octx = octx->outer)
2572 if (is_gimple_omp (octx->stmt)
2573 && is_gimple_omp_oacc (octx->stmt))
2574 {
2575 error_at (gimple_location (stmt),
2576 "non-OpenACC construct inside of OpenACC region");
2577 return false;
2578 }
2579 }
2580
2581 if (ctx != NULL)
2582 {
2583 if (gimple_code (ctx->stmt) == GIMPLE_OMP_SCAN
2584 && ctx->outer
2585 && gimple_code (ctx->outer->stmt) == GIMPLE_OMP_FOR)
2586 ctx = ctx->outer;
2587 if (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
2588 && gimple_omp_for_kind (ctx->stmt) & GF_OMP_FOR_SIMD)
2589 {
2590 c = NULL_TREE;
2591 if (gimple_code (stmt) == GIMPLE_OMP_ORDERED)
2592 {
2593 c = gimple_omp_ordered_clauses (as_a <gomp_ordered *> (stmt));
2594 if (omp_find_clause (c, OMP_CLAUSE_SIMD))
2595 {
2596 if (omp_find_clause (c, OMP_CLAUSE_THREADS)
2597 && (ctx->outer == NULL
2598 || !gimple_omp_for_combined_into_p (ctx->stmt)
2599 || gimple_code (ctx->outer->stmt) != GIMPLE_OMP_FOR
2600 || (gimple_omp_for_kind (ctx->outer->stmt)
2601 != GF_OMP_FOR_KIND_FOR)
2602 || !gimple_omp_for_combined_p (ctx->outer->stmt)))
2603 {
2604 error_at (gimple_location (stmt),
2605 "%<ordered simd threads%> must be closely "
2606 "nested inside of %<for simd%> region");
2607 return false;
2608 }
2609 return true;
2610 }
2611 }
2612 else if (gimple_code (stmt) == GIMPLE_OMP_ATOMIC_LOAD
2613 || gimple_code (stmt) == GIMPLE_OMP_ATOMIC_STORE
2614 || gimple_code (stmt) == GIMPLE_OMP_SCAN)
2615 return true;
2616 error_at (gimple_location (stmt),
2617 "OpenMP constructs other than %<#pragma omp ordered simd%>"
2618 " or %<#pragma omp atomic%> may not be nested inside"
2619 " %<simd%> region");
2620 return false;
2621 }
2622 else if (gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS)
2623 {
2624 if ((gimple_code (stmt) != GIMPLE_OMP_FOR
2625 || ((gimple_omp_for_kind (stmt) != GF_OMP_FOR_KIND_DISTRIBUTE)
2626 && (gimple_omp_for_kind (stmt) != GF_OMP_FOR_KIND_GRID_LOOP)))
2627 && gimple_code (stmt) != GIMPLE_OMP_PARALLEL)
2628 {
2629 error_at (gimple_location (stmt),
2630 "only %<distribute%> or %<parallel%> regions are "
2631 "allowed to be strictly nested inside %<teams%> "
2632 "region");
2633 return false;
2634 }
2635 }
2636 }
2637 switch (gimple_code (stmt))
2638 {
2639 case GIMPLE_OMP_FOR:
2640 if (gimple_omp_for_kind (stmt) & GF_OMP_FOR_SIMD)
2641 return true;
2642 if (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_DISTRIBUTE)
2643 {
2644 if (ctx != NULL && gimple_code (ctx->stmt) != GIMPLE_OMP_TEAMS)
2645 {
2646 error_at (gimple_location (stmt),
2647 "%<distribute%> region must be strictly nested "
2648 "inside %<teams%> construct");
2649 return false;
2650 }
2651 return true;
2652 }
2653 /* We split taskloop into task and nested taskloop in it. */
2654 if (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_TASKLOOP)
2655 return true;
2656 if (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_OACC_LOOP)
2657 {
2658 bool ok = false;
2659
2660 if (ctx)
2661 switch (gimple_code (ctx->stmt))
2662 {
2663 case GIMPLE_OMP_FOR:
2664 ok = (gimple_omp_for_kind (ctx->stmt)
2665 == GF_OMP_FOR_KIND_OACC_LOOP);
2666 break;
2667
2668 case GIMPLE_OMP_TARGET:
2669 switch (gimple_omp_target_kind (ctx->stmt))
2670 {
2671 case GF_OMP_TARGET_KIND_OACC_PARALLEL:
2672 case GF_OMP_TARGET_KIND_OACC_KERNELS:
2673 ok = true;
2674 break;
2675
2676 default:
2677 break;
2678 }
2679
2680 default:
2681 break;
2682 }
2683 else if (oacc_get_fn_attrib (current_function_decl))
2684 ok = true;
2685 if (!ok)
2686 {
2687 error_at (gimple_location (stmt),
2688 "OpenACC loop directive must be associated with"
2689 " an OpenACC compute region");
2690 return false;
2691 }
2692 }
2693 /* FALLTHRU */
2694 case GIMPLE_CALL:
2695 if (is_gimple_call (stmt)
2696 && (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
2697 == BUILT_IN_GOMP_CANCEL
2698 || DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
2699 == BUILT_IN_GOMP_CANCELLATION_POINT))
2700 {
2701 const char *bad = NULL;
2702 const char *kind = NULL;
2703 const char *construct
2704 = (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
2705 == BUILT_IN_GOMP_CANCEL)
2706 ? "#pragma omp cancel"
2707 : "#pragma omp cancellation point";
2708 if (ctx == NULL)
2709 {
2710 error_at (gimple_location (stmt), "orphaned %qs construct",
2711 construct);
2712 return false;
2713 }
2714 switch (tree_fits_shwi_p (gimple_call_arg (stmt, 0))
2715 ? tree_to_shwi (gimple_call_arg (stmt, 0))
2716 : 0)
2717 {
2718 case 1:
2719 if (gimple_code (ctx->stmt) != GIMPLE_OMP_PARALLEL)
2720 bad = "#pragma omp parallel";
2721 else if (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
2722 == BUILT_IN_GOMP_CANCEL
2723 && !integer_zerop (gimple_call_arg (stmt, 1)))
2724 ctx->cancellable = true;
2725 kind = "parallel";
2726 break;
2727 case 2:
2728 if (gimple_code (ctx->stmt) != GIMPLE_OMP_FOR
2729 || gimple_omp_for_kind (ctx->stmt) != GF_OMP_FOR_KIND_FOR)
2730 bad = "#pragma omp for";
2731 else if (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
2732 == BUILT_IN_GOMP_CANCEL
2733 && !integer_zerop (gimple_call_arg (stmt, 1)))
2734 {
2735 ctx->cancellable = true;
2736 if (omp_find_clause (gimple_omp_for_clauses (ctx->stmt),
2737 OMP_CLAUSE_NOWAIT))
2738 warning_at (gimple_location (stmt), 0,
2739 "%<#pragma omp cancel for%> inside "
2740 "%<nowait%> for construct");
2741 if (omp_find_clause (gimple_omp_for_clauses (ctx->stmt),
2742 OMP_CLAUSE_ORDERED))
2743 warning_at (gimple_location (stmt), 0,
2744 "%<#pragma omp cancel for%> inside "
2745 "%<ordered%> for construct");
2746 }
2747 kind = "for";
2748 break;
2749 case 4:
2750 if (gimple_code (ctx->stmt) != GIMPLE_OMP_SECTIONS
2751 && gimple_code (ctx->stmt) != GIMPLE_OMP_SECTION)
2752 bad = "#pragma omp sections";
2753 else if (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
2754 == BUILT_IN_GOMP_CANCEL
2755 && !integer_zerop (gimple_call_arg (stmt, 1)))
2756 {
2757 if (gimple_code (ctx->stmt) == GIMPLE_OMP_SECTIONS)
2758 {
2759 ctx->cancellable = true;
2760 if (omp_find_clause (gimple_omp_sections_clauses
2761 (ctx->stmt),
2762 OMP_CLAUSE_NOWAIT))
2763 warning_at (gimple_location (stmt), 0,
2764 "%<#pragma omp cancel sections%> inside "
2765 "%<nowait%> sections construct");
2766 }
2767 else
2768 {
2769 gcc_assert (ctx->outer
2770 && gimple_code (ctx->outer->stmt)
2771 == GIMPLE_OMP_SECTIONS);
2772 ctx->outer->cancellable = true;
2773 if (omp_find_clause (gimple_omp_sections_clauses
2774 (ctx->outer->stmt),
2775 OMP_CLAUSE_NOWAIT))
2776 warning_at (gimple_location (stmt), 0,
2777 "%<#pragma omp cancel sections%> inside "
2778 "%<nowait%> sections construct");
2779 }
2780 }
2781 kind = "sections";
2782 break;
2783 case 8:
2784 if (!is_task_ctx (ctx)
2785 && (!is_taskloop_ctx (ctx)
2786 || ctx->outer == NULL
2787 || !is_task_ctx (ctx->outer)))
2788 bad = "#pragma omp task";
2789 else
2790 {
2791 for (omp_context *octx = ctx->outer;
2792 octx; octx = octx->outer)
2793 {
2794 switch (gimple_code (octx->stmt))
2795 {
2796 case GIMPLE_OMP_TASKGROUP:
2797 break;
2798 case GIMPLE_OMP_TARGET:
2799 if (gimple_omp_target_kind (octx->stmt)
2800 != GF_OMP_TARGET_KIND_REGION)
2801 continue;
2802 /* FALLTHRU */
2803 case GIMPLE_OMP_PARALLEL:
2804 case GIMPLE_OMP_TEAMS:
2805 error_at (gimple_location (stmt),
2806 "%<%s taskgroup%> construct not closely "
2807 "nested inside of %<taskgroup%> region",
2808 construct);
2809 return false;
2810 case GIMPLE_OMP_TASK:
2811 if (gimple_omp_task_taskloop_p (octx->stmt)
2812 && octx->outer
2813 && is_taskloop_ctx (octx->outer))
2814 {
2815 tree clauses
2816 = gimple_omp_for_clauses (octx->outer->stmt);
2817 if (!omp_find_clause (clauses, OMP_CLAUSE_NOGROUP))
2818 break;
2819 }
2820 continue;
2821 default:
2822 continue;
2823 }
2824 break;
2825 }
2826 ctx->cancellable = true;
2827 }
2828 kind = "taskgroup";
2829 break;
2830 default:
2831 error_at (gimple_location (stmt), "invalid arguments");
2832 return false;
2833 }
2834 if (bad)
2835 {
2836 error_at (gimple_location (stmt),
2837 "%<%s %s%> construct not closely nested inside of %qs",
2838 construct, kind, bad);
2839 return false;
2840 }
2841 }
2842 /* FALLTHRU */
2843 case GIMPLE_OMP_SECTIONS:
2844 case GIMPLE_OMP_SINGLE:
2845 for (; ctx != NULL; ctx = ctx->outer)
2846 switch (gimple_code (ctx->stmt))
2847 {
2848 case GIMPLE_OMP_FOR:
2849 if (gimple_omp_for_kind (ctx->stmt) != GF_OMP_FOR_KIND_FOR
2850 && gimple_omp_for_kind (ctx->stmt) != GF_OMP_FOR_KIND_TASKLOOP)
2851 break;
2852 /* FALLTHRU */
2853 case GIMPLE_OMP_SECTIONS:
2854 case GIMPLE_OMP_SINGLE:
2855 case GIMPLE_OMP_ORDERED:
2856 case GIMPLE_OMP_MASTER:
2857 case GIMPLE_OMP_TASK:
2858 case GIMPLE_OMP_CRITICAL:
2859 if (is_gimple_call (stmt))
2860 {
2861 if (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
2862 != BUILT_IN_GOMP_BARRIER)
2863 return true;
2864 error_at (gimple_location (stmt),
2865 "barrier region may not be closely nested inside "
2866 "of work-sharing, %<critical%>, %<ordered%>, "
2867 "%<master%>, explicit %<task%> or %<taskloop%> "
2868 "region");
2869 return false;
2870 }
2871 error_at (gimple_location (stmt),
2872 "work-sharing region may not be closely nested inside "
2873 "of work-sharing, %<critical%>, %<ordered%>, "
2874 "%<master%>, explicit %<task%> or %<taskloop%> region");
2875 return false;
2876 case GIMPLE_OMP_PARALLEL:
2877 case GIMPLE_OMP_TEAMS:
2878 return true;
2879 case GIMPLE_OMP_TARGET:
2880 if (gimple_omp_target_kind (ctx->stmt)
2881 == GF_OMP_TARGET_KIND_REGION)
2882 return true;
2883 break;
2884 default:
2885 break;
2886 }
2887 break;
2888 case GIMPLE_OMP_MASTER:
2889 for (; ctx != NULL; ctx = ctx->outer)
2890 switch (gimple_code (ctx->stmt))
2891 {
2892 case GIMPLE_OMP_FOR:
2893 if (gimple_omp_for_kind (ctx->stmt) != GF_OMP_FOR_KIND_FOR
2894 && gimple_omp_for_kind (ctx->stmt) != GF_OMP_FOR_KIND_TASKLOOP)
2895 break;
2896 /* FALLTHRU */
2897 case GIMPLE_OMP_SECTIONS:
2898 case GIMPLE_OMP_SINGLE:
2899 case GIMPLE_OMP_TASK:
2900 error_at (gimple_location (stmt),
2901 "%<master%> region may not be closely nested inside "
2902 "of work-sharing, explicit %<task%> or %<taskloop%> "
2903 "region");
2904 return false;
2905 case GIMPLE_OMP_PARALLEL:
2906 case GIMPLE_OMP_TEAMS:
2907 return true;
2908 case GIMPLE_OMP_TARGET:
2909 if (gimple_omp_target_kind (ctx->stmt)
2910 == GF_OMP_TARGET_KIND_REGION)
2911 return true;
2912 break;
2913 default:
2914 break;
2915 }
2916 break;
2917 case GIMPLE_OMP_TASK:
2918 for (c = gimple_omp_task_clauses (stmt); c; c = OMP_CLAUSE_CHAIN (c))
2919 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND
2920 && (OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SOURCE
2921 || OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SINK))
2922 {
2923 enum omp_clause_depend_kind kind = OMP_CLAUSE_DEPEND_KIND (c);
2924 error_at (OMP_CLAUSE_LOCATION (c),
2925 "%<depend(%s)%> is only allowed in %<omp ordered%>",
2926 kind == OMP_CLAUSE_DEPEND_SOURCE ? "source" : "sink");
2927 return false;
2928 }
2929 break;
2930 case GIMPLE_OMP_ORDERED:
2931 for (c = gimple_omp_ordered_clauses (as_a <gomp_ordered *> (stmt));
2932 c; c = OMP_CLAUSE_CHAIN (c))
2933 {
2934 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_DEPEND)
2935 {
2936 gcc_assert (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_THREADS
2937 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SIMD);
2938 continue;
2939 }
2940 enum omp_clause_depend_kind kind = OMP_CLAUSE_DEPEND_KIND (c);
2941 if (kind == OMP_CLAUSE_DEPEND_SOURCE
2942 || kind == OMP_CLAUSE_DEPEND_SINK)
2943 {
2944 tree oclause;
2945 /* Look for containing ordered(N) loop. */
2946 if (ctx == NULL
2947 || gimple_code (ctx->stmt) != GIMPLE_OMP_FOR
2948 || (oclause
2949 = omp_find_clause (gimple_omp_for_clauses (ctx->stmt),
2950 OMP_CLAUSE_ORDERED)) == NULL_TREE)
2951 {
2952 error_at (OMP_CLAUSE_LOCATION (c),
2953 "%<ordered%> construct with %<depend%> clause "
2954 "must be closely nested inside an %<ordered%> "
2955 "loop");
2956 return false;
2957 }
2958 else if (OMP_CLAUSE_ORDERED_EXPR (oclause) == NULL_TREE)
2959 {
2960 error_at (OMP_CLAUSE_LOCATION (c),
2961 "%<ordered%> construct with %<depend%> clause "
2962 "must be closely nested inside a loop with "
2963 "%<ordered%> clause with a parameter");
2964 return false;
2965 }
2966 }
2967 else
2968 {
2969 error_at (OMP_CLAUSE_LOCATION (c),
2970 "invalid depend kind in omp %<ordered%> %<depend%>");
2971 return false;
2972 }
2973 }
2974 c = gimple_omp_ordered_clauses (as_a <gomp_ordered *> (stmt));
2975 if (omp_find_clause (c, OMP_CLAUSE_SIMD))
2976 {
2977 /* ordered simd must be closely nested inside of simd region,
2978 and simd region must not encounter constructs other than
2979 ordered simd, therefore ordered simd may be either orphaned,
2980 or ctx->stmt must be simd. The latter case is handled already
2981 earlier. */
2982 if (ctx != NULL)
2983 {
2984 error_at (gimple_location (stmt),
2985 "%<ordered%> %<simd%> must be closely nested inside "
2986 "%<simd%> region");
2987 return false;
2988 }
2989 }
2990 for (; ctx != NULL; ctx = ctx->outer)
2991 switch (gimple_code (ctx->stmt))
2992 {
2993 case GIMPLE_OMP_CRITICAL:
2994 case GIMPLE_OMP_TASK:
2995 case GIMPLE_OMP_ORDERED:
2996 ordered_in_taskloop:
2997 error_at (gimple_location (stmt),
2998 "%<ordered%> region may not be closely nested inside "
2999 "of %<critical%>, %<ordered%>, explicit %<task%> or "
3000 "%<taskloop%> region");
3001 return false;
3002 case GIMPLE_OMP_FOR:
3003 if (gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_TASKLOOP)
3004 goto ordered_in_taskloop;
3005 tree o;
3006 o = omp_find_clause (gimple_omp_for_clauses (ctx->stmt),
3007 OMP_CLAUSE_ORDERED);
3008 if (o == NULL)
3009 {
3010 error_at (gimple_location (stmt),
3011 "%<ordered%> region must be closely nested inside "
3012 "a loop region with an %<ordered%> clause");
3013 return false;
3014 }
3015 if (OMP_CLAUSE_ORDERED_EXPR (o) != NULL_TREE
3016 && omp_find_clause (c, OMP_CLAUSE_DEPEND) == NULL_TREE)
3017 {
3018 error_at (gimple_location (stmt),
3019 "%<ordered%> region without %<depend%> clause may "
3020 "not be closely nested inside a loop region with "
3021 "an %<ordered%> clause with a parameter");
3022 return false;
3023 }
3024 return true;
3025 case GIMPLE_OMP_TARGET:
3026 if (gimple_omp_target_kind (ctx->stmt)
3027 != GF_OMP_TARGET_KIND_REGION)
3028 break;
3029 /* FALLTHRU */
3030 case GIMPLE_OMP_PARALLEL:
3031 case GIMPLE_OMP_TEAMS:
3032 error_at (gimple_location (stmt),
3033 "%<ordered%> region must be closely nested inside "
3034 "a loop region with an %<ordered%> clause");
3035 return false;
3036 default:
3037 break;
3038 }
3039 break;
3040 case GIMPLE_OMP_CRITICAL:
3041 {
3042 tree this_stmt_name
3043 = gimple_omp_critical_name (as_a <gomp_critical *> (stmt));
3044 for (; ctx != NULL; ctx = ctx->outer)
3045 if (gomp_critical *other_crit
3046 = dyn_cast <gomp_critical *> (ctx->stmt))
3047 if (this_stmt_name == gimple_omp_critical_name (other_crit))
3048 {
3049 error_at (gimple_location (stmt),
3050 "%<critical%> region may not be nested inside "
3051 "a %<critical%> region with the same name");
3052 return false;
3053 }
3054 }
3055 break;
3056 case GIMPLE_OMP_TEAMS:
3057 if (ctx == NULL)
3058 break;
3059 else if (gimple_code (ctx->stmt) != GIMPLE_OMP_TARGET
3060 || (gimple_omp_target_kind (ctx->stmt)
3061 != GF_OMP_TARGET_KIND_REGION))
3062 {
3063 /* Teams construct can appear either strictly nested inside of
3064 target construct with no intervening stmts, or can be encountered
3065 only by initial task (so must not appear inside any OpenMP
3066 construct. */
3067 error_at (gimple_location (stmt),
3068 "%<teams%> construct must be closely nested inside of "
3069 "%<target%> construct or not nested in any OpenMP "
3070 "construct");
3071 return false;
3072 }
3073 break;
3074 case GIMPLE_OMP_TARGET:
3075 for (c = gimple_omp_target_clauses (stmt); c; c = OMP_CLAUSE_CHAIN (c))
3076 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND
3077 && (OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SOURCE
3078 || OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SINK))
3079 {
3080 enum omp_clause_depend_kind kind = OMP_CLAUSE_DEPEND_KIND (c);
3081 error_at (OMP_CLAUSE_LOCATION (c),
3082 "%<depend(%s)%> is only allowed in %<omp ordered%>",
3083 kind == OMP_CLAUSE_DEPEND_SOURCE ? "source" : "sink");
3084 return false;
3085 }
3086 if (is_gimple_omp_offloaded (stmt)
3087 && oacc_get_fn_attrib (cfun->decl) != NULL)
3088 {
3089 error_at (gimple_location (stmt),
3090 "OpenACC region inside of OpenACC routine, nested "
3091 "parallelism not supported yet");
3092 return false;
3093 }
3094 for (; ctx != NULL; ctx = ctx->outer)
3095 {
3096 if (gimple_code (ctx->stmt) != GIMPLE_OMP_TARGET)
3097 {
3098 if (is_gimple_omp (stmt)
3099 && is_gimple_omp_oacc (stmt)
3100 && is_gimple_omp (ctx->stmt))
3101 {
3102 error_at (gimple_location (stmt),
3103 "OpenACC construct inside of non-OpenACC region");
3104 return false;
3105 }
3106 continue;
3107 }
3108
3109 const char *stmt_name, *ctx_stmt_name;
3110 switch (gimple_omp_target_kind (stmt))
3111 {
3112 case GF_OMP_TARGET_KIND_REGION: stmt_name = "target"; break;
3113 case GF_OMP_TARGET_KIND_DATA: stmt_name = "target data"; break;
3114 case GF_OMP_TARGET_KIND_UPDATE: stmt_name = "target update"; break;
3115 case GF_OMP_TARGET_KIND_ENTER_DATA:
3116 stmt_name = "target enter data"; break;
3117 case GF_OMP_TARGET_KIND_EXIT_DATA:
3118 stmt_name = "target exit data"; break;
3119 case GF_OMP_TARGET_KIND_OACC_PARALLEL: stmt_name = "parallel"; break;
3120 case GF_OMP_TARGET_KIND_OACC_KERNELS: stmt_name = "kernels"; break;
3121 case GF_OMP_TARGET_KIND_OACC_DATA: stmt_name = "data"; break;
3122 case GF_OMP_TARGET_KIND_OACC_UPDATE: stmt_name = "update"; break;
3123 case GF_OMP_TARGET_KIND_OACC_ENTER_EXIT_DATA:
3124 stmt_name = "enter/exit data"; break;
3125 case GF_OMP_TARGET_KIND_OACC_DECLARE: stmt_name = "declare"; break;
3126 case GF_OMP_TARGET_KIND_OACC_HOST_DATA: stmt_name = "host_data";
3127 break;
3128 default: gcc_unreachable ();
3129 }
3130 switch (gimple_omp_target_kind (ctx->stmt))
3131 {
3132 case GF_OMP_TARGET_KIND_REGION: ctx_stmt_name = "target"; break;
3133 case GF_OMP_TARGET_KIND_DATA: ctx_stmt_name = "target data"; break;
3134 case GF_OMP_TARGET_KIND_OACC_PARALLEL:
3135 ctx_stmt_name = "parallel"; break;
3136 case GF_OMP_TARGET_KIND_OACC_KERNELS:
3137 ctx_stmt_name = "kernels"; break;
3138 case GF_OMP_TARGET_KIND_OACC_DATA: ctx_stmt_name = "data"; break;
3139 case GF_OMP_TARGET_KIND_OACC_HOST_DATA:
3140 ctx_stmt_name = "host_data"; break;
3141 default: gcc_unreachable ();
3142 }
3143
3144 /* OpenACC/OpenMP mismatch? */
3145 if (is_gimple_omp_oacc (stmt)
3146 != is_gimple_omp_oacc (ctx->stmt))
3147 {
3148 error_at (gimple_location (stmt),
3149 "%s %qs construct inside of %s %qs region",
3150 (is_gimple_omp_oacc (stmt)
3151 ? "OpenACC" : "OpenMP"), stmt_name,
3152 (is_gimple_omp_oacc (ctx->stmt)
3153 ? "OpenACC" : "OpenMP"), ctx_stmt_name);
3154 return false;
3155 }
3156 if (is_gimple_omp_offloaded (ctx->stmt))
3157 {
3158 /* No GIMPLE_OMP_TARGET inside offloaded OpenACC CTX. */
3159 if (is_gimple_omp_oacc (ctx->stmt))
3160 {
3161 error_at (gimple_location (stmt),
3162 "%qs construct inside of %qs region",
3163 stmt_name, ctx_stmt_name);
3164 return false;
3165 }
3166 else
3167 {
3168 warning_at (gimple_location (stmt), 0,
3169 "%qs construct inside of %qs region",
3170 stmt_name, ctx_stmt_name);
3171 }
3172 }
3173 }
3174 break;
3175 default:
3176 break;
3177 }
3178 return true;
3179 }
3180
3181
/* Helper function for scan_omp.

   Callback for walk_tree or operators in walk_gimple_stmt used to
   scan for OMP directives in TP.  Remaps decls and types through the
   copy-body info of the omp_context carried in WI->info (if any).  */

static tree
scan_omp_1_op (tree *tp, int *walk_subtrees, void *data)
{
  struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
  omp_context *ctx = (omp_context *) wi->info;
  tree t = *tp;

  switch (TREE_CODE (t))
    {
    case VAR_DECL:
    case PARM_DECL:
    case LABEL_DECL:
    case RESULT_DECL:
      if (ctx)
	{
	  /* Replace the decl with its mapping in CTX's copy tables.  */
	  tree repl = remap_decl (t, &ctx->cb);
	  gcc_checking_assert (TREE_CODE (repl) != ERROR_MARK);
	  *tp = repl;
	}
      break;

    default:
      if (ctx && TYPE_P (t))
	*tp = remap_type (t, &ctx->cb);
      else if (!DECL_P (t))
	{
	  *walk_subtrees = 1;
	  if (ctx)
	    {
	      /* If this expression's type was remapped, update the node.
		 INTEGER_CSTs are shared and must not be modified in
		 place, so rebuild them in the new type; any other node
		 gets its TREE_TYPE overwritten directly.  */
	      tree tem = remap_type (TREE_TYPE (t), &ctx->cb);
	      if (tem != TREE_TYPE (t))
		{
		  if (TREE_CODE (t) == INTEGER_CST)
		    *tp = wide_int_to_tree (tem, wi::to_wide (t));
		  else
		    TREE_TYPE (t) = tem;
		}
	    }
	}
      break;
    }

  return NULL_TREE;
}
3231
3232 /* Return true if FNDECL is a setjmp or a longjmp. */
3233
3234 static bool
3235 setjmp_or_longjmp_p (const_tree fndecl)
3236 {
3237 if (fndecl_built_in_p (fndecl, BUILT_IN_SETJMP)
3238 || fndecl_built_in_p (fndecl, BUILT_IN_LONGJMP))
3239 return true;
3240
3241 tree declname = DECL_NAME (fndecl);
3242 if (!declname)
3243 return false;
3244 const char *name = IDENTIFIER_POINTER (declname);
3245 return !strcmp (name, "setjmp") || !strcmp (name, "longjmp");
3246 }
3247
3248
/* Helper function for scan_omp.

   Callback for walk_gimple_stmt used to scan for OMP directives in
   the current statement in GSI.  Checks nesting restrictions (replacing
   invalidly nested statements with a nop) and dispatches each OMP
   construct to its dedicated scan_omp_* routine, creating contexts
   for the simpler constructs inline.  */

static tree
scan_omp_1_stmt (gimple_stmt_iterator *gsi, bool *handled_ops_p,
		 struct walk_stmt_info *wi)
{
  gimple *stmt = gsi_stmt (*gsi);
  omp_context *ctx = (omp_context *) wi->info;

  if (gimple_has_location (stmt))
    input_location = gimple_location (stmt);

  /* Check the nesting restrictions.  */
  bool remove = false;
  if (is_gimple_omp (stmt))
    remove = !check_omp_nesting_restrictions (stmt, ctx);
  else if (is_gimple_call (stmt))
    {
      tree fndecl = gimple_call_fndecl (stmt);
      if (fndecl)
	{
	  /* setjmp/longjmp cannot appear inside a simd region.  */
	  if (setjmp_or_longjmp_p (fndecl)
	      && ctx
	      && gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
	      && gimple_omp_for_kind (ctx->stmt) & GF_OMP_FOR_SIMD)
	    {
	      remove = true;
	      error_at (gimple_location (stmt),
			"setjmp/longjmp inside simd construct");
	    }
	  else if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
	    switch (DECL_FUNCTION_CODE (fndecl))
	      {
	      /* These GOMP builtins stand in for directives and are
		 subject to the same nesting checks.  */
	      case BUILT_IN_GOMP_BARRIER:
	      case BUILT_IN_GOMP_CANCEL:
	      case BUILT_IN_GOMP_CANCELLATION_POINT:
	      case BUILT_IN_GOMP_TASKYIELD:
	      case BUILT_IN_GOMP_TASKWAIT:
	      case BUILT_IN_GOMP_TASKGROUP_START:
	      case BUILT_IN_GOMP_TASKGROUP_END:
		remove = !check_omp_nesting_restrictions (stmt, ctx);
		break;
	      default:
		break;
	      }
	}
    }
  if (remove)
    {
      /* Invalidly nested statement was diagnosed above; drop it.  */
      stmt = gimple_build_nop ();
      gsi_replace (gsi, stmt, false);
    }

  *handled_ops_p = true;

  switch (gimple_code (stmt))
    {
    case GIMPLE_OMP_PARALLEL:
      taskreg_nesting_level++;
      scan_omp_parallel (gsi, ctx);
      taskreg_nesting_level--;
      break;

    case GIMPLE_OMP_TASK:
      taskreg_nesting_level++;
      scan_omp_task (gsi, ctx);
      taskreg_nesting_level--;
      break;

    case GIMPLE_OMP_FOR:
      /* Simd loops in a (maybe) offloaded context with SIMT support
	 get the dedicated SIMT scan.  */
      if ((gimple_omp_for_kind (as_a <gomp_for *> (stmt))
	   == GF_OMP_FOR_KIND_SIMD)
	  && omp_maybe_offloaded_ctx (ctx)
	  && omp_max_simt_vf ())
	scan_omp_simd (gsi, as_a <gomp_for *> (stmt), ctx);
      else
	scan_omp_for (as_a <gomp_for *> (stmt), ctx);
      break;

    case GIMPLE_OMP_SECTIONS:
      scan_omp_sections (as_a <gomp_sections *> (stmt), ctx);
      break;

    case GIMPLE_OMP_SINGLE:
      scan_omp_single (as_a <gomp_single *> (stmt), ctx);
      break;

    case GIMPLE_OMP_SCAN:
      /* Record in CTX whether the scan directive is inclusive or
	 exclusive, then fall through to create a context.  */
      if (tree clauses = gimple_omp_scan_clauses (as_a <gomp_scan *> (stmt)))
	{
	  if (OMP_CLAUSE_CODE (clauses) == OMP_CLAUSE_INCLUSIVE)
	    ctx->scan_inclusive = true;
	  else if (OMP_CLAUSE_CODE (clauses) == OMP_CLAUSE_EXCLUSIVE)
	    ctx->scan_exclusive = true;
	}
      /* FALLTHRU */
    case GIMPLE_OMP_SECTION:
    case GIMPLE_OMP_MASTER:
    case GIMPLE_OMP_ORDERED:
    case GIMPLE_OMP_CRITICAL:
    case GIMPLE_OMP_GRID_BODY:
      /* Simple constructs: just a new context and a body scan.  */
      ctx = new_omp_context (stmt, ctx);
      scan_omp (gimple_omp_body_ptr (stmt), ctx);
      break;

    case GIMPLE_OMP_TASKGROUP:
      ctx = new_omp_context (stmt, ctx);
      scan_sharing_clauses (gimple_omp_taskgroup_clauses (stmt), ctx);
      scan_omp (gimple_omp_body_ptr (stmt), ctx);
      break;

    case GIMPLE_OMP_TARGET:
      scan_omp_target (as_a <gomp_target *> (stmt), ctx);
      break;

    case GIMPLE_OMP_TEAMS:
      /* Host teams are a task-like region, so bump the taskreg
	 nesting level around the scan.  */
      if (gimple_omp_teams_host (as_a <gomp_teams *> (stmt)))
	{
	  taskreg_nesting_level++;
	  scan_omp_teams (as_a <gomp_teams *> (stmt), ctx);
	  taskreg_nesting_level--;
	}
      else
	scan_omp_teams (as_a <gomp_teams *> (stmt), ctx);
      break;

    case GIMPLE_BIND:
      {
	tree var;

	*handled_ops_p = false;
	/* Map bind-local vars to themselves so that remapping inside
	   CTX leaves them untouched.  */
	if (ctx)
	  for (var = gimple_bind_vars (as_a <gbind *> (stmt));
	       var ;
	       var = DECL_CHAIN (var))
	    insert_decl_map (&ctx->cb, var, var);
      }
      break;
    default:
      *handled_ops_p = false;
      break;
    }

  return NULL_TREE;
}
3397
3398
3399 /* Scan all the statements starting at the current statement. CTX
3400 contains context information about the OMP directives and
3401 clauses found during the scan. */
3402
3403 static void
3404 scan_omp (gimple_seq *body_p, omp_context *ctx)
3405 {
3406 location_t saved_location;
3407 struct walk_stmt_info wi;
3408
3409 memset (&wi, 0, sizeof (wi));
3410 wi.info = ctx;
3411 wi.want_locations = true;
3412
3413 saved_location = input_location;
3414 walk_gimple_seq_mod (body_p, scan_omp_1_stmt, scan_omp_1_op, &wi);
3415 input_location = saved_location;
3416 }
3417 \f
3418 /* Re-gimplification and code generation routines. */
3419
3420 /* Remove omp_member_access_dummy_var variables from gimple_bind_vars
3421 of BIND if in a method. */
3422
3423 static void
3424 maybe_remove_omp_member_access_dummy_vars (gbind *bind)
3425 {
3426 if (DECL_ARGUMENTS (current_function_decl)
3427 && DECL_ARTIFICIAL (DECL_ARGUMENTS (current_function_decl))
3428 && (TREE_CODE (TREE_TYPE (DECL_ARGUMENTS (current_function_decl)))
3429 == POINTER_TYPE))
3430 {
3431 tree vars = gimple_bind_vars (bind);
3432 for (tree *pvar = &vars; *pvar; )
3433 if (omp_member_access_dummy_var (*pvar))
3434 *pvar = DECL_CHAIN (*pvar);
3435 else
3436 pvar = &DECL_CHAIN (*pvar);
3437 gimple_bind_set_vars (bind, vars);
3438 }
3439 }
3440
3441 /* Remove omp_member_access_dummy_var variables from BLOCK_VARS of
3442 block and its subblocks. */
3443
3444 static void
3445 remove_member_access_dummy_vars (tree block)
3446 {
3447 for (tree *pvar = &BLOCK_VARS (block); *pvar; )
3448 if (omp_member_access_dummy_var (*pvar))
3449 *pvar = DECL_CHAIN (*pvar);
3450 else
3451 pvar = &DECL_CHAIN (*pvar);
3452
3453 for (block = BLOCK_SUBBLOCKS (block); block; block = BLOCK_CHAIN (block))
3454 remove_member_access_dummy_vars (block);
3455 }
3456
3457 /* If a context was created for STMT when it was scanned, return it. */
3458
3459 static omp_context *
3460 maybe_lookup_ctx (gimple *stmt)
3461 {
3462 splay_tree_node n;
3463 n = splay_tree_lookup (all_contexts, (splay_tree_key) stmt);
3464 return n ? (omp_context *) n->value : NULL;
3465 }
3466
3467
3468 /* Find the mapping for DECL in CTX or the immediately enclosing
3469 context that has a mapping for DECL.
3470
3471 If CTX is a nested parallel directive, we may have to use the decl
3472 mappings created in CTX's parent context. Suppose that we have the
3473 following parallel nesting (variable UIDs showed for clarity):
3474
3475 iD.1562 = 0;
3476 #omp parallel shared(iD.1562) -> outer parallel
3477 iD.1562 = iD.1562 + 1;
3478
3479 #omp parallel shared (iD.1562) -> inner parallel
3480 iD.1562 = iD.1562 - 1;
3481
3482 Each parallel structure will create a distinct .omp_data_s structure
3483 for copying iD.1562 in/out of the directive:
3484
3485 outer parallel .omp_data_s.1.i -> iD.1562
3486 inner parallel .omp_data_s.2.i -> iD.1562
3487
3488 A shared variable mapping will produce a copy-out operation before
3489 the parallel directive and a copy-in operation after it. So, in
3490 this case we would have:
3491
3492 iD.1562 = 0;
3493 .omp_data_o.1.i = iD.1562;
3494 #omp parallel shared(iD.1562) -> outer parallel
3495 .omp_data_i.1 = &.omp_data_o.1
3496 .omp_data_i.1->i = .omp_data_i.1->i + 1;
3497
3498 .omp_data_o.2.i = iD.1562; -> **
3499 #omp parallel shared(iD.1562) -> inner parallel
3500 .omp_data_i.2 = &.omp_data_o.2
3501 .omp_data_i.2->i = .omp_data_i.2->i - 1;
3502
3503
3504 ** This is a problem. The symbol iD.1562 cannot be referenced
3505 inside the body of the outer parallel region. But since we are
3506 emitting this copy operation while expanding the inner parallel
3507 directive, we need to access the CTX structure of the outer
3508 parallel directive to get the correct mapping:
3509
3510 .omp_data_o.2.i = .omp_data_i.1->i
3511
3512 Since there may be other workshare or parallel directives enclosing
3513 the parallel directive, it may be necessary to walk up the context
3514 parent chain. This is not a problem in general because nested
3515 parallelism happens only rarely. */
3516
3517 static tree
3518 lookup_decl_in_outer_ctx (tree decl, omp_context *ctx)
3519 {
3520 tree t;
3521 omp_context *up;
3522
3523 for (up = ctx->outer, t = NULL; up && t == NULL; up = up->outer)
3524 t = maybe_lookup_decl (decl, up);
3525
3526 gcc_assert (!ctx->is_nested || t || is_global_var (decl));
3527
3528 return t ? t : decl;
3529 }
3530
3531
3532 /* Similar to lookup_decl_in_outer_ctx, but return DECL if not found
3533 in outer contexts. */
3534
3535 static tree
3536 maybe_lookup_decl_in_outer_ctx (tree decl, omp_context *ctx)
3537 {
3538 tree t = NULL;
3539 omp_context *up;
3540
3541 for (up = ctx->outer, t = NULL; up && t == NULL; up = up->outer)
3542 t = maybe_lookup_decl (decl, up);
3543
3544 return t ? t : decl;
3545 }
3546
3547
/* Construct the initialization value for reduction operation OP of
   TYPE at location LOC, i.e. OP's neutral element: 0 for additive and
   or-like operations, 1 for multiplicative and and-like ones, all-ones
   for bitwise AND, and the most extreme representable value of TYPE
   for MIN/MAX.  */

tree
omp_reduction_init_op (location_t loc, enum tree_code op, tree type)
{
  switch (op)
    {
    case PLUS_EXPR:
    case MINUS_EXPR:
    case BIT_IOR_EXPR:
    case BIT_XOR_EXPR:
    case TRUTH_OR_EXPR:
    case TRUTH_ORIF_EXPR:
    case TRUTH_XOR_EXPR:
    case NE_EXPR:
      return build_zero_cst (type);

    case MULT_EXPR:
    case TRUTH_AND_EXPR:
    case TRUTH_ANDIF_EXPR:
    case EQ_EXPR:
      return fold_convert_loc (loc, type, integer_one_node);

    case BIT_AND_EXPR:
      /* All bits set, so that AND keeps every bit of the operands.  */
      return fold_convert_loc (loc, type, integer_minus_one_node);

    case MAX_EXPR:
      /* Start MAX from the smallest value of TYPE: -inf if infinities
	 are honored, otherwise the most negative finite value
	 (real_maxval with the sign flag set).  */
      if (SCALAR_FLOAT_TYPE_P (type))
	{
	  REAL_VALUE_TYPE max, min;
	  if (HONOR_INFINITIES (type))
	    {
	      real_inf (&max);
	      real_arithmetic (&min, NEGATE_EXPR, &max, NULL);
	    }
	  else
	    real_maxval (&min, 1, TYPE_MODE (type));
	  return build_real (type, min);
	}
      else if (POINTER_TYPE_P (type))
	{
	  wide_int min
	    = wi::min_value (TYPE_PRECISION (type), TYPE_SIGN (type));
	  return wide_int_to_tree (type, min);
	}
      else
	{
	  gcc_assert (INTEGRAL_TYPE_P (type));
	  return TYPE_MIN_VALUE (type);
	}

    case MIN_EXPR:
      /* Symmetrically, start MIN from the largest value of TYPE.  */
      if (SCALAR_FLOAT_TYPE_P (type))
	{
	  REAL_VALUE_TYPE max;
	  if (HONOR_INFINITIES (type))
	    real_inf (&max);
	  else
	    real_maxval (&max, 0, TYPE_MODE (type));
	  return build_real (type, max);
	}
      else if (POINTER_TYPE_P (type))
	{
	  wide_int max
	    = wi::max_value (TYPE_PRECISION (type), TYPE_SIGN (type));
	  return wide_int_to_tree (type, max);
	}
      else
	{
	  gcc_assert (INTEGRAL_TYPE_P (type));
	  return TYPE_MAX_VALUE (type);
	}

    default:
      gcc_unreachable ();
    }
}
3625
3626 /* Construct the initialization value for reduction CLAUSE. */
3627
3628 tree
3629 omp_reduction_init (tree clause, tree type)
3630 {
3631 return omp_reduction_init_op (OMP_CLAUSE_LOCATION (clause),
3632 OMP_CLAUSE_REDUCTION_CODE (clause), type);
3633 }
3634
/* Return alignment to be assumed for var in CLAUSE, which should be
   OMP_CLAUSE_ALIGNED.  Returns an INTEGER_CST.  */

static tree
omp_clause_aligned_alignment (tree clause)
{
  /* An explicit alignment expression on the clause wins.  */
  if (OMP_CLAUSE_ALIGNED_ALIGNMENT (clause))
    return OMP_CLAUSE_ALIGNED_ALIGNMENT (clause);

  /* Otherwise return implementation defined alignment.  */
  unsigned int al = 1;
  opt_scalar_mode mode_iter;
  auto_vector_sizes sizes;
  targetm.vectorize.autovectorize_vector_sizes (&sizes, true);
  /* VS becomes the largest vector size the target autovectorizes
     with (0 if the target reported none).  */
  poly_uint64 vs = 0;
  for (unsigned int i = 0; i < sizes.length (); ++i)
    vs = ordered_max (vs, sizes[i]);
  /* Pairs of (scalar class, corresponding vector class).  */
  static enum mode_class classes[]
    = { MODE_INT, MODE_VECTOR_INT, MODE_FLOAT, MODE_VECTOR_FLOAT };
  for (int i = 0; i < 4; i += 2)
    /* The for loop above dictates that we only walk through scalar classes.  */
    FOR_EACH_MODE_IN_CLASS (mode_iter, classes[i])
      {
	scalar_mode mode = mode_iter.require ();
	machine_mode vmode = targetm.vectorize.preferred_simd_mode (mode);
	if (GET_MODE_CLASS (vmode) != classes[i + 1])
	  continue;
	/* Widen VMODE towards VS, the widest supported vector size.  */
	while (maybe_ne (vs, 0U)
	       && known_lt (GET_MODE_SIZE (vmode), vs)
	       && GET_MODE_2XWIDER_MODE (vmode).exists ())
	  vmode = GET_MODE_2XWIDER_MODE (vmode).require ();

	/* Skip modes the frontend has no matching scalar type for.  */
	tree type = lang_hooks.types.type_for_mode (mode, 1);
	if (type == NULL_TREE || TYPE_MODE (type) != mode)
	  continue;
	/* Build the candidate vector type and track the maximum unit
	   alignment seen over all candidates.  */
	poly_uint64 nelts = exact_div (GET_MODE_SIZE (vmode),
				       GET_MODE_SIZE (mode));
	type = build_vector_type (type, nelts);
	if (TYPE_MODE (type) != vmode)
	  continue;
	if (TYPE_ALIGN_UNIT (type) > al)
	  al = TYPE_ALIGN_UNIT (type);
      }
  return build_int_cst (integer_type_node, al);
}
3680
3681
/* This structure is part of the interface between lower_rec_simd_input_clauses
   and lower_rec_input_clauses.  */

struct omplow_simd_context {
  omplow_simd_context () { memset (this, 0, sizeof (*this)); }
  /* Index variable subscripting the "omp simd array" temporaries
     inside the loop body.  */
  tree idx;
  /* Lane variable subscripting the accessor installed as the
     privatized variable's DECL_VALUE_EXPR.  */
  tree lane;
  /* Subscript used to read back the reduced value of inscan
     reductions (lazily created).  */
  tree lastlane;
  /* In SIMT mode, addresses of the per-lane privatized variables;
     slot 0 is a placeholder later filled with the simduid.  */
  vec<tree, va_heap> simt_eargs;
  /* Clobber assignments for SIMT privatized variables, presumably
     emitted at region teardown — see lower_rec_simd_input_clauses.  */
  gimple_seq simt_dlist;
  /* Maximum vectorization factor; 0 = not computed yet, 1 = no
     per-lane privatization is performed.  */
  poly_uint64_pod max_vf;
  /* True when lowering for SIMT rather than plain SIMD.  */
  bool is_simt;
};
3695
/* Helper function of lower_rec_input_clauses, used for #pragma omp simd
   privatization.  Give NEW_VAR a per-SIMD-lane representation: for SIMT
   an "omp simt private" scalar, otherwise an "omp simd array" of
   SCTX->max_vf elements.  On success IVAR is set to the per-lane
   accessor for use inside the loop and LVAR to the accessor installed
   as NEW_VAR's DECL_VALUE_EXPR; for inscan reductions *RVAR (and, for
   exclusive scan, *RVAR2) receive accessors of extra arrays.  Returns
   false when max_vf is 1 and no privatization is done.  */

static bool
lower_rec_simd_input_clauses (tree new_var, omp_context *ctx,
			      omplow_simd_context *sctx, tree &ivar,
			      tree &lvar, tree *rvar = NULL,
			      tree *rvar2 = NULL)
{
  /* Lazily compute the maximum vectorization factor on first use,
     clamping it by a constant OMP_CLAUSE_SAFELEN when present.  */
  if (known_eq (sctx->max_vf, 0U))
    {
      sctx->max_vf = sctx->is_simt ? omp_max_simt_vf () : omp_max_vf ();
      if (maybe_gt (sctx->max_vf, 1U))
	{
	  tree c = omp_find_clause (gimple_omp_for_clauses (ctx->stmt),
				    OMP_CLAUSE_SAFELEN);
	  if (c)
	    {
	      poly_uint64 safe_len;
	      /* A non-constant or sub-1 safelen forces max_vf = 1.  */
	      if (!poly_int_tree_p (OMP_CLAUSE_SAFELEN_EXPR (c), &safe_len)
		  || maybe_lt (safe_len, 1U))
		sctx->max_vf = 1;
	      else
		sctx->max_vf = lower_bound (sctx->max_vf, safe_len);
	    }
	}
      if (maybe_gt (sctx->max_vf, 1U))
	{
	  sctx->idx = create_tmp_var (unsigned_type_node);
	  sctx->lane = create_tmp_var (unsigned_type_node);
	}
    }
  if (known_eq (sctx->max_vf, 1U))
    return false;

  if (sctx->is_simt)
    {
      /* For SIMT, registers are naturally per-lane already.  */
      if (is_gimple_reg (new_var))
	{
	  ivar = lvar = new_var;
	  return true;
	}
      /* Addressable vars get an "omp simt private" temporary whose
	 address is passed along via simt_eargs, and a clobber is
	 queued on simt_dlist.  */
      tree type = TREE_TYPE (new_var), ptype = build_pointer_type (type);
      ivar = lvar = create_tmp_var (type);
      TREE_ADDRESSABLE (ivar) = 1;
      DECL_ATTRIBUTES (ivar) = tree_cons (get_identifier ("omp simt private"),
					  NULL, DECL_ATTRIBUTES (ivar));
      sctx->simt_eargs.safe_push (build1 (ADDR_EXPR, ptype, ivar));
      tree clobber = build_constructor (type, NULL);
      TREE_THIS_VOLATILE (clobber) = 1;
      gimple *g = gimple_build_assign (ivar, clobber);
      gimple_seq_add_stmt (&sctx->simt_dlist, g);
    }
  else
    {
      /* Plain SIMD: privatize through an array of max_vf elements,
	 tagged "omp simd array" for later passes.  */
      tree atype = build_array_type_nelts (TREE_TYPE (new_var), sctx->max_vf);
      tree avar = create_tmp_var_raw (atype);
      if (TREE_ADDRESSABLE (new_var))
	TREE_ADDRESSABLE (avar) = 1;
      DECL_ATTRIBUTES (avar)
	= tree_cons (get_identifier ("omp simd array"), NULL,
		     DECL_ATTRIBUTES (avar));
      gimple_add_tmp_var (avar);
      tree iavar = avar;
      if (rvar)
	{
	  /* For inscan reductions, create another array temporary,
	     which will hold the reduced value.  */
	  iavar = create_tmp_var_raw (atype);
	  if (TREE_ADDRESSABLE (new_var))
	    TREE_ADDRESSABLE (iavar) = 1;
	  DECL_ATTRIBUTES (iavar)
	    = tree_cons (get_identifier ("omp simd array"), NULL,
			 tree_cons (get_identifier ("omp simd inscan"), NULL,
				    DECL_ATTRIBUTES (iavar)));
	  gimple_add_tmp_var (iavar);
	  ctx->cb.decl_map->put (avar, iavar);
	  if (sctx->lastlane == NULL_TREE)
	    sctx->lastlane = create_tmp_var (unsigned_type_node);
	  /* *RVAR reads the reduced value at the lastlane subscript.  */
	  *rvar = build4 (ARRAY_REF, TREE_TYPE (new_var), iavar,
			  sctx->lastlane, NULL_TREE, NULL_TREE);
	  TREE_THIS_NOTRAP (*rvar) = 1;

	  if (ctx->scan_exclusive)
	    {
	      /* And for exclusive scan yet another one, which will
		 hold the value during the scan phase.  */
	      tree savar = create_tmp_var_raw (atype);
	      if (TREE_ADDRESSABLE (new_var))
		TREE_ADDRESSABLE (savar) = 1;
	      DECL_ATTRIBUTES (savar)
		= tree_cons (get_identifier ("omp simd array"), NULL,
			     tree_cons (get_identifier ("omp simd inscan "
							"exclusive"), NULL,
					DECL_ATTRIBUTES (savar)));
	      gimple_add_tmp_var (savar);
	      ctx->cb.decl_map->put (iavar, savar);
	      *rvar2 = build4 (ARRAY_REF, TREE_TYPE (new_var), savar,
			       sctx->idx, NULL_TREE, NULL_TREE);
	      TREE_THIS_NOTRAP (*rvar2) = 1;
	    }
	}
      /* Per-iteration accessor (idx) and value-expr accessor (lane).  */
      ivar = build4 (ARRAY_REF, TREE_TYPE (new_var), iavar, sctx->idx,
		     NULL_TREE, NULL_TREE);
      lvar = build4 (ARRAY_REF, TREE_TYPE (new_var), avar, sctx->lane,
		     NULL_TREE, NULL_TREE);
      TREE_THIS_NOTRAP (ivar) = 1;
      TREE_THIS_NOTRAP (lvar) = 1;
    }
  /* Redirect uses of NEW_VAR through the lane accessor.  */
  if (DECL_P (new_var))
    {
      SET_DECL_VALUE_EXPR (new_var, lvar);
      DECL_HAS_VALUE_EXPR_P (new_var) = 1;
    }
  return true;
}
3812
3813 /* Helper function of lower_rec_input_clauses. For a reference
3814 in simd reduction, add an underlying variable it will reference. */
3815
3816 static void
3817 handle_simd_reference (location_t loc, tree new_vard, gimple_seq *ilist)
3818 {
3819 tree z = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (new_vard)));
3820 if (TREE_CONSTANT (z))
3821 {
3822 z = create_tmp_var_raw (TREE_TYPE (TREE_TYPE (new_vard)),
3823 get_name (new_vard));
3824 gimple_add_tmp_var (z);
3825 TREE_ADDRESSABLE (z) = 1;
3826 z = build_fold_addr_expr_loc (loc, z);
3827 gimplify_assign (new_vard, z, ilist);
3828 }
3829 }
3830
3831 /* Helper function for lower_rec_input_clauses. Emit into ilist sequence
3832 code to emit (type) (tskred_temp[idx]). */
3833
3834 static tree
3835 task_reduction_read (gimple_seq *ilist, tree tskred_temp, tree type,
3836 unsigned idx)
3837 {
3838 unsigned HOST_WIDE_INT sz
3839 = tree_to_uhwi (TYPE_SIZE_UNIT (pointer_sized_int_node));
3840 tree r = build2 (MEM_REF, pointer_sized_int_node,
3841 tskred_temp, build_int_cst (TREE_TYPE (tskred_temp),
3842 idx * sz));
3843 tree v = create_tmp_var (pointer_sized_int_node);
3844 gimple *g = gimple_build_assign (v, r);
3845 gimple_seq_add_stmt (ilist, g);
3846 if (!useless_type_conversion_p (type, pointer_sized_int_node))
3847 {
3848 v = create_tmp_var (type);
3849 g = gimple_build_assign (v, NOP_EXPR, gimple_assign_lhs (g));
3850 gimple_seq_add_stmt (ilist, g);
3851 }
3852 return v;
3853 }
3854
3855 /* Generate code to implement the input clauses, FIRSTPRIVATE and COPYIN,
3856 from the receiver (aka child) side and initializers for REFERENCE_TYPE
3857 private variables. Initialization statements go in ILIST, while calls
3858 to destructors go in DLIST. */
3859
3860 static void
3861 lower_rec_input_clauses (tree clauses, gimple_seq *ilist, gimple_seq *dlist,
3862 omp_context *ctx, struct omp_for_data *fd)
3863 {
3864 tree c, copyin_seq, x, ptr;
3865 bool copyin_by_ref = false;
3866 bool lastprivate_firstprivate = false;
3867 bool reduction_omp_orig_ref = false;
3868 int pass;
3869 bool is_simd = (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
3870 && gimple_omp_for_kind (ctx->stmt) & GF_OMP_FOR_SIMD);
3871 omplow_simd_context sctx = omplow_simd_context ();
3872 tree simt_lane = NULL_TREE, simtrec = NULL_TREE;
3873 tree ivar = NULL_TREE, lvar = NULL_TREE, uid = NULL_TREE;
3874 gimple_seq llist[4] = { };
3875 tree nonconst_simd_if = NULL_TREE;
3876
3877 copyin_seq = NULL;
3878 sctx.is_simt = is_simd && omp_find_clause (clauses, OMP_CLAUSE__SIMT_);
3879
3880 /* Set max_vf=1 (which will later enforce safelen=1) in simd loops
3881 with data sharing clauses referencing variable sized vars. That
3882 is unnecessarily hard to support and very unlikely to result in
3883 vectorized code anyway. */
3884 if (is_simd)
3885 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
3886 switch (OMP_CLAUSE_CODE (c))
3887 {
3888 case OMP_CLAUSE_LINEAR:
3889 if (OMP_CLAUSE_LINEAR_ARRAY (c))
3890 sctx.max_vf = 1;
3891 /* FALLTHRU */
3892 case OMP_CLAUSE_PRIVATE:
3893 case OMP_CLAUSE_FIRSTPRIVATE:
3894 case OMP_CLAUSE_LASTPRIVATE:
3895 if (is_variable_sized (OMP_CLAUSE_DECL (c)))
3896 sctx.max_vf = 1;
3897 else if (omp_is_reference (OMP_CLAUSE_DECL (c)))
3898 {
3899 tree rtype = TREE_TYPE (TREE_TYPE (OMP_CLAUSE_DECL (c)));
3900 if (!TREE_CONSTANT (TYPE_SIZE_UNIT (rtype)))
3901 sctx.max_vf = 1;
3902 }
3903 break;
3904 case OMP_CLAUSE_REDUCTION:
3905 case OMP_CLAUSE_IN_REDUCTION:
3906 if (TREE_CODE (OMP_CLAUSE_DECL (c)) == MEM_REF
3907 || is_variable_sized (OMP_CLAUSE_DECL (c)))
3908 sctx.max_vf = 1;
3909 else if (omp_is_reference (OMP_CLAUSE_DECL (c)))
3910 {
3911 tree rtype = TREE_TYPE (TREE_TYPE (OMP_CLAUSE_DECL (c)));
3912 if (!TREE_CONSTANT (TYPE_SIZE_UNIT (rtype)))
3913 sctx.max_vf = 1;
3914 }
3915 break;
3916 case OMP_CLAUSE_IF:
3917 if (integer_zerop (OMP_CLAUSE_IF_EXPR (c)))
3918 sctx.max_vf = 1;
3919 else if (TREE_CODE (OMP_CLAUSE_IF_EXPR (c)) != INTEGER_CST)
3920 nonconst_simd_if = OMP_CLAUSE_IF_EXPR (c);
3921 break;
3922 case OMP_CLAUSE_SIMDLEN:
3923 if (integer_onep (OMP_CLAUSE_SIMDLEN_EXPR (c)))
3924 sctx.max_vf = 1;
3925 break;
3926 case OMP_CLAUSE__CONDTEMP_:
3927 /* FIXME: lastprivate(conditional:) not handled for SIMT yet. */
3928 if (sctx.is_simt)
3929 sctx.max_vf = 1;
3930 break;
3931 default:
3932 continue;
3933 }
3934
3935 /* Add a placeholder for simduid. */
3936 if (sctx.is_simt && maybe_ne (sctx.max_vf, 1U))
3937 sctx.simt_eargs.safe_push (NULL_TREE);
3938
3939 unsigned task_reduction_cnt = 0;
3940 unsigned task_reduction_cntorig = 0;
3941 unsigned task_reduction_cnt_full = 0;
3942 unsigned task_reduction_cntorig_full = 0;
3943 unsigned task_reduction_other_cnt = 0;
3944 tree tskred_atype = NULL_TREE, tskred_avar = NULL_TREE;
3945 tree tskred_base = NULL_TREE, tskred_temp = NULL_TREE;
3946 /* Do all the fixed sized types in the first pass, and the variable sized
3947 types in the second pass. This makes sure that the scalar arguments to
3948 the variable sized types are processed before we use them in the
3949 variable sized operations. For task reductions we use 4 passes, in the
3950 first two we ignore them, in the third one gather arguments for
3951 GOMP_task_reduction_remap call and in the last pass actually handle
3952 the task reductions. */
3953 for (pass = 0; pass < ((task_reduction_cnt || task_reduction_other_cnt)
3954 ? 4 : 2); ++pass)
3955 {
3956 if (pass == 2 && task_reduction_cnt)
3957 {
3958 tskred_atype
3959 = build_array_type_nelts (ptr_type_node, task_reduction_cnt
3960 + task_reduction_cntorig);
3961 tskred_avar = create_tmp_var_raw (tskred_atype);
3962 gimple_add_tmp_var (tskred_avar);
3963 TREE_ADDRESSABLE (tskred_avar) = 1;
3964 task_reduction_cnt_full = task_reduction_cnt;
3965 task_reduction_cntorig_full = task_reduction_cntorig;
3966 }
3967 else if (pass == 3 && task_reduction_cnt)
3968 {
3969 x = builtin_decl_explicit (BUILT_IN_GOMP_TASK_REDUCTION_REMAP);
3970 gimple *g
3971 = gimple_build_call (x, 3, size_int (task_reduction_cnt),
3972 size_int (task_reduction_cntorig),
3973 build_fold_addr_expr (tskred_avar));
3974 gimple_seq_add_stmt (ilist, g);
3975 }
3976 if (pass == 3 && task_reduction_other_cnt)
3977 {
3978 /* For reduction clauses, build
3979 tskred_base = (void *) tskred_temp[2]
3980 + omp_get_thread_num () * tskred_temp[1]
3981 or if tskred_temp[1] is known to be constant, that constant
3982 directly. This is the start of the private reduction copy block
3983 for the current thread. */
3984 tree v = create_tmp_var (integer_type_node);
3985 x = builtin_decl_explicit (BUILT_IN_OMP_GET_THREAD_NUM);
3986 gimple *g = gimple_build_call (x, 0);
3987 gimple_call_set_lhs (g, v);
3988 gimple_seq_add_stmt (ilist, g);
3989 c = omp_find_clause (clauses, OMP_CLAUSE__REDUCTEMP_);
3990 tskred_temp = OMP_CLAUSE_DECL (c);
3991 if (is_taskreg_ctx (ctx))
3992 tskred_temp = lookup_decl (tskred_temp, ctx);
3993 tree v2 = create_tmp_var (sizetype);
3994 g = gimple_build_assign (v2, NOP_EXPR, v);
3995 gimple_seq_add_stmt (ilist, g);
3996 if (ctx->task_reductions[0])
3997 v = fold_convert (sizetype, ctx->task_reductions[0]);
3998 else
3999 v = task_reduction_read (ilist, tskred_temp, sizetype, 1);
4000 tree v3 = create_tmp_var (sizetype);
4001 g = gimple_build_assign (v3, MULT_EXPR, v2, v);
4002 gimple_seq_add_stmt (ilist, g);
4003 v = task_reduction_read (ilist, tskred_temp, ptr_type_node, 2);
4004 tskred_base = create_tmp_var (ptr_type_node);
4005 g = gimple_build_assign (tskred_base, POINTER_PLUS_EXPR, v, v3);
4006 gimple_seq_add_stmt (ilist, g);
4007 }
4008 task_reduction_cnt = 0;
4009 task_reduction_cntorig = 0;
4010 task_reduction_other_cnt = 0;
4011 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
4012 {
4013 enum omp_clause_code c_kind = OMP_CLAUSE_CODE (c);
4014 tree var, new_var;
4015 bool by_ref;
4016 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
4017 bool task_reduction_p = false;
4018 bool task_reduction_needs_orig_p = false;
4019 tree cond = NULL_TREE;
4020
4021 switch (c_kind)
4022 {
4023 case OMP_CLAUSE_PRIVATE:
4024 if (OMP_CLAUSE_PRIVATE_DEBUG (c))
4025 continue;
4026 break;
4027 case OMP_CLAUSE_SHARED:
4028 /* Ignore shared directives in teams construct inside
4029 of target construct. */
4030 if (gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS
4031 && !is_host_teams_ctx (ctx))
4032 continue;
4033 if (maybe_lookup_decl (OMP_CLAUSE_DECL (c), ctx) == NULL)
4034 {
4035 gcc_assert (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c)
4036 || is_global_var (OMP_CLAUSE_DECL (c)));
4037 continue;
4038 }
4039 case OMP_CLAUSE_FIRSTPRIVATE:
4040 case OMP_CLAUSE_COPYIN:
4041 break;
4042 case OMP_CLAUSE_LINEAR:
4043 if (!OMP_CLAUSE_LINEAR_NO_COPYIN (c)
4044 && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c))
4045 lastprivate_firstprivate = true;
4046 break;
4047 case OMP_CLAUSE_REDUCTION:
4048 case OMP_CLAUSE_IN_REDUCTION:
4049 if (is_task_ctx (ctx) || OMP_CLAUSE_REDUCTION_TASK (c))
4050 {
4051 task_reduction_p = true;
4052 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION)
4053 {
4054 task_reduction_other_cnt++;
4055 if (pass == 2)
4056 continue;
4057 }
4058 else
4059 task_reduction_cnt++;
4060 if (OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c))
4061 {
4062 var = OMP_CLAUSE_DECL (c);
4063 /* If var is a global variable that isn't privatized
4064 in outer contexts, we don't need to look up the
4065 original address, it is always the address of the
4066 global variable itself. */
4067 if (!DECL_P (var)
4068 || omp_is_reference (var)
4069 || !is_global_var
4070 (maybe_lookup_decl_in_outer_ctx (var, ctx)))
4071 {
4072 task_reduction_needs_orig_p = true;
4073 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION)
4074 task_reduction_cntorig++;
4075 }
4076 }
4077 }
4078 else if (OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c))
4079 reduction_omp_orig_ref = true;
4080 break;
4081 case OMP_CLAUSE__REDUCTEMP_:
4082 if (!is_taskreg_ctx (ctx))
4083 continue;
4084 /* FALLTHRU */
4085 case OMP_CLAUSE__LOOPTEMP_:
4086 /* Handle _looptemp_/_reductemp_ clauses only on
4087 parallel/task. */
4088 if (fd)
4089 continue;
4090 break;
4091 case OMP_CLAUSE_LASTPRIVATE:
4092 if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
4093 {
4094 lastprivate_firstprivate = true;
4095 if (pass != 0 || is_taskloop_ctx (ctx))
4096 continue;
4097 }
4098 /* Even without corresponding firstprivate, if
4099 decl is Fortran allocatable, it needs outer var
4100 reference. */
4101 else if (pass == 0
4102 && lang_hooks.decls.omp_private_outer_ref
4103 (OMP_CLAUSE_DECL (c)))
4104 lastprivate_firstprivate = true;
4105 break;
4106 case OMP_CLAUSE_ALIGNED:
4107 if (pass != 1)
4108 continue;
4109 var = OMP_CLAUSE_DECL (c);
4110 if (TREE_CODE (TREE_TYPE (var)) == POINTER_TYPE
4111 && !is_global_var (var))
4112 {
4113 new_var = maybe_lookup_decl (var, ctx);
4114 if (new_var == NULL_TREE)
4115 new_var = maybe_lookup_decl_in_outer_ctx (var, ctx);
4116 x = builtin_decl_explicit (BUILT_IN_ASSUME_ALIGNED);
4117 tree alarg = omp_clause_aligned_alignment (c);
4118 alarg = fold_convert_loc (clause_loc, size_type_node, alarg);
4119 x = build_call_expr_loc (clause_loc, x, 2, new_var, alarg);
4120 x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x);
4121 x = build2 (MODIFY_EXPR, TREE_TYPE (new_var), new_var, x);
4122 gimplify_and_add (x, ilist);
4123 }
4124 else if (TREE_CODE (TREE_TYPE (var)) == ARRAY_TYPE
4125 && is_global_var (var))
4126 {
4127 tree ptype = build_pointer_type (TREE_TYPE (var)), t, t2;
4128 new_var = lookup_decl (var, ctx);
4129 t = maybe_lookup_decl_in_outer_ctx (var, ctx);
4130 t = build_fold_addr_expr_loc (clause_loc, t);
4131 t2 = builtin_decl_explicit (BUILT_IN_ASSUME_ALIGNED);
4132 tree alarg = omp_clause_aligned_alignment (c);
4133 alarg = fold_convert_loc (clause_loc, size_type_node, alarg);
4134 t = build_call_expr_loc (clause_loc, t2, 2, t, alarg);
4135 t = fold_convert_loc (clause_loc, ptype, t);
4136 x = create_tmp_var (ptype);
4137 t = build2 (MODIFY_EXPR, ptype, x, t);
4138 gimplify_and_add (t, ilist);
4139 t = build_simple_mem_ref_loc (clause_loc, x);
4140 SET_DECL_VALUE_EXPR (new_var, t);
4141 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
4142 }
4143 continue;
4144 case OMP_CLAUSE__CONDTEMP_:
4145 if (is_parallel_ctx (ctx)
4146 || (is_simd && !OMP_CLAUSE__CONDTEMP__ITER (c)))
4147 break;
4148 continue;
4149 default:
4150 continue;
4151 }
4152
4153 if (task_reduction_p != (pass >= 2))
4154 continue;
4155
4156 new_var = var = OMP_CLAUSE_DECL (c);
4157 if ((c_kind == OMP_CLAUSE_REDUCTION
4158 || c_kind == OMP_CLAUSE_IN_REDUCTION)
4159 && TREE_CODE (var) == MEM_REF)
4160 {
4161 var = TREE_OPERAND (var, 0);
4162 if (TREE_CODE (var) == POINTER_PLUS_EXPR)
4163 var = TREE_OPERAND (var, 0);
4164 if (TREE_CODE (var) == INDIRECT_REF
4165 || TREE_CODE (var) == ADDR_EXPR)
4166 var = TREE_OPERAND (var, 0);
4167 if (is_variable_sized (var))
4168 {
4169 gcc_assert (DECL_HAS_VALUE_EXPR_P (var));
4170 var = DECL_VALUE_EXPR (var);
4171 gcc_assert (TREE_CODE (var) == INDIRECT_REF);
4172 var = TREE_OPERAND (var, 0);
4173 gcc_assert (DECL_P (var));
4174 }
4175 new_var = var;
4176 }
4177 if (c_kind != OMP_CLAUSE_COPYIN)
4178 new_var = lookup_decl (var, ctx);
4179
4180 if (c_kind == OMP_CLAUSE_SHARED || c_kind == OMP_CLAUSE_COPYIN)
4181 {
4182 if (pass != 0)
4183 continue;
4184 }
4185 /* C/C++ array section reductions. */
4186 else if ((c_kind == OMP_CLAUSE_REDUCTION
4187 || c_kind == OMP_CLAUSE_IN_REDUCTION)
4188 && var != OMP_CLAUSE_DECL (c))
4189 {
4190 if (pass == 0)
4191 continue;
4192
4193 tree bias = TREE_OPERAND (OMP_CLAUSE_DECL (c), 1);
4194 tree orig_var = TREE_OPERAND (OMP_CLAUSE_DECL (c), 0);
4195
4196 if (TREE_CODE (orig_var) == POINTER_PLUS_EXPR)
4197 {
4198 tree b = TREE_OPERAND (orig_var, 1);
4199 b = maybe_lookup_decl (b, ctx);
4200 if (b == NULL)
4201 {
4202 b = TREE_OPERAND (orig_var, 1);
4203 b = maybe_lookup_decl_in_outer_ctx (b, ctx);
4204 }
4205 if (integer_zerop (bias))
4206 bias = b;
4207 else
4208 {
4209 bias = fold_convert_loc (clause_loc,
4210 TREE_TYPE (b), bias);
4211 bias = fold_build2_loc (clause_loc, PLUS_EXPR,
4212 TREE_TYPE (b), b, bias);
4213 }
4214 orig_var = TREE_OPERAND (orig_var, 0);
4215 }
4216 if (pass == 2)
4217 {
4218 tree out = maybe_lookup_decl_in_outer_ctx (var, ctx);
4219 if (is_global_var (out)
4220 && TREE_CODE (TREE_TYPE (out)) != POINTER_TYPE
4221 && (TREE_CODE (TREE_TYPE (out)) != REFERENCE_TYPE
4222 || (TREE_CODE (TREE_TYPE (TREE_TYPE (out)))
4223 != POINTER_TYPE)))
4224 x = var;
4225 else
4226 {
4227 bool by_ref = use_pointer_for_field (var, NULL);
4228 x = build_receiver_ref (var, by_ref, ctx);
4229 if (TREE_CODE (TREE_TYPE (var)) == REFERENCE_TYPE
4230 && (TREE_CODE (TREE_TYPE (TREE_TYPE (var)))
4231 == POINTER_TYPE))
4232 x = build_fold_addr_expr (x);
4233 }
4234 if (TREE_CODE (orig_var) == INDIRECT_REF)
4235 x = build_simple_mem_ref (x);
4236 else if (TREE_CODE (orig_var) == ADDR_EXPR)
4237 {
4238 if (var == TREE_OPERAND (orig_var, 0))
4239 x = build_fold_addr_expr (x);
4240 }
4241 bias = fold_convert (sizetype, bias);
4242 x = fold_convert (ptr_type_node, x);
4243 x = fold_build2_loc (clause_loc, POINTER_PLUS_EXPR,
4244 TREE_TYPE (x), x, bias);
4245 unsigned cnt = task_reduction_cnt - 1;
4246 if (!task_reduction_needs_orig_p)
4247 cnt += (task_reduction_cntorig_full
4248 - task_reduction_cntorig);
4249 else
4250 cnt = task_reduction_cntorig - 1;
4251 tree r = build4 (ARRAY_REF, ptr_type_node, tskred_avar,
4252 size_int (cnt), NULL_TREE, NULL_TREE);
4253 gimplify_assign (r, x, ilist);
4254 continue;
4255 }
4256
4257 if (TREE_CODE (orig_var) == INDIRECT_REF
4258 || TREE_CODE (orig_var) == ADDR_EXPR)
4259 orig_var = TREE_OPERAND (orig_var, 0);
4260 tree d = OMP_CLAUSE_DECL (c);
4261 tree type = TREE_TYPE (d);
4262 gcc_assert (TREE_CODE (type) == ARRAY_TYPE);
4263 tree v = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
4264 const char *name = get_name (orig_var);
4265 if (pass == 3)
4266 {
4267 tree xv = create_tmp_var (ptr_type_node);
4268 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION)
4269 {
4270 unsigned cnt = task_reduction_cnt - 1;
4271 if (!task_reduction_needs_orig_p)
4272 cnt += (task_reduction_cntorig_full
4273 - task_reduction_cntorig);
4274 else
4275 cnt = task_reduction_cntorig - 1;
4276 x = build4 (ARRAY_REF, ptr_type_node, tskred_avar,
4277 size_int (cnt), NULL_TREE, NULL_TREE);
4278
4279 gimple *g = gimple_build_assign (xv, x);
4280 gimple_seq_add_stmt (ilist, g);
4281 }
4282 else
4283 {
4284 unsigned int idx = *ctx->task_reduction_map->get (c);
4285 tree off;
4286 if (ctx->task_reductions[1 + idx])
4287 off = fold_convert (sizetype,
4288 ctx->task_reductions[1 + idx]);
4289 else
4290 off = task_reduction_read (ilist, tskred_temp, sizetype,
4291 7 + 3 * idx + 1);
4292 gimple *g = gimple_build_assign (xv, POINTER_PLUS_EXPR,
4293 tskred_base, off);
4294 gimple_seq_add_stmt (ilist, g);
4295 }
4296 x = fold_convert (build_pointer_type (boolean_type_node),
4297 xv);
4298 if (TREE_CONSTANT (v))
4299 x = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (x), x,
4300 TYPE_SIZE_UNIT (type));
4301 else
4302 {
4303 tree t = maybe_lookup_decl (v, ctx);
4304 if (t)
4305 v = t;
4306 else
4307 v = maybe_lookup_decl_in_outer_ctx (v, ctx);
4308 gimplify_expr (&v, ilist, NULL, is_gimple_val,
4309 fb_rvalue);
4310 t = fold_build2_loc (clause_loc, PLUS_EXPR,
4311 TREE_TYPE (v), v,
4312 build_int_cst (TREE_TYPE (v), 1));
4313 t = fold_build2_loc (clause_loc, MULT_EXPR,
4314 TREE_TYPE (v), t,
4315 TYPE_SIZE_UNIT (TREE_TYPE (type)));
4316 x = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (x), x, t);
4317 }
4318 cond = create_tmp_var (TREE_TYPE (x));
4319 gimplify_assign (cond, x, ilist);
4320 x = xv;
4321 }
4322 else if (TREE_CONSTANT (v))
4323 {
4324 x = create_tmp_var_raw (type, name);
4325 gimple_add_tmp_var (x);
4326 TREE_ADDRESSABLE (x) = 1;
4327 x = build_fold_addr_expr_loc (clause_loc, x);
4328 }
4329 else
4330 {
4331 tree atmp
4332 = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
4333 tree t = maybe_lookup_decl (v, ctx);
4334 if (t)
4335 v = t;
4336 else
4337 v = maybe_lookup_decl_in_outer_ctx (v, ctx);
4338 gimplify_expr (&v, ilist, NULL, is_gimple_val, fb_rvalue);
4339 t = fold_build2_loc (clause_loc, PLUS_EXPR,
4340 TREE_TYPE (v), v,
4341 build_int_cst (TREE_TYPE (v), 1));
4342 t = fold_build2_loc (clause_loc, MULT_EXPR,
4343 TREE_TYPE (v), t,
4344 TYPE_SIZE_UNIT (TREE_TYPE (type)));
4345 tree al = size_int (TYPE_ALIGN (TREE_TYPE (type)));
4346 x = build_call_expr_loc (clause_loc, atmp, 2, t, al);
4347 }
4348
4349 tree ptype = build_pointer_type (TREE_TYPE (type));
4350 x = fold_convert_loc (clause_loc, ptype, x);
4351 tree y = create_tmp_var (ptype, name);
4352 gimplify_assign (y, x, ilist);
4353 x = y;
4354 tree yb = y;
4355
4356 if (!integer_zerop (bias))
4357 {
4358 bias = fold_convert_loc (clause_loc, pointer_sized_int_node,
4359 bias);
4360 yb = fold_convert_loc (clause_loc, pointer_sized_int_node,
4361 x);
4362 yb = fold_build2_loc (clause_loc, MINUS_EXPR,
4363 pointer_sized_int_node, yb, bias);
4364 x = fold_convert_loc (clause_loc, TREE_TYPE (x), yb);
4365 yb = create_tmp_var (ptype, name);
4366 gimplify_assign (yb, x, ilist);
4367 x = yb;
4368 }
4369
4370 d = TREE_OPERAND (d, 0);
4371 if (TREE_CODE (d) == POINTER_PLUS_EXPR)
4372 d = TREE_OPERAND (d, 0);
4373 if (TREE_CODE (d) == ADDR_EXPR)
4374 {
4375 if (orig_var != var)
4376 {
4377 gcc_assert (is_variable_sized (orig_var));
4378 x = fold_convert_loc (clause_loc, TREE_TYPE (new_var),
4379 x);
4380 gimplify_assign (new_var, x, ilist);
4381 tree new_orig_var = lookup_decl (orig_var, ctx);
4382 tree t = build_fold_indirect_ref (new_var);
4383 DECL_IGNORED_P (new_var) = 0;
4384 TREE_THIS_NOTRAP (t) = 1;
4385 SET_DECL_VALUE_EXPR (new_orig_var, t);
4386 DECL_HAS_VALUE_EXPR_P (new_orig_var) = 1;
4387 }
4388 else
4389 {
4390 x = build2 (MEM_REF, TREE_TYPE (new_var), x,
4391 build_int_cst (ptype, 0));
4392 SET_DECL_VALUE_EXPR (new_var, x);
4393 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
4394 }
4395 }
4396 else
4397 {
4398 gcc_assert (orig_var == var);
4399 if (TREE_CODE (d) == INDIRECT_REF)
4400 {
4401 x = create_tmp_var (ptype, name);
4402 TREE_ADDRESSABLE (x) = 1;
4403 gimplify_assign (x, yb, ilist);
4404 x = build_fold_addr_expr_loc (clause_loc, x);
4405 }
4406 x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x);
4407 gimplify_assign (new_var, x, ilist);
4408 }
4409 /* GOMP_taskgroup_reduction_register memsets the whole
4410 array to zero. If the initializer is zero, we don't
4411 need to initialize it again, just mark it as ever
4412 used unconditionally, i.e. cond = true. */
4413 if (cond
4414 && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) == NULL_TREE
4415 && initializer_zerop (omp_reduction_init (c,
4416 TREE_TYPE (type))))
4417 {
4418 gimple *g = gimple_build_assign (build_simple_mem_ref (cond),
4419 boolean_true_node);
4420 gimple_seq_add_stmt (ilist, g);
4421 continue;
4422 }
4423 tree end = create_artificial_label (UNKNOWN_LOCATION);
4424 if (cond)
4425 {
4426 gimple *g;
4427 if (!is_parallel_ctx (ctx))
4428 {
4429 tree condv = create_tmp_var (boolean_type_node);
4430 g = gimple_build_assign (condv,
4431 build_simple_mem_ref (cond));
4432 gimple_seq_add_stmt (ilist, g);
4433 tree lab1 = create_artificial_label (UNKNOWN_LOCATION);
4434 g = gimple_build_cond (NE_EXPR, condv,
4435 boolean_false_node, end, lab1);
4436 gimple_seq_add_stmt (ilist, g);
4437 gimple_seq_add_stmt (ilist, gimple_build_label (lab1));
4438 }
4439 g = gimple_build_assign (build_simple_mem_ref (cond),
4440 boolean_true_node);
4441 gimple_seq_add_stmt (ilist, g);
4442 }
4443
4444 tree y1 = create_tmp_var (ptype);
4445 gimplify_assign (y1, y, ilist);
4446 tree i2 = NULL_TREE, y2 = NULL_TREE;
4447 tree body2 = NULL_TREE, end2 = NULL_TREE;
4448 tree y3 = NULL_TREE, y4 = NULL_TREE;
4449 if (task_reduction_needs_orig_p)
4450 {
4451 y3 = create_tmp_var (ptype);
4452 tree ref;
4453 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION)
4454 ref = build4 (ARRAY_REF, ptr_type_node, tskred_avar,
4455 size_int (task_reduction_cnt_full
4456 + task_reduction_cntorig - 1),
4457 NULL_TREE, NULL_TREE);
4458 else
4459 {
4460 unsigned int idx = *ctx->task_reduction_map->get (c);
4461 ref = task_reduction_read (ilist, tskred_temp, ptype,
4462 7 + 3 * idx);
4463 }
4464 gimplify_assign (y3, ref, ilist);
4465 }
4466 else if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) || is_simd)
4467 {
4468 if (pass != 3)
4469 {
4470 y2 = create_tmp_var (ptype);
4471 gimplify_assign (y2, y, ilist);
4472 }
4473 if (is_simd || OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c))
4474 {
4475 tree ref = build_outer_var_ref (var, ctx);
4476 /* For ref build_outer_var_ref already performs this. */
4477 if (TREE_CODE (d) == INDIRECT_REF)
4478 gcc_assert (omp_is_reference (var));
4479 else if (TREE_CODE (d) == ADDR_EXPR)
4480 ref = build_fold_addr_expr (ref);
4481 else if (omp_is_reference (var))
4482 ref = build_fold_addr_expr (ref);
4483 ref = fold_convert_loc (clause_loc, ptype, ref);
4484 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c)
4485 && OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c))
4486 {
4487 y3 = create_tmp_var (ptype);
4488 gimplify_assign (y3, unshare_expr (ref), ilist);
4489 }
4490 if (is_simd)
4491 {
4492 y4 = create_tmp_var (ptype);
4493 gimplify_assign (y4, ref, dlist);
4494 }
4495 }
4496 }
4497 tree i = create_tmp_var (TREE_TYPE (v));
4498 gimplify_assign (i, build_int_cst (TREE_TYPE (v), 0), ilist);
4499 tree body = create_artificial_label (UNKNOWN_LOCATION);
4500 gimple_seq_add_stmt (ilist, gimple_build_label (body));
4501 if (y2)
4502 {
4503 i2 = create_tmp_var (TREE_TYPE (v));
4504 gimplify_assign (i2, build_int_cst (TREE_TYPE (v), 0), dlist);
4505 body2 = create_artificial_label (UNKNOWN_LOCATION);
4506 end2 = create_artificial_label (UNKNOWN_LOCATION);
4507 gimple_seq_add_stmt (dlist, gimple_build_label (body2));
4508 }
4509 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
4510 {
4511 tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
4512 tree decl_placeholder
4513 = OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c);
4514 SET_DECL_VALUE_EXPR (decl_placeholder,
4515 build_simple_mem_ref (y1));
4516 DECL_HAS_VALUE_EXPR_P (decl_placeholder) = 1;
4517 SET_DECL_VALUE_EXPR (placeholder,
4518 y3 ? build_simple_mem_ref (y3)
4519 : error_mark_node);
4520 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
4521 x = lang_hooks.decls.omp_clause_default_ctor
4522 (c, build_simple_mem_ref (y1),
4523 y3 ? build_simple_mem_ref (y3) : NULL_TREE);
4524 if (x)
4525 gimplify_and_add (x, ilist);
4526 if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c))
4527 {
4528 gimple_seq tseq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c);
4529 lower_omp (&tseq, ctx);
4530 gimple_seq_add_seq (ilist, tseq);
4531 }
4532 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL;
4533 if (is_simd)
4534 {
4535 SET_DECL_VALUE_EXPR (decl_placeholder,
4536 build_simple_mem_ref (y2));
4537 SET_DECL_VALUE_EXPR (placeholder,
4538 build_simple_mem_ref (y4));
4539 gimple_seq tseq = OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c);
4540 lower_omp (&tseq, ctx);
4541 gimple_seq_add_seq (dlist, tseq);
4542 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
4543 }
4544 DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
4545 DECL_HAS_VALUE_EXPR_P (decl_placeholder) = 0;
4546 if (y2)
4547 {
4548 x = lang_hooks.decls.omp_clause_dtor
4549 (c, build_simple_mem_ref (y2));
4550 if (x)
4551 gimplify_and_add (x, dlist);
4552 }
4553 }
4554 else
4555 {
4556 x = omp_reduction_init (c, TREE_TYPE (type));
4557 enum tree_code code = OMP_CLAUSE_REDUCTION_CODE (c);
4558
4559 /* reduction(-:var) sums up the partial results, so it
4560 acts identically to reduction(+:var). */
4561 if (code == MINUS_EXPR)
4562 code = PLUS_EXPR;
4563
4564 gimplify_assign (build_simple_mem_ref (y1), x, ilist);
4565 if (is_simd)
4566 {
4567 x = build2 (code, TREE_TYPE (type),
4568 build_simple_mem_ref (y4),
4569 build_simple_mem_ref (y2));
4570 gimplify_assign (build_simple_mem_ref (y4), x, dlist);
4571 }
4572 }
4573 gimple *g
4574 = gimple_build_assign (y1, POINTER_PLUS_EXPR, y1,
4575 TYPE_SIZE_UNIT (TREE_TYPE (type)));
4576 gimple_seq_add_stmt (ilist, g);
4577 if (y3)
4578 {
4579 g = gimple_build_assign (y3, POINTER_PLUS_EXPR, y3,
4580 TYPE_SIZE_UNIT (TREE_TYPE (type)));
4581 gimple_seq_add_stmt (ilist, g);
4582 }
4583 g = gimple_build_assign (i, PLUS_EXPR, i,
4584 build_int_cst (TREE_TYPE (i), 1));
4585 gimple_seq_add_stmt (ilist, g);
4586 g = gimple_build_cond (LE_EXPR, i, v, body, end);
4587 gimple_seq_add_stmt (ilist, g);
4588 gimple_seq_add_stmt (ilist, gimple_build_label (end));
4589 if (y2)
4590 {
4591 g = gimple_build_assign (y2, POINTER_PLUS_EXPR, y2,
4592 TYPE_SIZE_UNIT (TREE_TYPE (type)));
4593 gimple_seq_add_stmt (dlist, g);
4594 if (y4)
4595 {
4596 g = gimple_build_assign
4597 (y4, POINTER_PLUS_EXPR, y4,
4598 TYPE_SIZE_UNIT (TREE_TYPE (type)));
4599 gimple_seq_add_stmt (dlist, g);
4600 }
4601 g = gimple_build_assign (i2, PLUS_EXPR, i2,
4602 build_int_cst (TREE_TYPE (i2), 1));
4603 gimple_seq_add_stmt (dlist, g);
4604 g = gimple_build_cond (LE_EXPR, i2, v, body2, end2);
4605 gimple_seq_add_stmt (dlist, g);
4606 gimple_seq_add_stmt (dlist, gimple_build_label (end2));
4607 }
4608 continue;
4609 }
4610 else if (pass == 2)
4611 {
4612 if (is_global_var (maybe_lookup_decl_in_outer_ctx (var, ctx)))
4613 x = var;
4614 else
4615 {
4616 bool by_ref = use_pointer_for_field (var, ctx);
4617 x = build_receiver_ref (var, by_ref, ctx);
4618 }
4619 if (!omp_is_reference (var))
4620 x = build_fold_addr_expr (x);
4621 x = fold_convert (ptr_type_node, x);
4622 unsigned cnt = task_reduction_cnt - 1;
4623 if (!task_reduction_needs_orig_p)
4624 cnt += task_reduction_cntorig_full - task_reduction_cntorig;
4625 else
4626 cnt = task_reduction_cntorig - 1;
4627 tree r = build4 (ARRAY_REF, ptr_type_node, tskred_avar,
4628 size_int (cnt), NULL_TREE, NULL_TREE);
4629 gimplify_assign (r, x, ilist);
4630 continue;
4631 }
4632 else if (pass == 3)
4633 {
4634 tree type = TREE_TYPE (new_var);
4635 if (!omp_is_reference (var))
4636 type = build_pointer_type (type);
4637 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION)
4638 {
4639 unsigned cnt = task_reduction_cnt - 1;
4640 if (!task_reduction_needs_orig_p)
4641 cnt += (task_reduction_cntorig_full
4642 - task_reduction_cntorig);
4643 else
4644 cnt = task_reduction_cntorig - 1;
4645 x = build4 (ARRAY_REF, ptr_type_node, tskred_avar,
4646 size_int (cnt), NULL_TREE, NULL_TREE);
4647 }
4648 else
4649 {
4650 unsigned int idx = *ctx->task_reduction_map->get (c);
4651 tree off;
4652 if (ctx->task_reductions[1 + idx])
4653 off = fold_convert (sizetype,
4654 ctx->task_reductions[1 + idx]);
4655 else
4656 off = task_reduction_read (ilist, tskred_temp, sizetype,
4657 7 + 3 * idx + 1);
4658 x = fold_build2 (POINTER_PLUS_EXPR, ptr_type_node,
4659 tskred_base, off);
4660 }
4661 x = fold_convert (type, x);
4662 tree t;
4663 if (omp_is_reference (var))
4664 {
4665 gimplify_assign (new_var, x, ilist);
4666 t = new_var;
4667 new_var = build_simple_mem_ref (new_var);
4668 }
4669 else
4670 {
4671 t = create_tmp_var (type);
4672 gimplify_assign (t, x, ilist);
4673 SET_DECL_VALUE_EXPR (new_var, build_simple_mem_ref (t));
4674 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
4675 }
4676 t = fold_convert (build_pointer_type (boolean_type_node), t);
4677 t = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (t), t,
4678 TYPE_SIZE_UNIT (TREE_TYPE (type)));
4679 cond = create_tmp_var (TREE_TYPE (t));
4680 gimplify_assign (cond, t, ilist);
4681 }
4682 else if (is_variable_sized (var))
4683 {
4684 /* For variable sized types, we need to allocate the
4685 actual storage here. Call alloca and store the
4686 result in the pointer decl that we created elsewhere. */
4687 if (pass == 0)
4688 continue;
4689
4690 if (c_kind != OMP_CLAUSE_FIRSTPRIVATE || !is_task_ctx (ctx))
4691 {
4692 gcall *stmt;
4693 tree tmp, atmp;
4694
4695 ptr = DECL_VALUE_EXPR (new_var);
4696 gcc_assert (TREE_CODE (ptr) == INDIRECT_REF);
4697 ptr = TREE_OPERAND (ptr, 0);
4698 gcc_assert (DECL_P (ptr));
4699 x = TYPE_SIZE_UNIT (TREE_TYPE (new_var));
4700
4701 /* void *tmp = __builtin_alloca */
4702 atmp = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
4703 stmt = gimple_build_call (atmp, 2, x,
4704 size_int (DECL_ALIGN (var)));
4705 tmp = create_tmp_var_raw (ptr_type_node);
4706 gimple_add_tmp_var (tmp);
4707 gimple_call_set_lhs (stmt, tmp);
4708
4709 gimple_seq_add_stmt (ilist, stmt);
4710
4711 x = fold_convert_loc (clause_loc, TREE_TYPE (ptr), tmp);
4712 gimplify_assign (ptr, x, ilist);
4713 }
4714 }
4715 else if (omp_is_reference (var)
4716 && (c_kind != OMP_CLAUSE_FIRSTPRIVATE
4717 || !OMP_CLAUSE_FIRSTPRIVATE_NO_REFERENCE (c)))
4718 {
4719 /* For references that are being privatized for Fortran,
4720 allocate new backing storage for the new pointer
4721 variable. This allows us to avoid changing all the
4722 code that expects a pointer to something that expects
4723 a direct variable. */
4724 if (pass == 0)
4725 continue;
4726
4727 x = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (new_var)));
4728 if (c_kind == OMP_CLAUSE_FIRSTPRIVATE && is_task_ctx (ctx))
4729 {
4730 x = build_receiver_ref (var, false, ctx);
4731 x = build_fold_addr_expr_loc (clause_loc, x);
4732 }
4733 else if (TREE_CONSTANT (x))
4734 {
4735 /* For reduction in SIMD loop, defer adding the
4736 initialization of the reference, because if we decide
 4737 to use SIMD array for it, the initialization could cause
4738 expansion ICE. Ditto for other privatization clauses. */
4739 if (is_simd)
4740 x = NULL_TREE;
4741 else
4742 {
4743 x = create_tmp_var_raw (TREE_TYPE (TREE_TYPE (new_var)),
4744 get_name (var));
4745 gimple_add_tmp_var (x);
4746 TREE_ADDRESSABLE (x) = 1;
4747 x = build_fold_addr_expr_loc (clause_loc, x);
4748 }
4749 }
4750 else
4751 {
4752 tree atmp
4753 = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
4754 tree rtype = TREE_TYPE (TREE_TYPE (new_var));
4755 tree al = size_int (TYPE_ALIGN (rtype));
4756 x = build_call_expr_loc (clause_loc, atmp, 2, x, al);
4757 }
4758
4759 if (x)
4760 {
4761 x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x);
4762 gimplify_assign (new_var, x, ilist);
4763 }
4764
4765 new_var = build_simple_mem_ref_loc (clause_loc, new_var);
4766 }
4767 else if ((c_kind == OMP_CLAUSE_REDUCTION
4768 || c_kind == OMP_CLAUSE_IN_REDUCTION)
4769 && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
4770 {
4771 if (pass == 0)
4772 continue;
4773 }
4774 else if (pass != 0)
4775 continue;
4776
4777 switch (OMP_CLAUSE_CODE (c))
4778 {
4779 case OMP_CLAUSE_SHARED:
4780 /* Ignore shared directives in teams construct inside
4781 target construct. */
4782 if (gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS
4783 && !is_host_teams_ctx (ctx))
4784 continue;
4785 /* Shared global vars are just accessed directly. */
4786 if (is_global_var (new_var))
4787 break;
4788 /* For taskloop firstprivate/lastprivate, represented
4789 as firstprivate and shared clause on the task, new_var
4790 is the firstprivate var. */
4791 if (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
4792 break;
4793 /* Set up the DECL_VALUE_EXPR for shared variables now. This
4794 needs to be delayed until after fixup_child_record_type so
4795 that we get the correct type during the dereference. */
4796 by_ref = use_pointer_for_field (var, ctx);
4797 x = build_receiver_ref (var, by_ref, ctx);
4798 SET_DECL_VALUE_EXPR (new_var, x);
4799 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
4800
4801 /* ??? If VAR is not passed by reference, and the variable
4802 hasn't been initialized yet, then we'll get a warning for
4803 the store into the omp_data_s structure. Ideally, we'd be
4804 able to notice this and not store anything at all, but
4805 we're generating code too early. Suppress the warning. */
4806 if (!by_ref)
4807 TREE_NO_WARNING (var) = 1;
4808 break;
4809
4810 case OMP_CLAUSE__CONDTEMP_:
4811 if (is_parallel_ctx (ctx))
4812 {
4813 x = build_receiver_ref (var, false, ctx);
4814 SET_DECL_VALUE_EXPR (new_var, x);
4815 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
4816 }
4817 else if (is_simd && !OMP_CLAUSE__CONDTEMP__ITER (c))
4818 {
4819 x = build_zero_cst (TREE_TYPE (var));
4820 goto do_private;
4821 }
4822 break;
4823
4824 case OMP_CLAUSE_LASTPRIVATE:
4825 if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
4826 break;
4827 /* FALLTHRU */
4828
4829 case OMP_CLAUSE_PRIVATE:
4830 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_PRIVATE)
4831 x = build_outer_var_ref (var, ctx);
4832 else if (OMP_CLAUSE_PRIVATE_OUTER_REF (c))
4833 {
4834 if (is_task_ctx (ctx))
4835 x = build_receiver_ref (var, false, ctx);
4836 else
4837 x = build_outer_var_ref (var, ctx, OMP_CLAUSE_PRIVATE);
4838 }
4839 else
4840 x = NULL;
4841 do_private:
4842 tree nx;
4843 nx = lang_hooks.decls.omp_clause_default_ctor
4844 (c, unshare_expr (new_var), x);
4845 if (is_simd)
4846 {
4847 tree y = lang_hooks.decls.omp_clause_dtor (c, new_var);
4848 if ((TREE_ADDRESSABLE (new_var) || nx || y
4849 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
4850 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE__CONDTEMP_
4851 || omp_is_reference (var))
4852 && lower_rec_simd_input_clauses (new_var, ctx, &sctx,
4853 ivar, lvar))
4854 {
4855 if (omp_is_reference (var))
4856 {
4857 gcc_assert (TREE_CODE (new_var) == MEM_REF);
4858 tree new_vard = TREE_OPERAND (new_var, 0);
4859 gcc_assert (DECL_P (new_vard));
4860 SET_DECL_VALUE_EXPR (new_vard,
4861 build_fold_addr_expr (lvar));
4862 DECL_HAS_VALUE_EXPR_P (new_vard) = 1;
4863 }
4864
4865 if (nx)
4866 x = lang_hooks.decls.omp_clause_default_ctor
4867 (c, unshare_expr (ivar), x);
4868 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE__CONDTEMP_)
4869 {
4870 x = build2 (MODIFY_EXPR, TREE_TYPE (ivar),
4871 unshare_expr (ivar), x);
4872 nx = x;
4873 }
4874 if (nx && x)
4875 gimplify_and_add (x, &llist[0]);
4876 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
4877 && OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c))
4878 {
4879 tree v = new_var;
4880 if (!DECL_P (v))
4881 {
4882 gcc_assert (TREE_CODE (v) == MEM_REF);
4883 v = TREE_OPERAND (v, 0);
4884 gcc_assert (DECL_P (v));
4885 }
4886 v = *ctx->lastprivate_conditional_map->get (v);
4887 tree t = create_tmp_var (TREE_TYPE (v));
4888 tree z = build_zero_cst (TREE_TYPE (v));
4889 tree orig_v
4890 = build_outer_var_ref (var, ctx,
4891 OMP_CLAUSE_LASTPRIVATE);
4892 gimple_seq_add_stmt (dlist,
4893 gimple_build_assign (t, z));
4894 gcc_assert (DECL_HAS_VALUE_EXPR_P (v));
4895 tree civar = DECL_VALUE_EXPR (v);
4896 gcc_assert (TREE_CODE (civar) == ARRAY_REF);
4897 civar = unshare_expr (civar);
4898 TREE_OPERAND (civar, 1) = sctx.idx;
4899 x = build2 (MODIFY_EXPR, TREE_TYPE (t), t,
4900 unshare_expr (civar));
4901 x = build2 (COMPOUND_EXPR, TREE_TYPE (orig_v), x,
4902 build2 (MODIFY_EXPR, TREE_TYPE (orig_v),
4903 orig_v, unshare_expr (ivar)));
4904 tree cond = build2 (LT_EXPR, boolean_type_node, t,
4905 civar);
4906 x = build3 (COND_EXPR, void_type_node, cond, x,
4907 void_node);
4908 gimple_seq tseq = NULL;
4909 gimplify_and_add (x, &tseq);
4910 if (ctx->outer)
4911 lower_omp (&tseq, ctx->outer);
4912 gimple_seq_add_seq (&llist[1], tseq);
4913 }
4914 if (y)
4915 {
4916 y = lang_hooks.decls.omp_clause_dtor (c, ivar);
4917 if (y)
4918 gimplify_and_add (y, &llist[1]);
4919 }
4920 break;
4921 }
4922 if (omp_is_reference (var))
4923 {
4924 gcc_assert (TREE_CODE (new_var) == MEM_REF);
4925 tree new_vard = TREE_OPERAND (new_var, 0);
4926 gcc_assert (DECL_P (new_vard));
4927 tree type = TREE_TYPE (TREE_TYPE (new_vard));
4928 x = TYPE_SIZE_UNIT (type);
4929 if (TREE_CONSTANT (x))
4930 {
4931 x = create_tmp_var_raw (type, get_name (var));
4932 gimple_add_tmp_var (x);
4933 TREE_ADDRESSABLE (x) = 1;
4934 x = build_fold_addr_expr_loc (clause_loc, x);
4935 x = fold_convert_loc (clause_loc,
4936 TREE_TYPE (new_vard), x);
4937 gimplify_assign (new_vard, x, ilist);
4938 }
4939 }
4940 }
4941 if (nx)
4942 gimplify_and_add (nx, ilist);
4943 /* FALLTHRU */
4944
4945 do_dtor:
4946 x = lang_hooks.decls.omp_clause_dtor (c, new_var);
4947 if (x)
4948 gimplify_and_add (x, dlist);
4949 break;
4950
4951 case OMP_CLAUSE_LINEAR:
4952 if (!OMP_CLAUSE_LINEAR_NO_COPYIN (c))
4953 goto do_firstprivate;
4954 if (OMP_CLAUSE_LINEAR_NO_COPYOUT (c))
4955 x = NULL;
4956 else
4957 x = build_outer_var_ref (var, ctx);
4958 goto do_private;
4959
4960 case OMP_CLAUSE_FIRSTPRIVATE:
4961 if (is_task_ctx (ctx))
4962 {
4963 if ((omp_is_reference (var)
4964 && !OMP_CLAUSE_FIRSTPRIVATE_NO_REFERENCE (c))
4965 || is_variable_sized (var))
4966 goto do_dtor;
4967 else if (is_global_var (maybe_lookup_decl_in_outer_ctx (var,
4968 ctx))
4969 || use_pointer_for_field (var, NULL))
4970 {
4971 x = build_receiver_ref (var, false, ctx);
4972 SET_DECL_VALUE_EXPR (new_var, x);
4973 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
4974 goto do_dtor;
4975 }
4976 }
4977 if (OMP_CLAUSE_FIRSTPRIVATE_NO_REFERENCE (c)
4978 && omp_is_reference (var))
4979 {
4980 x = build_outer_var_ref (var, ctx);
4981 gcc_assert (TREE_CODE (x) == MEM_REF
4982 && integer_zerop (TREE_OPERAND (x, 1)));
4983 x = TREE_OPERAND (x, 0);
4984 x = lang_hooks.decls.omp_clause_copy_ctor
4985 (c, unshare_expr (new_var), x);
4986 gimplify_and_add (x, ilist);
4987 goto do_dtor;
4988 }
4989 do_firstprivate:
4990 x = build_outer_var_ref (var, ctx);
4991 if (is_simd)
4992 {
4993 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
4994 && gimple_omp_for_combined_into_p (ctx->stmt))
4995 {
4996 tree t = OMP_CLAUSE_LINEAR_STEP (c);
4997 tree stept = TREE_TYPE (t);
4998 tree ct = omp_find_clause (clauses,
4999 OMP_CLAUSE__LOOPTEMP_);
5000 gcc_assert (ct);
5001 tree l = OMP_CLAUSE_DECL (ct);
5002 tree n1 = fd->loop.n1;
5003 tree step = fd->loop.step;
5004 tree itype = TREE_TYPE (l);
5005 if (POINTER_TYPE_P (itype))
5006 itype = signed_type_for (itype);
5007 l = fold_build2 (MINUS_EXPR, itype, l, n1);
5008 if (TYPE_UNSIGNED (itype)
5009 && fd->loop.cond_code == GT_EXPR)
5010 l = fold_build2 (TRUNC_DIV_EXPR, itype,
5011 fold_build1 (NEGATE_EXPR, itype, l),
5012 fold_build1 (NEGATE_EXPR,
5013 itype, step));
5014 else
5015 l = fold_build2 (TRUNC_DIV_EXPR, itype, l, step);
5016 t = fold_build2 (MULT_EXPR, stept,
5017 fold_convert (stept, l), t);
5018
5019 if (OMP_CLAUSE_LINEAR_ARRAY (c))
5020 {
5021 if (omp_is_reference (var))
5022 {
5023 gcc_assert (TREE_CODE (new_var) == MEM_REF);
5024 tree new_vard = TREE_OPERAND (new_var, 0);
5025 gcc_assert (DECL_P (new_vard));
5026 tree type = TREE_TYPE (TREE_TYPE (new_vard));
5027 nx = TYPE_SIZE_UNIT (type);
5028 if (TREE_CONSTANT (nx))
5029 {
5030 nx = create_tmp_var_raw (type,
5031 get_name (var));
5032 gimple_add_tmp_var (nx);
5033 TREE_ADDRESSABLE (nx) = 1;
5034 nx = build_fold_addr_expr_loc (clause_loc,
5035 nx);
5036 nx = fold_convert_loc (clause_loc,
5037 TREE_TYPE (new_vard),
5038 nx);
5039 gimplify_assign (new_vard, nx, ilist);
5040 }
5041 }
5042
5043 x = lang_hooks.decls.omp_clause_linear_ctor
5044 (c, new_var, x, t);
5045 gimplify_and_add (x, ilist);
5046 goto do_dtor;
5047 }
5048
5049 if (POINTER_TYPE_P (TREE_TYPE (x)))
5050 x = fold_build2 (POINTER_PLUS_EXPR,
5051 TREE_TYPE (x), x, t);
5052 else
5053 x = fold_build2 (PLUS_EXPR, TREE_TYPE (x), x, t);
5054 }
5055
5056 if ((OMP_CLAUSE_CODE (c) != OMP_CLAUSE_LINEAR
5057 || TREE_ADDRESSABLE (new_var)
5058 || omp_is_reference (var))
5059 && lower_rec_simd_input_clauses (new_var, ctx, &sctx,
5060 ivar, lvar))
5061 {
5062 if (omp_is_reference (var))
5063 {
5064 gcc_assert (TREE_CODE (new_var) == MEM_REF);
5065 tree new_vard = TREE_OPERAND (new_var, 0);
5066 gcc_assert (DECL_P (new_vard));
5067 SET_DECL_VALUE_EXPR (new_vard,
5068 build_fold_addr_expr (lvar));
5069 DECL_HAS_VALUE_EXPR_P (new_vard) = 1;
5070 }
5071 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR)
5072 {
5073 tree iv = create_tmp_var (TREE_TYPE (new_var));
5074 x = lang_hooks.decls.omp_clause_copy_ctor (c, iv, x);
5075 gimplify_and_add (x, ilist);
5076 gimple_stmt_iterator gsi
5077 = gsi_start_1 (gimple_omp_body_ptr (ctx->stmt));
5078 gassign *g
5079 = gimple_build_assign (unshare_expr (lvar), iv);
5080 gsi_insert_before_without_update (&gsi, g,
5081 GSI_SAME_STMT);
5082 tree t = OMP_CLAUSE_LINEAR_STEP (c);
5083 enum tree_code code = PLUS_EXPR;
5084 if (POINTER_TYPE_P (TREE_TYPE (new_var)))
5085 code = POINTER_PLUS_EXPR;
5086 g = gimple_build_assign (iv, code, iv, t);
5087 gsi_insert_before_without_update (&gsi, g,
5088 GSI_SAME_STMT);
5089 break;
5090 }
5091 x = lang_hooks.decls.omp_clause_copy_ctor
5092 (c, unshare_expr (ivar), x);
5093 gimplify_and_add (x, &llist[0]);
5094 x = lang_hooks.decls.omp_clause_dtor (c, ivar);
5095 if (x)
5096 gimplify_and_add (x, &llist[1]);
5097 break;
5098 }
5099 if (omp_is_reference (var))
5100 {
5101 gcc_assert (TREE_CODE (new_var) == MEM_REF);
5102 tree new_vard = TREE_OPERAND (new_var, 0);
5103 gcc_assert (DECL_P (new_vard));
5104 tree type = TREE_TYPE (TREE_TYPE (new_vard));
5105 nx = TYPE_SIZE_UNIT (type);
5106 if (TREE_CONSTANT (nx))
5107 {
5108 nx = create_tmp_var_raw (type, get_name (var));
5109 gimple_add_tmp_var (nx);
5110 TREE_ADDRESSABLE (nx) = 1;
5111 nx = build_fold_addr_expr_loc (clause_loc, nx);
5112 nx = fold_convert_loc (clause_loc,
5113 TREE_TYPE (new_vard), nx);
5114 gimplify_assign (new_vard, nx, ilist);
5115 }
5116 }
5117 }
5118 x = lang_hooks.decls.omp_clause_copy_ctor
5119 (c, unshare_expr (new_var), x);
5120 gimplify_and_add (x, ilist);
5121 goto do_dtor;
5122
5123 case OMP_CLAUSE__LOOPTEMP_:
5124 case OMP_CLAUSE__REDUCTEMP_:
5125 gcc_assert (is_taskreg_ctx (ctx));
5126 x = build_outer_var_ref (var, ctx);
5127 x = build2 (MODIFY_EXPR, TREE_TYPE (new_var), new_var, x);
5128 gimplify_and_add (x, ilist);
5129 break;
5130
5131 case OMP_CLAUSE_COPYIN:
5132 by_ref = use_pointer_for_field (var, NULL);
5133 x = build_receiver_ref (var, by_ref, ctx);
5134 x = lang_hooks.decls.omp_clause_assign_op (c, new_var, x);
5135 append_to_statement_list (x, &copyin_seq);
5136 copyin_by_ref |= by_ref;
5137 break;
5138
5139 case OMP_CLAUSE_REDUCTION:
5140 case OMP_CLAUSE_IN_REDUCTION:
5141 /* OpenACC reductions are initialized using the
5142 GOACC_REDUCTION internal function. */
5143 if (is_gimple_omp_oacc (ctx->stmt))
5144 break;
5145 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
5146 {
5147 tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
5148 gimple *tseq;
5149 tree ptype = TREE_TYPE (placeholder);
5150 if (cond)
5151 {
5152 x = error_mark_node;
5153 if (OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c)
5154 && !task_reduction_needs_orig_p)
5155 x = var;
5156 else if (OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c))
5157 {
5158 tree pptype = build_pointer_type (ptype);
5159 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION)
5160 x = build4 (ARRAY_REF, ptr_type_node, tskred_avar,
5161 size_int (task_reduction_cnt_full
5162 + task_reduction_cntorig - 1),
5163 NULL_TREE, NULL_TREE);
5164 else
5165 {
5166 unsigned int idx
5167 = *ctx->task_reduction_map->get (c);
5168 x = task_reduction_read (ilist, tskred_temp,
5169 pptype, 7 + 3 * idx);
5170 }
5171 x = fold_convert (pptype, x);
5172 x = build_simple_mem_ref (x);
5173 }
5174 }
5175 else
5176 {
5177 x = build_outer_var_ref (var, ctx);
5178
5179 if (omp_is_reference (var)
5180 && !useless_type_conversion_p (ptype, TREE_TYPE (x)))
5181 x = build_fold_addr_expr_loc (clause_loc, x);
5182 }
5183 SET_DECL_VALUE_EXPR (placeholder, x);
5184 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
5185 tree new_vard = new_var;
5186 if (omp_is_reference (var))
5187 {
5188 gcc_assert (TREE_CODE (new_var) == MEM_REF);
5189 new_vard = TREE_OPERAND (new_var, 0);
5190 gcc_assert (DECL_P (new_vard));
5191 }
5192 tree rvar = NULL_TREE, *rvarp = NULL, rvar2 = NULL_TREE;
5193 if (is_simd
5194 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
5195 && OMP_CLAUSE_REDUCTION_INSCAN (c))
5196 rvarp = &rvar;
5197 if (is_simd
5198 && lower_rec_simd_input_clauses (new_var, ctx, &sctx,
5199 ivar, lvar, rvarp,
5200 &rvar2))
5201 {
5202 if (new_vard == new_var)
5203 {
5204 gcc_assert (DECL_VALUE_EXPR (new_var) == lvar);
5205 SET_DECL_VALUE_EXPR (new_var, ivar);
5206 }
5207 else
5208 {
5209 SET_DECL_VALUE_EXPR (new_vard,
5210 build_fold_addr_expr (ivar));
5211 DECL_HAS_VALUE_EXPR_P (new_vard) = 1;
5212 }
5213 x = lang_hooks.decls.omp_clause_default_ctor
5214 (c, unshare_expr (ivar),
5215 build_outer_var_ref (var, ctx));
5216 if (rvarp)
5217 {
5218 if (x)
5219 {
5220 gimplify_and_add (x, &llist[0]);
5221
5222 tree ivar2 = unshare_expr (lvar);
5223 TREE_OPERAND (ivar2, 1) = sctx.idx;
5224 x = lang_hooks.decls.omp_clause_default_ctor
5225 (c, ivar2, build_outer_var_ref (var, ctx));
5226 gimplify_and_add (x, &llist[0]);
5227
5228 if (rvar2)
5229 {
5230 x = lang_hooks.decls.omp_clause_default_ctor
5231 (c, unshare_expr (rvar2),
5232 build_outer_var_ref (var, ctx));
5233 gimplify_and_add (x, &llist[0]);
5234 }
5235
5236 /* For types that need construction, add another
5237 private var which will be default constructed
5238 and optionally initialized with
5239 OMP_CLAUSE_REDUCTION_GIMPLE_INIT, as in the
5240 loop we want to assign this value instead of
5241 constructing and destructing it in each
5242 iteration. */
5243 tree nv = create_tmp_var_raw (TREE_TYPE (ivar));
5244 gimple_add_tmp_var (nv);
5245 ctx->cb.decl_map->put (TREE_OPERAND (rvar2
5246 ? rvar2
5247 : ivar, 0),
5248 nv);
5249 x = lang_hooks.decls.omp_clause_default_ctor
5250 (c, nv, build_outer_var_ref (var, ctx));
5251 gimplify_and_add (x, ilist);
5252
5253 if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c))
5254 {
5255 tseq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c);
5256 x = DECL_VALUE_EXPR (new_vard);
5257 tree vexpr = nv;
5258 if (new_vard != new_var)
5259 vexpr = build_fold_addr_expr (nv);
5260 SET_DECL_VALUE_EXPR (new_vard, vexpr);
5261 lower_omp (&tseq, ctx);
5262 SET_DECL_VALUE_EXPR (new_vard, x);
5263 gimple_seq_add_seq (ilist, tseq);
5264 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL;
5265 }
5266
5267 x = lang_hooks.decls.omp_clause_dtor (c, nv);
5268 if (x)
5269 gimplify_and_add (x, dlist);
5270 }
5271
5272 tree ref = build_outer_var_ref (var, ctx);
5273 x = unshare_expr (ivar);
5274 x = lang_hooks.decls.omp_clause_assign_op (c, x,
5275 ref);
5276 gimplify_and_add (x, &llist[0]);
5277
5278 ref = build_outer_var_ref (var, ctx);
5279 x = lang_hooks.decls.omp_clause_assign_op (c, ref,
5280 rvar);
5281 gimplify_and_add (x, &llist[3]);
5282
5283 DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
5284 if (new_vard == new_var)
5285 SET_DECL_VALUE_EXPR (new_var, lvar);
5286 else
5287 SET_DECL_VALUE_EXPR (new_vard,
5288 build_fold_addr_expr (lvar));
5289
5290 x = lang_hooks.decls.omp_clause_dtor (c, ivar);
5291 if (x)
5292 gimplify_and_add (x, &llist[1]);
5293
5294 tree ivar2 = unshare_expr (lvar);
5295 TREE_OPERAND (ivar2, 1) = sctx.idx;
5296 x = lang_hooks.decls.omp_clause_dtor (c, ivar2);
5297 if (x)
5298 gimplify_and_add (x, &llist[1]);
5299
5300 if (rvar2)
5301 {
5302 x = lang_hooks.decls.omp_clause_dtor (c, rvar2);
5303 if (x)
5304 gimplify_and_add (x, &llist[1]);
5305 }
5306 break;
5307 }
5308 if (x)
5309 gimplify_and_add (x, &llist[0]);
5310 if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c))
5311 {
5312 tseq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c);
5313 lower_omp (&tseq, ctx);
5314 gimple_seq_add_seq (&llist[0], tseq);
5315 }
5316 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL;
5317 tseq = OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c);
5318 lower_omp (&tseq, ctx);
5319 gimple_seq_add_seq (&llist[1], tseq);
5320 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
5321 DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
5322 if (new_vard == new_var)
5323 SET_DECL_VALUE_EXPR (new_var, lvar);
5324 else
5325 SET_DECL_VALUE_EXPR (new_vard,
5326 build_fold_addr_expr (lvar));
5327 x = lang_hooks.decls.omp_clause_dtor (c, ivar);
5328 if (x)
5329 gimplify_and_add (x, &llist[1]);
5330 break;
5331 }
5332 /* If this is a reference to constant size reduction var
5333 with placeholder, we haven't emitted the initializer
5334 for it because it is undesirable if SIMD arrays are used.
5335 But if they aren't used, we need to emit the deferred
5336 initialization now. */
5337 else if (omp_is_reference (var) && is_simd)
5338 handle_simd_reference (clause_loc, new_vard, ilist);
5339
5340 tree lab2 = NULL_TREE;
5341 if (cond)
5342 {
5343 gimple *g;
5344 if (!is_parallel_ctx (ctx))
5345 {
5346 tree condv = create_tmp_var (boolean_type_node);
5347 tree m = build_simple_mem_ref (cond);
5348 g = gimple_build_assign (condv, m);
5349 gimple_seq_add_stmt (ilist, g);
5350 tree lab1
5351 = create_artificial_label (UNKNOWN_LOCATION);
5352 lab2 = create_artificial_label (UNKNOWN_LOCATION);
5353 g = gimple_build_cond (NE_EXPR, condv,
5354 boolean_false_node,
5355 lab2, lab1);
5356 gimple_seq_add_stmt (ilist, g);
5357 gimple_seq_add_stmt (ilist,
5358 gimple_build_label (lab1));
5359 }
5360 g = gimple_build_assign (build_simple_mem_ref (cond),
5361 boolean_true_node);
5362 gimple_seq_add_stmt (ilist, g);
5363 }
5364 x = lang_hooks.decls.omp_clause_default_ctor
5365 (c, unshare_expr (new_var),
5366 cond ? NULL_TREE
5367 : build_outer_var_ref (var, ctx));
5368 if (x)
5369 gimplify_and_add (x, ilist);
5370
5371 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
5372 && OMP_CLAUSE_REDUCTION_INSCAN (c))
5373 {
5374 if (x || (!is_simd
5375 && OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c)))
5376 {
5377 tree nv = create_tmp_var_raw (TREE_TYPE (new_var));
5378 gimple_add_tmp_var (nv);
5379 ctx->cb.decl_map->put (new_vard, nv);
5380 x = lang_hooks.decls.omp_clause_default_ctor
5381 (c, nv, build_outer_var_ref (var, ctx));
5382 if (x)
5383 gimplify_and_add (x, ilist);
5384 if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c))
5385 {
5386 tseq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c);
5387 tree vexpr = nv;
5388 if (new_vard != new_var)
5389 vexpr = build_fold_addr_expr (nv);
5390 SET_DECL_VALUE_EXPR (new_vard, vexpr);
5391 DECL_HAS_VALUE_EXPR_P (new_vard) = 1;
5392 lower_omp (&tseq, ctx);
5393 SET_DECL_VALUE_EXPR (new_vard, NULL_TREE);
5394 DECL_HAS_VALUE_EXPR_P (new_vard) = 0;
5395 gimple_seq_add_seq (ilist, tseq);
5396 }
5397 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL;
5398 if (is_simd && ctx->scan_exclusive)
5399 {
5400 tree nv2
5401 = create_tmp_var_raw (TREE_TYPE (new_var));
5402 gimple_add_tmp_var (nv2);
5403 ctx->cb.decl_map->put (nv, nv2);
5404 x = lang_hooks.decls.omp_clause_default_ctor
5405 (c, nv2, build_outer_var_ref (var, ctx));
5406 gimplify_and_add (x, ilist);
5407 x = lang_hooks.decls.omp_clause_dtor (c, nv2);
5408 if (x)
5409 gimplify_and_add (x, dlist);
5410 }
5411 x = lang_hooks.decls.omp_clause_dtor (c, nv);
5412 if (x)
5413 gimplify_and_add (x, dlist);
5414 }
5415 else if (is_simd
5416 && ctx->scan_exclusive
5417 && TREE_ADDRESSABLE (TREE_TYPE (new_var)))
5418 {
5419 tree nv2 = create_tmp_var_raw (TREE_TYPE (new_var));
5420 gimple_add_tmp_var (nv2);
5421 ctx->cb.decl_map->put (new_vard, nv2);
5422 x = lang_hooks.decls.omp_clause_dtor (c, nv2);
5423 if (x)
5424 gimplify_and_add (x, dlist);
5425 }
5426 DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
5427 goto do_dtor;
5428 }
5429
5430 if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c))
5431 {
5432 tseq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c);
5433 lower_omp (&tseq, ctx);
5434 gimple_seq_add_seq (ilist, tseq);
5435 }
5436 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL;
5437 if (is_simd)
5438 {
5439 tseq = OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c);
5440 lower_omp (&tseq, ctx);
5441 gimple_seq_add_seq (dlist, tseq);
5442 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
5443 }
5444 DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
5445 if (cond)
5446 {
5447 if (lab2)
5448 gimple_seq_add_stmt (ilist, gimple_build_label (lab2));
5449 break;
5450 }
5451 goto do_dtor;
5452 }
5453 else
5454 {
5455 x = omp_reduction_init (c, TREE_TYPE (new_var));
5456 gcc_assert (TREE_CODE (TREE_TYPE (new_var)) != ARRAY_TYPE);
5457 enum tree_code code = OMP_CLAUSE_REDUCTION_CODE (c);
5458
5459 if (cond)
5460 {
5461 gimple *g;
5462 tree lab2 = NULL_TREE;
5463 /* GOMP_taskgroup_reduction_register memsets the whole
5464 array to zero. If the initializer is zero, we don't
5465 need to initialize it again, just mark it as ever
5466 used unconditionally, i.e. cond = true. */
5467 if (initializer_zerop (x))
5468 {
5469 g = gimple_build_assign (build_simple_mem_ref (cond),
5470 boolean_true_node);
5471 gimple_seq_add_stmt (ilist, g);
5472 break;
5473 }
5474
5475 /* Otherwise, emit
5476 if (!cond) { cond = true; new_var = x; } */
5477 if (!is_parallel_ctx (ctx))
5478 {
5479 tree condv = create_tmp_var (boolean_type_node);
5480 tree m = build_simple_mem_ref (cond);
5481 g = gimple_build_assign (condv, m);
5482 gimple_seq_add_stmt (ilist, g);
5483 tree lab1
5484 = create_artificial_label (UNKNOWN_LOCATION);
5485 lab2 = create_artificial_label (UNKNOWN_LOCATION);
5486 g = gimple_build_cond (NE_EXPR, condv,
5487 boolean_false_node,
5488 lab2, lab1);
5489 gimple_seq_add_stmt (ilist, g);
5490 gimple_seq_add_stmt (ilist,
5491 gimple_build_label (lab1));
5492 }
5493 g = gimple_build_assign (build_simple_mem_ref (cond),
5494 boolean_true_node);
5495 gimple_seq_add_stmt (ilist, g);
5496 gimplify_assign (new_var, x, ilist);
5497 if (lab2)
5498 gimple_seq_add_stmt (ilist, gimple_build_label (lab2));
5499 break;
5500 }
5501
5502 /* reduction(-:var) sums up the partial results, so it
5503 acts identically to reduction(+:var). */
5504 if (code == MINUS_EXPR)
5505 code = PLUS_EXPR;
5506
5507 tree new_vard = new_var;
5508 if (is_simd && omp_is_reference (var))
5509 {
5510 gcc_assert (TREE_CODE (new_var) == MEM_REF);
5511 new_vard = TREE_OPERAND (new_var, 0);
5512 gcc_assert (DECL_P (new_vard));
5513 }
5514 tree rvar = NULL_TREE, *rvarp = NULL, rvar2 = NULL_TREE;
5515 if (is_simd
5516 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
5517 && OMP_CLAUSE_REDUCTION_INSCAN (c))
5518 rvarp = &rvar;
5519 if (is_simd
5520 && lower_rec_simd_input_clauses (new_var, ctx, &sctx,
5521 ivar, lvar, rvarp,
5522 &rvar2))
5523 {
5524 if (new_vard != new_var)
5525 {
5526 SET_DECL_VALUE_EXPR (new_vard,
5527 build_fold_addr_expr (lvar));
5528 DECL_HAS_VALUE_EXPR_P (new_vard) = 1;
5529 }
5530
5531 tree ref = build_outer_var_ref (var, ctx);
5532
5533 if (rvarp)
5534 {
5535 gimplify_assign (ivar, ref, &llist[0]);
5536 ref = build_outer_var_ref (var, ctx);
5537 gimplify_assign (ref, rvar, &llist[3]);
5538 break;
5539 }
5540
5541 gimplify_assign (unshare_expr (ivar), x, &llist[0]);
5542
5543 if (sctx.is_simt)
5544 {
5545 if (!simt_lane)
5546 simt_lane = create_tmp_var (unsigned_type_node);
5547 x = build_call_expr_internal_loc
5548 (UNKNOWN_LOCATION, IFN_GOMP_SIMT_XCHG_BFLY,
5549 TREE_TYPE (ivar), 2, ivar, simt_lane);
5550 x = build2 (code, TREE_TYPE (ivar), ivar, x);
5551 gimplify_assign (ivar, x, &llist[2]);
5552 }
5553 x = build2 (code, TREE_TYPE (ref), ref, ivar);
5554 ref = build_outer_var_ref (var, ctx);
5555 gimplify_assign (ref, x, &llist[1]);
5556
5557 }
5558 else
5559 {
5560 if (omp_is_reference (var) && is_simd)
5561 handle_simd_reference (clause_loc, new_vard, ilist);
5562 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
5563 && OMP_CLAUSE_REDUCTION_INSCAN (c))
5564 break;
5565 gimplify_assign (new_var, x, ilist);
5566 if (is_simd)
5567 {
5568 tree ref = build_outer_var_ref (var, ctx);
5569
5570 x = build2 (code, TREE_TYPE (ref), ref, new_var);
5571 ref = build_outer_var_ref (var, ctx);
5572 gimplify_assign (ref, x, dlist);
5573 }
5574 }
5575 }
5576 break;
5577
5578 default:
5579 gcc_unreachable ();
5580 }
5581 }
5582 }
5583 if (tskred_avar)
5584 {
5585 tree clobber = build_constructor (TREE_TYPE (tskred_avar), NULL);
5586 TREE_THIS_VOLATILE (clobber) = 1;
5587 gimple_seq_add_stmt (ilist, gimple_build_assign (tskred_avar, clobber));
5588 }
5589
5590 if (known_eq (sctx.max_vf, 1U))
5591 {
5592 sctx.is_simt = false;
5593 if (ctx->lastprivate_conditional_map)
5594 {
5595 if (gimple_omp_for_combined_into_p (ctx->stmt))
5596 {
5597 /* Signal to lower_omp_1 that it should use parent context. */
5598 ctx->combined_into_simd_safelen0 = true;
5599 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
5600 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
5601 && OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c))
5602 {
5603 tree o = lookup_decl (OMP_CLAUSE_DECL (c), ctx);
5604 tree *v
5605 = ctx->lastprivate_conditional_map->get (o);
5606 tree po = lookup_decl (OMP_CLAUSE_DECL (c), ctx->outer);
5607 tree *pv
5608 = ctx->outer->lastprivate_conditional_map->get (po);
5609 *v = *pv;
5610 }
5611 }
5612 else
5613 {
5614 /* When not vectorized, treat lastprivate(conditional:) like
5615 normal lastprivate, as there will be just one simd lane
5616 writing the privatized variable. */
5617 delete ctx->lastprivate_conditional_map;
5618 ctx->lastprivate_conditional_map = NULL;
5619 }
5620 }
5621 }
5622
5623 if (nonconst_simd_if)
5624 {
5625 if (sctx.lane == NULL_TREE)
5626 {
5627 sctx.idx = create_tmp_var (unsigned_type_node);
5628 sctx.lane = create_tmp_var (unsigned_type_node);
5629 }
5630 /* FIXME: For now. */
5631 sctx.is_simt = false;
5632 }
5633
5634 if (sctx.lane || sctx.is_simt)
5635 {
5636 uid = create_tmp_var (ptr_type_node, "simduid");
5637 /* Don't want uninit warnings on simduid, it is always uninitialized,
5638 but we use it not for the value, but for the DECL_UID only. */
5639 TREE_NO_WARNING (uid) = 1;
5640 c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__SIMDUID_);
5641 OMP_CLAUSE__SIMDUID__DECL (c) = uid;
5642 OMP_CLAUSE_CHAIN (c) = gimple_omp_for_clauses (ctx->stmt);
5643 gimple_omp_for_set_clauses (ctx->stmt, c);
5644 }
5645 /* Emit calls denoting privatized variables and initializing a pointer to
5646 structure that holds private variables as fields after ompdevlow pass. */
5647 if (sctx.is_simt)
5648 {
5649 sctx.simt_eargs[0] = uid;
5650 gimple *g
5651 = gimple_build_call_internal_vec (IFN_GOMP_SIMT_ENTER, sctx.simt_eargs);
5652 gimple_call_set_lhs (g, uid);
5653 gimple_seq_add_stmt (ilist, g);
5654 sctx.simt_eargs.release ();
5655
5656 simtrec = create_tmp_var (ptr_type_node, ".omp_simt");
5657 g = gimple_build_call_internal (IFN_GOMP_SIMT_ENTER_ALLOC, 1, uid);
5658 gimple_call_set_lhs (g, simtrec);
5659 gimple_seq_add_stmt (ilist, g);
5660 }
5661 if (sctx.lane)
5662 {
5663 gimple *g = gimple_build_call_internal (IFN_GOMP_SIMD_LANE,
5664 2 + (nonconst_simd_if != NULL),
5665 uid, integer_zero_node,
5666 nonconst_simd_if);
5667 gimple_call_set_lhs (g, sctx.lane);
5668 gimple_stmt_iterator gsi = gsi_start_1 (gimple_omp_body_ptr (ctx->stmt));
5669 gsi_insert_before_without_update (&gsi, g, GSI_SAME_STMT);
5670 g = gimple_build_assign (sctx.lane, INTEGER_CST,
5671 build_int_cst (unsigned_type_node, 0));
5672 gimple_seq_add_stmt (ilist, g);
5673 if (sctx.lastlane)
5674 {
5675 g = gimple_build_call_internal (IFN_GOMP_SIMD_LAST_LANE,
5676 2, uid, sctx.lane);
5677 gimple_call_set_lhs (g, sctx.lastlane);
5678 gimple_seq_add_stmt (dlist, g);
5679 gimple_seq_add_seq (dlist, llist[3]);
5680 }
5681 /* Emit reductions across SIMT lanes in log_2(simt_vf) steps. */
5682 if (llist[2])
5683 {
5684 tree simt_vf = create_tmp_var (unsigned_type_node);
5685 g = gimple_build_call_internal (IFN_GOMP_SIMT_VF, 0);
5686 gimple_call_set_lhs (g, simt_vf);
5687 gimple_seq_add_stmt (dlist, g);
5688
5689 tree t = build_int_cst (unsigned_type_node, 1);
5690 g = gimple_build_assign (simt_lane, INTEGER_CST, t);
5691 gimple_seq_add_stmt (dlist, g);
5692
5693 t = build_int_cst (unsigned_type_node, 0);
5694 g = gimple_build_assign (sctx.idx, INTEGER_CST, t);
5695 gimple_seq_add_stmt (dlist, g);
5696
5697 tree body = create_artificial_label (UNKNOWN_LOCATION);
5698 tree header = create_artificial_label (UNKNOWN_LOCATION);
5699 tree end = create_artificial_label (UNKNOWN_LOCATION);
5700 gimple_seq_add_stmt (dlist, gimple_build_goto (header));
5701 gimple_seq_add_stmt (dlist, gimple_build_label (body));
5702
5703 gimple_seq_add_seq (dlist, llist[2]);
5704
5705 g = gimple_build_assign (simt_lane, LSHIFT_EXPR, simt_lane, integer_one_node);
5706 gimple_seq_add_stmt (dlist, g);
5707
5708 gimple_seq_add_stmt (dlist, gimple_build_label (header));
5709 g = gimple_build_cond (LT_EXPR, simt_lane, simt_vf, body, end);
5710 gimple_seq_add_stmt (dlist, g);
5711
5712 gimple_seq_add_stmt (dlist, gimple_build_label (end));
5713 }
5714 for (int i = 0; i < 2; i++)
5715 if (llist[i])
5716 {
5717 tree vf = create_tmp_var (unsigned_type_node);
5718 g = gimple_build_call_internal (IFN_GOMP_SIMD_VF, 1, uid);
5719 gimple_call_set_lhs (g, vf);
5720 gimple_seq *seq = i == 0 ? ilist : dlist;
5721 gimple_seq_add_stmt (seq, g);
5722 tree t = build_int_cst (unsigned_type_node, 0);
5723 g = gimple_build_assign (sctx.idx, INTEGER_CST, t);
5724 gimple_seq_add_stmt (seq, g);
5725 tree body = create_artificial_label (UNKNOWN_LOCATION);
5726 tree header = create_artificial_label (UNKNOWN_LOCATION);
5727 tree end = create_artificial_label (UNKNOWN_LOCATION);
5728 gimple_seq_add_stmt (seq, gimple_build_goto (header));
5729 gimple_seq_add_stmt (seq, gimple_build_label (body));
5730 gimple_seq_add_seq (seq, llist[i]);
5731 t = build_int_cst (unsigned_type_node, 1);
5732 g = gimple_build_assign (sctx.idx, PLUS_EXPR, sctx.idx, t);
5733 gimple_seq_add_stmt (seq, g);
5734 gimple_seq_add_stmt (seq, gimple_build_label (header));
5735 g = gimple_build_cond (LT_EXPR, sctx.idx, vf, body, end);
5736 gimple_seq_add_stmt (seq, g);
5737 gimple_seq_add_stmt (seq, gimple_build_label (end));
5738 }
5739 }
5740 if (sctx.is_simt)
5741 {
5742 gimple_seq_add_seq (dlist, sctx.simt_dlist);
5743 gimple *g
5744 = gimple_build_call_internal (IFN_GOMP_SIMT_EXIT, 1, simtrec);
5745 gimple_seq_add_stmt (dlist, g);
5746 }
5747
5748 /* The copyin sequence is not to be executed by the main thread, since
5749 that would result in self-copies. Perhaps not visible to scalars,
5750 but it certainly is to C++ operator=. */
5751 if (copyin_seq)
5752 {
5753 x = build_call_expr (builtin_decl_explicit (BUILT_IN_OMP_GET_THREAD_NUM),
5754 0);
5755 x = build2 (NE_EXPR, boolean_type_node, x,
5756 build_int_cst (TREE_TYPE (x), 0));
5757 x = build3 (COND_EXPR, void_type_node, x, copyin_seq, NULL);
5758 gimplify_and_add (x, ilist);
5759 }
5760
5761 /* If any copyin variable is passed by reference, we must ensure the
5762 master thread doesn't modify it before it is copied over in all
5763 threads. Similarly for variables in both firstprivate and
5764 lastprivate clauses we need to ensure the lastprivate copying
5765 happens after firstprivate copying in all threads. And similarly
5766 for UDRs if initializer expression refers to omp_orig. */
5767 if (copyin_by_ref || lastprivate_firstprivate
5768 || (reduction_omp_orig_ref
5769 && !ctx->scan_inclusive
5770 && !ctx->scan_exclusive))
5771 {
5772 /* Don't add any barrier for #pragma omp simd or
5773 #pragma omp distribute. */
5774 if (!is_task_ctx (ctx)
5775 && (gimple_code (ctx->stmt) != GIMPLE_OMP_FOR
5776 || gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_FOR))
5777 gimple_seq_add_stmt (ilist, omp_build_barrier (NULL_TREE));
5778 }
5779
5780 /* If max_vf is non-zero, then we can use only a vectorization factor
5781 up to the max_vf we chose. So stick it into the safelen clause. */
5782 if (maybe_ne (sctx.max_vf, 0U))
5783 {
5784 tree c = omp_find_clause (gimple_omp_for_clauses (ctx->stmt),
5785 OMP_CLAUSE_SAFELEN);
5786 poly_uint64 safe_len;
5787 if (c == NULL_TREE
5788 || (poly_int_tree_p (OMP_CLAUSE_SAFELEN_EXPR (c), &safe_len)
5789 && maybe_gt (safe_len, sctx.max_vf)))
5790 {
5791 c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE_SAFELEN);
5792 OMP_CLAUSE_SAFELEN_EXPR (c) = build_int_cst (integer_type_node,
5793 sctx.max_vf);
5794 OMP_CLAUSE_CHAIN (c) = gimple_omp_for_clauses (ctx->stmt);
5795 gimple_omp_for_set_clauses (ctx->stmt, c);
5796 }
5797 }
5798 }
5799
/* Create temporary variables for lastprivate(conditional:) implementation
   in context CTX with CLAUSES.  For every lastprivate(conditional:) clause
   a privatized iteration-counter temporary is created and recorded in
   ctx->lastprivate_conditional_map (keyed by the privatized decl); the
   counters and, for the non-simd case, a pointer to shared storage are
   communicated through artificial OMP_CLAUSE__CONDTEMP_ clauses spliced
   into *CLAUSES.  */

static void
lower_lastprivate_conditional_clauses (tree *clauses, omp_context *ctx)
{
  /* Type of the per-construct iteration counter; NULL_TREE until the first
     conditional lastprivate clause is seen.  */
  tree iter_type = NULL_TREE;
  /* Non-simd only: pointer to the shared array of counters.  */
  tree cond_ptr = NULL_TREE;
  /* Privatized iterator counter shared by all clauses of the construct.  */
  tree iter_var = NULL_TREE;
  bool is_simd = (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
		  && gimple_omp_for_kind (ctx->stmt) & GF_OMP_FOR_SIMD);
  /* Search cursor into the clause chain for the simd case, so each
     conditional lastprivate pairs with its own _condtemp_ clause.  */
  tree next = *clauses;
  for (tree c = *clauses; c; c = OMP_CLAUSE_CHAIN (c))
    if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
	&& OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c))
      {
	if (is_simd)
	  {
	    /* For simd a matching _condtemp_ clause is expected to exist
	       already for each conditional lastprivate; find the next one
	       after the previously matched clause.  */
	    tree cc = omp_find_clause (next, OMP_CLAUSE__CONDTEMP_);
	    gcc_assert (cc);
	    if (iter_type == NULL_TREE)
	      {
		/* First conditional clause: create the private iterator
		   counter and prepend a _condtemp_ clause marked as the
		   iterator (OMP_CLAUSE__CONDTEMP__ITER).  */
		iter_type = TREE_TYPE (OMP_CLAUSE_DECL (cc));
		iter_var = create_tmp_var_raw (iter_type);
		DECL_CONTEXT (iter_var) = current_function_decl;
		DECL_SEEN_IN_BIND_EXPR_P (iter_var) = 1;
		DECL_CHAIN (iter_var) = ctx->block_vars;
		ctx->block_vars = iter_var;
		tree c3
		  = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__CONDTEMP_);
		OMP_CLAUSE__CONDTEMP__ITER (c3) = 1;
		OMP_CLAUSE_DECL (c3) = iter_var;
		OMP_CLAUSE_CHAIN (c3) = *clauses;
		*clauses = c3;
		ctx->lastprivate_conditional_map = new hash_map<tree, tree>;
	      }
	    next = OMP_CLAUSE_CHAIN (cc);
	    /* Map the privatized lastprivate decl to the privatized
	       _condtemp_ decl that tracks its last conditional store.  */
	    tree o = lookup_decl (OMP_CLAUSE_DECL (c), ctx);
	    tree v = lookup_decl (OMP_CLAUSE_DECL (cc), ctx);
	    ctx->lastprivate_conditional_map->put (o, v);
	    continue;
	  }
	if (iter_type == NULL)
	  {
	    /* Non-simd (worksharing loop or sections): derive the counter
	       type from the loop iterator or use unsigned int for
	       sections.  */
	    if (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR)
	      {
		struct omp_for_data fd;
		omp_extract_for_data (as_a <gomp_for *> (ctx->stmt), &fd,
				      NULL);
		iter_type = unsigned_type_for (fd.iter_type);
	      }
	    else if (gimple_code (ctx->stmt) == GIMPLE_OMP_SECTIONS)
	      iter_type = unsigned_type_node;
	    tree c2 = omp_find_clause (*clauses, OMP_CLAUSE__CONDTEMP_);
	    if (c2)
	      {
		/* Reuse an existing _condtemp_ clause, rewriting its decl
		   to the version looked up in the enclosing context.  */
		cond_ptr
		  = lookup_decl_in_outer_ctx (OMP_CLAUSE_DECL (c2), ctx);
		OMP_CLAUSE_DECL (c2) = cond_ptr;
	      }
	    else
	      {
		/* Otherwise create the shared pointer temporary and prepend
		   a _condtemp_ clause for it.  */
		cond_ptr = create_tmp_var_raw (build_pointer_type (iter_type));
		DECL_CONTEXT (cond_ptr) = current_function_decl;
		DECL_SEEN_IN_BIND_EXPR_P (cond_ptr) = 1;
		DECL_CHAIN (cond_ptr) = ctx->block_vars;
		ctx->block_vars = cond_ptr;
		c2 = build_omp_clause (UNKNOWN_LOCATION,
				       OMP_CLAUSE__CONDTEMP_);
		OMP_CLAUSE_DECL (c2) = cond_ptr;
		OMP_CLAUSE_CHAIN (c2) = *clauses;
		*clauses = c2;
	      }
	    /* Create the private iterator counter and chain its iterator
	       _condtemp_ clause directly after the pointer clause C2, so
	       the two always travel as a pair.  */
	    iter_var = create_tmp_var_raw (iter_type);
	    DECL_CONTEXT (iter_var) = current_function_decl;
	    DECL_SEEN_IN_BIND_EXPR_P (iter_var) = 1;
	    DECL_CHAIN (iter_var) = ctx->block_vars;
	    ctx->block_vars = iter_var;
	    tree c3
	      = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__CONDTEMP_);
	    OMP_CLAUSE__CONDTEMP__ITER (c3) = 1;
	    OMP_CLAUSE_DECL (c3) = iter_var;
	    OMP_CLAUSE_CHAIN (c3) = OMP_CLAUSE_CHAIN (c2);
	    OMP_CLAUSE_CHAIN (c2) = c3;
	    ctx->lastprivate_conditional_map = new hash_map<tree, tree>;
	  }
	/* Per-clause counter variable remembering at which iteration the
	   privatized variable was last conditionally assigned.  */
	tree v = create_tmp_var_raw (iter_type);
	DECL_CONTEXT (v) = current_function_decl;
	DECL_SEEN_IN_BIND_EXPR_P (v) = 1;
	DECL_CHAIN (v) = ctx->block_vars;
	ctx->block_vars = v;
	tree o = lookup_decl (OMP_CLAUSE_DECL (c), ctx);
	ctx->lastprivate_conditional_map->put (o, v);
      }
}
5895
5896
5897 /* Generate code to implement the LASTPRIVATE clauses. This is used for
5898 both parallel and workshare constructs. PREDICATE may be NULL if it's
5899 always true. BODY_P is the sequence to insert early initialization
5900 if needed, STMT_LIST is where the non-conditional lastprivate handling
5901 goes into and CSTMT_LIST is a sequence that needs to be run in a critical
5902 section. */
5903
5904 static void
5905 lower_lastprivate_clauses (tree clauses, tree predicate, gimple_seq *body_p,
5906 gimple_seq *stmt_list, gimple_seq *cstmt_list,
5907 omp_context *ctx)
5908 {
5909 tree x, c, label = NULL, orig_clauses = clauses;
5910 bool par_clauses = false;
5911 tree simduid = NULL, lastlane = NULL, simtcond = NULL, simtlast = NULL;
5912 unsigned HOST_WIDE_INT conditional_off = 0;
5913
5914 /* Early exit if there are no lastprivate or linear clauses. */
5915 for (; clauses ; clauses = OMP_CLAUSE_CHAIN (clauses))
5916 if (OMP_CLAUSE_CODE (clauses) == OMP_CLAUSE_LASTPRIVATE
5917 || (OMP_CLAUSE_CODE (clauses) == OMP_CLAUSE_LINEAR
5918 && !OMP_CLAUSE_LINEAR_NO_COPYOUT (clauses)))
5919 break;
5920 if (clauses == NULL)
5921 {
5922 /* If this was a workshare clause, see if it had been combined
5923 with its parallel. In that case, look for the clauses on the
5924 parallel statement itself. */
5925 if (is_parallel_ctx (ctx))
5926 return;
5927
5928 ctx = ctx->outer;
5929 if (ctx == NULL || !is_parallel_ctx (ctx))
5930 return;
5931
5932 clauses = omp_find_clause (gimple_omp_parallel_clauses (ctx->stmt),
5933 OMP_CLAUSE_LASTPRIVATE);
5934 if (clauses == NULL)
5935 return;
5936 par_clauses = true;
5937 }
5938
5939 bool maybe_simt = false;
5940 if (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
5941 && gimple_omp_for_kind (ctx->stmt) & GF_OMP_FOR_SIMD)
5942 {
5943 maybe_simt = omp_find_clause (orig_clauses, OMP_CLAUSE__SIMT_);
5944 simduid = omp_find_clause (orig_clauses, OMP_CLAUSE__SIMDUID_);
5945 if (simduid)
5946 simduid = OMP_CLAUSE__SIMDUID__DECL (simduid);
5947 }
5948
5949 if (predicate)
5950 {
5951 gcond *stmt;
5952 tree label_true, arm1, arm2;
5953 enum tree_code pred_code = TREE_CODE (predicate);
5954
5955 label = create_artificial_label (UNKNOWN_LOCATION);
5956 label_true = create_artificial_label (UNKNOWN_LOCATION);
5957 if (TREE_CODE_CLASS (pred_code) == tcc_comparison)
5958 {
5959 arm1 = TREE_OPERAND (predicate, 0);
5960 arm2 = TREE_OPERAND (predicate, 1);
5961 gimplify_expr (&arm1, stmt_list, NULL, is_gimple_val, fb_rvalue);
5962 gimplify_expr (&arm2, stmt_list, NULL, is_gimple_val, fb_rvalue);
5963 }
5964 else
5965 {
5966 arm1 = predicate;
5967 gimplify_expr (&arm1, stmt_list, NULL, is_gimple_val, fb_rvalue);
5968 arm2 = boolean_false_node;
5969 pred_code = NE_EXPR;
5970 }
5971 if (maybe_simt)
5972 {
5973 c = build2 (pred_code, boolean_type_node, arm1, arm2);
5974 c = fold_convert (integer_type_node, c);
5975 simtcond = create_tmp_var (integer_type_node);
5976 gimplify_assign (simtcond, c, stmt_list);
5977 gcall *g = gimple_build_call_internal (IFN_GOMP_SIMT_VOTE_ANY,
5978 1, simtcond);
5979 c = create_tmp_var (integer_type_node);
5980 gimple_call_set_lhs (g, c);
5981 gimple_seq_add_stmt (stmt_list, g);
5982 stmt = gimple_build_cond (NE_EXPR, c, integer_zero_node,
5983 label_true, label);
5984 }
5985 else
5986 stmt = gimple_build_cond (pred_code, arm1, arm2, label_true, label);
5987 gimple_seq_add_stmt (stmt_list, stmt);
5988 gimple_seq_add_stmt (stmt_list, gimple_build_label (label_true));
5989 }
5990
5991 tree cond_ptr = NULL_TREE;
5992 for (c = clauses; c ;)
5993 {
5994 tree var, new_var;
5995 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
5996 gimple_seq *this_stmt_list = stmt_list;
5997 tree lab2 = NULL_TREE;
5998
5999 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
6000 && OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c)
6001 && ctx->lastprivate_conditional_map
6002 && !ctx->combined_into_simd_safelen0)
6003 {
6004 gcc_assert (body_p);
6005 if (simduid)
6006 goto next;
6007 if (cond_ptr == NULL_TREE)
6008 {
6009 cond_ptr = omp_find_clause (orig_clauses, OMP_CLAUSE__CONDTEMP_);
6010 cond_ptr = OMP_CLAUSE_DECL (cond_ptr);
6011 }
6012 tree type = TREE_TYPE (TREE_TYPE (cond_ptr));
6013 tree o = lookup_decl (OMP_CLAUSE_DECL (c), ctx);
6014 tree v = *ctx->lastprivate_conditional_map->get (o);
6015 gimplify_assign (v, build_zero_cst (type), body_p);
6016 this_stmt_list = cstmt_list;
6017 tree mem;
6018 if (POINTER_TYPE_P (TREE_TYPE (cond_ptr)))
6019 {
6020 mem = build2 (MEM_REF, type, cond_ptr,
6021 build_int_cst (TREE_TYPE (cond_ptr),
6022 conditional_off));
6023 conditional_off += tree_to_uhwi (TYPE_SIZE_UNIT (type));
6024 }
6025 else
6026 mem = build4 (ARRAY_REF, type, cond_ptr,
6027 size_int (conditional_off++), NULL_TREE, NULL_TREE);
6028 tree mem2 = copy_node (mem);
6029 gimple_seq seq = NULL;
6030 mem = force_gimple_operand (mem, &seq, true, NULL_TREE);
6031 gimple_seq_add_seq (this_stmt_list, seq);
6032 tree lab1 = create_artificial_label (UNKNOWN_LOCATION);
6033 lab2 = create_artificial_label (UNKNOWN_LOCATION);
6034 gimple *g = gimple_build_cond (GT_EXPR, v, mem, lab1, lab2);
6035 gimple_seq_add_stmt (this_stmt_list, g);
6036 gimple_seq_add_stmt (this_stmt_list, gimple_build_label (lab1));
6037 gimplify_assign (mem2, v, this_stmt_list);
6038 }
6039
6040 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
6041 || (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
6042 && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c)))
6043 {
6044 var = OMP_CLAUSE_DECL (c);
6045 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
6046 && OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c)
6047 && is_taskloop_ctx (ctx))
6048 {
6049 gcc_checking_assert (ctx->outer && is_task_ctx (ctx->outer));
6050 new_var = lookup_decl (var, ctx->outer);
6051 }
6052 else
6053 {
6054 new_var = lookup_decl (var, ctx);
6055 /* Avoid uninitialized warnings for lastprivate and
6056 for linear iterators. */
6057 if (predicate
6058 && (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
6059 || OMP_CLAUSE_LINEAR_NO_COPYIN (c)))
6060 TREE_NO_WARNING (new_var) = 1;
6061 }
6062
6063 if (!maybe_simt && simduid && DECL_HAS_VALUE_EXPR_P (new_var))
6064 {
6065 tree val = DECL_VALUE_EXPR (new_var);
6066 if (TREE_CODE (val) == ARRAY_REF
6067 && VAR_P (TREE_OPERAND (val, 0))
6068 && lookup_attribute ("omp simd array",
6069 DECL_ATTRIBUTES (TREE_OPERAND (val,
6070 0))))
6071 {
6072 if (lastlane == NULL)
6073 {
6074 lastlane = create_tmp_var (unsigned_type_node);
6075 gcall *g
6076 = gimple_build_call_internal (IFN_GOMP_SIMD_LAST_LANE,
6077 2, simduid,
6078 TREE_OPERAND (val, 1));
6079 gimple_call_set_lhs (g, lastlane);
6080 gimple_seq_add_stmt (this_stmt_list, g);
6081 }
6082 new_var = build4 (ARRAY_REF, TREE_TYPE (val),
6083 TREE_OPERAND (val, 0), lastlane,
6084 NULL_TREE, NULL_TREE);
6085 TREE_THIS_NOTRAP (new_var) = 1;
6086 }
6087 }
6088 else if (maybe_simt)
6089 {
6090 tree val = (DECL_HAS_VALUE_EXPR_P (new_var)
6091 ? DECL_VALUE_EXPR (new_var)
6092 : new_var);
6093 if (simtlast == NULL)
6094 {
6095 simtlast = create_tmp_var (unsigned_type_node);
6096 gcall *g = gimple_build_call_internal
6097 (IFN_GOMP_SIMT_LAST_LANE, 1, simtcond);
6098 gimple_call_set_lhs (g, simtlast);
6099 gimple_seq_add_stmt (this_stmt_list, g);
6100 }
6101 x = build_call_expr_internal_loc
6102 (UNKNOWN_LOCATION, IFN_GOMP_SIMT_XCHG_IDX,
6103 TREE_TYPE (val), 2, val, simtlast);
6104 new_var = unshare_expr (new_var);
6105 gimplify_assign (new_var, x, this_stmt_list);
6106 new_var = unshare_expr (new_var);
6107 }
6108
6109 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
6110 && OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c))
6111 {
6112 lower_omp (&OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c), ctx);
6113 gimple_seq_add_seq (this_stmt_list,
6114 OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c));
6115 OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c) = NULL;
6116 }
6117 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
6118 && OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c))
6119 {
6120 lower_omp (&OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c), ctx);
6121 gimple_seq_add_seq (this_stmt_list,
6122 OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c));
6123 OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c) = NULL;
6124 }
6125
6126 x = NULL_TREE;
6127 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
6128 && OMP_CLAUSE_LASTPRIVATE_TASKLOOP_IV (c))
6129 {
6130 gcc_checking_assert (is_taskloop_ctx (ctx));
6131 tree ovar = maybe_lookup_decl_in_outer_ctx (var,
6132 ctx->outer->outer);
6133 if (is_global_var (ovar))
6134 x = ovar;
6135 }
6136 if (!x)
6137 x = build_outer_var_ref (var, ctx, OMP_CLAUSE_LASTPRIVATE);
6138 if (omp_is_reference (var))
6139 new_var = build_simple_mem_ref_loc (clause_loc, new_var);
6140 x = lang_hooks.decls.omp_clause_assign_op (c, x, new_var);
6141 gimplify_and_add (x, this_stmt_list);
6142
6143 if (lab2)
6144 gimple_seq_add_stmt (this_stmt_list, gimple_build_label (lab2));
6145 }
6146
6147 next:
6148 c = OMP_CLAUSE_CHAIN (c);
6149 if (c == NULL && !par_clauses)
6150 {
6151 /* If this was a workshare clause, see if it had been combined
6152 with its parallel. In that case, continue looking for the
6153 clauses also on the parallel statement itself. */
6154 if (is_parallel_ctx (ctx))
6155 break;
6156
6157 ctx = ctx->outer;
6158 if (ctx == NULL || !is_parallel_ctx (ctx))
6159 break;
6160
6161 c = omp_find_clause (gimple_omp_parallel_clauses (ctx->stmt),
6162 OMP_CLAUSE_LASTPRIVATE);
6163 par_clauses = true;
6164 }
6165 }
6166
6167 if (label)
6168 gimple_seq_add_stmt (stmt_list, gimple_build_label (label));
6169 }
6170
/* Lower the OpenACC reductions of CLAUSES for compute axis LEVEL
   (which might be a placeholder).  INNER is true if this is an inner
   axis of a multi-axis loop.  FORK and JOIN are (optional) fork and
   join markers.  Generate the before-loop forking sequence in
   FORK_SEQ and the after-loop joining sequence to JOIN_SEQ.  The
   general form of these sequences is

     GOACC_REDUCTION_SETUP
     GOACC_FORK
     GOACC_REDUCTION_INIT
     ...
     GOACC_REDUCTION_FINI
     GOACC_JOIN
     GOACC_REDUCTION_TEARDOWN.  */

static void
lower_oacc_reductions (location_t loc, tree clauses, tree level, bool inner,
		       gcall *fork, gcall *join, gimple_seq *fork_seq,
		       gimple_seq *join_seq, omp_context *ctx)
{
  gimple_seq before_fork = NULL;
  gimple_seq after_fork = NULL;
  gimple_seq before_join = NULL;
  gimple_seq after_join = NULL;
  tree init_code = NULL_TREE, fini_code = NULL_TREE,
    setup_code = NULL_TREE, teardown_code = NULL_TREE;
  unsigned offset = 0;

  for (tree c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
    if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION)
      {
	tree orig = OMP_CLAUSE_DECL (c);
	tree var = maybe_lookup_decl (orig, ctx);
	tree ref_to_res = NULL_TREE;
	tree incoming, outgoing, v1, v2, v3;
	bool is_private = false;

	/* Canonicalize the combining operation: subtraction sums the
	   partial results, and the short-circuit logical operations
	   combine via their bitwise counterparts.  */
	enum tree_code rcode = OMP_CLAUSE_REDUCTION_CODE (c);
	if (rcode == MINUS_EXPR)
	  rcode = PLUS_EXPR;
	else if (rcode == TRUTH_ANDIF_EXPR)
	  rcode = BIT_AND_EXPR;
	else if (rcode == TRUTH_ORIF_EXPR)
	  rcode = BIT_IOR_EXPR;
	tree op = build_int_cst (unsigned_type_node, rcode);

	/* No local copy in this context; operate on the original.  */
	if (!var)
	  var = orig;

	incoming = outgoing = var;

	if (!inner)
	  {
	    /* See if an outer construct also reduces this variable.  */
	    omp_context *outer = ctx;

	    while (omp_context *probe = outer->outer)
	      {
		enum gimple_code type = gimple_code (probe->stmt);
		tree cls;

		switch (type)
		  {
		  case GIMPLE_OMP_FOR:
		    cls = gimple_omp_for_clauses (probe->stmt);
		    break;

		  case GIMPLE_OMP_TARGET:
		    if (gimple_omp_target_kind (probe->stmt)
			!= GF_OMP_TARGET_KIND_OACC_PARALLEL)
		      goto do_lookup;

		    cls = gimple_omp_target_clauses (probe->stmt);
		    break;

		  default:
		    goto do_lookup;
		  }

		outer = probe;
		for (; cls;  cls = OMP_CLAUSE_CHAIN (cls))
		  if (OMP_CLAUSE_CODE (cls) == OMP_CLAUSE_REDUCTION
		      && orig == OMP_CLAUSE_DECL (cls))
		    {
		      incoming = outgoing = lookup_decl (orig, probe);
		      goto has_outer_reduction;
		    }
		  else if ((OMP_CLAUSE_CODE (cls) == OMP_CLAUSE_FIRSTPRIVATE
			    || OMP_CLAUSE_CODE (cls) == OMP_CLAUSE_PRIVATE)
			   && orig == OMP_CLAUSE_DECL (cls))
		    {
		      is_private = true;
		      goto do_lookup;
		    }
	      }

	  do_lookup:
	    /* This is the outermost construct with this reduction,
	       see if there's a mapping for it.  */
	    if (gimple_code (outer->stmt) == GIMPLE_OMP_TARGET
		&& maybe_lookup_field (orig, outer) && !is_private)
	      {
		ref_to_res = build_receiver_ref (orig, false, outer);
		if (omp_is_reference (orig))
		  ref_to_res = build_simple_mem_ref (ref_to_res);

		tree type = TREE_TYPE (var);
		if (POINTER_TYPE_P (type))
		  type = TREE_TYPE (type);

		outgoing = var;
		incoming = omp_reduction_init_op (loc, rcode, type);
	      }
	    else
	      {
		/* Try to look at enclosing contexts for reduction var,
		   use original if no mapping found.  */
		tree t = NULL_TREE;
		omp_context *c = ctx->outer;
		while (c && !t)
		  {
		    t = maybe_lookup_decl (orig, c);
		    c = c->outer;
		  }
		incoming = outgoing = (t ? t : orig);
	      }

	  has_outer_reduction:;
	  }

	if (!ref_to_res)
	  ref_to_res = integer_zero_node;

	if (omp_is_reference (orig))
	  {
	    /* Reference-typed reduction: make dereferenceable private
	       copies (V1..V3) to feed the setup/init/fini calls below.  */
	    tree type = TREE_TYPE (var);
	    const char *id = IDENTIFIER_POINTER (DECL_NAME (var));

	    if (!inner)
	      {
		tree x = create_tmp_var (TREE_TYPE (type), id);
		gimplify_assign (var, build_fold_addr_expr (x), fork_seq);
	      }

	    v1 = create_tmp_var (type, id);
	    v2 = create_tmp_var (type, id);
	    v3 = create_tmp_var (type, id);

	    gimplify_assign (v1, var, fork_seq);
	    gimplify_assign (v2, var, fork_seq);
	    gimplify_assign (v3, var, fork_seq);

	    var = build_simple_mem_ref (var);
	    v1 = build_simple_mem_ref (v1);
	    v2 = build_simple_mem_ref (v2);
	    v3 = build_simple_mem_ref (v3);
	    outgoing = build_simple_mem_ref (outgoing);

	    if (!TREE_CONSTANT (incoming))
	      incoming = build_simple_mem_ref (incoming);
	  }
	else
	  v1 = v2 = v3 = var;

	/* Determine position in reduction buffer, which may be used
	   by target.  The parser has ensured that this is not a
	   variable-sized type.  */
	fixed_size_mode mode
	  = as_a <fixed_size_mode> (TYPE_MODE (TREE_TYPE (var)));
	unsigned align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	offset = (offset + align - 1) & ~(align - 1);
	tree off = build_int_cst (sizetype, offset);
	offset += GET_MODE_SIZE (mode);

	/* The operation codes are the same for every clause; build
	   them lazily on the first reduction.  */
	if (!init_code)
	  {
	    init_code = build_int_cst (integer_type_node,
				       IFN_GOACC_REDUCTION_INIT);
	    fini_code = build_int_cst (integer_type_node,
				       IFN_GOACC_REDUCTION_FINI);
	    setup_code = build_int_cst (integer_type_node,
					IFN_GOACC_REDUCTION_SETUP);
	    teardown_code = build_int_cst (integer_type_node,
					   IFN_GOACC_REDUCTION_TEARDOWN);
	  }

	tree setup_call
	  = build_call_expr_internal_loc (loc, IFN_GOACC_REDUCTION,
					  TREE_TYPE (var), 6, setup_code,
					  unshare_expr (ref_to_res),
					  incoming, level, op, off);
	tree init_call
	  = build_call_expr_internal_loc (loc, IFN_GOACC_REDUCTION,
					  TREE_TYPE (var), 6, init_code,
					  unshare_expr (ref_to_res),
					  v1, level, op, off);
	tree fini_call
	  = build_call_expr_internal_loc (loc, IFN_GOACC_REDUCTION,
					  TREE_TYPE (var), 6, fini_code,
					  unshare_expr (ref_to_res),
					  v2, level, op, off);
	tree teardown_call
	  = build_call_expr_internal_loc (loc, IFN_GOACC_REDUCTION,
					  TREE_TYPE (var), 6, teardown_code,
					  ref_to_res, v3, level, op, off);

	gimplify_assign (v1, setup_call, &before_fork);
	gimplify_assign (v2, init_call, &after_fork);
	gimplify_assign (v3, fini_call, &before_join);
	gimplify_assign (outgoing, teardown_call, &after_join);
      }

  /* Now stitch things together.  */
  gimple_seq_add_seq (fork_seq, before_fork);
  if (fork)
    gimple_seq_add_stmt (fork_seq, fork);
  gimple_seq_add_seq (fork_seq, after_fork);

  gimple_seq_add_seq (join_seq, before_join);
  if (join)
    gimple_seq_add_stmt (join_seq, join);
  gimple_seq_add_seq (join_seq, after_join);
}
6394
/* Generate code to implement the REDUCTION clauses, append it
   to STMT_SEQP.  CLIST if non-NULL is a pointer to a sequence
   that should be emitted also inside of the critical section,
   in that case clear *CLIST afterwards, otherwise leave it as is
   and let the caller emit it itself.  */

static void
lower_reduction_clauses (tree clauses, gimple_seq *stmt_seqp,
			 gimple_seq *clist, omp_context *ctx)
{
  gimple_seq sub_seq = NULL;
  gimple *stmt;
  tree x, c;
  int count = 0;

  /* OpenACC loop reductions are handled elsewhere.  */
  if (is_gimple_omp_oacc (ctx->stmt))
    return;

  /* SIMD reductions are handled in lower_rec_input_clauses.  */
  if (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
      && gimple_omp_for_kind (ctx->stmt) & GF_OMP_FOR_SIMD)
    return;

  /* inscan reductions are handled elsewhere.  */
  if (ctx->scan_inclusive || ctx->scan_exclusive)
    return;

  /* First see if there is exactly one reduction clause.  Use OMP_ATOMIC
     update in that case, otherwise use a lock.  */
  for (c = clauses; c && count < 2; c = OMP_CLAUSE_CHAIN (c))
    if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
	&& !OMP_CLAUSE_REDUCTION_TASK (c))
      {
	if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c)
	    || TREE_CODE (OMP_CLAUSE_DECL (c)) == MEM_REF)
	  {
	    /* Never use OMP_ATOMIC for array reductions or UDRs.  */
	    count = -1;
	    break;
	  }
	count++;
      }

  if (count == 0)
    return;

  for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
    {
      tree var, ref, new_var, orig_var;
      enum tree_code code;
      location_t clause_loc = OMP_CLAUSE_LOCATION (c);

      if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION
	  || OMP_CLAUSE_REDUCTION_TASK (c))
	continue;

      enum omp_clause_code ccode = OMP_CLAUSE_REDUCTION;
      orig_var = var = OMP_CLAUSE_DECL (c);
      if (TREE_CODE (var) == MEM_REF)
	{
	  /* Array section: peel the MEM_REF down to the base decl.  */
	  var = TREE_OPERAND (var, 0);
	  if (TREE_CODE (var) == POINTER_PLUS_EXPR)
	    var = TREE_OPERAND (var, 0);
	  if (TREE_CODE (var) == ADDR_EXPR)
	    var = TREE_OPERAND (var, 0);
	  else
	    {
	      /* If this is a pointer or referenced based array
		 section, the var could be private in the outer
		 context e.g. on orphaned loop construct.  Pretend this
		 is private variable's outer reference.  */
	      ccode = OMP_CLAUSE_PRIVATE;
	      if (TREE_CODE (var) == INDIRECT_REF)
		var = TREE_OPERAND (var, 0);
	    }
	  orig_var = var;
	  if (is_variable_sized (var))
	    {
	      gcc_assert (DECL_HAS_VALUE_EXPR_P (var));
	      var = DECL_VALUE_EXPR (var);
	      gcc_assert (TREE_CODE (var) == INDIRECT_REF);
	      var = TREE_OPERAND (var, 0);
	      gcc_assert (DECL_P (var));
	    }
	}
      new_var = lookup_decl (var, ctx);
      if (var == OMP_CLAUSE_DECL (c) && omp_is_reference (var))
	new_var = build_simple_mem_ref_loc (clause_loc, new_var);
      ref = build_outer_var_ref (var, ctx, ccode);
      code = OMP_CLAUSE_REDUCTION_CODE (c);

      /* reduction(-:var) sums up the partial results, so it acts
	 identically to reduction(+:var).  */
      if (code == MINUS_EXPR)
	code = PLUS_EXPR;

      if (count == 1)
	{
	  /* Single scalar reduction: emit one relaxed OMP_ATOMIC
	     update and we are done.  */
	  tree addr = build_fold_addr_expr_loc (clause_loc, ref);

	  addr = save_expr (addr);
	  ref = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (addr)), addr);
	  x = fold_build2_loc (clause_loc, code, TREE_TYPE (ref), ref, new_var);
	  x = build2 (OMP_ATOMIC, void_type_node, addr, x);
	  OMP_ATOMIC_MEMORY_ORDER (x) = OMP_MEMORY_ORDER_RELAXED;
	  gimplify_and_add (x, stmt_seqp);
	  return;
	}
      else if (TREE_CODE (OMP_CLAUSE_DECL (c)) == MEM_REF)
	{
	  /* Array section reduction: build a loop over the elements,
	     merging each private element into the outer one.  The loop
	     body goes into SUB_SEQ, emitted inside the critical
	     section below.  */
	  tree d = OMP_CLAUSE_DECL (c);
	  tree type = TREE_TYPE (d);
	  tree v = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
	  tree i = create_tmp_var (TREE_TYPE (v));
	  tree ptype = build_pointer_type (TREE_TYPE (type));
	  tree bias = TREE_OPERAND (d, 1);
	  d = TREE_OPERAND (d, 0);
	  if (TREE_CODE (d) == POINTER_PLUS_EXPR)
	    {
	      tree b = TREE_OPERAND (d, 1);
	      b = maybe_lookup_decl (b, ctx);
	      if (b == NULL)
		{
		  b = TREE_OPERAND (d, 1);
		  b = maybe_lookup_decl_in_outer_ctx (b, ctx);
		}
	      if (integer_zerop (bias))
		bias = b;
	      else
		{
		  bias = fold_convert_loc (clause_loc, TREE_TYPE (b), bias);
		  bias = fold_build2_loc (clause_loc, PLUS_EXPR,
					  TREE_TYPE (b), b, bias);
		}
	      d = TREE_OPERAND (d, 0);
	    }
	  /* For ref build_outer_var_ref already performs this, so
	     only new_var needs a dereference.  */
	  if (TREE_CODE (d) == INDIRECT_REF)
	    {
	      new_var = build_simple_mem_ref_loc (clause_loc, new_var);
	      gcc_assert (omp_is_reference (var) && var == orig_var);
	    }
	  else if (TREE_CODE (d) == ADDR_EXPR)
	    {
	      if (orig_var == var)
		{
		  new_var = build_fold_addr_expr (new_var);
		  ref = build_fold_addr_expr (ref);
		}
	    }
	  else
	    {
	      gcc_assert (orig_var == var);
	      if (omp_is_reference (var))
		ref = build_fold_addr_expr (ref);
	    }
	  if (DECL_P (v))
	    {
	      tree t = maybe_lookup_decl (v, ctx);
	      if (t)
		v = t;
	      else
		v = maybe_lookup_decl_in_outer_ctx (v, ctx);
	      gimplify_expr (&v, stmt_seqp, NULL, is_gimple_val, fb_rvalue);
	    }
	  if (!integer_zerop (bias))
	    {
	      bias = fold_convert_loc (clause_loc, sizetype, bias);
	      new_var = fold_build2_loc (clause_loc, POINTER_PLUS_EXPR,
					 TREE_TYPE (new_var), new_var,
					 unshare_expr (bias));
	      ref = fold_build2_loc (clause_loc, POINTER_PLUS_EXPR,
				     TREE_TYPE (ref), ref, bias);
	    }
	  new_var = fold_convert_loc (clause_loc, ptype, new_var);
	  ref = fold_convert_loc (clause_loc, ptype, ref);
	  tree m = create_tmp_var (ptype);
	  gimplify_assign (m, new_var, stmt_seqp);
	  new_var = m;
	  m = create_tmp_var (ptype);
	  gimplify_assign (m, ref, stmt_seqp);
	  ref = m;
	  gimplify_assign (i, build_int_cst (TREE_TYPE (v), 0), stmt_seqp);
	  tree body = create_artificial_label (UNKNOWN_LOCATION);
	  tree end = create_artificial_label (UNKNOWN_LOCATION);
	  gimple_seq_add_stmt (&sub_seq, gimple_build_label (body));
	  tree priv = build_simple_mem_ref_loc (clause_loc, new_var);
	  tree out = build_simple_mem_ref_loc (clause_loc, ref);
	  if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
	    {
	      /* UDR: substitute the placeholders with the current
		 element refs and emit the user's merge sequence.  */
	      tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
	      tree decl_placeholder
		= OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c);
	      SET_DECL_VALUE_EXPR (placeholder, out);
	      DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
	      SET_DECL_VALUE_EXPR (decl_placeholder, priv);
	      DECL_HAS_VALUE_EXPR_P (decl_placeholder) = 1;
	      lower_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c), ctx);
	      gimple_seq_add_seq (&sub_seq,
				  OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c));
	      OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
	      OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) = NULL;
	      OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c) = NULL;
	    }
	  else
	    {
	      x = build2 (code, TREE_TYPE (out), out, priv);
	      out = unshare_expr (out);
	      gimplify_assign (out, x, &sub_seq);
	    }
	  /* Advance both element pointers and the index, loop back
	     while I <= V.  */
	  gimple *g = gimple_build_assign (new_var, POINTER_PLUS_EXPR, new_var,
					  TYPE_SIZE_UNIT (TREE_TYPE (type)));
	  gimple_seq_add_stmt (&sub_seq, g);
	  g = gimple_build_assign (ref, POINTER_PLUS_EXPR, ref,
				   TYPE_SIZE_UNIT (TREE_TYPE (type)));
	  gimple_seq_add_stmt (&sub_seq, g);
	  g = gimple_build_assign (i, PLUS_EXPR, i,
				   build_int_cst (TREE_TYPE (i), 1));
	  gimple_seq_add_stmt (&sub_seq, g);
	  g = gimple_build_cond (LE_EXPR, i, v, body, end);
	  gimple_seq_add_stmt (&sub_seq, g);
	  gimple_seq_add_stmt (&sub_seq, gimple_build_label (end));
	}
      else if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
	{
	  tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);

	  if (omp_is_reference (var)
	      && !useless_type_conversion_p (TREE_TYPE (placeholder),
					     TREE_TYPE (ref)))
	    ref = build_fold_addr_expr_loc (clause_loc, ref);
	  SET_DECL_VALUE_EXPR (placeholder, ref);
	  DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
	  lower_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c), ctx);
	  gimple_seq_add_seq (&sub_seq, OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c));
	  OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
	  OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) = NULL;
	}
      else
	{
	  x = build2 (code, TREE_TYPE (ref), ref, new_var);
	  ref = build_outer_var_ref (var, ctx);
	  gimplify_assign (ref, x, &sub_seq);
	}
    }

  /* Wrap the accumulated merges in GOMP_atomic_start/end, i.e. one
     big critical section shared by all reductions.  */
  stmt = gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_ATOMIC_START),
			    0);
  gimple_seq_add_stmt (stmt_seqp, stmt);

  gimple_seq_add_seq (stmt_seqp, sub_seq);

  if (clist)
    {
      gimple_seq_add_seq (stmt_seqp, *clist);
      *clist = NULL;
    }

  stmt = gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_ATOMIC_END),
			    0);
  gimple_seq_add_stmt (stmt_seqp, stmt);
}
6659
6660
6661 /* Generate code to implement the COPYPRIVATE clauses. */
6662
6663 static void
6664 lower_copyprivate_clauses (tree clauses, gimple_seq *slist, gimple_seq *rlist,
6665 omp_context *ctx)
6666 {
6667 tree c;
6668
6669 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
6670 {
6671 tree var, new_var, ref, x;
6672 bool by_ref;
6673 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
6674
6675 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_COPYPRIVATE)
6676 continue;
6677
6678 var = OMP_CLAUSE_DECL (c);
6679 by_ref = use_pointer_for_field (var, NULL);
6680
6681 ref = build_sender_ref (var, ctx);
6682 x = new_var = lookup_decl_in_outer_ctx (var, ctx);
6683 if (by_ref)
6684 {
6685 x = build_fold_addr_expr_loc (clause_loc, new_var);
6686 x = fold_convert_loc (clause_loc, TREE_TYPE (ref), x);
6687 }
6688 gimplify_assign (ref, x, slist);
6689
6690 ref = build_receiver_ref (var, false, ctx);
6691 if (by_ref)
6692 {
6693 ref = fold_convert_loc (clause_loc,
6694 build_pointer_type (TREE_TYPE (new_var)),
6695 ref);
6696 ref = build_fold_indirect_ref_loc (clause_loc, ref);
6697 }
6698 if (omp_is_reference (var))
6699 {
6700 ref = fold_convert_loc (clause_loc, TREE_TYPE (new_var), ref);
6701 ref = build_simple_mem_ref_loc (clause_loc, ref);
6702 new_var = build_simple_mem_ref_loc (clause_loc, new_var);
6703 }
6704 x = lang_hooks.decls.omp_clause_assign_op (c, new_var, ref);
6705 gimplify_and_add (x, rlist);
6706 }
6707 }
6708
6709
/* Generate code to implement the clauses, FIRSTPRIVATE, COPYIN, LASTPRIVATE,
   and REDUCTION from the sender (aka parent) side.  */

static void
lower_send_clauses (tree clauses, gimple_seq *ilist, gimple_seq *olist,
		    omp_context *ctx)
{
  tree c, t;
  int ignored_looptemp = 0;
  bool is_taskloop = false;

  /* For taskloop, ignore first two _looptemp_ clauses, those are initialized
     by GOMP_taskloop.  */
  if (is_task_ctx (ctx) && gimple_omp_task_taskloop_p (ctx->stmt))
    {
      ignored_looptemp = 2;
      is_taskloop = true;
    }

  for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
    {
      tree val, ref, x, var;
      bool by_ref, do_in = false, do_out = false;
      location_t clause_loc = OMP_CLAUSE_LOCATION (c);

      /* First filter: only the clause kinds below involve sending or
	 receiving data; everything else is skipped.  */
      switch (OMP_CLAUSE_CODE (c))
	{
	case OMP_CLAUSE_PRIVATE:
	  if (OMP_CLAUSE_PRIVATE_OUTER_REF (c))
	    break;
	  continue;
	case OMP_CLAUSE_FIRSTPRIVATE:
	case OMP_CLAUSE_COPYIN:
	case OMP_CLAUSE_LASTPRIVATE:
	case OMP_CLAUSE_IN_REDUCTION:
	case OMP_CLAUSE__REDUCTEMP_:
	  break;
	case OMP_CLAUSE_REDUCTION:
	  if (is_task_ctx (ctx) || OMP_CLAUSE_REDUCTION_TASK (c))
	    continue;
	  break;
	case OMP_CLAUSE_SHARED:
	  if (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
	    break;
	  continue;
	case OMP_CLAUSE__LOOPTEMP_:
	  if (ignored_looptemp)
	    {
	      ignored_looptemp--;
	      continue;
	    }
	  break;
	default:
	  continue;
	}

      val = OMP_CLAUSE_DECL (c);
      /* For array section reductions, strip the MEM_REF down to the
	 underlying base decl.  */
      if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
	   || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_IN_REDUCTION)
	  && TREE_CODE (val) == MEM_REF)
	{
	  val = TREE_OPERAND (val, 0);
	  if (TREE_CODE (val) == POINTER_PLUS_EXPR)
	    val = TREE_OPERAND (val, 0);
	  if (TREE_CODE (val) == INDIRECT_REF
	      || TREE_CODE (val) == ADDR_EXPR)
	    val = TREE_OPERAND (val, 0);
	  if (is_variable_sized (val))
	    continue;
	}

      /* For OMP_CLAUSE_SHARED_FIRSTPRIVATE, look beyond the
	 outer taskloop region.  */
      omp_context *ctx_for_o = ctx;
      if (is_taskloop
	  && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED
	  && OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
	ctx_for_o = ctx->outer;

      var = lookup_decl_in_outer_ctx (val, ctx_for_o);

      /* Most globals need no copying; the exceptions are COPYIN and,
	 on task constructs, certain pointer (or reference-to-pointer)
	 typed decls whose clause decl differs from VAL.  */
      if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_COPYIN
	  && is_global_var (var)
	  && (val == OMP_CLAUSE_DECL (c)
	      || !is_task_ctx (ctx)
	      || (TREE_CODE (TREE_TYPE (val)) != POINTER_TYPE
		  && (TREE_CODE (TREE_TYPE (val)) != REFERENCE_TYPE
		      || (TREE_CODE (TREE_TYPE (TREE_TYPE (val)))
			  != POINTER_TYPE)))))
	continue;

      /* Member accesses are sent via their value-expr, remapped to
	 the outer context's dummy var when one exists.  */
      t = omp_member_access_dummy_var (var);
      if (t)
	{
	  var = DECL_VALUE_EXPR (var);
	  tree o = maybe_lookup_decl_in_outer_ctx (t, ctx_for_o);
	  if (o != t)
	    var = unshare_and_remap (var, t, o);
	  else
	    var = unshare_expr (var);
	}

      if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED)
	{
	  /* Handle taskloop firstprivate/lastprivate, where the
	     lastprivate on GIMPLE_OMP_TASK is represented as
	     OMP_CLAUSE_SHARED_FIRSTPRIVATE.  */
	  tree f = lookup_sfield ((splay_tree_key) &DECL_UID (val), ctx);
	  x = omp_build_component_ref (ctx->sender_decl, f);
	  if (use_pointer_for_field (val, ctx))
	    var = build_fold_addr_expr (var);
	  gimplify_assign (x, var, ilist);
	  DECL_ABSTRACT_ORIGIN (f) = NULL;
	  continue;
	}

      if (((OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION
	    && OMP_CLAUSE_CODE (c) != OMP_CLAUSE_IN_REDUCTION)
	   || val == OMP_CLAUSE_DECL (c))
	  && is_variable_sized (val))
	continue;
      by_ref = use_pointer_for_field (val, NULL);

      /* Second switch: decide the copy direction(s) for this clause —
	 DO_IN sends into the child, DO_OUT copies the result back.  */
      switch (OMP_CLAUSE_CODE (c))
	{
	case OMP_CLAUSE_FIRSTPRIVATE:
	  if (OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (c)
	      && !by_ref
	      && is_task_ctx (ctx))
	    TREE_NO_WARNING (var) = 1;
	  do_in = true;
	  break;

	case OMP_CLAUSE_PRIVATE:
	case OMP_CLAUSE_COPYIN:
	case OMP_CLAUSE__LOOPTEMP_:
	case OMP_CLAUSE__REDUCTEMP_:
	  do_in = true;
	  break;

	case OMP_CLAUSE_LASTPRIVATE:
	  if (by_ref || omp_is_reference (val))
	    {
	      if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
		continue;
	      do_in = true;
	    }
	  else
	    {
	      do_out = true;
	      if (lang_hooks.decls.omp_private_outer_ref (val))
		do_in = true;
	    }
	  break;

	case OMP_CLAUSE_REDUCTION:
	case OMP_CLAUSE_IN_REDUCTION:
	  do_in = true;
	  if (val == OMP_CLAUSE_DECL (c))
	    {
	      if (is_task_ctx (ctx))
		by_ref = use_pointer_for_field (val, ctx);
	      else
		do_out = !(by_ref || omp_is_reference (val));
	    }
	  else
	    by_ref = TREE_CODE (TREE_TYPE (val)) == ARRAY_TYPE;
	  break;

	default:
	  gcc_unreachable ();
	}

      if (do_in)
	{
	  ref = build_sender_ref (val, ctx);
	  x = by_ref ? build_fold_addr_expr_loc (clause_loc, var) : var;
	  gimplify_assign (ref, x, ilist);
	  if (is_task_ctx (ctx))
	    DECL_ABSTRACT_ORIGIN (TREE_OPERAND (ref, 1)) = NULL;
	}

      if (do_out)
	{
	  ref = build_sender_ref (val, ctx);
	  gimplify_assign (var, ref, olist);
	}
    }
}
6899
6900 /* Generate code to implement SHARED from the sender (aka parent)
6901 side. This is trickier, since GIMPLE_OMP_PARALLEL_CLAUSES doesn't
6902 list things that got automatically shared. */
6903
6904 static void
6905 lower_send_shared_vars (gimple_seq *ilist, gimple_seq *olist, omp_context *ctx)
6906 {
6907 tree var, ovar, nvar, t, f, x, record_type;
6908
6909 if (ctx->record_type == NULL)
6910 return;
6911
6912 record_type = ctx->srecord_type ? ctx->srecord_type : ctx->record_type;
6913 for (f = TYPE_FIELDS (record_type); f ; f = DECL_CHAIN (f))
6914 {
6915 ovar = DECL_ABSTRACT_ORIGIN (f);
6916 if (!ovar || TREE_CODE (ovar) == FIELD_DECL)
6917 continue;
6918
6919 nvar = maybe_lookup_decl (ovar, ctx);
6920 if (!nvar || !DECL_HAS_VALUE_EXPR_P (nvar))
6921 continue;
6922
6923 /* If CTX is a nested parallel directive. Find the immediately
6924 enclosing parallel or workshare construct that contains a
6925 mapping for OVAR. */
6926 var = lookup_decl_in_outer_ctx (ovar, ctx);
6927
6928 t = omp_member_access_dummy_var (var);
6929 if (t)
6930 {
6931 var = DECL_VALUE_EXPR (var);
6932 tree o = maybe_lookup_decl_in_outer_ctx (t, ctx);
6933 if (o != t)
6934 var = unshare_and_remap (var, t, o);
6935 else
6936 var = unshare_expr (var);
6937 }
6938
6939 if (use_pointer_for_field (ovar, ctx))
6940 {
6941 x = build_sender_ref (ovar, ctx);
6942 if (TREE_CODE (TREE_TYPE (f)) == ARRAY_TYPE
6943 && TREE_TYPE (f) == TREE_TYPE (ovar))
6944 {
6945 gcc_assert (is_parallel_ctx (ctx)
6946 && DECL_ARTIFICIAL (ovar));
6947 /* _condtemp_ clause. */
6948 var = build_constructor (TREE_TYPE (x), NULL);
6949 }
6950 else
6951 var = build_fold_addr_expr (var);
6952 gimplify_assign (x, var, ilist);
6953 }
6954 else
6955 {
6956 x = build_sender_ref (ovar, ctx);
6957 gimplify_assign (x, var, ilist);
6958
6959 if (!TREE_READONLY (var)
6960 /* We don't need to receive a new reference to a result
6961 or parm decl. In fact we may not store to it as we will
6962 invalidate any pending RSO and generate wrong gimple
6963 during inlining. */
6964 && !((TREE_CODE (var) == RESULT_DECL
6965 || TREE_CODE (var) == PARM_DECL)
6966 && DECL_BY_REFERENCE (var)))
6967 {
6968 x = build_sender_ref (ovar, ctx);
6969 gimplify_assign (var, x, olist);
6970 }
6971 }
6972 }
6973 }
6974
/* Emit an OpenACC head marker call, encapsulating the partitioning and
   other information that must be processed by the target compiler.
   Return the maximum number of dimensions the associated loop might
   be partitioned over.  */

static unsigned
lower_oacc_head_mark (location_t loc, tree ddvar, tree clauses,
		      gimple_seq *seq, omp_context *ctx)
{
  unsigned levels = 0;
  unsigned tag = 0;
  tree gang_static = NULL_TREE;
  auto_vec<tree, 5> args;

  args.quick_push (build_int_cst
		   (integer_type_node, IFN_UNIQUE_OACC_HEAD_MARK));
  args.quick_push (ddvar);
  /* Accumulate the OLF_* partitioning flags and count the explicit
     partitioning levels requested by the clauses.  */
  for (tree c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
    {
      switch (OMP_CLAUSE_CODE (c))
	{
	case OMP_CLAUSE_GANG:
	  tag |= OLF_DIM_GANG;
	  gang_static = OMP_CLAUSE_GANG_STATIC_EXPR (c);
	  /* static:* is represented by -1, and we can ignore it, as
	     scheduling is always static.  */
	  if (gang_static && integer_minus_onep (gang_static))
	    gang_static = NULL_TREE;
	  levels++;
	  break;

	case OMP_CLAUSE_WORKER:
	  tag |= OLF_DIM_WORKER;
	  levels++;
	  break;

	case OMP_CLAUSE_VECTOR:
	  tag |= OLF_DIM_VECTOR;
	  levels++;
	  break;

	case OMP_CLAUSE_SEQ:
	  tag |= OLF_SEQ;
	  break;

	case OMP_CLAUSE_AUTO:
	  tag |= OLF_AUTO;
	  break;

	case OMP_CLAUSE_INDEPENDENT:
	  tag |= OLF_INDEPENDENT;
	  break;

	case OMP_CLAUSE_TILE:
	  tag |= OLF_TILE;
	  break;

	default:
	  continue;
	}
    }

  if (gang_static)
    {
      if (DECL_P (gang_static))
	gang_static = build_outer_var_ref (gang_static, ctx);
      tag |= OLF_GANG_STATIC;
    }

  /* In a parallel region, loops are implicitly INDEPENDENT.  */
  omp_context *tgt = enclosing_target_ctx (ctx);
  if (!tgt || is_oacc_parallel (tgt))
    tag |= OLF_INDEPENDENT;

  if (tag & OLF_TILE)
    /* Tiling could use all 3 levels.  */
    levels = 3;
  else
    {
      /* A loop lacking SEQ, GANG, WORKER and/or VECTOR could be AUTO.
	 Ensure at least one level, or 2 for possible auto
	 partitioning */
      bool maybe_auto = !(tag & (((GOMP_DIM_MASK (GOMP_DIM_MAX) - 1)
				  << OLF_DIM_BASE) | OLF_SEQ));

      if (levels < 1u + maybe_auto)
	levels = 1u + maybe_auto;
    }

  /* Trailing arguments: level count, flag word, and the optional
     static chunking expression for gang partitioning.  */
  args.quick_push (build_int_cst (integer_type_node, levels));
  args.quick_push (build_int_cst (integer_type_node, tag));
  if (gang_static)
    args.quick_push (gang_static);

  gcall *call = gimple_build_call_internal_vec (IFN_UNIQUE, args);
  gimple_set_location (call, loc);
  gimple_set_lhs (call, ddvar);
  gimple_seq_add_stmt (seq, call);

  return levels;
}
7076
7077 /* Emit an OpenACC lopp head or tail marker to SEQ. LEVEL is the
7078 partitioning level of the enclosed region. */
7079
7080 static void
7081 lower_oacc_loop_marker (location_t loc, tree ddvar, bool head,
7082 tree tofollow, gimple_seq *seq)
7083 {
7084 int marker_kind = (head ? IFN_UNIQUE_OACC_HEAD_MARK
7085 : IFN_UNIQUE_OACC_TAIL_MARK);
7086 tree marker = build_int_cst (integer_type_node, marker_kind);
7087 int nargs = 2 + (tofollow != NULL_TREE);
7088 gcall *call = gimple_build_call_internal (IFN_UNIQUE, nargs,
7089 marker, ddvar, tofollow);
7090 gimple_set_location (call, loc);
7091 gimple_set_lhs (call, ddvar);
7092 gimple_seq_add_stmt (seq, call);
7093 }
7094
/* Generate the before and after OpenACC loop sequences.  CLAUSES are
   the loop clauses, from which we extract reductions.  Initialize
   HEAD and TAIL.  */

static void
lower_oacc_head_tail (location_t loc, tree clauses,
		      gimple_seq *head, gimple_seq *tail, omp_context *ctx)
{
  bool inner = false;
  /* .data_dep is an artificial data dependency threaded through all the
     markers and fork/join calls (each sets it as its lhs and takes it as
     an argument) to keep them ordered.  */
  tree ddvar = create_tmp_var (integer_type_node, ".data_dep");
  gimple_seq_add_stmt (head, gimple_build_assign (ddvar, integer_zero_node));

  /* COUNT is the number of partitioning levels the head mark reported.  */
  unsigned count = lower_oacc_head_mark (loc, ddvar, clauses, head, ctx);
  tree fork_kind = build_int_cst (unsigned_type_node, IFN_UNIQUE_OACC_FORK);
  tree join_kind = build_int_cst (unsigned_type_node, IFN_UNIQUE_OACC_JOIN);

  gcc_assert (count);
  /* Emit one fork/join pair per level; each iteration's fork sequence is
     appended to HEAD and its join sequence prepended to TAIL, so the
     levels end up properly nested.  */
  for (unsigned done = 1; count; count--, done++)
    {
      gimple_seq fork_seq = NULL;
      gimple_seq join_seq = NULL;

      /* Placeholder dimension argument (-1); presumably rewritten to the
	 concrete partitioning axis by a later device-lowering pass —
	 TODO confirm.  */
      tree place = build_int_cst (integer_type_node, -1);
      gcall *fork = gimple_build_call_internal (IFN_UNIQUE, 3,
						fork_kind, ddvar, place);
      gimple_set_location (fork, loc);
      gimple_set_lhs (fork, ddvar);

      gcall *join = gimple_build_call_internal (IFN_UNIQUE, 3,
						join_kind, ddvar, place);
      gimple_set_location (join, loc);
      gimple_set_lhs (join, ddvar);

      /* Mark the beginning of this level sequence.  */
      if (inner)
	lower_oacc_loop_marker (loc, ddvar, true,
				build_int_cst (integer_type_node, count),
				&fork_seq);
      lower_oacc_loop_marker (loc, ddvar, false,
			      build_int_cst (integer_type_node, done),
			      &join_seq);

      /* Reductions for this level wrap around the fork and join.  */
      lower_oacc_reductions (loc, clauses, place, inner,
			     fork, join, &fork_seq, &join_seq, ctx);

      /* Append this level to head.  */
      gimple_seq_add_seq (head, fork_seq);
      /* Prepend it to tail.  */
      gimple_seq_add_seq (&join_seq, *tail);
      *tail = join_seq;

      inner = true;
    }

  /* Mark the end of the sequence.  */
  lower_oacc_loop_marker (loc, ddvar, true, NULL_TREE, head);
  lower_oacc_loop_marker (loc, ddvar, false, NULL_TREE, tail);
}
7153
7154 /* If exceptions are enabled, wrap the statements in BODY in a MUST_NOT_THROW
7155 catch handler and return it. This prevents programs from violating the
7156 structured block semantics with throws. */
7157
7158 static gimple_seq
7159 maybe_catch_exception (gimple_seq body)
7160 {
7161 gimple *g;
7162 tree decl;
7163
7164 if (!flag_exceptions)
7165 return body;
7166
7167 if (lang_hooks.eh_protect_cleanup_actions != NULL)
7168 decl = lang_hooks.eh_protect_cleanup_actions ();
7169 else
7170 decl = builtin_decl_explicit (BUILT_IN_TRAP);
7171
7172 g = gimple_build_eh_must_not_throw (decl);
7173 g = gimple_build_try (body, gimple_seq_alloc_with_stmt (g),
7174 GIMPLE_TRY_CATCH);
7175
7176 return gimple_seq_alloc_with_stmt (g);
7177 }
7178
7179 \f
7180 /* Routines to lower OMP directives into OMP-GIMPLE. */
7181
7182 /* If ctx is a worksharing context inside of a cancellable parallel
7183 region and it isn't nowait, add lhs to its GIMPLE_OMP_RETURN
7184 and conditional branch to parallel's cancel_label to handle
7185 cancellation in the implicit barrier. */
7186
7187 static void
7188 maybe_add_implicit_barrier_cancel (omp_context *ctx, gimple *omp_return,
7189 gimple_seq *body)
7190 {
7191 gcc_assert (gimple_code (omp_return) == GIMPLE_OMP_RETURN);
7192 if (gimple_omp_return_nowait_p (omp_return))
7193 return;
7194 for (omp_context *outer = ctx->outer; outer; outer = outer->outer)
7195 if (gimple_code (outer->stmt) == GIMPLE_OMP_PARALLEL
7196 && outer->cancellable)
7197 {
7198 tree fndecl = builtin_decl_explicit (BUILT_IN_GOMP_CANCEL);
7199 tree c_bool_type = TREE_TYPE (TREE_TYPE (fndecl));
7200 tree lhs = create_tmp_var (c_bool_type);
7201 gimple_omp_return_set_lhs (omp_return, lhs);
7202 tree fallthru_label = create_artificial_label (UNKNOWN_LOCATION);
7203 gimple *g = gimple_build_cond (NE_EXPR, lhs,
7204 fold_convert (c_bool_type,
7205 boolean_false_node),
7206 outer->cancel_label, fallthru_label);
7207 gimple_seq_add_stmt (body, g);
7208 gimple_seq_add_stmt (body, gimple_build_label (fallthru_label));
7209 }
7210 else if (gimple_code (outer->stmt) != GIMPLE_OMP_TASKGROUP)
7211 return;
7212 }
7213
7214 /* Find the first task_reduction or reduction clause or return NULL
7215 if there are none. */
7216
7217 static inline tree
7218 omp_task_reductions_find_first (tree clauses, enum tree_code code,
7219 enum omp_clause_code ccode)
7220 {
7221 while (1)
7222 {
7223 clauses = omp_find_clause (clauses, ccode);
7224 if (clauses == NULL_TREE)
7225 return NULL_TREE;
7226 if (ccode != OMP_CLAUSE_REDUCTION
7227 || code == OMP_TASKLOOP
7228 || OMP_CLAUSE_REDUCTION_TASK (clauses))
7229 return clauses;
7230 clauses = OMP_CLAUSE_CHAIN (clauses);
7231 }
7232 }
7233
7234 static void lower_omp_task_reductions (omp_context *, enum tree_code, tree,
7235 gimple_seq *, gimple_seq *);
7236
/* Lower the OpenMP sections directive in the current statement in GSI_P.
   CTX is the enclosing OMP context for the current statement.  */

static void
lower_omp_sections (gimple_stmt_iterator *gsi_p, omp_context *ctx)
{
  tree block, control;
  gimple_stmt_iterator tgsi;
  gomp_sections *stmt;
  gimple *t;
  gbind *new_stmt, *bind;
  gimple_seq ilist, dlist, olist, tred_dlist = NULL, clist = NULL, new_body;

  stmt = as_a <gomp_sections *> (gsi_stmt (*gsi_p));

  push_gimplify_context ();

  dlist = NULL;
  ilist = NULL;

  /* If any task reductions are present, prepend a _REDUCTEMP_ clause
     holding a temporary, and emit registration code into ILIST and
     teardown code into TRED_DLIST.  */
  tree rclauses
    = omp_task_reductions_find_first (gimple_omp_sections_clauses (stmt),
				      OMP_SECTIONS, OMP_CLAUSE_REDUCTION);
  tree rtmp = NULL_TREE;
  if (rclauses)
    {
      tree type = build_pointer_type (pointer_sized_int_node);
      tree temp = create_tmp_var (type);
      tree c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__REDUCTEMP_);
      OMP_CLAUSE_DECL (c) = temp;
      OMP_CLAUSE_CHAIN (c) = gimple_omp_sections_clauses (stmt);
      gimple_omp_sections_set_clauses (stmt, c);
      lower_omp_task_reductions (ctx, OMP_SECTIONS,
				 gimple_omp_sections_clauses (stmt),
				 &ilist, &tred_dlist);
      rclauses = c;
      /* RTMP snapshots the temporary; it replaces the clause decl at the
	 end of this function, once lowering no longer needs TEMP.  */
      rtmp = make_ssa_name (type);
      gimple_seq_add_stmt (&ilist, gimple_build_assign (rtmp, temp));
    }

  tree *clauses_ptr = gimple_omp_sections_clauses_ptr (stmt);
  lower_lastprivate_conditional_clauses (clauses_ptr, ctx);

  lower_rec_input_clauses (gimple_omp_sections_clauses (stmt),
			   &ilist, &dlist, ctx, NULL);

  /* .section is the control variable later used by the sections
     switch/continue pair emitted below.  */
  control = create_tmp_var (unsigned_type_node, ".section");
  gimple_omp_sections_set_control (stmt, control);

  /* Lower each GIMPLE_OMP_SECTION of the body in turn, splicing its
     lowered body after the section statement.  */
  new_body = gimple_omp_body (stmt);
  gimple_omp_set_body (stmt, NULL);
  tgsi = gsi_start (new_body);
  for (; !gsi_end_p (tgsi); gsi_next (&tgsi))
    {
      omp_context *sctx;
      gimple *sec_start;

      sec_start = gsi_stmt (tgsi);
      sctx = maybe_lookup_ctx (sec_start);
      gcc_assert (sctx);

      lower_omp (gimple_omp_body_ptr (sec_start), sctx);
      gsi_insert_seq_after (&tgsi, gimple_omp_body (sec_start),
			    GSI_CONTINUE_LINKING);
      gimple_omp_set_body (sec_start, NULL);

      if (gsi_one_before_end_p (tgsi))
	{
	  /* Lastprivate handling goes into the final section.  */
	  gimple_seq l = NULL;
	  lower_lastprivate_clauses (gimple_omp_sections_clauses (stmt), NULL,
				     &ilist, &l, &clist, ctx);
	  gsi_insert_seq_after (&tgsi, l, GSI_CONTINUE_LINKING);
	  gimple_omp_section_set_last (sec_start);
	}

      gsi_insert_after (&tgsi, gimple_build_omp_return (false),
			GSI_CONTINUE_LINKING);
    }

  block = make_node (BLOCK);
  bind = gimple_build_bind (NULL, new_body, block);

  olist = NULL;
  lower_reduction_clauses (gimple_omp_sections_clauses (stmt), &olist,
			   &clist, ctx);
  if (clist)
    {
      /* Reductions in CLIST must run atomically; bracket them with
	 GOMP_atomic_start/GOMP_atomic_end calls.  */
      tree fndecl = builtin_decl_explicit (BUILT_IN_GOMP_ATOMIC_START);
      gcall *g = gimple_build_call (fndecl, 0);
      gimple_seq_add_stmt (&olist, g);
      gimple_seq_add_seq (&olist, clist);
      fndecl = builtin_decl_explicit (BUILT_IN_GOMP_ATOMIC_END);
      g = gimple_build_call (fndecl, 0);
      gimple_seq_add_stmt (&olist, g);
    }

  /* Replace the original statement with a fresh bind that will receive
     the whole assembled sequence.  */
  block = make_node (BLOCK);
  new_stmt = gimple_build_bind (NULL, NULL, block);
  gsi_replace (gsi_p, new_stmt, true);

  pop_gimplify_context (new_stmt);
  gimple_bind_append_vars (new_stmt, ctx->block_vars);
  BLOCK_VARS (block) = gimple_bind_vars (bind);
  if (BLOCK_VARS (block))
    TREE_USED (block) = 1;

  /* Assemble the final sequence: input-clause setup, the sections
     statement itself, the dispatch switch, the lowered body, the
     continue statement, reductions, destructors, and the return.  */
  new_body = NULL;
  gimple_seq_add_seq (&new_body, ilist);
  gimple_seq_add_stmt (&new_body, stmt);
  gimple_seq_add_stmt (&new_body, gimple_build_omp_sections_switch ());
  gimple_seq_add_stmt (&new_body, bind);

  t = gimple_build_omp_continue (control, control);
  gimple_seq_add_stmt (&new_body, t);

  gimple_seq_add_seq (&new_body, olist);
  if (ctx->cancellable)
    gimple_seq_add_stmt (&new_body, gimple_build_label (ctx->cancel_label));
  gimple_seq_add_seq (&new_body, dlist);

  new_body = maybe_catch_exception (new_body);

  bool nowait = omp_find_clause (gimple_omp_sections_clauses (stmt),
				 OMP_CLAUSE_NOWAIT) != NULL_TREE;
  t = gimple_build_omp_return (nowait);
  gimple_seq_add_stmt (&new_body, t);
  gimple_seq_add_seq (&new_body, tred_dlist);
  maybe_add_implicit_barrier_cancel (ctx, t, &new_body);

  if (rclauses)
    OMP_CLAUSE_DECL (rclauses) = rtmp;

  gimple_bind_set_body (new_stmt, new_body);
}
7371
7372
7373 /* A subroutine of lower_omp_single. Expand the simple form of
7374 a GIMPLE_OMP_SINGLE, without a copyprivate clause:
7375
7376 if (GOMP_single_start ())
7377 BODY;
7378 [ GOMP_barrier (); ] -> unless 'nowait' is present.
7379
7380 FIXME. It may be better to delay expanding the logic of this until
7381 pass_expand_omp. The expanded logic may make the job more difficult
7382 to a synchronization analysis pass. */
7383
7384 static void
7385 lower_omp_single_simple (gomp_single *single_stmt, gimple_seq *pre_p)
7386 {
7387 location_t loc = gimple_location (single_stmt);
7388 tree tlabel = create_artificial_label (loc);
7389 tree flabel = create_artificial_label (loc);
7390 gimple *call, *cond;
7391 tree lhs, decl;
7392
7393 decl = builtin_decl_explicit (BUILT_IN_GOMP_SINGLE_START);
7394 lhs = create_tmp_var (TREE_TYPE (TREE_TYPE (decl)));
7395 call = gimple_build_call (decl, 0);
7396 gimple_call_set_lhs (call, lhs);
7397 gimple_seq_add_stmt (pre_p, call);
7398
7399 cond = gimple_build_cond (EQ_EXPR, lhs,
7400 fold_convert_loc (loc, TREE_TYPE (lhs),
7401 boolean_true_node),
7402 tlabel, flabel);
7403 gimple_seq_add_stmt (pre_p, cond);
7404 gimple_seq_add_stmt (pre_p, gimple_build_label (tlabel));
7405 gimple_seq_add_seq (pre_p, gimple_omp_body (single_stmt));
7406 gimple_seq_add_stmt (pre_p, gimple_build_label (flabel));
7407 }
7408
7409
/* A subroutine of lower_omp_single.  Expand the simple form of
   a GIMPLE_OMP_SINGLE, with a copyprivate clause:

	#pragma omp single copyprivate (a, b, c)

   Create a new structure to hold copies of 'a', 'b' and 'c' and emit:

	{
	  if ((copyout_p = GOMP_single_copy_start ()) == NULL)
	    {
	      BODY;
	      copyout.a = a;
	      copyout.b = b;
	      copyout.c = c;
	      GOMP_single_copy_end (&copyout);
	    }
	  else
	    {
	      a = copyout_p->a;
	      b = copyout_p->b;
	      c = copyout_p->c;
	    }
	  GOMP_barrier ();
	}

  FIXME.  It may be better to delay expanding the logic of this until
  pass_expand_omp.  The expanded logic may make the job more difficult
  to a synchronization analysis pass.  */

static void
lower_omp_single_copy (gomp_single *single_stmt, gimple_seq *pre_p,
		       omp_context *ctx)
{
  tree ptr_type, t, l0, l1, l2, bfn_decl;
  gimple_seq copyin_seq;
  location_t loc = gimple_location (single_stmt);

  /* .omp_copy_o is the record the executing thread fills in (the
     "copyout" in the sketch above); others read it via .omp_copy_i.  */
  ctx->sender_decl = create_tmp_var (ctx->record_type, ".omp_copy_o");

  ptr_type = build_pointer_type (ctx->record_type);
  ctx->receiver_decl = create_tmp_var (ptr_type, ".omp_copy_i");

  /* L0 = single thread's body/copy-out, L1 = other threads' copy-in,
     L2 = common join point.  */
  l0 = create_artificial_label (loc);
  l1 = create_artificial_label (loc);
  l2 = create_artificial_label (loc);

  /* receiver = (record *) GOMP_single_copy_start ();  */
  bfn_decl = builtin_decl_explicit (BUILT_IN_GOMP_SINGLE_COPY_START);
  t = build_call_expr_loc (loc, bfn_decl, 0);
  t = fold_convert_loc (loc, ptr_type, t);
  gimplify_assign (ctx->receiver_decl, t, pre_p);

  /* if (receiver == NULL) goto l0; else goto l1;  */
  t = build2 (EQ_EXPR, boolean_type_node, ctx->receiver_decl,
	      build_int_cst (ptr_type, 0));
  t = build3 (COND_EXPR, void_type_node, t,
	      build_and_jump (&l0), build_and_jump (&l1));
  gimplify_and_add (t, pre_p);

  /* The thread that got NULL runs the body and copies out.  */
  gimple_seq_add_stmt (pre_p, gimple_build_label (l0));

  gimple_seq_add_seq (pre_p, gimple_omp_body (single_stmt));

  /* This emits the copy-out assignments into PRE_P and collects the
     corresponding copy-in assignments into COPYIN_SEQ.  */
  copyin_seq = NULL;
  lower_copyprivate_clauses (gimple_omp_single_clauses (single_stmt), pre_p,
			      &copyin_seq, ctx);

  /* GOMP_single_copy_end (&sender);  */
  t = build_fold_addr_expr_loc (loc, ctx->sender_decl);
  bfn_decl = builtin_decl_explicit (BUILT_IN_GOMP_SINGLE_COPY_END);
  t = build_call_expr_loc (loc, bfn_decl, 1, t);
  gimplify_and_add (t, pre_p);

  t = build_and_jump (&l2);
  gimplify_and_add (t, pre_p);

  /* All other threads copy in from the record.  */
  gimple_seq_add_stmt (pre_p, gimple_build_label (l1));

  gimple_seq_add_seq (pre_p, copyin_seq);

  gimple_seq_add_stmt (pre_p, gimple_build_label (l2));
}
7489
7490
/* Expand code for an OpenMP single directive.  */

static void
lower_omp_single (gimple_stmt_iterator *gsi_p, omp_context *ctx)
{
  tree block;
  gomp_single *single_stmt = as_a <gomp_single *> (gsi_stmt (*gsi_p));
  gbind *bind;
  gimple_seq bind_body, bind_body_tail = NULL, dlist;

  push_gimplify_context ();

  /* Replace the single statement with a bind that will hold the whole
     expansion.  */
  block = make_node (BLOCK);
  bind = gimple_build_bind (NULL, NULL, block);
  gsi_replace (gsi_p, bind, true);
  bind_body = NULL;
  dlist = NULL;
  lower_rec_input_clauses (gimple_omp_single_clauses (single_stmt),
			   &bind_body, &dlist, ctx, NULL);
  lower_omp (gimple_omp_body_ptr (single_stmt), ctx);

  gimple_seq_add_stmt (&bind_body, single_stmt);

  /* A non-NULL record_type means a copyprivate clause was present and
     needs the copy-out/copy-in form; otherwise the simple
     GOMP_single_start form suffices.  */
  if (ctx->record_type)
    lower_omp_single_copy (single_stmt, &bind_body, ctx);
  else
    lower_omp_single_simple (single_stmt, &bind_body);

  gimple_omp_set_body (single_stmt, NULL);

  gimple_seq_add_seq (&bind_body, dlist);

  bind_body = maybe_catch_exception (bind_body);

  bool nowait = omp_find_clause (gimple_omp_single_clauses (single_stmt),
				 OMP_CLAUSE_NOWAIT) != NULL_TREE;
  gimple *g = gimple_build_omp_return (nowait);
  gimple_seq_add_stmt (&bind_body_tail, g);
  maybe_add_implicit_barrier_cancel (ctx, g, &bind_body_tail);
  if (ctx->record_type)
    {
      /* Clobber the copy-out record right after the return, ending its
	 lifetime there.  An empty-CONSTRUCTOR assignment with
	 TREE_THIS_VOLATILE set is GIMPLE's clobber representation.  */
      gimple_stmt_iterator gsi = gsi_start (bind_body_tail);
      tree clobber = build_constructor (ctx->record_type, NULL);
      TREE_THIS_VOLATILE (clobber) = 1;
      gsi_insert_after (&gsi, gimple_build_assign (ctx->sender_decl,
						   clobber), GSI_SAME_STMT);
    }
  gimple_seq_add_seq (&bind_body, bind_body_tail);
  gimple_bind_set_body (bind, bind_body);

  pop_gimplify_context (bind);

  gimple_bind_append_vars (bind, ctx->block_vars);
  BLOCK_VARS (block) = ctx->block_vars;
  if (BLOCK_VARS (block))
    TREE_USED (block) = 1;
}
7548
7549
7550 /* Expand code for an OpenMP master directive. */
7551
7552 static void
7553 lower_omp_master (gimple_stmt_iterator *gsi_p, omp_context *ctx)
7554 {
7555 tree block, lab = NULL, x, bfn_decl;
7556 gimple *stmt = gsi_stmt (*gsi_p);
7557 gbind *bind;
7558 location_t loc = gimple_location (stmt);
7559 gimple_seq tseq;
7560
7561 push_gimplify_context ();
7562
7563 block = make_node (BLOCK);
7564 bind = gimple_build_bind (NULL, NULL, block);
7565 gsi_replace (gsi_p, bind, true);
7566 gimple_bind_add_stmt (bind, stmt);
7567
7568 bfn_decl = builtin_decl_explicit (BUILT_IN_OMP_GET_THREAD_NUM);
7569 x = build_call_expr_loc (loc, bfn_decl, 0);
7570 x = build2 (EQ_EXPR, boolean_type_node, x, integer_zero_node);
7571 x = build3 (COND_EXPR, void_type_node, x, NULL, build_and_jump (&lab));
7572 tseq = NULL;
7573 gimplify_and_add (x, &tseq);
7574 gimple_bind_add_seq (bind, tseq);
7575
7576 lower_omp (gimple_omp_body_ptr (stmt), ctx);
7577 gimple_omp_set_body (stmt, maybe_catch_exception (gimple_omp_body (stmt)));
7578 gimple_bind_add_seq (bind, gimple_omp_body (stmt));
7579 gimple_omp_set_body (stmt, NULL);
7580
7581 gimple_bind_add_stmt (bind, gimple_build_label (lab));
7582
7583 gimple_bind_add_stmt (bind, gimple_build_omp_return (true));
7584
7585 pop_gimplify_context (bind);
7586
7587 gimple_bind_append_vars (bind, ctx->block_vars);
7588 BLOCK_VARS (block) = ctx->block_vars;
7589 }
7590
7591 /* Helper function for lower_omp_task_reductions. For a specific PASS
7592 find out the current clause it should be processed, or return false
7593 if all have been processed already. */
7594
7595 static inline bool
7596 omp_task_reduction_iterate (int pass, enum tree_code code,
7597 enum omp_clause_code ccode, tree *c, tree *decl,
7598 tree *type, tree *next)
7599 {
7600 for (; *c; *c = omp_find_clause (OMP_CLAUSE_CHAIN (*c), ccode))
7601 {
7602 if (ccode == OMP_CLAUSE_REDUCTION
7603 && code != OMP_TASKLOOP
7604 && !OMP_CLAUSE_REDUCTION_TASK (*c))
7605 continue;
7606 *decl = OMP_CLAUSE_DECL (*c);
7607 *type = TREE_TYPE (*decl);
7608 if (TREE_CODE (*decl) == MEM_REF)
7609 {
7610 if (pass != 1)
7611 continue;
7612 }
7613 else
7614 {
7615 if (omp_is_reference (*decl))
7616 *type = TREE_TYPE (*type);
7617 if (pass != (!TREE_CONSTANT (TYPE_SIZE_UNIT (*type))))
7618 continue;
7619 }
7620 *next = omp_find_clause (OMP_CLAUSE_CHAIN (*c), ccode);
7621 return true;
7622 }
7623 *decl = NULL_TREE;
7624 *type = NULL_TREE;
7625 *next = NULL_TREE;
7626 return false;
7627 }
7628
7629 /* Lower task_reduction and reduction clauses (the latter unless CODE is
7630 OMP_TASKGROUP only with task modifier). Register mapping of those in
7631 START sequence and reducing them and unregister them in the END sequence. */
7632
7633 static void
7634 lower_omp_task_reductions (omp_context *ctx, enum tree_code code, tree clauses,
7635 gimple_seq *start, gimple_seq *end)
7636 {
7637 enum omp_clause_code ccode
7638 = (code == OMP_TASKGROUP
7639 ? OMP_CLAUSE_TASK_REDUCTION : OMP_CLAUSE_REDUCTION);
7640 tree cancellable = NULL_TREE;
7641 clauses = omp_task_reductions_find_first (clauses, code, ccode);
7642 if (clauses == NULL_TREE)
7643 return;
7644 if (code == OMP_FOR || code == OMP_SECTIONS)
7645 {
7646 for (omp_context *outer = ctx->outer; outer; outer = outer->outer)
7647 if (gimple_code (outer->stmt) == GIMPLE_OMP_PARALLEL
7648 && outer->cancellable)
7649 {
7650 cancellable = error_mark_node;
7651 break;
7652 }
7653 else if (gimple_code (outer->stmt) != GIMPLE_OMP_TASKGROUP)
7654 break;
7655 }
7656 tree record_type = lang_hooks.types.make_type (RECORD_TYPE);
7657 tree *last = &TYPE_FIELDS (record_type);
7658 unsigned cnt = 0;
7659 if (cancellable)
7660 {
7661 tree field = build_decl (UNKNOWN_LOCATION, FIELD_DECL, NULL_TREE,
7662 ptr_type_node);
7663 tree ifield = build_decl (UNKNOWN_LOCATION, FIELD_DECL, NULL_TREE,
7664 integer_type_node);
7665 *last = field;
7666 DECL_CHAIN (field) = ifield;
7667 last = &DECL_CHAIN (ifield);
7668 DECL_CONTEXT (field) = record_type;
7669 if (TYPE_ALIGN (record_type) < DECL_ALIGN (field))
7670 SET_TYPE_ALIGN (record_type, DECL_ALIGN (field));
7671 DECL_CONTEXT (ifield) = record_type;
7672 if (TYPE_ALIGN (record_type) < DECL_ALIGN (ifield))
7673 SET_TYPE_ALIGN (record_type, DECL_ALIGN (ifield));
7674 }
7675 for (int pass = 0; pass < 2; pass++)
7676 {
7677 tree decl, type, next;
7678 for (tree c = clauses;
7679 omp_task_reduction_iterate (pass, code, ccode,
7680 &c, &decl, &type, &next); c = next)
7681 {
7682 ++cnt;
7683 tree new_type = type;
7684 if (ctx->outer)
7685 new_type = remap_type (type, &ctx->outer->cb);
7686 tree field
7687 = build_decl (OMP_CLAUSE_LOCATION (c), FIELD_DECL,
7688 DECL_P (decl) ? DECL_NAME (decl) : NULL_TREE,
7689 new_type);
7690 if (DECL_P (decl) && type == TREE_TYPE (decl))
7691 {
7692 SET_DECL_ALIGN (field, DECL_ALIGN (decl));
7693 DECL_USER_ALIGN (field) = DECL_USER_ALIGN (decl);
7694 TREE_THIS_VOLATILE (field) = TREE_THIS_VOLATILE (decl);
7695 }
7696 else
7697 SET_DECL_ALIGN (field, TYPE_ALIGN (type));
7698 DECL_CONTEXT (field) = record_type;
7699 if (TYPE_ALIGN (record_type) < DECL_ALIGN (field))
7700 SET_TYPE_ALIGN (record_type, DECL_ALIGN (field));
7701 *last = field;
7702 last = &DECL_CHAIN (field);
7703 tree bfield
7704 = build_decl (OMP_CLAUSE_LOCATION (c), FIELD_DECL, NULL_TREE,
7705 boolean_type_node);
7706 DECL_CONTEXT (bfield) = record_type;
7707 if (TYPE_ALIGN (record_type) < DECL_ALIGN (bfield))
7708 SET_TYPE_ALIGN (record_type, DECL_ALIGN (bfield));
7709 *last = bfield;
7710 last = &DECL_CHAIN (bfield);
7711 }
7712 }
7713 *last = NULL_TREE;
7714 layout_type (record_type);
7715
7716 /* Build up an array which registers with the runtime all the reductions
7717 and deregisters them at the end. Format documented in libgomp/task.c. */
7718 tree atype = build_array_type_nelts (pointer_sized_int_node, 7 + cnt * 3);
7719 tree avar = create_tmp_var_raw (atype);
7720 gimple_add_tmp_var (avar);
7721 TREE_ADDRESSABLE (avar) = 1;
7722 tree r = build4 (ARRAY_REF, pointer_sized_int_node, avar, size_zero_node,
7723 NULL_TREE, NULL_TREE);
7724 tree t = build_int_cst (pointer_sized_int_node, cnt);
7725 gimple_seq_add_stmt (start, gimple_build_assign (r, t));
7726 gimple_seq seq = NULL;
7727 tree sz = fold_convert (pointer_sized_int_node,
7728 TYPE_SIZE_UNIT (record_type));
7729 int cachesz = 64;
7730 sz = fold_build2 (PLUS_EXPR, pointer_sized_int_node, sz,
7731 build_int_cst (pointer_sized_int_node, cachesz - 1));
7732 sz = fold_build2 (BIT_AND_EXPR, pointer_sized_int_node, sz,
7733 build_int_cst (pointer_sized_int_node, ~(cachesz - 1)));
7734 ctx->task_reductions.create (1 + cnt);
7735 ctx->task_reduction_map = new hash_map<tree, unsigned>;
7736 ctx->task_reductions.quick_push (TREE_CODE (sz) == INTEGER_CST
7737 ? sz : NULL_TREE);
7738 sz = force_gimple_operand (sz, &seq, true, NULL_TREE);
7739 gimple_seq_add_seq (start, seq);
7740 r = build4 (ARRAY_REF, pointer_sized_int_node, avar, size_one_node,
7741 NULL_TREE, NULL_TREE);
7742 gimple_seq_add_stmt (start, gimple_build_assign (r, sz));
7743 r = build4 (ARRAY_REF, pointer_sized_int_node, avar, size_int (2),
7744 NULL_TREE, NULL_TREE);
7745 t = build_int_cst (pointer_sized_int_node,
7746 MAX (TYPE_ALIGN_UNIT (record_type), (unsigned) cachesz));
7747 gimple_seq_add_stmt (start, gimple_build_assign (r, t));
7748 r = build4 (ARRAY_REF, pointer_sized_int_node, avar, size_int (3),
7749 NULL_TREE, NULL_TREE);
7750 t = build_int_cst (pointer_sized_int_node, -1);
7751 gimple_seq_add_stmt (start, gimple_build_assign (r, t));
7752 r = build4 (ARRAY_REF, pointer_sized_int_node, avar, size_int (4),
7753 NULL_TREE, NULL_TREE);
7754 t = build_int_cst (pointer_sized_int_node, 0);
7755 gimple_seq_add_stmt (start, gimple_build_assign (r, t));
7756
7757 /* In end, build a loop that iterates from 0 to < omp_get_num_threads ()
7758 and for each task reduction checks a bool right after the private variable
7759 within that thread's chunk; if the bool is clear, it hasn't been
7760 initialized and thus isn't going to be reduced nor destructed, otherwise
7761 reduce and destruct it. */
7762 tree idx = create_tmp_var (size_type_node);
7763 gimple_seq_add_stmt (end, gimple_build_assign (idx, size_zero_node));
7764 tree num_thr_sz = create_tmp_var (size_type_node);
7765 tree lab1 = create_artificial_label (UNKNOWN_LOCATION);
7766 tree lab2 = create_artificial_label (UNKNOWN_LOCATION);
7767 tree lab3 = NULL_TREE;
7768 gimple *g;
7769 if (code == OMP_FOR || code == OMP_SECTIONS)
7770 {
7771 /* For worksharing constructs, only perform it in the master thread,
7772 with the exception of cancelled implicit barriers - then only handle
7773 the current thread. */
7774 tree lab4 = create_artificial_label (UNKNOWN_LOCATION);
7775 t = builtin_decl_explicit (BUILT_IN_OMP_GET_THREAD_NUM);
7776 tree thr_num = create_tmp_var (integer_type_node);
7777 g = gimple_build_call (t, 0);
7778 gimple_call_set_lhs (g, thr_num);
7779 gimple_seq_add_stmt (end, g);
7780 if (cancellable)
7781 {
7782 tree c;
7783 tree lab5 = create_artificial_label (UNKNOWN_LOCATION);
7784 tree lab6 = create_artificial_label (UNKNOWN_LOCATION);
7785 lab3 = create_artificial_label (UNKNOWN_LOCATION);
7786 if (code == OMP_FOR)
7787 c = gimple_omp_for_clauses (ctx->stmt);
7788 else /* if (code == OMP_SECTIONS) */
7789 c = gimple_omp_sections_clauses (ctx->stmt);
7790 c = OMP_CLAUSE_DECL (omp_find_clause (c, OMP_CLAUSE__REDUCTEMP_));
7791 cancellable = c;
7792 g = gimple_build_cond (NE_EXPR, c, build_zero_cst (TREE_TYPE (c)),
7793 lab5, lab6);
7794 gimple_seq_add_stmt (end, g);
7795 gimple_seq_add_stmt (end, gimple_build_label (lab5));
7796 g = gimple_build_assign (idx, NOP_EXPR, thr_num);
7797 gimple_seq_add_stmt (end, g);
7798 g = gimple_build_assign (num_thr_sz, PLUS_EXPR, idx,
7799 build_one_cst (TREE_TYPE (idx)));
7800 gimple_seq_add_stmt (end, g);
7801 gimple_seq_add_stmt (end, gimple_build_goto (lab3));
7802 gimple_seq_add_stmt (end, gimple_build_label (lab6));
7803 }
7804 g = gimple_build_cond (NE_EXPR, thr_num, integer_zero_node, lab2, lab4);
7805 gimple_seq_add_stmt (end, g);
7806 gimple_seq_add_stmt (end, gimple_build_label (lab4));
7807 }
7808 if (code != OMP_PARALLEL)
7809 {
7810 t = builtin_decl_explicit (BUILT_IN_OMP_GET_NUM_THREADS);
7811 tree num_thr = create_tmp_var (integer_type_node);
7812 g = gimple_build_call (t, 0);
7813 gimple_call_set_lhs (g, num_thr);
7814 gimple_seq_add_stmt (end, g);
7815 g = gimple_build_assign (num_thr_sz, NOP_EXPR, num_thr);
7816 gimple_seq_add_stmt (end, g);
7817 if (cancellable)
7818 gimple_seq_add_stmt (end, gimple_build_label (lab3));
7819 }
7820 else
7821 {
7822 tree c = omp_find_clause (gimple_omp_parallel_clauses (ctx->stmt),
7823 OMP_CLAUSE__REDUCTEMP_);
7824 t = fold_convert (pointer_sized_int_node, OMP_CLAUSE_DECL (c));
7825 t = fold_convert (size_type_node, t);
7826 gimplify_assign (num_thr_sz, t, end);
7827 }
7828 t = build4 (ARRAY_REF, pointer_sized_int_node, avar, size_int (2),
7829 NULL_TREE, NULL_TREE);
7830 tree data = create_tmp_var (pointer_sized_int_node);
7831 gimple_seq_add_stmt (end, gimple_build_assign (data, t));
7832 gimple_seq_add_stmt (end, gimple_build_label (lab1));
7833 tree ptr;
7834 if (TREE_CODE (TYPE_SIZE_UNIT (record_type)) == INTEGER_CST)
7835 ptr = create_tmp_var (build_pointer_type (record_type));
7836 else
7837 ptr = create_tmp_var (ptr_type_node);
7838 gimple_seq_add_stmt (end, gimple_build_assign (ptr, NOP_EXPR, data));
7839
7840 tree field = TYPE_FIELDS (record_type);
7841 cnt = 0;
7842 if (cancellable)
7843 field = DECL_CHAIN (DECL_CHAIN (field));
7844 for (int pass = 0; pass < 2; pass++)
7845 {
7846 tree decl, type, next;
7847 for (tree c = clauses;
7848 omp_task_reduction_iterate (pass, code, ccode,
7849 &c, &decl, &type, &next); c = next)
7850 {
7851 tree var = decl, ref;
7852 if (TREE_CODE (decl) == MEM_REF)
7853 {
7854 var = TREE_OPERAND (var, 0);
7855 if (TREE_CODE (var) == POINTER_PLUS_EXPR)
7856 var = TREE_OPERAND (var, 0);
7857 tree v = var;
7858 if (TREE_CODE (var) == ADDR_EXPR)
7859 var = TREE_OPERAND (var, 0);
7860 else if (TREE_CODE (var) == INDIRECT_REF)
7861 var = TREE_OPERAND (var, 0);
7862 tree orig_var = var;
7863 if (is_variable_sized (var))
7864 {
7865 gcc_assert (DECL_HAS_VALUE_EXPR_P (var));
7866 var = DECL_VALUE_EXPR (var);
7867 gcc_assert (TREE_CODE (var) == INDIRECT_REF);
7868 var = TREE_OPERAND (var, 0);
7869 gcc_assert (DECL_P (var));
7870 }
7871 t = ref = maybe_lookup_decl_in_outer_ctx (var, ctx);
7872 if (orig_var != var)
7873 gcc_assert (TREE_CODE (v) == ADDR_EXPR);
7874 else if (TREE_CODE (v) == ADDR_EXPR)
7875 t = build_fold_addr_expr (t);
7876 else if (TREE_CODE (v) == INDIRECT_REF)
7877 t = build_fold_indirect_ref (t);
7878 if (TREE_CODE (TREE_OPERAND (decl, 0)) == POINTER_PLUS_EXPR)
7879 {
7880 tree b = TREE_OPERAND (TREE_OPERAND (decl, 0), 1);
7881 b = maybe_lookup_decl_in_outer_ctx (b, ctx);
7882 t = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (t), t, b);
7883 }
7884 if (!integer_zerop (TREE_OPERAND (decl, 1)))
7885 t = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (t), t,
7886 fold_convert (size_type_node,
7887 TREE_OPERAND (decl, 1)));
7888 }
7889 else
7890 {
7891 t = ref = maybe_lookup_decl_in_outer_ctx (var, ctx);
7892 if (!omp_is_reference (decl))
7893 t = build_fold_addr_expr (t);
7894 }
7895 t = fold_convert (pointer_sized_int_node, t);
7896 seq = NULL;
7897 t = force_gimple_operand (t, &seq, true, NULL_TREE);
7898 gimple_seq_add_seq (start, seq);
7899 r = build4 (ARRAY_REF, pointer_sized_int_node, avar,
7900 size_int (7 + cnt * 3), NULL_TREE, NULL_TREE);
7901 gimple_seq_add_stmt (start, gimple_build_assign (r, t));
7902 t = unshare_expr (byte_position (field));
7903 t = fold_convert (pointer_sized_int_node, t);
7904 ctx->task_reduction_map->put (c, cnt);
7905 ctx->task_reductions.quick_push (TREE_CODE (t) == INTEGER_CST
7906 ? t : NULL_TREE);
7907 seq = NULL;
7908 t = force_gimple_operand (t, &seq, true, NULL_TREE);
7909 gimple_seq_add_seq (start, seq);
7910 r = build4 (ARRAY_REF, pointer_sized_int_node, avar,
7911 size_int (7 + cnt * 3 + 1), NULL_TREE, NULL_TREE);
7912 gimple_seq_add_stmt (start, gimple_build_assign (r, t));
7913
7914 tree bfield = DECL_CHAIN (field);
7915 tree cond;
7916 if (code == OMP_PARALLEL || code == OMP_FOR || code == OMP_SECTIONS)
7917 /* In parallel or worksharing all threads unconditionally
7918 initialize all their task reduction private variables. */
7919 cond = boolean_true_node;
7920 else if (TREE_TYPE (ptr) == ptr_type_node)
7921 {
7922 cond = build2 (POINTER_PLUS_EXPR, ptr_type_node, ptr,
7923 unshare_expr (byte_position (bfield)));
7924 seq = NULL;
7925 cond = force_gimple_operand (cond, &seq, true, NULL_TREE);
7926 gimple_seq_add_seq (end, seq);
7927 tree pbool = build_pointer_type (TREE_TYPE (bfield));
7928 cond = build2 (MEM_REF, TREE_TYPE (bfield), cond,
7929 build_int_cst (pbool, 0));
7930 }
7931 else
7932 cond = build3 (COMPONENT_REF, TREE_TYPE (bfield),
7933 build_simple_mem_ref (ptr), bfield, NULL_TREE);
7934 tree lab3 = create_artificial_label (UNKNOWN_LOCATION);
7935 tree lab4 = create_artificial_label (UNKNOWN_LOCATION);
7936 tree condv = create_tmp_var (boolean_type_node);
7937 gimple_seq_add_stmt (end, gimple_build_assign (condv, cond));
7938 g = gimple_build_cond (NE_EXPR, condv, boolean_false_node,
7939 lab3, lab4);
7940 gimple_seq_add_stmt (end, g);
7941 gimple_seq_add_stmt (end, gimple_build_label (lab3));
7942 if (cancellable && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) == NULL_TREE)
7943 {
7944 /* If this reduction doesn't need destruction and parallel
7945 has been cancelled, there is nothing to do for this
7946 reduction, so jump around the merge operation. */
7947 tree lab5 = create_artificial_label (UNKNOWN_LOCATION);
7948 g = gimple_build_cond (NE_EXPR, cancellable,
7949 build_zero_cst (TREE_TYPE (cancellable)),
7950 lab4, lab5);
7951 gimple_seq_add_stmt (end, g);
7952 gimple_seq_add_stmt (end, gimple_build_label (lab5));
7953 }
7954
7955 tree new_var;
7956 if (TREE_TYPE (ptr) == ptr_type_node)
7957 {
7958 new_var = build2 (POINTER_PLUS_EXPR, ptr_type_node, ptr,
7959 unshare_expr (byte_position (field)));
7960 seq = NULL;
7961 new_var = force_gimple_operand (new_var, &seq, true, NULL_TREE);
7962 gimple_seq_add_seq (end, seq);
7963 tree pbool = build_pointer_type (TREE_TYPE (field));
7964 new_var = build2 (MEM_REF, TREE_TYPE (field), new_var,
7965 build_int_cst (pbool, 0));
7966 }
7967 else
7968 new_var = build3 (COMPONENT_REF, TREE_TYPE (field),
7969 build_simple_mem_ref (ptr), field, NULL_TREE);
7970
7971 enum tree_code rcode = OMP_CLAUSE_REDUCTION_CODE (c);
7972 if (TREE_CODE (decl) != MEM_REF && omp_is_reference (decl))
7973 ref = build_simple_mem_ref (ref);
7974 /* reduction(-:var) sums up the partial results, so it acts
7975 identically to reduction(+:var). */
7976 if (rcode == MINUS_EXPR)
7977 rcode = PLUS_EXPR;
7978 if (TREE_CODE (decl) == MEM_REF)
7979 {
7980 tree type = TREE_TYPE (new_var);
7981 tree v = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
7982 tree i = create_tmp_var (TREE_TYPE (v));
7983 tree ptype = build_pointer_type (TREE_TYPE (type));
7984 if (DECL_P (v))
7985 {
7986 v = maybe_lookup_decl_in_outer_ctx (v, ctx);
7987 tree vv = create_tmp_var (TREE_TYPE (v));
7988 gimplify_assign (vv, v, start);
7989 v = vv;
7990 }
7991 ref = build4 (ARRAY_REF, pointer_sized_int_node, avar,
7992 size_int (7 + cnt * 3), NULL_TREE, NULL_TREE);
7993 new_var = build_fold_addr_expr (new_var);
7994 new_var = fold_convert (ptype, new_var);
7995 ref = fold_convert (ptype, ref);
7996 tree m = create_tmp_var (ptype);
7997 gimplify_assign (m, new_var, end);
7998 new_var = m;
7999 m = create_tmp_var (ptype);
8000 gimplify_assign (m, ref, end);
8001 ref = m;
8002 gimplify_assign (i, build_int_cst (TREE_TYPE (v), 0), end);
8003 tree body = create_artificial_label (UNKNOWN_LOCATION);
8004 tree endl = create_artificial_label (UNKNOWN_LOCATION);
8005 gimple_seq_add_stmt (end, gimple_build_label (body));
8006 tree priv = build_simple_mem_ref (new_var);
8007 tree out = build_simple_mem_ref (ref);
8008 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
8009 {
8010 tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
8011 tree decl_placeholder
8012 = OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c);
8013 tree lab6 = NULL_TREE;
8014 if (cancellable)
8015 {
8016 /* If this reduction needs destruction and parallel
8017 has been cancelled, jump around the merge operation
8018 to the destruction. */
8019 tree lab5 = create_artificial_label (UNKNOWN_LOCATION);
8020 lab6 = create_artificial_label (UNKNOWN_LOCATION);
8021 tree zero = build_zero_cst (TREE_TYPE (cancellable));
8022 g = gimple_build_cond (NE_EXPR, cancellable, zero,
8023 lab6, lab5);
8024 gimple_seq_add_stmt (end, g);
8025 gimple_seq_add_stmt (end, gimple_build_label (lab5));
8026 }
8027 SET_DECL_VALUE_EXPR (placeholder, out);
8028 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
8029 SET_DECL_VALUE_EXPR (decl_placeholder, priv);
8030 DECL_HAS_VALUE_EXPR_P (decl_placeholder) = 1;
8031 lower_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c), ctx);
8032 gimple_seq_add_seq (end,
8033 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c));
8034 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
8035 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_TASK_REDUCTION)
8036 {
8037 OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) = NULL;
8038 OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c) = NULL;
8039 }
8040 if (cancellable)
8041 gimple_seq_add_stmt (end, gimple_build_label (lab6));
8042 tree x = lang_hooks.decls.omp_clause_dtor (c, priv);
8043 if (x)
8044 {
8045 gimple_seq tseq = NULL;
8046 gimplify_stmt (&x, &tseq);
8047 gimple_seq_add_seq (end, tseq);
8048 }
8049 }
8050 else
8051 {
8052 tree x = build2 (rcode, TREE_TYPE (out), out, priv);
8053 out = unshare_expr (out);
8054 gimplify_assign (out, x, end);
8055 }
8056 gimple *g
8057 = gimple_build_assign (new_var, POINTER_PLUS_EXPR, new_var,
8058 TYPE_SIZE_UNIT (TREE_TYPE (type)));
8059 gimple_seq_add_stmt (end, g);
8060 g = gimple_build_assign (ref, POINTER_PLUS_EXPR, ref,
8061 TYPE_SIZE_UNIT (TREE_TYPE (type)));
8062 gimple_seq_add_stmt (end, g);
8063 g = gimple_build_assign (i, PLUS_EXPR, i,
8064 build_int_cst (TREE_TYPE (i), 1));
8065 gimple_seq_add_stmt (end, g);
8066 g = gimple_build_cond (LE_EXPR, i, v, body, endl);
8067 gimple_seq_add_stmt (end, g);
8068 gimple_seq_add_stmt (end, gimple_build_label (endl));
8069 }
8070 else if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
8071 {
8072 tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
8073 tree oldv = NULL_TREE;
8074 tree lab6 = NULL_TREE;
8075 if (cancellable)
8076 {
8077 /* If this reduction needs destruction and parallel
8078 has been cancelled, jump around the merge operation
8079 to the destruction. */
8080 tree lab5 = create_artificial_label (UNKNOWN_LOCATION);
8081 lab6 = create_artificial_label (UNKNOWN_LOCATION);
8082 tree zero = build_zero_cst (TREE_TYPE (cancellable));
8083 g = gimple_build_cond (NE_EXPR, cancellable, zero,
8084 lab6, lab5);
8085 gimple_seq_add_stmt (end, g);
8086 gimple_seq_add_stmt (end, gimple_build_label (lab5));
8087 }
8088 if (omp_is_reference (decl)
8089 && !useless_type_conversion_p (TREE_TYPE (placeholder),
8090 TREE_TYPE (ref)))
8091 ref = build_fold_addr_expr_loc (OMP_CLAUSE_LOCATION (c), ref);
8092 ref = build_fold_addr_expr_loc (OMP_CLAUSE_LOCATION (c), ref);
8093 tree refv = create_tmp_var (TREE_TYPE (ref));
8094 gimplify_assign (refv, ref, end);
8095 ref = build_simple_mem_ref_loc (OMP_CLAUSE_LOCATION (c), refv);
8096 SET_DECL_VALUE_EXPR (placeholder, ref);
8097 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
8098 tree d = maybe_lookup_decl (decl, ctx);
8099 gcc_assert (d);
8100 if (DECL_HAS_VALUE_EXPR_P (d))
8101 oldv = DECL_VALUE_EXPR (d);
8102 if (omp_is_reference (var))
8103 {
8104 tree v = fold_convert (TREE_TYPE (d),
8105 build_fold_addr_expr (new_var));
8106 SET_DECL_VALUE_EXPR (d, v);
8107 }
8108 else
8109 SET_DECL_VALUE_EXPR (d, new_var);
8110 DECL_HAS_VALUE_EXPR_P (d) = 1;
8111 lower_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c), ctx);
8112 if (oldv)
8113 SET_DECL_VALUE_EXPR (d, oldv);
8114 else
8115 {
8116 SET_DECL_VALUE_EXPR (d, NULL_TREE);
8117 DECL_HAS_VALUE_EXPR_P (d) = 0;
8118 }
8119 gimple_seq_add_seq (end, OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c));
8120 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
8121 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_TASK_REDUCTION)
8122 OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) = NULL;
8123 if (cancellable)
8124 gimple_seq_add_stmt (end, gimple_build_label (lab6));
8125 tree x = lang_hooks.decls.omp_clause_dtor (c, new_var);
8126 if (x)
8127 {
8128 gimple_seq tseq = NULL;
8129 gimplify_stmt (&x, &tseq);
8130 gimple_seq_add_seq (end, tseq);
8131 }
8132 }
8133 else
8134 {
8135 tree x = build2 (rcode, TREE_TYPE (ref), ref, new_var);
8136 ref = unshare_expr (ref);
8137 gimplify_assign (ref, x, end);
8138 }
8139 gimple_seq_add_stmt (end, gimple_build_label (lab4));
8140 ++cnt;
8141 field = DECL_CHAIN (bfield);
8142 }
8143 }
8144
8145 if (code == OMP_TASKGROUP)
8146 {
8147 t = builtin_decl_explicit (BUILT_IN_GOMP_TASKGROUP_REDUCTION_REGISTER);
8148 g = gimple_build_call (t, 1, build_fold_addr_expr (avar));
8149 gimple_seq_add_stmt (start, g);
8150 }
8151 else
8152 {
8153 tree c;
8154 if (code == OMP_FOR)
8155 c = gimple_omp_for_clauses (ctx->stmt);
8156 else if (code == OMP_SECTIONS)
8157 c = gimple_omp_sections_clauses (ctx->stmt);
8158 else
8159 c = gimple_omp_taskreg_clauses (ctx->stmt);
8160 c = omp_find_clause (c, OMP_CLAUSE__REDUCTEMP_);
8161 t = fold_convert (TREE_TYPE (OMP_CLAUSE_DECL (c)),
8162 build_fold_addr_expr (avar));
8163 gimplify_assign (OMP_CLAUSE_DECL (c), t, start);
8164 }
8165
8166 gimple_seq_add_stmt (end, gimple_build_assign (data, PLUS_EXPR, data, sz));
8167 gimple_seq_add_stmt (end, gimple_build_assign (idx, PLUS_EXPR, idx,
8168 size_one_node));
8169 g = gimple_build_cond (NE_EXPR, idx, num_thr_sz, lab1, lab2);
8170 gimple_seq_add_stmt (end, g);
8171 gimple_seq_add_stmt (end, gimple_build_label (lab2));
8172 if (code == OMP_FOR || code == OMP_SECTIONS)
8173 {
8174 enum built_in_function bfn
8175 = BUILT_IN_GOMP_WORKSHARE_TASK_REDUCTION_UNREGISTER;
8176 t = builtin_decl_explicit (bfn);
8177 tree c_bool_type = TREE_VALUE (TYPE_ARG_TYPES (TREE_TYPE (t)));
8178 tree arg;
8179 if (cancellable)
8180 {
8181 arg = create_tmp_var (c_bool_type);
8182 gimple_seq_add_stmt (end, gimple_build_assign (arg, NOP_EXPR,
8183 cancellable));
8184 }
8185 else
8186 arg = build_int_cst (c_bool_type, 0);
8187 g = gimple_build_call (t, 1, arg);
8188 }
8189 else
8190 {
8191 t = builtin_decl_explicit (BUILT_IN_GOMP_TASKGROUP_REDUCTION_UNREGISTER);
8192 g = gimple_build_call (t, 1, build_fold_addr_expr (avar));
8193 }
8194 gimple_seq_add_stmt (end, g);
8195 t = build_constructor (atype, NULL);
8196 TREE_THIS_VOLATILE (t) = 1;
8197 gimple_seq_add_stmt (end, gimple_build_assign (avar, t));
8198 }
8199
8200 /* Expand code for an OpenMP taskgroup directive. */
8201
8202 static void
8203 lower_omp_taskgroup (gimple_stmt_iterator *gsi_p, omp_context *ctx)
8204 {
8205 gimple *stmt = gsi_stmt (*gsi_p);
8206 gcall *x;
8207 gbind *bind;
8208 gimple_seq dseq = NULL;
8209 tree block = make_node (BLOCK);
8210
8211 bind = gimple_build_bind (NULL, NULL, block);
8212 gsi_replace (gsi_p, bind, true);
8213 gimple_bind_add_stmt (bind, stmt);
8214
8215 push_gimplify_context ();
8216
8217 x = gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_TASKGROUP_START),
8218 0);
8219 gimple_bind_add_stmt (bind, x);
8220
8221 lower_omp_task_reductions (ctx, OMP_TASKGROUP,
8222 gimple_omp_taskgroup_clauses (stmt),
8223 gimple_bind_body_ptr (bind), &dseq);
8224
8225 lower_omp (gimple_omp_body_ptr (stmt), ctx);
8226 gimple_bind_add_seq (bind, gimple_omp_body (stmt));
8227 gimple_omp_set_body (stmt, NULL);
8228
8229 gimple_bind_add_stmt (bind, gimple_build_omp_return (true));
8230 gimple_bind_add_seq (bind, dseq);
8231
8232 pop_gimplify_context (bind);
8233
8234 gimple_bind_append_vars (bind, ctx->block_vars);
8235 BLOCK_VARS (block) = ctx->block_vars;
8236 }
8237
8238
/* Fold the OMP_ORDERED_CLAUSES for the OMP_ORDERED in ORD_STMT if possible.
   Merges adjacent depend(sink:...) ordered constructs and folds their
   dependence vectors into a single conservative vector (see the big
   comment below for the algorithm).  CTX is the ordered construct's
   context; the enclosing context must be a GIMPLE_OMP_FOR with an
   ordered clause, otherwise nothing is done.  */

static void
lower_omp_ordered_clauses (gimple_stmt_iterator *gsi_p, gomp_ordered *ord_stmt,
			   omp_context *ctx)
{
  struct omp_for_data fd;
  if (!ctx->outer || gimple_code (ctx->outer->stmt) != GIMPLE_OMP_FOR)
    return;

  unsigned int len = gimple_omp_for_collapse (ctx->outer->stmt);
  struct omp_for_data_loop *loops = XALLOCAVEC (struct omp_for_data_loop, len);
  omp_extract_for_data (as_a <gomp_for *> (ctx->outer->stmt), &fd, loops);
  if (!fd.ordered)
    return;

  tree *list_p = gimple_omp_ordered_clauses_ptr (ord_stmt);
  tree c = gimple_omp_ordered_clauses (ord_stmt);
  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND
      && OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SINK)
    {
      /* Merge depend clauses from multiple adjacent
	 #pragma omp ordered depend(sink:...) constructs
	 into one #pragma omp ordered depend(sink:...), so that
	 we can optimize them together.  */
      gimple_stmt_iterator gsi = *gsi_p;
      gsi_next (&gsi);
      while (!gsi_end_p (gsi))
	{
	  gimple *stmt = gsi_stmt (gsi);
	  /* Skip over debug stmts and nops between the constructs.  */
	  if (is_gimple_debug (stmt)
	      || gimple_code (stmt) == GIMPLE_NOP)
	    {
	      gsi_next (&gsi);
	      continue;
	    }
	  if (gimple_code (stmt) != GIMPLE_OMP_ORDERED)
	    break;
	  gomp_ordered *ord_stmt2 = as_a <gomp_ordered *> (stmt);
	  c = gimple_omp_ordered_clauses (ord_stmt2);
	  if (c == NULL_TREE
	      || OMP_CLAUSE_CODE (c) != OMP_CLAUSE_DEPEND
	      || OMP_CLAUSE_DEPEND_KIND (c) != OMP_CLAUSE_DEPEND_SINK)
	    break;
	  /* Splice the following construct's clauses onto the end of
	     ORD_STMT's clause chain and delete the construct itself.  */
	  while (*list_p)
	    list_p = &OMP_CLAUSE_CHAIN (*list_p);
	  *list_p = c;
	  gsi_remove (&gsi, true);
	}
    }

  /* Canonicalize sink dependence clauses into one folded clause if
     possible.

     The basic algorithm is to create a sink vector whose first
     element is the GCD of all the first elements, and whose remaining
     elements are the minimum of the subsequent columns.

     We ignore dependence vectors whose first element is zero because
     such dependencies are known to be executed by the same thread.

     We take into account the direction of the loop, so a minimum
     becomes a maximum if the loop is iterating forwards.  We also
     ignore sink clauses where the loop direction is unknown, or where
     the offsets are clearly invalid because they are not a multiple
     of the loop increment.

     For example:

	#pragma omp for ordered(2)
	for (i=0; i < N; ++i)
	  for (j=0; j < M; ++j)
	    {
	      #pragma omp ordered \
		depend(sink:i-8,j-2) \
		depend(sink:i,j-1) \	// Completely ignored because i+0.
		depend(sink:i-4,j-3) \
		depend(sink:i-6,j-4)
	      #pragma omp ordered depend(source)
	    }

     Folded clause is:

	depend(sink:-gcd(8,4,6),-min(2,3,4))
	  -or-
	depend(sink:-2,-2)
   */

  /* FIXME: Computing GCD's where the first element is zero is
     non-trivial in the presence of collapsed loops.  Do this later.  */
  if (fd.collapse > 1)
    return;

  /* FOLDED_DEPS layout: elements [0, len) hold the folded vector built
     so far; elements [len, 2*len-1) hold the current clause's offsets
     for dimensions 1..len-1 so they can be copied into the folded
     vector if the current clause turns out to be lexically later.  */
  wide_int *folded_deps = XALLOCAVEC (wide_int, 2 * len - 1);

  /* wide_int is not a POD so it must be default-constructed.  */
  for (unsigned i = 0; i != 2 * len - 1; ++i)
    new (static_cast<void*>(folded_deps + i)) wide_int ();

  tree folded_dep = NULL_TREE;
  /* TRUE if the first dimension's offset is negative.  */
  bool neg_offset_p = false;

  list_p = gimple_omp_ordered_clauses_ptr (ord_stmt);
  unsigned int i;
  while ((c = *list_p) != NULL)
    {
      bool remove = false;

      gcc_assert (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND);
      if (OMP_CLAUSE_DEPEND_KIND (c) != OMP_CLAUSE_DEPEND_SINK)
	goto next_ordered_clause;

      tree vec;
      for (vec = OMP_CLAUSE_DECL (c), i = 0;
	   vec && TREE_CODE (vec) == TREE_LIST;
	   vec = TREE_CHAIN (vec), ++i)
	{
	  gcc_assert (i < len);

	  /* omp_extract_for_data has canonicalized the condition.  */
	  gcc_assert (fd.loops[i].cond_code == LT_EXPR
		      || fd.loops[i].cond_code == GT_EXPR);
	  bool forward = fd.loops[i].cond_code == LT_EXPR;
	  bool maybe_lexically_later = true;

	  /* While the committee makes up its mind, bail if we have any
	     non-constant steps.  */
	  if (TREE_CODE (fd.loops[i].step) != INTEGER_CST)
	    goto lower_omp_ordered_ret;

	  tree itype = TREE_TYPE (TREE_VALUE (vec));
	  if (POINTER_TYPE_P (itype))
	    itype = sizetype;
	  wide_int offset = wide_int::from (wi::to_wide (TREE_PURPOSE (vec)),
					    TYPE_PRECISION (itype),
					    TYPE_SIGN (itype));

	  /* Ignore invalid offsets that are not multiples of the step.  */
	  if (!wi::multiple_of_p (wi::abs (offset),
				  wi::abs (wi::to_wide (fd.loops[i].step)),
				  UNSIGNED))
	    {
	      warning_at (OMP_CLAUSE_LOCATION (c), 0,
			  "ignoring sink clause with offset that is not "
			  "a multiple of the loop step");
	      remove = true;
	      goto next_ordered_clause;
	    }

	  /* Calculate the first dimension.  The first dimension of
	     the folded dependency vector is the GCD of the first
	     elements, while ignoring any first elements whose offset
	     is 0.  */
	  if (i == 0)
	    {
	      /* Ignore dependence vectors whose first dimension is 0.  */
	      if (offset == 0)
		{
		  remove = true;
		  goto next_ordered_clause;
		}
	      else
		{
		  if (!TYPE_UNSIGNED (itype) && (forward ^ wi::neg_p (offset)))
		    {
		      error_at (OMP_CLAUSE_LOCATION (c),
				"first offset must be in opposite direction "
				"of loop iterations");
		      goto lower_omp_ordered_ret;
		    }
		  /* Normalize the offset so the GCD is computed on
		     non-negative values regardless of loop direction;
		     NEG_OFFSET_P remembers to negate it back at the end.  */
		  if (forward)
		    offset = -offset;
		  neg_offset_p = forward;
		  /* Initialize the first time around.  */
		  if (folded_dep == NULL_TREE)
		    {
		      folded_dep = c;
		      folded_deps[0] = offset;
		    }
		  else
		    folded_deps[0] = wi::gcd (folded_deps[0],
					      offset, UNSIGNED);
		}
	    }
	  /* Calculate minimum for the remaining dimensions.  */
	  else
	    {
	      folded_deps[len + i - 1] = offset;
	      if (folded_dep == c)
		folded_deps[i] = offset;
	      else if (maybe_lexically_later
		       && !wi::eq_p (folded_deps[i], offset))
		{
		  if (forward ^ wi::gts_p (folded_deps[i], offset))
		    {
		      /* The current clause is lexically later than the
			 folded one so far; adopt its offsets for all
			 dimensions processed up to now.  */
		      unsigned int j;
		      folded_dep = c;
		      for (j = 1; j <= i; j++)
			folded_deps[j] = folded_deps[len + j - 1];
		    }
		  else
		    maybe_lexically_later = false;
		}
	    }
	}
      gcc_assert (i == len);

      /* A fully processed sink clause is subsumed by the folded clause
	 and can be removed from the chain.  */
      remove = true;

    next_ordered_clause:
      if (remove)
	*list_p = OMP_CLAUSE_CHAIN (c);
      else
	list_p = &OMP_CLAUSE_CHAIN (c);
    }

  if (folded_dep)
    {
      if (neg_offset_p)
	folded_deps[0] = -folded_deps[0];

      tree itype = TREE_TYPE (TREE_VALUE (OMP_CLAUSE_DECL (folded_dep)));
      if (POINTER_TYPE_P (itype))
	itype = sizetype;

      /* Store the folded first-dimension offset back into the surviving
	 clause and make it the head of the clause chain.  */
      TREE_PURPOSE (OMP_CLAUSE_DECL (folded_dep))
	= wide_int_to_tree (itype, folded_deps[0]);
      OMP_CLAUSE_CHAIN (folded_dep) = gimple_omp_ordered_clauses (ord_stmt);
      *gimple_omp_ordered_clauses_ptr (ord_stmt) = folded_dep;
    }

 lower_omp_ordered_ret:

  /* Ordered without clauses is #pragma omp ordered threads, while we want
     a nop instead if we remove all clauses.  */
  if (gimple_omp_ordered_clauses (ord_stmt) == NULL_TREE)
    gsi_replace (gsi_p, gimple_build_nop (), true);
}
8478
8479
/* Expand code for an OpenMP ordered directive.  Wraps the body in a bind
   bracketed by GOMP_ordered_start/GOMP_ordered_end calls (or the SIMD
   internal-function equivalents), with an extra per-SIMT-lane loop when
   the region may be offloaded to a SIMT device.  */

static void
lower_omp_ordered (gimple_stmt_iterator *gsi_p, omp_context *ctx)
{
  tree block;
  gimple *stmt = gsi_stmt (*gsi_p), *g;
  gomp_ordered *ord_stmt = as_a <gomp_ordered *> (stmt);
  gcall *x;
  gbind *bind;
  bool simd = omp_find_clause (gimple_omp_ordered_clauses (ord_stmt),
			       OMP_CLAUSE_SIMD);
  /* FIXME: this should check presence of OMP_CLAUSE__SIMT_ on the enclosing
     loop.  */
  bool maybe_simt
    = simd && omp_maybe_offloaded_ctx (ctx) && omp_max_simt_vf () > 1;
  bool threads = omp_find_clause (gimple_omp_ordered_clauses (ord_stmt),
				  OMP_CLAUSE_THREADS);

  if (omp_find_clause (gimple_omp_ordered_clauses (ord_stmt),
		       OMP_CLAUSE_DEPEND))
    {
      /* FIXME: This needs to be moved to the expansion to verify various
	 conditions only testable on cfg with dominators computed, and also
	 all the depend clauses to be merged still might need to be available
	 for the runtime checks.  */
      if (0)
	lower_omp_ordered_clauses (gsi_p, ord_stmt, ctx);
      return;
    }

  push_gimplify_context ();

  block = make_node (BLOCK);
  bind = gimple_build_bind (NULL, NULL, block);
  gsi_replace (gsi_p, bind, true);
  gimple_bind_add_stmt (bind, stmt);

  /* Emit the start call: the SIMD internal fn for ordered simd, else
     the libgomp GOMP_ordered_start entry point.  */
  if (simd)
    {
      x = gimple_build_call_internal (IFN_GOMP_SIMD_ORDERED_START, 1,
				      build_int_cst (NULL_TREE, threads));
      cfun->has_simduid_loops = true;
    }
  else
    x = gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_ORDERED_START),
			   0);
  gimple_bind_add_stmt (bind, x);

  /* For possibly SIMT-offloaded regions, run the body once per SIMT lane,
     in lane order: COUNTER starts at this thread's lane number, and the
     body is predicated by GOMP_SIMT_ORDERED_PRED so only the matching
     lane executes it each trip.  */
  tree counter = NULL_TREE, test = NULL_TREE, body = NULL_TREE;
  if (maybe_simt)
    {
      counter = create_tmp_var (integer_type_node);
      g = gimple_build_call_internal (IFN_GOMP_SIMT_LANE, 0);
      gimple_call_set_lhs (g, counter);
      gimple_bind_add_stmt (bind, g);

      body = create_artificial_label (UNKNOWN_LOCATION);
      test = create_artificial_label (UNKNOWN_LOCATION);
      gimple_bind_add_stmt (bind, gimple_build_label (body));

      tree simt_pred = create_tmp_var (integer_type_node);
      g = gimple_build_call_internal (IFN_GOMP_SIMT_ORDERED_PRED, 1, counter);
      gimple_call_set_lhs (g, simt_pred);
      gimple_bind_add_stmt (bind, g);

      tree t = create_artificial_label (UNKNOWN_LOCATION);
      g = gimple_build_cond (EQ_EXPR, simt_pred, integer_zero_node, t, test);
      gimple_bind_add_stmt (bind, g);

      gimple_bind_add_stmt (bind, gimple_build_label (t));
    }
  lower_omp (gimple_omp_body_ptr (stmt), ctx);
  gimple_omp_set_body (stmt, maybe_catch_exception (gimple_omp_body (stmt)));
  gimple_bind_add_seq (bind, gimple_omp_body (stmt));
  gimple_omp_set_body (stmt, NULL);

  if (maybe_simt)
    {
      /* Loop back-edge: decrement COUNTER and keep iterating while any
	 lane still has a non-negative counter (GOMP_SIMT_VOTE_ANY).  */
      gimple_bind_add_stmt (bind, gimple_build_label (test));
      g = gimple_build_assign (counter, MINUS_EXPR, counter, integer_one_node);
      gimple_bind_add_stmt (bind, g);

      tree c = build2 (GE_EXPR, boolean_type_node, counter, integer_zero_node);
      tree nonneg = create_tmp_var (integer_type_node);
      gimple_seq tseq = NULL;
      gimplify_assign (nonneg, fold_convert (integer_type_node, c), &tseq);
      gimple_bind_add_seq (bind, tseq);

      g = gimple_build_call_internal (IFN_GOMP_SIMT_VOTE_ANY, 1, nonneg);
      gimple_call_set_lhs (g, nonneg);
      gimple_bind_add_stmt (bind, g);

      tree end = create_artificial_label (UNKNOWN_LOCATION);
      g = gimple_build_cond (NE_EXPR, nonneg, integer_zero_node, body, end);
      gimple_bind_add_stmt (bind, g);

      gimple_bind_add_stmt (bind, gimple_build_label (end));
    }
  /* Emit the matching end call.  */
  if (simd)
    x = gimple_build_call_internal (IFN_GOMP_SIMD_ORDERED_END, 1,
				    build_int_cst (NULL_TREE, threads));
  else
    x = gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_ORDERED_END),
			   0);
  gimple_bind_add_stmt (bind, x);

  gimple_bind_add_stmt (bind, gimple_build_omp_return (true));

  pop_gimplify_context (bind);

  gimple_bind_append_vars (bind, ctx->block_vars);
  BLOCK_VARS (block) = gimple_bind_vars (bind);
}
8594
8595
/* Expand code for an OpenMP scan directive and the structured block
   before the scan directive.  The GIMPLE_OMP_SCAN without clauses marks
   the boundary of the input phase; the one with inclusive/exclusive
   clauses marks the scan phase.  Only simd and worksharing-loop
   enclosing contexts are handled here; anything else with clauses gets
   a sorry.  */

static void
lower_omp_scan (gimple_stmt_iterator *gsi_p, omp_context *ctx)
{
  gimple *stmt = gsi_stmt (*gsi_p);
  bool has_clauses
    = gimple_omp_scan_clauses (as_a <gomp_scan *> (stmt)) != NULL;
  tree lane = NULL_TREE;
  /* Statements to emit before the scan point (or before the loop body
     for worksharing-loop scans).  */
  gimple_seq before = NULL;
  omp_context *octx = ctx->outer;
  gcc_assert (octx);
  if (octx->scan_exclusive && !has_clauses)
    {
      gimple_stmt_iterator gsi2 = *gsi_p;
      gsi_next (&gsi2);
      gimple *stmt2 = gsi_stmt (gsi2);
      /* For exclusive scan, swap GIMPLE_OMP_SCAN without clauses
	 with following GIMPLE_OMP_SCAN with clauses, so that input_phase,
	 the one with exclusive clause(s), comes first.  */
      if (stmt2
	  && gimple_code (stmt2) == GIMPLE_OMP_SCAN
	  && gimple_omp_scan_clauses (as_a <gomp_scan *> (stmt2)) != NULL)
	{
	  gsi_remove (gsi_p, false);
	  gsi_insert_after (gsi_p, stmt, GSI_SAME_STMT);
	  ctx = maybe_lookup_ctx (stmt2);
	  gcc_assert (ctx);
	  /* Recurse to lower the now-first scan (the claused one).  */
	  lower_omp_scan (gsi_p, ctx);
	  return;
	}
    }

  /* For inclusive scan the clauseless GIMPLE_OMP_SCAN ends the input
     phase; after the exclusive-scan swap above the clauses/phase pairing
     is inverted, hence the XOR.  */
  bool input_phase = has_clauses ^ octx->scan_inclusive;
  bool is_simd = (gimple_code (octx->stmt) == GIMPLE_OMP_FOR
		  && (gimple_omp_for_kind (octx->stmt) & GF_OMP_FOR_SIMD)
		  && !gimple_omp_for_combined_into_p (octx->stmt));
  bool is_for = (gimple_code (octx->stmt) == GIMPLE_OMP_FOR
		 && gimple_omp_for_kind (octx->stmt) == GF_OMP_FOR_KIND_FOR
		 && !gimple_omp_for_combined_p (octx->stmt));
  if (is_simd)
    if (tree c = omp_find_clause (gimple_omp_for_clauses (octx->stmt),
				  OMP_CLAUSE__SIMDUID_))
      {
	tree uid = OMP_CLAUSE__SIMDUID__DECL (c);
	lane = create_tmp_var (unsigned_type_node);
	/* Second GOMP_SIMD_LANE argument distinguishes the phase:
	   1 input phase, 2 inclusive scan phase, 3 exclusive scan
	   phase.  */
	tree t = build_int_cst (integer_type_node,
				input_phase ? 1
				: octx->scan_inclusive ? 2 : 3);
	gimple *g
	  = gimple_build_call_internal (IFN_GOMP_SIMD_LANE, 2, uid, t);
	gimple_call_set_lhs (g, lane);
	gimple_seq_add_stmt (&before, g);
      }

  if (is_simd || is_for)
    {
      for (tree c = gimple_omp_for_clauses (octx->stmt);
	   c; c = OMP_CLAUSE_CHAIN (c))
	if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
	    && OMP_CLAUSE_REDUCTION_INSCAN (c))
	  {
	    location_t clause_loc = OMP_CLAUSE_LOCATION (c);
	    tree var = OMP_CLAUSE_DECL (c);
	    tree new_var = lookup_decl (var, octx);
	    tree val = new_var;
	    /* Roles (as used below): VAR2 is the outer copy or simd
	       array element merged into, VAR3 the separate identity
	       element variable if one was created, VAR4 a temporary
	       for exclusive scans, LANE0 the original lane index of a
	       simd-array value expr -- NOTE(review): roles inferred
	       from the uses below; confirm against the clause-handling
	       code that created these decls.  */
	    tree var2 = NULL_TREE;
	    tree var3 = NULL_TREE;
	    tree var4 = NULL_TREE;
	    tree lane0 = NULL_TREE;
	    tree new_vard = new_var;
	    if (omp_is_reference (var))
	      {
		new_var = build_simple_mem_ref_loc (clause_loc, new_var);
		val = new_var;
	      }
	    if (DECL_HAS_VALUE_EXPR_P (new_vard))
	      {
		val = DECL_VALUE_EXPR (new_vard);
		if (new_vard != new_var)
		  {
		    gcc_assert (TREE_CODE (val) == ADDR_EXPR);
		    val = TREE_OPERAND (val, 0);
		  }
		if (TREE_CODE (val) == ARRAY_REF
		    && VAR_P (TREE_OPERAND (val, 0)))
		  {
		    tree v = TREE_OPERAND (val, 0);
		    if (lookup_attribute ("omp simd array",
					  DECL_ATTRIBUTES (v)))
		      {
			/* The value expr indexes an "omp simd array";
			   redirect it to the phase-specific LANE and
			   remember the original index in LANE0.  */
			val = unshare_expr (val);
			lane0 = TREE_OPERAND (val, 1);
			TREE_OPERAND (val, 1) = lane;
			var2 = lookup_decl (v, octx);
			if (octx->scan_exclusive)
			  var4 = lookup_decl (var2, octx);
			if (input_phase
			    && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
			  var3 = maybe_lookup_decl (var4 ? var4 : var2, octx);
			if (!input_phase)
			  {
			    var2 = build4 (ARRAY_REF, TREE_TYPE (val),
					   var2, lane, NULL_TREE, NULL_TREE);
			    TREE_THIS_NOTRAP (var2) = 1;
			    if (octx->scan_exclusive)
			      {
				var4 = build4 (ARRAY_REF, TREE_TYPE (val),
					       var4, lane, NULL_TREE,
					       NULL_TREE);
				TREE_THIS_NOTRAP (var4) = 1;
			      }
			  }
			else
			  var2 = val;
		      }
		  }
		gcc_assert (var2);
	      }
	    else
	      {
		var2 = build_outer_var_ref (var, octx);
		if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
		  {
		    var3 = maybe_lookup_decl (new_vard, octx);
		    if (var3 == new_vard || var3 == NULL_TREE)
		      var3 = NULL_TREE;
		    else if (is_simd && octx->scan_exclusive && !input_phase)
		      {
			var4 = maybe_lookup_decl (var3, octx);
			if (var4 == var3 || var4 == NULL_TREE)
			  {
			    if (TREE_ADDRESSABLE (TREE_TYPE (new_var)))
			      {
				var4 = var3;
				var3 = NULL_TREE;
			      }
			    else
			      var4 = NULL_TREE;
			  }
		      }
		  }
		if (is_simd
		    && octx->scan_exclusive
		    && !input_phase
		    && var4 == NULL_TREE)
		  var4 = create_tmp_var (TREE_TYPE (val));
	      }
	    if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
	      {
		/* User-defined reduction: lower the recorded init/merge
		   sequences with the placeholders temporarily bound to
		   the scan variables via DECL_VALUE_EXPR.  */
		tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
		if (input_phase)
		  {
		    if (var3)
		      {
			/* If we've added a separate identity element
			   variable, copy it over into val.  */
			tree x = lang_hooks.decls.omp_clause_assign_op (c, val,
									var3);
			gimplify_and_add (x, &before);
		      }
		    else if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c))
		      {
			/* Otherwise, assign to it the identity element.  */
			gimple_seq tseq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c);
			if (is_for)
			  tseq = copy_gimple_seq_and_replace_locals (tseq);
			tree ref = build_outer_var_ref (var, octx);
			tree x = (DECL_HAS_VALUE_EXPR_P (new_vard)
				  ? DECL_VALUE_EXPR (new_vard) : NULL_TREE);
			if (x)
			  {
			    if (new_vard != new_var)
			      val = build_fold_addr_expr_loc (clause_loc, val);
			    SET_DECL_VALUE_EXPR (new_vard, val);
			  }
			SET_DECL_VALUE_EXPR (placeholder, ref);
			DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
			lower_omp (&tseq, octx);
			/* Restore the saved value exprs after lowering.  */
			if (x)
			  SET_DECL_VALUE_EXPR (new_vard, x);
			SET_DECL_VALUE_EXPR (placeholder, NULL_TREE);
			DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
			gimple_seq_add_seq (&before, tseq);
			if (is_simd)
			  OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL;
		      }
		  }
		else if (is_simd)
		  {
		    tree x;
		    if (octx->scan_exclusive)
		      {
			/* Exclusive scan: save the pre-merge value into
			   VAR4 so VAL can later pick it up.  */
			tree v4 = unshare_expr (var4);
			tree v2 = unshare_expr (var2);
			x = lang_hooks.decls.omp_clause_assign_op (c, v4, v2);
			gimplify_and_add (x, &before);
		      }
		    gimple_seq tseq = OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c);
		    x = (DECL_HAS_VALUE_EXPR_P (new_vard)
			 ? DECL_VALUE_EXPR (new_vard) : NULL_TREE);
		    tree vexpr = val;
		    if (x && new_vard != new_var)
		      vexpr = build_fold_addr_expr_loc (clause_loc, val);
		    if (x)
		      SET_DECL_VALUE_EXPR (new_vard, vexpr);
		    SET_DECL_VALUE_EXPR (placeholder, var2);
		    DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
		    lower_omp (&tseq, octx);
		    gimple_seq_add_seq (&before, tseq);
		    OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
		    if (x)
		      SET_DECL_VALUE_EXPR (new_vard, x);
		    SET_DECL_VALUE_EXPR (placeholder, NULL_TREE);
		    DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
		    if (octx->scan_inclusive)
		      {
			x = lang_hooks.decls.omp_clause_assign_op (c, val,
								   var2);
			gimplify_and_add (x, &before);
		      }
		    else if (lane0 == NULL_TREE)
		      {
			x = lang_hooks.decls.omp_clause_assign_op (c, val,
								   var4);
			gimplify_and_add (x, &before);
		      }
		  }
	      }
	    else
	      {
		if (input_phase)
		  {
		    /* input phase.  Set val to initializer before
		       the body.  */
		    tree x = omp_reduction_init (c, TREE_TYPE (new_var));
		    gimplify_assign (val, x, &before);
		  }
		else if (is_simd)
		  {
		    /* scan phase.  */
		    enum tree_code code = OMP_CLAUSE_REDUCTION_CODE (c);
		    /* reduction(-:var) sums up the partial results, so it
		       acts identically to reduction(+:var).  */
		    if (code == MINUS_EXPR)
		      code = PLUS_EXPR;

		    tree x = build2 (code, TREE_TYPE (var2),
				     unshare_expr (var2), unshare_expr (val));
		    if (octx->scan_inclusive)
		      {
			gimplify_assign (unshare_expr (var2), x, &before);
			gimplify_assign (val, var2, &before);
		      }
		    else
		      {
			gimplify_assign (unshare_expr (var4),
					 unshare_expr (var2), &before);
			gimplify_assign (var2, x, &before);
			if (lane0 == NULL_TREE)
			  gimplify_assign (val, var4, &before);
		      }
		  }
	      }
	    if (octx->scan_exclusive && !input_phase && lane0)
	      {
		/* Point the decl's value expr at VAR4 with the original
		   lane index so later uses see the pre-merge value.  */
		tree vexpr = unshare_expr (var4);
		TREE_OPERAND (vexpr, 1) = lane0;
		if (new_vard != new_var)
		  vexpr = build_fold_addr_expr_loc (clause_loc, vexpr);
		SET_DECL_VALUE_EXPR (new_vard, vexpr);
	      }
	  }
    }
  else if (has_clauses)
    sorry_at (gimple_location (stmt),
	      "%<#pragma omp scan%> not supported yet");
  if (!is_for)
    {
      /* For simd (and unsupported contexts) splice the body and the
	 BEFORE sequence inline and drop the GIMPLE_OMP_SCAN itself.  */
      gsi_insert_seq_after (gsi_p, gimple_omp_body (stmt), GSI_SAME_STMT);
      gsi_insert_seq_after (gsi_p, before, GSI_SAME_STMT);
      gsi_replace (gsi_p, gimple_build_nop (), true);
      return;
    }
  /* Worksharing-loop scan: keep the GIMPLE_OMP_SCAN, lower its body and
     prepend the BEFORE sequence to it.  */
  lower_omp (gimple_omp_body_ptr (stmt), octx);
  if (before)
    {
      gimple_stmt_iterator gsi = gsi_start_1 (gimple_omp_body_ptr (stmt));
      gsi_insert_seq_before (&gsi, before, GSI_SAME_STMT);
    }
}
8886
8887
/* Gimplify a GIMPLE_OMP_CRITICAL statement.  This is a relatively simple
   substitution of a couple of function calls.  But in the NAMED case,
   requires that languages coordinate a symbol name.  It is therefore
   best put here in common code.  */

/* Map from a critical section's name to the mutex variable decl created
   for it, so all critical sections with the same name share one mutex.
   Created lazily on first use in lower_omp_critical; the GTY marker
   keeps it alive across garbage collection.  */
static GTY(()) hash_map<tree, tree> *critical_name_mutexes;
8894
/* Lower the GIMPLE_OMP_CRITICAL at *GSI_P within context CTX by wrapping
   its body in a GIMPLE_BIND bracketed by libgomp lock/unlock calls:
   GOMP_critical_name_start/end (&mutex) for a named critical, or
   GOMP_critical_start/end () for the unnamed one.  */
static void
lower_omp_critical (gimple_stmt_iterator *gsi_p, omp_context *ctx)
{
  tree block;
  tree name, lock, unlock;
  gomp_critical *stmt = as_a <gomp_critical *> (gsi_stmt (*gsi_p));
  gbind *bind;
  location_t loc = gimple_location (stmt);
  gimple_seq tbody;

  name = gimple_omp_critical_name (stmt);
  if (name)
    {
      tree decl;

      if (!critical_name_mutexes)
	critical_name_mutexes = hash_map<tree, tree>::create_ggc (10);

      /* Reuse the mutex variable if this name was seen before in this
	 translation unit; otherwise create it now.  */
      tree *n = critical_name_mutexes->get (name);
      if (n == NULL)
	{
	  char *new_str;

	  decl = create_tmp_var_raw (ptr_type_node);

	  /* Name is ".gomp_critical_user_<NAME>"; made public and common
	     so every translation unit using the same critical NAME
	     resolves to a single mutex object.  */
	  new_str = ACONCAT ((".gomp_critical_user_",
			      IDENTIFIER_POINTER (name), NULL));
	  DECL_NAME (decl) = get_identifier (new_str);
	  TREE_PUBLIC (decl) = 1;
	  TREE_STATIC (decl) = 1;
	  DECL_COMMON (decl) = 1;
	  DECL_ARTIFICIAL (decl) = 1;
	  DECL_IGNORED_P (decl) = 1;

	  varpool_node::finalize_decl (decl);

	  critical_name_mutexes->put (name, decl);
	}
      else
	decl = *n;

      /* If '#pragma omp critical' is inside offloaded region or
	 inside function marked as offloadable, the symbol must be
	 marked as offloadable too.  */
      omp_context *octx;
      if (cgraph_node::get (current_function_decl)->offloadable)
	varpool_node::get_create (decl)->offloadable = 1;
      else
	for (octx = ctx->outer; octx; octx = octx->outer)
	  if (is_gimple_omp_offloaded (octx->stmt))
	    {
	      varpool_node::get_create (decl)->offloadable = 1;
	      break;
	    }

      /* Named variants take the address of the mutex as their sole
	 argument.  */
      lock = builtin_decl_explicit (BUILT_IN_GOMP_CRITICAL_NAME_START);
      lock = build_call_expr_loc (loc, lock, 1,
				  build_fold_addr_expr_loc (loc, decl));

      unlock = builtin_decl_explicit (BUILT_IN_GOMP_CRITICAL_NAME_END);
      unlock = build_call_expr_loc (loc, unlock, 1,
				    build_fold_addr_expr_loc (loc, decl));
    }
  else
    {
      /* Unnamed critical: the runtime uses one global lock, no args.  */
      lock = builtin_decl_explicit (BUILT_IN_GOMP_CRITICAL_START);
      lock = build_call_expr_loc (loc, lock, 0);

      unlock = builtin_decl_explicit (BUILT_IN_GOMP_CRITICAL_END);
      unlock = build_call_expr_loc (loc, unlock, 0);
    }

  push_gimplify_context ();

  /* Replace STMT at the iterator with a fresh bind first, so STMT is no
     longer a member of the outer sequence, then re-add it inside the
     bind.  */
  block = make_node (BLOCK);
  bind = gimple_build_bind (NULL, NULL, block);
  gsi_replace (gsi_p, bind, true);
  gimple_bind_add_stmt (bind, stmt);

  /* Emit the lock call before the body.  */
  tbody = gimple_bind_body (bind);
  gimplify_and_add (lock, &tbody);
  gimple_bind_set_body (bind, tbody);

  /* Lower the body, wrap it for exception handling (see
     maybe_catch_exception), then splice it into the bind and detach it
     from STMT.  */
  lower_omp (gimple_omp_body_ptr (stmt), ctx);
  gimple_omp_set_body (stmt, maybe_catch_exception (gimple_omp_body (stmt)));
  gimple_bind_add_seq (bind, gimple_omp_body (stmt));
  gimple_omp_set_body (stmt, NULL);

  /* Emit the unlock call after the body.  */
  tbody = gimple_bind_body (bind);
  gimplify_and_add (unlock, &tbody);
  gimple_bind_set_body (bind, tbody);

  gimple_bind_add_stmt (bind, gimple_build_omp_return (true));

  pop_gimplify_context (bind);
  gimple_bind_append_vars (bind, ctx->block_vars);
  BLOCK_VARS (block) = gimple_bind_vars (bind);
}
8993
8994 /* A subroutine of lower_omp_for. Generate code to emit the predicate
8995 for a lastprivate clause. Given a loop control predicate of (V
8996 cond N2), we gate the clause on (!(V cond N2)). The lowered form
8997 is appended to *DLIST, iterator initialization is appended to
8998 *BODY_P. *CLIST is for lastprivate(conditional:) code that needs
8999 to be emitted in a critical section. */
9000
static void
lower_omp_for_lastprivate (struct omp_for_data *fd, gimple_seq *body_p,
			   gimple_seq *dlist, gimple_seq *clist,
			   struct omp_context *ctx)
{
  tree clauses, cond, vinit;
  enum tree_code cond_code;
  gimple_seq stmts;

  /* Negate the loop's continuation test: lastprivate copy-out runs when
     the iteration variable has run past the bound, i.e. !(V cond N2).  */
  cond_code = fd->loop.cond_code;
  cond_code = cond_code == LT_EXPR ? GE_EXPR : LE_EXPR;

  /* When possible, use a strict equality expression.  This can let VRP
     type optimizations deduce the value and remove a copy.  */
  if (tree_fits_shwi_p (fd->loop.step))
    {
      HOST_WIDE_INT step = tree_to_shwi (fd->loop.step);
      if (step == 1 || step == -1)
	cond_code = EQ_EXPR;
    }

  if (gimple_omp_for_kind (fd->for_stmt) == GF_OMP_FOR_KIND_GRID_LOOP
      || gimple_omp_for_grid_phony (fd->for_stmt))
    /* Grid-lowered loops compute their own last-iteration predicate.  */
    cond = omp_grid_lastprivate_predicate (fd);
  else
    {
      tree n2 = fd->loop.n2;
      if (fd->collapse > 1
	  && TREE_CODE (n2) != INTEGER_CST
	  && gimple_omp_for_combined_into_p (fd->for_stmt))
	{
	  /* For a collapsed loop nested in a combined construct, the
	     real upper bound lives in a _looptemp_ clause of the
	     enclosing taskreg (parallel/task) construct; find that
	     context first.  */
	  struct omp_context *taskreg_ctx = NULL;
	  if (gimple_code (ctx->outer->stmt) == GIMPLE_OMP_FOR)
	    {
	      gomp_for *gfor = as_a <gomp_for *> (ctx->outer->stmt);
	      if (gimple_omp_for_kind (gfor) == GF_OMP_FOR_KIND_FOR
		  || gimple_omp_for_kind (gfor) == GF_OMP_FOR_KIND_DISTRIBUTE)
		{
		  if (gimple_omp_for_combined_into_p (gfor))
		    {
		      /* Bound is in the outer-outer parallel.  */
		      gcc_assert (ctx->outer->outer
				  && is_parallel_ctx (ctx->outer->outer));
		      taskreg_ctx = ctx->outer->outer;
		    }
		  else
		    {
		      /* Outer GIMPLE_OMP_FOR carries the bound itself;
			 re-extract it.  */
		      struct omp_for_data outer_fd;
		      omp_extract_for_data (gfor, &outer_fd, NULL);
		      n2 = fold_convert (TREE_TYPE (n2), outer_fd.loop.n2);
		    }
		}
	      else if (gimple_omp_for_kind (gfor) == GF_OMP_FOR_KIND_TASKLOOP)
		taskreg_ctx = ctx->outer->outer;
	    }
	  else if (is_taskreg_ctx (ctx->outer))
	    taskreg_ctx = ctx->outer;
	  if (taskreg_ctx)
	    {
	      int i;
	      tree taskreg_clauses
		= gimple_omp_taskreg_clauses (taskreg_ctx->stmt);
	      tree innerc = omp_find_clause (taskreg_clauses,
					     OMP_CLAUSE__LOOPTEMP_);
	      gcc_assert (innerc);
	      /* Skip the first 1 + fd->collapse _looptemp_ clauses; the
		 one after those (if present) holds the bound.  */
	      for (i = 0; i < fd->collapse; i++)
		{
		  innerc = omp_find_clause (OMP_CLAUSE_CHAIN (innerc),
					    OMP_CLAUSE__LOOPTEMP_);
		  gcc_assert (innerc);
		}
	      innerc = omp_find_clause (OMP_CLAUSE_CHAIN (innerc),
					OMP_CLAUSE__LOOPTEMP_);
	      if (innerc)
		n2 = fold_convert (TREE_TYPE (n2),
				   lookup_decl (OMP_CLAUSE_DECL (innerc),
						taskreg_ctx));
	    }
	}
      cond = build2 (cond_code, boolean_type_node, fd->loop.v, n2);
    }

  clauses = gimple_omp_for_clauses (fd->for_stmt);
  stmts = NULL;
  lower_lastprivate_clauses (clauses, cond, body_p, &stmts, clist, ctx);
  if (!gimple_seq_empty_p (stmts))
    {
      /* Prepend the guarded copy-out code to *DLIST.  */
      gimple_seq_add_seq (&stmts, *dlist);
      *dlist = stmts;

      /* Optimize: v = 0; is usually cheaper than v = some_other_constant.  */
      vinit = fd->loop.n1;
      if (cond_code == EQ_EXPR
	  && tree_fits_shwi_p (fd->loop.n2)
	  && ! integer_zerop (fd->loop.n2))
	vinit = build_int_cst (TREE_TYPE (fd->loop.v), 0);
      else
	vinit = unshare_expr (vinit);

      /* Initialize the iterator variable, so that threads that don't execute
	 any iterations don't execute the lastprivate clauses by accident.  */
      gimplify_assign (fd->loop.v, vinit, body_p);
    }
}
9104
9105 /* Callback for walk_gimple_seq. Find #pragma omp scan statement. */
9106
9107 static tree
9108 omp_find_scan (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
9109 struct walk_stmt_info *wi)
9110 {
9111 gimple *stmt = gsi_stmt (*gsi_p);
9112
9113 *handled_ops_p = true;
9114 switch (gimple_code (stmt))
9115 {
9116 WALK_SUBSTMTS;
9117
9118 case GIMPLE_OMP_SCAN:
9119 *(gimple_stmt_iterator *) (wi->info) = *gsi_p;
9120 return integer_zero_node;
9121 default:
9122 break;
9123 }
9124 return NULL;
9125 }
9126
9127 /* Helper function for lower_omp_for, add transformations for a worksharing
9128 loop with scan directives inside of it.
9129 For worksharing loop not combined with simd, transform:
9130 #pragma omp for reduction(inscan,+:r) private(i)
9131 for (i = 0; i < n; i = i + 1)
9132 {
9133 {
9134 update (r);
9135 }
9136 #pragma omp scan inclusive(r)
9137 {
9138 use (r);
9139 }
9140 }
9141
9142 into two worksharing loops + code to merge results:
9143
9144 num_threads = omp_get_num_threads ();
9145 thread_num = omp_get_thread_num ();
9146 if (thread_num == 0) goto <D.2099>; else goto <D.2100>;
9147 <D.2099>:
9148 var2 = r;
9149 goto <D.2101>;
9150 <D.2100>:
9151 // For UDRs this is UDR init, or if ctors are needed, copy from
9152 // var3 that has been constructed to contain the neutral element.
9153 var2 = 0;
9154 <D.2101>:
9155 ivar = 0;
9156 // The _scantemp_ clauses will arrange for rpriva to be initialized to
9157 // a shared array with num_threads elements and rprivb to a local array
9158 // number of elements equal to the number of (contiguous) iterations the
9159 // current thread will perform. controlb and controlp variables are
9160 // temporaries to handle deallocation of rprivb at the end of second
9161 // GOMP_FOR.
9162 #pragma omp for _scantemp_(rpriva) _scantemp_(rprivb) _scantemp_(controlb) \
9163 _scantemp_(controlp) reduction(inscan,+:r) private(i) nowait
9164 for (i = 0; i < n; i = i + 1)
9165 {
9166 {
9167 // For UDRs this is UDR init or copy from var3.
9168 r = 0;
9169 // This is the input phase from user code.
9170 update (r);
9171 }
9172 {
9173 // For UDRs this is UDR merge.
9174 var2 = var2 + r;
9175 // Rather than handing it over to the user, save to local thread's
9176 // array.
9177 rprivb[ivar] = var2;
9178 // For exclusive scan, the above two statements are swapped.
9179 ivar = ivar + 1;
9180 }
9181 }
9182 // And remember the final value from this thread's into the shared
9183 // rpriva array.
9184 rpriva[(sizetype) thread_num] = var2;
9185 // If more than one thread, compute using Work-Efficient prefix sum
9186 // the inclusive parallel scan of the rpriva array.
9187 if (num_threads > 1) goto <D.2102>; else goto <D.2103>;
9188 <D.2102>:
9189 GOMP_barrier ();
9190 down = 0;
9191 k = 1;
9192 num_threadsu = (unsigned int) num_threads;
9193      thread_nump1 = (unsigned int) thread_num + 1;
9194 <D.2108>:
9195 twok = k << 1;
9196 if (twok > num_threadsu) goto <D.2110>; else goto <D.2111>;
9197 <D.2110>:
9198 down = 4294967295;
9199 k = k >> 1;
9200 if (k == num_threadsu) goto <D.2112>; else goto <D.2111>;
9201 <D.2112>:
9202 k = k >> 1;
9203 <D.2111>:
9204 twok = k << 1;
9205 cplx = .MUL_OVERFLOW (thread_nump1, twok);
9206 mul = REALPART_EXPR <cplx>;
9207 ovf = IMAGPART_EXPR <cplx>;
9208 if (ovf == 0) goto <D.2116>; else goto <D.2117>;
9209 <D.2116>:
9210 andv = k & down;
9211 andvm1 = andv + 4294967295;
9212 l = mul + andvm1;
9213 if (l < num_threadsu) goto <D.2120>; else goto <D.2117>;
9214 <D.2120>:
9215 // For UDRs this is UDR merge, performed using var2 variable as temporary,
9216 // i.e. var2 = rpriva[l - k]; UDR merge (var2, rpriva[l]); rpriva[l] = var2;
9217 rpriva[l] = rpriva[l - k] + rpriva[l];
9218 <D.2117>:
9219 if (down == 0) goto <D.2121>; else goto <D.2122>;
9220 <D.2121>:
9221 k = k << 1;
9222 goto <D.2123>;
9223 <D.2122>:
9224 k = k >> 1;
9225 <D.2123>:
9226 GOMP_barrier ();
9227 if (k != 0) goto <D.2108>; else goto <D.2103>;
9228 <D.2103>:
9229 if (thread_num == 0) goto <D.2124>; else goto <D.2125>;
9230 <D.2124>:
9231 // For UDRs this is UDR init or copy from var3.
9232 var2 = 0;
9233 goto <D.2126>;
9234 <D.2125>:
9235 var2 = rpriva[thread_num - 1];
9236 <D.2126>:
9237 ivar = 0;
9238 #pragma omp for _scantemp_(controlb) _scantemp_(controlp) \
9239 reduction(inscan,+:r) private(i)
9240 for (i = 0; i < n; i = i + 1)
9241 {
9242 {
9243 // For UDRs, this is r = var2; UDR merge (r, rprivb[ivar]);
9244 r = var2 + rprivb[ivar];
9245 }
9246 {
9247 // This is the scan phase from user code.
9248 use (r);
9249 // Plus a bump of the iterator.
9250 ivar = ivar + 1;
9251 }
9252 } */
9253
/* Implement the transformation sketched in the comment above: BODY_P
   receives the rewritten construct, *DLIST the destructor/copy-out
   sequence, STMT is the original worksharing loop, FD its extracted
   loop data and CTX its lowering context.  */
static void
lower_omp_for_scan (gimple_seq *body_p, gimple_seq *dlist, gomp_for *stmt,
		    struct omp_for_data *fd, omp_context *ctx)
{
  gcc_assert (ctx->scan_inclusive || ctx->scan_exclusive);

  /* Locate the GIMPLE_OMP_SCAN separator in the loop body; the
     statement before it is the input phase, the scan's own body is the
     scan phase.  */
  gimple_seq body = gimple_omp_body (stmt);
  gimple_stmt_iterator input1_gsi = gsi_none ();
  struct walk_stmt_info wi;
  memset (&wi, 0, sizeof (wi));
  wi.val_only = true;
  wi.info = (void *) &input1_gsi;
  walk_gimple_seq_mod (&body, omp_find_scan, NULL, &wi);
  gcc_assert (!gsi_end_p (input1_gsi));

  gimple *input_stmt1 = gsi_stmt (input1_gsi);
  gimple_stmt_iterator gsi = input1_gsi;
  gsi_next (&gsi);
  gimple_stmt_iterator scan1_gsi = gsi;
  gimple *scan_stmt1 = gsi_stmt (gsi);
  gcc_assert (scan_stmt1 && gimple_code (scan_stmt1) == GIMPLE_OMP_SCAN);

  /* Detach the phase bodies before copying, so the copy gets empty
     placeholders, then restore the input phase on the original.  */
  gimple_seq input_body = gimple_omp_body (input_stmt1);
  gimple_seq scan_body = gimple_omp_body (scan_stmt1);
  gimple_omp_set_body (input_stmt1, NULL);
  gimple_omp_set_body (scan_stmt1, NULL);
  gimple_omp_set_body (stmt, NULL);

  /* NEW_STMT/NEW_BODY become the second worksharing loop (scan phase);
     STMT/BODY keep the first (input phase).  */
  gomp_for *new_stmt = as_a <gomp_for *> (gimple_copy (stmt));
  gimple_seq new_body = copy_gimple_seq_and_replace_locals (body);
  gimple_omp_set_body (stmt, body);
  gimple_omp_set_body (input_stmt1, input_body);

  /* Find the copied scan separator in the duplicated body.  */
  gimple_stmt_iterator input2_gsi = gsi_none ();
  memset (&wi, 0, sizeof (wi));
  wi.val_only = true;
  wi.info = (void *) &input2_gsi;
  walk_gimple_seq_mod (&new_body, omp_find_scan, NULL, &wi);
  gcc_assert (!gsi_end_p (input2_gsi));

  gimple *input_stmt2 = gsi_stmt (input2_gsi);
  gsi = input2_gsi;
  gsi_next (&gsi);
  gimple_stmt_iterator scan2_gsi = gsi;
  gimple *scan_stmt2 = gsi_stmt (gsi);
  gcc_assert (scan_stmt2 && gimple_code (scan_stmt2) == GIMPLE_OMP_SCAN);
  gimple_omp_set_body (scan_stmt2, scan_body);

  /* num_threads = omp_get_num_threads (); thread_num =
     omp_get_thread_num ();  */
  tree num_threads = create_tmp_var (integer_type_node);
  tree thread_num = create_tmp_var (integer_type_node);
  tree nthreads_decl = builtin_decl_explicit (BUILT_IN_OMP_GET_NUM_THREADS);
  tree threadnum_decl = builtin_decl_explicit (BUILT_IN_OMP_GET_THREAD_NUM);
  gimple *g = gimple_build_call (nthreads_decl, 0);
  gimple_call_set_lhs (g, num_threads);
  gimple_seq_add_stmt (body_p, g);
  g = gimple_build_call (threadnum_decl, 0);
  gimple_call_set_lhs (g, thread_num);
  gimple_seq_add_stmt (body_p, g);

  /* IVAR indexes the per-thread rprivb array; K and L drive the
     prefix-sum combining loop.  CP1/CP2 accumulate the _scantemp_
     clauses for the first and second loop respectively.  */
  tree ivar = create_tmp_var (sizetype);
  tree new_clauses1 = NULL_TREE, new_clauses2 = NULL_TREE;
  tree *cp1 = &new_clauses1, *cp2 = &new_clauses2;
  tree k = create_tmp_var (unsigned_type_node);
  tree l = create_tmp_var (unsigned_type_node);

  /* Sequences built per reduction clause and emitted at the spots shown
     in the transformation sketch above (ctors, thread-0 vs other-thread
     init for each loop, scan-phase code, merge after loop 1, final
     copy-out, prefix-sum combining step).  */
  gimple_seq clist = NULL, mdlist = NULL;
  gimple_seq thr01_list = NULL, thrn1_list = NULL;
  gimple_seq thr02_list = NULL, thrn2_list = NULL;
  gimple_seq scan1_list = NULL, input2_list = NULL;
  gimple_seq last_list = NULL, reduc_list = NULL;
  for (tree c = gimple_omp_for_clauses (stmt); c; c = OMP_CLAUSE_CHAIN (c))
    if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
	&& OMP_CLAUSE_REDUCTION_INSCAN (c))
      {
	location_t clause_loc = OMP_CLAUSE_LOCATION (c);
	tree var = OMP_CLAUSE_DECL (c);
	tree new_var = lookup_decl (var, ctx);
	tree var3 = NULL_TREE;
	tree new_vard = new_var;
	if (omp_is_reference (var))
	  new_var = build_simple_mem_ref_loc (clause_loc, new_var);
	if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
	  {
	    /* VAR3, if distinct, holds a constructed copy of the
	       neutral element for UDRs.  */
	    var3 = maybe_lookup_decl (new_vard, ctx);
	    if (var3 == new_vard)
	      var3 = NULL_TREE;
	  }

	/* RPRIVA: shared array with num_threads elements; RPRIVB:
	   per-thread array (allocated via the _SCANTEMP__ALLOC flag),
	   see the sketch above.  */
	tree ptype = build_pointer_type (TREE_TYPE (new_var));
	tree rpriva = create_tmp_var (ptype);
	tree nc = build_omp_clause (clause_loc, OMP_CLAUSE__SCANTEMP_);
	OMP_CLAUSE_DECL (nc) = rpriva;
	*cp1 = nc;
	cp1 = &OMP_CLAUSE_CHAIN (nc);

	tree rprivb = create_tmp_var (ptype);
	nc = build_omp_clause (clause_loc, OMP_CLAUSE__SCANTEMP_);
	OMP_CLAUSE_DECL (nc) = rprivb;
	OMP_CLAUSE__SCANTEMP__ALLOC (nc) = 1;
	*cp1 = nc;
	cp1 = &OMP_CLAUSE_CHAIN (nc);

	/* VAR2: the running per-thread accumulator.  */
	tree var2 = create_tmp_var_raw (TREE_TYPE (new_var));
	if (new_vard != new_var)
	  TREE_ADDRESSABLE (var2) = 1;
	gimple_add_tmp_var (var2);

	/* rpriva[thread_num]  */
	tree x = fold_convert_loc (clause_loc, sizetype, thread_num);
	x = fold_build2_loc (clause_loc, MULT_EXPR, sizetype, x,
			     TYPE_SIZE_UNIT (TREE_TYPE (ptype)));
	x = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (rpriva), rpriva, x);
	tree rpriva_ref = build_simple_mem_ref_loc (clause_loc, x);

	/* rpriva[thread_num - 1]  */
	x = fold_build2_loc (clause_loc, PLUS_EXPR, integer_type_node,
			     thread_num, integer_minus_one_node);
	x = fold_convert_loc (clause_loc, sizetype, x);
	x = fold_build2_loc (clause_loc, MULT_EXPR, sizetype, x,
			     TYPE_SIZE_UNIT (TREE_TYPE (ptype)));
	x = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (rpriva), rpriva, x);
	tree rprivam1_ref = build_simple_mem_ref_loc (clause_loc, x);

	/* rpriva[l]  */
	x = fold_convert_loc (clause_loc, sizetype, l);
	x = fold_build2_loc (clause_loc, MULT_EXPR, sizetype, x,
			     TYPE_SIZE_UNIT (TREE_TYPE (ptype)));
	x = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (rpriva), rpriva, x);
	tree rprival_ref = build_simple_mem_ref_loc (clause_loc, x);

	/* rpriva[l - k]  */
	x = fold_build2_loc (clause_loc, MINUS_EXPR, unsigned_type_node, l, k);
	x = fold_convert_loc (clause_loc, sizetype, x);
	x = fold_build2_loc (clause_loc, MULT_EXPR, sizetype, x,
			     TYPE_SIZE_UNIT (TREE_TYPE (ptype)));
	x = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (rpriva), rpriva, x);
	tree rprivalmk_ref = build_simple_mem_ref_loc (clause_loc, x);

	/* rprivb[ivar]  */
	x = fold_build2_loc (clause_loc, MULT_EXPR, sizetype, ivar,
			     TYPE_SIZE_UNIT (TREE_TYPE (ptype)));
	x = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (rprivb), rprivb, x);
	tree rprivb_ref = build_simple_mem_ref_loc (clause_loc, x);

	if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
	  {
	    /* User-defined reduction: use the UDR's ctor/init/merge
	       sequences, re-lowered with placeholder and VAR decl value
	       exprs redirected to the temporaries built above.  */
	    tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
	    tree val = var2;

	    x = lang_hooks.decls.omp_clause_default_ctor
		  (c, var2, build_outer_var_ref (var, ctx));
	    if (x)
	      gimplify_and_add (x, &clist);

	    /* Thread 0 before loop 1: var2 = orig value.  */
	    x = build_outer_var_ref (var, ctx);
	    x = lang_hooks.decls.omp_clause_assign_op (c, var2, x);
	    gimplify_and_add (x, &thr01_list);

	    tree y = (DECL_HAS_VALUE_EXPR_P (new_vard)
		      ? DECL_VALUE_EXPR (new_vard) : NULL_TREE);
	    if (var3)
	      {
		/* Other threads / thread 0 before loop 2: copy the
		   constructed neutral element.  */
		x = lang_hooks.decls.omp_clause_assign_op (c, var2, var3);
		gimplify_and_add (x, &thrn1_list);
		x = lang_hooks.decls.omp_clause_assign_op (c, var2, var3);
		gimplify_and_add (x, &thr02_list);
	      }
	    else if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c))
	      {
		/* Otherwise, assign to it the identity element.  */
		gimple_seq tseq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c);
		tseq = copy_gimple_seq_and_replace_locals (tseq);

		if (new_vard != new_var)
		  val = build_fold_addr_expr_loc (clause_loc, val);
		SET_DECL_VALUE_EXPR (new_vard, val);
		DECL_HAS_VALUE_EXPR_P (new_vard) = 1;
		SET_DECL_VALUE_EXPR (placeholder, error_mark_node);
		DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
		lower_omp (&tseq, ctx);
		gimple_seq_add_seq (&thrn1_list, tseq);
		tseq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c);
		lower_omp (&tseq, ctx);
		gimple_seq_add_seq (&thr02_list, tseq);
		SET_DECL_VALUE_EXPR (placeholder, NULL_TREE);
		DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
		OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL;
		if (y)
		  SET_DECL_VALUE_EXPR (new_vard, y);
		else
		  {
		    DECL_HAS_VALUE_EXPR_P (new_vard) = 0;
		    SET_DECL_VALUE_EXPR (new_vard, NULL_TREE);
		  }
	      }

	    /* Non-zero threads before loop 2: var2 = rpriva[thread_num-1].  */
	    x = lang_hooks.decls.omp_clause_assign_op (c, var2, rprivam1_ref);
	    gimplify_and_add (x, &thrn2_list);

	    /* Exclusive scan stores rprivb[ivar] before merging.  */
	    if (ctx->scan_exclusive)
	      {
		x = unshare_expr (rprivb_ref);
		x = lang_hooks.decls.omp_clause_assign_op (c, x, var2);
		gimplify_and_add (x, &scan1_list);
	      }

	    /* Merge the iteration's value into var2 (UDR merge with
	       placeholder = var2).  */
	    gimple_seq tseq = OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c);
	    tseq = copy_gimple_seq_and_replace_locals (tseq);
	    SET_DECL_VALUE_EXPR (placeholder, var2);
	    DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
	    lower_omp (&tseq, ctx);
	    gimple_seq_add_seq (&scan1_list, tseq);

	    /* Inclusive scan stores rprivb[ivar] after merging.  */
	    if (ctx->scan_inclusive)
	      {
		x = unshare_expr (rprivb_ref);
		x = lang_hooks.decls.omp_clause_assign_op (c, x, var2);
		gimplify_and_add (x, &scan1_list);
	      }

	    /* After loop 1: rpriva[thread_num] = var2.  */
	    x = unshare_expr (rpriva_ref);
	    x = lang_hooks.decls.omp_clause_assign_op (c, x, var2);
	    gimplify_and_add (x, &mdlist);

	    /* Loop 2 input phase: r = var2; then merge rprivb[ivar].  */
	    x = unshare_expr (new_var);
	    x = lang_hooks.decls.omp_clause_assign_op (c, x, var2);
	    gimplify_and_add (x, &input2_list);

	    val = rprivb_ref;
	    if (new_vard != new_var)
	      val = build_fold_addr_expr_loc (clause_loc, val);

	    tseq = OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c);
	    tseq = copy_gimple_seq_and_replace_locals (tseq);
	    SET_DECL_VALUE_EXPR (new_vard, val);
	    DECL_HAS_VALUE_EXPR_P (new_vard) = 1;
	    DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
	    lower_omp (&tseq, ctx);
	    if (y)
	      SET_DECL_VALUE_EXPR (new_vard, y);
	    else
	      {
		DECL_HAS_VALUE_EXPR_P (new_vard) = 0;
		SET_DECL_VALUE_EXPR (new_vard, NULL_TREE);
	      }
	    SET_DECL_VALUE_EXPR (placeholder, new_var);
	    DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
	    lower_omp (&tseq, ctx);
	    gimple_seq_add_seq (&input2_list, tseq);

	    /* Final copy-out on the last thread: orig = rpriva[...].  */
	    x = build_outer_var_ref (var, ctx);
	    x = lang_hooks.decls.omp_clause_assign_op (c, x, rpriva_ref);
	    gimplify_and_add (x, &last_list);

	    /* Prefix-sum combining step: var2 = rpriva[l - k];
	       merge (var2, rpriva[l]); rpriva[l] = var2;  */
	    x = lang_hooks.decls.omp_clause_assign_op (c, var2, rprivalmk_ref);
	    gimplify_and_add (x, &reduc_list);
	    tseq = OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c);
	    tseq = copy_gimple_seq_and_replace_locals (tseq);
	    val = rprival_ref;
	    if (new_vard != new_var)
	      val = build_fold_addr_expr_loc (clause_loc, val);
	    SET_DECL_VALUE_EXPR (new_vard, val);
	    DECL_HAS_VALUE_EXPR_P (new_vard) = 1;
	    SET_DECL_VALUE_EXPR (placeholder, var2);
	    lower_omp (&tseq, ctx);
	    OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
	    SET_DECL_VALUE_EXPR (placeholder, NULL_TREE);
	    DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
	    if (y)
	      SET_DECL_VALUE_EXPR (new_vard, y);
	    else
	      {
		DECL_HAS_VALUE_EXPR_P (new_vard) = 0;
		SET_DECL_VALUE_EXPR (new_vard, NULL_TREE);
	      }
	    gimple_seq_add_seq (&reduc_list, tseq);
	    x = lang_hooks.decls.omp_clause_assign_op (c, rprival_ref, var2);
	    gimplify_and_add (x, &reduc_list);

	    x = lang_hooks.decls.omp_clause_dtor (c, var2);
	    if (x)
	      gimplify_and_add (x, dlist);
	  }
	else
	  {
	    /* Plain reduction operator: same structure as the UDR case
	       but with direct assignments and a binary CODE.  */
	    x = build_outer_var_ref (var, ctx);
	    gimplify_assign (var2, x, &thr01_list);

	    x = omp_reduction_init (c, TREE_TYPE (new_var));
	    gimplify_assign (var2, unshare_expr (x), &thrn1_list);
	    gimplify_assign (var2, x, &thr02_list);

	    gimplify_assign (var2, rprivam1_ref, &thrn2_list);

	    /* Scans accumulate, so subtraction is performed as
	       addition.  */
	    enum tree_code code = OMP_CLAUSE_REDUCTION_CODE (c);
	    if (code == MINUS_EXPR)
	      code = PLUS_EXPR;

	    if (ctx->scan_exclusive)
	      gimplify_assign (unshare_expr (rprivb_ref), var2, &scan1_list);
	    x = build2 (code, TREE_TYPE (new_var), var2, new_var);
	    gimplify_assign (var2, x, &scan1_list);
	    if (ctx->scan_inclusive)
	      gimplify_assign (unshare_expr (rprivb_ref), var2, &scan1_list);

	    gimplify_assign (unshare_expr (rpriva_ref), var2, &mdlist);

	    x = build2 (code, TREE_TYPE (new_var), var2, rprivb_ref);
	    gimplify_assign (new_var, x, &input2_list);

	    gimplify_assign (build_outer_var_ref (var, ctx), rpriva_ref,
			     &last_list);

	    x = build2 (code, TREE_TYPE (new_var), rprivalmk_ref,
			unshare_expr (rprival_ref));
	    gimplify_assign (rprival_ref, x, &reduc_list);
	  }
      }

  /* Bump IVAR at the end of the scan phase of loop 1 and of loop 2.  */
  g = gimple_build_assign (ivar, PLUS_EXPR, ivar, size_one_node);
  gimple_seq_add_stmt (&scan1_list, g);
  g = gimple_build_assign (ivar, PLUS_EXPR, ivar, size_one_node);
  gimple_seq_add_stmt (gimple_omp_body_ptr (scan_stmt2), g);

  /* controlb/controlp coordinate deallocation of rprivb between the two
     loops; both loops carry them as control _scantemp_ clauses.  */
  tree controlb = create_tmp_var (boolean_type_node);
  tree controlp = create_tmp_var (ptr_type_node);
  tree nc = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__SCANTEMP_);
  OMP_CLAUSE_DECL (nc) = controlb;
  OMP_CLAUSE__SCANTEMP__CONTROL (nc) = 1;
  *cp1 = nc;
  cp1 = &OMP_CLAUSE_CHAIN (nc);
  nc = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__SCANTEMP_);
  OMP_CLAUSE_DECL (nc) = controlp;
  OMP_CLAUSE__SCANTEMP__CONTROL (nc) = 1;
  *cp1 = nc;
  cp1 = &OMP_CLAUSE_CHAIN (nc);
  nc = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__SCANTEMP_);
  OMP_CLAUSE_DECL (nc) = controlb;
  OMP_CLAUSE__SCANTEMP__CONTROL (nc) = 1;
  *cp2 = nc;
  cp2 = &OMP_CLAUSE_CHAIN (nc);
  nc = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__SCANTEMP_);
  OMP_CLAUSE_DECL (nc) = controlp;
  OMP_CLAUSE__SCANTEMP__CONTROL (nc) = 1;
  *cp2 = nc;
  cp2 = &OMP_CLAUSE_CHAIN (nc);

  /* Prepend the new _scantemp_ clauses to each loop's clause chain.  */
  *cp1 = gimple_omp_for_clauses (stmt);
  gimple_omp_for_set_clauses (stmt, new_clauses1);
  *cp2 = gimple_omp_for_clauses (new_stmt);
  gimple_omp_for_set_clauses (new_stmt, new_clauses2);

  gimple_omp_set_body (scan_stmt1, scan1_list);
  gimple_omp_set_body (input_stmt2, input2_list);

  /* Replace the input/scan wrapper statements by their bodies in both
     loop copies.  */
  gsi_insert_seq_after (&input1_gsi, gimple_omp_body (input_stmt1),
			GSI_SAME_STMT);
  gsi_remove (&input1_gsi, true);
  gsi_insert_seq_after (&scan1_gsi, gimple_omp_body (scan_stmt1),
			GSI_SAME_STMT);
  gsi_remove (&scan1_gsi, true);
  gsi_insert_seq_after (&input2_gsi, gimple_omp_body (input_stmt2),
			GSI_SAME_STMT);
  gsi_remove (&input2_gsi, true);
  gsi_insert_seq_after (&scan2_gsi, gimple_omp_body (scan_stmt2),
			GSI_SAME_STMT);
  gsi_remove (&scan2_gsi, true);

  gimple_seq_add_seq (body_p, clist);

  /* if (thread_num == 0) { thr01_list } else { thrn1_list }  */
  tree lab1 = create_artificial_label (UNKNOWN_LOCATION);
  tree lab2 = create_artificial_label (UNKNOWN_LOCATION);
  tree lab3 = create_artificial_label (UNKNOWN_LOCATION);
  g = gimple_build_cond (EQ_EXPR, thread_num, integer_zero_node, lab1, lab2);
  gimple_seq_add_stmt (body_p, g);
  g = gimple_build_label (lab1);
  gimple_seq_add_stmt (body_p, g);
  gimple_seq_add_seq (body_p, thr01_list);
  g = gimple_build_goto (lab3);
  gimple_seq_add_stmt (body_p, g);
  g = gimple_build_label (lab2);
  gimple_seq_add_stmt (body_p, g);
  gimple_seq_add_seq (body_p, thrn1_list);
  g = gimple_build_label (lab3);
  gimple_seq_add_stmt (body_p, g);

  /* ivar = 0; then the first (input phase) worksharing loop.  */
  g = gimple_build_assign (ivar, size_zero_node);
  gimple_seq_add_stmt (body_p, g);

  gimple_seq_add_stmt (body_p, stmt);
  gimple_seq_add_seq (body_p, body);
  gimple_seq_add_stmt (body_p, gimple_build_omp_continue (fd->loop.v,
							  fd->loop.v));

  g = gimple_build_omp_return (true);
  gimple_seq_add_stmt (body_p, g);
  /* rpriva[thread_num] = var2 etc.  */
  gimple_seq_add_seq (body_p, mdlist);

  /* if (num_threads > 1) run the barrier-synchronized work-efficient
     parallel prefix sum over rpriva, per the sketch above.  */
  lab1 = create_artificial_label (UNKNOWN_LOCATION);
  lab2 = create_artificial_label (UNKNOWN_LOCATION);
  g = gimple_build_cond (GT_EXPR, num_threads, integer_one_node, lab1, lab2);
  gimple_seq_add_stmt (body_p, g);
  g = gimple_build_label (lab1);
  gimple_seq_add_stmt (body_p, g);

  g = omp_build_barrier (NULL);
  gimple_seq_add_stmt (body_p, g);

  /* down = 0 (up-sweep); flipped to all-ones for the down-sweep.  */
  tree down = create_tmp_var (unsigned_type_node);
  g = gimple_build_assign (down, build_zero_cst (unsigned_type_node));
  gimple_seq_add_stmt (body_p, g);

  g = gimple_build_assign (k, build_one_cst (unsigned_type_node));
  gimple_seq_add_stmt (body_p, g);

  tree num_threadsu = create_tmp_var (unsigned_type_node);
  g = gimple_build_assign (num_threadsu, NOP_EXPR, num_threads);
  gimple_seq_add_stmt (body_p, g);

  tree thread_numu = create_tmp_var (unsigned_type_node);
  g = gimple_build_assign (thread_numu, NOP_EXPR, thread_num);
  gimple_seq_add_stmt (body_p, g);

  tree thread_nump1 = create_tmp_var (unsigned_type_node);
  g = gimple_build_assign (thread_nump1, PLUS_EXPR, thread_numu,
			   build_int_cst (unsigned_type_node, 1));
  gimple_seq_add_stmt (body_p, g);

  /* Top of the combining loop (<D.2108> in the sketch).  */
  lab3 = create_artificial_label (UNKNOWN_LOCATION);
  g = gimple_build_label (lab3);
  gimple_seq_add_stmt (body_p, g);

  tree twok = create_tmp_var (unsigned_type_node);
  g = gimple_build_assign (twok, LSHIFT_EXPR, k, integer_one_node);
  gimple_seq_add_stmt (body_p, g);

  /* When 2*k exceeds num_threads, switch from up-sweep to down-sweep.  */
  tree lab4 = create_artificial_label (UNKNOWN_LOCATION);
  tree lab5 = create_artificial_label (UNKNOWN_LOCATION);
  tree lab6 = create_artificial_label (UNKNOWN_LOCATION);
  g = gimple_build_cond (GT_EXPR, twok, num_threadsu, lab4, lab5);
  gimple_seq_add_stmt (body_p, g);
  g = gimple_build_label (lab4);
  gimple_seq_add_stmt (body_p, g);
  g = gimple_build_assign (down, build_all_ones_cst (unsigned_type_node));
  gimple_seq_add_stmt (body_p, g);
  g = gimple_build_assign (k, RSHIFT_EXPR, k, integer_one_node);
  gimple_seq_add_stmt (body_p, g);

  g = gimple_build_cond (EQ_EXPR, k, num_threadsu, lab6, lab5);
  gimple_seq_add_stmt (body_p, g);
  g = gimple_build_label (lab6);
  gimple_seq_add_stmt (body_p, g);

  g = gimple_build_assign (k, RSHIFT_EXPR, k, integer_one_node);
  gimple_seq_add_stmt (body_p, g);

  g = gimple_build_label (lab5);
  gimple_seq_add_stmt (body_p, g);

  g = gimple_build_assign (twok, LSHIFT_EXPR, k, integer_one_node);
  gimple_seq_add_stmt (body_p, g);

  /* l = (thread_num + 1) * 2k + ((k & down) - 1), with the overflow
     check done via .MUL_OVERFLOW so huge products bail out.  */
  tree cplx = create_tmp_var (build_complex_type (unsigned_type_node, false));
  DECL_GIMPLE_REG_P (cplx) = 1;
  g = gimple_build_call_internal (IFN_MUL_OVERFLOW, 2, thread_nump1, twok);
  gimple_call_set_lhs (g, cplx);
  gimple_seq_add_stmt (body_p, g);
  tree mul = create_tmp_var (unsigned_type_node);
  g = gimple_build_assign (mul, REALPART_EXPR,
			   build1 (REALPART_EXPR, unsigned_type_node, cplx));
  gimple_seq_add_stmt (body_p, g);
  tree ovf = create_tmp_var (unsigned_type_node);
  g = gimple_build_assign (ovf, IMAGPART_EXPR,
			   build1 (IMAGPART_EXPR, unsigned_type_node, cplx));
  gimple_seq_add_stmt (body_p, g);

  tree lab7 = create_artificial_label (UNKNOWN_LOCATION);
  tree lab8 = create_artificial_label (UNKNOWN_LOCATION);
  g = gimple_build_cond (EQ_EXPR, ovf, build_zero_cst (unsigned_type_node),
			 lab7, lab8);
  gimple_seq_add_stmt (body_p, g);
  g = gimple_build_label (lab7);
  gimple_seq_add_stmt (body_p, g);

  tree andv = create_tmp_var (unsigned_type_node);
  g = gimple_build_assign (andv, BIT_AND_EXPR, k, down);
  gimple_seq_add_stmt (body_p, g);
  tree andvm1 = create_tmp_var (unsigned_type_node);
  g = gimple_build_assign (andvm1, PLUS_EXPR, andv,
			   build_minus_one_cst (unsigned_type_node));
  gimple_seq_add_stmt (body_p, g);

  g = gimple_build_assign (l, PLUS_EXPR, mul, andvm1);
  gimple_seq_add_stmt (body_p, g);

  /* If l is in range, perform the combining step reduc_list:
     rpriva[l] = rpriva[l - k] op rpriva[l].  */
  tree lab9 = create_artificial_label (UNKNOWN_LOCATION);
  g = gimple_build_cond (LT_EXPR, l, num_threadsu, lab9, lab8);
  gimple_seq_add_stmt (body_p, g);
  g = gimple_build_label (lab9);
  gimple_seq_add_stmt (body_p, g);
  gimple_seq_add_seq (body_p, reduc_list);
  g = gimple_build_label (lab8);
  gimple_seq_add_stmt (body_p, g);

  /* Advance k: doubled during the up-sweep (down == 0), halved during
     the down-sweep.  */
  tree lab10 = create_artificial_label (UNKNOWN_LOCATION);
  tree lab11 = create_artificial_label (UNKNOWN_LOCATION);
  tree lab12 = create_artificial_label (UNKNOWN_LOCATION);
  g = gimple_build_cond (EQ_EXPR, down, build_zero_cst (unsigned_type_node),
			 lab10, lab11);
  gimple_seq_add_stmt (body_p, g);
  g = gimple_build_label (lab10);
  gimple_seq_add_stmt (body_p, g);
  g = gimple_build_assign (k, LSHIFT_EXPR, k, integer_one_node);
  gimple_seq_add_stmt (body_p, g);
  g = gimple_build_goto (lab12);
  gimple_seq_add_stmt (body_p, g);
  g = gimple_build_label (lab11);
  gimple_seq_add_stmt (body_p, g);
  g = gimple_build_assign (k, RSHIFT_EXPR, k, integer_one_node);
  gimple_seq_add_stmt (body_p, g);
  g = gimple_build_label (lab12);
  gimple_seq_add_stmt (body_p, g);

  /* Barrier between combining rounds; loop while k != 0.  */
  g = omp_build_barrier (NULL);
  gimple_seq_add_stmt (body_p, g);

  g = gimple_build_cond (NE_EXPR, k, build_zero_cst (unsigned_type_node),
			 lab3, lab2);
  gimple_seq_add_stmt (body_p, g);

  g = gimple_build_label (lab2);
  gimple_seq_add_stmt (body_p, g);

  /* if (thread_num == 0) { thr02_list } else { thrn2_list } before the
     second loop.  */
  lab1 = create_artificial_label (UNKNOWN_LOCATION);
  lab2 = create_artificial_label (UNKNOWN_LOCATION);
  lab3 = create_artificial_label (UNKNOWN_LOCATION);
  g = gimple_build_cond (EQ_EXPR, thread_num, integer_zero_node, lab1, lab2);
  gimple_seq_add_stmt (body_p, g);
  g = gimple_build_label (lab1);
  gimple_seq_add_stmt (body_p, g);
  gimple_seq_add_seq (body_p, thr02_list);
  g = gimple_build_goto (lab3);
  gimple_seq_add_stmt (body_p, g);
  g = gimple_build_label (lab2);
  gimple_seq_add_stmt (body_p, g);
  gimple_seq_add_seq (body_p, thrn2_list);
  g = gimple_build_label (lab3);
  gimple_seq_add_stmt (body_p, g);

  /* ivar = 0; then the second (scan phase) worksharing loop.  */
  g = gimple_build_assign (ivar, size_zero_node);
  gimple_seq_add_stmt (body_p, g);
  gimple_seq_add_stmt (body_p, new_stmt);
  gimple_seq_add_seq (body_p, new_body);

  /* In *DLIST, let the last thread copy the final value back to the
     original variable (last_list), before the existing destructors.  */
  gimple_seq new_dlist = NULL;
  lab1 = create_artificial_label (UNKNOWN_LOCATION);
  lab2 = create_artificial_label (UNKNOWN_LOCATION);
  tree num_threadsm1 = create_tmp_var (integer_type_node);
  g = gimple_build_assign (num_threadsm1, PLUS_EXPR, num_threads,
			   integer_minus_one_node);
  gimple_seq_add_stmt (&new_dlist, g);
  g = gimple_build_cond (EQ_EXPR, thread_num, num_threadsm1, lab1, lab2);
  gimple_seq_add_stmt (&new_dlist, g);
  g = gimple_build_label (lab1);
  gimple_seq_add_stmt (&new_dlist, g);
  gimple_seq_add_seq (&new_dlist, last_list);
  g = gimple_build_label (lab2);
  gimple_seq_add_stmt (&new_dlist, g);
  gimple_seq_add_seq (&new_dlist, *dlist);
  *dlist = new_dlist;
}
9820
/* Lower code for an OMP loop directive.  Replaces the GIMPLE_OMP_FOR at
   *GSI_P with a GIMPLE_BIND containing the lowered loop: privatization /
   reduction setup, the loop itself, and the corresponding teardown.
   CTX holds context information for the directive.  */

static void
lower_omp_for (gimple_stmt_iterator *gsi_p, omp_context *ctx)
{
  tree *rhs_p, block;
  struct omp_for_data fd, *fdp = NULL;
  gomp_for *stmt = as_a <gomp_for *> (gsi_stmt (*gsi_p));
  gbind *new_stmt;
  gimple_seq omp_for_body, body, dlist, tred_ilist = NULL, tred_dlist = NULL;
  gimple_seq cnt_list = NULL, clist = NULL;
  gimple_seq oacc_head = NULL, oacc_tail = NULL;
  size_t i;

  push_gimplify_context ();

  /* Lower the pre-body (collapsed-loop count computations etc.) first.  */
  lower_omp (gimple_omp_for_pre_body_ptr (stmt), ctx);

  block = make_node (BLOCK);
  new_stmt = gimple_build_bind (NULL, NULL, block);
  /* Replace at gsi right away, so that 'stmt' is no member
     of a sequence anymore as we're going to add to a different
     one below.  */
  gsi_replace (gsi_p, new_stmt, true);

  /* Move declaration of temporaries in the loop body before we make
     it go away.  */
  omp_for_body = gimple_omp_body (stmt);
  if (!gimple_seq_empty_p (omp_for_body)
      && gimple_code (gimple_seq_first_stmt (omp_for_body)) == GIMPLE_BIND)
    {
      gbind *inner_bind
	= as_a <gbind *> (gimple_seq_first_stmt (omp_for_body));
      tree vars = gimple_bind_vars (inner_bind);
      gimple_bind_append_vars (new_stmt, vars);
      /* bind_vars/BLOCK_VARS are being moved to new_stmt/block, don't
	 keep them on the inner_bind and it's block.  */
      gimple_bind_set_vars (inner_bind, NULL_TREE);
      if (gimple_bind_block (inner_bind))
	BLOCK_VARS (gimple_bind_block (inner_bind)) = NULL_TREE;
    }

  if (gimple_omp_for_combined_into_p (stmt))
    {
      omp_extract_for_data (stmt, &fd, NULL);
      fdp = &fd;

      /* We need two temporaries with fd.loop.v type (istart/iend)
	 and then (fd.collapse - 1) temporaries with the same
	 type for count2 ... countN-1 vars if not constant.  */
      size_t count = 2;
      tree type = fd.iter_type;
      if (fd.collapse > 1
	  && TREE_CODE (fd.loop.n2) != INTEGER_CST)
	count += fd.collapse - 1;
      bool taskreg_for
	= (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_FOR
	   || gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_TASKLOOP);
      tree outerc = NULL, *pc = gimple_omp_for_clauses_ptr (stmt);
      tree simtc = NULL;
      tree clauses = *pc;
      if (taskreg_for)
	outerc
	  = omp_find_clause (gimple_omp_taskreg_clauses (ctx->outer->stmt),
			     OMP_CLAUSE__LOOPTEMP_);
      if (ctx->simt_stmt)
	simtc = omp_find_clause (gimple_omp_for_clauses (ctx->simt_stmt),
				 OMP_CLAUSE__LOOPTEMP_);
      /* Prepend COUNT _looptemp_ clauses; for taskreg the decls come
	 from the enclosing parallel/task, otherwise fresh temporaries
	 are created (shared with the _simt_ sibling if present).  */
      for (i = 0; i < count; i++)
	{
	  tree temp;
	  if (taskreg_for)
	    {
	      gcc_assert (outerc);
	      temp = lookup_decl (OMP_CLAUSE_DECL (outerc), ctx->outer);
	      outerc = omp_find_clause (OMP_CLAUSE_CHAIN (outerc),
					OMP_CLAUSE__LOOPTEMP_);
	    }
	  else
	    {
	      /* If there are 2 adjacent SIMD stmts, one with _simt_
		 clause, another without, make sure they have the same
		 decls in _looptemp_ clauses, because the outer stmt
		 they are combined into will look up just one inner_stmt.  */
	      if (ctx->simt_stmt)
		temp = OMP_CLAUSE_DECL (simtc);
	      else
		temp = create_tmp_var (type);
	      insert_decl_map (&ctx->outer->cb, temp, temp);
	    }
	  *pc = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__LOOPTEMP_);
	  OMP_CLAUSE_DECL (*pc) = temp;
	  pc = &OMP_CLAUSE_CHAIN (*pc);
	  if (ctx->simt_stmt)
	    simtc = omp_find_clause (OMP_CLAUSE_CHAIN (simtc),
				     OMP_CLAUSE__LOOPTEMP_);
	}
      *pc = clauses;
    }

  /* The pre-body and input clauses go before the lowered GIMPLE_OMP_FOR.  */
  dlist = NULL;
  body = NULL;
  tree rclauses
    = omp_task_reductions_find_first (gimple_omp_for_clauses (stmt), OMP_FOR,
				      OMP_CLAUSE_REDUCTION);
  tree rtmp = NULL_TREE;
  if (rclauses)
    {
      /* task reduction modifier: stash the reduction bookkeeping pointer
	 in a _reductemp_ clause and emit the init/fini sequences into
	 tred_ilist/tred_dlist.  */
      tree type = build_pointer_type (pointer_sized_int_node);
      tree temp = create_tmp_var (type);
      tree c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__REDUCTEMP_);
      OMP_CLAUSE_DECL (c) = temp;
      OMP_CLAUSE_CHAIN (c) = gimple_omp_for_clauses (stmt);
      gimple_omp_for_set_clauses (stmt, c);
      lower_omp_task_reductions (ctx, OMP_FOR,
				 gimple_omp_for_clauses (stmt),
				 &tred_ilist, &tred_dlist);
      rclauses = c;
      rtmp = make_ssa_name (type);
      gimple_seq_add_stmt (&body, gimple_build_assign (rtmp, temp));
    }

  lower_lastprivate_conditional_clauses (gimple_omp_for_clauses_ptr (stmt),
					 ctx);

  lower_rec_input_clauses (gimple_omp_for_clauses (stmt), &body, &dlist, ctx,
			   fdp);
  gimple_seq_add_seq (rclauses ? &tred_ilist : &body,
		      gimple_omp_for_pre_body (stmt));

  lower_omp (gimple_omp_body_ptr (stmt), ctx);

  /* Lower the header expressions.  At this point, we can assume that
     the header is of the form:

	#pragma omp for (V = VAL1; V {<|>|<=|>=} VAL2; V = V [+-] VAL3)

     We just need to make sure that VAL1, VAL2 and VAL3 are lowered
     using the .omp_data_s mapping, if needed.  */
  for (i = 0; i < gimple_omp_for_collapse (stmt); i++)
    {
      rhs_p = gimple_omp_for_initial_ptr (stmt, i);
      if (!is_gimple_min_invariant (*rhs_p))
	*rhs_p = get_formal_tmp_var (*rhs_p, &cnt_list);
      else if (TREE_CODE (*rhs_p) == ADDR_EXPR)
	recompute_tree_invariant_for_addr_expr (*rhs_p);

      rhs_p = gimple_omp_for_final_ptr (stmt, i);
      if (!is_gimple_min_invariant (*rhs_p))
	*rhs_p = get_formal_tmp_var (*rhs_p, &cnt_list);
      else if (TREE_CODE (*rhs_p) == ADDR_EXPR)
	recompute_tree_invariant_for_addr_expr (*rhs_p);

      rhs_p = &TREE_OPERAND (gimple_omp_for_incr (stmt, i), 1);
      if (!is_gimple_min_invariant (*rhs_p))
	*rhs_p = get_formal_tmp_var (*rhs_p, &cnt_list);
    }
  if (rclauses)
    gimple_seq_add_seq (&tred_ilist, cnt_list);
  else
    gimple_seq_add_seq (&body, cnt_list);

  /* Once lowered, extract the bounds and clauses.  */
  omp_extract_for_data (stmt, &fd, NULL);

  if (is_gimple_omp_oacc (ctx->stmt)
      && !ctx_in_oacc_kernels_region (ctx))
    lower_oacc_head_tail (gimple_location (stmt),
			  gimple_omp_for_clauses (stmt),
			  &oacc_head, &oacc_tail, ctx);

  /* Add OpenACC partitioning and reduction markers just before the loop.  */
  if (oacc_head)
    gimple_seq_add_seq (&body, oacc_head);

  lower_omp_for_lastprivate (&fd, &body, &dlist, &clist, ctx);

  if (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_FOR)
    for (tree c = gimple_omp_for_clauses (stmt); c; c = OMP_CLAUSE_CHAIN (c))
      if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
	  && !OMP_CLAUSE_LINEAR_NO_COPYIN (c))
	{
	  OMP_CLAUSE_DECL (c) = lookup_decl (OMP_CLAUSE_DECL (c), ctx);
	  if (DECL_P (OMP_CLAUSE_LINEAR_STEP (c)))
	    OMP_CLAUSE_LINEAR_STEP (c)
	      = maybe_lookup_decl_in_outer_ctx (OMP_CLAUSE_LINEAR_STEP (c),
						ctx);
	}

  /* Gridified loops keep just their body; no loop stmt, continue or
     return markers are emitted for them.  */
  bool phony_loop = (gimple_omp_for_kind (stmt) != GF_OMP_FOR_KIND_GRID_LOOP
		     && gimple_omp_for_grid_phony (stmt));
  if ((ctx->scan_inclusive || ctx->scan_exclusive)
      && gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_FOR)
    {
      /* Worksharing loop with inscan reductions needs special lowering.  */
      gcc_assert (!phony_loop);
      lower_omp_for_scan (&body, &dlist, stmt, &fd, ctx);
    }
  else
    {
      if (!phony_loop)
	gimple_seq_add_stmt (&body, stmt);
      gimple_seq_add_seq (&body, gimple_omp_body (stmt));
    }

  if (!phony_loop)
    gimple_seq_add_stmt (&body, gimple_build_omp_continue (fd.loop.v,
							   fd.loop.v));

  /* After the loop, add exit clauses.  */
  lower_reduction_clauses (gimple_omp_for_clauses (stmt), &body, &clist, ctx);

  if (clist)
    {
      /* Wrap the combined-atomic reduction stores in a
	 GOMP_atomic_start/GOMP_atomic_end pair.  */
      tree fndecl = builtin_decl_explicit (BUILT_IN_GOMP_ATOMIC_START);
      gcall *g = gimple_build_call (fndecl, 0);
      gimple_seq_add_stmt (&body, g);
      gimple_seq_add_seq (&body, clist);
      fndecl = builtin_decl_explicit (BUILT_IN_GOMP_ATOMIC_END);
      g = gimple_build_call (fndecl, 0);
      gimple_seq_add_stmt (&body, g);
    }

  if (ctx->cancellable)
    gimple_seq_add_stmt (&body, gimple_build_label (ctx->cancel_label));

  gimple_seq_add_seq (&body, dlist);

  if (rclauses)
    {
      gimple_seq_add_seq (&tred_ilist, body);
      body = tred_ilist;
    }

  body = maybe_catch_exception (body);

  if (!phony_loop)
    {
      /* Region exit marker goes at the end of the loop body.  */
      gimple *g = gimple_build_omp_return (fd.have_nowait);
      gimple_seq_add_stmt (&body, g);

      gimple_seq_add_seq (&body, tred_dlist);

      maybe_add_implicit_barrier_cancel (ctx, g, &body);

      if (rclauses)
	OMP_CLAUSE_DECL (rclauses) = rtmp;
    }

  /* Add OpenACC joining and reduction markers just after the loop.  */
  if (oacc_tail)
    gimple_seq_add_seq (&body, oacc_tail);

  pop_gimplify_context (new_stmt);

  gimple_bind_append_vars (new_stmt, ctx->block_vars);
  maybe_remove_omp_member_access_dummy_vars (new_stmt);
  BLOCK_VARS (block) = gimple_bind_vars (new_stmt);
  if (BLOCK_VARS (block))
    TREE_USED (block) = 1;

  gimple_bind_set_body (new_stmt, body);
  gimple_omp_set_body (stmt, NULL);
  gimple_omp_for_set_pre_body (stmt, NULL);
}
10087
10088 /* Callback for walk_stmts. Check if the current statement only contains
10089 GIMPLE_OMP_FOR or GIMPLE_OMP_SECTIONS. */
10090
10091 static tree
10092 check_combined_parallel (gimple_stmt_iterator *gsi_p,
10093 bool *handled_ops_p,
10094 struct walk_stmt_info *wi)
10095 {
10096 int *info = (int *) wi->info;
10097 gimple *stmt = gsi_stmt (*gsi_p);
10098
10099 *handled_ops_p = true;
10100 switch (gimple_code (stmt))
10101 {
10102 WALK_SUBSTMTS;
10103
10104 case GIMPLE_DEBUG:
10105 break;
10106 case GIMPLE_OMP_FOR:
10107 case GIMPLE_OMP_SECTIONS:
10108 *info = *info == 0 ? 1 : -1;
10109 break;
10110 default:
10111 *info = -1;
10112 break;
10113 }
10114 return NULL;
10115 }
10116
/* Context used while building a task copyfn (see create_task_copyfn).  */
struct omp_taskcopy_context
{
  /* This field must be at the beginning, as we do "inheritance": Some
     callback functions for tree-inline.c (e.g., omp_copy_decl)
     receive a copy_body_data pointer that is up-casted to an
     omp_context pointer.  */
  copy_body_data cb;
  /* The OMP context of the task whose copyfn is being built.  */
  omp_context *ctx;
};
10126
10127 static tree
10128 task_copyfn_copy_decl (tree var, copy_body_data *cb)
10129 {
10130 struct omp_taskcopy_context *tcctx = (struct omp_taskcopy_context *) cb;
10131
10132 if (splay_tree_lookup (tcctx->ctx->sfield_map, (splay_tree_key) var))
10133 return create_tmp_var (TREE_TYPE (var));
10134
10135 return var;
10136 }
10137
/* Build a variant of record type ORIG_TYPE with all field types and
   size/offset expressions remapped through TCCTX->cb, recording the
   old-field -> new-field mapping in TCCTX->cb.decl_map.  Returns the
   freshly laid-out record type.  */

static tree
task_copyfn_remap_type (struct omp_taskcopy_context *tcctx, tree orig_type)
{
  tree name, new_fields = NULL, type, f;

  type = lang_hooks.types.make_type (RECORD_TYPE);
  name = DECL_NAME (TYPE_NAME (orig_type));
  name = build_decl (gimple_location (tcctx->ctx->stmt),
		     TYPE_DECL, name, type);
  TYPE_NAME (type) = name;

  for (f = TYPE_FIELDS (orig_type); f ; f = TREE_CHAIN (f))
    {
      tree new_f = copy_node (f);
      DECL_CONTEXT (new_f) = type;
      TREE_TYPE (new_f) = remap_type (TREE_TYPE (f), &tcctx->cb);
      /* Fields are prepended here and put back in source order by the
	 nreverse below.  */
      TREE_CHAIN (new_f) = new_fields;
      walk_tree (&DECL_SIZE (new_f), copy_tree_body_r, &tcctx->cb, NULL);
      walk_tree (&DECL_SIZE_UNIT (new_f), copy_tree_body_r, &tcctx->cb, NULL);
      walk_tree (&DECL_FIELD_OFFSET (new_f), copy_tree_body_r,
		 &tcctx->cb, NULL);
      new_fields = new_f;
      tcctx->cb.decl_map->put (f, new_f);
    }
  TYPE_FIELDS (type) = nreverse (new_fields);
  layout_type (type);
  return type;
}
10166
/* Create task copyfn: fill in the body of the function that copies the
   firstprivate/shared data from the sender record (second argument) into
   the task's own record (first argument) when a task is deferred.  */

static void
create_task_copyfn (gomp_task *task_stmt, omp_context *ctx)
{
  struct function *child_cfun;
  tree child_fn, t, c, src, dst, f, sf, arg, sarg, decl;
  tree record_type, srecord_type, bind, list;
  bool record_needs_remap = false, srecord_needs_remap = false;
  splay_tree_node n;
  struct omp_taskcopy_context tcctx;
  location_t loc = gimple_location (task_stmt);
  size_t looptempno = 0;

  child_fn = gimple_omp_task_copy_fn (task_stmt);
  child_cfun = DECL_STRUCT_FUNCTION (child_fn);
  gcc_assert (child_cfun->cfg == NULL);
  DECL_SAVED_TREE (child_fn) = alloc_stmt_list ();

  /* Reset DECL_CONTEXT on function arguments.  */
  for (t = DECL_ARGUMENTS (child_fn); t; t = DECL_CHAIN (t))
    DECL_CONTEXT (t) = child_fn;

  /* Populate the function.  */
  push_gimplify_context ();
  push_cfun (child_cfun);

  bind = build3 (BIND_EXPR, void_type_node, NULL, NULL, NULL);
  TREE_SIDE_EFFECTS (bind) = 1;
  list = NULL;
  DECL_SAVED_TREE (child_fn) = bind;
  DECL_SOURCE_LOCATION (child_fn) = gimple_location (task_stmt);

  /* Remap src and dst argument types if needed.  A remap is needed when
     any field has a variably modified type (VLAs etc.), since those size
     expressions refer to decls of the source function.  */
  record_type = ctx->record_type;
  srecord_type = ctx->srecord_type;
  for (f = TYPE_FIELDS (record_type); f ; f = DECL_CHAIN (f))
    if (variably_modified_type_p (TREE_TYPE (f), ctx->cb.src_fn))
      {
	record_needs_remap = true;
	break;
      }
  for (f = TYPE_FIELDS (srecord_type); f ; f = DECL_CHAIN (f))
    if (variably_modified_type_p (TREE_TYPE (f), ctx->cb.src_fn))
      {
	srecord_needs_remap = true;
	break;
      }

  if (record_needs_remap || srecord_needs_remap)
    {
      memset (&tcctx, '\0', sizeof (tcctx));
      tcctx.cb.src_fn = ctx->cb.src_fn;
      tcctx.cb.dst_fn = child_fn;
      tcctx.cb.src_node = cgraph_node::get (tcctx.cb.src_fn);
      gcc_checking_assert (tcctx.cb.src_node);
      tcctx.cb.dst_node = tcctx.cb.src_node;
      tcctx.cb.src_cfun = ctx->cb.src_cfun;
      tcctx.cb.copy_decl = task_copyfn_copy_decl;
      tcctx.cb.eh_lp_nr = 0;
      tcctx.cb.transform_call_graph_edges = CB_CGE_MOVE;
      tcctx.cb.decl_map = new hash_map<tree, tree>;
      tcctx.ctx = ctx;

      if (record_needs_remap)
	record_type = task_copyfn_remap_type (&tcctx, record_type);
      if (srecord_needs_remap)
	srecord_type = task_copyfn_remap_type (&tcctx, srecord_type);
    }
  else
    /* decl_map == NULL signals "no remapping" to all the passes below.  */
    tcctx.cb.decl_map = NULL;

  arg = DECL_ARGUMENTS (child_fn);
  TREE_TYPE (arg) = build_pointer_type (record_type);
  sarg = DECL_CHAIN (arg);
  TREE_TYPE (sarg) = build_pointer_type (srecord_type);

  /* First pass: initialize temporaries used in record_type and srecord_type
     sizes and field offsets.  */
  if (tcctx.cb.decl_map)
    for (c = gimple_omp_task_clauses (task_stmt); c; c = OMP_CLAUSE_CHAIN (c))
      if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
	{
	  tree *p;

	  decl = OMP_CLAUSE_DECL (c);
	  p = tcctx.cb.decl_map->get (decl);
	  if (p == NULL)
	    continue;
	  n = splay_tree_lookup (ctx->sfield_map, (splay_tree_key) decl);
	  sf = (tree) n->value;
	  sf = *tcctx.cb.decl_map->get (sf);
	  src = build_simple_mem_ref_loc (loc, sarg);
	  src = omp_build_component_ref (src, sf);
	  t = build2 (MODIFY_EXPR, TREE_TYPE (*p), *p, src);
	  append_to_statement_list (t, &list);
	}

  /* Second pass: copy shared var pointers and copy construct non-VLA
     firstprivate vars.  */
  for (c = gimple_omp_task_clauses (task_stmt); c; c = OMP_CLAUSE_CHAIN (c))
    switch (OMP_CLAUSE_CODE (c))
      {
	splay_tree_key key;
      case OMP_CLAUSE_SHARED:
	decl = OMP_CLAUSE_DECL (c);
	key = (splay_tree_key) decl;
	/* Shared-firstprivate vars are keyed by &DECL_UID so they don't
	   collide with the plain firstprivate entry for the same decl.  */
	if (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
	  key = (splay_tree_key) &DECL_UID (decl);
	n = splay_tree_lookup (ctx->field_map, key);
	if (n == NULL)
	  break;
	f = (tree) n->value;
	if (tcctx.cb.decl_map)
	  f = *tcctx.cb.decl_map->get (f);
	n = splay_tree_lookup (ctx->sfield_map, key);
	sf = (tree) n->value;
	if (tcctx.cb.decl_map)
	  sf = *tcctx.cb.decl_map->get (sf);
	src = build_simple_mem_ref_loc (loc, sarg);
	src = omp_build_component_ref (src, sf);
	dst = build_simple_mem_ref_loc (loc, arg);
	dst = omp_build_component_ref (dst, f);
	t = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
	append_to_statement_list (t, &list);
	break;
      case OMP_CLAUSE_REDUCTION:
      case OMP_CLAUSE_IN_REDUCTION:
	decl = OMP_CLAUSE_DECL (c);
	/* Strip the MEM_REF wrapping used for array-section reductions to
	   get at the underlying decl used as the map key.  */
	if (TREE_CODE (decl) == MEM_REF)
	  {
	    decl = TREE_OPERAND (decl, 0);
	    if (TREE_CODE (decl) == POINTER_PLUS_EXPR)
	      decl = TREE_OPERAND (decl, 0);
	    if (TREE_CODE (decl) == INDIRECT_REF
		|| TREE_CODE (decl) == ADDR_EXPR)
	      decl = TREE_OPERAND (decl, 0);
	  }
	key = (splay_tree_key) decl;
	n = splay_tree_lookup (ctx->field_map, key);
	if (n == NULL)
	  break;
	f = (tree) n->value;
	if (tcctx.cb.decl_map)
	  f = *tcctx.cb.decl_map->get (f);
	n = splay_tree_lookup (ctx->sfield_map, key);
	sf = (tree) n->value;
	if (tcctx.cb.decl_map)
	  sf = *tcctx.cb.decl_map->get (sf);
	src = build_simple_mem_ref_loc (loc, sarg);
	src = omp_build_component_ref (src, sf);
	if (decl != OMP_CLAUSE_DECL (c)
	    && TREE_CODE (TREE_TYPE (decl)) == REFERENCE_TYPE
	    && TREE_CODE (TREE_TYPE (TREE_TYPE (decl))) == POINTER_TYPE)
	  src = build_simple_mem_ref_loc (loc, src);
	dst = build_simple_mem_ref_loc (loc, arg);
	dst = omp_build_component_ref (dst, f);
	t = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
	append_to_statement_list (t, &list);
	break;
      case OMP_CLAUSE__LOOPTEMP_:
	/* Fields for first two _looptemp_ clauses are initialized by
	   GOMP_taskloop*, the rest are handled like firstprivate.  */
	if (looptempno < 2)
	  {
	    looptempno++;
	    break;
	  }
	/* FALLTHRU */
      case OMP_CLAUSE__REDUCTEMP_:
      case OMP_CLAUSE_FIRSTPRIVATE:
	decl = OMP_CLAUSE_DECL (c);
	/* VLA firstprivates are handled in the last pass below.  */
	if (is_variable_sized (decl))
	  break;
	n = splay_tree_lookup (ctx->field_map, (splay_tree_key) decl);
	if (n == NULL)
	  break;
	f = (tree) n->value;
	if (tcctx.cb.decl_map)
	  f = *tcctx.cb.decl_map->get (f);
	n = splay_tree_lookup (ctx->sfield_map, (splay_tree_key) decl);
	if (n != NULL)
	  {
	    sf = (tree) n->value;
	    if (tcctx.cb.decl_map)
	      sf = *tcctx.cb.decl_map->get (sf);
	    src = build_simple_mem_ref_loc (loc, sarg);
	    src = omp_build_component_ref (src, sf);
	    if (use_pointer_for_field (decl, NULL) || omp_is_reference (decl))
	      src = build_simple_mem_ref_loc (loc, src);
	  }
	else
	  src = decl;
	dst = build_simple_mem_ref_loc (loc, arg);
	dst = omp_build_component_ref (dst, f);
	/* Firstprivate needs a (possibly user-defined) copy constructor;
	   the internal temp clauses are plain bitwise copies.  */
	if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_FIRSTPRIVATE)
	  t = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
	else
	  t = lang_hooks.decls.omp_clause_copy_ctor (c, dst, src);
	append_to_statement_list (t, &list);
	break;
      case OMP_CLAUSE_PRIVATE:
	if (! OMP_CLAUSE_PRIVATE_OUTER_REF (c))
	  break;
	decl = OMP_CLAUSE_DECL (c);
	n = splay_tree_lookup (ctx->field_map, (splay_tree_key) decl);
	f = (tree) n->value;
	if (tcctx.cb.decl_map)
	  f = *tcctx.cb.decl_map->get (f);
	n = splay_tree_lookup (ctx->sfield_map, (splay_tree_key) decl);
	if (n != NULL)
	  {
	    sf = (tree) n->value;
	    if (tcctx.cb.decl_map)
	      sf = *tcctx.cb.decl_map->get (sf);
	    src = build_simple_mem_ref_loc (loc, sarg);
	    src = omp_build_component_ref (src, sf);
	    if (use_pointer_for_field (decl, NULL))
	      src = build_simple_mem_ref_loc (loc, src);
	  }
	else
	  src = decl;
	dst = build_simple_mem_ref_loc (loc, arg);
	dst = omp_build_component_ref (dst, f);
	t = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
	append_to_statement_list (t, &list);
	break;
      default:
	break;
      }

  /* Last pass: handle VLA firstprivates.  */
  if (tcctx.cb.decl_map)
    for (c = gimple_omp_task_clauses (task_stmt); c; c = OMP_CLAUSE_CHAIN (c))
      if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
	{
	  tree ind, ptr, df;

	  decl = OMP_CLAUSE_DECL (c);
	  if (!is_variable_sized (decl))
	    continue;
	  n = splay_tree_lookup (ctx->field_map, (splay_tree_key) decl);
	  if (n == NULL)
	    continue;
	  f = (tree) n->value;
	  f = *tcctx.cb.decl_map->get (f);
	  gcc_assert (DECL_HAS_VALUE_EXPR_P (decl));
	  ind = DECL_VALUE_EXPR (decl);
	  gcc_assert (TREE_CODE (ind) == INDIRECT_REF);
	  gcc_assert (DECL_P (TREE_OPERAND (ind, 0)));
	  n = splay_tree_lookup (ctx->sfield_map,
				 (splay_tree_key) TREE_OPERAND (ind, 0));
	  sf = (tree) n->value;
	  sf = *tcctx.cb.decl_map->get (sf);
	  src = build_simple_mem_ref_loc (loc, sarg);
	  src = omp_build_component_ref (src, sf);
	  src = build_simple_mem_ref_loc (loc, src);
	  dst = build_simple_mem_ref_loc (loc, arg);
	  dst = omp_build_component_ref (dst, f);
	  t = lang_hooks.decls.omp_clause_copy_ctor (c, dst, src);
	  append_to_statement_list (t, &list);
	  /* Also store the address of the copied data in the pointer
	     field that backs the VLA's DECL_VALUE_EXPR.  */
	  n = splay_tree_lookup (ctx->field_map,
				 (splay_tree_key) TREE_OPERAND (ind, 0));
	  df = (tree) n->value;
	  df = *tcctx.cb.decl_map->get (df);
	  ptr = build_simple_mem_ref_loc (loc, arg);
	  ptr = omp_build_component_ref (ptr, df);
	  t = build2 (MODIFY_EXPR, TREE_TYPE (ptr), ptr,
		      build_fold_addr_expr_loc (loc, dst));
	  append_to_statement_list (t, &list);
	}

  t = build1 (RETURN_EXPR, void_type_node, NULL);
  append_to_statement_list (t, &list);

  if (tcctx.cb.decl_map)
    delete tcctx.cb.decl_map;
  pop_gimplify_context (NULL);
  BIND_EXPR_BODY (bind) = list;
  pop_cfun ();
}
10448
/* Lower the OMP_CLAUSE_DEPEND clauses in *PCLAUSES into a single stack
   array in the layout expected by the GOMP task runtime and prepend a
   OMP_CLAUSE_DEPEND_LAST clause holding its address.  Setup statements
   are appended to *ISEQ, the final clobber of the array to *OSEQ.  */

static void
lower_depend_clauses (tree *pclauses, gimple_seq *iseq, gimple_seq *oseq)
{
  tree c, clauses;
  gimple *g;
  /* cnt[0] = out/inout, cnt[1] = mutexinoutset, cnt[2] = in,
     cnt[3] = depobj; idx is the size of the array header (2 words for
     the simple layout, 5 when the extended layout is needed).  */
  size_t cnt[4] = { 0, 0, 0, 0 }, idx = 2, i;

  clauses = omp_find_clause (*pclauses, OMP_CLAUSE_DEPEND);
  gcc_assert (clauses);
  for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
    if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND)
      switch (OMP_CLAUSE_DEPEND_KIND (c))
	{
	case OMP_CLAUSE_DEPEND_LAST:
	  /* Lowering already done at gimplification.  */
	  return;
	case OMP_CLAUSE_DEPEND_IN:
	  cnt[2]++;
	  break;
	case OMP_CLAUSE_DEPEND_OUT:
	case OMP_CLAUSE_DEPEND_INOUT:
	  cnt[0]++;
	  break;
	case OMP_CLAUSE_DEPEND_MUTEXINOUTSET:
	  cnt[1]++;
	  break;
	case OMP_CLAUSE_DEPEND_DEPOBJ:
	  cnt[3]++;
	  break;
	case OMP_CLAUSE_DEPEND_SOURCE:
	case OMP_CLAUSE_DEPEND_SINK:
	  /* FALLTHRU */
	default:
	  gcc_unreachable ();
	}
  /* Mutexinoutset or depobj dependences require the extended layout.  */
  if (cnt[1] || cnt[3])
    idx = 5;
  size_t total = cnt[0] + cnt[1] + cnt[2] + cnt[3];
  tree type = build_array_type_nelts (ptr_type_node, total + idx);
  tree array = create_tmp_var (type);
  TREE_ADDRESSABLE (array) = 1;
  tree r = build4 (ARRAY_REF, ptr_type_node, array, size_int (0), NULL_TREE,
		   NULL_TREE);
  if (idx == 5)
    {
      /* Extended layout starts with a 0 marker word.  */
      g = gimple_build_assign (r, build_int_cst (ptr_type_node, 0));
      gimple_seq_add_stmt (iseq, g);
      r = build4 (ARRAY_REF, ptr_type_node, array, size_int (1), NULL_TREE,
		  NULL_TREE);
    }
  /* Total number of dependence addresses.  */
  g = gimple_build_assign (r, build_int_cst (ptr_type_node, total));
  gimple_seq_add_stmt (iseq, g);
  /* Per-kind counts: just out/inout in the simple layout, the first
     three counts in the extended one.  */
  for (i = 0; i < (idx == 5 ? 3 : 1); i++)
    {
      r = build4 (ARRAY_REF, ptr_type_node, array,
		  size_int (i + 1 + (idx == 5)), NULL_TREE, NULL_TREE);
      g = gimple_build_assign (r, build_int_cst (ptr_type_node, cnt[i]));
      gimple_seq_add_stmt (iseq, g);
    }
  /* Store the dependence addresses grouped by kind, in the order the
     counts above describe them.  */
  for (i = 0; i < 4; i++)
    {
      if (cnt[i] == 0)
	continue;
      for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
	if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_DEPEND)
	  continue;
	else
	  {
	    switch (OMP_CLAUSE_DEPEND_KIND (c))
	      {
	      case OMP_CLAUSE_DEPEND_IN:
		if (i != 2)
		  continue;
		break;
	      case OMP_CLAUSE_DEPEND_OUT:
	      case OMP_CLAUSE_DEPEND_INOUT:
		if (i != 0)
		  continue;
		break;
	      case OMP_CLAUSE_DEPEND_MUTEXINOUTSET:
		if (i != 1)
		  continue;
		break;
	      case OMP_CLAUSE_DEPEND_DEPOBJ:
		if (i != 3)
		  continue;
		break;
	      default:
		gcc_unreachable ();
	      }
	    tree t = OMP_CLAUSE_DECL (c);
	    t = fold_convert (ptr_type_node, t);
	    gimplify_expr (&t, iseq, NULL, is_gimple_val, fb_rvalue);
	    r = build4 (ARRAY_REF, ptr_type_node, array, size_int (idx++),
			NULL_TREE, NULL_TREE);
	    g = gimple_build_assign (r, t);
	    gimple_seq_add_stmt (iseq, g);
	  }
    }
  /* Replace the individual depend clauses by one DEPEND_LAST clause
     pointing at the array just built.  */
  c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE_DEPEND);
  OMP_CLAUSE_DEPEND_KIND (c) = OMP_CLAUSE_DEPEND_LAST;
  OMP_CLAUSE_DECL (c) = build_fold_addr_expr (array);
  OMP_CLAUSE_CHAIN (c) = *pclauses;
  *pclauses = c;
  /* Clobber the array at region end so its stack slot can be reused.  */
  tree clobber = build_constructor (type, NULL);
  TREE_THIS_VOLATILE (clobber) = 1;
  g = gimple_build_assign (array, clobber);
  gimple_seq_add_stmt (oseq, g);
}
10558
/* Lower the OpenMP parallel or task directive in the current statement
   in GSI_P.  CTX holds context information for the directive.  The stmt
   is replaced by a GIMPLE_BIND that sequences data-sharing setup, the
   directive itself and the corresponding teardown.  */

static void
lower_omp_taskreg (gimple_stmt_iterator *gsi_p, omp_context *ctx)
{
  tree clauses;
  tree child_fn, t;
  gimple *stmt = gsi_stmt (*gsi_p);
  gbind *par_bind, *bind, *dep_bind = NULL;
  gimple_seq par_body;
  location_t loc = gimple_location (stmt);

  clauses = gimple_omp_taskreg_clauses (stmt);
  /* A taskwait-with-depend stmt has no body of its own.  */
  if (gimple_code (stmt) == GIMPLE_OMP_TASK
      && gimple_omp_task_taskwait_p (stmt))
    {
      par_bind = NULL;
      par_body = NULL;
    }
  else
    {
      par_bind
	= as_a <gbind *> (gimple_seq_first_stmt (gimple_omp_body (stmt)));
      par_body = gimple_bind_body (par_bind);
    }
  child_fn = ctx->cb.dst_fn;
  /* Detect #pragma omp parallel bodies consisting of exactly one
     worksharing construct and mark them combined.  */
  if (gimple_code (stmt) == GIMPLE_OMP_PARALLEL
      && !gimple_omp_parallel_combined_p (stmt))
    {
      struct walk_stmt_info wi;
      int ws_num = 0;

      memset (&wi, 0, sizeof (wi));
      wi.info = &ws_num;
      wi.val_only = true;
      walk_gimple_seq (par_body, check_combined_parallel, NULL, &wi);
      if (ws_num == 1)
	gimple_omp_parallel_set_combined_p (stmt, true);
    }
  gimple_seq dep_ilist = NULL;
  gimple_seq dep_olist = NULL;
  if (gimple_code (stmt) == GIMPLE_OMP_TASK
      && omp_find_clause (clauses, OMP_CLAUSE_DEPEND))
    {
      push_gimplify_context ();
      dep_bind = gimple_build_bind (NULL, NULL, make_node (BLOCK));
      lower_depend_clauses (gimple_omp_task_clauses_ptr (stmt),
			    &dep_ilist, &dep_olist);
    }

  /* For taskwait with depend, only the depend lowering is needed.  */
  if (gimple_code (stmt) == GIMPLE_OMP_TASK
      && gimple_omp_task_taskwait_p (stmt))
    {
      if (dep_bind)
	{
	  gsi_replace (gsi_p, dep_bind, true);
	  gimple_bind_add_seq (dep_bind, dep_ilist);
	  gimple_bind_add_stmt (dep_bind, stmt);
	  gimple_bind_add_seq (dep_bind, dep_olist);
	  pop_gimplify_context (dep_bind);
	}
      return;
    }

  if (ctx->srecord_type)
    create_task_copyfn (as_a <gomp_task *> (stmt), ctx);

  /* Task reductions on taskloop / _reductemp_ on parallel need their own
     init/fini sequences wrapped around everything else.  */
  gimple_seq tskred_ilist = NULL;
  gimple_seq tskred_olist = NULL;
  if ((is_task_ctx (ctx)
       && gimple_omp_task_taskloop_p (ctx->stmt)
       && omp_find_clause (gimple_omp_task_clauses (ctx->stmt),
			   OMP_CLAUSE_REDUCTION))
      || (is_parallel_ctx (ctx)
	  && omp_find_clause (gimple_omp_parallel_clauses (stmt),
			      OMP_CLAUSE__REDUCTEMP_)))
    {
      if (dep_bind == NULL)
	{
	  push_gimplify_context ();
	  dep_bind = gimple_build_bind (NULL, NULL, make_node (BLOCK));
	}
      lower_omp_task_reductions (ctx, is_task_ctx (ctx) ? OMP_TASKLOOP
						        : OMP_PARALLEL,
				 gimple_omp_taskreg_clauses (ctx->stmt),
				 &tskred_ilist, &tskred_olist);
    }

  push_gimplify_context ();

  gimple_seq par_olist = NULL;
  gimple_seq par_ilist = NULL;
  gimple_seq par_rlist = NULL;
  bool phony_construct = gimple_code (stmt) == GIMPLE_OMP_PARALLEL
    && gimple_omp_parallel_grid_phony (as_a <gomp_parallel *> (stmt));
  if (phony_construct && ctx->record_type)
    {
      gcc_checking_assert (!ctx->receiver_decl);
      ctx->receiver_decl = create_tmp_var
	(build_reference_type (ctx->record_type), ".omp_rec");
    }
  lower_rec_input_clauses (clauses, &par_ilist, &par_olist, ctx, NULL);
  lower_omp (&par_body, ctx);
  if (gimple_code (stmt) == GIMPLE_OMP_PARALLEL)
    lower_reduction_clauses (clauses, &par_rlist, NULL, ctx);

  /* Declare all the variables created by mapping and the variables
     declared in the scope of the parallel body.  */
  record_vars_into (ctx->block_vars, child_fn);
  maybe_remove_omp_member_access_dummy_vars (par_bind);
  record_vars_into (gimple_bind_vars (par_bind), child_fn);

  if (ctx->record_type)
    {
      ctx->sender_decl
	= create_tmp_var (ctx->srecord_type ? ctx->srecord_type
			  : ctx->record_type, ".omp_data_o");
      DECL_NAMELESS (ctx->sender_decl) = 1;
      TREE_ADDRESSABLE (ctx->sender_decl) = 1;
      gimple_omp_taskreg_set_data_arg (stmt, ctx->sender_decl);
    }

  gimple_seq olist = NULL;
  gimple_seq ilist = NULL;
  lower_send_clauses (clauses, &ilist, &olist, ctx);
  lower_send_shared_vars (&ilist, &olist, ctx);

  if (ctx->record_type)
    {
      /* Clobber the sender record after the region so its stack slot
	 can be reused.  */
      tree clobber = build_constructor (TREE_TYPE (ctx->sender_decl), NULL);
      TREE_THIS_VOLATILE (clobber) = 1;
      gimple_seq_add_stmt (&olist, gimple_build_assign (ctx->sender_decl,
							clobber));
    }

  /* Once all the expansions are done, sequence all the different
     fragments inside gimple_omp_body.  */

  gimple_seq new_body = NULL;

  if (ctx->record_type)
    {
      t = build_fold_addr_expr_loc (loc, ctx->sender_decl);
      /* fixup_child_record_type might have changed receiver_decl's type.  */
      t = fold_convert_loc (loc, TREE_TYPE (ctx->receiver_decl), t);
      gimple_seq_add_stmt (&new_body,
			   gimple_build_assign (ctx->receiver_decl, t));
    }

  gimple_seq_add_seq (&new_body, par_ilist);
  gimple_seq_add_seq (&new_body, par_body);
  gimple_seq_add_seq (&new_body, par_rlist);
  if (ctx->cancellable)
    gimple_seq_add_stmt (&new_body, gimple_build_label (ctx->cancel_label));
  gimple_seq_add_seq (&new_body, par_olist);
  new_body = maybe_catch_exception (new_body);
  if (gimple_code (stmt) == GIMPLE_OMP_TASK)
    gimple_seq_add_stmt (&new_body,
			 gimple_build_omp_continue (integer_zero_node,
						    integer_zero_node));
  if (!phony_construct)
    {
      gimple_seq_add_stmt (&new_body, gimple_build_omp_return (false));
      gimple_omp_set_body (stmt, new_body);
    }

  if (dep_bind && gimple_bind_block (par_bind) == NULL_TREE)
    bind = gimple_build_bind (NULL, NULL, make_node (BLOCK));
  else
    bind = gimple_build_bind (NULL, NULL, gimple_bind_block (par_bind));
  gsi_replace (gsi_p, dep_bind ? dep_bind : bind, true);
  gimple_bind_add_seq (bind, ilist);
  if (!phony_construct)
    gimple_bind_add_stmt (bind, stmt);
  else
    gimple_bind_add_seq (bind, new_body);
  gimple_bind_add_seq (bind, olist);

  pop_gimplify_context (NULL);

  if (dep_bind)
    {
      /* Nest the whole thing inside the depend/task-reduction bind:
	 depend setup, reduction setup, region, reduction fini,
	 depend fini.  */
      gimple_bind_add_seq (dep_bind, dep_ilist);
      gimple_bind_add_seq (dep_bind, tskred_ilist);
      gimple_bind_add_stmt (dep_bind, bind);
      gimple_bind_add_seq (dep_bind, tskred_olist);
      gimple_bind_add_seq (dep_bind, dep_olist);
      pop_gimplify_context (dep_bind);
    }
}
10750
10751 /* Lower the GIMPLE_OMP_TARGET in the current statement
10752 in GSI_P. CTX holds context information for the directive. */
10753
10754 static void
10755 lower_omp_target (gimple_stmt_iterator *gsi_p, omp_context *ctx)
10756 {
10757 tree clauses;
10758 tree child_fn, t, c;
10759 gomp_target *stmt = as_a <gomp_target *> (gsi_stmt (*gsi_p));
10760 gbind *tgt_bind, *bind, *dep_bind = NULL;
10761 gimple_seq tgt_body, olist, ilist, fplist, new_body;
10762 location_t loc = gimple_location (stmt);
10763 bool offloaded, data_region;
10764 unsigned int map_cnt = 0;
10765
10766 offloaded = is_gimple_omp_offloaded (stmt);
10767 switch (gimple_omp_target_kind (stmt))
10768 {
10769 case GF_OMP_TARGET_KIND_REGION:
10770 case GF_OMP_TARGET_KIND_UPDATE:
10771 case GF_OMP_TARGET_KIND_ENTER_DATA:
10772 case GF_OMP_TARGET_KIND_EXIT_DATA:
10773 case GF_OMP_TARGET_KIND_OACC_PARALLEL:
10774 case GF_OMP_TARGET_KIND_OACC_KERNELS:
10775 case GF_OMP_TARGET_KIND_OACC_UPDATE:
10776 case GF_OMP_TARGET_KIND_OACC_ENTER_EXIT_DATA:
10777 case GF_OMP_TARGET_KIND_OACC_DECLARE:
10778 data_region = false;
10779 break;
10780 case GF_OMP_TARGET_KIND_DATA:
10781 case GF_OMP_TARGET_KIND_OACC_DATA:
10782 case GF_OMP_TARGET_KIND_OACC_HOST_DATA:
10783 data_region = true;
10784 break;
10785 default:
10786 gcc_unreachable ();
10787 }
10788
10789 clauses = gimple_omp_target_clauses (stmt);
10790
10791 gimple_seq dep_ilist = NULL;
10792 gimple_seq dep_olist = NULL;
10793 if (omp_find_clause (clauses, OMP_CLAUSE_DEPEND))
10794 {
10795 push_gimplify_context ();
10796 dep_bind = gimple_build_bind (NULL, NULL, make_node (BLOCK));
10797 lower_depend_clauses (gimple_omp_target_clauses_ptr (stmt),
10798 &dep_ilist, &dep_olist);
10799 }
10800
10801 tgt_bind = NULL;
10802 tgt_body = NULL;
10803 if (offloaded)
10804 {
10805 tgt_bind = gimple_seq_first_stmt_as_a_bind (gimple_omp_body (stmt));
10806 tgt_body = gimple_bind_body (tgt_bind);
10807 }
10808 else if (data_region)
10809 tgt_body = gimple_omp_body (stmt);
10810 child_fn = ctx->cb.dst_fn;
10811
10812 push_gimplify_context ();
10813 fplist = NULL;
10814
10815 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
10816 switch (OMP_CLAUSE_CODE (c))
10817 {
10818 tree var, x;
10819
10820 default:
10821 break;
10822 case OMP_CLAUSE_MAP:
10823 #if CHECKING_P
10824 /* First check what we're prepared to handle in the following. */
10825 switch (OMP_CLAUSE_MAP_KIND (c))
10826 {
10827 case GOMP_MAP_ALLOC:
10828 case GOMP_MAP_TO:
10829 case GOMP_MAP_FROM:
10830 case GOMP_MAP_TOFROM:
10831 case GOMP_MAP_POINTER:
10832 case GOMP_MAP_TO_PSET:
10833 case GOMP_MAP_DELETE:
10834 case GOMP_MAP_RELEASE:
10835 case GOMP_MAP_ALWAYS_TO:
10836 case GOMP_MAP_ALWAYS_FROM:
10837 case GOMP_MAP_ALWAYS_TOFROM:
10838 case GOMP_MAP_FIRSTPRIVATE_POINTER:
10839 case GOMP_MAP_FIRSTPRIVATE_REFERENCE:
10840 case GOMP_MAP_STRUCT:
10841 case GOMP_MAP_ALWAYS_POINTER:
10842 break;
10843 case GOMP_MAP_FORCE_ALLOC:
10844 case GOMP_MAP_FORCE_TO:
10845 case GOMP_MAP_FORCE_FROM:
10846 case GOMP_MAP_FORCE_TOFROM:
10847 case GOMP_MAP_FORCE_PRESENT:
10848 case GOMP_MAP_FORCE_DEVICEPTR:
10849 case GOMP_MAP_DEVICE_RESIDENT:
10850 case GOMP_MAP_LINK:
10851 gcc_assert (is_gimple_omp_oacc (stmt));
10852 break;
10853 default:
10854 gcc_unreachable ();
10855 }
10856 #endif
10857 /* FALLTHRU */
10858 case OMP_CLAUSE_TO:
10859 case OMP_CLAUSE_FROM:
10860 oacc_firstprivate:
10861 var = OMP_CLAUSE_DECL (c);
10862 if (!DECL_P (var))
10863 {
10864 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_MAP
10865 || (!OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c)
10866 && (OMP_CLAUSE_MAP_KIND (c)
10867 != GOMP_MAP_FIRSTPRIVATE_POINTER)))
10868 map_cnt++;
10869 continue;
10870 }
10871
10872 if (DECL_SIZE (var)
10873 && TREE_CODE (DECL_SIZE (var)) != INTEGER_CST)
10874 {
10875 tree var2 = DECL_VALUE_EXPR (var);
10876 gcc_assert (TREE_CODE (var2) == INDIRECT_REF);
10877 var2 = TREE_OPERAND (var2, 0);
10878 gcc_assert (DECL_P (var2));
10879 var = var2;
10880 }
10881
10882 if (offloaded
10883 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
10884 && (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER
10885 || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_REFERENCE))
10886 {
10887 if (TREE_CODE (TREE_TYPE (var)) == ARRAY_TYPE)
10888 {
10889 if (is_global_var (maybe_lookup_decl_in_outer_ctx (var, ctx))
10890 && varpool_node::get_create (var)->offloadable)
10891 continue;
10892
10893 tree type = build_pointer_type (TREE_TYPE (var));
10894 tree new_var = lookup_decl (var, ctx);
10895 x = create_tmp_var_raw (type, get_name (new_var));
10896 gimple_add_tmp_var (x);
10897 x = build_simple_mem_ref (x);
10898 SET_DECL_VALUE_EXPR (new_var, x);
10899 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
10900 }
10901 continue;
10902 }
10903
10904 if (!maybe_lookup_field (var, ctx))
10905 continue;
10906
10907 /* Don't remap oacc parallel reduction variables, because the
10908 intermediate result must be local to each gang. */
10909 if (offloaded && !(OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
10910 && OMP_CLAUSE_MAP_IN_REDUCTION (c)))
10911 {
10912 x = build_receiver_ref (var, true, ctx);
10913 tree new_var = lookup_decl (var, ctx);
10914
10915 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
10916 && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_POINTER
10917 && !OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c)
10918 && TREE_CODE (TREE_TYPE (var)) == ARRAY_TYPE)
10919 x = build_simple_mem_ref (x);
10920 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
10921 {
10922 gcc_assert (is_gimple_omp_oacc (ctx->stmt));
10923 if (omp_is_reference (new_var)
10924 && TREE_CODE (TREE_TYPE (new_var)) != POINTER_TYPE)
10925 {
10926 /* Create a local object to hold the instance
10927 value. */
10928 tree type = TREE_TYPE (TREE_TYPE (new_var));
10929 const char *id = IDENTIFIER_POINTER (DECL_NAME (new_var));
10930 tree inst = create_tmp_var (type, id);
10931 gimplify_assign (inst, fold_indirect_ref (x), &fplist);
10932 x = build_fold_addr_expr (inst);
10933 }
10934 gimplify_assign (new_var, x, &fplist);
10935 }
10936 else if (DECL_P (new_var))
10937 {
10938 SET_DECL_VALUE_EXPR (new_var, x);
10939 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
10940 }
10941 else
10942 gcc_unreachable ();
10943 }
10944 map_cnt++;
10945 break;
10946
10947 case OMP_CLAUSE_FIRSTPRIVATE:
10948 if (is_oacc_parallel (ctx))
10949 goto oacc_firstprivate;
10950 map_cnt++;
10951 var = OMP_CLAUSE_DECL (c);
10952 if (!omp_is_reference (var)
10953 && !is_gimple_reg_type (TREE_TYPE (var)))
10954 {
10955 tree new_var = lookup_decl (var, ctx);
10956 if (is_variable_sized (var))
10957 {
10958 tree pvar = DECL_VALUE_EXPR (var);
10959 gcc_assert (TREE_CODE (pvar) == INDIRECT_REF);
10960 pvar = TREE_OPERAND (pvar, 0);
10961 gcc_assert (DECL_P (pvar));
10962 tree new_pvar = lookup_decl (pvar, ctx);
10963 x = build_fold_indirect_ref (new_pvar);
10964 TREE_THIS_NOTRAP (x) = 1;
10965 }
10966 else
10967 x = build_receiver_ref (var, true, ctx);
10968 SET_DECL_VALUE_EXPR (new_var, x);
10969 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
10970 }
10971 break;
10972
10973 case OMP_CLAUSE_PRIVATE:
10974 if (is_gimple_omp_oacc (ctx->stmt))
10975 break;
10976 var = OMP_CLAUSE_DECL (c);
10977 if (is_variable_sized (var))
10978 {
10979 tree new_var = lookup_decl (var, ctx);
10980 tree pvar = DECL_VALUE_EXPR (var);
10981 gcc_assert (TREE_CODE (pvar) == INDIRECT_REF);
10982 pvar = TREE_OPERAND (pvar, 0);
10983 gcc_assert (DECL_P (pvar));
10984 tree new_pvar = lookup_decl (pvar, ctx);
10985 x = build_fold_indirect_ref (new_pvar);
10986 TREE_THIS_NOTRAP (x) = 1;
10987 SET_DECL_VALUE_EXPR (new_var, x);
10988 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
10989 }
10990 break;
10991
10992 case OMP_CLAUSE_USE_DEVICE_PTR:
10993 case OMP_CLAUSE_IS_DEVICE_PTR:
10994 var = OMP_CLAUSE_DECL (c);
10995 map_cnt++;
10996 if (is_variable_sized (var))
10997 {
10998 tree new_var = lookup_decl (var, ctx);
10999 tree pvar = DECL_VALUE_EXPR (var);
11000 gcc_assert (TREE_CODE (pvar) == INDIRECT_REF);
11001 pvar = TREE_OPERAND (pvar, 0);
11002 gcc_assert (DECL_P (pvar));
11003 tree new_pvar = lookup_decl (pvar, ctx);
11004 x = build_fold_indirect_ref (new_pvar);
11005 TREE_THIS_NOTRAP (x) = 1;
11006 SET_DECL_VALUE_EXPR (new_var, x);
11007 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
11008 }
11009 else if (TREE_CODE (TREE_TYPE (var)) == ARRAY_TYPE)
11010 {
11011 tree new_var = lookup_decl (var, ctx);
11012 tree type = build_pointer_type (TREE_TYPE (var));
11013 x = create_tmp_var_raw (type, get_name (new_var));
11014 gimple_add_tmp_var (x);
11015 x = build_simple_mem_ref (x);
11016 SET_DECL_VALUE_EXPR (new_var, x);
11017 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
11018 }
11019 else
11020 {
11021 tree new_var = lookup_decl (var, ctx);
11022 x = create_tmp_var_raw (TREE_TYPE (new_var), get_name (new_var));
11023 gimple_add_tmp_var (x);
11024 SET_DECL_VALUE_EXPR (new_var, x);
11025 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
11026 }
11027 break;
11028 }
11029
11030 if (offloaded)
11031 {
11032 target_nesting_level++;
11033 lower_omp (&tgt_body, ctx);
11034 target_nesting_level--;
11035 }
11036 else if (data_region)
11037 lower_omp (&tgt_body, ctx);
11038
11039 if (offloaded)
11040 {
11041 /* Declare all the variables created by mapping and the variables
11042 declared in the scope of the target body. */
11043 record_vars_into (ctx->block_vars, child_fn);
11044 maybe_remove_omp_member_access_dummy_vars (tgt_bind);
11045 record_vars_into (gimple_bind_vars (tgt_bind), child_fn);
11046 }
11047
11048 olist = NULL;
11049 ilist = NULL;
11050 if (ctx->record_type)
11051 {
11052 ctx->sender_decl
11053 = create_tmp_var (ctx->record_type, ".omp_data_arr");
11054 DECL_NAMELESS (ctx->sender_decl) = 1;
11055 TREE_ADDRESSABLE (ctx->sender_decl) = 1;
11056 t = make_tree_vec (3);
11057 TREE_VEC_ELT (t, 0) = ctx->sender_decl;
11058 TREE_VEC_ELT (t, 1)
11059 = create_tmp_var (build_array_type_nelts (size_type_node, map_cnt),
11060 ".omp_data_sizes");
11061 DECL_NAMELESS (TREE_VEC_ELT (t, 1)) = 1;
11062 TREE_ADDRESSABLE (TREE_VEC_ELT (t, 1)) = 1;
11063 TREE_STATIC (TREE_VEC_ELT (t, 1)) = 1;
11064 tree tkind_type = short_unsigned_type_node;
11065 int talign_shift = 8;
11066 TREE_VEC_ELT (t, 2)
11067 = create_tmp_var (build_array_type_nelts (tkind_type, map_cnt),
11068 ".omp_data_kinds");
11069 DECL_NAMELESS (TREE_VEC_ELT (t, 2)) = 1;
11070 TREE_ADDRESSABLE (TREE_VEC_ELT (t, 2)) = 1;
11071 TREE_STATIC (TREE_VEC_ELT (t, 2)) = 1;
11072 gimple_omp_target_set_data_arg (stmt, t);
11073
11074 vec<constructor_elt, va_gc> *vsize;
11075 vec<constructor_elt, va_gc> *vkind;
11076 vec_alloc (vsize, map_cnt);
11077 vec_alloc (vkind, map_cnt);
11078 unsigned int map_idx = 0;
11079
11080 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
11081 switch (OMP_CLAUSE_CODE (c))
11082 {
11083 tree ovar, nc, s, purpose, var, x, type;
11084 unsigned int talign;
11085
11086 default:
11087 break;
11088
11089 case OMP_CLAUSE_MAP:
11090 case OMP_CLAUSE_TO:
11091 case OMP_CLAUSE_FROM:
11092 oacc_firstprivate_map:
11093 nc = c;
11094 ovar = OMP_CLAUSE_DECL (c);
11095 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
11096 && (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER
11097 || (OMP_CLAUSE_MAP_KIND (c)
11098 == GOMP_MAP_FIRSTPRIVATE_REFERENCE)))
11099 break;
11100 if (!DECL_P (ovar))
11101 {
11102 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
11103 && OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c))
11104 {
11105 gcc_checking_assert (OMP_CLAUSE_DECL (OMP_CLAUSE_CHAIN (c))
11106 == get_base_address (ovar));
11107 nc = OMP_CLAUSE_CHAIN (c);
11108 ovar = OMP_CLAUSE_DECL (nc);
11109 }
11110 else
11111 {
11112 tree x = build_sender_ref (ovar, ctx);
11113 tree v
11114 = build_fold_addr_expr_with_type (ovar, ptr_type_node);
11115 gimplify_assign (x, v, &ilist);
11116 nc = NULL_TREE;
11117 }
11118 }
11119 else
11120 {
11121 if (DECL_SIZE (ovar)
11122 && TREE_CODE (DECL_SIZE (ovar)) != INTEGER_CST)
11123 {
11124 tree ovar2 = DECL_VALUE_EXPR (ovar);
11125 gcc_assert (TREE_CODE (ovar2) == INDIRECT_REF);
11126 ovar2 = TREE_OPERAND (ovar2, 0);
11127 gcc_assert (DECL_P (ovar2));
11128 ovar = ovar2;
11129 }
11130 if (!maybe_lookup_field (ovar, ctx))
11131 continue;
11132 }
11133
11134 talign = TYPE_ALIGN_UNIT (TREE_TYPE (ovar));
11135 if (DECL_P (ovar) && DECL_ALIGN_UNIT (ovar) > talign)
11136 talign = DECL_ALIGN_UNIT (ovar);
11137 if (nc)
11138 {
11139 var = lookup_decl_in_outer_ctx (ovar, ctx);
11140 x = build_sender_ref (ovar, ctx);
11141
11142 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
11143 && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_POINTER
11144 && !OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c)
11145 && TREE_CODE (TREE_TYPE (ovar)) == ARRAY_TYPE)
11146 {
11147 gcc_assert (offloaded);
11148 tree avar
11149 = create_tmp_var (TREE_TYPE (TREE_TYPE (x)));
11150 mark_addressable (avar);
11151 gimplify_assign (avar, build_fold_addr_expr (var), &ilist);
11152 talign = DECL_ALIGN_UNIT (avar);
11153 avar = build_fold_addr_expr (avar);
11154 gimplify_assign (x, avar, &ilist);
11155 }
11156 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
11157 {
11158 gcc_assert (is_gimple_omp_oacc (ctx->stmt));
11159 if (!omp_is_reference (var))
11160 {
11161 if (is_gimple_reg (var)
11162 && OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (c))
11163 TREE_NO_WARNING (var) = 1;
11164 var = build_fold_addr_expr (var);
11165 }
11166 else
11167 talign = TYPE_ALIGN_UNIT (TREE_TYPE (TREE_TYPE (ovar)));
11168 gimplify_assign (x, var, &ilist);
11169 }
11170 else if (is_gimple_reg (var))
11171 {
11172 gcc_assert (offloaded);
11173 tree avar = create_tmp_var (TREE_TYPE (var));
11174 mark_addressable (avar);
11175 enum gomp_map_kind map_kind = OMP_CLAUSE_MAP_KIND (c);
11176 if (GOMP_MAP_COPY_TO_P (map_kind)
11177 || map_kind == GOMP_MAP_POINTER
11178 || map_kind == GOMP_MAP_TO_PSET
11179 || map_kind == GOMP_MAP_FORCE_DEVICEPTR)
11180 {
11181 /* If we need to initialize a temporary
11182 with VAR because it is not addressable, and
11183 the variable hasn't been initialized yet, then
11184 we'll get a warning for the store to avar.
11185 Don't warn in that case, the mapping might
11186 be implicit. */
11187 TREE_NO_WARNING (var) = 1;
11188 gimplify_assign (avar, var, &ilist);
11189 }
11190 avar = build_fold_addr_expr (avar);
11191 gimplify_assign (x, avar, &ilist);
11192 if ((GOMP_MAP_COPY_FROM_P (map_kind)
11193 || map_kind == GOMP_MAP_FORCE_DEVICEPTR)
11194 && !TYPE_READONLY (TREE_TYPE (var)))
11195 {
11196 x = unshare_expr (x);
11197 x = build_simple_mem_ref (x);
11198 gimplify_assign (var, x, &olist);
11199 }
11200 }
11201 else
11202 {
11203 var = build_fold_addr_expr (var);
11204 gimplify_assign (x, var, &ilist);
11205 }
11206 }
11207 s = NULL_TREE;
11208 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
11209 {
11210 gcc_checking_assert (is_gimple_omp_oacc (ctx->stmt));
11211 s = TREE_TYPE (ovar);
11212 if (TREE_CODE (s) == REFERENCE_TYPE)
11213 s = TREE_TYPE (s);
11214 s = TYPE_SIZE_UNIT (s);
11215 }
11216 else
11217 s = OMP_CLAUSE_SIZE (c);
11218 if (s == NULL_TREE)
11219 s = TYPE_SIZE_UNIT (TREE_TYPE (ovar));
11220 s = fold_convert (size_type_node, s);
11221 purpose = size_int (map_idx++);
11222 CONSTRUCTOR_APPEND_ELT (vsize, purpose, s);
11223 if (TREE_CODE (s) != INTEGER_CST)
11224 TREE_STATIC (TREE_VEC_ELT (t, 1)) = 0;
11225
11226 unsigned HOST_WIDE_INT tkind, tkind_zero;
11227 switch (OMP_CLAUSE_CODE (c))
11228 {
11229 case OMP_CLAUSE_MAP:
11230 tkind = OMP_CLAUSE_MAP_KIND (c);
11231 tkind_zero = tkind;
11232 if (OMP_CLAUSE_MAP_MAYBE_ZERO_LENGTH_ARRAY_SECTION (c))
11233 switch (tkind)
11234 {
11235 case GOMP_MAP_ALLOC:
11236 case GOMP_MAP_TO:
11237 case GOMP_MAP_FROM:
11238 case GOMP_MAP_TOFROM:
11239 case GOMP_MAP_ALWAYS_TO:
11240 case GOMP_MAP_ALWAYS_FROM:
11241 case GOMP_MAP_ALWAYS_TOFROM:
11242 case GOMP_MAP_RELEASE:
11243 case GOMP_MAP_FORCE_TO:
11244 case GOMP_MAP_FORCE_FROM:
11245 case GOMP_MAP_FORCE_TOFROM:
11246 case GOMP_MAP_FORCE_PRESENT:
11247 tkind_zero = GOMP_MAP_ZERO_LEN_ARRAY_SECTION;
11248 break;
11249 case GOMP_MAP_DELETE:
11250 tkind_zero = GOMP_MAP_DELETE_ZERO_LEN_ARRAY_SECTION;
11251 default:
11252 break;
11253 }
11254 if (tkind_zero != tkind)
11255 {
11256 if (integer_zerop (s))
11257 tkind = tkind_zero;
11258 else if (integer_nonzerop (s))
11259 tkind_zero = tkind;
11260 }
11261 break;
11262 case OMP_CLAUSE_FIRSTPRIVATE:
11263 gcc_checking_assert (is_gimple_omp_oacc (ctx->stmt));
11264 tkind = GOMP_MAP_TO;
11265 tkind_zero = tkind;
11266 break;
11267 case OMP_CLAUSE_TO:
11268 tkind = GOMP_MAP_TO;
11269 tkind_zero = tkind;
11270 break;
11271 case OMP_CLAUSE_FROM:
11272 tkind = GOMP_MAP_FROM;
11273 tkind_zero = tkind;
11274 break;
11275 default:
11276 gcc_unreachable ();
11277 }
11278 gcc_checking_assert (tkind
11279 < (HOST_WIDE_INT_C (1U) << talign_shift));
11280 gcc_checking_assert (tkind_zero
11281 < (HOST_WIDE_INT_C (1U) << talign_shift));
11282 talign = ceil_log2 (talign);
11283 tkind |= talign << talign_shift;
11284 tkind_zero |= talign << talign_shift;
11285 gcc_checking_assert (tkind
11286 <= tree_to_uhwi (TYPE_MAX_VALUE (tkind_type)));
11287 gcc_checking_assert (tkind_zero
11288 <= tree_to_uhwi (TYPE_MAX_VALUE (tkind_type)));
11289 if (tkind == tkind_zero)
11290 x = build_int_cstu (tkind_type, tkind);
11291 else
11292 {
11293 TREE_STATIC (TREE_VEC_ELT (t, 2)) = 0;
11294 x = build3 (COND_EXPR, tkind_type,
11295 fold_build2 (EQ_EXPR, boolean_type_node,
11296 unshare_expr (s), size_zero_node),
11297 build_int_cstu (tkind_type, tkind_zero),
11298 build_int_cstu (tkind_type, tkind));
11299 }
11300 CONSTRUCTOR_APPEND_ELT (vkind, purpose, x);
11301 if (nc && nc != c)
11302 c = nc;
11303 break;
11304
11305 case OMP_CLAUSE_FIRSTPRIVATE:
11306 if (is_oacc_parallel (ctx))
11307 goto oacc_firstprivate_map;
11308 ovar = OMP_CLAUSE_DECL (c);
11309 if (omp_is_reference (ovar))
11310 talign = TYPE_ALIGN_UNIT (TREE_TYPE (TREE_TYPE (ovar)));
11311 else
11312 talign = DECL_ALIGN_UNIT (ovar);
11313 var = lookup_decl_in_outer_ctx (ovar, ctx);
11314 x = build_sender_ref (ovar, ctx);
11315 tkind = GOMP_MAP_FIRSTPRIVATE;
11316 type = TREE_TYPE (ovar);
11317 if (omp_is_reference (ovar))
11318 type = TREE_TYPE (type);
11319 if ((INTEGRAL_TYPE_P (type)
11320 && TYPE_PRECISION (type) <= POINTER_SIZE)
11321 || TREE_CODE (type) == POINTER_TYPE)
11322 {
11323 tkind = GOMP_MAP_FIRSTPRIVATE_INT;
11324 tree t = var;
11325 if (omp_is_reference (var))
11326 t = build_simple_mem_ref (var);
11327 else if (OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (c))
11328 TREE_NO_WARNING (var) = 1;
11329 if (TREE_CODE (type) != POINTER_TYPE)
11330 t = fold_convert (pointer_sized_int_node, t);
11331 t = fold_convert (TREE_TYPE (x), t);
11332 gimplify_assign (x, t, &ilist);
11333 }
11334 else if (omp_is_reference (var))
11335 gimplify_assign (x, var, &ilist);
11336 else if (is_gimple_reg (var))
11337 {
11338 tree avar = create_tmp_var (TREE_TYPE (var));
11339 mark_addressable (avar);
11340 if (OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (c))
11341 TREE_NO_WARNING (var) = 1;
11342 gimplify_assign (avar, var, &ilist);
11343 avar = build_fold_addr_expr (avar);
11344 gimplify_assign (x, avar, &ilist);
11345 }
11346 else
11347 {
11348 var = build_fold_addr_expr (var);
11349 gimplify_assign (x, var, &ilist);
11350 }
11351 if (tkind == GOMP_MAP_FIRSTPRIVATE_INT)
11352 s = size_int (0);
11353 else if (omp_is_reference (ovar))
11354 s = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ovar)));
11355 else
11356 s = TYPE_SIZE_UNIT (TREE_TYPE (ovar));
11357 s = fold_convert (size_type_node, s);
11358 purpose = size_int (map_idx++);
11359 CONSTRUCTOR_APPEND_ELT (vsize, purpose, s);
11360 if (TREE_CODE (s) != INTEGER_CST)
11361 TREE_STATIC (TREE_VEC_ELT (t, 1)) = 0;
11362
11363 gcc_checking_assert (tkind
11364 < (HOST_WIDE_INT_C (1U) << talign_shift));
11365 talign = ceil_log2 (talign);
11366 tkind |= talign << talign_shift;
11367 gcc_checking_assert (tkind
11368 <= tree_to_uhwi (TYPE_MAX_VALUE (tkind_type)));
11369 CONSTRUCTOR_APPEND_ELT (vkind, purpose,
11370 build_int_cstu (tkind_type, tkind));
11371 break;
11372
11373 case OMP_CLAUSE_USE_DEVICE_PTR:
11374 case OMP_CLAUSE_IS_DEVICE_PTR:
11375 ovar = OMP_CLAUSE_DECL (c);
11376 var = lookup_decl_in_outer_ctx (ovar, ctx);
11377 x = build_sender_ref (ovar, ctx);
11378 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_USE_DEVICE_PTR)
11379 tkind = GOMP_MAP_USE_DEVICE_PTR;
11380 else
11381 tkind = GOMP_MAP_FIRSTPRIVATE_INT;
11382 type = TREE_TYPE (ovar);
11383 if (TREE_CODE (type) == ARRAY_TYPE)
11384 var = build_fold_addr_expr (var);
11385 else
11386 {
11387 if (omp_is_reference (ovar))
11388 {
11389 type = TREE_TYPE (type);
11390 if (TREE_CODE (type) != ARRAY_TYPE)
11391 var = build_simple_mem_ref (var);
11392 var = fold_convert (TREE_TYPE (x), var);
11393 }
11394 }
11395 gimplify_assign (x, var, &ilist);
11396 s = size_int (0);
11397 purpose = size_int (map_idx++);
11398 CONSTRUCTOR_APPEND_ELT (vsize, purpose, s);
11399 gcc_checking_assert (tkind
11400 < (HOST_WIDE_INT_C (1U) << talign_shift));
11401 gcc_checking_assert (tkind
11402 <= tree_to_uhwi (TYPE_MAX_VALUE (tkind_type)));
11403 CONSTRUCTOR_APPEND_ELT (vkind, purpose,
11404 build_int_cstu (tkind_type, tkind));
11405 break;
11406 }
11407
11408 gcc_assert (map_idx == map_cnt);
11409
11410 DECL_INITIAL (TREE_VEC_ELT (t, 1))
11411 = build_constructor (TREE_TYPE (TREE_VEC_ELT (t, 1)), vsize);
11412 DECL_INITIAL (TREE_VEC_ELT (t, 2))
11413 = build_constructor (TREE_TYPE (TREE_VEC_ELT (t, 2)), vkind);
11414 for (int i = 1; i <= 2; i++)
11415 if (!TREE_STATIC (TREE_VEC_ELT (t, i)))
11416 {
11417 gimple_seq initlist = NULL;
11418 force_gimple_operand (build1 (DECL_EXPR, void_type_node,
11419 TREE_VEC_ELT (t, i)),
11420 &initlist, true, NULL_TREE);
11421 gimple_seq_add_seq (&ilist, initlist);
11422
11423 tree clobber = build_constructor (TREE_TYPE (TREE_VEC_ELT (t, i)),
11424 NULL);
11425 TREE_THIS_VOLATILE (clobber) = 1;
11426 gimple_seq_add_stmt (&olist,
11427 gimple_build_assign (TREE_VEC_ELT (t, i),
11428 clobber));
11429 }
11430
11431 tree clobber = build_constructor (ctx->record_type, NULL);
11432 TREE_THIS_VOLATILE (clobber) = 1;
11433 gimple_seq_add_stmt (&olist, gimple_build_assign (ctx->sender_decl,
11434 clobber));
11435 }
11436
11437 /* Once all the expansions are done, sequence all the different
11438 fragments inside gimple_omp_body. */
11439
11440 new_body = NULL;
11441
11442 if (offloaded
11443 && ctx->record_type)
11444 {
11445 t = build_fold_addr_expr_loc (loc, ctx->sender_decl);
11446 /* fixup_child_record_type might have changed receiver_decl's type. */
11447 t = fold_convert_loc (loc, TREE_TYPE (ctx->receiver_decl), t);
11448 gimple_seq_add_stmt (&new_body,
11449 gimple_build_assign (ctx->receiver_decl, t));
11450 }
11451 gimple_seq_add_seq (&new_body, fplist);
11452
11453 if (offloaded || data_region)
11454 {
11455 tree prev = NULL_TREE;
11456 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
11457 switch (OMP_CLAUSE_CODE (c))
11458 {
11459 tree var, x;
11460 default:
11461 break;
11462 case OMP_CLAUSE_FIRSTPRIVATE:
11463 if (is_gimple_omp_oacc (ctx->stmt))
11464 break;
11465 var = OMP_CLAUSE_DECL (c);
11466 if (omp_is_reference (var)
11467 || is_gimple_reg_type (TREE_TYPE (var)))
11468 {
11469 tree new_var = lookup_decl (var, ctx);
11470 tree type;
11471 type = TREE_TYPE (var);
11472 if (omp_is_reference (var))
11473 type = TREE_TYPE (type);
11474 if ((INTEGRAL_TYPE_P (type)
11475 && TYPE_PRECISION (type) <= POINTER_SIZE)
11476 || TREE_CODE (type) == POINTER_TYPE)
11477 {
11478 x = build_receiver_ref (var, false, ctx);
11479 if (TREE_CODE (type) != POINTER_TYPE)
11480 x = fold_convert (pointer_sized_int_node, x);
11481 x = fold_convert (type, x);
11482 gimplify_expr (&x, &new_body, NULL, is_gimple_val,
11483 fb_rvalue);
11484 if (omp_is_reference (var))
11485 {
11486 tree v = create_tmp_var_raw (type, get_name (var));
11487 gimple_add_tmp_var (v);
11488 TREE_ADDRESSABLE (v) = 1;
11489 gimple_seq_add_stmt (&new_body,
11490 gimple_build_assign (v, x));
11491 x = build_fold_addr_expr (v);
11492 }
11493 gimple_seq_add_stmt (&new_body,
11494 gimple_build_assign (new_var, x));
11495 }
11496 else
11497 {
11498 x = build_receiver_ref (var, !omp_is_reference (var), ctx);
11499 gimplify_expr (&x, &new_body, NULL, is_gimple_val,
11500 fb_rvalue);
11501 gimple_seq_add_stmt (&new_body,
11502 gimple_build_assign (new_var, x));
11503 }
11504 }
11505 else if (is_variable_sized (var))
11506 {
11507 tree pvar = DECL_VALUE_EXPR (var);
11508 gcc_assert (TREE_CODE (pvar) == INDIRECT_REF);
11509 pvar = TREE_OPERAND (pvar, 0);
11510 gcc_assert (DECL_P (pvar));
11511 tree new_var = lookup_decl (pvar, ctx);
11512 x = build_receiver_ref (var, false, ctx);
11513 gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
11514 gimple_seq_add_stmt (&new_body,
11515 gimple_build_assign (new_var, x));
11516 }
11517 break;
11518 case OMP_CLAUSE_PRIVATE:
11519 if (is_gimple_omp_oacc (ctx->stmt))
11520 break;
11521 var = OMP_CLAUSE_DECL (c);
11522 if (omp_is_reference (var))
11523 {
11524 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
11525 tree new_var = lookup_decl (var, ctx);
11526 x = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (new_var)));
11527 if (TREE_CONSTANT (x))
11528 {
11529 x = create_tmp_var_raw (TREE_TYPE (TREE_TYPE (new_var)),
11530 get_name (var));
11531 gimple_add_tmp_var (x);
11532 TREE_ADDRESSABLE (x) = 1;
11533 x = build_fold_addr_expr_loc (clause_loc, x);
11534 }
11535 else
11536 break;
11537
11538 x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x);
11539 gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
11540 gimple_seq_add_stmt (&new_body,
11541 gimple_build_assign (new_var, x));
11542 }
11543 break;
11544 case OMP_CLAUSE_USE_DEVICE_PTR:
11545 case OMP_CLAUSE_IS_DEVICE_PTR:
11546 var = OMP_CLAUSE_DECL (c);
11547 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_USE_DEVICE_PTR)
11548 x = build_sender_ref (var, ctx);
11549 else
11550 x = build_receiver_ref (var, false, ctx);
11551 if (is_variable_sized (var))
11552 {
11553 tree pvar = DECL_VALUE_EXPR (var);
11554 gcc_assert (TREE_CODE (pvar) == INDIRECT_REF);
11555 pvar = TREE_OPERAND (pvar, 0);
11556 gcc_assert (DECL_P (pvar));
11557 tree new_var = lookup_decl (pvar, ctx);
11558 gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
11559 gimple_seq_add_stmt (&new_body,
11560 gimple_build_assign (new_var, x));
11561 }
11562 else if (TREE_CODE (TREE_TYPE (var)) == ARRAY_TYPE)
11563 {
11564 tree new_var = lookup_decl (var, ctx);
11565 new_var = DECL_VALUE_EXPR (new_var);
11566 gcc_assert (TREE_CODE (new_var) == MEM_REF);
11567 new_var = TREE_OPERAND (new_var, 0);
11568 gcc_assert (DECL_P (new_var));
11569 gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
11570 gimple_seq_add_stmt (&new_body,
11571 gimple_build_assign (new_var, x));
11572 }
11573 else
11574 {
11575 tree type = TREE_TYPE (var);
11576 tree new_var = lookup_decl (var, ctx);
11577 if (omp_is_reference (var))
11578 {
11579 type = TREE_TYPE (type);
11580 if (TREE_CODE (type) != ARRAY_TYPE)
11581 {
11582 tree v = create_tmp_var_raw (type, get_name (var));
11583 gimple_add_tmp_var (v);
11584 TREE_ADDRESSABLE (v) = 1;
11585 x = fold_convert (type, x);
11586 gimplify_expr (&x, &new_body, NULL, is_gimple_val,
11587 fb_rvalue);
11588 gimple_seq_add_stmt (&new_body,
11589 gimple_build_assign (v, x));
11590 x = build_fold_addr_expr (v);
11591 }
11592 }
11593 new_var = DECL_VALUE_EXPR (new_var);
11594 x = fold_convert (TREE_TYPE (new_var), x);
11595 gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
11596 gimple_seq_add_stmt (&new_body,
11597 gimple_build_assign (new_var, x));
11598 }
11599 break;
11600 }
11601 /* Handle GOMP_MAP_FIRSTPRIVATE_{POINTER,REFERENCE} in second pass,
11602 so that firstprivate vars holding OMP_CLAUSE_SIZE if needed
11603 are already handled. Similarly OMP_CLAUSE_PRIVATE for VLAs
11604 or references to VLAs. */
11605 for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
11606 switch (OMP_CLAUSE_CODE (c))
11607 {
11608 tree var;
11609 default:
11610 break;
11611 case OMP_CLAUSE_MAP:
11612 if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER
11613 || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_REFERENCE)
11614 {
11615 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
11616 poly_int64 offset = 0;
11617 gcc_assert (prev);
11618 var = OMP_CLAUSE_DECL (c);
11619 if (DECL_P (var)
11620 && TREE_CODE (TREE_TYPE (var)) == ARRAY_TYPE
11621 && is_global_var (maybe_lookup_decl_in_outer_ctx (var,
11622 ctx))
11623 && varpool_node::get_create (var)->offloadable)
11624 break;
11625 if (TREE_CODE (var) == INDIRECT_REF
11626 && TREE_CODE (TREE_OPERAND (var, 0)) == COMPONENT_REF)
11627 var = TREE_OPERAND (var, 0);
11628 if (TREE_CODE (var) == COMPONENT_REF)
11629 {
11630 var = get_addr_base_and_unit_offset (var, &offset);
11631 gcc_assert (var != NULL_TREE && DECL_P (var));
11632 }
11633 else if (DECL_SIZE (var)
11634 && TREE_CODE (DECL_SIZE (var)) != INTEGER_CST)
11635 {
11636 tree var2 = DECL_VALUE_EXPR (var);
11637 gcc_assert (TREE_CODE (var2) == INDIRECT_REF);
11638 var2 = TREE_OPERAND (var2, 0);
11639 gcc_assert (DECL_P (var2));
11640 var = var2;
11641 }
11642 tree new_var = lookup_decl (var, ctx), x;
11643 tree type = TREE_TYPE (new_var);
11644 bool is_ref;
11645 if (TREE_CODE (OMP_CLAUSE_DECL (c)) == INDIRECT_REF
11646 && (TREE_CODE (TREE_OPERAND (OMP_CLAUSE_DECL (c), 0))
11647 == COMPONENT_REF))
11648 {
11649 type = TREE_TYPE (TREE_OPERAND (OMP_CLAUSE_DECL (c), 0));
11650 is_ref = true;
11651 new_var = build2 (MEM_REF, type,
11652 build_fold_addr_expr (new_var),
11653 build_int_cst (build_pointer_type (type),
11654 offset));
11655 }
11656 else if (TREE_CODE (OMP_CLAUSE_DECL (c)) == COMPONENT_REF)
11657 {
11658 type = TREE_TYPE (OMP_CLAUSE_DECL (c));
11659 is_ref = TREE_CODE (type) == REFERENCE_TYPE;
11660 new_var = build2 (MEM_REF, type,
11661 build_fold_addr_expr (new_var),
11662 build_int_cst (build_pointer_type (type),
11663 offset));
11664 }
11665 else
11666 is_ref = omp_is_reference (var);
11667 if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_REFERENCE)
11668 is_ref = false;
11669 bool ref_to_array = false;
11670 if (is_ref)
11671 {
11672 type = TREE_TYPE (type);
11673 if (TREE_CODE (type) == ARRAY_TYPE)
11674 {
11675 type = build_pointer_type (type);
11676 ref_to_array = true;
11677 }
11678 }
11679 else if (TREE_CODE (type) == ARRAY_TYPE)
11680 {
11681 tree decl2 = DECL_VALUE_EXPR (new_var);
11682 gcc_assert (TREE_CODE (decl2) == MEM_REF);
11683 decl2 = TREE_OPERAND (decl2, 0);
11684 gcc_assert (DECL_P (decl2));
11685 new_var = decl2;
11686 type = TREE_TYPE (new_var);
11687 }
11688 x = build_receiver_ref (OMP_CLAUSE_DECL (prev), false, ctx);
11689 x = fold_convert_loc (clause_loc, type, x);
11690 if (!integer_zerop (OMP_CLAUSE_SIZE (c)))
11691 {
11692 tree bias = OMP_CLAUSE_SIZE (c);
11693 if (DECL_P (bias))
11694 bias = lookup_decl (bias, ctx);
11695 bias = fold_convert_loc (clause_loc, sizetype, bias);
11696 bias = fold_build1_loc (clause_loc, NEGATE_EXPR, sizetype,
11697 bias);
11698 x = fold_build2_loc (clause_loc, POINTER_PLUS_EXPR,
11699 TREE_TYPE (x), x, bias);
11700 }
11701 if (ref_to_array)
11702 x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x);
11703 gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
11704 if (is_ref && !ref_to_array)
11705 {
11706 tree t = create_tmp_var_raw (type, get_name (var));
11707 gimple_add_tmp_var (t);
11708 TREE_ADDRESSABLE (t) = 1;
11709 gimple_seq_add_stmt (&new_body,
11710 gimple_build_assign (t, x));
11711 x = build_fold_addr_expr_loc (clause_loc, t);
11712 }
11713 gimple_seq_add_stmt (&new_body,
11714 gimple_build_assign (new_var, x));
11715 prev = NULL_TREE;
11716 }
11717 else if (OMP_CLAUSE_CHAIN (c)
11718 && OMP_CLAUSE_CODE (OMP_CLAUSE_CHAIN (c))
11719 == OMP_CLAUSE_MAP
11720 && (OMP_CLAUSE_MAP_KIND (OMP_CLAUSE_CHAIN (c))
11721 == GOMP_MAP_FIRSTPRIVATE_POINTER
11722 || (OMP_CLAUSE_MAP_KIND (OMP_CLAUSE_CHAIN (c))
11723 == GOMP_MAP_FIRSTPRIVATE_REFERENCE)))
11724 prev = c;
11725 break;
11726 case OMP_CLAUSE_PRIVATE:
11727 var = OMP_CLAUSE_DECL (c);
11728 if (is_variable_sized (var))
11729 {
11730 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
11731 tree new_var = lookup_decl (var, ctx);
11732 tree pvar = DECL_VALUE_EXPR (var);
11733 gcc_assert (TREE_CODE (pvar) == INDIRECT_REF);
11734 pvar = TREE_OPERAND (pvar, 0);
11735 gcc_assert (DECL_P (pvar));
11736 tree new_pvar = lookup_decl (pvar, ctx);
11737 tree atmp = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
11738 tree al = size_int (DECL_ALIGN (var));
11739 tree x = TYPE_SIZE_UNIT (TREE_TYPE (new_var));
11740 x = build_call_expr_loc (clause_loc, atmp, 2, x, al);
11741 x = fold_convert_loc (clause_loc, TREE_TYPE (new_pvar), x);
11742 gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
11743 gimple_seq_add_stmt (&new_body,
11744 gimple_build_assign (new_pvar, x));
11745 }
11746 else if (omp_is_reference (var) && !is_gimple_omp_oacc (ctx->stmt))
11747 {
11748 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
11749 tree new_var = lookup_decl (var, ctx);
11750 tree x = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (new_var)));
11751 if (TREE_CONSTANT (x))
11752 break;
11753 else
11754 {
11755 tree atmp
11756 = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
11757 tree rtype = TREE_TYPE (TREE_TYPE (new_var));
11758 tree al = size_int (TYPE_ALIGN (rtype));
11759 x = build_call_expr_loc (clause_loc, atmp, 2, x, al);
11760 }
11761
11762 x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x);
11763 gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
11764 gimple_seq_add_stmt (&new_body,
11765 gimple_build_assign (new_var, x));
11766 }
11767 break;
11768 }
11769
11770 gimple_seq fork_seq = NULL;
11771 gimple_seq join_seq = NULL;
11772
11773 if (is_oacc_parallel (ctx))
11774 {
11775 /* If there are reductions on the offloaded region itself, treat
11776 them as a dummy GANG loop. */
11777 tree level = build_int_cst (integer_type_node, GOMP_DIM_GANG);
11778
11779 lower_oacc_reductions (gimple_location (ctx->stmt), clauses, level,
11780 false, NULL, NULL, &fork_seq, &join_seq, ctx);
11781 }
11782
11783 gimple_seq_add_seq (&new_body, fork_seq);
11784 gimple_seq_add_seq (&new_body, tgt_body);
11785 gimple_seq_add_seq (&new_body, join_seq);
11786
11787 if (offloaded)
11788 new_body = maybe_catch_exception (new_body);
11789
11790 gimple_seq_add_stmt (&new_body, gimple_build_omp_return (false));
11791 gimple_omp_set_body (stmt, new_body);
11792 }
11793
11794 bind = gimple_build_bind (NULL, NULL,
11795 tgt_bind ? gimple_bind_block (tgt_bind)
11796 : NULL_TREE);
11797 gsi_replace (gsi_p, dep_bind ? dep_bind : bind, true);
11798 gimple_bind_add_seq (bind, ilist);
11799 gimple_bind_add_stmt (bind, stmt);
11800 gimple_bind_add_seq (bind, olist);
11801
11802 pop_gimplify_context (NULL);
11803
11804 if (dep_bind)
11805 {
11806 gimple_bind_add_seq (dep_bind, dep_ilist);
11807 gimple_bind_add_stmt (dep_bind, bind);
11808 gimple_bind_add_seq (dep_bind, dep_olist);
11809 pop_gimplify_context (dep_bind);
11810 }
11811 }
11812
/* Expand code for an OpenMP teams directive.  Replaces the
   GIMPLE_OMP_TEAMS statement at *GSI_P with a GIMPLE_BIND that
   gimplifies the num_teams/thread_limit clause expressions, lowers the
   clauses and body under context CTX, and emits a GOMP_teams runtime
   call unless the construct is a gridified phony one.  */

static void
lower_omp_teams (gimple_stmt_iterator *gsi_p, omp_context *ctx)
{
  gomp_teams *teams_stmt = as_a <gomp_teams *> (gsi_stmt (*gsi_p));
  push_gimplify_context ();

  tree block = make_node (BLOCK);
  gbind *bind = gimple_build_bind (NULL, NULL, block);
  gsi_replace (gsi_p, bind, true);
  gimple_seq bind_body = NULL;
  gimple_seq dlist = NULL;
  gimple_seq olist = NULL;

  /* Evaluate the num_teams clause expression up front; a zero argument
     leaves the choice to the runtime.  */
  tree num_teams = omp_find_clause (gimple_omp_teams_clauses (teams_stmt),
				    OMP_CLAUSE_NUM_TEAMS);
  if (num_teams == NULL_TREE)
    num_teams = build_int_cst (unsigned_type_node, 0);
  else
    {
      num_teams = OMP_CLAUSE_NUM_TEAMS_EXPR (num_teams);
      num_teams = fold_convert (unsigned_type_node, num_teams);
      gimplify_expr (&num_teams, &bind_body, NULL, is_gimple_val, fb_rvalue);
    }
  /* Likewise for thread_limit; again zero means no explicit limit.  */
  tree thread_limit = omp_find_clause (gimple_omp_teams_clauses (teams_stmt),
				       OMP_CLAUSE_THREAD_LIMIT);
  if (thread_limit == NULL_TREE)
    thread_limit = build_int_cst (unsigned_type_node, 0);
  else
    {
      thread_limit = OMP_CLAUSE_THREAD_LIMIT_EXPR (thread_limit);
      thread_limit = fold_convert (unsigned_type_node, thread_limit);
      gimplify_expr (&thread_limit, &bind_body, NULL, is_gimple_val,
		     fb_rvalue);
    }

  /* Lower data-sharing clauses, the body, and reduction clauses.  */
  lower_rec_input_clauses (gimple_omp_teams_clauses (teams_stmt),
			   &bind_body, &dlist, ctx, NULL);
  lower_omp (gimple_omp_body_ptr (teams_stmt), ctx);
  lower_reduction_clauses (gimple_omp_teams_clauses (teams_stmt), &olist,
			   NULL, ctx);
  if (!gimple_omp_teams_grid_phony (teams_stmt))
    {
      /* Emit the teams statement followed by the GOMP_teams call that
	 communicates num_teams/thread_limit to the runtime.  */
      gimple_seq_add_stmt (&bind_body, teams_stmt);
      location_t loc = gimple_location (teams_stmt);
      tree decl = builtin_decl_explicit (BUILT_IN_GOMP_TEAMS);
      gimple *call = gimple_build_call (decl, 2, num_teams, thread_limit);
      gimple_set_location (call, loc);
      gimple_seq_add_stmt (&bind_body, call);
    }

  gimple_seq_add_seq (&bind_body, gimple_omp_body (teams_stmt));
  gimple_omp_set_body (teams_stmt, NULL);
  gimple_seq_add_seq (&bind_body, olist);
  gimple_seq_add_seq (&bind_body, dlist);
  if (!gimple_omp_teams_grid_phony (teams_stmt))
    gimple_seq_add_stmt (&bind_body, gimple_build_omp_return (true));
  gimple_bind_set_body (bind, bind_body);

  pop_gimplify_context (bind);

  gimple_bind_append_vars (bind, ctx->block_vars);
  BLOCK_VARS (block) = ctx->block_vars;
  if (BLOCK_VARS (block))
    TREE_USED (block) = 1;
}
11880
11881 /* Expand code within an artificial GIMPLE_OMP_GRID_BODY OMP construct. */
11882
11883 static void
11884 lower_omp_grid_body (gimple_stmt_iterator *gsi_p, omp_context *ctx)
11885 {
11886 gimple *stmt = gsi_stmt (*gsi_p);
11887 lower_omp (gimple_omp_body_ptr (stmt), ctx);
11888 gimple_seq_add_stmt (gimple_omp_body_ptr (stmt),
11889 gimple_build_omp_return (false));
11890 }
11891
11892
11893 /* Callback for lower_omp_1. Return non-NULL if *tp needs to be
11894 regimplified. If DATA is non-NULL, lower_omp_1 is outside
11895 of OMP context, but with task_shared_vars set. */
11896
11897 static tree
11898 lower_omp_regimplify_p (tree *tp, int *walk_subtrees,
11899 void *data)
11900 {
11901 tree t = *tp;
11902
11903 /* Any variable with DECL_VALUE_EXPR needs to be regimplified. */
11904 if (VAR_P (t) && data == NULL && DECL_HAS_VALUE_EXPR_P (t))
11905 return t;
11906
11907 if (task_shared_vars
11908 && DECL_P (t)
11909 && bitmap_bit_p (task_shared_vars, DECL_UID (t)))
11910 return t;
11911
11912 /* If a global variable has been privatized, TREE_CONSTANT on
11913 ADDR_EXPR might be wrong. */
11914 if (data == NULL && TREE_CODE (t) == ADDR_EXPR)
11915 recompute_tree_invariant_for_addr_expr (t);
11916
11917 *walk_subtrees = !IS_TYPE_OR_DECL_P (t);
11918 return NULL_TREE;
11919 }
11920
/* Data to be communicated between lower_omp_regimplify_operands and
   lower_omp_regimplify_operands_p.  */

struct lower_omp_regimplify_operands_data
{
  /* Context of the statement whose operands are being regimplified.  */
  omp_context *ctx;
  /* Pairs of (saved DECL_VALUE_EXPR, decl) pushed in that order, so the
     original DECL_VALUE_EXPRs can be restored afterwards.  */
  vec<tree> *decls;
};
11929
11930 /* Helper function for lower_omp_regimplify_operands. Find
11931 omp_member_access_dummy_var vars and adjust temporarily their
11932 DECL_VALUE_EXPRs if needed. */
11933
11934 static tree
11935 lower_omp_regimplify_operands_p (tree *tp, int *walk_subtrees,
11936 void *data)
11937 {
11938 tree t = omp_member_access_dummy_var (*tp);
11939 if (t)
11940 {
11941 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
11942 lower_omp_regimplify_operands_data *ldata
11943 = (lower_omp_regimplify_operands_data *) wi->info;
11944 tree o = maybe_lookup_decl (t, ldata->ctx);
11945 if (o != t)
11946 {
11947 ldata->decls->safe_push (DECL_VALUE_EXPR (*tp));
11948 ldata->decls->safe_push (*tp);
11949 tree v = unshare_and_remap (DECL_VALUE_EXPR (*tp), t, o);
11950 SET_DECL_VALUE_EXPR (*tp, v);
11951 }
11952 }
11953 *walk_subtrees = !IS_TYPE_OR_DECL_P (*tp);
11954 return NULL_TREE;
11955 }
11956
11957 /* Wrapper around gimple_regimplify_operands that adjusts DECL_VALUE_EXPRs
11958 of omp_member_access_dummy_var vars during regimplification. */
11959
11960 static void
11961 lower_omp_regimplify_operands (omp_context *ctx, gimple *stmt,
11962 gimple_stmt_iterator *gsi_p)
11963 {
11964 auto_vec<tree, 10> decls;
11965 if (ctx)
11966 {
11967 struct walk_stmt_info wi;
11968 memset (&wi, '\0', sizeof (wi));
11969 struct lower_omp_regimplify_operands_data data;
11970 data.ctx = ctx;
11971 data.decls = &decls;
11972 wi.info = &data;
11973 walk_gimple_op (stmt, lower_omp_regimplify_operands_p, &wi);
11974 }
11975 gimple_regimplify_operands (stmt, gsi_p);
11976 while (!decls.is_empty ())
11977 {
11978 tree t = decls.pop ();
11979 tree v = decls.pop ();
11980 SET_DECL_VALUE_EXPR (t, v);
11981 }
11982 }
11983
/* Lower the statement at *GSI_P.  CTX is the innermost enclosing OMP
   context, or NULL outside any OMP region (in which case
   task_shared_vars may still require regimplification).  Dispatches on
   the GIMPLE code, recursing into bodies of container statements and
   delegating each OMP construct to its dedicated lowering routine.  */

static void
lower_omp_1 (gimple_stmt_iterator *gsi_p, omp_context *ctx)
{
  gimple *stmt = gsi_stmt (*gsi_p);
  struct walk_stmt_info wi;
  gcall *call_stmt;

  if (gimple_has_location (stmt))
    input_location = gimple_location (stmt);

  /* WI is only consulted when CTX is NULL but task_shared_vars is set;
     see lower_omp_regimplify_p.  */
  if (task_shared_vars)
    memset (&wi, '\0', sizeof (wi));

  /* If we have issued syntax errors, avoid doing any heavy lifting.
     Just replace the OMP directives with a NOP to avoid
     confusing RTL expansion.  */
  if (seen_error () && is_gimple_omp (stmt))
    {
      gsi_replace (gsi_p, gimple_build_nop (), true);
      return;
    }

  switch (gimple_code (stmt))
    {
    case GIMPLE_COND:
      {
	gcond *cond_stmt = as_a <gcond *> (stmt);
	/* Regimplify the condition operands if either needs it.  */
	if ((ctx || task_shared_vars)
	    && (walk_tree (gimple_cond_lhs_ptr (cond_stmt),
			   lower_omp_regimplify_p,
			   ctx ? NULL : &wi, NULL)
		|| walk_tree (gimple_cond_rhs_ptr (cond_stmt),
			      lower_omp_regimplify_p,
			      ctx ? NULL : &wi, NULL)))
	  lower_omp_regimplify_operands (ctx, cond_stmt, gsi_p);
      }
      break;
    case GIMPLE_CATCH:
      lower_omp (gimple_catch_handler_ptr (as_a <gcatch *> (stmt)), ctx);
      break;
    case GIMPLE_EH_FILTER:
      lower_omp (gimple_eh_filter_failure_ptr (stmt), ctx);
      break;
    case GIMPLE_TRY:
      lower_omp (gimple_try_eval_ptr (stmt), ctx);
      lower_omp (gimple_try_cleanup_ptr (stmt), ctx);
      break;
    case GIMPLE_TRANSACTION:
      lower_omp (gimple_transaction_body_ptr (as_a <gtransaction *> (stmt)),
		 ctx);
      break;
    case GIMPLE_BIND:
      lower_omp (gimple_bind_body_ptr (as_a <gbind *> (stmt)), ctx);
      maybe_remove_omp_member_access_dummy_vars (as_a <gbind *> (stmt));
      break;
    case GIMPLE_OMP_PARALLEL:
    case GIMPLE_OMP_TASK:
      ctx = maybe_lookup_ctx (stmt);
      gcc_assert (ctx);
      /* Cancellable regions get a label to branch to on cancellation;
	 see the GOMP_cancel handling in the GIMPLE_CALL case below.  */
      if (ctx->cancellable)
	ctx->cancel_label = create_artificial_label (UNKNOWN_LOCATION);
      lower_omp_taskreg (gsi_p, ctx);
      break;
    case GIMPLE_OMP_FOR:
      ctx = maybe_lookup_ctx (stmt);
      gcc_assert (ctx);
      if (ctx->cancellable)
	ctx->cancel_label = create_artificial_label (UNKNOWN_LOCATION);
      lower_omp_for (gsi_p, ctx);
      break;
    case GIMPLE_OMP_SECTIONS:
      ctx = maybe_lookup_ctx (stmt);
      gcc_assert (ctx);
      if (ctx->cancellable)
	ctx->cancel_label = create_artificial_label (UNKNOWN_LOCATION);
      lower_omp_sections (gsi_p, ctx);
      break;
    case GIMPLE_OMP_SINGLE:
      ctx = maybe_lookup_ctx (stmt);
      gcc_assert (ctx);
      lower_omp_single (gsi_p, ctx);
      break;
    case GIMPLE_OMP_MASTER:
      ctx = maybe_lookup_ctx (stmt);
      gcc_assert (ctx);
      lower_omp_master (gsi_p, ctx);
      break;
    case GIMPLE_OMP_TASKGROUP:
      ctx = maybe_lookup_ctx (stmt);
      gcc_assert (ctx);
      lower_omp_taskgroup (gsi_p, ctx);
      break;
    case GIMPLE_OMP_ORDERED:
      ctx = maybe_lookup_ctx (stmt);
      gcc_assert (ctx);
      lower_omp_ordered (gsi_p, ctx);
      break;
    case GIMPLE_OMP_SCAN:
      ctx = maybe_lookup_ctx (stmt);
      gcc_assert (ctx);
      lower_omp_scan (gsi_p, ctx);
      break;
    case GIMPLE_OMP_CRITICAL:
      ctx = maybe_lookup_ctx (stmt);
      gcc_assert (ctx);
      lower_omp_critical (gsi_p, ctx);
      break;
    case GIMPLE_OMP_ATOMIC_LOAD:
      if ((ctx || task_shared_vars)
	  && walk_tree (gimple_omp_atomic_load_rhs_ptr (
			  as_a <gomp_atomic_load *> (stmt)),
			lower_omp_regimplify_p, ctx ? NULL : &wi, NULL))
	lower_omp_regimplify_operands (ctx, stmt, gsi_p);
      break;
    case GIMPLE_OMP_TARGET:
      ctx = maybe_lookup_ctx (stmt);
      gcc_assert (ctx);
      lower_omp_target (gsi_p, ctx);
      break;
    case GIMPLE_OMP_TEAMS:
      ctx = maybe_lookup_ctx (stmt);
      gcc_assert (ctx);
      /* Host teams constructs are lowered like task regions.  */
      if (gimple_omp_teams_host (as_a <gomp_teams *> (stmt)))
	lower_omp_taskreg (gsi_p, ctx);
      else
	lower_omp_teams (gsi_p, ctx);
      break;
    case GIMPLE_OMP_GRID_BODY:
      ctx = maybe_lookup_ctx (stmt);
      gcc_assert (ctx);
      lower_omp_grid_body (gsi_p, ctx);
      break;
    case GIMPLE_CALL:
      tree fndecl;
      call_stmt = as_a <gcall *> (stmt);
      fndecl = gimple_call_fndecl (call_stmt);
      if (fndecl
	  && fndecl_built_in_p (fndecl, BUILT_IN_NORMAL))
	switch (DECL_FUNCTION_CODE (fndecl))
	  {
	  case BUILT_IN_GOMP_BARRIER:
	    if (ctx == NULL)
	      break;
	    /* FALLTHRU */
	  case BUILT_IN_GOMP_CANCEL:
	  case BUILT_IN_GOMP_CANCELLATION_POINT:
	    omp_context *cctx;
	    cctx = ctx;
	    /* For a SECTION, use the enclosing SECTIONS context.  */
	    if (gimple_code (cctx->stmt) == GIMPLE_OMP_SECTION)
	      cctx = cctx->outer;
	    gcc_assert (gimple_call_lhs (call_stmt) == NULL_TREE);
	    if (!cctx->cancellable)
	      {
		/* In a non-cancellable region, a cancellation point is a
		   no-op; a plain barrier keeps its original form.  */
		if (DECL_FUNCTION_CODE (fndecl)
		    == BUILT_IN_GOMP_CANCELLATION_POINT)
		  {
		    stmt = gimple_build_nop ();
		    gsi_replace (gsi_p, stmt, false);
		  }
		break;
	      }
	    if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_GOMP_BARRIER)
	      {
		fndecl = builtin_decl_explicit (BUILT_IN_GOMP_BARRIER_CANCEL);
		gimple_call_set_fndecl (call_stmt, fndecl);
		gimple_call_set_fntype (call_stmt, TREE_TYPE (fndecl));
	      }
	    /* Give the call an LHS and branch to the region's cancel
	       label when the runtime reports cancellation.  */
	    tree lhs;
	    lhs = create_tmp_var (TREE_TYPE (TREE_TYPE (fndecl)));
	    gimple_call_set_lhs (call_stmt, lhs);
	    tree fallthru_label;
	    fallthru_label = create_artificial_label (UNKNOWN_LOCATION);
	    gimple *g;
	    g = gimple_build_label (fallthru_label);
	    gsi_insert_after (gsi_p, g, GSI_SAME_STMT);
	    g = gimple_build_cond (NE_EXPR, lhs,
				   fold_convert (TREE_TYPE (lhs),
						 boolean_false_node),
				   cctx->cancel_label, fallthru_label);
	    gsi_insert_after (gsi_p, g, GSI_SAME_STMT);
	    break;
	  default:
	    break;
	  }
      goto regimplify;

    case GIMPLE_ASSIGN:
      /* Look for an enclosing construct with a conditional lastprivate
	 tracking the variable assigned here; if found, record the store
	 by copying the construct's _condtemp_ iterator into the
	 associated temporary.  */
      for (omp_context *up = ctx; up; up = up->outer)
	{
	  if (gimple_code (up->stmt) == GIMPLE_OMP_ORDERED
	      || gimple_code (up->stmt) == GIMPLE_OMP_CRITICAL
	      || gimple_code (up->stmt) == GIMPLE_OMP_TASKGROUP
	      || gimple_code (up->stmt) == GIMPLE_OMP_SECTION
	      || gimple_code (up->stmt) == GIMPLE_OMP_SCAN
	      || (gimple_code (up->stmt) == GIMPLE_OMP_TARGET
		  && (gimple_omp_target_kind (up->stmt)
		      == GF_OMP_TARGET_KIND_DATA)))
	    continue;
	  else if (!up->lastprivate_conditional_map)
	    break;
	  tree lhs = get_base_address (gimple_assign_lhs (stmt));
	  /* Look through a dereference of a reference.  */
	  if (TREE_CODE (lhs) == MEM_REF
	      && DECL_P (TREE_OPERAND (lhs, 0))
	      && TREE_CODE (TREE_TYPE (TREE_OPERAND (lhs,
						     0))) == REFERENCE_TYPE)
	    lhs = TREE_OPERAND (lhs, 0);
	  if (DECL_P (lhs))
	    if (tree *v = up->lastprivate_conditional_map->get (lhs))
	      {
		tree clauses;
		if (up->combined_into_simd_safelen0)
		  up = up->outer;
		if (gimple_code (up->stmt) == GIMPLE_OMP_FOR)
		  clauses = gimple_omp_for_clauses (up->stmt);
		else
		  clauses = gimple_omp_sections_clauses (up->stmt);
		/* Find the _condtemp_ clause that carries the iterator.  */
		tree c = omp_find_clause (clauses, OMP_CLAUSE__CONDTEMP_);
		if (!OMP_CLAUSE__CONDTEMP__ITER (c))
		  c = omp_find_clause (OMP_CLAUSE_CHAIN (c),
				       OMP_CLAUSE__CONDTEMP_);
		gcc_assert (OMP_CLAUSE__CONDTEMP__ITER (c));
		gimple *g = gimple_build_assign (*v, OMP_CLAUSE_DECL (c));
		gsi_insert_after (gsi_p, g, GSI_SAME_STMT);
	      }
	}
      /* FALLTHRU */

    default:
    regimplify:
      if ((ctx || task_shared_vars)
	  && walk_gimple_op (stmt, lower_omp_regimplify_p,
			     ctx ? NULL : &wi))
	{
	  /* Just remove clobbers, this should happen only if we have
	     "privatized" local addressable variables in SIMD regions,
	     the clobber isn't needed in that case and gimplifying address
	     of the ARRAY_REF into a pointer and creating MEM_REF based
	     clobber would create worse code than we get with the clobber
	     dropped.  */
	  if (gimple_clobber_p (stmt))
	    {
	      gsi_replace (gsi_p, gimple_build_nop (), true);
	      break;
	    }
	  lower_omp_regimplify_operands (ctx, stmt, gsi_p);
	}
      break;
    }
}
12233
12234 static void
12235 lower_omp (gimple_seq *body, omp_context *ctx)
12236 {
12237 location_t saved_location = input_location;
12238 gimple_stmt_iterator gsi;
12239 for (gsi = gsi_start (*body); !gsi_end_p (gsi); gsi_next (&gsi))
12240 lower_omp_1 (&gsi, ctx);
12241 /* During gimplification, we haven't folded statments inside offloading
12242 or taskreg regions (gimplify.c:maybe_fold_stmt); do that now. */
12243 if (target_nesting_level || taskreg_nesting_level)
12244 for (gsi = gsi_start (*body); !gsi_end_p (gsi); gsi_next (&gsi))
12245 fold_stmt (&gsi);
12246 input_location = saved_location;
12247 }
12248
/* Main entry point.  Scans the current function for OMP constructs,
   then lowers them all via lower_omp.  Always provides
   PROP_gimple_lomp, even when no OMP flag is enabled.  */

static unsigned int
execute_lower_omp (void)
{
  gimple_seq body;
  int i;
  omp_context *ctx;

  /* This pass always runs, to provide PROP_gimple_lomp.
     But often, there is nothing to do.  */
  if (flag_openacc == 0 && flag_openmp == 0
      && flag_openmp_simd == 0)
    return 0;

  all_contexts = splay_tree_new (splay_tree_compare_pointers, 0,
				 delete_omp_context);

  body = gimple_body (current_function_decl);

  /* Gridify target constructs first when HSA code generation was
     requested.  */
  if (hsa_gen_requested_p ())
    omp_grid_gridify_all_targets (&body);

  /* Build the omp_context tree for the whole body, then finalize the
     collected task/parallel contexts.  */
  scan_omp (&body, NULL);
  gcc_assert (taskreg_nesting_level == 0);
  FOR_EACH_VEC_ELT (taskreg_contexts, i, ctx)
    finish_taskreg_scan (ctx);
  taskreg_contexts.release ();

  if (all_contexts->root)
    {
      if (task_shared_vars)
	push_gimplify_context ();
      lower_omp (&body, NULL);
      if (task_shared_vars)
	pop_gimplify_context (NULL);
    }

  if (all_contexts)
    {
      splay_tree_delete (all_contexts);
      all_contexts = NULL;
    }
  BITMAP_FREE (task_shared_vars);

  /* If current function is a method, remove artificial dummy VAR_DECL created
     for non-static data member privatization, they aren't needed for
     debuginfo nor anything else, have been already replaced everywhere in the
     IL and cause problems with LTO.  */
  if (DECL_ARGUMENTS (current_function_decl)
      && DECL_ARTIFICIAL (DECL_ARGUMENTS (current_function_decl))
      && (TREE_CODE (TREE_TYPE (DECL_ARGUMENTS (current_function_decl)))
	  == POINTER_TYPE))
    remove_member_access_dummy_vars (DECL_INITIAL (current_function_decl));
  return 0;
}
12305
namespace {

const pass_data pass_data_lower_omp =
{
  GIMPLE_PASS, /* type */
  "omplower", /* name */
  OPTGROUP_OMP, /* optinfo_flags */
  TV_NONE, /* tv_id */
  PROP_gimple_any, /* properties_required */
  PROP_gimple_lomp | PROP_gimple_lomp_dev, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

/* Pass wrapper around execute_lower_omp; no gate, the pass always
   runs so that PROP_gimple_lomp is provided.  */

class pass_lower_omp : public gimple_opt_pass
{
public:
  pass_lower_omp (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_lower_omp, ctxt)
  {}

  /* opt_pass methods: */
  virtual unsigned int execute (function *) { return execute_lower_omp (); }

}; // class pass_lower_omp

} // anon namespace
12334
/* Create an instance of the omplower pass.  */

gimple_opt_pass *
make_pass_lower_omp (gcc::context *ctxt)
{
  return new pass_lower_omp (ctxt);
}
12340 \f
12341 /* The following is a utility to diagnose structured block violations.
12342 It is not part of the "omplower" pass, as that's invoked too late. It
12343 should be invoked by the respective front ends after gimplification. */
12344
12345 static splay_tree all_labels;
12346
/* Check for mismatched contexts and generate an error if needed.  Return
   true if an error is detected.  BRANCH_CTX and LABEL_CTX are the
   innermost OMP constructs containing the branch and its target label
   (NULL when outside any construct).  On error, the offending branch is
   replaced with a NOP.  */

static bool
diagnose_sb_0 (gimple_stmt_iterator *gsi_p,
	       gimple *branch_ctx, gimple *label_ctx)
{
  gcc_checking_assert (!branch_ctx || is_gimple_omp (branch_ctx));
  gcc_checking_assert (!label_ctx || is_gimple_omp (label_ctx));

  if (label_ctx == branch_ctx)
    return false;

  const char* kind = NULL;

  /* Decide whether the diagnostic should blame OpenACC or OpenMP.  */
  if (flag_openacc)
    {
      if ((branch_ctx && is_gimple_omp_oacc (branch_ctx))
	  || (label_ctx && is_gimple_omp_oacc (label_ctx)))
	{
	  gcc_checking_assert (kind == NULL);
	  kind = "OpenACC";
	}
    }
  if (kind == NULL)
    {
      gcc_checking_assert (flag_openmp || flag_openmp_simd);
      kind = "OpenMP";
    }

  /* Previously we kept track of the label's entire context in diagnose_sb_[12]
     so we could traverse it and issue a correct "exit" or "enter" error
     message upon a structured block violation.

     We built the context by building a list with tree_cons'ing, but there is
     no easy counterpart in gimple tuples.  It seems like far too much work
     for issuing exit/enter error messages.  If someone really misses the
     distinct error message... patches welcome.  */

#if 0
  /* Try to avoid confusing the user by producing an error message
     with correct "exit" or "enter" verbiage.  We prefer "exit"
     unless we can show that LABEL_CTX is nested within BRANCH_CTX.  */
  if (branch_ctx == NULL)
    exit_p = false;
  else
    {
      while (label_ctx)
	{
	  if (TREE_VALUE (label_ctx) == branch_ctx)
	    {
	      exit_p = false;
	      break;
	    }
	  label_ctx = TREE_CHAIN (label_ctx);
	}
    }

  if (exit_p)
    error ("invalid exit from %s structured block", kind);
  else
    error ("invalid entry to %s structured block", kind);
#endif

  /* If it's obvious we have an invalid entry, be specific about the error.  */
  if (branch_ctx == NULL)
    error ("invalid entry to %s structured block", kind);
  else
    {
      /* Otherwise, be vague and lazy, but efficient.  */
      error ("invalid branch to/from %s structured block", kind);
    }

  /* Drop the offending branch so later passes don't trip over it.  */
  gsi_replace (gsi_p, gimple_build_nop (), false);
  return true;
}
12423
/* Pass 1: Create a minimal tree of structured blocks, and record
   where each label is found.  Walk callback; WI->info carries the
   innermost enclosing OMP construct (NULL at the outermost level).  */

static tree
diagnose_sb_1 (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
	       struct walk_stmt_info *wi)
{
  gimple *context = (gimple *) wi->info;
  gimple *inner_context;
  gimple *stmt = gsi_stmt (*gsi_p);

  *handled_ops_p = true;

  switch (gimple_code (stmt))
    {
    WALK_SUBSTMTS;

    case GIMPLE_OMP_PARALLEL:
    case GIMPLE_OMP_TASK:
    case GIMPLE_OMP_SECTIONS:
    case GIMPLE_OMP_SINGLE:
    case GIMPLE_OMP_SECTION:
    case GIMPLE_OMP_MASTER:
    case GIMPLE_OMP_ORDERED:
    case GIMPLE_OMP_SCAN:
    case GIMPLE_OMP_CRITICAL:
    case GIMPLE_OMP_TARGET:
    case GIMPLE_OMP_TEAMS:
    case GIMPLE_OMP_TASKGROUP:
      /* The minimal context here is just the current OMP construct.  */
      inner_context = stmt;
      wi->info = inner_context;
      walk_gimple_seq (gimple_omp_body (stmt), diagnose_sb_1, NULL, wi);
      wi->info = context;
      break;

    case GIMPLE_OMP_FOR:
      inner_context = stmt;
      wi->info = inner_context;
      /* gimple_omp_for_{index,initial,final} are all DECLs; no need to
	 walk them.  */
      walk_gimple_seq (gimple_omp_for_pre_body (stmt),
		       diagnose_sb_1, NULL, wi);
      walk_gimple_seq (gimple_omp_body (stmt), diagnose_sb_1, NULL, wi);
      wi->info = context;
      break;

    case GIMPLE_LABEL:
      /* Record in which context this label lives, for diagnose_sb_2.  */
      splay_tree_insert (all_labels,
			 (splay_tree_key) gimple_label_label (
					    as_a <glabel *> (stmt)),
			 (splay_tree_value) context);
      break;

    default:
      break;
    }

  return NULL_TREE;
}
12484
/* Pass 2: Check each branch and see if its context differs from that of
   the destination label's context.  Walk callback; WI->info carries the
   innermost enclosing OMP construct.  Uses the ALL_LABELS map built by
   diagnose_sb_1 and reports mismatches via diagnose_sb_0.  */

static tree
diagnose_sb_2 (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
	       struct walk_stmt_info *wi)
{
  gimple *context = (gimple *) wi->info;
  splay_tree_node n;
  gimple *stmt = gsi_stmt (*gsi_p);

  *handled_ops_p = true;

  switch (gimple_code (stmt))
    {
    WALK_SUBSTMTS;

    case GIMPLE_OMP_PARALLEL:
    case GIMPLE_OMP_TASK:
    case GIMPLE_OMP_SECTIONS:
    case GIMPLE_OMP_SINGLE:
    case GIMPLE_OMP_SECTION:
    case GIMPLE_OMP_MASTER:
    case GIMPLE_OMP_ORDERED:
    case GIMPLE_OMP_SCAN:
    case GIMPLE_OMP_CRITICAL:
    case GIMPLE_OMP_TARGET:
    case GIMPLE_OMP_TEAMS:
    case GIMPLE_OMP_TASKGROUP:
      /* Descend into the construct body with STMT as the new context.  */
      wi->info = stmt;
      walk_gimple_seq_mod (gimple_omp_body_ptr (stmt), diagnose_sb_2, NULL, wi);
      wi->info = context;
      break;

    case GIMPLE_OMP_FOR:
      wi->info = stmt;
      /* gimple_omp_for_{index,initial,final} are all DECLs; no need to
	 walk them.  */
      walk_gimple_seq_mod (gimple_omp_for_pre_body_ptr (stmt),
			   diagnose_sb_2, NULL, wi);
      walk_gimple_seq_mod (gimple_omp_body_ptr (stmt), diagnose_sb_2, NULL, wi);
      wi->info = context;
      break;

    case GIMPLE_COND:
      {
	/* Check both targets of the conditional branch.  */
	gcond *cond_stmt = as_a <gcond *> (stmt);
	tree lab = gimple_cond_true_label (cond_stmt);
	if (lab)
	  {
	    n = splay_tree_lookup (all_labels,
				   (splay_tree_key) lab);
	    diagnose_sb_0 (gsi_p, context,
			   n ? (gimple *) n->value : NULL);
	  }
	lab = gimple_cond_false_label (cond_stmt);
	if (lab)
	  {
	    n = splay_tree_lookup (all_labels,
				   (splay_tree_key) lab);
	    diagnose_sb_0 (gsi_p, context,
			   n ? (gimple *) n->value : NULL);
	  }
      }
      break;

    case GIMPLE_GOTO:
      {
	tree lab = gimple_goto_dest (stmt);
	/* Computed gotos are not checked here.  */
	if (TREE_CODE (lab) != LABEL_DECL)
	  break;

	n = splay_tree_lookup (all_labels, (splay_tree_key) lab);
	diagnose_sb_0 (gsi_p, context, n ? (gimple *) n->value : NULL);
      }
      break;

    case GIMPLE_SWITCH:
      {
	/* Check every case label; stop after the first diagnosed error,
	   since diagnose_sb_0 has already replaced the statement.  */
	gswitch *switch_stmt = as_a <gswitch *> (stmt);
	unsigned int i;
	for (i = 0; i < gimple_switch_num_labels (switch_stmt); ++i)
	  {
	    tree lab = CASE_LABEL (gimple_switch_label (switch_stmt, i));
	    n = splay_tree_lookup (all_labels, (splay_tree_key) lab);
	    if (n && diagnose_sb_0 (gsi_p, context, (gimple *) n->value))
	      break;
	  }
      }
      break;

    case GIMPLE_RETURN:
      /* A return inside a construct is always an invalid exit.  */
      diagnose_sb_0 (gsi_p, context, NULL);
      break;

    default:
      break;
    }

  return NULL_TREE;
}
12586
12587 static unsigned int
12588 diagnose_omp_structured_block_errors (void)
12589 {
12590 struct walk_stmt_info wi;
12591 gimple_seq body = gimple_body (current_function_decl);
12592
12593 all_labels = splay_tree_new (splay_tree_compare_pointers, 0, 0);
12594
12595 memset (&wi, 0, sizeof (wi));
12596 walk_gimple_seq (body, diagnose_sb_1, NULL, &wi);
12597
12598 memset (&wi, 0, sizeof (wi));
12599 wi.want_locations = true;
12600 walk_gimple_seq_mod (&body, diagnose_sb_2, NULL, &wi);
12601
12602 gimple_set_body (current_function_decl, body);
12603
12604 splay_tree_delete (all_labels);
12605 all_labels = NULL;
12606
12607 return 0;
12608 }
12609
namespace {

const pass_data pass_data_diagnose_omp_blocks =
{
  GIMPLE_PASS, /* type */
  "*diagnose_omp_blocks", /* name */
  OPTGROUP_OMP, /* optinfo_flags */
  TV_NONE, /* tv_id */
  PROP_gimple_any, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

/* Pass wrapper around diagnose_omp_structured_block_errors, gated on
   any of the OpenACC/OpenMP flags being enabled.  */

class pass_diagnose_omp_blocks : public gimple_opt_pass
{
public:
  pass_diagnose_omp_blocks (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_diagnose_omp_blocks, ctxt)
  {}

  /* opt_pass methods: */
  virtual bool gate (function *)
  {
    return flag_openacc || flag_openmp || flag_openmp_simd;
  }
  virtual unsigned int execute (function *)
  {
    return diagnose_omp_structured_block_errors ();
  }

}; // class pass_diagnose_omp_blocks

} // anon namespace
12645
/* Create an instance of the diagnose_omp_blocks pass.  */

gimple_opt_pass *
make_pass_diagnose_omp_blocks (gcc::context *ctxt)
{
  return new pass_diagnose_omp_blocks (ctxt);
}
12651 \f
12652
12653 #include "gt-omp-low.h"