openmp: Implement OpenMP 5.0 base-pointer attachement and clause ordering
[gcc.git] / gcc / omp-low.c
1 /* Lowering pass for OMP directives. Converts OMP directives into explicit
2 calls to the runtime library (libgomp), data marshalling to implement data
3 sharing and copying clauses, offloading to accelerators, and more.
4
5 Contributed by Diego Novillo <dnovillo@redhat.com>
6
7 Copyright (C) 2005-2020 Free Software Foundation, Inc.
8
9 This file is part of GCC.
10
11 GCC is free software; you can redistribute it and/or modify it under
12 the terms of the GNU General Public License as published by the Free
13 Software Foundation; either version 3, or (at your option) any later
14 version.
15
16 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
17 WARRANTY; without even the implied warranty of MERCHANTABILITY or
18 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
19 for more details.
20
21 You should have received a copy of the GNU General Public License
22 along with GCC; see the file COPYING3. If not see
23 <http://www.gnu.org/licenses/>. */
24
25 #include "config.h"
26 #include "system.h"
27 #include "coretypes.h"
28 #include "backend.h"
29 #include "target.h"
30 #include "tree.h"
31 #include "gimple.h"
32 #include "tree-pass.h"
33 #include "ssa.h"
34 #include "cgraph.h"
35 #include "pretty-print.h"
36 #include "diagnostic-core.h"
37 #include "fold-const.h"
38 #include "stor-layout.h"
39 #include "internal-fn.h"
40 #include "gimple-fold.h"
41 #include "gimplify.h"
42 #include "gimple-iterator.h"
43 #include "gimplify-me.h"
44 #include "gimple-walk.h"
45 #include "tree-iterator.h"
46 #include "tree-inline.h"
47 #include "langhooks.h"
48 #include "tree-dfa.h"
49 #include "tree-ssa.h"
50 #include "splay-tree.h"
51 #include "omp-general.h"
52 #include "omp-low.h"
53 #include "gimple-low.h"
54 #include "alloc-pool.h"
55 #include "symbol-summary.h"
56 #include "tree-nested.h"
57 #include "context.h"
58 #include "gomp-constants.h"
59 #include "gimple-pretty-print.h"
60 #include "stringpool.h"
61 #include "attribs.h"
62
63 /* Lowering of OMP parallel and workshare constructs proceeds in two
64 phases. The first phase scans the function looking for OMP statements
65 and then for variables that must be replaced to satisfy data sharing
66 clauses. The second phase expands code for the constructs, as well as
67 re-gimplifying things when variables have been replaced with complex
68 expressions.
69
70 Final code generation is done by pass_expand_omp. The flowgraph is
71 scanned for regions which are then moved to a new
72 function, to be invoked by the thread library, or offloaded. */
73
/* Context structure.  Used to store information about each OMP
   directive encountered in the code.  One context is allocated per
   directive; contexts form a tree mirroring construct nesting.  */

struct omp_context
{
  /* This field must be at the beginning, as we do "inheritance": Some
     callback functions for tree-inline.c (e.g., omp_copy_decl)
     receive a copy_body_data pointer that is up-casted to an
     omp_context pointer.  */
  copy_body_data cb;

  /* The tree of contexts corresponding to the encountered constructs:
     link to the enclosing context, and the gimple statement that
     introduced this one.  */
  struct omp_context *outer;
  gimple *stmt;

  /* Map variables to fields in a structure that allows communication
     between sending and receiving threads.  */
  splay_tree field_map;
  tree record_type;
  tree sender_decl;
  tree receiver_decl;

  /* These are used just by task contexts, if task firstprivate fn is
     needed.  srecord_type is used to communicate from the thread
     that encountered the task construct to task firstprivate fn,
     record_type is allocated by GOMP_task, initialized by task firstprivate
     fn and passed to the task body fn.  */
  splay_tree sfield_map;
  tree srecord_type;

  /* A chain of variables to add to the top-level block surrounding the
     construct.  In the case of a parallel, this is in the child function.  */
  tree block_vars;

  /* Label to which GOMP_cancel{,llation_point} and explicit and implicit
     barriers should jump to during omplower pass.  */
  tree cancel_label;

  /* The sibling GIMPLE_OMP_FOR simd with _simt_ clause or NULL
     otherwise.  */
  gimple *simt_stmt;

  /* For task reductions registered in this context, a vector containing
     the length of the private copies block (if constant, otherwise NULL)
     and then offsets (if constant, otherwise NULL) for each entry.  */
  vec<tree> task_reductions;

  /* A hash map from the reduction clauses to the registered array
     elts.  */
  hash_map<tree, unsigned> *task_reduction_map;

  /* And a hash map from the lastprivate(conditional:) variables to their
     corresponding tracking loop iteration variables.  */
  hash_map<tree, tree> *lastprivate_conditional_map;

  /* A tree_list of the reduction clauses in this context.  This is
    only used for checking the consistency of OpenACC reduction
    clauses in scan_omp_for and is not guaranteed to contain a valid
    value outside of this function.  */
  tree local_reduction_clauses;

  /* A tree_list of the reduction clauses in outer contexts.  This is
    only used for checking the consistency of OpenACC reduction
    clauses in scan_omp_for and is not guaranteed to contain a valid
    value outside of this function.  */
  tree outer_reduction_clauses;

  /* Nesting depth of this context.  Used to beautify error messages re
     invalid gotos.  The outermost ctx is depth 1, with depth 0 being
     reserved for the main body of the function.  */
  int depth;

  /* True if this parallel directive is nested within another.  */
  bool is_nested;

  /* True if this construct can be cancelled.  */
  bool cancellable;

  /* True if lower_omp_1 should look up lastprivate conditional in parent
     context.  */
  bool combined_into_simd_safelen1;

  /* True if there is nested scan context with inclusive clause.  */
  bool scan_inclusive;

  /* True if there is nested scan context with exclusive clause.  */
  bool scan_exclusive;

  /* True in the second simd loop of for simd with inscan reductions.  */
  bool for_simd_scan_phase;

  /* True if there is order(concurrent) clause on the construct.  */
  bool order_concurrent;

  /* True if there is bind clause on the construct (i.e. a loop construct).  */
  bool loop_p;
};
171
/* Splay tree mapping each OMP gimple statement to its omp_context,
   for the whole function being lowered.  */
static splay_tree all_contexts;
/* NOTE(review): presumably the nesting depth of parallel/task regions
   during scanning — confirm against uses later in the file.  */
static int taskreg_nesting_level;
/* NOTE(review): presumably the nesting depth of target regions during
   scanning — confirm against uses later in the file.  */
static int target_nesting_level;
/* DECL_UIDs of variables that were made addressable only because a
   task needs to take their address (see use_pointer_for_field).  */
static bitmap task_shared_vars;
/* DECL_UIDs of global variables that were non-addressable when first
   seen by this pass; the answer is then kept stable for the whole
   pass even if they are made addressable later (PR91216).  */
static bitmap global_nonaddressable_vars;
/* NOTE(review): looks like a worklist of taskreg contexts collected
   during scanning for later processing — confirm against uses.  */
static vec<omp_context *> taskreg_contexts;

static void scan_omp (gimple_seq *, omp_context *);
static tree scan_omp_1_op (tree *, int *, void *);

/* Shared case labels for gimple wrapper statements whose
   sub-statements should be walked; expands inside a switch in a
   walk_gimple callback that has HANDLED_OPS_P in scope.  */
#define WALK_SUBSTMTS \
  case GIMPLE_BIND: \
  case GIMPLE_TRY: \
  case GIMPLE_CATCH: \
  case GIMPLE_EH_FILTER: \
  case GIMPLE_TRANSACTION: \
    /* The sub-statements for these should be walked.  */ \
    *handled_ops_p = false; \
    break;
191
192 /* Return true if CTX corresponds to an OpenACC 'parallel' or 'serial'
193 region. */
194
195 static bool
196 is_oacc_parallel_or_serial (omp_context *ctx)
197 {
198 enum gimple_code outer_type = gimple_code (ctx->stmt);
199 return ((outer_type == GIMPLE_OMP_TARGET)
200 && ((gimple_omp_target_kind (ctx->stmt)
201 == GF_OMP_TARGET_KIND_OACC_PARALLEL)
202 || (gimple_omp_target_kind (ctx->stmt)
203 == GF_OMP_TARGET_KIND_OACC_SERIAL)));
204 }
205
206 /* Return true if CTX corresponds to an oacc kernels region. */
207
208 static bool
209 is_oacc_kernels (omp_context *ctx)
210 {
211 enum gimple_code outer_type = gimple_code (ctx->stmt);
212 return ((outer_type == GIMPLE_OMP_TARGET)
213 && (gimple_omp_target_kind (ctx->stmt)
214 == GF_OMP_TARGET_KIND_OACC_KERNELS));
215 }
216
217 /* Return true if STMT corresponds to an OpenMP target region. */
218 static bool
219 is_omp_target (gimple *stmt)
220 {
221 if (gimple_code (stmt) == GIMPLE_OMP_TARGET)
222 {
223 int kind = gimple_omp_target_kind (stmt);
224 return (kind == GF_OMP_TARGET_KIND_REGION
225 || kind == GF_OMP_TARGET_KIND_DATA
226 || kind == GF_OMP_TARGET_KIND_ENTER_DATA
227 || kind == GF_OMP_TARGET_KIND_EXIT_DATA);
228 }
229 return false;
230 }
231
232 /* If DECL is the artificial dummy VAR_DECL created for non-static
233 data member privatization, return the underlying "this" parameter,
234 otherwise return NULL. */
235
236 tree
237 omp_member_access_dummy_var (tree decl)
238 {
239 if (!VAR_P (decl)
240 || !DECL_ARTIFICIAL (decl)
241 || !DECL_IGNORED_P (decl)
242 || !DECL_HAS_VALUE_EXPR_P (decl)
243 || !lang_hooks.decls.omp_disregard_value_expr (decl, false))
244 return NULL_TREE;
245
246 tree v = DECL_VALUE_EXPR (decl);
247 if (TREE_CODE (v) != COMPONENT_REF)
248 return NULL_TREE;
249
250 while (1)
251 switch (TREE_CODE (v))
252 {
253 case COMPONENT_REF:
254 case MEM_REF:
255 case INDIRECT_REF:
256 CASE_CONVERT:
257 case POINTER_PLUS_EXPR:
258 v = TREE_OPERAND (v, 0);
259 continue;
260 case PARM_DECL:
261 if (DECL_CONTEXT (v) == current_function_decl
262 && DECL_ARTIFICIAL (v)
263 && TREE_CODE (TREE_TYPE (v)) == POINTER_TYPE)
264 return v;
265 return NULL_TREE;
266 default:
267 return NULL_TREE;
268 }
269 }
270
271 /* Helper for unshare_and_remap, called through walk_tree. */
272
273 static tree
274 unshare_and_remap_1 (tree *tp, int *walk_subtrees, void *data)
275 {
276 tree *pair = (tree *) data;
277 if (*tp == pair[0])
278 {
279 *tp = unshare_expr (pair[1]);
280 *walk_subtrees = 0;
281 }
282 else if (IS_TYPE_OR_DECL_P (*tp))
283 *walk_subtrees = 0;
284 return NULL_TREE;
285 }
286
287 /* Return unshare_expr (X) with all occurrences of FROM
288 replaced with TO. */
289
290 static tree
291 unshare_and_remap (tree x, tree from, tree to)
292 {
293 tree pair[2] = { from, to };
294 x = unshare_expr (x);
295 walk_tree (&x, unshare_and_remap_1, pair, NULL);
296 return x;
297 }
298
/* Convenience function for calling scan_omp_1_op on tree operands.
   TP is the operand to walk, CTX the current context (passed through
   walk_stmt_info.info).  Returns the first non-NULL value produced by
   the callback, or NULL_TREE.  */

static inline tree
scan_omp_op (tree *tp, omp_context *ctx)
{
  struct walk_stmt_info wi;

  /* Zero the whole walk state, then fill in just what we need.  */
  memset (&wi, 0, sizeof (wi));
  wi.info = ctx;
  wi.want_locations = true;

  return walk_tree (tp, scan_omp_1_op, &wi, NULL);
}
312
/* Forward declarations for the lowering machinery defined later.  */
static void lower_omp (gimple_seq *, omp_context *);
static tree lookup_decl_in_outer_ctx (tree, omp_context *);
static tree maybe_lookup_decl_in_outer_ctx (tree, omp_context *);
316
317 /* Return true if CTX is for an omp parallel. */
318
319 static inline bool
320 is_parallel_ctx (omp_context *ctx)
321 {
322 return gimple_code (ctx->stmt) == GIMPLE_OMP_PARALLEL;
323 }
324
325
326 /* Return true if CTX is for an omp task. */
327
328 static inline bool
329 is_task_ctx (omp_context *ctx)
330 {
331 return gimple_code (ctx->stmt) == GIMPLE_OMP_TASK;
332 }
333
334
335 /* Return true if CTX is for an omp taskloop. */
336
337 static inline bool
338 is_taskloop_ctx (omp_context *ctx)
339 {
340 return gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
341 && gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_TASKLOOP;
342 }
343
344
345 /* Return true if CTX is for a host omp teams. */
346
347 static inline bool
348 is_host_teams_ctx (omp_context *ctx)
349 {
350 return gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS
351 && gimple_omp_teams_host (as_a <gomp_teams *> (ctx->stmt));
352 }
353
354 /* Return true if CTX is for an omp parallel or omp task or host omp teams
355 (the last one is strictly not a task region in OpenMP speak, but we
356 need to treat it similarly). */
357
358 static inline bool
359 is_taskreg_ctx (omp_context *ctx)
360 {
361 return is_parallel_ctx (ctx) || is_task_ctx (ctx) || is_host_teams_ctx (ctx);
362 }
363
364 /* Return true if EXPR is variable sized. */
365
366 static inline bool
367 is_variable_sized (const_tree expr)
368 {
369 return !TREE_CONSTANT (TYPE_SIZE_UNIT (TREE_TYPE (expr)));
370 }
371
372 /* Lookup variables. The "maybe" form
373 allows for the variable form to not have been entered, otherwise we
374 assert that the variable must have been entered. */
375
376 static inline tree
377 lookup_decl (tree var, omp_context *ctx)
378 {
379 tree *n = ctx->cb.decl_map->get (var);
380 return *n;
381 }
382
383 static inline tree
384 maybe_lookup_decl (const_tree var, omp_context *ctx)
385 {
386 tree *n = ctx->cb.decl_map->get (const_cast<tree> (var));
387 return n ? *n : NULL_TREE;
388 }
389
390 static inline tree
391 lookup_field (tree var, omp_context *ctx)
392 {
393 splay_tree_node n;
394 n = splay_tree_lookup (ctx->field_map, (splay_tree_key) var);
395 return (tree) n->value;
396 }
397
398 static inline tree
399 lookup_sfield (splay_tree_key key, omp_context *ctx)
400 {
401 splay_tree_node n;
402 n = splay_tree_lookup (ctx->sfield_map
403 ? ctx->sfield_map : ctx->field_map, key);
404 return (tree) n->value;
405 }
406
407 static inline tree
408 lookup_sfield (tree var, omp_context *ctx)
409 {
410 return lookup_sfield ((splay_tree_key) var, ctx);
411 }
412
413 static inline tree
414 maybe_lookup_field (splay_tree_key key, omp_context *ctx)
415 {
416 splay_tree_node n;
417 n = splay_tree_lookup (ctx->field_map, key);
418 return n ? (tree) n->value : NULL_TREE;
419 }
420
421 static inline tree
422 maybe_lookup_field (tree var, omp_context *ctx)
423 {
424 return maybe_lookup_field ((splay_tree_key) var, ctx);
425 }
426
/* Return true if DECL should be copied by pointer (its address passed
   to the child) rather than by copy-in/copy-out of the value.
   SHARED_CTX is the parallel context if DECL is to be shared, or NULL
   when only DECL's type decides.  May set TREE_ADDRESSABLE on the
   outer copy of DECL as a side effect for task contexts.  */

static bool
use_pointer_for_field (tree decl, omp_context *shared_ctx)
{
  /* Aggregates and _Atomic-qualified variables are never copied by
     value.  */
  if (AGGREGATE_TYPE_P (TREE_TYPE (decl))
      || TYPE_ATOMIC (TREE_TYPE (decl)))
    return true;

  /* We can only use copy-in/copy-out semantics for shared variables
     when we know the value is not accessible from an outer scope.  */
  if (shared_ctx)
    {
      gcc_assert (!is_gimple_omp_oacc (shared_ctx->stmt));

      /* ??? Trivially accessible from anywhere.  But why would we even
	 be passing an address in this case?  Should we simply assert
	 this to be false, or should we have a cleanup pass that removes
	 these from the list of mappings?  */
      if (is_global_var (maybe_lookup_decl_in_outer_ctx (decl, shared_ctx)))
	return true;

      /* For variables with DECL_HAS_VALUE_EXPR_P set, we cannot tell
	 without analyzing the expression whether or not its location
	 is accessible to anyone else.  In the case of nested parallel
	 regions it certainly may be.  */
      if (TREE_CODE (decl) != RESULT_DECL && DECL_HAS_VALUE_EXPR_P (decl))
	return true;

      /* Do not use copy-in/copy-out for variables that have their
	 address taken.  */
      if (is_global_var (decl))
	{
	  /* For file scope vars, track whether we've seen them as
	     non-addressable initially and in that case, keep the same
	     answer for the duration of the pass, even when they are made
	     addressable later on e.g. through reduction expansion.  Global
	     variables which weren't addressable before the pass will not
	     have their privatized copies address taken.  See PR91216.  */
	  if (!TREE_ADDRESSABLE (decl))
	    {
	      if (!global_nonaddressable_vars)
		global_nonaddressable_vars = BITMAP_ALLOC (NULL);
	      bitmap_set_bit (global_nonaddressable_vars, DECL_UID (decl));
	    }
	  else if (!global_nonaddressable_vars
		   || !bitmap_bit_p (global_nonaddressable_vars,
				     DECL_UID (decl)))
	    return true;
	}
      else if (TREE_ADDRESSABLE (decl))
	return true;

      /* lower_send_shared_vars only uses copy-in, but not copy-out
	 for these.  */
      if (TREE_READONLY (decl)
	  || ((TREE_CODE (decl) == RESULT_DECL
	       || TREE_CODE (decl) == PARM_DECL)
	      && DECL_BY_REFERENCE (decl)))
	return false;

      /* Disallow copy-in/out in nested parallel if
	 decl is shared in outer parallel, otherwise
	 each thread could store the shared variable
	 in its own copy-in location, making the
	 variable no longer really shared.  */
      if (shared_ctx->is_nested)
	{
	  omp_context *up;

	  /* Find the innermost enclosing taskreg or offloaded target
	     context that already has a mapping for DECL.  */
	  for (up = shared_ctx->outer; up; up = up->outer)
	    if ((is_taskreg_ctx (up)
		 || (gimple_code (up->stmt) == GIMPLE_OMP_TARGET
		     && is_gimple_omp_offloaded (up->stmt)))
		&& maybe_lookup_decl (decl, up))
	      break;

	  if (up)
	    {
	      tree c;

	      /* Check whether DECL is actually mapped/shared on that
		 enclosing construct's clause list.  */
	      if (gimple_code (up->stmt) == GIMPLE_OMP_TARGET)
		{
		  for (c = gimple_omp_target_clauses (up->stmt);
		       c; c = OMP_CLAUSE_CHAIN (c))
		    if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
			&& OMP_CLAUSE_DECL (c) == decl)
		      break;
		}
	      else
		for (c = gimple_omp_taskreg_clauses (up->stmt);
		     c; c = OMP_CLAUSE_CHAIN (c))
		  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED
		      && OMP_CLAUSE_DECL (c) == decl)
		    break;

	      /* Jumps into the task branch below: mark addressable and
		 return true.  */
	      if (c)
		goto maybe_mark_addressable_and_ret;
	    }
	}

      /* For tasks avoid using copy-in/out.  As tasks can be
	 deferred or executed in different thread, when GOMP_task
	 returns, the task hasn't necessarily terminated.  */
      if (is_task_ctx (shared_ctx))
	{
	  tree outer;
	maybe_mark_addressable_and_ret:
	  outer = maybe_lookup_decl_in_outer_ctx (decl, shared_ctx);
	  if (is_gimple_reg (outer) && !omp_member_access_dummy_var (outer))
	    {
	      /* Taking address of OUTER in lower_send_shared_vars
		 might need regimplification of everything that uses the
		 variable.  */
	      if (!task_shared_vars)
		task_shared_vars = BITMAP_ALLOC (NULL);
	      bitmap_set_bit (task_shared_vars, DECL_UID (outer));
	      TREE_ADDRESSABLE (outer) = 1;
	    }
	  return true;
	}
    }

  return false;
}
553
554 /* Construct a new automatic decl similar to VAR. */
555
556 static tree
557 omp_copy_decl_2 (tree var, tree name, tree type, omp_context *ctx)
558 {
559 tree copy = copy_var_decl (var, name, type);
560
561 DECL_CONTEXT (copy) = current_function_decl;
562 DECL_CHAIN (copy) = ctx->block_vars;
563 /* If VAR is listed in task_shared_vars, it means it wasn't
564 originally addressable and is just because task needs to take
565 it's address. But we don't need to take address of privatizations
566 from that var. */
567 if (TREE_ADDRESSABLE (var)
568 && ((task_shared_vars
569 && bitmap_bit_p (task_shared_vars, DECL_UID (var)))
570 || (global_nonaddressable_vars
571 && bitmap_bit_p (global_nonaddressable_vars, DECL_UID (var)))))
572 TREE_ADDRESSABLE (copy) = 0;
573 ctx->block_vars = copy;
574
575 return copy;
576 }
577
578 static tree
579 omp_copy_decl_1 (tree var, omp_context *ctx)
580 {
581 return omp_copy_decl_2 (var, DECL_NAME (var), TREE_TYPE (var), ctx);
582 }
583
584 /* Build COMPONENT_REF and set TREE_THIS_VOLATILE and TREE_READONLY on it
585 as appropriate. */
586 static tree
587 omp_build_component_ref (tree obj, tree field)
588 {
589 tree ret = build3 (COMPONENT_REF, TREE_TYPE (field), obj, field, NULL);
590 if (TREE_THIS_VOLATILE (field))
591 TREE_THIS_VOLATILE (ret) |= 1;
592 if (TREE_READONLY (field))
593 TREE_READONLY (ret) |= 1;
594 return ret;
595 }
596
597 /* Build tree nodes to access the field for VAR on the receiver side. */
598
599 static tree
600 build_receiver_ref (tree var, bool by_ref, omp_context *ctx)
601 {
602 tree x, field = lookup_field (var, ctx);
603
604 /* If the receiver record type was remapped in the child function,
605 remap the field into the new record type. */
606 x = maybe_lookup_field (field, ctx);
607 if (x != NULL)
608 field = x;
609
610 x = build_simple_mem_ref (ctx->receiver_decl);
611 TREE_THIS_NOTRAP (x) = 1;
612 x = omp_build_component_ref (x, field);
613 if (by_ref)
614 {
615 x = build_simple_mem_ref (x);
616 TREE_THIS_NOTRAP (x) = 1;
617 }
618
619 return x;
620 }
621
/* Build tree nodes to access VAR in the scope outer to CTX.  In the case
   of a parallel, this is a component reference; for workshare constructs
   this is some variable.  CODE, when given, is the clause code the
   reference is being built for and selects special handling for
   private and lastprivate clauses.  */

static tree
build_outer_var_ref (tree var, omp_context *ctx,
		     enum omp_clause_code code = OMP_CLAUSE_ERROR)
{
  tree x;
  omp_context *outer = ctx->outer;
  /* Taskgroups contribute nothing to data sharing; skip them.  */
  while (outer && gimple_code (outer->stmt) == GIMPLE_OMP_TASKGROUP)
    outer = outer->outer;

  if (is_global_var (maybe_lookup_decl_in_outer_ctx (var, ctx)))
    /* Globals are accessible directly.  */
    x = var;
  else if (is_variable_sized (var))
    {
      /* For VLAs, recurse on the underlying pointer from the value
	 expression and dereference it.  */
      x = TREE_OPERAND (DECL_VALUE_EXPR (var), 0);
      x = build_outer_var_ref (x, ctx, code);
      x = build_simple_mem_ref (x);
    }
  else if (is_taskreg_ctx (ctx))
    {
      /* Parallel/task/host-teams: read VAR out of the receiver record.  */
      bool by_ref = use_pointer_for_field (var, NULL);
      x = build_receiver_ref (var, by_ref, ctx);
    }
  else if ((gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
	    && gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_SIMD)
	   || ctx->loop_p
	   || (code == OMP_CLAUSE_PRIVATE
	       && (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
		   || gimple_code (ctx->stmt) == GIMPLE_OMP_SECTIONS
		   || gimple_code (ctx->stmt) == GIMPLE_OMP_SINGLE)))
    {
      /* #pragma omp simd isn't a worksharing construct, and can reference
	 even private vars in its linear etc. clauses.
	 Similarly for OMP_CLAUSE_PRIVATE with outer ref, that can refer
	 to private vars in all worksharing constructs.  */
      x = NULL_TREE;
      if (outer && is_taskreg_ctx (outer))
	x = lookup_decl (var, outer);
      else if (outer)
	x = maybe_lookup_decl_in_outer_ctx (var, ctx);
      if (x == NULL_TREE)
	x = var;
    }
  else if (code == OMP_CLAUSE_LASTPRIVATE && is_taskloop_ctx (ctx))
    {
      gcc_assert (outer);
      /* Taskloop lastprivate fields are keyed on &DECL_UID (see
	 install_var_field with mask bit 8).  */
      splay_tree_node n
	= splay_tree_lookup (outer->field_map,
			     (splay_tree_key) &DECL_UID (var));
      if (n == NULL)
	{
	  if (is_global_var (maybe_lookup_decl_in_outer_ctx (var, outer)))
	    x = var;
	  else
	    x = lookup_decl (var, outer);
	}
      else
	{
	  tree field = (tree) n->value;
	  /* If the receiver record type was remapped in the child function,
	     remap the field into the new record type.  */
	  x = maybe_lookup_field (field, outer);
	  if (x != NULL)
	    field = x;

	  x = build_simple_mem_ref (outer->receiver_decl);
	  x = omp_build_component_ref (x, field);
	  if (use_pointer_for_field (var, outer))
	    x = build_simple_mem_ref (x);
	}
    }
  else if (outer)
    x = lookup_decl (var, outer);
  else if (omp_is_reference (var))
    /* This can happen with orphaned constructs.  If var is reference, it is
       possible it is shared and as such valid.  */
    x = var;
  else if (omp_member_access_dummy_var (var))
    x = var;
  else
    gcc_unreachable ();

  if (x == var)
    {
      /* For a member-access dummy var, substitute the remapped "this"
	 parameter into its value expression.  */
      tree t = omp_member_access_dummy_var (var);
      if (t)
	{
	  x = DECL_VALUE_EXPR (var);
	  tree o = maybe_lookup_decl_in_outer_ctx (t, ctx);
	  if (o != t)
	    x = unshare_and_remap (x, t, o);
	  else
	    x = unshare_expr (x);
	}
    }

  if (omp_is_reference (var))
    x = build_simple_mem_ref (x);

  return x;
}
726
727 /* Build tree nodes to access the field for VAR on the sender side. */
728
729 static tree
730 build_sender_ref (splay_tree_key key, omp_context *ctx)
731 {
732 tree field = lookup_sfield (key, ctx);
733 return omp_build_component_ref (ctx->sender_decl, field);
734 }
735
736 static tree
737 build_sender_ref (tree var, omp_context *ctx)
738 {
739 return build_sender_ref ((splay_tree_key) var, ctx);
740 }
741
/* Add a new field for VAR inside the structure CTX->RECORD_TYPE (and,
   for task contexts, CTX->SRECORD_TYPE).  BY_REF indicates the field
   should hold a pointer to VAR's type.  MASK is a bit set:
     1  - install into the receiver record (field_map/record_type)
     2  - install into the sender record (sfield_map/srecord_type)
     4  - VAR is an array; install a pointer-to-pointer to its type
     8  - key the field on &DECL_UID (VAR) instead of VAR itself
     16 - key the field on &DECL_NAME (VAR) and take the field type
	  from the omp_array_data language hook
   (The old BASE_POINTERS_RESTRICT parameter is gone; restrict
   qualifiers are now stripped unconditionally below.)  */

static void
install_var_field (tree var, bool by_ref, int mask, omp_context *ctx)
{
  tree field, type, sfield = NULL_TREE;
  splay_tree_key key = (splay_tree_key) var;

  /* Alternative keys so the same VAR can have multiple fields.  */
  if ((mask & 16) != 0)
    {
      key = (splay_tree_key) &DECL_NAME (var);
      gcc_checking_assert (key != (splay_tree_key) var);
    }
  if ((mask & 8) != 0)
    {
      key = (splay_tree_key) &DECL_UID (var);
      gcc_checking_assert (key != (splay_tree_key) var);
    }
  gcc_assert ((mask & 1) == 0
	      || !splay_tree_lookup (ctx->field_map, key));
  gcc_assert ((mask & 2) == 0 || !ctx->sfield_map
	      || !splay_tree_lookup (ctx->sfield_map, key));
  gcc_assert ((mask & 3) == 3
	      || !is_gimple_omp_oacc (ctx->stmt));

  type = TREE_TYPE (var);
  if ((mask & 16) != 0)
    type = lang_hooks.decls.omp_array_data (var, true);

  /* Prevent redeclaring the var in the split-off function with a restrict
     pointer type.  Note that we only clear type itself, restrict qualifiers in
     the pointed-to type will be ignored by points-to analysis.  */
  if (POINTER_TYPE_P (type)
      && TYPE_RESTRICT (type))
    type = build_qualified_type (type, TYPE_QUALS (type) & ~TYPE_QUAL_RESTRICT);

  if (mask & 4)
    {
      gcc_assert (TREE_CODE (type) == ARRAY_TYPE);
      type = build_pointer_type (build_pointer_type (type));
    }
  else if (by_ref)
    type = build_pointer_type (type);
  else if ((mask & 3) == 1 && omp_is_reference (var))
    type = TREE_TYPE (type);

  field = build_decl (DECL_SOURCE_LOCATION (var),
		      FIELD_DECL, DECL_NAME (var), type);

  /* Remember what variable this field was created for.  This does have a
     side effect of making dwarf2out ignore this member, so for helpful
     debugging we clear it later in delete_omp_context.  */
  DECL_ABSTRACT_ORIGIN (field) = var;
  if ((mask & 16) == 0 && type == TREE_TYPE (var))
    {
      SET_DECL_ALIGN (field, DECL_ALIGN (var));
      DECL_USER_ALIGN (field) = DECL_USER_ALIGN (var);
      TREE_THIS_VOLATILE (field) = TREE_THIS_VOLATILE (var);
    }
  else
    SET_DECL_ALIGN (field, TYPE_ALIGN (type));

  if ((mask & 3) == 3)
    {
      /* Field goes into both records; mirror it in srecord_type if
	 that exists.  */
      insert_field_into_struct (ctx->record_type, field);
      if (ctx->srecord_type)
	{
	  sfield = build_decl (DECL_SOURCE_LOCATION (var),
			       FIELD_DECL, DECL_NAME (var), type);
	  DECL_ABSTRACT_ORIGIN (sfield) = var;
	  SET_DECL_ALIGN (sfield, DECL_ALIGN (field));
	  DECL_USER_ALIGN (sfield) = DECL_USER_ALIGN (field);
	  TREE_THIS_VOLATILE (sfield) = TREE_THIS_VOLATILE (field);
	  insert_field_into_struct (ctx->srecord_type, sfield);
	}
    }
  else
    {
      if (ctx->srecord_type == NULL_TREE)
	{
	  tree t;

	  /* First asymmetric field: materialize srecord_type by
	     cloning every field already in record_type.  */
	  ctx->srecord_type = lang_hooks.types.make_type (RECORD_TYPE);
	  ctx->sfield_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
	  for (t = TYPE_FIELDS (ctx->record_type); t ; t = TREE_CHAIN (t))
	    {
	      sfield = build_decl (DECL_SOURCE_LOCATION (t),
				   FIELD_DECL, DECL_NAME (t), TREE_TYPE (t));
	      DECL_ABSTRACT_ORIGIN (sfield) = DECL_ABSTRACT_ORIGIN (t);
	      insert_field_into_struct (ctx->srecord_type, sfield);
	      splay_tree_insert (ctx->sfield_map,
				 (splay_tree_key) DECL_ABSTRACT_ORIGIN (t),
				 (splay_tree_value) sfield);
	    }
	}
      sfield = field;
      insert_field_into_struct ((mask & 1) ? ctx->record_type
				: ctx->srecord_type, field);
    }

  if (mask & 1)
    splay_tree_insert (ctx->field_map, key, (splay_tree_value) field);
  if ((mask & 2) && ctx->sfield_map)
    splay_tree_insert (ctx->sfield_map, key, (splay_tree_value) sfield);
}
848
849 static tree
850 install_var_local (tree var, omp_context *ctx)
851 {
852 tree new_var = omp_copy_decl_1 (var, ctx);
853 insert_decl_map (&ctx->cb, var, new_var);
854 return new_var;
855 }
856
/* Adjust the replacement for DECL in CTX for the new context.  This means
   copying the DECL_VALUE_EXPR, and fixing up the type.  PRIVATE_DEBUG
   forces the value expression to be remapped even for constant-size
   decls.  */

static void
fixup_remapped_decl (tree decl, omp_context *ctx, bool private_debug)
{
  tree new_decl, size;

  new_decl = lookup_decl (decl, ctx);

  TREE_TYPE (new_decl) = remap_type (TREE_TYPE (decl), &ctx->cb);

  if ((!TREE_CONSTANT (DECL_SIZE (new_decl)) || private_debug)
      && DECL_HAS_VALUE_EXPR_P (decl))
    {
      /* Remap the value expression into the new context.  */
      tree ve = DECL_VALUE_EXPR (decl);
      walk_tree (&ve, copy_tree_body_r, &ctx->cb, NULL);
      SET_DECL_VALUE_EXPR (new_decl, ve);
      DECL_HAS_VALUE_EXPR_P (new_decl) = 1;
    }

  if (!TREE_CONSTANT (DECL_SIZE (new_decl)))
    {
      /* Variable-sized decl: remap its size trees, falling back to
	 the remapped type's size on error.  */
      size = remap_decl (DECL_SIZE (decl), &ctx->cb);
      if (size == error_mark_node)
	size = TYPE_SIZE (TREE_TYPE (new_decl));
      DECL_SIZE (new_decl) = size;

      size = remap_decl (DECL_SIZE_UNIT (decl), &ctx->cb);
      if (size == error_mark_node)
	size = TYPE_SIZE_UNIT (TREE_TYPE (new_decl));
      DECL_SIZE_UNIT (new_decl) = size;
    }
}
891
/* The callback for remap_decl.  Search all containing contexts for a
   mapping of the variable; this avoids having to duplicate the splay
   tree ahead of time.  We know a mapping doesn't already exist in the
   given context.  Create new mappings to implement default semantics.
   Returns VAR itself when no remapping is needed, an existing or new
   replacement decl, or error_mark_node when a local of the source
   function reaches a taskreg context unmapped.  */

static tree
omp_copy_decl (tree var, copy_body_data *cb)
{
  /* CB is really an omp_context — see the comment on omp_context::cb.  */
  omp_context *ctx = (omp_context *) cb;
  tree new_var;

  if (TREE_CODE (var) == LABEL_DECL)
    {
      /* Forced/non-local labels must not be duplicated; others get a
	 fresh artificial label in the current function.  */
      if (FORCED_LABEL (var) || DECL_NONLOCAL (var))
	return var;
      new_var = create_artificial_label (DECL_SOURCE_LOCATION (var));
      DECL_CONTEXT (new_var) = current_function_decl;
      insert_decl_map (&ctx->cb, var, new_var);
      return new_var;
    }

  /* Walk outward until the innermost enclosing taskreg context,
     returning any mapping found along the way.  */
  while (!is_taskreg_ctx (ctx))
    {
      ctx = ctx->outer;
      if (ctx == NULL)
	return var;
      new_var = maybe_lookup_decl (var, ctx);
      if (new_var)
	return new_var;
    }

  if (is_global_var (var) || decl_function_context (var) != ctx->cb.src_fn)
    return var;

  return error_mark_node;
}
928
/* Create a new context for STMT, with OUTER_CTX being the surrounding
   context (or NULL for an outermost construct).  The context is
   registered in ALL_CONTEXTS keyed by STMT.  */

static omp_context *
new_omp_context (gimple *stmt, omp_context *outer_ctx)
{
  omp_context *ctx = XCNEW (omp_context);

  splay_tree_insert (all_contexts, (splay_tree_key) stmt,
		     (splay_tree_value) ctx);
  ctx->stmt = stmt;

  if (outer_ctx)
    {
      /* Inherit the copy_body_data state from the enclosing context.  */
      ctx->outer = outer_ctx;
      ctx->cb = outer_ctx->cb;
      ctx->cb.block = NULL;
      ctx->depth = outer_ctx->depth + 1;
    }
  else
    {
      /* Outermost construct: initialize copy_body_data from scratch,
	 copying within the current function for now.  */
      ctx->cb.src_fn = current_function_decl;
      ctx->cb.dst_fn = current_function_decl;
      ctx->cb.src_node = cgraph_node::get (current_function_decl);
      gcc_checking_assert (ctx->cb.src_node);
      ctx->cb.dst_node = ctx->cb.src_node;
      ctx->cb.src_cfun = cfun;
      ctx->cb.copy_decl = omp_copy_decl;
      ctx->cb.eh_lp_nr = 0;
      ctx->cb.transform_call_graph_edges = CB_CGE_MOVE;
      ctx->cb.adjust_array_error_bounds = true;
      ctx->cb.dont_remap_vla_if_no_change = true;
      ctx->depth = 1;
    }

  /* Each context gets its own decl map (freed in delete_omp_context).  */
  ctx->cb.decl_map = new hash_map<tree, tree>;

  return ctx;
}
967
968 static gimple_seq maybe_catch_exception (gimple_seq);
969
/* Finalize task copyfn: gimplify the task firstprivate copy function
   of TASK_STMT (if any), wrap it for EH if needed, and register it
   with the callgraph.  */

static void
finalize_task_copyfn (gomp_task *task_stmt)
{
  struct function *child_cfun;
  tree child_fn;
  gimple_seq seq = NULL, new_seq;
  gbind *bind;

  child_fn = gimple_omp_task_copy_fn (task_stmt);
  /* Nothing to do when the task has no copy function.  */
  if (child_fn == NULL_TREE)
    return;

  child_cfun = DECL_STRUCT_FUNCTION (child_fn);
  DECL_STRUCT_FUNCTION (child_fn)->curr_properties = cfun->curr_properties;

  push_cfun (child_cfun);
  bind = gimplify_body (child_fn, false);
  gimple_seq_add_stmt (&seq, bind);
  new_seq = maybe_catch_exception (seq);
  if (new_seq != seq)
    {
      /* EH wrapping produced a new sequence; re-wrap it in a bind.  */
      bind = gimple_build_bind (NULL, new_seq, NULL);
      seq = NULL;
      gimple_seq_add_stmt (&seq, bind);
    }
  gimple_set_body (child_fn, seq);
  pop_cfun ();

  /* Inform the callgraph about the new function.  */
  cgraph_node *node = cgraph_node::get_create (child_fn);
  node->parallelized_function = 1;
  cgraph_node::add_new_function (child_fn, false);
}
1005
/* Destroy a omp_context data structures.  Called through the splay tree
   value delete callback (for ALL_CONTEXTS).  */

static void
delete_omp_context (splay_tree_value value)
{
  omp_context *ctx = (omp_context *) value;

  delete ctx->cb.decl_map;

  if (ctx->field_map)
    splay_tree_delete (ctx->field_map);
  if (ctx->sfield_map)
    splay_tree_delete (ctx->sfield_map);

  /* We hijacked DECL_ABSTRACT_ORIGIN earlier (in install_var_field).
     We need to clear it before it produces corrupt debug information.  */
  if (ctx->record_type)
    {
      tree t;
      for (t = TYPE_FIELDS (ctx->record_type); t ; t = DECL_CHAIN (t))
	DECL_ABSTRACT_ORIGIN (t) = NULL;
    }
  if (ctx->srecord_type)
    {
      tree t;
      for (t = TYPE_FIELDS (ctx->srecord_type); t ; t = DECL_CHAIN (t))
	DECL_ABSTRACT_ORIGIN (t) = NULL;
    }

  /* Task contexts may carry a copy function that still needs
     finalizing.  */
  if (is_task_ctx (ctx))
    finalize_task_copyfn (as_a <gomp_task *> (ctx->stmt));

  if (ctx->task_reduction_map)
    {
      ctx->task_reductions.release ();
      delete ctx->task_reduction_map;
    }

  delete ctx->lastprivate_conditional_map;

  XDELETE (ctx);
}
1049
1050 /* Fix up RECEIVER_DECL with a type that has been remapped to the child
1051 context. */
1052
static void
fixup_child_record_type (omp_context *ctx)
{
  tree f, type = ctx->record_type;

  if (!ctx->receiver_decl)
    return;
  /* ??? It isn't sufficient to just call remap_type here, because
     variably_modified_type_p doesn't work the way we expect for
     record types.  Testing each field for whether it needs remapping
     and creating a new record by hand works, however.  */
  for (f = TYPE_FIELDS (type); f ; f = DECL_CHAIN (f))
    if (variably_modified_type_p (TREE_TYPE (f), ctx->cb.src_fn))
      break;
  if (f)
    {
      /* At least one field has a variably modified type: rebuild the
	 whole record field by field, remapping each field's type into
	 the child function.  */
      tree name, new_fields = NULL;

      type = lang_hooks.types.make_type (RECORD_TYPE);
      name = DECL_NAME (TYPE_NAME (ctx->record_type));
      name = build_decl (DECL_SOURCE_LOCATION (ctx->receiver_decl),
			 TYPE_DECL, name, type);
      TYPE_NAME (type) = name;

      for (f = TYPE_FIELDS (ctx->record_type); f ; f = DECL_CHAIN (f))
	{
	  tree new_f = copy_node (f);
	  DECL_CONTEXT (new_f) = type;
	  TREE_TYPE (new_f) = remap_type (TREE_TYPE (f), &ctx->cb);
	  DECL_CHAIN (new_f) = new_fields;
	  /* Size and offset expressions may refer to decls of the
	     parent function; remap those references as well.  */
	  walk_tree (&DECL_SIZE (new_f), copy_tree_body_r, &ctx->cb, NULL);
	  walk_tree (&DECL_SIZE_UNIT (new_f), copy_tree_body_r,
		     &ctx->cb, NULL);
	  walk_tree (&DECL_FIELD_OFFSET (new_f), copy_tree_body_r,
		     &ctx->cb, NULL);
	  new_fields = new_f;

	  /* Arrange to be able to look up the receiver field
	     given the sender field.  */
	  splay_tree_insert (ctx->field_map, (splay_tree_key) f,
			     (splay_tree_value) new_f);
	}
      /* Fields were accumulated in reverse; restore source order.  */
      TYPE_FIELDS (type) = nreverse (new_fields);
      layout_type (type);
    }

  /* In a target region we never modify any of the pointers in *.omp_data_i,
     so attempt to help the optimizers.  */
  if (is_gimple_omp_offloaded (ctx->stmt))
    type = build_qualified_type (type, TYPE_QUAL_CONST);

  TREE_TYPE (ctx->receiver_decl)
    = build_qualified_type (build_reference_type (type), TYPE_QUAL_RESTRICT);
}
1107
1108 /* Instantiate decls as necessary in CTX to satisfy the data sharing
1109 specified by CLAUSES. */
1110
static void
scan_sharing_clauses (tree clauses, omp_context *ctx)
{
  tree c, decl;
  bool scan_array_reductions = false;

  /* First pass: decide, per clause, which variables need a field in the
     sender/receiver record (install_var_field) and/or a remapped copy in
     the child context (install_var_local).  A second pass below fixes up
     remapped decls once all fields exist.  */
  for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
    {
      bool by_ref;

      switch (OMP_CLAUSE_CODE (c))
	{
	case OMP_CLAUSE_PRIVATE:
	  decl = OMP_CLAUSE_DECL (c);
	  if (OMP_CLAUSE_PRIVATE_OUTER_REF (c))
	    goto do_private;
	  else if (!is_variable_sized (decl))
	    install_var_local (decl, ctx);
	  break;

	case OMP_CLAUSE_SHARED:
	  decl = OMP_CLAUSE_DECL (c);
	  /* Ignore shared directives in teams construct inside of
	     target construct.  */
	  if (gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS
	      && !is_host_teams_ctx (ctx))
	    {
	      /* Global variables don't need to be copied,
		 the receiver side will use them directly.  */
	      tree odecl = maybe_lookup_decl_in_outer_ctx (decl, ctx);
	      if (is_global_var (odecl))
		break;
	      insert_decl_map (&ctx->cb, decl, odecl);
	      break;
	    }
	  gcc_assert (is_taskreg_ctx (ctx));
	  gcc_assert (!COMPLETE_TYPE_P (TREE_TYPE (decl))
		      || !is_variable_sized (decl));
	  /* Global variables don't need to be copied,
	     the receiver side will use them directly.  */
	  if (is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx)))
	    break;
	  if (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
	    {
	      use_pointer_for_field (decl, ctx);
	      break;
	    }
	  by_ref = use_pointer_for_field (decl, NULL);
	  if ((! TREE_READONLY (decl) && !OMP_CLAUSE_SHARED_READONLY (c))
	      || TREE_ADDRESSABLE (decl)
	      || by_ref
	      || omp_is_reference (decl))
	    {
	      by_ref = use_pointer_for_field (decl, ctx);
	      install_var_field (decl, by_ref, 3, ctx);
	      install_var_local (decl, ctx);
	      break;
	    }
	  /* We don't need to copy const scalar vars back.  */
	  OMP_CLAUSE_SET_CODE (c, OMP_CLAUSE_FIRSTPRIVATE);
	  goto do_private;

	case OMP_CLAUSE_REDUCTION:
	  /* Collect reduction clauses on OpenACC compute constructs so
	     they can be checked/combined later.  */
	  if (is_oacc_parallel_or_serial (ctx) || is_oacc_kernels (ctx))
	    ctx->local_reduction_clauses
	      = tree_cons (NULL, c, ctx->local_reduction_clauses);
	  /* FALLTHRU */

	case OMP_CLAUSE_IN_REDUCTION:
	  decl = OMP_CLAUSE_DECL (c);
	  if (TREE_CODE (decl) == MEM_REF)
	    {
	      /* Array-section reduction: strip down to the base decl.  */
	      tree t = TREE_OPERAND (decl, 0);
	      if (TREE_CODE (t) == POINTER_PLUS_EXPR)
		t = TREE_OPERAND (t, 0);
	      if (TREE_CODE (t) == INDIRECT_REF
		  || TREE_CODE (t) == ADDR_EXPR)
		t = TREE_OPERAND (t, 0);
	      install_var_local (t, ctx);
	      if (is_taskreg_ctx (ctx)
		  && (!is_global_var (maybe_lookup_decl_in_outer_ctx (t, ctx))
		      || (is_task_ctx (ctx)
			  && (TREE_CODE (TREE_TYPE (t)) == POINTER_TYPE
			      || (TREE_CODE (TREE_TYPE (t)) == REFERENCE_TYPE
				  && (TREE_CODE (TREE_TYPE (TREE_TYPE (t)))
				      == POINTER_TYPE)))))
		  && !is_variable_sized (t)
		  && (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION
		      || (!OMP_CLAUSE_REDUCTION_TASK (c)
			  && !is_task_ctx (ctx))))
		{
		  by_ref = use_pointer_for_field (t, NULL);
		  if (is_task_ctx (ctx)
		      && TREE_CODE (TREE_TYPE (t)) == REFERENCE_TYPE
		      && TREE_CODE (TREE_TYPE (TREE_TYPE (t))) == POINTER_TYPE)
		    {
		      install_var_field (t, false, 1, ctx);
		      install_var_field (t, by_ref, 2, ctx);
		    }
		  else
		    install_var_field (t, by_ref, 3, ctx);
		}
	      break;
	    }
	  if (is_task_ctx (ctx)
	      || (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
		  && OMP_CLAUSE_REDUCTION_TASK (c)
		  && is_parallel_ctx (ctx)))
	    {
	      /* Global variables don't need to be copied,
		 the receiver side will use them directly.  */
	      if (!is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx)))
		{
		  by_ref = use_pointer_for_field (decl, ctx);
		  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_IN_REDUCTION)
		    install_var_field (decl, by_ref, 3, ctx);
		}
	      install_var_local (decl, ctx);
	      break;
	    }
	  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
	      && OMP_CLAUSE_REDUCTION_TASK (c))
	    {
	      install_var_local (decl, ctx);
	      break;
	    }
	  goto do_private;

	case OMP_CLAUSE_LASTPRIVATE:
	  /* Let the corresponding firstprivate clause create
	     the variable.  */
	  if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
	    break;
	  /* FALLTHRU */

	case OMP_CLAUSE_FIRSTPRIVATE:
	case OMP_CLAUSE_LINEAR:
	  decl = OMP_CLAUSE_DECL (c);
	do_private:
	  if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE
	       || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_IS_DEVICE_PTR)
	      && is_gimple_omp_offloaded (ctx->stmt))
	    {
	      if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
		install_var_field (decl, !omp_is_reference (decl), 3, ctx);
	      else if (TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
		install_var_field (decl, true, 3, ctx);
	      else
		install_var_field (decl, false, 3, ctx);
	    }
	  if (is_variable_sized (decl))
	    {
	      if (is_task_ctx (ctx))
		install_var_field (decl, false, 1, ctx);
	      break;
	    }
	  else if (is_taskreg_ctx (ctx))
	    {
	      bool global
		= is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx));
	      by_ref = use_pointer_for_field (decl, NULL);

	      if (is_task_ctx (ctx)
		  && (global || by_ref || omp_is_reference (decl)))
		{
		  install_var_field (decl, false, 1, ctx);
		  if (!global)
		    install_var_field (decl, by_ref, 2, ctx);
		}
	      else if (!global)
		install_var_field (decl, by_ref, 3, ctx);
	    }
	  install_var_local (decl, ctx);
	  break;

	case OMP_CLAUSE_USE_DEVICE_PTR:
	case OMP_CLAUSE_USE_DEVICE_ADDR:
	  decl = OMP_CLAUSE_DECL (c);

	  /* Fortran array descriptors.  */
	  if (lang_hooks.decls.omp_array_data (decl, true))
	    install_var_field (decl, false, 19, ctx);
	  else if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_USE_DEVICE_ADDR
		    && !omp_is_reference (decl)
		    && !omp_is_allocatable_or_ptr (decl))
		   || TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
	    install_var_field (decl, true, 11, ctx);
	  else
	    install_var_field (decl, false, 11, ctx);
	  if (DECL_SIZE (decl)
	      && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
	    {
	      /* Variable-sized decl: also install the base pointer
		 behind its DECL_VALUE_EXPR.  */
	      tree decl2 = DECL_VALUE_EXPR (decl);
	      gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
	      decl2 = TREE_OPERAND (decl2, 0);
	      gcc_assert (DECL_P (decl2));
	      install_var_local (decl2, ctx);
	    }
	  install_var_local (decl, ctx);
	  break;

	case OMP_CLAUSE_IS_DEVICE_PTR:
	  decl = OMP_CLAUSE_DECL (c);
	  goto do_private;

	case OMP_CLAUSE__LOOPTEMP_:
	case OMP_CLAUSE__REDUCTEMP_:
	  gcc_assert (is_taskreg_ctx (ctx));
	  decl = OMP_CLAUSE_DECL (c);
	  install_var_field (decl, false, 3, ctx);
	  install_var_local (decl, ctx);
	  break;

	case OMP_CLAUSE_COPYPRIVATE:
	case OMP_CLAUSE_COPYIN:
	  decl = OMP_CLAUSE_DECL (c);
	  by_ref = use_pointer_for_field (decl, NULL);
	  install_var_field (decl, by_ref, 3, ctx);
	  break;

	case OMP_CLAUSE_FINAL:
	case OMP_CLAUSE_IF:
	case OMP_CLAUSE_NUM_THREADS:
	case OMP_CLAUSE_NUM_TEAMS:
	case OMP_CLAUSE_THREAD_LIMIT:
	case OMP_CLAUSE_DEVICE:
	case OMP_CLAUSE_SCHEDULE:
	case OMP_CLAUSE_DIST_SCHEDULE:
	case OMP_CLAUSE_DEPEND:
	case OMP_CLAUSE_PRIORITY:
	case OMP_CLAUSE_GRAINSIZE:
	case OMP_CLAUSE_NUM_TASKS:
	case OMP_CLAUSE_NUM_GANGS:
	case OMP_CLAUSE_NUM_WORKERS:
	case OMP_CLAUSE_VECTOR_LENGTH:
	  /* These carry an expression operand that is evaluated in the
	     enclosing context, not in this one.  */
	  if (ctx->outer)
	    scan_omp_op (&OMP_CLAUSE_OPERAND (c, 0), ctx->outer);
	  break;

	case OMP_CLAUSE_TO:
	case OMP_CLAUSE_FROM:
	case OMP_CLAUSE_MAP:
	  if (ctx->outer)
	    scan_omp_op (&OMP_CLAUSE_SIZE (c), ctx->outer);
	  decl = OMP_CLAUSE_DECL (c);
	  /* Global variables with "omp declare target" attribute
	     don't need to be copied, the receiver side will use them
	     directly.  However, global variables with "omp declare target link"
	     attribute need to be copied.  Or when ALWAYS modifier is used.  */
	  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
	      && DECL_P (decl)
	      && ((OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_FIRSTPRIVATE_POINTER
		   && (OMP_CLAUSE_MAP_KIND (c)
		       != GOMP_MAP_FIRSTPRIVATE_REFERENCE)
		   && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_ATTACH
		   && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_DETACH)
		  || TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
	      && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_ALWAYS_TO
	      && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_ALWAYS_FROM
	      && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_ALWAYS_TOFROM
	      && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_TO_PSET
	      && is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx))
	      && varpool_node::get_create (decl)->offloadable
	      && !lookup_attribute ("omp declare target link",
				    DECL_ATTRIBUTES (decl)))
	    break;
	  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
	      && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_POINTER)
	    {
	      /* Ignore GOMP_MAP_POINTER kind for arrays in regions that are
		 not offloaded; there is nothing to map for those.  */
	      if (!is_gimple_omp_offloaded (ctx->stmt)
		  && !POINTER_TYPE_P (TREE_TYPE (decl))
		  && !OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c))
		break;
	    }
	  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
	      && DECL_P (decl)
	      && (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ATTACH
		  || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_DETACH)
	      && is_omp_target (ctx->stmt))
	    {
	      /* If this is an offloaded region, an attach operation should
		 only exist when the pointer variable is mapped in a prior
		 clause.  */
	      if (is_gimple_omp_offloaded (ctx->stmt))
		gcc_assert
		  (maybe_lookup_decl (decl, ctx)
		   || (is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx))
		       && lookup_attribute ("omp declare target",
					    DECL_ATTRIBUTES (decl))));

	      /* By itself, attach/detach is generated as part of pointer
		 variable mapping and should not create new variables in the
		 offloaded region, however sender refs for it must be created
		 for its address to be passed to the runtime.  */
	      tree field
		= build_decl (OMP_CLAUSE_LOCATION (c),
			      FIELD_DECL, NULL_TREE, ptr_type_node);
	      SET_DECL_ALIGN (field, TYPE_ALIGN (ptr_type_node));
	      insert_field_into_struct (ctx->record_type, field);
	      /* To not clash with a map of the pointer variable itself,
		 attach/detach maps have their field looked up by the *clause*
		 tree expression, not the decl.  */
	      gcc_assert (!splay_tree_lookup (ctx->field_map,
					      (splay_tree_key) c));
	      splay_tree_insert (ctx->field_map, (splay_tree_key) c,
				 (splay_tree_value) field);
	      break;
	    }
	  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
	      && (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER
		  || (OMP_CLAUSE_MAP_KIND (c)
		      == GOMP_MAP_FIRSTPRIVATE_REFERENCE)))
	    {
	      if (TREE_CODE (decl) == COMPONENT_REF
		  || (TREE_CODE (decl) == INDIRECT_REF
		      && TREE_CODE (TREE_OPERAND (decl, 0)) == COMPONENT_REF
		      && (TREE_CODE (TREE_TYPE (TREE_OPERAND (decl, 0)))
			  == REFERENCE_TYPE)))
		break;
	      if (DECL_SIZE (decl)
		  && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
		{
		  tree decl2 = DECL_VALUE_EXPR (decl);
		  gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
		  decl2 = TREE_OPERAND (decl2, 0);
		  gcc_assert (DECL_P (decl2));
		  install_var_local (decl2, ctx);
		}
	      install_var_local (decl, ctx);
	      break;
	    }
	  if (DECL_P (decl))
	    {
	      if (DECL_SIZE (decl)
		  && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
		{
		  tree decl2 = DECL_VALUE_EXPR (decl);
		  gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
		  decl2 = TREE_OPERAND (decl2, 0);
		  gcc_assert (DECL_P (decl2));
		  install_var_field (decl2, true, 3, ctx);
		  install_var_local (decl2, ctx);
		  install_var_local (decl, ctx);
		}
	      else
		{
		  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
		      && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_POINTER
		      && !OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c)
		      && TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
		    install_var_field (decl, true, 7, ctx);
		  else
		    install_var_field (decl, true, 3, ctx);
		  if (is_gimple_omp_offloaded (ctx->stmt)
		      && !OMP_CLAUSE_MAP_IN_REDUCTION (c))
		    install_var_local (decl, ctx);
		}
	    }
	  else
	    {
	      /* Non-decl map operand (e.g. an array section).  */
	      tree base = get_base_address (decl);
	      tree nc = OMP_CLAUSE_CHAIN (c);
	      if (DECL_P (base)
		  && nc != NULL_TREE
		  && OMP_CLAUSE_CODE (nc) == OMP_CLAUSE_MAP
		  && OMP_CLAUSE_DECL (nc) == base
		  && OMP_CLAUSE_MAP_KIND (nc) == GOMP_MAP_POINTER
		  && integer_zerop (OMP_CLAUSE_SIZE (nc)))
		{
		  OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c) = 1;
		  OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (nc) = 1;
		}
	      else
		{
		  if (ctx->outer)
		    {
		      scan_omp_op (&OMP_CLAUSE_DECL (c), ctx->outer);
		      decl = OMP_CLAUSE_DECL (c);
		    }
		  gcc_assert (!splay_tree_lookup (ctx->field_map,
						  (splay_tree_key) decl));
		  tree field
		    = build_decl (OMP_CLAUSE_LOCATION (c),
				  FIELD_DECL, NULL_TREE, ptr_type_node);
		  SET_DECL_ALIGN (field, TYPE_ALIGN (ptr_type_node));
		  insert_field_into_struct (ctx->record_type, field);
		  splay_tree_insert (ctx->field_map, (splay_tree_key) decl,
				     (splay_tree_value) field);
		}
	    }
	  break;

	case OMP_CLAUSE_ORDER:
	  ctx->order_concurrent = true;
	  break;

	case OMP_CLAUSE_BIND:
	  ctx->loop_p = true;
	  break;

	case OMP_CLAUSE_NOWAIT:
	case OMP_CLAUSE_ORDERED:
	case OMP_CLAUSE_COLLAPSE:
	case OMP_CLAUSE_UNTIED:
	case OMP_CLAUSE_MERGEABLE:
	case OMP_CLAUSE_PROC_BIND:
	case OMP_CLAUSE_SAFELEN:
	case OMP_CLAUSE_SIMDLEN:
	case OMP_CLAUSE_THREADS:
	case OMP_CLAUSE_SIMD:
	case OMP_CLAUSE_NOGROUP:
	case OMP_CLAUSE_DEFAULTMAP:
	case OMP_CLAUSE_ASYNC:
	case OMP_CLAUSE_WAIT:
	case OMP_CLAUSE_GANG:
	case OMP_CLAUSE_WORKER:
	case OMP_CLAUSE_VECTOR:
	case OMP_CLAUSE_INDEPENDENT:
	case OMP_CLAUSE_AUTO:
	case OMP_CLAUSE_SEQ:
	case OMP_CLAUSE_TILE:
	case OMP_CLAUSE__SIMT_:
	case OMP_CLAUSE_DEFAULT:
	case OMP_CLAUSE_NONTEMPORAL:
	case OMP_CLAUSE_IF_PRESENT:
	case OMP_CLAUSE_FINALIZE:
	case OMP_CLAUSE_TASK_REDUCTION:
	case OMP_CLAUSE_ALLOCATE:
	  break;

	case OMP_CLAUSE_ALIGNED:
	  decl = OMP_CLAUSE_DECL (c);
	  if (is_global_var (decl)
	      && TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
	    install_var_local (decl, ctx);
	  break;

	case OMP_CLAUSE__CONDTEMP_:
	  decl = OMP_CLAUSE_DECL (c);
	  if (is_parallel_ctx (ctx))
	    {
	      install_var_field (decl, false, 3, ctx);
	      install_var_local (decl, ctx);
	    }
	  else if (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
		   && gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_SIMD
		   && !OMP_CLAUSE__CONDTEMP__ITER (c))
	    install_var_local (decl, ctx);
	  break;

	case OMP_CLAUSE__CACHE_:
	default:
	  gcc_unreachable ();
	}
    }

  /* Second pass: now that all fields/locals exist, fix up remapped decls
     (DECL_VALUE_EXPRs etc.) and note clauses with nested gimple sequences
     that still need scanning.  */
  for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
    {
      switch (OMP_CLAUSE_CODE (c))
	{
	case OMP_CLAUSE_LASTPRIVATE:
	  /* Let the corresponding firstprivate clause create
	     the variable.  */
	  if (OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c))
	    scan_array_reductions = true;
	  if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
	    break;
	  /* FALLTHRU */

	case OMP_CLAUSE_FIRSTPRIVATE:
	case OMP_CLAUSE_PRIVATE:
	case OMP_CLAUSE_LINEAR:
	case OMP_CLAUSE_IS_DEVICE_PTR:
	  decl = OMP_CLAUSE_DECL (c);
	  if (is_variable_sized (decl))
	    {
	      if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE
		   || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_IS_DEVICE_PTR)
		  && is_gimple_omp_offloaded (ctx->stmt))
		{
		  tree decl2 = DECL_VALUE_EXPR (decl);
		  gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
		  decl2 = TREE_OPERAND (decl2, 0);
		  gcc_assert (DECL_P (decl2));
		  install_var_local (decl2, ctx);
		  fixup_remapped_decl (decl2, ctx, false);
		}
	      install_var_local (decl, ctx);
	    }
	  fixup_remapped_decl (decl, ctx,
			       OMP_CLAUSE_CODE (c) == OMP_CLAUSE_PRIVATE
			       && OMP_CLAUSE_PRIVATE_DEBUG (c));
	  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
	      && OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c))
	    scan_array_reductions = true;
	  break;

	case OMP_CLAUSE_REDUCTION:
	case OMP_CLAUSE_IN_REDUCTION:
	  decl = OMP_CLAUSE_DECL (c);
	  if (TREE_CODE (decl) != MEM_REF)
	    {
	      if (is_variable_sized (decl))
		install_var_local (decl, ctx);
	      fixup_remapped_decl (decl, ctx, false);
	    }
	  if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
	    scan_array_reductions = true;
	  break;

	case OMP_CLAUSE_TASK_REDUCTION:
	  if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
	    scan_array_reductions = true;
	  break;

	case OMP_CLAUSE_SHARED:
	  /* Ignore shared directives in teams construct inside of
	     target construct.  */
	  if (gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS
	      && !is_host_teams_ctx (ctx))
	    break;
	  decl = OMP_CLAUSE_DECL (c);
	  if (is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx)))
	    break;
	  if (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
	    {
	      if (is_global_var (maybe_lookup_decl_in_outer_ctx (decl,
								 ctx->outer)))
		break;
	      bool by_ref = use_pointer_for_field (decl, ctx);
	      install_var_field (decl, by_ref, 11, ctx);
	      break;
	    }
	  fixup_remapped_decl (decl, ctx, false);
	  break;

	case OMP_CLAUSE_MAP:
	  if (!is_gimple_omp_offloaded (ctx->stmt))
	    break;
	  decl = OMP_CLAUSE_DECL (c);
	  if (DECL_P (decl)
	      && ((OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_FIRSTPRIVATE_POINTER
		   && (OMP_CLAUSE_MAP_KIND (c)
		       != GOMP_MAP_FIRSTPRIVATE_REFERENCE))
		  || TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
	      && is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx))
	      && varpool_node::get_create (decl)->offloadable)
	    break;
	  /* NOTE(review): the !is_gimple_omp_offloaded test below looks
	     unreachable, since non-offloaded contexts already broke out at
	     the top of this case — confirm whether it can be dropped.  */
	  if ((OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ATTACH
	       || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_DETACH)
	      && is_omp_target (ctx->stmt)
	      && !is_gimple_omp_offloaded (ctx->stmt))
	    break;
	  if (DECL_P (decl))
	    {
	      if ((OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_POINTER
		   || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER)
		  && TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE
		  && !COMPLETE_TYPE_P (TREE_TYPE (decl)))
		{
		  tree new_decl = lookup_decl (decl, ctx);
		  TREE_TYPE (new_decl)
		    = remap_type (TREE_TYPE (decl), &ctx->cb);
		}
	      else if (DECL_SIZE (decl)
		       && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
		{
		  tree decl2 = DECL_VALUE_EXPR (decl);
		  gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
		  decl2 = TREE_OPERAND (decl2, 0);
		  gcc_assert (DECL_P (decl2));
		  fixup_remapped_decl (decl2, ctx, false);
		  fixup_remapped_decl (decl, ctx, true);
		}
	      else
		fixup_remapped_decl (decl, ctx, false);
	    }
	  break;

	case OMP_CLAUSE_COPYPRIVATE:
	case OMP_CLAUSE_COPYIN:
	case OMP_CLAUSE_DEFAULT:
	case OMP_CLAUSE_IF:
	case OMP_CLAUSE_NUM_THREADS:
	case OMP_CLAUSE_NUM_TEAMS:
	case OMP_CLAUSE_THREAD_LIMIT:
	case OMP_CLAUSE_DEVICE:
	case OMP_CLAUSE_SCHEDULE:
	case OMP_CLAUSE_DIST_SCHEDULE:
	case OMP_CLAUSE_NOWAIT:
	case OMP_CLAUSE_ORDERED:
	case OMP_CLAUSE_COLLAPSE:
	case OMP_CLAUSE_UNTIED:
	case OMP_CLAUSE_FINAL:
	case OMP_CLAUSE_MERGEABLE:
	case OMP_CLAUSE_PROC_BIND:
	case OMP_CLAUSE_SAFELEN:
	case OMP_CLAUSE_SIMDLEN:
	case OMP_CLAUSE_ALIGNED:
	case OMP_CLAUSE_DEPEND:
	case OMP_CLAUSE_ALLOCATE:
	case OMP_CLAUSE__LOOPTEMP_:
	case OMP_CLAUSE__REDUCTEMP_:
	case OMP_CLAUSE_TO:
	case OMP_CLAUSE_FROM:
	case OMP_CLAUSE_PRIORITY:
	case OMP_CLAUSE_GRAINSIZE:
	case OMP_CLAUSE_NUM_TASKS:
	case OMP_CLAUSE_THREADS:
	case OMP_CLAUSE_SIMD:
	case OMP_CLAUSE_NOGROUP:
	case OMP_CLAUSE_DEFAULTMAP:
	case OMP_CLAUSE_ORDER:
	case OMP_CLAUSE_BIND:
	case OMP_CLAUSE_USE_DEVICE_PTR:
	case OMP_CLAUSE_USE_DEVICE_ADDR:
	case OMP_CLAUSE_NONTEMPORAL:
	case OMP_CLAUSE_ASYNC:
	case OMP_CLAUSE_WAIT:
	case OMP_CLAUSE_NUM_GANGS:
	case OMP_CLAUSE_NUM_WORKERS:
	case OMP_CLAUSE_VECTOR_LENGTH:
	case OMP_CLAUSE_GANG:
	case OMP_CLAUSE_WORKER:
	case OMP_CLAUSE_VECTOR:
	case OMP_CLAUSE_INDEPENDENT:
	case OMP_CLAUSE_AUTO:
	case OMP_CLAUSE_SEQ:
	case OMP_CLAUSE_TILE:
	case OMP_CLAUSE__SIMT_:
	case OMP_CLAUSE_IF_PRESENT:
	case OMP_CLAUSE_FINALIZE:
	case OMP_CLAUSE__CONDTEMP_:
	  break;

	case OMP_CLAUSE__CACHE_:
	default:
	  gcc_unreachable ();
	}
    }

  /* Third pass: scan any nested gimple sequences attached to reduction,
     lastprivate and linear clauses (not valid for OpenACC).  */
  gcc_checking_assert (!scan_array_reductions
		       || !is_gimple_omp_oacc (ctx->stmt));
  if (scan_array_reductions)
    {
      for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
	if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
	     || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_IN_REDUCTION
	     || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_TASK_REDUCTION)
	    && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
	  {
	    scan_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c), ctx);
	    scan_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c), ctx);
	  }
	else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
		 && OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c))
	  scan_omp (&OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c), ctx);
	else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
		 && OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c))
	  scan_omp (&OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c), ctx);
    }
}
1775
1776 /* Create a new name for omp child function. Returns an identifier. */
1777
1778 static tree
1779 create_omp_child_function_name (bool task_copy)
1780 {
1781 return clone_function_name_numbered (current_function_decl,
1782 task_copy ? "_omp_cpyfn" : "_omp_fn");
1783 }
1784
1785 /* Return true if CTX may belong to offloaded code: either if current function
1786 is offloaded, or any enclosing context corresponds to a target region. */
1787
1788 static bool
1789 omp_maybe_offloaded_ctx (omp_context *ctx)
1790 {
1791 if (cgraph_node::get (current_function_decl)->offloadable)
1792 return true;
1793 for (; ctx; ctx = ctx->outer)
1794 if (is_gimple_omp_offloaded (ctx->stmt))
1795 return true;
1796 return false;
1797 }
1798
1799 /* Build a decl for the omp child function. It'll not contain a body
1800 yet, just the bare decl. */
1801
static void
create_omp_child_function (omp_context *ctx, bool task_copy)
{
  tree decl, type, name, t;

  name = create_omp_child_function_name (task_copy);
  /* A task copy function takes (dst, src) pointers; an outlined region
     body takes a single .omp_data_i pointer.  */
  if (task_copy)
    type = build_function_type_list (void_type_node, ptr_type_node,
				     ptr_type_node, NULL_TREE);
  else
    type = build_function_type_list (void_type_node, ptr_type_node, NULL_TREE);

  decl = build_decl (gimple_location (ctx->stmt), FUNCTION_DECL, name, type);

  gcc_checking_assert (!is_gimple_omp_oacc (ctx->stmt)
		       || !task_copy);
  if (!task_copy)
    ctx->cb.dst_fn = decl;
  else
    gimple_omp_task_set_copy_fn (ctx->stmt, decl);

  TREE_STATIC (decl) = 1;
  TREE_USED (decl) = 1;
  DECL_ARTIFICIAL (decl) = 1;
  DECL_IGNORED_P (decl) = 0;
  TREE_PUBLIC (decl) = 0;
  DECL_UNINLINABLE (decl) = 1;
  DECL_EXTERNAL (decl) = 0;
  DECL_CONTEXT (decl) = NULL_TREE;
  DECL_INITIAL (decl) = make_node (BLOCK);
  BLOCK_SUPERCONTEXT (DECL_INITIAL (decl)) = decl;
  DECL_ATTRIBUTES (decl) = DECL_ATTRIBUTES (current_function_decl);
  /* Remove omp declare simd attribute from the new attributes.  */
  if (tree a = lookup_attribute ("omp declare simd", DECL_ATTRIBUTES (decl)))
    {
      /* A is positioned at the last such attribute; everything past it
	 can be shared, everything before it must be copied so the
	 parent's attribute list is left untouched.  */
      while (tree a2 = lookup_attribute ("omp declare simd", TREE_CHAIN (a)))
	a = a2;
      a = TREE_CHAIN (a);
      for (tree *p = &DECL_ATTRIBUTES (decl); *p != a;)
	if (is_attribute_p ("omp declare simd", get_attribute_name (*p)))
	  *p = TREE_CHAIN (*p);
	else
	  {
	    tree chain = TREE_CHAIN (*p);
	    *p = copy_node (*p);
	    p = &TREE_CHAIN (*p);
	    *p = chain;
	  }
    }
  DECL_FUNCTION_SPECIFIC_OPTIMIZATION (decl)
    = DECL_FUNCTION_SPECIFIC_OPTIMIZATION (current_function_decl);
  DECL_FUNCTION_SPECIFIC_TARGET (decl)
    = DECL_FUNCTION_SPECIFIC_TARGET (current_function_decl);
  DECL_FUNCTION_VERSIONED (decl)
    = DECL_FUNCTION_VERSIONED (current_function_decl);

  if (omp_maybe_offloaded_ctx (ctx))
    {
      cgraph_node::get_create (decl)->offloadable = 1;
      if (ENABLE_OFFLOADING)
	g->have_offload = true;
    }

  if (cgraph_node::get_create (decl)->offloadable
      && !lookup_attribute ("omp declare target",
			    DECL_ATTRIBUTES (current_function_decl)))
    {
      const char *target_attr = (is_gimple_omp_offloaded (ctx->stmt)
				 ? "omp target entrypoint"
				 : "omp declare target");
      DECL_ATTRIBUTES (decl)
	= tree_cons (get_identifier (target_attr),
		     NULL_TREE, DECL_ATTRIBUTES (decl));
    }

  /* Build the void RESULT_DECL.  */
  t = build_decl (DECL_SOURCE_LOCATION (decl),
		  RESULT_DECL, NULL_TREE, void_type_node);
  DECL_ARTIFICIAL (t) = 1;
  DECL_IGNORED_P (t) = 1;
  DECL_CONTEXT (t) = decl;
  DECL_RESULT (decl) = t;

  /* Build the .omp_data_i receiver parameter.  */
  tree data_name = get_identifier (".omp_data_i");
  t = build_decl (DECL_SOURCE_LOCATION (decl), PARM_DECL, data_name,
		  ptr_type_node);
  DECL_ARTIFICIAL (t) = 1;
  DECL_NAMELESS (t) = 1;
  DECL_ARG_TYPE (t) = ptr_type_node;
  DECL_CONTEXT (t) = current_function_decl;
  TREE_USED (t) = 1;
  TREE_READONLY (t) = 1;
  DECL_ARGUMENTS (decl) = t;
  if (!task_copy)
    ctx->receiver_decl = t;
  else
    {
      /* Task copy functions get a second (source) parameter,
	 prepended so argument order is (.omp_data_o, .omp_data_i).  */
      t = build_decl (DECL_SOURCE_LOCATION (decl),
		      PARM_DECL, get_identifier (".omp_data_o"),
		      ptr_type_node);
      DECL_ARTIFICIAL (t) = 1;
      DECL_NAMELESS (t) = 1;
      DECL_ARG_TYPE (t) = ptr_type_node;
      DECL_CONTEXT (t) = current_function_decl;
      TREE_USED (t) = 1;
      TREE_ADDRESSABLE (t) = 1;
      DECL_CHAIN (t) = DECL_ARGUMENTS (decl);
      DECL_ARGUMENTS (decl) = t;
    }

  /* Allocate memory for the function structure.  The call to
     allocate_struct_function clobbers CFUN, so we need to restore
     it afterward.  */
  push_struct_function (decl);
  cfun->function_end_locus = gimple_location (ctx->stmt);
  init_tree_ssa (cfun);
  pop_cfun ();
}
1919
1920 /* Callback for walk_gimple_seq. Check if combined parallel
1921 contains gimple_omp_for_combined_into_p OMP_FOR. */
1922
1923 tree
1924 omp_find_combined_for (gimple_stmt_iterator *gsi_p,
1925 bool *handled_ops_p,
1926 struct walk_stmt_info *wi)
1927 {
1928 gimple *stmt = gsi_stmt (*gsi_p);
1929
1930 *handled_ops_p = true;
1931 switch (gimple_code (stmt))
1932 {
1933 WALK_SUBSTMTS;
1934
1935 case GIMPLE_OMP_FOR:
1936 if (gimple_omp_for_combined_into_p (stmt)
1937 && gimple_omp_for_kind (stmt)
1938 == *(const enum gf_mask *) (wi->info))
1939 {
1940 wi->info = stmt;
1941 return integer_zero_node;
1942 }
1943 break;
1944 default:
1945 break;
1946 }
1947 return NULL;
1948 }
1949
1950 /* Add _LOOPTEMP_/_REDUCTEMP_ clauses on OpenMP parallel or task. */
1951
static void
add_taskreg_looptemp_clauses (enum gf_mask msk, gimple *stmt,
			      omp_context *outer_ctx)
{
  struct walk_stmt_info wi;

  /* Locate the inner GIMPLE_OMP_FOR of kind MSK that this combined
     parallel/task was split from; omp_find_combined_for stores it back
     into wi.info on success.  */
  memset (&wi, 0, sizeof (wi));
  wi.val_only = true;
  wi.info = (void *) &msk;
  walk_gimple_seq (gimple_omp_body (stmt), omp_find_combined_for, NULL, &wi);
  if (wi.info != (void *) &msk)
    {
      gomp_for *for_stmt = as_a <gomp_for *> ((gimple *) wi.info);
      struct omp_for_data fd;
      omp_extract_for_data (for_stmt, &fd, NULL);
      /* We need two temporaries with fd.loop.v type (istart/iend)
	 and then (fd.collapse - 1) temporaries with the same
	 type for count2 ... countN-1 vars if not constant.  */
      size_t count = 2, i;
      tree type = fd.iter_type;
      if (fd.collapse > 1
	  && TREE_CODE (fd.loop.n2) != INTEGER_CST)
	{
	  count += fd.collapse - 1;
	  /* If there are lastprivate clauses on the inner
	     GIMPLE_OMP_FOR, add one more temporaries for the total number
	     of iterations (product of count1 ... countN-1).  */
	  if (omp_find_clause (gimple_omp_for_clauses (for_stmt),
			       OMP_CLAUSE_LASTPRIVATE)
	      || (msk == GF_OMP_FOR_KIND_FOR
		  && omp_find_clause (gimple_omp_parallel_clauses (stmt),
				      OMP_CLAUSE_LASTPRIVATE)))
	    {
	      tree temp = create_tmp_var (type);
	      tree c = build_omp_clause (UNKNOWN_LOCATION,
					 OMP_CLAUSE__LOOPTEMP_);
	      insert_decl_map (&outer_ctx->cb, temp, temp);
	      OMP_CLAUSE_DECL (c) = temp;
	      OMP_CLAUSE_CHAIN (c) = gimple_omp_taskreg_clauses (stmt);
	      gimple_omp_taskreg_set_clauses (stmt, c);
	    }
	  /* Non-rectangular loops with exactly one pair of adjacent
	     non-rect indices and a signed index var need three extra
	     temporaries of the index type (plus one counted in COUNT).  */
	  if (fd.non_rect
	      && fd.last_nonrect == fd.first_nonrect + 1)
	    if (tree v = gimple_omp_for_index (for_stmt, fd.last_nonrect))
	      if (!TYPE_UNSIGNED (TREE_TYPE (v)))
		{
		  v = gimple_omp_for_index (for_stmt, fd.first_nonrect);
		  tree type2 = TREE_TYPE (v);
		  count++;
		  for (i = 0; i < 3; i++)
		    {
		      tree temp = create_tmp_var (type2);
		      tree c = build_omp_clause (UNKNOWN_LOCATION,
						 OMP_CLAUSE__LOOPTEMP_);
		      insert_decl_map (&outer_ctx->cb, temp, temp);
		      OMP_CLAUSE_DECL (c) = temp;
		      OMP_CLAUSE_CHAIN (c) = gimple_omp_taskreg_clauses (stmt);
		      gimple_omp_taskreg_set_clauses (stmt, c);
		    }
		}
	}
      /* Prepend COUNT _LOOPTEMP_ clauses of the iteration type.  */
      for (i = 0; i < count; i++)
	{
	  tree temp = create_tmp_var (type);
	  tree c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__LOOPTEMP_);
	  insert_decl_map (&outer_ctx->cb, temp, temp);
	  OMP_CLAUSE_DECL (c) = temp;
	  OMP_CLAUSE_CHAIN (c) = gimple_omp_taskreg_clauses (stmt);
	  gimple_omp_taskreg_set_clauses (stmt, c);
	}
    }
  /* Taskloops with reductions additionally need a _REDUCTEMP_ pointer
     temporary on the task for the reduction bookkeeping data.  */
  if (msk == GF_OMP_FOR_KIND_TASKLOOP
      && omp_find_clause (gimple_omp_task_clauses (stmt),
			  OMP_CLAUSE_REDUCTION))
    {
      tree type = build_pointer_type (pointer_sized_int_node);
      tree temp = create_tmp_var (type);
      tree c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__REDUCTEMP_);
      insert_decl_map (&outer_ctx->cb, temp, temp);
      OMP_CLAUSE_DECL (c) = temp;
      OMP_CLAUSE_CHAIN (c) = gimple_omp_task_clauses (stmt);
      gimple_omp_task_set_clauses (stmt, c);
    }
}
2036
/* Scan an OpenMP parallel directive.  Builds a new omp_context for the
   region, creates the outlined child function and the .omp_data_s record
   used to marshal shared data to it, and recursively scans clauses and
   body.  */

static void
scan_omp_parallel (gimple_stmt_iterator *gsi, omp_context *outer_ctx)
{
  omp_context *ctx;
  tree name;
  gomp_parallel *stmt = as_a <gomp_parallel *> (gsi_stmt (*gsi));

  /* Ignore parallel directives with empty bodies, unless there
     are copyin clauses.  */
  if (optimize > 0
      && empty_body_p (gimple_omp_body (stmt))
      && omp_find_clause (gimple_omp_parallel_clauses (stmt),
			  OMP_CLAUSE_COPYIN) == NULL)
    {
      /* Replace the whole construct with a no-op.  */
      gsi_replace (gsi, gimple_build_nop (), false);
      return;
    }

  /* For combined parallel-workshare constructs, prepend the _looptemp_
     clauses needed by the inner loop before scanning.  */
  if (gimple_omp_parallel_combined_p (stmt))
    add_taskreg_looptemp_clauses (GF_OMP_FOR_KIND_FOR, stmt, outer_ctx);
  /* If any reduction clause has the task modifier, prepend a single
     _reductemp_ clause holding a pointer-sized temporary; only one is
     needed, hence the break after the first match.  NOTE(review): the
     inner `c' intentionally shadows the loop variable.  */
  for (tree c = omp_find_clause (gimple_omp_parallel_clauses (stmt),
				 OMP_CLAUSE_REDUCTION);
       c; c = omp_find_clause (OMP_CLAUSE_CHAIN (c), OMP_CLAUSE_REDUCTION))
    if (OMP_CLAUSE_REDUCTION_TASK (c))
      {
	tree type = build_pointer_type (pointer_sized_int_node);
	tree temp = create_tmp_var (type);
	tree c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__REDUCTEMP_);
	if (outer_ctx)
	  insert_decl_map (&outer_ctx->cb, temp, temp);
	OMP_CLAUSE_DECL (c) = temp;
	OMP_CLAUSE_CHAIN (c) = gimple_omp_parallel_clauses (stmt);
	gimple_omp_parallel_set_clauses (stmt, c);
	break;
      }
    else if (OMP_CLAUSE_CHAIN (c) == NULL_TREE)
      break;

  ctx = new_omp_context (stmt, outer_ctx);
  taskreg_contexts.safe_push (ctx);
  if (taskreg_nesting_level > 1)
    ctx->is_nested = true;
  ctx->field_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
  /* Build the record type that carries shared data into the child fn.  */
  ctx->record_type = lang_hooks.types.make_type (RECORD_TYPE);
  name = create_tmp_var_name (".omp_data_s");
  name = build_decl (gimple_location (stmt),
		     TYPE_DECL, name, ctx->record_type);
  DECL_ARTIFICIAL (name) = 1;
  DECL_NAMELESS (name) = 1;
  TYPE_NAME (ctx->record_type) = name;
  TYPE_ARTIFICIAL (ctx->record_type) = 1;
  create_omp_child_function (ctx, false);
  gimple_omp_parallel_set_child_fn (stmt, ctx->cb.dst_fn);

  scan_sharing_clauses (gimple_omp_parallel_clauses (stmt), ctx);
  scan_omp (gimple_omp_body_ptr (stmt), ctx);

  /* If no fields were added, nothing needs to be marshalled at all.  */
  if (TYPE_FIELDS (ctx->record_type) == NULL)
    ctx->record_type = ctx->receiver_decl = NULL;
}
2099
/* Scan an OpenMP task directive.  Like scan_omp_parallel, builds the
   context, child function and marshalling record; additionally may build
   a second "sender" record (.omp_data_a) and a copy child function when
   scan_sharing_clauses created srecord_type.  */

static void
scan_omp_task (gimple_stmt_iterator *gsi, omp_context *outer_ctx)
{
  omp_context *ctx;
  tree name, t;
  gomp_task *stmt = as_a <gomp_task *> (gsi_stmt (*gsi));

  /* Ignore task directives with empty bodies, unless they have depend
     clause.  */
  if (optimize > 0
      && gimple_omp_body (stmt)
      && empty_body_p (gimple_omp_body (stmt))
      && !omp_find_clause (gimple_omp_task_clauses (stmt), OMP_CLAUSE_DEPEND))
    {
      gsi_replace (gsi, gimple_build_nop (), false);
      return;
    }

  /* Taskloops get the _looptemp_ clauses for istart/iend and friends.  */
  if (gimple_omp_task_taskloop_p (stmt))
    add_taskreg_looptemp_clauses (GF_OMP_FOR_KIND_TASKLOOP, stmt, outer_ctx);

  ctx = new_omp_context (stmt, outer_ctx);

  /* A taskwait form has nothing to outline: scan clauses and stop.  */
  if (gimple_omp_task_taskwait_p (stmt))
    {
      scan_sharing_clauses (gimple_omp_task_clauses (stmt), ctx);
      return;
    }

  taskreg_contexts.safe_push (ctx);
  if (taskreg_nesting_level > 1)
    ctx->is_nested = true;
  ctx->field_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
  ctx->record_type = lang_hooks.types.make_type (RECORD_TYPE);
  name = create_tmp_var_name (".omp_data_s");
  name = build_decl (gimple_location (stmt),
		     TYPE_DECL, name, ctx->record_type);
  DECL_ARTIFICIAL (name) = 1;
  DECL_NAMELESS (name) = 1;
  TYPE_NAME (ctx->record_type) = name;
  TYPE_ARTIFICIAL (ctx->record_type) = 1;
  create_omp_child_function (ctx, false);
  gimple_omp_task_set_child_fn (stmt, ctx->cb.dst_fn);

  scan_sharing_clauses (gimple_omp_task_clauses (stmt), ctx);

  /* scan_sharing_clauses may have created a sender-side record type;
     if so, name it and create the second (copy) child function.  */
  if (ctx->srecord_type)
    {
      name = create_tmp_var_name (".omp_data_a");
      name = build_decl (gimple_location (stmt),
			 TYPE_DECL, name, ctx->srecord_type);
      DECL_ARTIFICIAL (name) = 1;
      DECL_NAMELESS (name) = 1;
      TYPE_NAME (ctx->srecord_type) = name;
      TYPE_ARTIFICIAL (ctx->srecord_type) = 1;
      create_omp_child_function (ctx, true);
    }

  scan_omp (gimple_omp_body_ptr (stmt), ctx);

  /* With no fields there is nothing to pass; record a zero-size,
     byte-aligned argument block on the task statement.  */
  if (TYPE_FIELDS (ctx->record_type) == NULL)
    {
      ctx->record_type = ctx->receiver_decl = NULL;
      t = build_int_cst (long_integer_type_node, 0);
      gimple_omp_task_set_arg_size (stmt, t);
      t = build_int_cst (long_integer_type_node, 1);
      gimple_omp_task_set_arg_align (stmt, t);
    }
}
2171
2172 /* Helper function for finish_taskreg_scan, called through walk_tree.
2173 If maybe_lookup_decl_in_outer_context returns non-NULL for some
2174 tree, replace it in the expression. */
2175
2176 static tree
2177 finish_taskreg_remap (tree *tp, int *walk_subtrees, void *data)
2178 {
2179 if (VAR_P (*tp))
2180 {
2181 omp_context *ctx = (omp_context *) data;
2182 tree t = maybe_lookup_decl_in_outer_ctx (*tp, ctx);
2183 if (t != *tp)
2184 {
2185 if (DECL_HAS_VALUE_EXPR_P (t))
2186 t = unshare_expr (DECL_VALUE_EXPR (t));
2187 *tp = t;
2188 }
2189 *walk_subtrees = 0;
2190 }
2191 else if (IS_TYPE_OR_DECL_P (*tp))
2192 *walk_subtrees = 0;
2193 return NULL_TREE;
2194 }
2195
/* If any decls have been made addressable during scan_omp,
   adjust their fields if needed, and layout record types
   of parallel/task constructs.  */

static void
finish_taskreg_scan (omp_context *ctx)
{
  if (ctx->record_type == NULL_TREE)
    return;

  /* If any task_shared_vars were needed, verify all
     OMP_CLAUSE_SHARED clauses on GIMPLE_OMP_{PARALLEL,TASK,TEAMS}
     statements if use_pointer_for_field hasn't changed
     because of that.  If it did, update field types now.  */
  if (task_shared_vars)
    {
      tree c;

      for (c = gimple_omp_taskreg_clauses (ctx->stmt);
	   c; c = OMP_CLAUSE_CHAIN (c))
	if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED
	    && !OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
	  {
	    tree decl = OMP_CLAUSE_DECL (c);

	    /* Global variables don't need to be copied,
	       the receiver side will use them directly.  */
	    if (is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx)))
	      continue;
	    if (!bitmap_bit_p (task_shared_vars, DECL_UID (decl))
		|| !use_pointer_for_field (decl, ctx))
	      continue;
	    tree field = lookup_field (decl, ctx);
	    /* Already a pointer to the decl's type: nothing to fix up.  */
	    if (TREE_CODE (TREE_TYPE (field)) == POINTER_TYPE
		&& TREE_TYPE (TREE_TYPE (field)) == TREE_TYPE (decl))
	      continue;
	    /* Switch the field from by-value to by-reference and reset
	       alignment accordingly, widening the record's alignment if
	       the new field needs more.  */
	    TREE_TYPE (field) = build_pointer_type (TREE_TYPE (decl));
	    TREE_THIS_VOLATILE (field) = 0;
	    DECL_USER_ALIGN (field) = 0;
	    SET_DECL_ALIGN (field, TYPE_ALIGN (TREE_TYPE (field)));
	    if (TYPE_ALIGN (ctx->record_type) < DECL_ALIGN (field))
	      SET_TYPE_ALIGN (ctx->record_type, DECL_ALIGN (field));
	    /* Keep the sender-side field in sync, if present.  */
	    if (ctx->srecord_type)
	      {
		tree sfield = lookup_sfield (decl, ctx);
		TREE_TYPE (sfield) = TREE_TYPE (field);
		TREE_THIS_VOLATILE (sfield) = 0;
		DECL_USER_ALIGN (sfield) = 0;
		SET_DECL_ALIGN (sfield, DECL_ALIGN (field));
		if (TYPE_ALIGN (ctx->srecord_type) < DECL_ALIGN (sfield))
		  SET_TYPE_ALIGN (ctx->srecord_type, DECL_ALIGN (sfield));
	      }
	  }
    }

  if (gimple_code (ctx->stmt) == GIMPLE_OMP_PARALLEL)
    {
      tree clauses = gimple_omp_parallel_clauses (ctx->stmt);
      tree c = omp_find_clause (clauses, OMP_CLAUSE__REDUCTEMP_);
      if (c)
	{
	  /* Move the _reductemp_ clause first.  GOMP_parallel_reductions
	     expects to find it at the start of data.  */
	  tree f = lookup_field (OMP_CLAUSE_DECL (c), ctx);
	  tree *p = &TYPE_FIELDS (ctx->record_type);
	  /* Unlink F from the field chain, then re-link it at the head.  */
	  while (*p)
	    if (*p == f)
	      {
		*p = DECL_CHAIN (*p);
		break;
	      }
	    else
	      p = &DECL_CHAIN (*p);
	  DECL_CHAIN (f) = TYPE_FIELDS (ctx->record_type);
	  TYPE_FIELDS (ctx->record_type) = f;
	}
      layout_type (ctx->record_type);
      fixup_child_record_type (ctx);
    }
  else if (gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS)
    {
      layout_type (ctx->record_type);
      fixup_child_record_type (ctx);
    }
  else
    {
      /* GIMPLE_OMP_TASK: variable-sized fields must come last, and
	 taskloop bookkeeping fields must come first.  */
      location_t loc = gimple_location (ctx->stmt);
      tree *p, vla_fields = NULL_TREE, *q = &vla_fields;
      /* Move VLA fields to the end.  */
      p = &TYPE_FIELDS (ctx->record_type);
      while (*p)
	if (!TYPE_SIZE_UNIT (TREE_TYPE (*p))
	    || ! TREE_CONSTANT (TYPE_SIZE_UNIT (TREE_TYPE (*p))))
	  {
	    /* Detach the field into the vla_fields list.  */
	    *q = *p;
	    *p = TREE_CHAIN (*p);
	    TREE_CHAIN (*q) = NULL_TREE;
	    q = &TREE_CHAIN (*q);
	  }
	else
	  p = &DECL_CHAIN (*p);
      /* Append the collected VLA fields after the fixed-size ones.  */
      *p = vla_fields;
      if (gimple_omp_task_taskloop_p (ctx->stmt))
	{
	  /* Move fields corresponding to first and second _looptemp_
	     clause first.  There are filled by GOMP_taskloop
	     and thus need to be in specific positions.  */
	  tree clauses = gimple_omp_task_clauses (ctx->stmt);
	  tree c1 = omp_find_clause (clauses, OMP_CLAUSE__LOOPTEMP_);
	  tree c2 = omp_find_clause (OMP_CLAUSE_CHAIN (c1),
				     OMP_CLAUSE__LOOPTEMP_);
	  tree c3 = omp_find_clause (clauses, OMP_CLAUSE__REDUCTEMP_);
	  tree f1 = lookup_field (OMP_CLAUSE_DECL (c1), ctx);
	  tree f2 = lookup_field (OMP_CLAUSE_DECL (c2), ctx);
	  tree f3 = c3 ? lookup_field (OMP_CLAUSE_DECL (c3), ctx) : NULL_TREE;
	  /* Unlink f1/f2 (and f3 if present) from wherever they are...  */
	  p = &TYPE_FIELDS (ctx->record_type);
	  while (*p)
	    if (*p == f1 || *p == f2 || *p == f3)
	      *p = DECL_CHAIN (*p);
	    else
	      p = &DECL_CHAIN (*p);
	  /* ... and re-link them at the front in order f1, f2[, f3].  */
	  DECL_CHAIN (f1) = f2;
	  if (c3)
	    {
	      DECL_CHAIN (f2) = f3;
	      DECL_CHAIN (f3) = TYPE_FIELDS (ctx->record_type);
	    }
	  else
	    DECL_CHAIN (f2) = TYPE_FIELDS (ctx->record_type);
	  TYPE_FIELDS (ctx->record_type) = f1;
	  /* Mirror the same reordering in the sender-side record.  */
	  if (ctx->srecord_type)
	    {
	      f1 = lookup_sfield (OMP_CLAUSE_DECL (c1), ctx);
	      f2 = lookup_sfield (OMP_CLAUSE_DECL (c2), ctx);
	      if (c3)
		f3 = lookup_sfield (OMP_CLAUSE_DECL (c3), ctx);
	      p = &TYPE_FIELDS (ctx->srecord_type);
	      while (*p)
		if (*p == f1 || *p == f2 || *p == f3)
		  *p = DECL_CHAIN (*p);
		else
		  p = &DECL_CHAIN (*p);
	      DECL_CHAIN (f1) = f2;
	      DECL_CHAIN (f2) = TYPE_FIELDS (ctx->srecord_type);
	      if (c3)
		{
		  DECL_CHAIN (f2) = f3;
		  DECL_CHAIN (f3) = TYPE_FIELDS (ctx->srecord_type);
		}
	      else
		DECL_CHAIN (f2) = TYPE_FIELDS (ctx->srecord_type);
	      TYPE_FIELDS (ctx->srecord_type) = f1;
	    }
	}
      layout_type (ctx->record_type);
      fixup_child_record_type (ctx);
      if (ctx->srecord_type)
	layout_type (ctx->srecord_type);
      /* Record the (possibly non-constant) size and alignment of the
	 argument block on the task statement; remap any variables in a
	 non-constant size expression to the outer context.  */
      tree t = fold_convert_loc (loc, long_integer_type_node,
				 TYPE_SIZE_UNIT (ctx->record_type));
      if (TREE_CODE (t) != INTEGER_CST)
	{
	  t = unshare_expr (t);
	  walk_tree (&t, finish_taskreg_remap, ctx, NULL);
	}
      gimple_omp_task_set_arg_size (ctx->stmt, t);
      t = build_int_cst (long_integer_type_node,
			 TYPE_ALIGN_UNIT (ctx->record_type));
      gimple_omp_task_set_arg_align (ctx->stmt, t);
    }
}
2367
2368 /* Find the enclosing offload context. */
2369
2370 static omp_context *
2371 enclosing_target_ctx (omp_context *ctx)
2372 {
2373 for (; ctx; ctx = ctx->outer)
2374 if (gimple_code (ctx->stmt) == GIMPLE_OMP_TARGET)
2375 break;
2376
2377 return ctx;
2378 }
2379
2380 /* Return true if ctx is part of an oacc kernels region. */
2381
2382 static bool
2383 ctx_in_oacc_kernels_region (omp_context *ctx)
2384 {
2385 for (;ctx != NULL; ctx = ctx->outer)
2386 {
2387 gimple *stmt = ctx->stmt;
2388 if (gimple_code (stmt) == GIMPLE_OMP_TARGET
2389 && gimple_omp_target_kind (stmt) == GF_OMP_TARGET_KIND_OACC_KERNELS)
2390 return true;
2391 }
2392
2393 return false;
2394 }
2395
2396 /* Check the parallelism clauses inside a kernels regions.
2397 Until kernels handling moves to use the same loop indirection
2398 scheme as parallel, we need to do this checking early. */
2399
2400 static unsigned
2401 check_oacc_kernel_gwv (gomp_for *stmt, omp_context *ctx)
2402 {
2403 bool checking = true;
2404 unsigned outer_mask = 0;
2405 unsigned this_mask = 0;
2406 bool has_seq = false, has_auto = false;
2407
2408 if (ctx->outer)
2409 outer_mask = check_oacc_kernel_gwv (NULL, ctx->outer);
2410 if (!stmt)
2411 {
2412 checking = false;
2413 if (gimple_code (ctx->stmt) != GIMPLE_OMP_FOR)
2414 return outer_mask;
2415 stmt = as_a <gomp_for *> (ctx->stmt);
2416 }
2417
2418 for (tree c = gimple_omp_for_clauses (stmt); c; c = OMP_CLAUSE_CHAIN (c))
2419 {
2420 switch (OMP_CLAUSE_CODE (c))
2421 {
2422 case OMP_CLAUSE_GANG:
2423 this_mask |= GOMP_DIM_MASK (GOMP_DIM_GANG);
2424 break;
2425 case OMP_CLAUSE_WORKER:
2426 this_mask |= GOMP_DIM_MASK (GOMP_DIM_WORKER);
2427 break;
2428 case OMP_CLAUSE_VECTOR:
2429 this_mask |= GOMP_DIM_MASK (GOMP_DIM_VECTOR);
2430 break;
2431 case OMP_CLAUSE_SEQ:
2432 has_seq = true;
2433 break;
2434 case OMP_CLAUSE_AUTO:
2435 has_auto = true;
2436 break;
2437 default:
2438 break;
2439 }
2440 }
2441
2442 if (checking)
2443 {
2444 if (has_seq && (this_mask || has_auto))
2445 error_at (gimple_location (stmt), "%<seq%> overrides other"
2446 " OpenACC loop specifiers");
2447 else if (has_auto && this_mask)
2448 error_at (gimple_location (stmt), "%<auto%> conflicts with other"
2449 " OpenACC loop specifiers");
2450
2451 if (this_mask & outer_mask)
2452 error_at (gimple_location (stmt), "inner loop uses same"
2453 " OpenACC parallelism as containing loop");
2454 }
2455
2456 return outer_mask | this_mask;
2457 }
2458
/* Scan a GIMPLE_OMP_FOR.  For OpenACC loops this also diagnoses invalid
   gang/worker/vector arguments and reduction nesting, and strips
   reductions inside kernels regions; then clauses, pre-body, loop
   control operands and body are scanned in the new context.  Returns
   the context created for STMT.  */

static omp_context *
scan_omp_for (gomp_for *stmt, omp_context *outer_ctx)
{
  omp_context *ctx;
  size_t i;
  tree clauses = gimple_omp_for_clauses (stmt);

  ctx = new_omp_context (stmt, outer_ctx);

  if (is_gimple_omp_oacc (stmt))
    {
      omp_context *tgt = enclosing_target_ctx (outer_ctx);

      /* Inside parallel/serial compute constructs (or an OpenACC
	 routine), gang/worker/vector clauses may not carry arguments;
	 diagnose any that do.  */
      if (!tgt || is_oacc_parallel_or_serial (tgt))
	for (tree c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
	  {
	    tree c_op0;
	    switch (OMP_CLAUSE_CODE (c))
	      {
	      case OMP_CLAUSE_GANG:
		c_op0 = OMP_CLAUSE_GANG_EXPR (c);
		break;

	      case OMP_CLAUSE_WORKER:
		c_op0 = OMP_CLAUSE_WORKER_EXPR (c);
		break;

	      case OMP_CLAUSE_VECTOR:
		c_op0 = OMP_CLAUSE_VECTOR_EXPR (c);
		break;

	      default:
		continue;
	      }

	    if (c_op0)
	      {
		error_at (OMP_CLAUSE_LOCATION (c),
			  "argument not permitted on %qs clause",
			  omp_clause_code_name[OMP_CLAUSE_CODE (c)]);
		if (tgt)
		  inform (gimple_location (tgt->stmt),
			  "enclosing parent compute construct");
		else if (oacc_get_fn_attrib (current_function_decl))
		  inform (DECL_SOURCE_LOCATION (current_function_decl),
			  "enclosing routine");
		else
		  gcc_unreachable ();
	      }
	  }

      if (tgt && is_oacc_kernels (tgt))
	check_oacc_kernel_gwv (stmt, ctx);

      /* Collect all variables named in reductions on this loop.  Ensure
	 that, if this loop has a reduction on some variable v, and there is
	 a reduction on v somewhere in an outer context, then there is a
	 reduction on v on all intervening loops as well.  */
      tree local_reduction_clauses = NULL;
      for (tree c = gimple_omp_for_clauses (stmt); c; c = OMP_CLAUSE_CHAIN (c))
	{
	  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION)
	    local_reduction_clauses
	      = tree_cons (NULL, c, local_reduction_clauses);
	}
      /* Lazily compute the union of the enclosing contexts' reduction
	 clause lists.  */
      if (ctx->outer_reduction_clauses == NULL && ctx->outer != NULL)
	ctx->outer_reduction_clauses
	  = chainon (unshare_expr (ctx->outer->local_reduction_clauses),
		     ctx->outer->outer_reduction_clauses);
      tree outer_reduction_clauses = ctx->outer_reduction_clauses;
      tree local_iter = local_reduction_clauses;
      for (; local_iter; local_iter = TREE_CHAIN (local_iter))
	{
	  tree local_clause = TREE_VALUE (local_iter);
	  tree local_var = OMP_CLAUSE_DECL (local_clause);
	  tree_code local_op = OMP_CLAUSE_REDUCTION_CODE (local_clause);
	  bool have_outer_reduction = false;
	  tree ctx_iter = outer_reduction_clauses;
	  for (; ctx_iter; ctx_iter = TREE_CHAIN (ctx_iter))
	    {
	      tree outer_clause = TREE_VALUE (ctx_iter);
	      tree outer_var = OMP_CLAUSE_DECL (outer_clause);
	      tree_code outer_op = OMP_CLAUSE_REDUCTION_CODE (outer_clause);
	      /* Same variable reduced with a different operation in an
		 enclosing loop: warn, but keep scanning.  */
	      if (outer_var == local_var && outer_op != local_op)
		{
		  warning_at (OMP_CLAUSE_LOCATION (local_clause), 0,
			      "conflicting reduction operations for %qE",
			      local_var);
		  inform (OMP_CLAUSE_LOCATION (outer_clause),
			  "location of the previous reduction for %qE",
			  outer_var);
		}
	      if (outer_var == local_var)
		{
		  have_outer_reduction = true;
		  break;
		}
	    }
	  if (have_outer_reduction)
	    {
	      /* There is a reduction on outer_var both on this loop and on
		 some enclosing loop.  Walk up the context tree until such a
		 loop with a reduction on outer_var is found, and complain
		 about all intervening loops that do not have such a
		 reduction.  */
	      struct omp_context *curr_loop = ctx->outer;
	      bool found = false;
	      while (curr_loop != NULL)
		{
		  tree curr_iter = curr_loop->local_reduction_clauses;
		  for (; curr_iter; curr_iter = TREE_CHAIN (curr_iter))
		    {
		      tree curr_clause = TREE_VALUE (curr_iter);
		      tree curr_var = OMP_CLAUSE_DECL (curr_clause);
		      if (curr_var == local_var)
			{
			  found = true;
			  break;
			}
		    }
		  if (!found)
		    warning_at (gimple_location (curr_loop->stmt), 0,
				"nested loop in reduction needs "
				"reduction clause for %qE",
				local_var);
		  else
		    break;
		  curr_loop = curr_loop->outer;
		}
	    }
	}
      /* Publish this loop's reductions for any inner loops to check
	 against.  */
      ctx->local_reduction_clauses = local_reduction_clauses;
      ctx->outer_reduction_clauses
	= chainon (unshare_expr (ctx->local_reduction_clauses),
		   ctx->outer_reduction_clauses);

      if (tgt && is_oacc_kernels (tgt))
	{
	  /* Strip out reductions, as they are not handled yet.  */
	  tree *prev_ptr = &clauses;

	  while (tree probe = *prev_ptr)
	    {
	      tree *next_ptr = &OMP_CLAUSE_CHAIN (probe);

	      if (OMP_CLAUSE_CODE (probe) == OMP_CLAUSE_REDUCTION)
		*prev_ptr = *next_ptr;
	      else
		prev_ptr = next_ptr;
	    }

	  gimple_omp_for_set_clauses (stmt, clauses);
	}
    }

  scan_sharing_clauses (clauses, ctx);

  /* Scan loop-control expressions for every collapsed dimension.  */
  scan_omp (gimple_omp_for_pre_body_ptr (stmt), ctx);
  for (i = 0; i < gimple_omp_for_collapse (stmt); i++)
    {
      scan_omp_op (gimple_omp_for_index_ptr (stmt, i), ctx);
      scan_omp_op (gimple_omp_for_initial_ptr (stmt, i), ctx);
      scan_omp_op (gimple_omp_for_final_ptr (stmt, i), ctx);
      scan_omp_op (gimple_omp_for_incr_ptr (stmt, i), ctx);
    }
  scan_omp (gimple_omp_body_ptr (stmt), ctx);
  return ctx;
}
2629
/* Duplicate #pragma omp simd, one for SIMT, another one for SIMD.
   The original statement is replaced by a GIMPLE_BIND of the form

     cond = GOMP_USE_SIMT ();
     if (cond != 0) goto lab1; else goto lab2;
     lab1: <copy of the loop, tagged with an _simt_ clause>
	   goto lab3;
     lab2: <the original loop>
     lab3:

   so the choice between the two versions is made at run time.  */

static void
scan_omp_simd (gimple_stmt_iterator *gsi, gomp_for *stmt,
	       omp_context *outer_ctx)
{
  gbind *bind = gimple_build_bind (NULL, NULL, NULL);
  gsi_replace (gsi, bind, false);
  gimple_seq seq = NULL;
  /* cond = GOMP_USE_SIMT ();  */
  gimple *g = gimple_build_call_internal (IFN_GOMP_USE_SIMT, 0);
  tree cond = create_tmp_var_raw (integer_type_node);
  DECL_CONTEXT (cond) = current_function_decl;
  DECL_SEEN_IN_BIND_EXPR_P (cond) = 1;
  gimple_bind_set_vars (bind, cond);
  gimple_call_set_lhs (g, cond);
  gimple_seq_add_stmt (&seq, g);
  tree lab1 = create_artificial_label (UNKNOWN_LOCATION);
  tree lab2 = create_artificial_label (UNKNOWN_LOCATION);
  tree lab3 = create_artificial_label (UNKNOWN_LOCATION);
  g = gimple_build_cond (NE_EXPR, cond, integer_zero_node, lab1, lab2);
  gimple_seq_add_stmt (&seq, g);
  g = gimple_build_label (lab1);
  gimple_seq_add_stmt (&seq, g);
  /* Deep-copy the loop for the SIMT arm and mark it with _simt_.  */
  gimple_seq new_seq = copy_gimple_seq_and_replace_locals (stmt);
  gomp_for *new_stmt = as_a <gomp_for *> (new_seq);
  tree clause = build_omp_clause (gimple_location (stmt), OMP_CLAUSE__SIMT_);
  OMP_CLAUSE_CHAIN (clause) = gimple_omp_for_clauses (new_stmt);
  gimple_omp_for_set_clauses (new_stmt, clause);
  gimple_seq_add_stmt (&seq, new_stmt);
  g = gimple_build_goto (lab3);
  gimple_seq_add_stmt (&seq, g);
  g = gimple_build_label (lab2);
  gimple_seq_add_stmt (&seq, g);
  /* The original loop becomes the SIMD arm.  */
  gimple_seq_add_stmt (&seq, stmt);
  g = gimple_build_label (lab3);
  gimple_seq_add_stmt (&seq, g);
  gimple_bind_set_body (bind, seq);
  update_stmt (bind);
  /* Scan both copies; remember the SIMT twin on the original's ctx.  */
  scan_omp_for (new_stmt, outer_ctx);
  scan_omp_for (stmt, outer_ctx)->simt_stmt = new_stmt;
}
2671
2672 static tree omp_find_scan (gimple_stmt_iterator *, bool *,
2673 struct walk_stmt_info *);
2674 static omp_context *maybe_lookup_ctx (gimple *);
2675
/* Duplicate #pragma omp simd, one for the scan input phase loop and one
   for scan phase loop.  The original loop becomes the body of a new
   GIMPLE_OMP_SCAN input statement, and a deep copy of it becomes the
   body of a following GIMPLE_OMP_SCAN statement with an inclusive
   clause; the inner scan separator inside each copy has its relevant
   half of the body cleared.  */

static void
scan_omp_simd_scan (gimple_stmt_iterator *gsi, gomp_for *stmt,
		    omp_context *outer_ctx)
{
  /* The only change between inclusive and exclusive scan will be
     within the first simd loop, so just use inclusive in the
     worksharing loop.  */
  outer_ctx->scan_inclusive = true;
  tree c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE_INCLUSIVE);
  OMP_CLAUSE_DECL (c) = integer_zero_node;

  gomp_scan *input_stmt = gimple_build_omp_scan (NULL, NULL_TREE);
  gomp_scan *scan_stmt = gimple_build_omp_scan (NULL, c);
  gsi_replace (gsi, input_stmt, false);
  gimple_seq input_body = NULL;
  gimple_seq_add_stmt (&input_body, stmt);
  gsi_insert_after (gsi, scan_stmt, GSI_NEW_STMT);

  /* Locate the inner scan separator inside the original loop body.  */
  gimple_stmt_iterator input1_gsi = gsi_none ();
  struct walk_stmt_info wi;
  memset (&wi, 0, sizeof (wi));
  wi.val_only = true;
  wi.info = (void *) &input1_gsi;
  walk_gimple_seq_mod (gimple_omp_body_ptr (stmt), omp_find_scan, NULL, &wi);
  gcc_assert (!gsi_end_p (input1_gsi));

  gimple *input_stmt1 = gsi_stmt (input1_gsi);
  gsi_next (&input1_gsi);
  gimple *scan_stmt1 = gsi_stmt (input1_gsi);
  gcc_assert (scan_stmt1 && gimple_code (scan_stmt1) == GIMPLE_OMP_SCAN);
  /* For exclusive scan the roles of the two halves are swapped.  */
  c = gimple_omp_scan_clauses (as_a <gomp_scan *> (scan_stmt1));
  if (c && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_EXCLUSIVE)
    std::swap (input_stmt1, scan_stmt1);

  /* Temporarily detach the input half so the copy below gets an empty
     input phase, then restore it on the original.  */
  gimple_seq input_body1 = gimple_omp_body (input_stmt1);
  gimple_omp_set_body (input_stmt1, NULL);

  gimple_seq scan_body = copy_gimple_seq_and_replace_locals (stmt);
  gomp_for *new_stmt = as_a <gomp_for *> (scan_body);

  gimple_omp_set_body (input_stmt1, input_body1);
  gimple_omp_set_body (scan_stmt1, NULL);

  /* Locate the inner scan separator inside the copied loop body.  */
  gimple_stmt_iterator input2_gsi = gsi_none ();
  memset (&wi, 0, sizeof (wi));
  wi.val_only = true;
  wi.info = (void *) &input2_gsi;
  walk_gimple_seq_mod (gimple_omp_body_ptr (new_stmt), omp_find_scan,
		       NULL, &wi);
  gcc_assert (!gsi_end_p (input2_gsi));

  gimple *input_stmt2 = gsi_stmt (input2_gsi);
  gsi_next (&input2_gsi);
  gimple *scan_stmt2 = gsi_stmt (input2_gsi);
  gcc_assert (scan_stmt2 && gimple_code (scan_stmt2) == GIMPLE_OMP_SCAN);
  if (c && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_EXCLUSIVE)
    std::swap (input_stmt2, scan_stmt2);

  /* The copy keeps only the scan phase of its body.  */
  gimple_omp_set_body (input_stmt2, NULL);

  gimple_omp_set_body (input_stmt, input_body);
  gimple_omp_set_body (scan_stmt, scan_body);

  omp_context *ctx = new_omp_context (input_stmt, outer_ctx);
  scan_omp (gimple_omp_body_ptr (input_stmt), ctx);

  ctx = new_omp_context (scan_stmt, outer_ctx);
  scan_omp (gimple_omp_body_ptr (scan_stmt), ctx);

  maybe_lookup_ctx (new_stmt)->for_simd_scan_phase = true;
}
2750
2751 /* Scan an OpenMP sections directive. */
2752
2753 static void
2754 scan_omp_sections (gomp_sections *stmt, omp_context *outer_ctx)
2755 {
2756 omp_context *ctx;
2757
2758 ctx = new_omp_context (stmt, outer_ctx);
2759 scan_sharing_clauses (gimple_omp_sections_clauses (stmt), ctx);
2760 scan_omp (gimple_omp_body_ptr (stmt), ctx);
2761 }
2762
2763 /* Scan an OpenMP single directive. */
2764
2765 static void
2766 scan_omp_single (gomp_single *stmt, omp_context *outer_ctx)
2767 {
2768 omp_context *ctx;
2769 tree name;
2770
2771 ctx = new_omp_context (stmt, outer_ctx);
2772 ctx->field_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
2773 ctx->record_type = lang_hooks.types.make_type (RECORD_TYPE);
2774 name = create_tmp_var_name (".omp_copy_s");
2775 name = build_decl (gimple_location (stmt),
2776 TYPE_DECL, name, ctx->record_type);
2777 TYPE_NAME (ctx->record_type) = name;
2778
2779 scan_sharing_clauses (gimple_omp_single_clauses (stmt), ctx);
2780 scan_omp (gimple_omp_body_ptr (stmt), ctx);
2781
2782 if (TYPE_FIELDS (ctx->record_type) == NULL)
2783 ctx->record_type = NULL;
2784 else
2785 layout_type (ctx->record_type);
2786 }
2787
/* Scan a GIMPLE_OMP_TARGET.  Builds the .omp_data_t record describing
   the mapped data; for offloaded variants also creates the outlined
   child function.  */

static void
scan_omp_target (gomp_target *stmt, omp_context *outer_ctx)
{
  omp_context *ctx;
  tree name;
  bool offloaded = is_gimple_omp_offloaded (stmt);
  tree clauses = gimple_omp_target_clauses (stmt);

  ctx = new_omp_context (stmt, outer_ctx);
  ctx->field_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
  ctx->record_type = lang_hooks.types.make_type (RECORD_TYPE);
  name = create_tmp_var_name (".omp_data_t");
  name = build_decl (gimple_location (stmt),
		     TYPE_DECL, name, ctx->record_type);
  DECL_ARTIFICIAL (name) = 1;
  DECL_NAMELESS (name) = 1;
  TYPE_NAME (ctx->record_type) = name;
  TYPE_ARTIFICIAL (ctx->record_type) = 1;

  if (offloaded)
    {
      create_omp_child_function (ctx, false);
      gimple_omp_target_set_child_fn (stmt, ctx->cb.dst_fn);
    }

  scan_sharing_clauses (clauses, ctx);
  scan_omp (gimple_omp_body_ptr (stmt), ctx);

  if (TYPE_FIELDS (ctx->record_type) == NULL)
    ctx->record_type = ctx->receiver_decl = NULL;
  else
    {
      /* Fields were prepended during scanning; restore clause order
	 before laying out the record.  */
      TYPE_FIELDS (ctx->record_type)
	= nreverse (TYPE_FIELDS (ctx->record_type));
      if (flag_checking)
	{
	  /* All mapped fields are expected to share one alignment.  */
	  unsigned int align = DECL_ALIGN (TYPE_FIELDS (ctx->record_type));
	  for (tree field = TYPE_FIELDS (ctx->record_type);
	       field;
	       field = DECL_CHAIN (field))
	    gcc_assert (DECL_ALIGN (field) == align);
	}
      layout_type (ctx->record_type);
      if (offloaded)
	fixup_child_record_type (ctx);
    }
}
2837
2838 /* Scan an OpenMP teams directive. */
2839
2840 static void
2841 scan_omp_teams (gomp_teams *stmt, omp_context *outer_ctx)
2842 {
2843 omp_context *ctx = new_omp_context (stmt, outer_ctx);
2844
2845 if (!gimple_omp_teams_host (stmt))
2846 {
2847 scan_sharing_clauses (gimple_omp_teams_clauses (stmt), ctx);
2848 scan_omp (gimple_omp_body_ptr (stmt), ctx);
2849 return;
2850 }
2851 taskreg_contexts.safe_push (ctx);
2852 gcc_assert (taskreg_nesting_level == 1);
2853 ctx->field_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
2854 ctx->record_type = lang_hooks.types.make_type (RECORD_TYPE);
2855 tree name = create_tmp_var_name (".omp_data_s");
2856 name = build_decl (gimple_location (stmt),
2857 TYPE_DECL, name, ctx->record_type);
2858 DECL_ARTIFICIAL (name) = 1;
2859 DECL_NAMELESS (name) = 1;
2860 TYPE_NAME (ctx->record_type) = name;
2861 TYPE_ARTIFICIAL (ctx->record_type) = 1;
2862 create_omp_child_function (ctx, false);
2863 gimple_omp_teams_set_child_fn (stmt, ctx->cb.dst_fn);
2864
2865 scan_sharing_clauses (gimple_omp_teams_clauses (stmt), ctx);
2866 scan_omp (gimple_omp_body_ptr (stmt), ctx);
2867
2868 if (TYPE_FIELDS (ctx->record_type) == NULL)
2869 ctx->record_type = ctx->receiver_decl = NULL;
2870 }
2871
2872 /* Check nesting restrictions. */
2873 static bool
2874 check_omp_nesting_restrictions (gimple *stmt, omp_context *ctx)
2875 {
2876 tree c;
2877
2878 /* No nesting of non-OpenACC STMT (that is, an OpenMP one, or a GOMP builtin)
2879 inside an OpenACC CTX. */
2880 if (!(is_gimple_omp (stmt)
2881 && is_gimple_omp_oacc (stmt))
2882 /* Except for atomic codes that we share with OpenMP. */
2883 && !(gimple_code (stmt) == GIMPLE_OMP_ATOMIC_LOAD
2884 || gimple_code (stmt) == GIMPLE_OMP_ATOMIC_STORE))
2885 {
2886 if (oacc_get_fn_attrib (cfun->decl) != NULL)
2887 {
2888 error_at (gimple_location (stmt),
2889 "non-OpenACC construct inside of OpenACC routine");
2890 return false;
2891 }
2892 else
2893 for (omp_context *octx = ctx; octx != NULL; octx = octx->outer)
2894 if (is_gimple_omp (octx->stmt)
2895 && is_gimple_omp_oacc (octx->stmt))
2896 {
2897 error_at (gimple_location (stmt),
2898 "non-OpenACC construct inside of OpenACC region");
2899 return false;
2900 }
2901 }
2902
2903 if (ctx != NULL)
2904 {
2905 if (gimple_code (ctx->stmt) == GIMPLE_OMP_SCAN
2906 && ctx->outer
2907 && gimple_code (ctx->outer->stmt) == GIMPLE_OMP_FOR)
2908 ctx = ctx->outer;
2909 if (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
2910 && gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_SIMD
2911 && !ctx->loop_p)
2912 {
2913 c = NULL_TREE;
2914 if (ctx->order_concurrent
2915 && (gimple_code (stmt) == GIMPLE_OMP_ORDERED
2916 || gimple_code (stmt) == GIMPLE_OMP_ATOMIC_LOAD
2917 || gimple_code (stmt) == GIMPLE_OMP_ATOMIC_STORE))
2918 {
2919 error_at (gimple_location (stmt),
2920 "OpenMP constructs other than %<parallel%>, %<loop%>"
2921 " or %<simd%> may not be nested inside a region with"
2922 " the %<order(concurrent)%> clause");
2923 return false;
2924 }
2925 if (gimple_code (stmt) == GIMPLE_OMP_ORDERED)
2926 {
2927 c = gimple_omp_ordered_clauses (as_a <gomp_ordered *> (stmt));
2928 if (omp_find_clause (c, OMP_CLAUSE_SIMD))
2929 {
2930 if (omp_find_clause (c, OMP_CLAUSE_THREADS)
2931 && (ctx->outer == NULL
2932 || !gimple_omp_for_combined_into_p (ctx->stmt)
2933 || gimple_code (ctx->outer->stmt) != GIMPLE_OMP_FOR
2934 || (gimple_omp_for_kind (ctx->outer->stmt)
2935 != GF_OMP_FOR_KIND_FOR)
2936 || !gimple_omp_for_combined_p (ctx->outer->stmt)))
2937 {
2938 error_at (gimple_location (stmt),
2939 "%<ordered simd threads%> must be closely "
2940 "nested inside of %<for simd%> region");
2941 return false;
2942 }
2943 return true;
2944 }
2945 }
2946 else if (gimple_code (stmt) == GIMPLE_OMP_ATOMIC_LOAD
2947 || gimple_code (stmt) == GIMPLE_OMP_ATOMIC_STORE
2948 || gimple_code (stmt) == GIMPLE_OMP_SCAN)
2949 return true;
2950 else if (gimple_code (stmt) == GIMPLE_OMP_FOR
2951 && gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_SIMD)
2952 return true;
2953 error_at (gimple_location (stmt),
2954 "OpenMP constructs other than "
2955 "%<ordered simd%>, %<simd%>, %<loop%> or %<atomic%> may "
2956 "not be nested inside %<simd%> region");
2957 return false;
2958 }
2959 else if (gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS)
2960 {
2961 if ((gimple_code (stmt) != GIMPLE_OMP_FOR
2962 || (gimple_omp_for_kind (stmt) != GF_OMP_FOR_KIND_DISTRIBUTE
2963 && omp_find_clause (gimple_omp_for_clauses (stmt),
2964 OMP_CLAUSE_BIND) == NULL_TREE))
2965 && gimple_code (stmt) != GIMPLE_OMP_PARALLEL)
2966 {
2967 error_at (gimple_location (stmt),
2968 "only %<distribute%>, %<parallel%> or %<loop%> "
2969 "regions are allowed to be strictly nested inside "
2970 "%<teams%> region");
2971 return false;
2972 }
2973 }
2974 else if (ctx->order_concurrent
2975 && gimple_code (stmt) != GIMPLE_OMP_PARALLEL
2976 && (gimple_code (stmt) != GIMPLE_OMP_FOR
2977 || gimple_omp_for_kind (stmt) != GF_OMP_FOR_KIND_SIMD)
2978 && gimple_code (stmt) != GIMPLE_OMP_SCAN)
2979 {
2980 if (ctx->loop_p)
2981 error_at (gimple_location (stmt),
2982 "OpenMP constructs other than %<parallel%>, %<loop%> or "
2983 "%<simd%> may not be nested inside a %<loop%> region");
2984 else
2985 error_at (gimple_location (stmt),
2986 "OpenMP constructs other than %<parallel%>, %<loop%> or "
2987 "%<simd%> may not be nested inside a region with "
2988 "the %<order(concurrent)%> clause");
2989 return false;
2990 }
2991 }
2992 switch (gimple_code (stmt))
2993 {
2994 case GIMPLE_OMP_FOR:
2995 if (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_SIMD)
2996 return true;
2997 if (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_DISTRIBUTE)
2998 {
2999 if (ctx != NULL && gimple_code (ctx->stmt) != GIMPLE_OMP_TEAMS)
3000 {
3001 error_at (gimple_location (stmt),
3002 "%<distribute%> region must be strictly nested "
3003 "inside %<teams%> construct");
3004 return false;
3005 }
3006 return true;
3007 }
3008 /* We split taskloop into task and nested taskloop in it. */
3009 if (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_TASKLOOP)
3010 return true;
3011 /* For now, hope this will change and loop bind(parallel) will not
3012 be allowed in lots of contexts. */
3013 if (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_FOR
3014 && omp_find_clause (gimple_omp_for_clauses (stmt), OMP_CLAUSE_BIND))
3015 return true;
3016 if (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_OACC_LOOP)
3017 {
3018 bool ok = false;
3019
3020 if (ctx)
3021 switch (gimple_code (ctx->stmt))
3022 {
3023 case GIMPLE_OMP_FOR:
3024 ok = (gimple_omp_for_kind (ctx->stmt)
3025 == GF_OMP_FOR_KIND_OACC_LOOP);
3026 break;
3027
3028 case GIMPLE_OMP_TARGET:
3029 switch (gimple_omp_target_kind (ctx->stmt))
3030 {
3031 case GF_OMP_TARGET_KIND_OACC_PARALLEL:
3032 case GF_OMP_TARGET_KIND_OACC_KERNELS:
3033 case GF_OMP_TARGET_KIND_OACC_SERIAL:
3034 ok = true;
3035 break;
3036
3037 default:
3038 break;
3039 }
3040
3041 default:
3042 break;
3043 }
3044 else if (oacc_get_fn_attrib (current_function_decl))
3045 ok = true;
3046 if (!ok)
3047 {
3048 error_at (gimple_location (stmt),
3049 "OpenACC loop directive must be associated with"
3050 " an OpenACC compute region");
3051 return false;
3052 }
3053 }
3054 /* FALLTHRU */
3055 case GIMPLE_CALL:
3056 if (is_gimple_call (stmt)
3057 && (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
3058 == BUILT_IN_GOMP_CANCEL
3059 || DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
3060 == BUILT_IN_GOMP_CANCELLATION_POINT))
3061 {
3062 const char *bad = NULL;
3063 const char *kind = NULL;
3064 const char *construct
3065 = (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
3066 == BUILT_IN_GOMP_CANCEL)
3067 ? "cancel"
3068 : "cancellation point";
3069 if (ctx == NULL)
3070 {
3071 error_at (gimple_location (stmt), "orphaned %qs construct",
3072 construct);
3073 return false;
3074 }
3075 switch (tree_fits_shwi_p (gimple_call_arg (stmt, 0))
3076 ? tree_to_shwi (gimple_call_arg (stmt, 0))
3077 : 0)
3078 {
3079 case 1:
3080 if (gimple_code (ctx->stmt) != GIMPLE_OMP_PARALLEL)
3081 bad = "parallel";
3082 else if (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
3083 == BUILT_IN_GOMP_CANCEL
3084 && !integer_zerop (gimple_call_arg (stmt, 1)))
3085 ctx->cancellable = true;
3086 kind = "parallel";
3087 break;
3088 case 2:
3089 if (gimple_code (ctx->stmt) != GIMPLE_OMP_FOR
3090 || gimple_omp_for_kind (ctx->stmt) != GF_OMP_FOR_KIND_FOR)
3091 bad = "for";
3092 else if (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
3093 == BUILT_IN_GOMP_CANCEL
3094 && !integer_zerop (gimple_call_arg (stmt, 1)))
3095 {
3096 ctx->cancellable = true;
3097 if (omp_find_clause (gimple_omp_for_clauses (ctx->stmt),
3098 OMP_CLAUSE_NOWAIT))
3099 warning_at (gimple_location (stmt), 0,
3100 "%<cancel for%> inside "
3101 "%<nowait%> for construct");
3102 if (omp_find_clause (gimple_omp_for_clauses (ctx->stmt),
3103 OMP_CLAUSE_ORDERED))
3104 warning_at (gimple_location (stmt), 0,
3105 "%<cancel for%> inside "
3106 "%<ordered%> for construct");
3107 }
3108 kind = "for";
3109 break;
3110 case 4:
3111 if (gimple_code (ctx->stmt) != GIMPLE_OMP_SECTIONS
3112 && gimple_code (ctx->stmt) != GIMPLE_OMP_SECTION)
3113 bad = "sections";
3114 else if (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
3115 == BUILT_IN_GOMP_CANCEL
3116 && !integer_zerop (gimple_call_arg (stmt, 1)))
3117 {
3118 if (gimple_code (ctx->stmt) == GIMPLE_OMP_SECTIONS)
3119 {
3120 ctx->cancellable = true;
3121 if (omp_find_clause (gimple_omp_sections_clauses
3122 (ctx->stmt),
3123 OMP_CLAUSE_NOWAIT))
3124 warning_at (gimple_location (stmt), 0,
3125 "%<cancel sections%> inside "
3126 "%<nowait%> sections construct");
3127 }
3128 else
3129 {
3130 gcc_assert (ctx->outer
3131 && gimple_code (ctx->outer->stmt)
3132 == GIMPLE_OMP_SECTIONS);
3133 ctx->outer->cancellable = true;
3134 if (omp_find_clause (gimple_omp_sections_clauses
3135 (ctx->outer->stmt),
3136 OMP_CLAUSE_NOWAIT))
3137 warning_at (gimple_location (stmt), 0,
3138 "%<cancel sections%> inside "
3139 "%<nowait%> sections construct");
3140 }
3141 }
3142 kind = "sections";
3143 break;
3144 case 8:
3145 if (!is_task_ctx (ctx)
3146 && (!is_taskloop_ctx (ctx)
3147 || ctx->outer == NULL
3148 || !is_task_ctx (ctx->outer)))
3149 bad = "task";
3150 else
3151 {
3152 for (omp_context *octx = ctx->outer;
3153 octx; octx = octx->outer)
3154 {
3155 switch (gimple_code (octx->stmt))
3156 {
3157 case GIMPLE_OMP_TASKGROUP:
3158 break;
3159 case GIMPLE_OMP_TARGET:
3160 if (gimple_omp_target_kind (octx->stmt)
3161 != GF_OMP_TARGET_KIND_REGION)
3162 continue;
3163 /* FALLTHRU */
3164 case GIMPLE_OMP_PARALLEL:
3165 case GIMPLE_OMP_TEAMS:
3166 error_at (gimple_location (stmt),
3167 "%<%s taskgroup%> construct not closely "
3168 "nested inside of %<taskgroup%> region",
3169 construct);
3170 return false;
3171 case GIMPLE_OMP_TASK:
3172 if (gimple_omp_task_taskloop_p (octx->stmt)
3173 && octx->outer
3174 && is_taskloop_ctx (octx->outer))
3175 {
3176 tree clauses
3177 = gimple_omp_for_clauses (octx->outer->stmt);
3178 if (!omp_find_clause (clauses, OMP_CLAUSE_NOGROUP))
3179 break;
3180 }
3181 continue;
3182 default:
3183 continue;
3184 }
3185 break;
3186 }
3187 ctx->cancellable = true;
3188 }
3189 kind = "taskgroup";
3190 break;
3191 default:
3192 error_at (gimple_location (stmt), "invalid arguments");
3193 return false;
3194 }
3195 if (bad)
3196 {
3197 error_at (gimple_location (stmt),
3198 "%<%s %s%> construct not closely nested inside of %qs",
3199 construct, kind, bad);
3200 return false;
3201 }
3202 }
3203 /* FALLTHRU */
3204 case GIMPLE_OMP_SECTIONS:
3205 case GIMPLE_OMP_SINGLE:
3206 for (; ctx != NULL; ctx = ctx->outer)
3207 switch (gimple_code (ctx->stmt))
3208 {
3209 case GIMPLE_OMP_FOR:
3210 if (gimple_omp_for_kind (ctx->stmt) != GF_OMP_FOR_KIND_FOR
3211 && gimple_omp_for_kind (ctx->stmt) != GF_OMP_FOR_KIND_TASKLOOP)
3212 break;
3213 /* FALLTHRU */
3214 case GIMPLE_OMP_SECTIONS:
3215 case GIMPLE_OMP_SINGLE:
3216 case GIMPLE_OMP_ORDERED:
3217 case GIMPLE_OMP_MASTER:
3218 case GIMPLE_OMP_TASK:
3219 case GIMPLE_OMP_CRITICAL:
3220 if (is_gimple_call (stmt))
3221 {
3222 if (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
3223 != BUILT_IN_GOMP_BARRIER)
3224 return true;
3225 error_at (gimple_location (stmt),
3226 "barrier region may not be closely nested inside "
3227 "of work-sharing, %<loop%>, %<critical%>, "
3228 "%<ordered%>, %<master%>, explicit %<task%> or "
3229 "%<taskloop%> region");
3230 return false;
3231 }
3232 error_at (gimple_location (stmt),
3233 "work-sharing region may not be closely nested inside "
3234 "of work-sharing, %<loop%>, %<critical%>, %<ordered%>, "
3235 "%<master%>, explicit %<task%> or %<taskloop%> region");
3236 return false;
3237 case GIMPLE_OMP_PARALLEL:
3238 case GIMPLE_OMP_TEAMS:
3239 return true;
3240 case GIMPLE_OMP_TARGET:
3241 if (gimple_omp_target_kind (ctx->stmt)
3242 == GF_OMP_TARGET_KIND_REGION)
3243 return true;
3244 break;
3245 default:
3246 break;
3247 }
3248 break;
3249 case GIMPLE_OMP_MASTER:
3250 for (; ctx != NULL; ctx = ctx->outer)
3251 switch (gimple_code (ctx->stmt))
3252 {
3253 case GIMPLE_OMP_FOR:
3254 if (gimple_omp_for_kind (ctx->stmt) != GF_OMP_FOR_KIND_FOR
3255 && gimple_omp_for_kind (ctx->stmt) != GF_OMP_FOR_KIND_TASKLOOP)
3256 break;
3257 /* FALLTHRU */
3258 case GIMPLE_OMP_SECTIONS:
3259 case GIMPLE_OMP_SINGLE:
3260 case GIMPLE_OMP_TASK:
3261 error_at (gimple_location (stmt),
3262 "%<master%> region may not be closely nested inside "
3263 "of work-sharing, %<loop%>, explicit %<task%> or "
3264 "%<taskloop%> region");
3265 return false;
3266 case GIMPLE_OMP_PARALLEL:
3267 case GIMPLE_OMP_TEAMS:
3268 return true;
3269 case GIMPLE_OMP_TARGET:
3270 if (gimple_omp_target_kind (ctx->stmt)
3271 == GF_OMP_TARGET_KIND_REGION)
3272 return true;
3273 break;
3274 default:
3275 break;
3276 }
3277 break;
3278 case GIMPLE_OMP_TASK:
3279 for (c = gimple_omp_task_clauses (stmt); c; c = OMP_CLAUSE_CHAIN (c))
3280 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND
3281 && (OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SOURCE
3282 || OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SINK))
3283 {
3284 enum omp_clause_depend_kind kind = OMP_CLAUSE_DEPEND_KIND (c);
3285 error_at (OMP_CLAUSE_LOCATION (c),
3286 "%<depend(%s)%> is only allowed in %<omp ordered%>",
3287 kind == OMP_CLAUSE_DEPEND_SOURCE ? "source" : "sink");
3288 return false;
3289 }
3290 break;
3291 case GIMPLE_OMP_ORDERED:
3292 for (c = gimple_omp_ordered_clauses (as_a <gomp_ordered *> (stmt));
3293 c; c = OMP_CLAUSE_CHAIN (c))
3294 {
3295 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_DEPEND)
3296 {
3297 gcc_assert (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_THREADS
3298 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SIMD);
3299 continue;
3300 }
3301 enum omp_clause_depend_kind kind = OMP_CLAUSE_DEPEND_KIND (c);
3302 if (kind == OMP_CLAUSE_DEPEND_SOURCE
3303 || kind == OMP_CLAUSE_DEPEND_SINK)
3304 {
3305 tree oclause;
3306 /* Look for containing ordered(N) loop. */
3307 if (ctx == NULL
3308 || gimple_code (ctx->stmt) != GIMPLE_OMP_FOR
3309 || (oclause
3310 = omp_find_clause (gimple_omp_for_clauses (ctx->stmt),
3311 OMP_CLAUSE_ORDERED)) == NULL_TREE)
3312 {
3313 error_at (OMP_CLAUSE_LOCATION (c),
3314 "%<ordered%> construct with %<depend%> clause "
3315 "must be closely nested inside an %<ordered%> "
3316 "loop");
3317 return false;
3318 }
3319 else if (OMP_CLAUSE_ORDERED_EXPR (oclause) == NULL_TREE)
3320 {
3321 error_at (OMP_CLAUSE_LOCATION (c),
3322 "%<ordered%> construct with %<depend%> clause "
3323 "must be closely nested inside a loop with "
3324 "%<ordered%> clause with a parameter");
3325 return false;
3326 }
3327 }
3328 else
3329 {
3330 error_at (OMP_CLAUSE_LOCATION (c),
3331 "invalid depend kind in omp %<ordered%> %<depend%>");
3332 return false;
3333 }
3334 }
3335 c = gimple_omp_ordered_clauses (as_a <gomp_ordered *> (stmt));
3336 if (omp_find_clause (c, OMP_CLAUSE_SIMD))
3337 {
3338 /* ordered simd must be closely nested inside of simd region,
3339 and simd region must not encounter constructs other than
3340 ordered simd, therefore ordered simd may be either orphaned,
3341 or ctx->stmt must be simd. The latter case is handled already
3342 earlier. */
3343 if (ctx != NULL)
3344 {
3345 error_at (gimple_location (stmt),
3346 "%<ordered%> %<simd%> must be closely nested inside "
3347 "%<simd%> region");
3348 return false;
3349 }
3350 }
3351 for (; ctx != NULL; ctx = ctx->outer)
3352 switch (gimple_code (ctx->stmt))
3353 {
3354 case GIMPLE_OMP_CRITICAL:
3355 case GIMPLE_OMP_TASK:
3356 case GIMPLE_OMP_ORDERED:
3357 ordered_in_taskloop:
3358 error_at (gimple_location (stmt),
3359 "%<ordered%> region may not be closely nested inside "
3360 "of %<critical%>, %<ordered%>, explicit %<task%> or "
3361 "%<taskloop%> region");
3362 return false;
3363 case GIMPLE_OMP_FOR:
3364 if (gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_TASKLOOP)
3365 goto ordered_in_taskloop;
3366 tree o;
3367 o = omp_find_clause (gimple_omp_for_clauses (ctx->stmt),
3368 OMP_CLAUSE_ORDERED);
3369 if (o == NULL)
3370 {
3371 error_at (gimple_location (stmt),
3372 "%<ordered%> region must be closely nested inside "
3373 "a loop region with an %<ordered%> clause");
3374 return false;
3375 }
3376 if (OMP_CLAUSE_ORDERED_EXPR (o) != NULL_TREE
3377 && omp_find_clause (c, OMP_CLAUSE_DEPEND) == NULL_TREE)
3378 {
3379 error_at (gimple_location (stmt),
3380 "%<ordered%> region without %<depend%> clause may "
3381 "not be closely nested inside a loop region with "
3382 "an %<ordered%> clause with a parameter");
3383 return false;
3384 }
3385 return true;
3386 case GIMPLE_OMP_TARGET:
3387 if (gimple_omp_target_kind (ctx->stmt)
3388 != GF_OMP_TARGET_KIND_REGION)
3389 break;
3390 /* FALLTHRU */
3391 case GIMPLE_OMP_PARALLEL:
3392 case GIMPLE_OMP_TEAMS:
3393 error_at (gimple_location (stmt),
3394 "%<ordered%> region must be closely nested inside "
3395 "a loop region with an %<ordered%> clause");
3396 return false;
3397 default:
3398 break;
3399 }
3400 break;
3401 case GIMPLE_OMP_CRITICAL:
3402 {
3403 tree this_stmt_name
3404 = gimple_omp_critical_name (as_a <gomp_critical *> (stmt));
3405 for (; ctx != NULL; ctx = ctx->outer)
3406 if (gomp_critical *other_crit
3407 = dyn_cast <gomp_critical *> (ctx->stmt))
3408 if (this_stmt_name == gimple_omp_critical_name (other_crit))
3409 {
3410 error_at (gimple_location (stmt),
3411 "%<critical%> region may not be nested inside "
3412 "a %<critical%> region with the same name");
3413 return false;
3414 }
3415 }
3416 break;
3417 case GIMPLE_OMP_TEAMS:
3418 if (ctx == NULL)
3419 break;
3420 else if (gimple_code (ctx->stmt) != GIMPLE_OMP_TARGET
3421 || (gimple_omp_target_kind (ctx->stmt)
3422 != GF_OMP_TARGET_KIND_REGION))
3423 {
3424 /* Teams construct can appear either strictly nested inside of
3425 target construct with no intervening stmts, or can be encountered
3426 only by initial task (so must not appear inside any OpenMP
3427 construct. */
3428 error_at (gimple_location (stmt),
3429 "%<teams%> construct must be closely nested inside of "
3430 "%<target%> construct or not nested in any OpenMP "
3431 "construct");
3432 return false;
3433 }
3434 break;
3435 case GIMPLE_OMP_TARGET:
3436 for (c = gimple_omp_target_clauses (stmt); c; c = OMP_CLAUSE_CHAIN (c))
3437 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND
3438 && (OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SOURCE
3439 || OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SINK))
3440 {
3441 enum omp_clause_depend_kind kind = OMP_CLAUSE_DEPEND_KIND (c);
3442 error_at (OMP_CLAUSE_LOCATION (c),
3443 "%<depend(%s)%> is only allowed in %<omp ordered%>",
3444 kind == OMP_CLAUSE_DEPEND_SOURCE ? "source" : "sink");
3445 return false;
3446 }
3447 if (is_gimple_omp_offloaded (stmt)
3448 && oacc_get_fn_attrib (cfun->decl) != NULL)
3449 {
3450 error_at (gimple_location (stmt),
3451 "OpenACC region inside of OpenACC routine, nested "
3452 "parallelism not supported yet");
3453 return false;
3454 }
3455 for (; ctx != NULL; ctx = ctx->outer)
3456 {
3457 if (gimple_code (ctx->stmt) != GIMPLE_OMP_TARGET)
3458 {
3459 if (is_gimple_omp (stmt)
3460 && is_gimple_omp_oacc (stmt)
3461 && is_gimple_omp (ctx->stmt))
3462 {
3463 error_at (gimple_location (stmt),
3464 "OpenACC construct inside of non-OpenACC region");
3465 return false;
3466 }
3467 continue;
3468 }
3469
3470 const char *stmt_name, *ctx_stmt_name;
3471 switch (gimple_omp_target_kind (stmt))
3472 {
3473 case GF_OMP_TARGET_KIND_REGION: stmt_name = "target"; break;
3474 case GF_OMP_TARGET_KIND_DATA: stmt_name = "target data"; break;
3475 case GF_OMP_TARGET_KIND_UPDATE: stmt_name = "target update"; break;
3476 case GF_OMP_TARGET_KIND_ENTER_DATA:
3477 stmt_name = "target enter data"; break;
3478 case GF_OMP_TARGET_KIND_EXIT_DATA:
3479 stmt_name = "target exit data"; break;
3480 case GF_OMP_TARGET_KIND_OACC_PARALLEL: stmt_name = "parallel"; break;
3481 case GF_OMP_TARGET_KIND_OACC_KERNELS: stmt_name = "kernels"; break;
3482 case GF_OMP_TARGET_KIND_OACC_SERIAL: stmt_name = "serial"; break;
3483 case GF_OMP_TARGET_KIND_OACC_DATA: stmt_name = "data"; break;
3484 case GF_OMP_TARGET_KIND_OACC_UPDATE: stmt_name = "update"; break;
3485 case GF_OMP_TARGET_KIND_OACC_ENTER_EXIT_DATA:
3486 stmt_name = "enter/exit data"; break;
3487 case GF_OMP_TARGET_KIND_OACC_DECLARE: stmt_name = "declare"; break;
3488 case GF_OMP_TARGET_KIND_OACC_HOST_DATA: stmt_name = "host_data";
3489 break;
3490 default: gcc_unreachable ();
3491 }
3492 switch (gimple_omp_target_kind (ctx->stmt))
3493 {
3494 case GF_OMP_TARGET_KIND_REGION: ctx_stmt_name = "target"; break;
3495 case GF_OMP_TARGET_KIND_DATA: ctx_stmt_name = "target data"; break;
3496 case GF_OMP_TARGET_KIND_OACC_PARALLEL:
3497 ctx_stmt_name = "parallel"; break;
3498 case GF_OMP_TARGET_KIND_OACC_KERNELS:
3499 ctx_stmt_name = "kernels"; break;
3500 case GF_OMP_TARGET_KIND_OACC_SERIAL:
3501 ctx_stmt_name = "serial"; break;
3502 case GF_OMP_TARGET_KIND_OACC_DATA: ctx_stmt_name = "data"; break;
3503 case GF_OMP_TARGET_KIND_OACC_HOST_DATA:
3504 ctx_stmt_name = "host_data"; break;
3505 default: gcc_unreachable ();
3506 }
3507
3508 /* OpenACC/OpenMP mismatch? */
3509 if (is_gimple_omp_oacc (stmt)
3510 != is_gimple_omp_oacc (ctx->stmt))
3511 {
3512 error_at (gimple_location (stmt),
3513 "%s %qs construct inside of %s %qs region",
3514 (is_gimple_omp_oacc (stmt)
3515 ? "OpenACC" : "OpenMP"), stmt_name,
3516 (is_gimple_omp_oacc (ctx->stmt)
3517 ? "OpenACC" : "OpenMP"), ctx_stmt_name);
3518 return false;
3519 }
3520 if (is_gimple_omp_offloaded (ctx->stmt))
3521 {
3522 /* No GIMPLE_OMP_TARGET inside offloaded OpenACC CTX. */
3523 if (is_gimple_omp_oacc (ctx->stmt))
3524 {
3525 error_at (gimple_location (stmt),
3526 "%qs construct inside of %qs region",
3527 stmt_name, ctx_stmt_name);
3528 return false;
3529 }
3530 else
3531 {
3532 warning_at (gimple_location (stmt), 0,
3533 "%qs construct inside of %qs region",
3534 stmt_name, ctx_stmt_name);
3535 }
3536 }
3537 }
3538 break;
3539 default:
3540 break;
3541 }
3542 return true;
3543 }
3544
3545
3546 /* Helper function scan_omp.
3547
3548 Callback for walk_tree or operators in walk_gimple_stmt used to
3549 scan for OMP directives in TP. */
3550
3551 static tree
3552 scan_omp_1_op (tree *tp, int *walk_subtrees, void *data)
3553 {
3554 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
3555 omp_context *ctx = (omp_context *) wi->info;
3556 tree t = *tp;
3557
3558 switch (TREE_CODE (t))
3559 {
3560 case VAR_DECL:
3561 case PARM_DECL:
3562 case LABEL_DECL:
3563 case RESULT_DECL:
3564 if (ctx)
3565 {
3566 tree repl = remap_decl (t, &ctx->cb);
3567 gcc_checking_assert (TREE_CODE (repl) != ERROR_MARK);
3568 *tp = repl;
3569 }
3570 break;
3571
3572 default:
3573 if (ctx && TYPE_P (t))
3574 *tp = remap_type (t, &ctx->cb);
3575 else if (!DECL_P (t))
3576 {
3577 *walk_subtrees = 1;
3578 if (ctx)
3579 {
3580 tree tem = remap_type (TREE_TYPE (t), &ctx->cb);
3581 if (tem != TREE_TYPE (t))
3582 {
3583 if (TREE_CODE (t) == INTEGER_CST)
3584 *tp = wide_int_to_tree (tem, wi::to_wide (t));
3585 else
3586 TREE_TYPE (t) = tem;
3587 }
3588 }
3589 }
3590 break;
3591 }
3592
3593 return NULL_TREE;
3594 }
3595
3596 /* Return true if FNDECL is a setjmp or a longjmp. */
3597
3598 static bool
3599 setjmp_or_longjmp_p (const_tree fndecl)
3600 {
3601 if (fndecl_built_in_p (fndecl, BUILT_IN_SETJMP)
3602 || fndecl_built_in_p (fndecl, BUILT_IN_LONGJMP))
3603 return true;
3604
3605 tree declname = DECL_NAME (fndecl);
3606 if (!declname
3607 || (DECL_CONTEXT (fndecl) != NULL_TREE
3608 && TREE_CODE (DECL_CONTEXT (fndecl)) != TRANSLATION_UNIT_DECL)
3609 || !TREE_PUBLIC (fndecl))
3610 return false;
3611
3612 const char *name = IDENTIFIER_POINTER (declname);
3613 return !strcmp (name, "setjmp") || !strcmp (name, "longjmp");
3614 }
3615
3616 /* Return true if FNDECL is an omp_* runtime API call. */
3617
3618 static bool
3619 omp_runtime_api_call (const_tree fndecl)
3620 {
3621 tree declname = DECL_NAME (fndecl);
3622 if (!declname
3623 || (DECL_CONTEXT (fndecl) != NULL_TREE
3624 && TREE_CODE (DECL_CONTEXT (fndecl)) != TRANSLATION_UNIT_DECL)
3625 || !TREE_PUBLIC (fndecl))
3626 return false;
3627
3628 const char *name = IDENTIFIER_POINTER (declname);
3629 if (strncmp (name, "omp_", 4) != 0)
3630 return false;
3631
3632 static const char *omp_runtime_apis[] =
3633 {
3634 /* This array has 3 sections. First omp_* calls that don't
3635 have any suffixes. */
3636 "target_alloc",
3637 "target_associate_ptr",
3638 "target_disassociate_ptr",
3639 "target_free",
3640 "target_is_present",
3641 "target_memcpy",
3642 "target_memcpy_rect",
3643 NULL,
3644 /* Now omp_* calls that are available as omp_* and omp_*_. */
3645 "capture_affinity",
3646 "destroy_lock",
3647 "destroy_nest_lock",
3648 "display_affinity",
3649 "get_active_level",
3650 "get_affinity_format",
3651 "get_cancellation",
3652 "get_default_device",
3653 "get_dynamic",
3654 "get_initial_device",
3655 "get_level",
3656 "get_max_active_levels",
3657 "get_max_task_priority",
3658 "get_max_threads",
3659 "get_nested",
3660 "get_num_devices",
3661 "get_num_places",
3662 "get_num_procs",
3663 "get_num_teams",
3664 "get_num_threads",
3665 "get_partition_num_places",
3666 "get_place_num",
3667 "get_proc_bind",
3668 "get_team_num",
3669 "get_thread_limit",
3670 "get_thread_num",
3671 "get_wtick",
3672 "get_wtime",
3673 "in_final",
3674 "in_parallel",
3675 "init_lock",
3676 "init_nest_lock",
3677 "is_initial_device",
3678 "pause_resource",
3679 "pause_resource_all",
3680 "set_affinity_format",
3681 "set_lock",
3682 "set_nest_lock",
3683 "test_lock",
3684 "test_nest_lock",
3685 "unset_lock",
3686 "unset_nest_lock",
3687 NULL,
3688 /* And finally calls available as omp_*, omp_*_ and omp_*_8_. */
3689 "get_ancestor_thread_num",
3690 "get_partition_place_nums",
3691 "get_place_num_procs",
3692 "get_place_proc_ids",
3693 "get_schedule",
3694 "get_team_size",
3695 "set_default_device",
3696 "set_dynamic",
3697 "set_max_active_levels",
3698 "set_nested",
3699 "set_num_threads",
3700 "set_schedule"
3701 };
3702
3703 int mode = 0;
3704 for (unsigned i = 0; i < ARRAY_SIZE (omp_runtime_apis); i++)
3705 {
3706 if (omp_runtime_apis[i] == NULL)
3707 {
3708 mode++;
3709 continue;
3710 }
3711 size_t len = strlen (omp_runtime_apis[i]);
3712 if (strncmp (name + 4, omp_runtime_apis[i], len) == 0
3713 && (name[4 + len] == '\0'
3714 || (mode > 0
3715 && name[4 + len] == '_'
3716 && (name[4 + len + 1] == '\0'
3717 || (mode > 1
3718 && strcmp (name + 4 + len + 1, "8_") == 0)))))
3719 return true;
3720 }
3721 return false;
3722 }
3723
/* Helper function for scan_omp.

   Callback for walk_gimple_stmt used to scan for OMP directives in
   the current statement in GSI.  Diagnoses nesting violations (and
   replaces the offending statement with a nop), then dispatches to
   the per-construct scanners, creating new OMP contexts as needed.  */

static tree
scan_omp_1_stmt (gimple_stmt_iterator *gsi, bool *handled_ops_p,
		 struct walk_stmt_info *wi)
{
  gimple *stmt = gsi_stmt (*gsi);
  omp_context *ctx = (omp_context *) wi->info;

  if (gimple_has_location (stmt))
    input_location = gimple_location (stmt);

  /* Check the nesting restrictions.  */
  bool remove = false;
  if (is_gimple_omp (stmt))
    remove = !check_omp_nesting_restrictions (stmt, ctx);
  else if (is_gimple_call (stmt))
    {
      tree fndecl = gimple_call_fndecl (stmt);
      if (fndecl)
	{
	  /* setjmp/longjmp is rejected inside a simd region, except
	     when the region comes from a loop construct (ctx->loop_p).  */
	  if (ctx
	      && gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
	      && gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_SIMD
	      && setjmp_or_longjmp_p (fndecl)
	      && !ctx->loop_p)
	    {
	      remove = true;
	      error_at (gimple_location (stmt),
			"setjmp/longjmp inside %<simd%> construct");
	    }
	  else if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
	    switch (DECL_FUNCTION_CODE (fndecl))
	      {
	      /* These libgomp builtins are subject to the same nesting
		 restrictions as the directives they implement.  */
	      case BUILT_IN_GOMP_BARRIER:
	      case BUILT_IN_GOMP_CANCEL:
	      case BUILT_IN_GOMP_CANCELLATION_POINT:
	      case BUILT_IN_GOMP_TASKYIELD:
	      case BUILT_IN_GOMP_TASKWAIT:
	      case BUILT_IN_GOMP_TASKGROUP_START:
	      case BUILT_IN_GOMP_TASKGROUP_END:
		remove = !check_omp_nesting_restrictions (stmt, ctx);
		break;
	      default:
		break;
	      }
	  else if (ctx)
	    {
	      /* OpenMP runtime API calls are not allowed in a region
		 with the order(concurrent) clause; for a scan region,
		 consult the enclosing context instead.  */
	      omp_context *octx = ctx;
	      if (gimple_code (ctx->stmt) == GIMPLE_OMP_SCAN && ctx->outer)
		octx = ctx->outer;
	      if (octx->order_concurrent && omp_runtime_api_call (fndecl))
		{
		  remove = true;
		  error_at (gimple_location (stmt),
			    "OpenMP runtime API call %qD in a region with "
			    "%<order(concurrent)%> clause", fndecl);
		}
	    }
	}
    }
  if (remove)
    {
      /* An error has been diagnosed above; drop the statement so that
	 lowering can proceed on the rest of the function.  */
      stmt = gimple_build_nop ();
      gsi_replace (gsi, stmt, false);
    }

  *handled_ops_p = true;

  switch (gimple_code (stmt))
    {
    case GIMPLE_OMP_PARALLEL:
      taskreg_nesting_level++;
      scan_omp_parallel (gsi, ctx);
      taskreg_nesting_level--;
      break;

    case GIMPLE_OMP_TASK:
      taskreg_nesting_level++;
      scan_omp_task (gsi, ctx);
      taskreg_nesting_level--;
      break;

    case GIMPLE_OMP_FOR:
      /* A combined simd loop with an inscan reduction gets special
	 scanning, unless we are already inside its scan region.  */
      if ((gimple_omp_for_kind (as_a <gomp_for *> (stmt))
	   == GF_OMP_FOR_KIND_SIMD)
	  && gimple_omp_for_combined_into_p (stmt)
	  && gimple_code (ctx->stmt) != GIMPLE_OMP_SCAN)
	{
	  tree clauses = gimple_omp_for_clauses (as_a <gomp_for *> (stmt));
	  tree c = omp_find_clause (clauses, OMP_CLAUSE_REDUCTION);
	  if (c && OMP_CLAUSE_REDUCTION_INSCAN (c) && !seen_error ())
	    {
	      scan_omp_simd_scan (gsi, as_a <gomp_for *> (stmt), ctx);
	      break;
	    }
	}
      /* A non-collapsed simd loop in a possibly-offloaded context with
	 SIMT support takes the SIMT-aware scanning path.  */
      if ((gimple_omp_for_kind (as_a <gomp_for *> (stmt))
	   == GF_OMP_FOR_KIND_SIMD)
	  && omp_maybe_offloaded_ctx (ctx)
	  && omp_max_simt_vf ()
	  && gimple_omp_for_collapse (stmt) == 1)
	scan_omp_simd (gsi, as_a <gomp_for *> (stmt), ctx);
      else
	scan_omp_for (as_a <gomp_for *> (stmt), ctx);
      break;

    case GIMPLE_OMP_SECTIONS:
      scan_omp_sections (as_a <gomp_sections *> (stmt), ctx);
      break;

    case GIMPLE_OMP_SINGLE:
      scan_omp_single (as_a <gomp_single *> (stmt), ctx);
      break;

    case GIMPLE_OMP_SCAN:
      /* Record on the enclosing context whether this is an inclusive
	 or exclusive scan, then fall through to the generic body
	 scanning below.  */
      if (tree clauses = gimple_omp_scan_clauses (as_a <gomp_scan *> (stmt)))
	{
	  if (OMP_CLAUSE_CODE (clauses) == OMP_CLAUSE_INCLUSIVE)
	    ctx->scan_inclusive = true;
	  else if (OMP_CLAUSE_CODE (clauses) == OMP_CLAUSE_EXCLUSIVE)
	    ctx->scan_exclusive = true;
	}
      /* FALLTHRU */
    case GIMPLE_OMP_SECTION:
    case GIMPLE_OMP_MASTER:
    case GIMPLE_OMP_ORDERED:
    case GIMPLE_OMP_CRITICAL:
      ctx = new_omp_context (stmt, ctx);
      scan_omp (gimple_omp_body_ptr (stmt), ctx);
      break;

    case GIMPLE_OMP_TASKGROUP:
      ctx = new_omp_context (stmt, ctx);
      scan_sharing_clauses (gimple_omp_taskgroup_clauses (stmt), ctx);
      scan_omp (gimple_omp_body_ptr (stmt), ctx);
      break;

    case GIMPLE_OMP_TARGET:
      /* Offloaded regions nest like parallel/task regions for the
	 purpose of taskreg nesting bookkeeping.  */
      if (is_gimple_omp_offloaded (stmt))
	{
	  taskreg_nesting_level++;
	  scan_omp_target (as_a <gomp_target *> (stmt), ctx);
	  taskreg_nesting_level--;
	}
      else
	scan_omp_target (as_a <gomp_target *> (stmt), ctx);
      break;

    case GIMPLE_OMP_TEAMS:
      /* Host teams regions likewise bump the taskreg nesting level.  */
      if (gimple_omp_teams_host (as_a <gomp_teams *> (stmt)))
	{
	  taskreg_nesting_level++;
	  scan_omp_teams (as_a <gomp_teams *> (stmt), ctx);
	  taskreg_nesting_level--;
	}
      else
	scan_omp_teams (as_a <gomp_teams *> (stmt), ctx);
      break;

    case GIMPLE_BIND:
      {
	tree var;

	/* Map bind-local variables to themselves so later remapping
	   leaves them alone; let the walker descend into the body.  */
	*handled_ops_p = false;
	if (ctx)
	  for (var = gimple_bind_vars (as_a <gbind *> (stmt));
	       var ;
	       var = DECL_CHAIN (var))
	    insert_decl_map (&ctx->cb, var, var);
      }
      break;
    default:
      *handled_ops_p = false;
      break;
    }

  return NULL_TREE;
}
3906
3907
3908 /* Scan all the statements starting at the current statement. CTX
3909 contains context information about the OMP directives and
3910 clauses found during the scan. */
3911
3912 static void
3913 scan_omp (gimple_seq *body_p, omp_context *ctx)
3914 {
3915 location_t saved_location;
3916 struct walk_stmt_info wi;
3917
3918 memset (&wi, 0, sizeof (wi));
3919 wi.info = ctx;
3920 wi.want_locations = true;
3921
3922 saved_location = input_location;
3923 walk_gimple_seq_mod (body_p, scan_omp_1_stmt, scan_omp_1_op, &wi);
3924 input_location = saved_location;
3925 }
3926 \f
3927 /* Re-gimplification and code generation routines. */
3928
3929 /* Remove omp_member_access_dummy_var variables from gimple_bind_vars
3930 of BIND if in a method. */
3931
3932 static void
3933 maybe_remove_omp_member_access_dummy_vars (gbind *bind)
3934 {
3935 if (DECL_ARGUMENTS (current_function_decl)
3936 && DECL_ARTIFICIAL (DECL_ARGUMENTS (current_function_decl))
3937 && (TREE_CODE (TREE_TYPE (DECL_ARGUMENTS (current_function_decl)))
3938 == POINTER_TYPE))
3939 {
3940 tree vars = gimple_bind_vars (bind);
3941 for (tree *pvar = &vars; *pvar; )
3942 if (omp_member_access_dummy_var (*pvar))
3943 *pvar = DECL_CHAIN (*pvar);
3944 else
3945 pvar = &DECL_CHAIN (*pvar);
3946 gimple_bind_set_vars (bind, vars);
3947 }
3948 }
3949
3950 /* Remove omp_member_access_dummy_var variables from BLOCK_VARS of
3951 block and its subblocks. */
3952
3953 static void
3954 remove_member_access_dummy_vars (tree block)
3955 {
3956 for (tree *pvar = &BLOCK_VARS (block); *pvar; )
3957 if (omp_member_access_dummy_var (*pvar))
3958 *pvar = DECL_CHAIN (*pvar);
3959 else
3960 pvar = &DECL_CHAIN (*pvar);
3961
3962 for (block = BLOCK_SUBBLOCKS (block); block; block = BLOCK_CHAIN (block))
3963 remove_member_access_dummy_vars (block);
3964 }
3965
3966 /* If a context was created for STMT when it was scanned, return it. */
3967
3968 static omp_context *
3969 maybe_lookup_ctx (gimple *stmt)
3970 {
3971 splay_tree_node n;
3972 n = splay_tree_lookup (all_contexts, (splay_tree_key) stmt);
3973 return n ? (omp_context *) n->value : NULL;
3974 }
3975
3976
3977 /* Find the mapping for DECL in CTX or the immediately enclosing
3978 context that has a mapping for DECL.
3979
3980 If CTX is a nested parallel directive, we may have to use the decl
3981 mappings created in CTX's parent context. Suppose that we have the
3982 following parallel nesting (variable UIDs showed for clarity):
3983
3984 iD.1562 = 0;
3985 #omp parallel shared(iD.1562) -> outer parallel
3986 iD.1562 = iD.1562 + 1;
3987
3988 #omp parallel shared (iD.1562) -> inner parallel
3989 iD.1562 = iD.1562 - 1;
3990
3991 Each parallel structure will create a distinct .omp_data_s structure
3992 for copying iD.1562 in/out of the directive:
3993
3994 outer parallel .omp_data_s.1.i -> iD.1562
3995 inner parallel .omp_data_s.2.i -> iD.1562
3996
3997 A shared variable mapping will produce a copy-out operation before
3998 the parallel directive and a copy-in operation after it. So, in
3999 this case we would have:
4000
4001 iD.1562 = 0;
4002 .omp_data_o.1.i = iD.1562;
4003 #omp parallel shared(iD.1562) -> outer parallel
4004 .omp_data_i.1 = &.omp_data_o.1
4005 .omp_data_i.1->i = .omp_data_i.1->i + 1;
4006
4007 .omp_data_o.2.i = iD.1562; -> **
4008 #omp parallel shared(iD.1562) -> inner parallel
4009 .omp_data_i.2 = &.omp_data_o.2
4010 .omp_data_i.2->i = .omp_data_i.2->i - 1;
4011
4012
4013 ** This is a problem. The symbol iD.1562 cannot be referenced
4014 inside the body of the outer parallel region. But since we are
4015 emitting this copy operation while expanding the inner parallel
4016 directive, we need to access the CTX structure of the outer
4017 parallel directive to get the correct mapping:
4018
4019 .omp_data_o.2.i = .omp_data_i.1->i
4020
4021 Since there may be other workshare or parallel directives enclosing
4022 the parallel directive, it may be necessary to walk up the context
4023 parent chain. This is not a problem in general because nested
4024 parallelism happens only rarely. */
4025
4026 static tree
4027 lookup_decl_in_outer_ctx (tree decl, omp_context *ctx)
4028 {
4029 tree t;
4030 omp_context *up;
4031
4032 for (up = ctx->outer, t = NULL; up && t == NULL; up = up->outer)
4033 t = maybe_lookup_decl (decl, up);
4034
4035 gcc_assert (!ctx->is_nested || t || is_global_var (decl));
4036
4037 return t ? t : decl;
4038 }
4039
4040
4041 /* Similar to lookup_decl_in_outer_ctx, but return DECL if not found
4042 in outer contexts. */
4043
4044 static tree
4045 maybe_lookup_decl_in_outer_ctx (tree decl, omp_context *ctx)
4046 {
4047 tree t = NULL;
4048 omp_context *up;
4049
4050 for (up = ctx->outer, t = NULL; up && t == NULL; up = up->outer)
4051 t = maybe_lookup_decl (decl, up);
4052
4053 return t ? t : decl;
4054 }
4055
4056
4057 /* Construct the initialization value for reduction operation OP. */
4058
4059 tree
4060 omp_reduction_init_op (location_t loc, enum tree_code op, tree type)
4061 {
4062 switch (op)
4063 {
4064 case PLUS_EXPR:
4065 case MINUS_EXPR:
4066 case BIT_IOR_EXPR:
4067 case BIT_XOR_EXPR:
4068 case TRUTH_OR_EXPR:
4069 case TRUTH_ORIF_EXPR:
4070 case TRUTH_XOR_EXPR:
4071 case NE_EXPR:
4072 return build_zero_cst (type);
4073
4074 case MULT_EXPR:
4075 case TRUTH_AND_EXPR:
4076 case TRUTH_ANDIF_EXPR:
4077 case EQ_EXPR:
4078 return fold_convert_loc (loc, type, integer_one_node);
4079
4080 case BIT_AND_EXPR:
4081 return fold_convert_loc (loc, type, integer_minus_one_node);
4082
4083 case MAX_EXPR:
4084 if (SCALAR_FLOAT_TYPE_P (type))
4085 {
4086 REAL_VALUE_TYPE max, min;
4087 if (HONOR_INFINITIES (type))
4088 {
4089 real_inf (&max);
4090 real_arithmetic (&min, NEGATE_EXPR, &max, NULL);
4091 }
4092 else
4093 real_maxval (&min, 1, TYPE_MODE (type));
4094 return build_real (type, min);
4095 }
4096 else if (POINTER_TYPE_P (type))
4097 {
4098 wide_int min
4099 = wi::min_value (TYPE_PRECISION (type), TYPE_SIGN (type));
4100 return wide_int_to_tree (type, min);
4101 }
4102 else
4103 {
4104 gcc_assert (INTEGRAL_TYPE_P (type));
4105 return TYPE_MIN_VALUE (type);
4106 }
4107
4108 case MIN_EXPR:
4109 if (SCALAR_FLOAT_TYPE_P (type))
4110 {
4111 REAL_VALUE_TYPE max;
4112 if (HONOR_INFINITIES (type))
4113 real_inf (&max);
4114 else
4115 real_maxval (&max, 0, TYPE_MODE (type));
4116 return build_real (type, max);
4117 }
4118 else if (POINTER_TYPE_P (type))
4119 {
4120 wide_int max
4121 = wi::max_value (TYPE_PRECISION (type), TYPE_SIGN (type));
4122 return wide_int_to_tree (type, max);
4123 }
4124 else
4125 {
4126 gcc_assert (INTEGRAL_TYPE_P (type));
4127 return TYPE_MAX_VALUE (type);
4128 }
4129
4130 default:
4131 gcc_unreachable ();
4132 }
4133 }
4134
4135 /* Construct the initialization value for reduction CLAUSE. */
4136
4137 tree
4138 omp_reduction_init (tree clause, tree type)
4139 {
4140 return omp_reduction_init_op (OMP_CLAUSE_LOCATION (clause),
4141 OMP_CLAUSE_REDUCTION_CODE (clause), type);
4142 }
4143
4144 /* Return alignment to be assumed for var in CLAUSE, which should be
4145 OMP_CLAUSE_ALIGNED. */
4146
4147 static tree
4148 omp_clause_aligned_alignment (tree clause)
4149 {
4150 if (OMP_CLAUSE_ALIGNED_ALIGNMENT (clause))
4151 return OMP_CLAUSE_ALIGNED_ALIGNMENT (clause);
4152
4153 /* Otherwise return implementation defined alignment. */
4154 unsigned int al = 1;
4155 opt_scalar_mode mode_iter;
4156 auto_vector_modes modes;
4157 targetm.vectorize.autovectorize_vector_modes (&modes, true);
4158 static enum mode_class classes[]
4159 = { MODE_INT, MODE_VECTOR_INT, MODE_FLOAT, MODE_VECTOR_FLOAT };
4160 for (int i = 0; i < 4; i += 2)
4161 /* The for loop above dictates that we only walk through scalar classes. */
4162 FOR_EACH_MODE_IN_CLASS (mode_iter, classes[i])
4163 {
4164 scalar_mode mode = mode_iter.require ();
4165 machine_mode vmode = targetm.vectorize.preferred_simd_mode (mode);
4166 if (GET_MODE_CLASS (vmode) != classes[i + 1])
4167 continue;
4168 machine_mode alt_vmode;
4169 for (unsigned int j = 0; j < modes.length (); ++j)
4170 if (related_vector_mode (modes[j], mode).exists (&alt_vmode)
4171 && known_ge (GET_MODE_SIZE (alt_vmode), GET_MODE_SIZE (vmode)))
4172 vmode = alt_vmode;
4173
4174 tree type = lang_hooks.types.type_for_mode (mode, 1);
4175 if (type == NULL_TREE || TYPE_MODE (type) != mode)
4176 continue;
4177 type = build_vector_type_for_mode (type, vmode);
4178 if (TYPE_MODE (type) != vmode)
4179 continue;
4180 if (TYPE_ALIGN_UNIT (type) > al)
4181 al = TYPE_ALIGN_UNIT (type);
4182 }
4183 return build_int_cst (integer_type_node, al);
4184 }
4185
4186
4187 /* This structure is part of the interface between lower_rec_simd_input_clauses
4188 and lower_rec_input_clauses. */
4189
class omplow_simd_context {
public:
  /* Zero-initialize everything; note the memset assumes all members
     are trivially constructible plain data.  */
  omplow_simd_context () { memset (this, 0, sizeof (*this)); }
  /* Index variable used to subscript the per-lane "omp simd array"
     temporaries.  */
  tree idx;
  /* Lane variable used in the DECL_VALUE_EXPR array reference for the
     privatized variable.  */
  tree lane;
  /* Lane holding the reduced value for inscan reductions; created
     lazily on first use.  */
  tree lastlane;
  /* For SIMT: addresses of privatized variables, passed as extra
     entry arguments (first slot is a simduid placeholder).  */
  vec<tree, va_heap> simt_eargs;
  /* For SIMT: statements clobbering the privatized variables.  */
  gimple_seq simt_dlist;
  /* Maximum vectorization factor; 0 means not computed yet, 1 means
     no SIMD privatization is performed.  */
  poly_uint64_pod max_vf;
  /* True when lowering for SIMT rather than SIMD.  */
  bool is_simt;
};
4201
4202 /* Helper function of lower_rec_input_clauses, used for #pragma omp simd
4203 privatization. */
4204
static bool
lower_rec_simd_input_clauses (tree new_var, omp_context *ctx,
			      omplow_simd_context *sctx, tree &ivar,
			      tree &lvar, tree *rvar = NULL,
			      tree *rvar2 = NULL)
{
  /* On the first call, compute the maximum vectorization factor and,
     if it may exceed 1, create the shared index and lane variables.  */
  if (known_eq (sctx->max_vf, 0U))
    {
      sctx->max_vf = sctx->is_simt ? omp_max_simt_vf () : omp_max_vf ();
      if (maybe_gt (sctx->max_vf, 1U))
	{
	  tree c = omp_find_clause (gimple_omp_for_clauses (ctx->stmt),
				    OMP_CLAUSE_SAFELEN);
	  if (c)
	    {
	      /* Clamp max_vf by safelen; a non-constant or sub-1
		 safelen forces max_vf down to 1.  */
	      poly_uint64 safe_len;
	      if (!poly_int_tree_p (OMP_CLAUSE_SAFELEN_EXPR (c), &safe_len)
		  || maybe_lt (safe_len, 1U))
		sctx->max_vf = 1;
	      else
		sctx->max_vf = lower_bound (sctx->max_vf, safe_len);
	    }
	}
      if (maybe_gt (sctx->max_vf, 1U))
	{
	  sctx->idx = create_tmp_var (unsigned_type_node);
	  sctx->lane = create_tmp_var (unsigned_type_node);
	}
    }
  /* max_vf == 1 means no SIMD privatization: tell the caller to use
     the ordinary scalar path.  */
  if (known_eq (sctx->max_vf, 1U))
    return false;

  if (sctx->is_simt)
    {
      /* SIMT: registers need no special treatment.  */
      if (is_gimple_reg (new_var))
	{
	  ivar = lvar = new_var;
	  return true;
	}
      /* Addressable variables get a private copy whose address is
	 passed to the SIMT entry as an extra argument, and which is
	 clobbered in the SIMT exit sequence.  */
      tree type = TREE_TYPE (new_var), ptype = build_pointer_type (type);
      ivar = lvar = create_tmp_var (type);
      TREE_ADDRESSABLE (ivar) = 1;
      DECL_ATTRIBUTES (ivar) = tree_cons (get_identifier ("omp simt private"),
					  NULL, DECL_ATTRIBUTES (ivar));
      sctx->simt_eargs.safe_push (build1 (ADDR_EXPR, ptype, ivar));
      tree clobber = build_clobber (type);
      gimple *g = gimple_build_assign (ivar, clobber);
      gimple_seq_add_stmt (&sctx->simt_dlist, g);
    }
  else
    {
      /* SIMD: privatize into a max_vf-element "omp simd array",
	 indexed per lane.  */
      tree atype = build_array_type_nelts (TREE_TYPE (new_var), sctx->max_vf);
      tree avar = create_tmp_var_raw (atype);
      if (TREE_ADDRESSABLE (new_var))
	TREE_ADDRESSABLE (avar) = 1;
      DECL_ATTRIBUTES (avar)
	= tree_cons (get_identifier ("omp simd array"), NULL,
		     DECL_ATTRIBUTES (avar));
      gimple_add_tmp_var (avar);
      tree iavar = avar;
      if (rvar && !ctx->for_simd_scan_phase)
	{
	  /* For inscan reductions, create another array temporary,
	     which will hold the reduced value.  */
	  iavar = create_tmp_var_raw (atype);
	  if (TREE_ADDRESSABLE (new_var))
	    TREE_ADDRESSABLE (iavar) = 1;
	  DECL_ATTRIBUTES (iavar)
	    = tree_cons (get_identifier ("omp simd array"), NULL,
			 tree_cons (get_identifier ("omp simd inscan"), NULL,
				    DECL_ATTRIBUTES (iavar)));
	  gimple_add_tmp_var (iavar);
	  /* Remember the pairing so later remapping finds the inscan
	     array from the plain one.  */
	  ctx->cb.decl_map->put (avar, iavar);
	  if (sctx->lastlane == NULL_TREE)
	    sctx->lastlane = create_tmp_var (unsigned_type_node);
	  *rvar = build4 (ARRAY_REF, TREE_TYPE (new_var), iavar,
			  sctx->lastlane, NULL_TREE, NULL_TREE);
	  TREE_THIS_NOTRAP (*rvar) = 1;

	  if (ctx->scan_exclusive)
	    {
	      /* And for exclusive scan yet another one, which will
		 hold the value during the scan phase.  */
	      tree savar = create_tmp_var_raw (atype);
	      if (TREE_ADDRESSABLE (new_var))
		TREE_ADDRESSABLE (savar) = 1;
	      DECL_ATTRIBUTES (savar)
		= tree_cons (get_identifier ("omp simd array"), NULL,
			     tree_cons (get_identifier ("omp simd inscan "
							"exclusive"), NULL,
					DECL_ATTRIBUTES (savar)));
	      gimple_add_tmp_var (savar);
	      /* Chain the mapping: avar -> iavar -> savar.  */
	      ctx->cb.decl_map->put (iavar, savar);
	      *rvar2 = build4 (ARRAY_REF, TREE_TYPE (new_var), savar,
			       sctx->idx, NULL_TREE, NULL_TREE);
	      TREE_THIS_NOTRAP (*rvar2) = 1;
	    }
	}
      /* IVAR is the per-iteration element, LVAR the element used via
	 DECL_VALUE_EXPR for references to NEW_VAR.  */
      ivar = build4 (ARRAY_REF, TREE_TYPE (new_var), iavar, sctx->idx,
		     NULL_TREE, NULL_TREE);
      lvar = build4 (ARRAY_REF, TREE_TYPE (new_var), avar, sctx->lane,
		     NULL_TREE, NULL_TREE);
      TREE_THIS_NOTRAP (ivar) = 1;
      TREE_THIS_NOTRAP (lvar) = 1;
    }
  /* Redirect references to NEW_VAR to the per-lane copy.  */
  if (DECL_P (new_var))
    {
      SET_DECL_VALUE_EXPR (new_var, lvar);
      DECL_HAS_VALUE_EXPR_P (new_var) = 1;
    }
  return true;
}
4317
4318 /* Helper function of lower_rec_input_clauses. For a reference
4319 in simd reduction, add an underlying variable it will reference. */
4320
4321 static void
4322 handle_simd_reference (location_t loc, tree new_vard, gimple_seq *ilist)
4323 {
4324 tree z = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (new_vard)));
4325 if (TREE_CONSTANT (z))
4326 {
4327 z = create_tmp_var_raw (TREE_TYPE (TREE_TYPE (new_vard)),
4328 get_name (new_vard));
4329 gimple_add_tmp_var (z);
4330 TREE_ADDRESSABLE (z) = 1;
4331 z = build_fold_addr_expr_loc (loc, z);
4332 gimplify_assign (new_vard, z, ilist);
4333 }
4334 }
4335
4336 /* Helper function for lower_rec_input_clauses. Emit into ilist sequence
4337 code to emit (type) (tskred_temp[idx]). */
4338
4339 static tree
4340 task_reduction_read (gimple_seq *ilist, tree tskred_temp, tree type,
4341 unsigned idx)
4342 {
4343 unsigned HOST_WIDE_INT sz
4344 = tree_to_uhwi (TYPE_SIZE_UNIT (pointer_sized_int_node));
4345 tree r = build2 (MEM_REF, pointer_sized_int_node,
4346 tskred_temp, build_int_cst (TREE_TYPE (tskred_temp),
4347 idx * sz));
4348 tree v = create_tmp_var (pointer_sized_int_node);
4349 gimple *g = gimple_build_assign (v, r);
4350 gimple_seq_add_stmt (ilist, g);
4351 if (!useless_type_conversion_p (type, pointer_sized_int_node))
4352 {
4353 v = create_tmp_var (type);
4354 g = gimple_build_assign (v, NOP_EXPR, gimple_assign_lhs (g));
4355 gimple_seq_add_stmt (ilist, g);
4356 }
4357 return v;
4358 }
4359
4360 /* Generate code to implement the input clauses, FIRSTPRIVATE and COPYIN,
4361 from the receiver (aka child) side and initializers for REFERENCE_TYPE
4362 private variables. Initialization statements go in ILIST, while calls
4363 to destructors go in DLIST. */
4364
4365 static void
4366 lower_rec_input_clauses (tree clauses, gimple_seq *ilist, gimple_seq *dlist,
4367 omp_context *ctx, struct omp_for_data *fd)
4368 {
4369 tree c, copyin_seq, x, ptr;
4370 bool copyin_by_ref = false;
4371 bool lastprivate_firstprivate = false;
4372 bool reduction_omp_orig_ref = false;
4373 int pass;
4374 bool is_simd = (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
4375 && gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_SIMD);
4376 omplow_simd_context sctx = omplow_simd_context ();
4377 tree simt_lane = NULL_TREE, simtrec = NULL_TREE;
4378 tree ivar = NULL_TREE, lvar = NULL_TREE, uid = NULL_TREE;
4379 gimple_seq llist[4] = { };
4380 tree nonconst_simd_if = NULL_TREE;
4381
4382 copyin_seq = NULL;
4383 sctx.is_simt = is_simd && omp_find_clause (clauses, OMP_CLAUSE__SIMT_);
4384
4385 /* Set max_vf=1 (which will later enforce safelen=1) in simd loops
4386 with data sharing clauses referencing variable sized vars. That
4387 is unnecessarily hard to support and very unlikely to result in
4388 vectorized code anyway. */
4389 if (is_simd)
4390 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
4391 switch (OMP_CLAUSE_CODE (c))
4392 {
4393 case OMP_CLAUSE_LINEAR:
4394 if (OMP_CLAUSE_LINEAR_ARRAY (c))
4395 sctx.max_vf = 1;
4396 /* FALLTHRU */
4397 case OMP_CLAUSE_PRIVATE:
4398 case OMP_CLAUSE_FIRSTPRIVATE:
4399 case OMP_CLAUSE_LASTPRIVATE:
4400 if (is_variable_sized (OMP_CLAUSE_DECL (c)))
4401 sctx.max_vf = 1;
4402 else if (omp_is_reference (OMP_CLAUSE_DECL (c)))
4403 {
4404 tree rtype = TREE_TYPE (TREE_TYPE (OMP_CLAUSE_DECL (c)));
4405 if (!TREE_CONSTANT (TYPE_SIZE_UNIT (rtype)))
4406 sctx.max_vf = 1;
4407 }
4408 break;
4409 case OMP_CLAUSE_REDUCTION:
4410 case OMP_CLAUSE_IN_REDUCTION:
4411 if (TREE_CODE (OMP_CLAUSE_DECL (c)) == MEM_REF
4412 || is_variable_sized (OMP_CLAUSE_DECL (c)))
4413 sctx.max_vf = 1;
4414 else if (omp_is_reference (OMP_CLAUSE_DECL (c)))
4415 {
4416 tree rtype = TREE_TYPE (TREE_TYPE (OMP_CLAUSE_DECL (c)));
4417 if (!TREE_CONSTANT (TYPE_SIZE_UNIT (rtype)))
4418 sctx.max_vf = 1;
4419 }
4420 break;
4421 case OMP_CLAUSE_IF:
4422 if (integer_zerop (OMP_CLAUSE_IF_EXPR (c)))
4423 sctx.max_vf = 1;
4424 else if (TREE_CODE (OMP_CLAUSE_IF_EXPR (c)) != INTEGER_CST)
4425 nonconst_simd_if = OMP_CLAUSE_IF_EXPR (c);
4426 break;
4427 case OMP_CLAUSE_SIMDLEN:
4428 if (integer_onep (OMP_CLAUSE_SIMDLEN_EXPR (c)))
4429 sctx.max_vf = 1;
4430 break;
4431 case OMP_CLAUSE__CONDTEMP_:
4432 /* FIXME: lastprivate(conditional:) not handled for SIMT yet. */
4433 if (sctx.is_simt)
4434 sctx.max_vf = 1;
4435 break;
4436 default:
4437 continue;
4438 }
4439
4440 /* Add a placeholder for simduid. */
4441 if (sctx.is_simt && maybe_ne (sctx.max_vf, 1U))
4442 sctx.simt_eargs.safe_push (NULL_TREE);
4443
4444 unsigned task_reduction_cnt = 0;
4445 unsigned task_reduction_cntorig = 0;
4446 unsigned task_reduction_cnt_full = 0;
4447 unsigned task_reduction_cntorig_full = 0;
4448 unsigned task_reduction_other_cnt = 0;
4449 tree tskred_atype = NULL_TREE, tskred_avar = NULL_TREE;
4450 tree tskred_base = NULL_TREE, tskred_temp = NULL_TREE;
4451 /* Do all the fixed sized types in the first pass, and the variable sized
4452 types in the second pass. This makes sure that the scalar arguments to
4453 the variable sized types are processed before we use them in the
4454 variable sized operations. For task reductions we use 4 passes, in the
4455 first two we ignore them, in the third one gather arguments for
4456 GOMP_task_reduction_remap call and in the last pass actually handle
4457 the task reductions. */
4458 for (pass = 0; pass < ((task_reduction_cnt || task_reduction_other_cnt)
4459 ? 4 : 2); ++pass)
4460 {
4461 if (pass == 2 && task_reduction_cnt)
4462 {
4463 tskred_atype
4464 = build_array_type_nelts (ptr_type_node, task_reduction_cnt
4465 + task_reduction_cntorig);
4466 tskred_avar = create_tmp_var_raw (tskred_atype);
4467 gimple_add_tmp_var (tskred_avar);
4468 TREE_ADDRESSABLE (tskred_avar) = 1;
4469 task_reduction_cnt_full = task_reduction_cnt;
4470 task_reduction_cntorig_full = task_reduction_cntorig;
4471 }
4472 else if (pass == 3 && task_reduction_cnt)
4473 {
4474 x = builtin_decl_explicit (BUILT_IN_GOMP_TASK_REDUCTION_REMAP);
4475 gimple *g
4476 = gimple_build_call (x, 3, size_int (task_reduction_cnt),
4477 size_int (task_reduction_cntorig),
4478 build_fold_addr_expr (tskred_avar));
4479 gimple_seq_add_stmt (ilist, g);
4480 }
4481 if (pass == 3 && task_reduction_other_cnt)
4482 {
4483 /* For reduction clauses, build
4484 tskred_base = (void *) tskred_temp[2]
4485 + omp_get_thread_num () * tskred_temp[1]
4486 or if tskred_temp[1] is known to be constant, that constant
4487 directly. This is the start of the private reduction copy block
4488 for the current thread. */
4489 tree v = create_tmp_var (integer_type_node);
4490 x = builtin_decl_explicit (BUILT_IN_OMP_GET_THREAD_NUM);
4491 gimple *g = gimple_build_call (x, 0);
4492 gimple_call_set_lhs (g, v);
4493 gimple_seq_add_stmt (ilist, g);
4494 c = omp_find_clause (clauses, OMP_CLAUSE__REDUCTEMP_);
4495 tskred_temp = OMP_CLAUSE_DECL (c);
4496 if (is_taskreg_ctx (ctx))
4497 tskred_temp = lookup_decl (tskred_temp, ctx);
4498 tree v2 = create_tmp_var (sizetype);
4499 g = gimple_build_assign (v2, NOP_EXPR, v);
4500 gimple_seq_add_stmt (ilist, g);
4501 if (ctx->task_reductions[0])
4502 v = fold_convert (sizetype, ctx->task_reductions[0]);
4503 else
4504 v = task_reduction_read (ilist, tskred_temp, sizetype, 1);
4505 tree v3 = create_tmp_var (sizetype);
4506 g = gimple_build_assign (v3, MULT_EXPR, v2, v);
4507 gimple_seq_add_stmt (ilist, g);
4508 v = task_reduction_read (ilist, tskred_temp, ptr_type_node, 2);
4509 tskred_base = create_tmp_var (ptr_type_node);
4510 g = gimple_build_assign (tskred_base, POINTER_PLUS_EXPR, v, v3);
4511 gimple_seq_add_stmt (ilist, g);
4512 }
4513 task_reduction_cnt = 0;
4514 task_reduction_cntorig = 0;
4515 task_reduction_other_cnt = 0;
4516 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
4517 {
4518 enum omp_clause_code c_kind = OMP_CLAUSE_CODE (c);
4519 tree var, new_var;
4520 bool by_ref;
4521 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
4522 bool task_reduction_p = false;
4523 bool task_reduction_needs_orig_p = false;
4524 tree cond = NULL_TREE;
4525
4526 switch (c_kind)
4527 {
4528 case OMP_CLAUSE_PRIVATE:
4529 if (OMP_CLAUSE_PRIVATE_DEBUG (c))
4530 continue;
4531 break;
4532 case OMP_CLAUSE_SHARED:
4533 /* Ignore shared directives in teams construct inside
4534 of target construct. */
4535 if (gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS
4536 && !is_host_teams_ctx (ctx))
4537 continue;
4538 if (maybe_lookup_decl (OMP_CLAUSE_DECL (c), ctx) == NULL)
4539 {
4540 gcc_assert (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c)
4541 || is_global_var (OMP_CLAUSE_DECL (c)));
4542 continue;
4543 }
4544 case OMP_CLAUSE_FIRSTPRIVATE:
4545 case OMP_CLAUSE_COPYIN:
4546 break;
4547 case OMP_CLAUSE_LINEAR:
4548 if (!OMP_CLAUSE_LINEAR_NO_COPYIN (c)
4549 && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c))
4550 lastprivate_firstprivate = true;
4551 break;
4552 case OMP_CLAUSE_REDUCTION:
4553 case OMP_CLAUSE_IN_REDUCTION:
4554 if (is_task_ctx (ctx) || OMP_CLAUSE_REDUCTION_TASK (c))
4555 {
4556 task_reduction_p = true;
4557 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION)
4558 {
4559 task_reduction_other_cnt++;
4560 if (pass == 2)
4561 continue;
4562 }
4563 else
4564 task_reduction_cnt++;
4565 if (OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c))
4566 {
4567 var = OMP_CLAUSE_DECL (c);
4568 /* If var is a global variable that isn't privatized
4569 in outer contexts, we don't need to look up the
4570 original address, it is always the address of the
4571 global variable itself. */
4572 if (!DECL_P (var)
4573 || omp_is_reference (var)
4574 || !is_global_var
4575 (maybe_lookup_decl_in_outer_ctx (var, ctx)))
4576 {
4577 task_reduction_needs_orig_p = true;
4578 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION)
4579 task_reduction_cntorig++;
4580 }
4581 }
4582 }
4583 else if (OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c))
4584 reduction_omp_orig_ref = true;
4585 break;
4586 case OMP_CLAUSE__REDUCTEMP_:
4587 if (!is_taskreg_ctx (ctx))
4588 continue;
4589 /* FALLTHRU */
4590 case OMP_CLAUSE__LOOPTEMP_:
4591 /* Handle _looptemp_/_reductemp_ clauses only on
4592 parallel/task. */
4593 if (fd)
4594 continue;
4595 break;
4596 case OMP_CLAUSE_LASTPRIVATE:
4597 if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
4598 {
4599 lastprivate_firstprivate = true;
4600 if (pass != 0 || is_taskloop_ctx (ctx))
4601 continue;
4602 }
4603 /* Even without corresponding firstprivate, if
4604 decl is Fortran allocatable, it needs outer var
4605 reference. */
4606 else if (pass == 0
4607 && lang_hooks.decls.omp_private_outer_ref
4608 (OMP_CLAUSE_DECL (c)))
4609 lastprivate_firstprivate = true;
4610 break;
4611 case OMP_CLAUSE_ALIGNED:
4612 if (pass != 1)
4613 continue;
4614 var = OMP_CLAUSE_DECL (c);
4615 if (TREE_CODE (TREE_TYPE (var)) == POINTER_TYPE
4616 && !is_global_var (var))
4617 {
4618 new_var = maybe_lookup_decl (var, ctx);
4619 if (new_var == NULL_TREE)
4620 new_var = maybe_lookup_decl_in_outer_ctx (var, ctx);
4621 x = builtin_decl_explicit (BUILT_IN_ASSUME_ALIGNED);
4622 tree alarg = omp_clause_aligned_alignment (c);
4623 alarg = fold_convert_loc (clause_loc, size_type_node, alarg);
4624 x = build_call_expr_loc (clause_loc, x, 2, new_var, alarg);
4625 x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x);
4626 x = build2 (MODIFY_EXPR, TREE_TYPE (new_var), new_var, x);
4627 gimplify_and_add (x, ilist);
4628 }
4629 else if (TREE_CODE (TREE_TYPE (var)) == ARRAY_TYPE
4630 && is_global_var (var))
4631 {
4632 tree ptype = build_pointer_type (TREE_TYPE (var)), t, t2;
4633 new_var = lookup_decl (var, ctx);
4634 t = maybe_lookup_decl_in_outer_ctx (var, ctx);
4635 t = build_fold_addr_expr_loc (clause_loc, t);
4636 t2 = builtin_decl_explicit (BUILT_IN_ASSUME_ALIGNED);
4637 tree alarg = omp_clause_aligned_alignment (c);
4638 alarg = fold_convert_loc (clause_loc, size_type_node, alarg);
4639 t = build_call_expr_loc (clause_loc, t2, 2, t, alarg);
4640 t = fold_convert_loc (clause_loc, ptype, t);
4641 x = create_tmp_var (ptype);
4642 t = build2 (MODIFY_EXPR, ptype, x, t);
4643 gimplify_and_add (t, ilist);
4644 t = build_simple_mem_ref_loc (clause_loc, x);
4645 SET_DECL_VALUE_EXPR (new_var, t);
4646 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
4647 }
4648 continue;
4649 case OMP_CLAUSE__CONDTEMP_:
4650 if (is_parallel_ctx (ctx)
4651 || (is_simd && !OMP_CLAUSE__CONDTEMP__ITER (c)))
4652 break;
4653 continue;
4654 default:
4655 continue;
4656 }
4657
4658 if (task_reduction_p != (pass >= 2))
4659 continue;
4660
4661 new_var = var = OMP_CLAUSE_DECL (c);
4662 if ((c_kind == OMP_CLAUSE_REDUCTION
4663 || c_kind == OMP_CLAUSE_IN_REDUCTION)
4664 && TREE_CODE (var) == MEM_REF)
4665 {
4666 var = TREE_OPERAND (var, 0);
4667 if (TREE_CODE (var) == POINTER_PLUS_EXPR)
4668 var = TREE_OPERAND (var, 0);
4669 if (TREE_CODE (var) == INDIRECT_REF
4670 || TREE_CODE (var) == ADDR_EXPR)
4671 var = TREE_OPERAND (var, 0);
4672 if (is_variable_sized (var))
4673 {
4674 gcc_assert (DECL_HAS_VALUE_EXPR_P (var));
4675 var = DECL_VALUE_EXPR (var);
4676 gcc_assert (TREE_CODE (var) == INDIRECT_REF);
4677 var = TREE_OPERAND (var, 0);
4678 gcc_assert (DECL_P (var));
4679 }
4680 new_var = var;
4681 }
4682 if (c_kind != OMP_CLAUSE_COPYIN)
4683 new_var = lookup_decl (var, ctx);
4684
4685 if (c_kind == OMP_CLAUSE_SHARED || c_kind == OMP_CLAUSE_COPYIN)
4686 {
4687 if (pass != 0)
4688 continue;
4689 }
4690 /* C/C++ array section reductions. */
4691 else if ((c_kind == OMP_CLAUSE_REDUCTION
4692 || c_kind == OMP_CLAUSE_IN_REDUCTION)
4693 && var != OMP_CLAUSE_DECL (c))
4694 {
4695 if (pass == 0)
4696 continue;
4697
4698 tree bias = TREE_OPERAND (OMP_CLAUSE_DECL (c), 1);
4699 tree orig_var = TREE_OPERAND (OMP_CLAUSE_DECL (c), 0);
4700
4701 if (TREE_CODE (orig_var) == POINTER_PLUS_EXPR)
4702 {
4703 tree b = TREE_OPERAND (orig_var, 1);
4704 b = maybe_lookup_decl (b, ctx);
4705 if (b == NULL)
4706 {
4707 b = TREE_OPERAND (orig_var, 1);
4708 b = maybe_lookup_decl_in_outer_ctx (b, ctx);
4709 }
4710 if (integer_zerop (bias))
4711 bias = b;
4712 else
4713 {
4714 bias = fold_convert_loc (clause_loc,
4715 TREE_TYPE (b), bias);
4716 bias = fold_build2_loc (clause_loc, PLUS_EXPR,
4717 TREE_TYPE (b), b, bias);
4718 }
4719 orig_var = TREE_OPERAND (orig_var, 0);
4720 }
4721 if (pass == 2)
4722 {
4723 tree out = maybe_lookup_decl_in_outer_ctx (var, ctx);
4724 if (is_global_var (out)
4725 && TREE_CODE (TREE_TYPE (out)) != POINTER_TYPE
4726 && (TREE_CODE (TREE_TYPE (out)) != REFERENCE_TYPE
4727 || (TREE_CODE (TREE_TYPE (TREE_TYPE (out)))
4728 != POINTER_TYPE)))
4729 x = var;
4730 else
4731 {
4732 bool by_ref = use_pointer_for_field (var, NULL);
4733 x = build_receiver_ref (var, by_ref, ctx);
4734 if (TREE_CODE (TREE_TYPE (var)) == REFERENCE_TYPE
4735 && (TREE_CODE (TREE_TYPE (TREE_TYPE (var)))
4736 == POINTER_TYPE))
4737 x = build_fold_addr_expr (x);
4738 }
4739 if (TREE_CODE (orig_var) == INDIRECT_REF)
4740 x = build_simple_mem_ref (x);
4741 else if (TREE_CODE (orig_var) == ADDR_EXPR)
4742 {
4743 if (var == TREE_OPERAND (orig_var, 0))
4744 x = build_fold_addr_expr (x);
4745 }
4746 bias = fold_convert (sizetype, bias);
4747 x = fold_convert (ptr_type_node, x);
4748 x = fold_build2_loc (clause_loc, POINTER_PLUS_EXPR,
4749 TREE_TYPE (x), x, bias);
4750 unsigned cnt = task_reduction_cnt - 1;
4751 if (!task_reduction_needs_orig_p)
4752 cnt += (task_reduction_cntorig_full
4753 - task_reduction_cntorig);
4754 else
4755 cnt = task_reduction_cntorig - 1;
4756 tree r = build4 (ARRAY_REF, ptr_type_node, tskred_avar,
4757 size_int (cnt), NULL_TREE, NULL_TREE);
4758 gimplify_assign (r, x, ilist);
4759 continue;
4760 }
4761
4762 if (TREE_CODE (orig_var) == INDIRECT_REF
4763 || TREE_CODE (orig_var) == ADDR_EXPR)
4764 orig_var = TREE_OPERAND (orig_var, 0);
4765 tree d = OMP_CLAUSE_DECL (c);
4766 tree type = TREE_TYPE (d);
4767 gcc_assert (TREE_CODE (type) == ARRAY_TYPE);
4768 tree v = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
4769 const char *name = get_name (orig_var);
4770 if (pass == 3)
4771 {
4772 tree xv = create_tmp_var (ptr_type_node);
4773 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION)
4774 {
4775 unsigned cnt = task_reduction_cnt - 1;
4776 if (!task_reduction_needs_orig_p)
4777 cnt += (task_reduction_cntorig_full
4778 - task_reduction_cntorig);
4779 else
4780 cnt = task_reduction_cntorig - 1;
4781 x = build4 (ARRAY_REF, ptr_type_node, tskred_avar,
4782 size_int (cnt), NULL_TREE, NULL_TREE);
4783
4784 gimple *g = gimple_build_assign (xv, x);
4785 gimple_seq_add_stmt (ilist, g);
4786 }
4787 else
4788 {
4789 unsigned int idx = *ctx->task_reduction_map->get (c);
4790 tree off;
4791 if (ctx->task_reductions[1 + idx])
4792 off = fold_convert (sizetype,
4793 ctx->task_reductions[1 + idx]);
4794 else
4795 off = task_reduction_read (ilist, tskred_temp, sizetype,
4796 7 + 3 * idx + 1);
4797 gimple *g = gimple_build_assign (xv, POINTER_PLUS_EXPR,
4798 tskred_base, off);
4799 gimple_seq_add_stmt (ilist, g);
4800 }
4801 x = fold_convert (build_pointer_type (boolean_type_node),
4802 xv);
4803 if (TREE_CONSTANT (v))
4804 x = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (x), x,
4805 TYPE_SIZE_UNIT (type));
4806 else
4807 {
4808 tree t = maybe_lookup_decl (v, ctx);
4809 if (t)
4810 v = t;
4811 else
4812 v = maybe_lookup_decl_in_outer_ctx (v, ctx);
4813 gimplify_expr (&v, ilist, NULL, is_gimple_val,
4814 fb_rvalue);
4815 t = fold_build2_loc (clause_loc, PLUS_EXPR,
4816 TREE_TYPE (v), v,
4817 build_int_cst (TREE_TYPE (v), 1));
4818 t = fold_build2_loc (clause_loc, MULT_EXPR,
4819 TREE_TYPE (v), t,
4820 TYPE_SIZE_UNIT (TREE_TYPE (type)));
4821 x = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (x), x, t);
4822 }
4823 cond = create_tmp_var (TREE_TYPE (x));
4824 gimplify_assign (cond, x, ilist);
4825 x = xv;
4826 }
4827 else if (TREE_CONSTANT (v))
4828 {
4829 x = create_tmp_var_raw (type, name);
4830 gimple_add_tmp_var (x);
4831 TREE_ADDRESSABLE (x) = 1;
4832 x = build_fold_addr_expr_loc (clause_loc, x);
4833 }
4834 else
4835 {
4836 tree atmp
4837 = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
4838 tree t = maybe_lookup_decl (v, ctx);
4839 if (t)
4840 v = t;
4841 else
4842 v = maybe_lookup_decl_in_outer_ctx (v, ctx);
4843 gimplify_expr (&v, ilist, NULL, is_gimple_val, fb_rvalue);
4844 t = fold_build2_loc (clause_loc, PLUS_EXPR,
4845 TREE_TYPE (v), v,
4846 build_int_cst (TREE_TYPE (v), 1));
4847 t = fold_build2_loc (clause_loc, MULT_EXPR,
4848 TREE_TYPE (v), t,
4849 TYPE_SIZE_UNIT (TREE_TYPE (type)));
4850 tree al = size_int (TYPE_ALIGN (TREE_TYPE (type)));
4851 x = build_call_expr_loc (clause_loc, atmp, 2, t, al);
4852 }
4853
4854 tree ptype = build_pointer_type (TREE_TYPE (type));
4855 x = fold_convert_loc (clause_loc, ptype, x);
4856 tree y = create_tmp_var (ptype, name);
4857 gimplify_assign (y, x, ilist);
4858 x = y;
4859 tree yb = y;
4860
4861 if (!integer_zerop (bias))
4862 {
4863 bias = fold_convert_loc (clause_loc, pointer_sized_int_node,
4864 bias);
4865 yb = fold_convert_loc (clause_loc, pointer_sized_int_node,
4866 x);
4867 yb = fold_build2_loc (clause_loc, MINUS_EXPR,
4868 pointer_sized_int_node, yb, bias);
4869 x = fold_convert_loc (clause_loc, TREE_TYPE (x), yb);
4870 yb = create_tmp_var (ptype, name);
4871 gimplify_assign (yb, x, ilist);
4872 x = yb;
4873 }
4874
4875 d = TREE_OPERAND (d, 0);
4876 if (TREE_CODE (d) == POINTER_PLUS_EXPR)
4877 d = TREE_OPERAND (d, 0);
4878 if (TREE_CODE (d) == ADDR_EXPR)
4879 {
4880 if (orig_var != var)
4881 {
4882 gcc_assert (is_variable_sized (orig_var));
4883 x = fold_convert_loc (clause_loc, TREE_TYPE (new_var),
4884 x);
4885 gimplify_assign (new_var, x, ilist);
4886 tree new_orig_var = lookup_decl (orig_var, ctx);
4887 tree t = build_fold_indirect_ref (new_var);
4888 DECL_IGNORED_P (new_var) = 0;
4889 TREE_THIS_NOTRAP (t) = 1;
4890 SET_DECL_VALUE_EXPR (new_orig_var, t);
4891 DECL_HAS_VALUE_EXPR_P (new_orig_var) = 1;
4892 }
4893 else
4894 {
4895 x = build2 (MEM_REF, TREE_TYPE (new_var), x,
4896 build_int_cst (ptype, 0));
4897 SET_DECL_VALUE_EXPR (new_var, x);
4898 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
4899 }
4900 }
4901 else
4902 {
4903 gcc_assert (orig_var == var);
4904 if (TREE_CODE (d) == INDIRECT_REF)
4905 {
4906 x = create_tmp_var (ptype, name);
4907 TREE_ADDRESSABLE (x) = 1;
4908 gimplify_assign (x, yb, ilist);
4909 x = build_fold_addr_expr_loc (clause_loc, x);
4910 }
4911 x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x);
4912 gimplify_assign (new_var, x, ilist);
4913 }
4914 /* GOMP_taskgroup_reduction_register memsets the whole
4915 array to zero. If the initializer is zero, we don't
4916 need to initialize it again, just mark it as ever
4917 used unconditionally, i.e. cond = true. */
4918 if (cond
4919 && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) == NULL_TREE
4920 && initializer_zerop (omp_reduction_init (c,
4921 TREE_TYPE (type))))
4922 {
4923 gimple *g = gimple_build_assign (build_simple_mem_ref (cond),
4924 boolean_true_node);
4925 gimple_seq_add_stmt (ilist, g);
4926 continue;
4927 }
4928 tree end = create_artificial_label (UNKNOWN_LOCATION);
4929 if (cond)
4930 {
4931 gimple *g;
4932 if (!is_parallel_ctx (ctx))
4933 {
4934 tree condv = create_tmp_var (boolean_type_node);
4935 g = gimple_build_assign (condv,
4936 build_simple_mem_ref (cond));
4937 gimple_seq_add_stmt (ilist, g);
4938 tree lab1 = create_artificial_label (UNKNOWN_LOCATION);
4939 g = gimple_build_cond (NE_EXPR, condv,
4940 boolean_false_node, end, lab1);
4941 gimple_seq_add_stmt (ilist, g);
4942 gimple_seq_add_stmt (ilist, gimple_build_label (lab1));
4943 }
4944 g = gimple_build_assign (build_simple_mem_ref (cond),
4945 boolean_true_node);
4946 gimple_seq_add_stmt (ilist, g);
4947 }
4948
4949 tree y1 = create_tmp_var (ptype);
4950 gimplify_assign (y1, y, ilist);
4951 tree i2 = NULL_TREE, y2 = NULL_TREE;
4952 tree body2 = NULL_TREE, end2 = NULL_TREE;
4953 tree y3 = NULL_TREE, y4 = NULL_TREE;
4954 if (task_reduction_needs_orig_p)
4955 {
4956 y3 = create_tmp_var (ptype);
4957 tree ref;
4958 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION)
4959 ref = build4 (ARRAY_REF, ptr_type_node, tskred_avar,
4960 size_int (task_reduction_cnt_full
4961 + task_reduction_cntorig - 1),
4962 NULL_TREE, NULL_TREE);
4963 else
4964 {
4965 unsigned int idx = *ctx->task_reduction_map->get (c);
4966 ref = task_reduction_read (ilist, tskred_temp, ptype,
4967 7 + 3 * idx);
4968 }
4969 gimplify_assign (y3, ref, ilist);
4970 }
4971 else if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) || is_simd)
4972 {
4973 if (pass != 3)
4974 {
4975 y2 = create_tmp_var (ptype);
4976 gimplify_assign (y2, y, ilist);
4977 }
4978 if (is_simd || OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c))
4979 {
4980 tree ref = build_outer_var_ref (var, ctx);
4981 /* For ref build_outer_var_ref already performs this. */
4982 if (TREE_CODE (d) == INDIRECT_REF)
4983 gcc_assert (omp_is_reference (var));
4984 else if (TREE_CODE (d) == ADDR_EXPR)
4985 ref = build_fold_addr_expr (ref);
4986 else if (omp_is_reference (var))
4987 ref = build_fold_addr_expr (ref);
4988 ref = fold_convert_loc (clause_loc, ptype, ref);
4989 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c)
4990 && OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c))
4991 {
4992 y3 = create_tmp_var (ptype);
4993 gimplify_assign (y3, unshare_expr (ref), ilist);
4994 }
4995 if (is_simd)
4996 {
4997 y4 = create_tmp_var (ptype);
4998 gimplify_assign (y4, ref, dlist);
4999 }
5000 }
5001 }
5002 tree i = create_tmp_var (TREE_TYPE (v));
5003 gimplify_assign (i, build_int_cst (TREE_TYPE (v), 0), ilist);
5004 tree body = create_artificial_label (UNKNOWN_LOCATION);
5005 gimple_seq_add_stmt (ilist, gimple_build_label (body));
5006 if (y2)
5007 {
5008 i2 = create_tmp_var (TREE_TYPE (v));
5009 gimplify_assign (i2, build_int_cst (TREE_TYPE (v), 0), dlist);
5010 body2 = create_artificial_label (UNKNOWN_LOCATION);
5011 end2 = create_artificial_label (UNKNOWN_LOCATION);
5012 gimple_seq_add_stmt (dlist, gimple_build_label (body2));
5013 }
5014 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
5015 {
5016 tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
5017 tree decl_placeholder
5018 = OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c);
5019 SET_DECL_VALUE_EXPR (decl_placeholder,
5020 build_simple_mem_ref (y1));
5021 DECL_HAS_VALUE_EXPR_P (decl_placeholder) = 1;
5022 SET_DECL_VALUE_EXPR (placeholder,
5023 y3 ? build_simple_mem_ref (y3)
5024 : error_mark_node);
5025 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
5026 x = lang_hooks.decls.omp_clause_default_ctor
5027 (c, build_simple_mem_ref (y1),
5028 y3 ? build_simple_mem_ref (y3) : NULL_TREE);
5029 if (x)
5030 gimplify_and_add (x, ilist);
5031 if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c))
5032 {
5033 gimple_seq tseq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c);
5034 lower_omp (&tseq, ctx);
5035 gimple_seq_add_seq (ilist, tseq);
5036 }
5037 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL;
5038 if (is_simd)
5039 {
5040 SET_DECL_VALUE_EXPR (decl_placeholder,
5041 build_simple_mem_ref (y2));
5042 SET_DECL_VALUE_EXPR (placeholder,
5043 build_simple_mem_ref (y4));
5044 gimple_seq tseq = OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c);
5045 lower_omp (&tseq, ctx);
5046 gimple_seq_add_seq (dlist, tseq);
5047 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
5048 }
5049 DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
5050 DECL_HAS_VALUE_EXPR_P (decl_placeholder) = 0;
5051 if (y2)
5052 {
5053 x = lang_hooks.decls.omp_clause_dtor
5054 (c, build_simple_mem_ref (y2));
5055 if (x)
5056 gimplify_and_add (x, dlist);
5057 }
5058 }
5059 else
5060 {
5061 x = omp_reduction_init (c, TREE_TYPE (type));
5062 enum tree_code code = OMP_CLAUSE_REDUCTION_CODE (c);
5063
5064 /* reduction(-:var) sums up the partial results, so it
5065 acts identically to reduction(+:var). */
5066 if (code == MINUS_EXPR)
5067 code = PLUS_EXPR;
5068
5069 gimplify_assign (build_simple_mem_ref (y1), x, ilist);
5070 if (is_simd)
5071 {
5072 x = build2 (code, TREE_TYPE (type),
5073 build_simple_mem_ref (y4),
5074 build_simple_mem_ref (y2));
5075 gimplify_assign (build_simple_mem_ref (y4), x, dlist);
5076 }
5077 }
5078 gimple *g
5079 = gimple_build_assign (y1, POINTER_PLUS_EXPR, y1,
5080 TYPE_SIZE_UNIT (TREE_TYPE (type)));
5081 gimple_seq_add_stmt (ilist, g);
5082 if (y3)
5083 {
5084 g = gimple_build_assign (y3, POINTER_PLUS_EXPR, y3,
5085 TYPE_SIZE_UNIT (TREE_TYPE (type)));
5086 gimple_seq_add_stmt (ilist, g);
5087 }
5088 g = gimple_build_assign (i, PLUS_EXPR, i,
5089 build_int_cst (TREE_TYPE (i), 1));
5090 gimple_seq_add_stmt (ilist, g);
5091 g = gimple_build_cond (LE_EXPR, i, v, body, end);
5092 gimple_seq_add_stmt (ilist, g);
5093 gimple_seq_add_stmt (ilist, gimple_build_label (end));
5094 if (y2)
5095 {
5096 g = gimple_build_assign (y2, POINTER_PLUS_EXPR, y2,
5097 TYPE_SIZE_UNIT (TREE_TYPE (type)));
5098 gimple_seq_add_stmt (dlist, g);
5099 if (y4)
5100 {
5101 g = gimple_build_assign
5102 (y4, POINTER_PLUS_EXPR, y4,
5103 TYPE_SIZE_UNIT (TREE_TYPE (type)));
5104 gimple_seq_add_stmt (dlist, g);
5105 }
5106 g = gimple_build_assign (i2, PLUS_EXPR, i2,
5107 build_int_cst (TREE_TYPE (i2), 1));
5108 gimple_seq_add_stmt (dlist, g);
5109 g = gimple_build_cond (LE_EXPR, i2, v, body2, end2);
5110 gimple_seq_add_stmt (dlist, g);
5111 gimple_seq_add_stmt (dlist, gimple_build_label (end2));
5112 }
5113 continue;
5114 }
5115 else if (pass == 2)
5116 {
5117 if (is_global_var (maybe_lookup_decl_in_outer_ctx (var, ctx)))
5118 x = var;
5119 else
5120 {
5121 bool by_ref = use_pointer_for_field (var, ctx);
5122 x = build_receiver_ref (var, by_ref, ctx);
5123 }
5124 if (!omp_is_reference (var))
5125 x = build_fold_addr_expr (x);
5126 x = fold_convert (ptr_type_node, x);
5127 unsigned cnt = task_reduction_cnt - 1;
5128 if (!task_reduction_needs_orig_p)
5129 cnt += task_reduction_cntorig_full - task_reduction_cntorig;
5130 else
5131 cnt = task_reduction_cntorig - 1;
5132 tree r = build4 (ARRAY_REF, ptr_type_node, tskred_avar,
5133 size_int (cnt), NULL_TREE, NULL_TREE);
5134 gimplify_assign (r, x, ilist);
5135 continue;
5136 }
5137 else if (pass == 3)
5138 {
5139 tree type = TREE_TYPE (new_var);
5140 if (!omp_is_reference (var))
5141 type = build_pointer_type (type);
5142 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION)
5143 {
5144 unsigned cnt = task_reduction_cnt - 1;
5145 if (!task_reduction_needs_orig_p)
5146 cnt += (task_reduction_cntorig_full
5147 - task_reduction_cntorig);
5148 else
5149 cnt = task_reduction_cntorig - 1;
5150 x = build4 (ARRAY_REF, ptr_type_node, tskred_avar,
5151 size_int (cnt), NULL_TREE, NULL_TREE);
5152 }
5153 else
5154 {
5155 unsigned int idx = *ctx->task_reduction_map->get (c);
5156 tree off;
5157 if (ctx->task_reductions[1 + idx])
5158 off = fold_convert (sizetype,
5159 ctx->task_reductions[1 + idx]);
5160 else
5161 off = task_reduction_read (ilist, tskred_temp, sizetype,
5162 7 + 3 * idx + 1);
5163 x = fold_build2 (POINTER_PLUS_EXPR, ptr_type_node,
5164 tskred_base, off);
5165 }
5166 x = fold_convert (type, x);
5167 tree t;
5168 if (omp_is_reference (var))
5169 {
5170 gimplify_assign (new_var, x, ilist);
5171 t = new_var;
5172 new_var = build_simple_mem_ref (new_var);
5173 }
5174 else
5175 {
5176 t = create_tmp_var (type);
5177 gimplify_assign (t, x, ilist);
5178 SET_DECL_VALUE_EXPR (new_var, build_simple_mem_ref (t));
5179 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
5180 }
5181 t = fold_convert (build_pointer_type (boolean_type_node), t);
5182 t = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (t), t,
5183 TYPE_SIZE_UNIT (TREE_TYPE (type)));
5184 cond = create_tmp_var (TREE_TYPE (t));
5185 gimplify_assign (cond, t, ilist);
5186 }
5187 else if (is_variable_sized (var))
5188 {
5189 /* For variable sized types, we need to allocate the
5190 actual storage here. Call alloca and store the
5191 result in the pointer decl that we created elsewhere. */
5192 if (pass == 0)
5193 continue;
5194
5195 if (c_kind != OMP_CLAUSE_FIRSTPRIVATE || !is_task_ctx (ctx))
5196 {
5197 gcall *stmt;
5198 tree tmp, atmp;
5199
5200 ptr = DECL_VALUE_EXPR (new_var);
5201 gcc_assert (TREE_CODE (ptr) == INDIRECT_REF);
5202 ptr = TREE_OPERAND (ptr, 0);
5203 gcc_assert (DECL_P (ptr));
5204 x = TYPE_SIZE_UNIT (TREE_TYPE (new_var));
5205
5206 /* void *tmp = __builtin_alloca */
5207 atmp = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
5208 stmt = gimple_build_call (atmp, 2, x,
5209 size_int (DECL_ALIGN (var)));
5210 cfun->calls_alloca = 1;
5211 tmp = create_tmp_var_raw (ptr_type_node);
5212 gimple_add_tmp_var (tmp);
5213 gimple_call_set_lhs (stmt, tmp);
5214
5215 gimple_seq_add_stmt (ilist, stmt);
5216
5217 x = fold_convert_loc (clause_loc, TREE_TYPE (ptr), tmp);
5218 gimplify_assign (ptr, x, ilist);
5219 }
5220 }
5221 else if (omp_is_reference (var)
5222 && (c_kind != OMP_CLAUSE_FIRSTPRIVATE
5223 || !OMP_CLAUSE_FIRSTPRIVATE_NO_REFERENCE (c)))
5224 {
5225 /* For references that are being privatized for Fortran,
5226 allocate new backing storage for the new pointer
5227 variable. This allows us to avoid changing all the
5228 code that expects a pointer to something that expects
5229 a direct variable. */
5230 if (pass == 0)
5231 continue;
5232
5233 x = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (new_var)));
5234 if (c_kind == OMP_CLAUSE_FIRSTPRIVATE && is_task_ctx (ctx))
5235 {
5236 x = build_receiver_ref (var, false, ctx);
5237 x = build_fold_addr_expr_loc (clause_loc, x);
5238 }
5239 else if (TREE_CONSTANT (x))
5240 {
5241 /* For reduction in SIMD loop, defer adding the
5242 initialization of the reference, because if we decide
5243 to use SIMD array for it, the initilization could cause
5244 expansion ICE. Ditto for other privatization clauses. */
5245 if (is_simd)
5246 x = NULL_TREE;
5247 else
5248 {
5249 x = create_tmp_var_raw (TREE_TYPE (TREE_TYPE (new_var)),
5250 get_name (var));
5251 gimple_add_tmp_var (x);
5252 TREE_ADDRESSABLE (x) = 1;
5253 x = build_fold_addr_expr_loc (clause_loc, x);
5254 }
5255 }
5256 else
5257 {
5258 tree atmp
5259 = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
5260 tree rtype = TREE_TYPE (TREE_TYPE (new_var));
5261 tree al = size_int (TYPE_ALIGN (rtype));
5262 x = build_call_expr_loc (clause_loc, atmp, 2, x, al);
5263 }
5264
5265 if (x)
5266 {
5267 x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x);
5268 gimplify_assign (new_var, x, ilist);
5269 }
5270
5271 new_var = build_simple_mem_ref_loc (clause_loc, new_var);
5272 }
5273 else if ((c_kind == OMP_CLAUSE_REDUCTION
5274 || c_kind == OMP_CLAUSE_IN_REDUCTION)
5275 && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
5276 {
5277 if (pass == 0)
5278 continue;
5279 }
5280 else if (pass != 0)
5281 continue;
5282
5283 switch (OMP_CLAUSE_CODE (c))
5284 {
5285 case OMP_CLAUSE_SHARED:
5286 /* Ignore shared directives in teams construct inside
5287 target construct. */
5288 if (gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS
5289 && !is_host_teams_ctx (ctx))
5290 continue;
5291 /* Shared global vars are just accessed directly. */
5292 if (is_global_var (new_var))
5293 break;
5294 /* For taskloop firstprivate/lastprivate, represented
5295 as firstprivate and shared clause on the task, new_var
5296 is the firstprivate var. */
5297 if (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
5298 break;
5299 /* Set up the DECL_VALUE_EXPR for shared variables now. This
5300 needs to be delayed until after fixup_child_record_type so
5301 that we get the correct type during the dereference. */
5302 by_ref = use_pointer_for_field (var, ctx);
5303 x = build_receiver_ref (var, by_ref, ctx);
5304 SET_DECL_VALUE_EXPR (new_var, x);
5305 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
5306
5307 /* ??? If VAR is not passed by reference, and the variable
5308 hasn't been initialized yet, then we'll get a warning for
5309 the store into the omp_data_s structure. Ideally, we'd be
5310 able to notice this and not store anything at all, but
5311 we're generating code too early. Suppress the warning. */
5312 if (!by_ref)
5313 TREE_NO_WARNING (var) = 1;
5314 break;
5315
5316 case OMP_CLAUSE__CONDTEMP_:
5317 if (is_parallel_ctx (ctx))
5318 {
5319 x = build_receiver_ref (var, false, ctx);
5320 SET_DECL_VALUE_EXPR (new_var, x);
5321 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
5322 }
5323 else if (is_simd && !OMP_CLAUSE__CONDTEMP__ITER (c))
5324 {
5325 x = build_zero_cst (TREE_TYPE (var));
5326 goto do_private;
5327 }
5328 break;
5329
5330 case OMP_CLAUSE_LASTPRIVATE:
5331 if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
5332 break;
5333 /* FALLTHRU */
5334
5335 case OMP_CLAUSE_PRIVATE:
5336 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_PRIVATE)
5337 x = build_outer_var_ref (var, ctx);
5338 else if (OMP_CLAUSE_PRIVATE_OUTER_REF (c))
5339 {
5340 if (is_task_ctx (ctx))
5341 x = build_receiver_ref (var, false, ctx);
5342 else
5343 x = build_outer_var_ref (var, ctx, OMP_CLAUSE_PRIVATE);
5344 }
5345 else
5346 x = NULL;
5347 do_private:
5348 tree nx;
5349 bool copy_ctor;
5350 copy_ctor = false;
5351 nx = unshare_expr (new_var);
5352 if (is_simd
5353 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
5354 && OMP_CLAUSE_LASTPRIVATE_LOOP_IV (c))
5355 copy_ctor = true;
5356 if (copy_ctor)
5357 nx = lang_hooks.decls.omp_clause_copy_ctor (c, nx, x);
5358 else
5359 nx = lang_hooks.decls.omp_clause_default_ctor (c, nx, x);
5360 if (is_simd)
5361 {
5362 tree y = lang_hooks.decls.omp_clause_dtor (c, new_var);
5363 if ((TREE_ADDRESSABLE (new_var) || nx || y
5364 || (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
5365 && (gimple_omp_for_collapse (ctx->stmt) != 1
5366 || (gimple_omp_for_index (ctx->stmt, 0)
5367 != new_var)))
5368 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE__CONDTEMP_
5369 || omp_is_reference (var))
5370 && lower_rec_simd_input_clauses (new_var, ctx, &sctx,
5371 ivar, lvar))
5372 {
5373 if (omp_is_reference (var))
5374 {
5375 gcc_assert (TREE_CODE (new_var) == MEM_REF);
5376 tree new_vard = TREE_OPERAND (new_var, 0);
5377 gcc_assert (DECL_P (new_vard));
5378 SET_DECL_VALUE_EXPR (new_vard,
5379 build_fold_addr_expr (lvar));
5380 DECL_HAS_VALUE_EXPR_P (new_vard) = 1;
5381 }
5382
5383 if (nx)
5384 {
5385 tree iv = unshare_expr (ivar);
5386 if (copy_ctor)
5387 x = lang_hooks.decls.omp_clause_copy_ctor (c, iv,
5388 x);
5389 else
5390 x = lang_hooks.decls.omp_clause_default_ctor (c,
5391 iv,
5392 x);
5393 }
5394 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE__CONDTEMP_)
5395 {
5396 x = build2 (MODIFY_EXPR, TREE_TYPE (ivar),
5397 unshare_expr (ivar), x);
5398 nx = x;
5399 }
5400 if (nx && x)
5401 gimplify_and_add (x, &llist[0]);
5402 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
5403 && OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c))
5404 {
5405 tree v = new_var;
5406 if (!DECL_P (v))
5407 {
5408 gcc_assert (TREE_CODE (v) == MEM_REF);
5409 v = TREE_OPERAND (v, 0);
5410 gcc_assert (DECL_P (v));
5411 }
5412 v = *ctx->lastprivate_conditional_map->get (v);
5413 tree t = create_tmp_var (TREE_TYPE (v));
5414 tree z = build_zero_cst (TREE_TYPE (v));
5415 tree orig_v
5416 = build_outer_var_ref (var, ctx,
5417 OMP_CLAUSE_LASTPRIVATE);
5418 gimple_seq_add_stmt (dlist,
5419 gimple_build_assign (t, z));
5420 gcc_assert (DECL_HAS_VALUE_EXPR_P (v));
5421 tree civar = DECL_VALUE_EXPR (v);
5422 gcc_assert (TREE_CODE (civar) == ARRAY_REF);
5423 civar = unshare_expr (civar);
5424 TREE_OPERAND (civar, 1) = sctx.idx;
5425 x = build2 (MODIFY_EXPR, TREE_TYPE (t), t,
5426 unshare_expr (civar));
5427 x = build2 (COMPOUND_EXPR, TREE_TYPE (orig_v), x,
5428 build2 (MODIFY_EXPR, TREE_TYPE (orig_v),
5429 orig_v, unshare_expr (ivar)));
5430 tree cond = build2 (LT_EXPR, boolean_type_node, t,
5431 civar);
5432 x = build3 (COND_EXPR, void_type_node, cond, x,
5433 void_node);
5434 gimple_seq tseq = NULL;
5435 gimplify_and_add (x, &tseq);
5436 if (ctx->outer)
5437 lower_omp (&tseq, ctx->outer);
5438 gimple_seq_add_seq (&llist[1], tseq);
5439 }
5440 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
5441 && ctx->for_simd_scan_phase)
5442 {
5443 x = unshare_expr (ivar);
5444 tree orig_v
5445 = build_outer_var_ref (var, ctx,
5446 OMP_CLAUSE_LASTPRIVATE);
5447 x = lang_hooks.decls.omp_clause_assign_op (c, x,
5448 orig_v);
5449 gimplify_and_add (x, &llist[0]);
5450 }
5451 if (y)
5452 {
5453 y = lang_hooks.decls.omp_clause_dtor (c, ivar);
5454 if (y)
5455 gimplify_and_add (y, &llist[1]);
5456 }
5457 break;
5458 }
5459 if (omp_is_reference (var))
5460 {
5461 gcc_assert (TREE_CODE (new_var) == MEM_REF);
5462 tree new_vard = TREE_OPERAND (new_var, 0);
5463 gcc_assert (DECL_P (new_vard));
5464 tree type = TREE_TYPE (TREE_TYPE (new_vard));
5465 x = TYPE_SIZE_UNIT (type);
5466 if (TREE_CONSTANT (x))
5467 {
5468 x = create_tmp_var_raw (type, get_name (var));
5469 gimple_add_tmp_var (x);
5470 TREE_ADDRESSABLE (x) = 1;
5471 x = build_fold_addr_expr_loc (clause_loc, x);
5472 x = fold_convert_loc (clause_loc,
5473 TREE_TYPE (new_vard), x);
5474 gimplify_assign (new_vard, x, ilist);
5475 }
5476 }
5477 }
5478 if (nx)
5479 gimplify_and_add (nx, ilist);
5480 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
5481 && is_simd
5482 && ctx->for_simd_scan_phase)
5483 {
5484 tree orig_v = build_outer_var_ref (var, ctx,
5485 OMP_CLAUSE_LASTPRIVATE);
5486 x = lang_hooks.decls.omp_clause_assign_op (c, new_var,
5487 orig_v);
5488 gimplify_and_add (x, ilist);
5489 }
5490 /* FALLTHRU */
5491
5492 do_dtor:
5493 x = lang_hooks.decls.omp_clause_dtor (c, new_var);
5494 if (x)
5495 gimplify_and_add (x, dlist);
5496 break;
5497
5498 case OMP_CLAUSE_LINEAR:
5499 if (!OMP_CLAUSE_LINEAR_NO_COPYIN (c))
5500 goto do_firstprivate;
5501 if (OMP_CLAUSE_LINEAR_NO_COPYOUT (c))
5502 x = NULL;
5503 else
5504 x = build_outer_var_ref (var, ctx);
5505 goto do_private;
5506
5507 case OMP_CLAUSE_FIRSTPRIVATE:
5508 if (is_task_ctx (ctx))
5509 {
5510 if ((omp_is_reference (var)
5511 && !OMP_CLAUSE_FIRSTPRIVATE_NO_REFERENCE (c))
5512 || is_variable_sized (var))
5513 goto do_dtor;
5514 else if (is_global_var (maybe_lookup_decl_in_outer_ctx (var,
5515 ctx))
5516 || use_pointer_for_field (var, NULL))
5517 {
5518 x = build_receiver_ref (var, false, ctx);
5519 SET_DECL_VALUE_EXPR (new_var, x);
5520 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
5521 goto do_dtor;
5522 }
5523 }
5524 if (OMP_CLAUSE_FIRSTPRIVATE_NO_REFERENCE (c)
5525 && omp_is_reference (var))
5526 {
5527 x = build_outer_var_ref (var, ctx);
5528 gcc_assert (TREE_CODE (x) == MEM_REF
5529 && integer_zerop (TREE_OPERAND (x, 1)));
5530 x = TREE_OPERAND (x, 0);
5531 x = lang_hooks.decls.omp_clause_copy_ctor
5532 (c, unshare_expr (new_var), x);
5533 gimplify_and_add (x, ilist);
5534 goto do_dtor;
5535 }
5536 do_firstprivate:
5537 x = build_outer_var_ref (var, ctx);
5538 if (is_simd)
5539 {
5540 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
5541 && gimple_omp_for_combined_into_p (ctx->stmt))
5542 {
5543 tree t = OMP_CLAUSE_LINEAR_STEP (c);
5544 tree stept = TREE_TYPE (t);
5545 tree ct = omp_find_clause (clauses,
5546 OMP_CLAUSE__LOOPTEMP_);
5547 gcc_assert (ct);
5548 tree l = OMP_CLAUSE_DECL (ct);
5549 tree n1 = fd->loop.n1;
5550 tree step = fd->loop.step;
5551 tree itype = TREE_TYPE (l);
5552 if (POINTER_TYPE_P (itype))
5553 itype = signed_type_for (itype);
5554 l = fold_build2 (MINUS_EXPR, itype, l, n1);
5555 if (TYPE_UNSIGNED (itype)
5556 && fd->loop.cond_code == GT_EXPR)
5557 l = fold_build2 (TRUNC_DIV_EXPR, itype,
5558 fold_build1 (NEGATE_EXPR, itype, l),
5559 fold_build1 (NEGATE_EXPR,
5560 itype, step));
5561 else
5562 l = fold_build2 (TRUNC_DIV_EXPR, itype, l, step);
5563 t = fold_build2 (MULT_EXPR, stept,
5564 fold_convert (stept, l), t);
5565
5566 if (OMP_CLAUSE_LINEAR_ARRAY (c))
5567 {
5568 if (omp_is_reference (var))
5569 {
5570 gcc_assert (TREE_CODE (new_var) == MEM_REF);
5571 tree new_vard = TREE_OPERAND (new_var, 0);
5572 gcc_assert (DECL_P (new_vard));
5573 tree type = TREE_TYPE (TREE_TYPE (new_vard));
5574 nx = TYPE_SIZE_UNIT (type);
5575 if (TREE_CONSTANT (nx))
5576 {
5577 nx = create_tmp_var_raw (type,
5578 get_name (var));
5579 gimple_add_tmp_var (nx);
5580 TREE_ADDRESSABLE (nx) = 1;
5581 nx = build_fold_addr_expr_loc (clause_loc,
5582 nx);
5583 nx = fold_convert_loc (clause_loc,
5584 TREE_TYPE (new_vard),
5585 nx);
5586 gimplify_assign (new_vard, nx, ilist);
5587 }
5588 }
5589
5590 x = lang_hooks.decls.omp_clause_linear_ctor
5591 (c, new_var, x, t);
5592 gimplify_and_add (x, ilist);
5593 goto do_dtor;
5594 }
5595
5596 if (POINTER_TYPE_P (TREE_TYPE (x)))
5597 x = fold_build2 (POINTER_PLUS_EXPR,
5598 TREE_TYPE (x), x, t);
5599 else
5600 x = fold_build2 (PLUS_EXPR, TREE_TYPE (x), x, t);
5601 }
5602
5603 if ((OMP_CLAUSE_CODE (c) != OMP_CLAUSE_LINEAR
5604 || TREE_ADDRESSABLE (new_var)
5605 || omp_is_reference (var))
5606 && lower_rec_simd_input_clauses (new_var, ctx, &sctx,
5607 ivar, lvar))
5608 {
5609 if (omp_is_reference (var))
5610 {
5611 gcc_assert (TREE_CODE (new_var) == MEM_REF);
5612 tree new_vard = TREE_OPERAND (new_var, 0);
5613 gcc_assert (DECL_P (new_vard));
5614 SET_DECL_VALUE_EXPR (new_vard,
5615 build_fold_addr_expr (lvar));
5616 DECL_HAS_VALUE_EXPR_P (new_vard) = 1;
5617 }
5618 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR)
5619 {
5620 tree iv = create_tmp_var (TREE_TYPE (new_var));
5621 x = lang_hooks.decls.omp_clause_copy_ctor (c, iv, x);
5622 gimplify_and_add (x, ilist);
5623 gimple_stmt_iterator gsi
5624 = gsi_start_1 (gimple_omp_body_ptr (ctx->stmt));
5625 gassign *g
5626 = gimple_build_assign (unshare_expr (lvar), iv);
5627 gsi_insert_before_without_update (&gsi, g,
5628 GSI_SAME_STMT);
5629 tree t = OMP_CLAUSE_LINEAR_STEP (c);
5630 enum tree_code code = PLUS_EXPR;
5631 if (POINTER_TYPE_P (TREE_TYPE (new_var)))
5632 code = POINTER_PLUS_EXPR;
5633 g = gimple_build_assign (iv, code, iv, t);
5634 gsi_insert_before_without_update (&gsi, g,
5635 GSI_SAME_STMT);
5636 break;
5637 }
5638 x = lang_hooks.decls.omp_clause_copy_ctor
5639 (c, unshare_expr (ivar), x);
5640 gimplify_and_add (x, &llist[0]);
5641 x = lang_hooks.decls.omp_clause_dtor (c, ivar);
5642 if (x)
5643 gimplify_and_add (x, &llist[1]);
5644 break;
5645 }
5646 if (omp_is_reference (var))
5647 {
5648 gcc_assert (TREE_CODE (new_var) == MEM_REF);
5649 tree new_vard = TREE_OPERAND (new_var, 0);
5650 gcc_assert (DECL_P (new_vard));
5651 tree type = TREE_TYPE (TREE_TYPE (new_vard));
5652 nx = TYPE_SIZE_UNIT (type);
5653 if (TREE_CONSTANT (nx))
5654 {
5655 nx = create_tmp_var_raw (type, get_name (var));
5656 gimple_add_tmp_var (nx);
5657 TREE_ADDRESSABLE (nx) = 1;
5658 nx = build_fold_addr_expr_loc (clause_loc, nx);
5659 nx = fold_convert_loc (clause_loc,
5660 TREE_TYPE (new_vard), nx);
5661 gimplify_assign (new_vard, nx, ilist);
5662 }
5663 }
5664 }
5665 x = lang_hooks.decls.omp_clause_copy_ctor
5666 (c, unshare_expr (new_var), x);
5667 gimplify_and_add (x, ilist);
5668 goto do_dtor;
5669
5670 case OMP_CLAUSE__LOOPTEMP_:
5671 case OMP_CLAUSE__REDUCTEMP_:
5672 gcc_assert (is_taskreg_ctx (ctx));
5673 x = build_outer_var_ref (var, ctx);
5674 x = build2 (MODIFY_EXPR, TREE_TYPE (new_var), new_var, x);
5675 gimplify_and_add (x, ilist);
5676 break;
5677
5678 case OMP_CLAUSE_COPYIN:
5679 by_ref = use_pointer_for_field (var, NULL);
5680 x = build_receiver_ref (var, by_ref, ctx);
5681 x = lang_hooks.decls.omp_clause_assign_op (c, new_var, x);
5682 append_to_statement_list (x, &copyin_seq);
5683 copyin_by_ref |= by_ref;
5684 break;
5685
5686 case OMP_CLAUSE_REDUCTION:
5687 case OMP_CLAUSE_IN_REDUCTION:
5688 /* OpenACC reductions are initialized using the
5689 GOACC_REDUCTION internal function. */
5690 if (is_gimple_omp_oacc (ctx->stmt))
5691 break;
5692 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
5693 {
5694 tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
5695 gimple *tseq;
5696 tree ptype = TREE_TYPE (placeholder);
5697 if (cond)
5698 {
5699 x = error_mark_node;
5700 if (OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c)
5701 && !task_reduction_needs_orig_p)
5702 x = var;
5703 else if (OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c))
5704 {
5705 tree pptype = build_pointer_type (ptype);
5706 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION)
5707 x = build4 (ARRAY_REF, ptr_type_node, tskred_avar,
5708 size_int (task_reduction_cnt_full
5709 + task_reduction_cntorig - 1),
5710 NULL_TREE, NULL_TREE);
5711 else
5712 {
5713 unsigned int idx
5714 = *ctx->task_reduction_map->get (c);
5715 x = task_reduction_read (ilist, tskred_temp,
5716 pptype, 7 + 3 * idx);
5717 }
5718 x = fold_convert (pptype, x);
5719 x = build_simple_mem_ref (x);
5720 }
5721 }
5722 else
5723 {
5724 x = build_outer_var_ref (var, ctx);
5725
5726 if (omp_is_reference (var)
5727 && !useless_type_conversion_p (ptype, TREE_TYPE (x)))
5728 x = build_fold_addr_expr_loc (clause_loc, x);
5729 }
5730 SET_DECL_VALUE_EXPR (placeholder, x);
5731 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
5732 tree new_vard = new_var;
5733 if (omp_is_reference (var))
5734 {
5735 gcc_assert (TREE_CODE (new_var) == MEM_REF);
5736 new_vard = TREE_OPERAND (new_var, 0);
5737 gcc_assert (DECL_P (new_vard));
5738 }
5739 tree rvar = NULL_TREE, *rvarp = NULL, rvar2 = NULL_TREE;
5740 if (is_simd
5741 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
5742 && OMP_CLAUSE_REDUCTION_INSCAN (c))
5743 rvarp = &rvar;
5744 if (is_simd
5745 && lower_rec_simd_input_clauses (new_var, ctx, &sctx,
5746 ivar, lvar, rvarp,
5747 &rvar2))
5748 {
5749 if (new_vard == new_var)
5750 {
5751 gcc_assert (DECL_VALUE_EXPR (new_var) == lvar);
5752 SET_DECL_VALUE_EXPR (new_var, ivar);
5753 }
5754 else
5755 {
5756 SET_DECL_VALUE_EXPR (new_vard,
5757 build_fold_addr_expr (ivar));
5758 DECL_HAS_VALUE_EXPR_P (new_vard) = 1;
5759 }
5760 x = lang_hooks.decls.omp_clause_default_ctor
5761 (c, unshare_expr (ivar),
5762 build_outer_var_ref (var, ctx));
5763 if (rvarp && ctx->for_simd_scan_phase)
5764 {
5765 if (x)
5766 gimplify_and_add (x, &llist[0]);
5767 x = lang_hooks.decls.omp_clause_dtor (c, ivar);
5768 if (x)
5769 gimplify_and_add (x, &llist[1]);
5770 break;
5771 }
5772 else if (rvarp)
5773 {
5774 if (x)
5775 {
5776 gimplify_and_add (x, &llist[0]);
5777
5778 tree ivar2 = unshare_expr (lvar);
5779 TREE_OPERAND (ivar2, 1) = sctx.idx;
5780 x = lang_hooks.decls.omp_clause_default_ctor
5781 (c, ivar2, build_outer_var_ref (var, ctx));
5782 gimplify_and_add (x, &llist[0]);
5783
5784 if (rvar2)
5785 {
5786 x = lang_hooks.decls.omp_clause_default_ctor
5787 (c, unshare_expr (rvar2),
5788 build_outer_var_ref (var, ctx));
5789 gimplify_and_add (x, &llist[0]);
5790 }
5791
5792 /* For types that need construction, add another
5793 private var which will be default constructed
5794 and optionally initialized with
5795 OMP_CLAUSE_REDUCTION_GIMPLE_INIT, as in the
5796 loop we want to assign this value instead of
5797 constructing and destructing it in each
5798 iteration. */
5799 tree nv = create_tmp_var_raw (TREE_TYPE (ivar));
5800 gimple_add_tmp_var (nv);
5801 ctx->cb.decl_map->put (TREE_OPERAND (rvar2
5802 ? rvar2
5803 : ivar, 0),
5804 nv);
5805 x = lang_hooks.decls.omp_clause_default_ctor
5806 (c, nv, build_outer_var_ref (var, ctx));
5807 gimplify_and_add (x, ilist);
5808
5809 if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c))
5810 {
5811 tseq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c);
5812 x = DECL_VALUE_EXPR (new_vard);
5813 tree vexpr = nv;
5814 if (new_vard != new_var)
5815 vexpr = build_fold_addr_expr (nv);
5816 SET_DECL_VALUE_EXPR (new_vard, vexpr);
5817 lower_omp (&tseq, ctx);
5818 SET_DECL_VALUE_EXPR (new_vard, x);
5819 gimple_seq_add_seq (ilist, tseq);
5820 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL;
5821 }
5822
5823 x = lang_hooks.decls.omp_clause_dtor (c, nv);
5824 if (x)
5825 gimplify_and_add (x, dlist);
5826 }
5827
5828 tree ref = build_outer_var_ref (var, ctx);
5829 x = unshare_expr (ivar);
5830 x = lang_hooks.decls.omp_clause_assign_op (c, x,
5831 ref);
5832 gimplify_and_add (x, &llist[0]);
5833
5834 ref = build_outer_var_ref (var, ctx);
5835 x = lang_hooks.decls.omp_clause_assign_op (c, ref,
5836 rvar);
5837 gimplify_and_add (x, &llist[3]);
5838
5839 DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
5840 if (new_vard == new_var)
5841 SET_DECL_VALUE_EXPR (new_var, lvar);
5842 else
5843 SET_DECL_VALUE_EXPR (new_vard,
5844 build_fold_addr_expr (lvar));
5845
5846 x = lang_hooks.decls.omp_clause_dtor (c, ivar);
5847 if (x)
5848 gimplify_and_add (x, &llist[1]);
5849
5850 tree ivar2 = unshare_expr (lvar);
5851 TREE_OPERAND (ivar2, 1) = sctx.idx;
5852 x = lang_hooks.decls.omp_clause_dtor (c, ivar2);
5853 if (x)
5854 gimplify_and_add (x, &llist[1]);
5855
5856 if (rvar2)
5857 {
5858 x = lang_hooks.decls.omp_clause_dtor (c, rvar2);
5859 if (x)
5860 gimplify_and_add (x, &llist[1]);
5861 }
5862 break;
5863 }
5864 if (x)
5865 gimplify_and_add (x, &llist[0]);
5866 if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c))
5867 {
5868 tseq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c);
5869 lower_omp (&tseq, ctx);
5870 gimple_seq_add_seq (&llist[0], tseq);
5871 }
5872 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL;
5873 tseq = OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c);
5874 lower_omp (&tseq, ctx);
5875 gimple_seq_add_seq (&llist[1], tseq);
5876 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
5877 DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
5878 if (new_vard == new_var)
5879 SET_DECL_VALUE_EXPR (new_var, lvar);
5880 else
5881 SET_DECL_VALUE_EXPR (new_vard,
5882 build_fold_addr_expr (lvar));
5883 x = lang_hooks.decls.omp_clause_dtor (c, ivar);
5884 if (x)
5885 gimplify_and_add (x, &llist[1]);
5886 break;
5887 }
5888 /* If this is a reference to constant size reduction var
5889 with placeholder, we haven't emitted the initializer
5890 for it because it is undesirable if SIMD arrays are used.
5891 But if they aren't used, we need to emit the deferred
5892 initialization now. */
5893 else if (omp_is_reference (var) && is_simd)
5894 handle_simd_reference (clause_loc, new_vard, ilist);
5895
5896 tree lab2 = NULL_TREE;
5897 if (cond)
5898 {
5899 gimple *g;
5900 if (!is_parallel_ctx (ctx))
5901 {
5902 tree condv = create_tmp_var (boolean_type_node);
5903 tree m = build_simple_mem_ref (cond);
5904 g = gimple_build_assign (condv, m);
5905 gimple_seq_add_stmt (ilist, g);
5906 tree lab1
5907 = create_artificial_label (UNKNOWN_LOCATION);
5908 lab2 = create_artificial_label (UNKNOWN_LOCATION);
5909 g = gimple_build_cond (NE_EXPR, condv,
5910 boolean_false_node,
5911 lab2, lab1);
5912 gimple_seq_add_stmt (ilist, g);
5913 gimple_seq_add_stmt (ilist,
5914 gimple_build_label (lab1));
5915 }
5916 g = gimple_build_assign (build_simple_mem_ref (cond),
5917 boolean_true_node);
5918 gimple_seq_add_stmt (ilist, g);
5919 }
5920 x = lang_hooks.decls.omp_clause_default_ctor
5921 (c, unshare_expr (new_var),
5922 cond ? NULL_TREE
5923 : build_outer_var_ref (var, ctx));
5924 if (x)
5925 gimplify_and_add (x, ilist);
5926
5927 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
5928 && OMP_CLAUSE_REDUCTION_INSCAN (c))
5929 {
5930 if (ctx->for_simd_scan_phase)
5931 goto do_dtor;
5932 if (x || (!is_simd
5933 && OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c)))
5934 {
5935 tree nv = create_tmp_var_raw (TREE_TYPE (new_var));
5936 gimple_add_tmp_var (nv);
5937 ctx->cb.decl_map->put (new_vard, nv);
5938 x = lang_hooks.decls.omp_clause_default_ctor
5939 (c, nv, build_outer_var_ref (var, ctx));
5940 if (x)
5941 gimplify_and_add (x, ilist);
5942 if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c))
5943 {
5944 tseq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c);
5945 tree vexpr = nv;
5946 if (new_vard != new_var)
5947 vexpr = build_fold_addr_expr (nv);
5948 SET_DECL_VALUE_EXPR (new_vard, vexpr);
5949 DECL_HAS_VALUE_EXPR_P (new_vard) = 1;
5950 lower_omp (&tseq, ctx);
5951 SET_DECL_VALUE_EXPR (new_vard, NULL_TREE);
5952 DECL_HAS_VALUE_EXPR_P (new_vard) = 0;
5953 gimple_seq_add_seq (ilist, tseq);
5954 }
5955 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL;
5956 if (is_simd && ctx->scan_exclusive)
5957 {
5958 tree nv2
5959 = create_tmp_var_raw (TREE_TYPE (new_var));
5960 gimple_add_tmp_var (nv2);
5961 ctx->cb.decl_map->put (nv, nv2);
5962 x = lang_hooks.decls.omp_clause_default_ctor
5963 (c, nv2, build_outer_var_ref (var, ctx));
5964 gimplify_and_add (x, ilist);
5965 x = lang_hooks.decls.omp_clause_dtor (c, nv2);
5966 if (x)
5967 gimplify_and_add (x, dlist);
5968 }
5969 x = lang_hooks.decls.omp_clause_dtor (c, nv);
5970 if (x)
5971 gimplify_and_add (x, dlist);
5972 }
5973 else if (is_simd
5974 && ctx->scan_exclusive
5975 && TREE_ADDRESSABLE (TREE_TYPE (new_var)))
5976 {
5977 tree nv2 = create_tmp_var_raw (TREE_TYPE (new_var));
5978 gimple_add_tmp_var (nv2);
5979 ctx->cb.decl_map->put (new_vard, nv2);
5980 x = lang_hooks.decls.omp_clause_dtor (c, nv2);
5981 if (x)
5982 gimplify_and_add (x, dlist);
5983 }
5984 DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
5985 goto do_dtor;
5986 }
5987
5988 if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c))
5989 {
5990 tseq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c);
5991 lower_omp (&tseq, ctx);
5992 gimple_seq_add_seq (ilist, tseq);
5993 }
5994 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL;
5995 if (is_simd)
5996 {
5997 tseq = OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c);
5998 lower_omp (&tseq, ctx);
5999 gimple_seq_add_seq (dlist, tseq);
6000 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
6001 }
6002 DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
6003 if (cond)
6004 {
6005 if (lab2)
6006 gimple_seq_add_stmt (ilist, gimple_build_label (lab2));
6007 break;
6008 }
6009 goto do_dtor;
6010 }
6011 else
6012 {
6013 x = omp_reduction_init (c, TREE_TYPE (new_var));
6014 gcc_assert (TREE_CODE (TREE_TYPE (new_var)) != ARRAY_TYPE);
6015 enum tree_code code = OMP_CLAUSE_REDUCTION_CODE (c);
6016
6017 if (cond)
6018 {
6019 gimple *g;
6020 tree lab2 = NULL_TREE;
6021 /* GOMP_taskgroup_reduction_register memsets the whole
6022 array to zero. If the initializer is zero, we don't
6023 need to initialize it again, just mark it as ever
6024 used unconditionally, i.e. cond = true. */
6025 if (initializer_zerop (x))
6026 {
6027 g = gimple_build_assign (build_simple_mem_ref (cond),
6028 boolean_true_node);
6029 gimple_seq_add_stmt (ilist, g);
6030 break;
6031 }
6032
6033 /* Otherwise, emit
6034 if (!cond) { cond = true; new_var = x; } */
6035 if (!is_parallel_ctx (ctx))
6036 {
6037 tree condv = create_tmp_var (boolean_type_node);
6038 tree m = build_simple_mem_ref (cond);
6039 g = gimple_build_assign (condv, m);
6040 gimple_seq_add_stmt (ilist, g);
6041 tree lab1
6042 = create_artificial_label (UNKNOWN_LOCATION);
6043 lab2 = create_artificial_label (UNKNOWN_LOCATION);
6044 g = gimple_build_cond (NE_EXPR, condv,
6045 boolean_false_node,
6046 lab2, lab1);
6047 gimple_seq_add_stmt (ilist, g);
6048 gimple_seq_add_stmt (ilist,
6049 gimple_build_label (lab1));
6050 }
6051 g = gimple_build_assign (build_simple_mem_ref (cond),
6052 boolean_true_node);
6053 gimple_seq_add_stmt (ilist, g);
6054 gimplify_assign (new_var, x, ilist);
6055 if (lab2)
6056 gimple_seq_add_stmt (ilist, gimple_build_label (lab2));
6057 break;
6058 }
6059
6060 /* reduction(-:var) sums up the partial results, so it
6061 acts identically to reduction(+:var). */
6062 if (code == MINUS_EXPR)
6063 code = PLUS_EXPR;
6064
6065 tree new_vard = new_var;
6066 if (is_simd && omp_is_reference (var))
6067 {
6068 gcc_assert (TREE_CODE (new_var) == MEM_REF);
6069 new_vard = TREE_OPERAND (new_var, 0);
6070 gcc_assert (DECL_P (new_vard));
6071 }
6072 tree rvar = NULL_TREE, *rvarp = NULL, rvar2 = NULL_TREE;
6073 if (is_simd
6074 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
6075 && OMP_CLAUSE_REDUCTION_INSCAN (c))
6076 rvarp = &rvar;
6077 if (is_simd
6078 && lower_rec_simd_input_clauses (new_var, ctx, &sctx,
6079 ivar, lvar, rvarp,
6080 &rvar2))
6081 {
6082 if (new_vard != new_var)
6083 {
6084 SET_DECL_VALUE_EXPR (new_vard,
6085 build_fold_addr_expr (lvar));
6086 DECL_HAS_VALUE_EXPR_P (new_vard) = 1;
6087 }
6088
6089 tree ref = build_outer_var_ref (var, ctx);
6090
6091 if (rvarp)
6092 {
6093 if (ctx->for_simd_scan_phase)
6094 break;
6095 gimplify_assign (ivar, ref, &llist[0]);
6096 ref = build_outer_var_ref (var, ctx);
6097 gimplify_assign (ref, rvar, &llist[3]);
6098 break;
6099 }
6100
6101 gimplify_assign (unshare_expr (ivar), x, &llist[0]);
6102
6103 if (sctx.is_simt)
6104 {
6105 if (!simt_lane)
6106 simt_lane = create_tmp_var (unsigned_type_node);
6107 x = build_call_expr_internal_loc
6108 (UNKNOWN_LOCATION, IFN_GOMP_SIMT_XCHG_BFLY,
6109 TREE_TYPE (ivar), 2, ivar, simt_lane);
6110 x = build2 (code, TREE_TYPE (ivar), ivar, x);
6111 gimplify_assign (ivar, x, &llist[2]);
6112 }
6113 x = build2 (code, TREE_TYPE (ref), ref, ivar);
6114 ref = build_outer_var_ref (var, ctx);
6115 gimplify_assign (ref, x, &llist[1]);
6116
6117 }
6118 else
6119 {
6120 if (omp_is_reference (var) && is_simd)
6121 handle_simd_reference (clause_loc, new_vard, ilist);
6122 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
6123 && OMP_CLAUSE_REDUCTION_INSCAN (c))
6124 break;
6125 gimplify_assign (new_var, x, ilist);
6126 if (is_simd)
6127 {
6128 tree ref = build_outer_var_ref (var, ctx);
6129
6130 x = build2 (code, TREE_TYPE (ref), ref, new_var);
6131 ref = build_outer_var_ref (var, ctx);
6132 gimplify_assign (ref, x, dlist);
6133 }
6134 }
6135 }
6136 break;
6137
6138 default:
6139 gcc_unreachable ();
6140 }
6141 }
6142 }
6143 if (tskred_avar)
6144 {
6145 tree clobber = build_clobber (TREE_TYPE (tskred_avar));
6146 gimple_seq_add_stmt (ilist, gimple_build_assign (tskred_avar, clobber));
6147 }
6148
6149 if (known_eq (sctx.max_vf, 1U))
6150 {
6151 sctx.is_simt = false;
6152 if (ctx->lastprivate_conditional_map)
6153 {
6154 if (gimple_omp_for_combined_into_p (ctx->stmt))
6155 {
6156 /* Signal to lower_omp_1 that it should use parent context. */
6157 ctx->combined_into_simd_safelen1 = true;
6158 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
6159 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
6160 && OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c))
6161 {
6162 tree o = lookup_decl (OMP_CLAUSE_DECL (c), ctx);
6163 omp_context *outer = ctx->outer;
6164 if (gimple_code (outer->stmt) == GIMPLE_OMP_SCAN)
6165 outer = outer->outer;
6166 tree *v = ctx->lastprivate_conditional_map->get (o);
6167 tree po = lookup_decl (OMP_CLAUSE_DECL (c), outer);
6168 tree *pv = outer->lastprivate_conditional_map->get (po);
6169 *v = *pv;
6170 }
6171 }
6172 else
6173 {
6174 /* When not vectorized, treat lastprivate(conditional:) like
6175 normal lastprivate, as there will be just one simd lane
6176 writing the privatized variable. */
6177 delete ctx->lastprivate_conditional_map;
6178 ctx->lastprivate_conditional_map = NULL;
6179 }
6180 }
6181 }
6182
6183 if (nonconst_simd_if)
6184 {
6185 if (sctx.lane == NULL_TREE)
6186 {
6187 sctx.idx = create_tmp_var (unsigned_type_node);
6188 sctx.lane = create_tmp_var (unsigned_type_node);
6189 }
6190 /* FIXME: For now. */
6191 sctx.is_simt = false;
6192 }
6193
6194 if (sctx.lane || sctx.is_simt)
6195 {
6196 uid = create_tmp_var (ptr_type_node, "simduid");
6197 /* Don't want uninit warnings on simduid, it is always uninitialized,
6198 but we use it not for the value, but for the DECL_UID only. */
6199 TREE_NO_WARNING (uid) = 1;
6200 c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__SIMDUID_);
6201 OMP_CLAUSE__SIMDUID__DECL (c) = uid;
6202 OMP_CLAUSE_CHAIN (c) = gimple_omp_for_clauses (ctx->stmt);
6203 gimple_omp_for_set_clauses (ctx->stmt, c);
6204 }
6205 /* Emit calls denoting privatized variables and initializing a pointer to
6206 structure that holds private variables as fields after ompdevlow pass. */
6207 if (sctx.is_simt)
6208 {
6209 sctx.simt_eargs[0] = uid;
6210 gimple *g
6211 = gimple_build_call_internal_vec (IFN_GOMP_SIMT_ENTER, sctx.simt_eargs);
6212 gimple_call_set_lhs (g, uid);
6213 gimple_seq_add_stmt (ilist, g);
6214 sctx.simt_eargs.release ();
6215
6216 simtrec = create_tmp_var (ptr_type_node, ".omp_simt");
6217 g = gimple_build_call_internal (IFN_GOMP_SIMT_ENTER_ALLOC, 1, uid);
6218 gimple_call_set_lhs (g, simtrec);
6219 gimple_seq_add_stmt (ilist, g);
6220 }
6221 if (sctx.lane)
6222 {
6223 gimple *g = gimple_build_call_internal (IFN_GOMP_SIMD_LANE,
6224 2 + (nonconst_simd_if != NULL),
6225 uid, integer_zero_node,
6226 nonconst_simd_if);
6227 gimple_call_set_lhs (g, sctx.lane);
6228 gimple_stmt_iterator gsi = gsi_start_1 (gimple_omp_body_ptr (ctx->stmt));
6229 gsi_insert_before_without_update (&gsi, g, GSI_SAME_STMT);
6230 g = gimple_build_assign (sctx.lane, INTEGER_CST,
6231 build_int_cst (unsigned_type_node, 0));
6232 gimple_seq_add_stmt (ilist, g);
6233 if (sctx.lastlane)
6234 {
6235 g = gimple_build_call_internal (IFN_GOMP_SIMD_LAST_LANE,
6236 2, uid, sctx.lane);
6237 gimple_call_set_lhs (g, sctx.lastlane);
6238 gimple_seq_add_stmt (dlist, g);
6239 gimple_seq_add_seq (dlist, llist[3]);
6240 }
6241 /* Emit reductions across SIMT lanes in log_2(simt_vf) steps. */
6242 if (llist[2])
6243 {
6244 tree simt_vf = create_tmp_var (unsigned_type_node);
6245 g = gimple_build_call_internal (IFN_GOMP_SIMT_VF, 0);
6246 gimple_call_set_lhs (g, simt_vf);
6247 gimple_seq_add_stmt (dlist, g);
6248
6249 tree t = build_int_cst (unsigned_type_node, 1);
6250 g = gimple_build_assign (simt_lane, INTEGER_CST, t);
6251 gimple_seq_add_stmt (dlist, g);
6252
6253 t = build_int_cst (unsigned_type_node, 0);
6254 g = gimple_build_assign (sctx.idx, INTEGER_CST, t);
6255 gimple_seq_add_stmt (dlist, g);
6256
6257 tree body = create_artificial_label (UNKNOWN_LOCATION);
6258 tree header = create_artificial_label (UNKNOWN_LOCATION);
6259 tree end = create_artificial_label (UNKNOWN_LOCATION);
6260 gimple_seq_add_stmt (dlist, gimple_build_goto (header));
6261 gimple_seq_add_stmt (dlist, gimple_build_label (body));
6262
6263 gimple_seq_add_seq (dlist, llist[2]);
6264
6265 g = gimple_build_assign (simt_lane, LSHIFT_EXPR, simt_lane, integer_one_node);
6266 gimple_seq_add_stmt (dlist, g);
6267
6268 gimple_seq_add_stmt (dlist, gimple_build_label (header));
6269 g = gimple_build_cond (LT_EXPR, simt_lane, simt_vf, body, end);
6270 gimple_seq_add_stmt (dlist, g);
6271
6272 gimple_seq_add_stmt (dlist, gimple_build_label (end));
6273 }
6274 for (int i = 0; i < 2; i++)
6275 if (llist[i])
6276 {
6277 tree vf = create_tmp_var (unsigned_type_node);
6278 g = gimple_build_call_internal (IFN_GOMP_SIMD_VF, 1, uid);
6279 gimple_call_set_lhs (g, vf);
6280 gimple_seq *seq = i == 0 ? ilist : dlist;
6281 gimple_seq_add_stmt (seq, g);
6282 tree t = build_int_cst (unsigned_type_node, 0);
6283 g = gimple_build_assign (sctx.idx, INTEGER_CST, t);
6284 gimple_seq_add_stmt (seq, g);
6285 tree body = create_artificial_label (UNKNOWN_LOCATION);
6286 tree header = create_artificial_label (UNKNOWN_LOCATION);
6287 tree end = create_artificial_label (UNKNOWN_LOCATION);
6288 gimple_seq_add_stmt (seq, gimple_build_goto (header));
6289 gimple_seq_add_stmt (seq, gimple_build_label (body));
6290 gimple_seq_add_seq (seq, llist[i]);
6291 t = build_int_cst (unsigned_type_node, 1);
6292 g = gimple_build_assign (sctx.idx, PLUS_EXPR, sctx.idx, t);
6293 gimple_seq_add_stmt (seq, g);
6294 gimple_seq_add_stmt (seq, gimple_build_label (header));
6295 g = gimple_build_cond (LT_EXPR, sctx.idx, vf, body, end);
6296 gimple_seq_add_stmt (seq, g);
6297 gimple_seq_add_stmt (seq, gimple_build_label (end));
6298 }
6299 }
6300 if (sctx.is_simt)
6301 {
6302 gimple_seq_add_seq (dlist, sctx.simt_dlist);
6303 gimple *g
6304 = gimple_build_call_internal (IFN_GOMP_SIMT_EXIT, 1, simtrec);
6305 gimple_seq_add_stmt (dlist, g);
6306 }
6307
6308 /* The copyin sequence is not to be executed by the main thread, since
6309 that would result in self-copies. Perhaps not visible to scalars,
6310 but it certainly is to C++ operator=. */
6311 if (copyin_seq)
6312 {
6313 x = build_call_expr (builtin_decl_explicit (BUILT_IN_OMP_GET_THREAD_NUM),
6314 0);
6315 x = build2 (NE_EXPR, boolean_type_node, x,
6316 build_int_cst (TREE_TYPE (x), 0));
6317 x = build3 (COND_EXPR, void_type_node, x, copyin_seq, NULL);
6318 gimplify_and_add (x, ilist);
6319 }
6320
6321 /* If any copyin variable is passed by reference, we must ensure the
6322 master thread doesn't modify it before it is copied over in all
6323 threads. Similarly for variables in both firstprivate and
6324 lastprivate clauses we need to ensure the lastprivate copying
6325 happens after firstprivate copying in all threads. And similarly
6326 for UDRs if initializer expression refers to omp_orig. */
6327 if (copyin_by_ref || lastprivate_firstprivate
6328 || (reduction_omp_orig_ref
6329 && !ctx->scan_inclusive
6330 && !ctx->scan_exclusive))
6331 {
6332 /* Don't add any barrier for #pragma omp simd or
6333 #pragma omp distribute. */
6334 if (!is_task_ctx (ctx)
6335 && (gimple_code (ctx->stmt) != GIMPLE_OMP_FOR
6336 || gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_FOR))
6337 gimple_seq_add_stmt (ilist, omp_build_barrier (NULL_TREE));
6338 }
6339
6340 /* If max_vf is non-zero, then we can use only a vectorization factor
6341 up to the max_vf we chose. So stick it into the safelen clause. */
6342 if (maybe_ne (sctx.max_vf, 0U))
6343 {
6344 tree c = omp_find_clause (gimple_omp_for_clauses (ctx->stmt),
6345 OMP_CLAUSE_SAFELEN);
6346 poly_uint64 safe_len;
6347 if (c == NULL_TREE
6348 || (poly_int_tree_p (OMP_CLAUSE_SAFELEN_EXPR (c), &safe_len)
6349 && maybe_gt (safe_len, sctx.max_vf)))
6350 {
6351 c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE_SAFELEN);
6352 OMP_CLAUSE_SAFELEN_EXPR (c) = build_int_cst (integer_type_node,
6353 sctx.max_vf);
6354 OMP_CLAUSE_CHAIN (c) = gimple_omp_for_clauses (ctx->stmt);
6355 gimple_omp_for_set_clauses (ctx->stmt, c);
6356 }
6357 }
6358 }
6359
6360 /* Create temporary variables for lastprivate(conditional:) implementation
 in context CTX with CLAUSES. For each conditional lastprivate clause a
 per-thread counter temporary of the iteration type is created and recorded
 in CTX->lastprivate_conditional_map keyed by the privatized decl; for the
 non-simd case a buffer pointer (_condtemp_) and an iterator temporary are
 also set up and spliced into *CLAUSES. */
6361 in context CTX with CLAUSES. */
6362
6363 static void
6364 lower_lastprivate_conditional_clauses (tree *clauses, omp_context *ctx)
6365 {
6366 tree iter_type = NULL_TREE;
6367 tree cond_ptr = NULL_TREE;
6368 tree iter_var = NULL_TREE;
6369 bool is_simd = (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
6370 && gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_SIMD)
/* NEXT is only used in the simd case: it advances past each _CONDTEMP_
 clause already consumed, so every conditional lastprivate pairs with the
 next unconsumed _CONDTEMP_ clause in the chain. */
6371 tree next = *clauses;
6372 for (tree c = *clauses; c; c = OMP_CLAUSE_CHAIN (c))
6373 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
6374 && OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c))
6375 {
6376 if (is_simd)
6377 {
/* For simd a _CONDTEMP_ clause must already exist for each conditional
 lastprivate (asserted below); just pair them up in the map. */
6378 tree cc = omp_find_clause (next, OMP_CLAUSE__CONDTEMP_);
6379 gcc_assert (cc);
6380 if (iter_type == NULL_TREE)
6381 {
/* First conditional lastprivate seen: take the counter type from the
 existing _CONDTEMP_ decl, create the iterator temporary and prepend
 an iterator-marked _CONDTEMP_ clause for it. */
6382 iter_type = TREE_TYPE (OMP_CLAUSE_DECL (cc));
6383 iter_var = create_tmp_var_raw (iter_type);
6384 DECL_CONTEXT (iter_var) = current_function_decl;
6385 DECL_SEEN_IN_BIND_EXPR_P (iter_var) = 1;
6386 DECL_CHAIN (iter_var) = ctx->block_vars;
6387 ctx->block_vars = iter_var;
6388 tree c3
6389 = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__CONDTEMP_);
6390 OMP_CLAUSE__CONDTEMP__ITER (c3) = 1;
6391 OMP_CLAUSE_DECL (c3) = iter_var;
6392 OMP_CLAUSE_CHAIN (c3) = *clauses;
6393 *clauses = c3;
6394 ctx->lastprivate_conditional_map = new hash_map<tree, tree>;
6395 }
6396 next = OMP_CLAUSE_CHAIN (cc);
6397 tree o = lookup_decl (OMP_CLAUSE_DECL (c), ctx);
6398 tree v = lookup_decl (OMP_CLAUSE_DECL (cc), ctx);
6399 ctx->lastprivate_conditional_map->put (o, v);
6400 continue;
6401 }
6402 if (iter_type == NULL)
6403 {
/* Non-simd first clause: derive the counter type from the loop's
 iteration type (for GIMPLE_OMP_FOR) or use unsigned int for
 GIMPLE_OMP_SECTIONS. */
6404 if (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR)
6405 {
6406 struct omp_for_data fd;
6407 omp_extract_for_data (as_a <gomp_for *> (ctx->stmt), &fd,
6408 NULL);
6409 iter_type = unsigned_type_for (fd.iter_type);
6410 }
6411 else if (gimple_code (ctx->stmt) == GIMPLE_OMP_SECTIONS)
6412 iter_type = unsigned_type_node;
/* Reuse an existing _CONDTEMP_ clause's decl as the buffer pointer if
 one is present; otherwise create the pointer temporary and a fresh
 _CONDTEMP_ clause for it. */
6413 tree c2 = omp_find_clause (*clauses, OMP_CLAUSE__CONDTEMP_);
6414 if (c2)
6415 {
6416 cond_ptr
6417 = lookup_decl_in_outer_ctx (OMP_CLAUSE_DECL (c2), ctx);
6418 OMP_CLAUSE_DECL (c2) = cond_ptr;
6419 }
6420 else
6421 {
6422 cond_ptr = create_tmp_var_raw (build_pointer_type (iter_type));
6423 DECL_CONTEXT (cond_ptr) = current_function_decl;
6424 DECL_SEEN_IN_BIND_EXPR_P (cond_ptr) = 1;
6425 DECL_CHAIN (cond_ptr) = ctx->block_vars;
6426 ctx->block_vars = cond_ptr;
6427 c2 = build_omp_clause (UNKNOWN_LOCATION,
6428 OMP_CLAUSE__CONDTEMP_);
6429 OMP_CLAUSE_DECL (c2) = cond_ptr;
6430 OMP_CLAUSE_CHAIN (c2) = *clauses;
6431 *clauses = c2;
6432 }
/* Iterator temporary, chained right after the buffer-pointer clause and
 marked with _CONDTEMP__ITER so later phases can tell them apart. */
6433 iter_var = create_tmp_var_raw (iter_type);
6434 DECL_CONTEXT (iter_var) = current_function_decl;
6435 DECL_SEEN_IN_BIND_EXPR_P (iter_var) = 1;
6436 DECL_CHAIN (iter_var) = ctx->block_vars;
6437 ctx->block_vars = iter_var;
6438 tree c3
6439 = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__CONDTEMP_);
6440 OMP_CLAUSE__CONDTEMP__ITER (c3) = 1;
6441 OMP_CLAUSE_DECL (c3) = iter_var;
6442 OMP_CLAUSE_CHAIN (c3) = OMP_CLAUSE_CHAIN (c2);
6443 OMP_CLAUSE_CHAIN (c2) = c3;
6444 ctx->lastprivate_conditional_map = new hash_map<tree, tree>;
6445 }
/* Per-clause counter temporary, recorded in the map keyed by the
 privatized decl. */
6446 tree v = create_tmp_var_raw (iter_type);
6447 DECL_CONTEXT (v) = current_function_decl;
6448 DECL_SEEN_IN_BIND_EXPR_P (v) = 1;
6449 DECL_CHAIN (v) = ctx->block_vars;
6450 ctx->block_vars = v;
6451 tree o = lookup_decl (OMP_CLAUSE_DECL (c), ctx);
6452 ctx->lastprivate_conditional_map->put (o, v);
6453 }
6454 }
6455
6456
6457 /* Generate code to implement the LASTPRIVATE clauses. This is used for
6458 both parallel and workshare constructs. PREDICATE may be NULL if it's
6459 always true. BODY_P is the sequence to insert early initialization
6460 if needed, STMT_LIST is where the non-conditional lastprivate handling
6461 goes into and CSTMT_LIST is a sequence that needs to be run in a critical
6462 section. */
6463
6464 static void
6465 lower_lastprivate_clauses (tree clauses, tree predicate, gimple_seq *body_p,
6466 gimple_seq *stmt_list, gimple_seq *cstmt_list,
6467 omp_context *ctx)
6468 {
6469 tree x, c, label = NULL, orig_clauses = clauses;
6470 bool par_clauses = false;
6471 tree simduid = NULL, lastlane = NULL, simtcond = NULL, simtlast = NULL;
/* Running byte (pointer case) or element (array case) offset into the
 conditional-lastprivate buffer; bumped per clause below. */
6472 unsigned HOST_WIDE_INT conditional_off = 0;
/* Statements to be appended after LABEL, i.e. outside the predicate
 guard; used for the combined_into_simd_safelen1 case. */
6473 gimple_seq post_stmt_list = NULL;
6474
6475 /* Early exit if there are no lastprivate or linear clauses. */
6476 for (; clauses ; clauses = OMP_CLAUSE_CHAIN (clauses))
6477 if (OMP_CLAUSE_CODE (clauses) == OMP_CLAUSE_LASTPRIVATE
6478 || (OMP_CLAUSE_CODE (clauses) == OMP_CLAUSE_LINEAR
6479 && !OMP_CLAUSE_LINEAR_NO_COPYOUT (clauses)))
6480 break;
6481 if (clauses == NULL)
6482 {
6483 /* If this was a workshare clause, see if it had been combined
6484 with its parallel. In that case, look for the clauses on the
6485 parallel statement itself. */
6486 if (is_parallel_ctx (ctx))
6487 return;
6488
6489 ctx = ctx->outer;
6490 if (ctx == NULL || !is_parallel_ctx (ctx))
6491 return;
6492
6493 clauses = omp_find_clause (gimple_omp_parallel_clauses (ctx->stmt),
6494 OMP_CLAUSE_LASTPRIVATE)
6495 if (clauses == NULL)
6496 return;
6497 par_clauses = true;
6498 }
6499
/* For simd loops, note whether SIMT lowering is in effect (_simt_ clause)
 and pick up the simduid used to index "omp simd array" copies below. */
6500 bool maybe_simt = false;
6501 if (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
6502 && gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_SIMD)
6503 {
6504 maybe_simt = omp_find_clause (orig_clauses, OMP_CLAUSE__SIMT_);
6505 simduid = omp_find_clause (orig_clauses, OMP_CLAUSE__SIMDUID_);
6506 if (simduid)
6507 simduid = OMP_CLAUSE__SIMDUID__DECL (simduid);
6508 }
6509
/* Guard the copy-out with PREDICATE: emit a conditional branch to
 LABEL_TRUE (copy-out follows) or LABEL (skip, emitted at the end). */
6510 if (predicate)
6511 {
6512 gcond *stmt;
6513 tree label_true, arm1, arm2;
6514 enum tree_code pred_code = TREE_CODE (predicate);
6515
6516 label = create_artificial_label (UNKNOWN_LOCATION);
6517 label_true = create_artificial_label (UNKNOWN_LOCATION);
6518 if (TREE_CODE_CLASS (pred_code) == tcc_comparison)
6519 {
6520 arm1 = TREE_OPERAND (predicate, 0);
6521 arm2 = TREE_OPERAND (predicate, 1);
6522 gimplify_expr (&arm1, stmt_list, NULL, is_gimple_val, fb_rvalue);
6523 gimplify_expr (&arm2, stmt_list, NULL, is_gimple_val, fb_rvalue);
6524 }
6525 else
6526 {
6527 arm1 = predicate;
6528 gimplify_expr (&arm1, stmt_list, NULL, is_gimple_val, fb_rvalue);
6529 arm2 = boolean_false_node;
6530 pred_code = NE_EXPR;
6531 }
6532 if (maybe_simt)
6533 {
/* Under SIMT, fold the per-lane predicate through
 IFN_GOMP_SIMT_VOTE_ANY before branching, so the branch condition
 is uniform across lanes. */
6534 c = build2 (pred_code, boolean_type_node, arm1, arm2);
6535 c = fold_convert (integer_type_node, c);
6536 simtcond = create_tmp_var (integer_type_node);
6537 gimplify_assign (simtcond, c, stmt_list);
6538 gcall *g = gimple_build_call_internal (IFN_GOMP_SIMT_VOTE_ANY,
6539 1, simtcond);
6540 c = create_tmp_var (integer_type_node);
6541 gimple_call_set_lhs (g, c);
6542 gimple_seq_add_stmt (stmt_list, g);
6543 stmt = gimple_build_cond (NE_EXPR, c, integer_zero_node,
6544 label_true, label);
6545 }
6546 else
6547 stmt = gimple_build_cond (pred_code, arm1, arm2, label_true, label);
6548 gimple_seq_add_stmt (stmt_list, stmt);
6549 gimple_seq_add_stmt (stmt_list, gimple_build_label (label_true));
6550 }
6551
6552 tree cond_ptr = NULL_TREE;
6553 for (c = clauses; c ;)
6554 {
6555 tree var, new_var;
6556 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
6557 gimple_seq *this_stmt_list = stmt_list;
6558 tree lab2 = NULL_TREE;
6559
/* Conditional lastprivate, non-simd path: V is this thread's counter
 (zero-initialized early in BODY_P); inside the critical section
 (CSTMT_LIST) compare V against the shared slot in the COND_PTR buffer
 and, if V is greater, store the counter (and below, the value). */
6560 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
6561 && OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c)
6562 && ctx->lastprivate_conditional_map
6563 && !ctx->combined_into_simd_safelen1)
6564 {
6565 gcc_assert (body_p);
6566 if (simduid)
6567 goto next;
6568 if (cond_ptr == NULL_TREE)
6569 {
6570 cond_ptr = omp_find_clause (orig_clauses, OMP_CLAUSE__CONDTEMP_);
6571 cond_ptr = OMP_CLAUSE_DECL (cond_ptr);
6572 }
6573 tree type = TREE_TYPE (TREE_TYPE (cond_ptr));
6574 tree o = lookup_decl (OMP_CLAUSE_DECL (c), ctx);
6575 tree v = *ctx->lastprivate_conditional_map->get (o);
6576 gimplify_assign (v, build_zero_cst (type), body_p);
6577 this_stmt_list = cstmt_list;
6578 tree mem;
6579 if (POINTER_TYPE_P (TREE_TYPE (cond_ptr)))
6580 {
6581 mem = build2 (MEM_REF, type, cond_ptr,
6582 build_int_cst (TREE_TYPE (cond_ptr),
6583 conditional_off));
6584 conditional_off += tree_to_uhwi (TYPE_SIZE_UNIT (type));
6585 }
6586 else
6587 mem = build4 (ARRAY_REF, type, cond_ptr,
6588 size_int (conditional_off++), NULL_TREE, NULL_TREE);
6589 tree mem2 = copy_node (mem);
6590 gimple_seq seq = NULL;
6591 mem = force_gimple_operand (mem, &seq, true, NULL_TREE);
6592 gimple_seq_add_seq (this_stmt_list, seq);
6593 tree lab1 = create_artificial_label (UNKNOWN_LOCATION);
6594 lab2 = create_artificial_label (UNKNOWN_LOCATION);
6595 gimple *g = gimple_build_cond (GT_EXPR, v, mem, lab1, lab2);
6596 gimple_seq_add_stmt (this_stmt_list, g);
6597 gimple_seq_add_stmt (this_stmt_list, gimple_build_label (lab1));
6598 gimplify_assign (mem2, v, this_stmt_list);
6599 }
/* Conditional lastprivate combined into a safelen(1) simd: divert the
 copy-out into POST_STMT_LIST, which is appended after LABEL at the
 end of this function (i.e. outside the predicate guard). */
6600 else if (predicate
6601 && ctx->combined_into_simd_safelen1
6602 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
6603 && OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c)
6604 && ctx->lastprivate_conditional_map)
6605 this_stmt_list = &post_stmt_list;
6606
6607 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
6608 || (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
6609 && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c)))
6610 {
6611 var = OMP_CLAUSE_DECL (c);
6612 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
6613 && OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c)
6614 && is_taskloop_ctx (ctx))
6615 {
6616 gcc_checking_assert (ctx->outer && is_task_ctx (ctx->outer));
6617 new_var = lookup_decl (var, ctx->outer);
6618 }
6619 else
6620 {
6621 new_var = lookup_decl (var, ctx);
6622 /* Avoid uninitialized warnings for lastprivate and
6623 for linear iterators. */
6624 if (predicate
6625 && (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
6626 || OMP_CLAUSE_LINEAR_NO_COPYIN (c)))
6627 TREE_NO_WARNING (new_var) = 1;
6628 }
6629
/* If the privatized var lives in a per-lane "omp simd array", read the
 element of the last lane, computed once via IFN_GOMP_SIMD_LAST_LANE. */
6630 if (!maybe_simt && simduid && DECL_HAS_VALUE_EXPR_P (new_var))
6631 {
6632 tree val = DECL_VALUE_EXPR (new_var);
6633 if (TREE_CODE (val) == ARRAY_REF
6634 && VAR_P (TREE_OPERAND (val, 0))
6635 && lookup_attribute ("omp simd array",
6636 DECL_ATTRIBUTES (TREE_OPERAND (val,
6637 0))))
6638 {
6639 if (lastlane == NULL)
6640 {
6641 lastlane = create_tmp_var (unsigned_type_node);
6642 gcall *g
6643 = gimple_build_call_internal (IFN_GOMP_SIMD_LAST_LANE,
6644 2, simduid,
6645 TREE_OPERAND (val, 1));
6646 gimple_call_set_lhs (g, lastlane);
6647 gimple_seq_add_stmt (this_stmt_list, g);
6648 }
6649 new_var = build4 (ARRAY_REF, TREE_TYPE (val),
6650 TREE_OPERAND (val, 0), lastlane,
6651 NULL_TREE, NULL_TREE);
6652 TREE_THIS_NOTRAP (new_var) = 1;
6653 }
6654 }
/* SIMT: fetch the value from the last active lane (computed once via
 IFN_GOMP_SIMT_LAST_LANE) with a lane-index exchange. */
6655 else if (maybe_simt)
6656 {
6657 tree val = (DECL_HAS_VALUE_EXPR_P (new_var)
6658 ? DECL_VALUE_EXPR (new_var)
6659 : new_var);
6660 if (simtlast == NULL)
6661 {
6662 simtlast = create_tmp_var (unsigned_type_node);
6663 gcall *g = gimple_build_call_internal
6664 (IFN_GOMP_SIMT_LAST_LANE, 1, simtcond);
6665 gimple_call_set_lhs (g, simtlast);
6666 gimple_seq_add_stmt (this_stmt_list, g);
6667 }
6668 x = build_call_expr_internal_loc
6669 (UNKNOWN_LOCATION, IFN_GOMP_SIMT_XCHG_IDX,
6670 TREE_TYPE (val), 2, val, simtlast);
6671 new_var = unshare_expr (new_var);
6672 gimplify_assign (new_var, x, this_stmt_list);
6673 new_var = unshare_expr (new_var);
6674 }
6675
/* Emit any clause-attached helper sequence (e.g. linear step update)
 before the copy-out, lowering it in this context first. */
6676 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
6677 && OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c))
6678 {
6679 lower_omp (&OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c), ctx);
6680 gimple_seq_add_seq (this_stmt_list,
6681 OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c));
6682 OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c) = NULL;
6683 }
6684 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
6685 && OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c))
6686 {
6687 lower_omp (&OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c), ctx);
6688 gimple_seq_add_seq (this_stmt_list,
6689 OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c));
6690 OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c) = NULL;
6691 }
6692
/* Pick the copy-out destination: for a taskloop IV whose outer var is a
 global, store to it directly; otherwise build the outer-context ref. */
6693 x = NULL_TREE;
6694 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
6695 && OMP_CLAUSE_LASTPRIVATE_LOOP_IV (c)
6696 && is_taskloop_ctx (ctx))
6697 {
6698 tree ovar = maybe_lookup_decl_in_outer_ctx (var,
6699 ctx->outer->outer);
6700 if (is_global_var (ovar))
6701 x = ovar;
6702 }
6703 if (!x)
6704 x = build_outer_var_ref (var, ctx, OMP_CLAUSE_LASTPRIVATE);
6705 if (omp_is_reference (var))
6706 new_var = build_simple_mem_ref_loc (clause_loc, new_var);
6707 x = lang_hooks.decls.omp_clause_assign_op (c, x, new_var);
6708 gimplify_and_add (x, this_stmt_list);
6709
/* LAB2 closes the conditional-lastprivate "counter is newer" branch
 opened above. */
6710 if (lab2)
6711 gimple_seq_add_stmt (this_stmt_list, gimple_build_label (lab2));
6712 }
6713
6714 next:
6715 c = OMP_CLAUSE_CHAIN (c);
6716 if (c == NULL && !par_clauses)
6717 {
6718 /* If this was a workshare clause, see if it had been combined
6719 with its parallel. In that case, continue looking for the
6720 clauses also on the parallel statement itself. */
6721 if (is_parallel_ctx (ctx))
6722 break;
6723
6724 ctx = ctx->outer;
6725 if (ctx == NULL || !is_parallel_ctx (ctx))
6726 break;
6727
6728 c = omp_find_clause (gimple_omp_parallel_clauses (ctx->stmt),
6729 OMP_CLAUSE_LASTPRIVATE);
6730 par_clauses = true;
6731 }
6732 }
6733
6734 if (label)
6735 gimple_seq_add_stmt (stmt_list, gimple_build_label (label));
6736 gimple_seq_add_seq (stmt_list, post_stmt_list);
6737 }
6738
6739 /* Lower the OpenACC reductions of CLAUSES for compute axis LEVEL
6740 (which might be a placeholder). INNER is true if this is an inner
6741 axis of a multi-axis loop. FORK and JOIN are (optional) fork and
6742 join markers. Generate the before-loop forking sequence in
6743 FORK_SEQ and the after-loop joining sequence to JOIN_SEQ. The
6744 general form of these sequences is
6745
6746 GOACC_REDUCTION_SETUP
6747 GOACC_FORK
6748 GOACC_REDUCTION_INIT
6749 ...
6750 GOACC_REDUCTION_FINI
6751 GOACC_JOIN
6752 GOACC_REDUCTION_TEARDOWN. */
6753
6754 static void
6755 lower_oacc_reductions (location_t loc, tree clauses, tree level, bool inner,
6756 gcall *fork, gcall *join, gimple_seq *fork_seq,
6757 gimple_seq *join_seq, omp_context *ctx)
6758 {
/* The four per-phase sub-sequences; stitched around FORK/JOIN at the end
 so SETUP precedes the fork, INIT follows it, FINI precedes the join and
 TEARDOWN follows it, matching the diagram above. */
6759 gimple_seq before_fork = NULL;
6760 gimple_seq after_fork = NULL;
6761 gimple_seq before_join = NULL;
6762 gimple_seq after_join = NULL;
6763 tree init_code = NULL_TREE, fini_code = NULL_TREE,
6764 setup_code = NULL_TREE, teardown_code = NULL_TREE;
/* Running offset into the reduction buffer shared with the target. */
6765 unsigned offset = 0;
6766
6767 for (tree c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
6768 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION)
6769 {
6770 tree orig = OMP_CLAUSE_DECL (c);
6771 tree var = maybe_lookup_decl (orig, ctx);
6772 tree ref_to_res = NULL_TREE;
6773 tree incoming, outgoing, v1, v2, v3;
6774 bool is_private = false;
6775
/* Normalize the reduction code: MINUS reduces identically to PLUS, and
 the short-circuit TRUTH_* operators map to their bitwise equivalents.
 The code is then passed to the GOACC_REDUCTION calls as an integer. */
6776 enum tree_code rcode = OMP_CLAUSE_REDUCTION_CODE (c);
6777 if (rcode == MINUS_EXPR)
6778 rcode = PLUS_EXPR;
6779 else if (rcode == TRUTH_ANDIF_EXPR)
6780 rcode = BIT_AND_EXPR;
6781 else if (rcode == TRUTH_ORIF_EXPR)
6782 rcode = BIT_IOR_EXPR;
6783 tree op = build_int_cst (unsigned_type_node, rcode);
6784
6785 if (!var)
6786 var = orig;
6787
6788 incoming = outgoing = var;
6789
6790 if (!inner)
6791 {
6792 /* See if an outer construct also reduces this variable. */
6793 omp_context *outer = ctx;
6794
6795 while (omp_context *probe = outer->outer)
6796 {
6797 enum gimple_code type = gimple_code (probe->stmt);
6798 tree cls;
6799
/* Only OMP_FOR and the OpenACC parallel/serial TARGET kinds are
 walked through; anything else stops the outward scan. */
6800 switch (type)
6801 {
6802 case GIMPLE_OMP_FOR:
6803 cls = gimple_omp_for_clauses (probe->stmt);
6804 break;
6805
6806 case GIMPLE_OMP_TARGET:
6807 if ((gimple_omp_target_kind (probe->stmt)
6808 != GF_OMP_TARGET_KIND_OACC_PARALLEL)
6809 && (gimple_omp_target_kind (probe->stmt)
6810 != GF_OMP_TARGET_KIND_OACC_SERIAL))
6811 goto do_lookup;
6812
6813 cls = gimple_omp_target_clauses (probe->stmt);
6814 break;
6815
6816 default:
6817 goto do_lookup;
6818 }
6819
6820 outer = probe;
6821 for (; cls; cls = OMP_CLAUSE_CHAIN (cls))
6822 if (OMP_CLAUSE_CODE (cls) == OMP_CLAUSE_REDUCTION
6823 && orig == OMP_CLAUSE_DECL (cls))
6824 {
6825 incoming = outgoing = lookup_decl (orig, probe);
6826 goto has_outer_reduction;
6827 }
6828 else if ((OMP_CLAUSE_CODE (cls) == OMP_CLAUSE_FIRSTPRIVATE
6829 || OMP_CLAUSE_CODE (cls) == OMP_CLAUSE_PRIVATE)
6830 && orig == OMP_CLAUSE_DECL (cls))
6831 {
6832 is_private = true;
6833 goto do_lookup;
6834 }
6835 }
6836
6837 do_lookup:
6838 /* This is the outermost construct with this reduction,
6839 see if there's a mapping for it. */
6840 if (gimple_code (outer->stmt) == GIMPLE_OMP_TARGET
6841 && maybe_lookup_field (orig, outer) && !is_private)
6842 {
/* Mapped on the target: receive the result slot from the sender,
 start from the operator's identity value and write the final
 value back through VAR. */
6843 ref_to_res = build_receiver_ref (orig, false, outer);
6844 if (omp_is_reference (orig))
6845 ref_to_res = build_simple_mem_ref (ref_to_res);
6846
6847 tree type = TREE_TYPE (var);
6848 if (POINTER_TYPE_P (type))
6849 type = TREE_TYPE (type);
6850
6851 outgoing = var;
6852 incoming = omp_reduction_init_op (loc, rcode, type);
6853 }
6854 else
6855 {
6856 /* Try to look at enclosing contexts for reduction var,
6857 use original if no mapping found. */
6858 tree t = NULL_TREE;
6859 omp_context *c = ctx->outer;
6860 while (c && !t)
6861 {
6862 t = maybe_lookup_decl (orig, c);
6863 c = c->outer;
6864 }
6865 incoming = outgoing = (t ? t : orig);
6866 }
6867
6868 has_outer_reduction:;
6869 }
6870
/* A zero "reference to result" tells the GOACC_REDUCTION expansion there
 is no separate result slot. */
6871 if (!ref_to_res)
6872 ref_to_res = integer_zero_node;
6873
/* By-reference reduction: create one pointer temporary per phase
 (v1 = setup, v2 = init, v3 = fini), seed them from VAR in FORK_SEQ,
 then dereference everything so the phase calls operate on values. */
6874 if (omp_is_reference (orig))
6875 {
6876 tree type = TREE_TYPE (var);
6877 const char *id = IDENTIFIER_POINTER (DECL_NAME (var));
6878
6879 if (!inner)
6880 {
/* Outermost axis: give VAR private backing storage to point at. */
6881 tree x = create_tmp_var (TREE_TYPE (type), id);
6882 gimplify_assign (var, build_fold_addr_expr (x), fork_seq);
6883 }
6884
6885 v1 = create_tmp_var (type, id);
6886 v2 = create_tmp_var (type, id);
6887 v3 = create_tmp_var (type, id);
6888
6889 gimplify_assign (v1, var, fork_seq);
6890 gimplify_assign (v2, var, fork_seq);
6891 gimplify_assign (v3, var, fork_seq);
6892
6893 var = build_simple_mem_ref (var);
6894 v1 = build_simple_mem_ref (v1);
6895 v2 = build_simple_mem_ref (v2);
6896 v3 = build_simple_mem_ref (v3);
6897 outgoing = build_simple_mem_ref (outgoing);
6898
6899 if (!TREE_CONSTANT (incoming))
6900 incoming = build_simple_mem_ref (incoming);
6901 }
6902 else
6903 v1 = v2 = v3 = var;
6904
6905 /* Determine position in reduction buffer, which may be used
6906 by target. The parser has ensured that this is not a
6907 variable-sized type. */
6908 fixed_size_mode mode
6909 = as_a <fixed_size_mode> (TYPE_MODE (TREE_TYPE (var)));
6910 unsigned align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
/* Round OFFSET up to the mode's alignment before taking this slot. */
6911 offset = (offset + align - 1) & ~(align - 1);
6912 tree off = build_int_cst (sizetype, offset);
6913 offset += GET_MODE_SIZE (mode);
6914
/* The phase selector constants are shared by all clauses; build them
 lazily on the first reduction clause. */
6915 if (!init_code)
6916 {
6917 init_code = build_int_cst (integer_type_node,
6918 IFN_GOACC_REDUCTION_INIT);
6919 fini_code = build_int_cst (integer_type_node,
6920 IFN_GOACC_REDUCTION_FINI);
6921 setup_code = build_int_cst (integer_type_node,
6922 IFN_GOACC_REDUCTION_SETUP);
6923 teardown_code = build_int_cst (integer_type_node,
6924 IFN_GOACC_REDUCTION_TEARDOWN);
6925 }
6926
/* One GOACC_REDUCTION call per phase, each taking
 (phase-code, ref_to_res, value, level, op, offset). */
6927 tree setup_call
6928 = build_call_expr_internal_loc (loc, IFN_GOACC_REDUCTION,
6929 TREE_TYPE (var), 6, setup_code,
6930 unshare_expr (ref_to_res),
6931 incoming, level, op, off);
6932 tree init_call
6933 = build_call_expr_internal_loc (loc, IFN_GOACC_REDUCTION,
6934 TREE_TYPE (var), 6, init_code,
6935 unshare_expr (ref_to_res),
6936 v1, level, op, off);
6937 tree fini_call
6938 = build_call_expr_internal_loc (loc, IFN_GOACC_REDUCTION,
6939 TREE_TYPE (var), 6, fini_code,
6940 unshare_expr (ref_to_res),
6941 v2, level, op, off);
6942 tree teardown_call
6943 = build_call_expr_internal_loc (loc, IFN_GOACC_REDUCTION,
6944 TREE_TYPE (var), 6, teardown_code,
6945 ref_to_res, v3, level, op, off);
6946
6947 gimplify_assign (v1, setup_call, &before_fork);
6948 gimplify_assign (v2, init_call, &after_fork);
6949 gimplify_assign (v3, fini_call, &before_join);
6950 gimplify_assign (outgoing, teardown_call, &after_join);
6951 }
6952
6953 /* Now stitch things together. */
6954 gimple_seq_add_seq (fork_seq, before_fork);
6955 if (fork)
6956 gimple_seq_add_stmt (fork_seq, fork);
6957 gimple_seq_add_seq (fork_seq, after_fork);
6958
6959 gimple_seq_add_seq (join_seq, before_join);
6960 if (join)
6961 gimple_seq_add_stmt (join_seq, join);
6962 gimple_seq_add_seq (join_seq, after_join);
6963 }
6964
/* Generate code to implement the REDUCTION clauses, append it
   to STMT_SEQP.  CLIST if non-NULL is a pointer to a sequence
   that should be emitted also inside of the critical section,
   in that case clear *CLIST afterwards, otherwise leave it as is
   and let the caller emit it itself.

   Two strategies are used: a single scalar reduction is merged with
   one relaxed OMP_ATOMIC update; anything else (several reductions,
   array sections, user-defined reductions) is merged inside a
   GOMP_atomic_start/GOMP_atomic_end critical section.  */

static void
lower_reduction_clauses (tree clauses, gimple_seq *stmt_seqp,
			 gimple_seq *clist, omp_context *ctx)
{
  gimple_seq sub_seq = NULL;	/* Merge statements for the locked path.  */
  gimple *stmt;
  tree x, c;
  int count = 0;		/* Number of scalar reductions, or -1.  */

  /* OpenACC loop reductions are handled elsewhere.  */
  if (is_gimple_omp_oacc (ctx->stmt))
    return;

  /* SIMD reductions are handled in lower_rec_input_clauses.  */
  if (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
      && gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_SIMD)
    return;

  /* inscan reductions are handled elsewhere.  */
  if (ctx->scan_inclusive || ctx->scan_exclusive)
    return;

  /* First see if there is exactly one reduction clause.  Use OMP_ATOMIC
     update in that case, otherwise use a lock.  */
  for (c = clauses; c && count < 2; c = OMP_CLAUSE_CHAIN (c))
    if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
	&& !OMP_CLAUSE_REDUCTION_TASK (c))
      {
	if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c)
	    || TREE_CODE (OMP_CLAUSE_DECL (c)) == MEM_REF)
	  {
	    /* Never use OMP_ATOMIC for array reductions or UDRs.
	       count == -1 forces the lock-based path below.  */
	    count = -1;
	    break;
	  }
	count++;
      }

  if (count == 0)
    return;

  for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
    {
      tree var, ref, new_var, orig_var;
      enum tree_code code;
      location_t clause_loc = OMP_CLAUSE_LOCATION (c);

      if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION
	  || OMP_CLAUSE_REDUCTION_TASK (c))
	continue;

      enum omp_clause_code ccode = OMP_CLAUSE_REDUCTION;
      orig_var = var = OMP_CLAUSE_DECL (c);
      if (TREE_CODE (var) == MEM_REF)
	{
	  /* Array section: peel the MEM_REF down to the base decl.  */
	  var = TREE_OPERAND (var, 0);
	  if (TREE_CODE (var) == POINTER_PLUS_EXPR)
	    var = TREE_OPERAND (var, 0);
	  if (TREE_CODE (var) == ADDR_EXPR)
	    var = TREE_OPERAND (var, 0);
	  else
	    {
	      /* If this is a pointer or referenced based array
		 section, the var could be private in the outer
		 context e.g. on orphaned loop construct.  Pretend this
		 is private variable's outer reference.  */
	      ccode = OMP_CLAUSE_PRIVATE;
	      if (TREE_CODE (var) == INDIRECT_REF)
		var = TREE_OPERAND (var, 0);
	    }
	  orig_var = var;
	  if (is_variable_sized (var))
	    {
	      /* VLA: the real decl hides behind the DECL_VALUE_EXPR.  */
	      gcc_assert (DECL_HAS_VALUE_EXPR_P (var));
	      var = DECL_VALUE_EXPR (var);
	      gcc_assert (TREE_CODE (var) == INDIRECT_REF);
	      var = TREE_OPERAND (var, 0);
	      gcc_assert (DECL_P (var));
	    }
	}
      new_var = lookup_decl (var, ctx);
      if (var == OMP_CLAUSE_DECL (c) && omp_is_reference (var))
	new_var = build_simple_mem_ref_loc (clause_loc, new_var);
      ref = build_outer_var_ref (var, ctx, ccode);
      code = OMP_CLAUSE_REDUCTION_CODE (c);

      /* reduction(-:var) sums up the partial results, so it acts
	 identically to reduction(+:var).  */
      if (code == MINUS_EXPR)
	code = PLUS_EXPR;

      if (count == 1)
	{
	  /* Exactly one scalar reduction: merge the private copy into
	     the shared variable with a single relaxed atomic
	     read-modify-write, then we are done.  */
	  tree addr = build_fold_addr_expr_loc (clause_loc, ref);

	  addr = save_expr (addr);
	  ref = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (addr)), addr);
	  x = fold_build2_loc (clause_loc, code, TREE_TYPE (ref), ref, new_var);
	  x = build2 (OMP_ATOMIC, void_type_node, addr, x);
	  OMP_ATOMIC_MEMORY_ORDER (x) = OMP_MEMORY_ORDER_RELAXED;
	  gimplify_and_add (x, stmt_seqp);
	  return;
	}
      else if (TREE_CODE (OMP_CLAUSE_DECL (c)) == MEM_REF)
	{
	  /* Array section reduction: build an element-wise merge loop
	     (labels BODY..END below, index I running up to V) that is
	     emitted inside the critical section.  */
	  tree d = OMP_CLAUSE_DECL (c);
	  tree type = TREE_TYPE (d);
	  tree v = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
	  tree i = create_tmp_var (TREE_TYPE (v));
	  tree ptype = build_pointer_type (TREE_TYPE (type));
	  tree bias = TREE_OPERAND (d, 1);
	  d = TREE_OPERAND (d, 0);
	  if (TREE_CODE (d) == POINTER_PLUS_EXPR)
	    {
	      /* Fold the section's base offset into BIAS, using the
		 copy of the offset decl remapped in this (or an
		 outer) context.  */
	      tree b = TREE_OPERAND (d, 1);
	      b = maybe_lookup_decl (b, ctx);
	      if (b == NULL)
		{
		  b = TREE_OPERAND (d, 1);
		  b = maybe_lookup_decl_in_outer_ctx (b, ctx);
		}
	      if (integer_zerop (bias))
		bias = b;
	      else
		{
		  bias = fold_convert_loc (clause_loc, TREE_TYPE (b), bias);
		  bias = fold_build2_loc (clause_loc, PLUS_EXPR,
					  TREE_TYPE (b), b, bias);
		}
	      d = TREE_OPERAND (d, 0);
	    }
	  /* For ref build_outer_var_ref already performs this, so
	     only new_var needs a dereference.  */
	  if (TREE_CODE (d) == INDIRECT_REF)
	    {
	      new_var = build_simple_mem_ref_loc (clause_loc, new_var);
	      gcc_assert (omp_is_reference (var) && var == orig_var);
	    }
	  else if (TREE_CODE (d) == ADDR_EXPR)
	    {
	      if (orig_var == var)
		{
		  new_var = build_fold_addr_expr (new_var);
		  ref = build_fold_addr_expr (ref);
		}
	    }
	  else
	    {
	      gcc_assert (orig_var == var);
	      if (omp_is_reference (var))
		ref = build_fold_addr_expr (ref);
	    }
	  if (DECL_P (v))
	    {
	      /* The upper bound may itself be a remapped decl;
		 gimplify it to a value usable in the loop test.  */
	      tree t = maybe_lookup_decl (v, ctx);
	      if (t)
		v = t;
	      else
		v = maybe_lookup_decl_in_outer_ctx (v, ctx);
	      gimplify_expr (&v, stmt_seqp, NULL, is_gimple_val, fb_rvalue);
	    }
	  if (!integer_zerop (bias))
	    {
	      bias = fold_convert_loc (clause_loc, sizetype, bias);
	      new_var = fold_build2_loc (clause_loc, POINTER_PLUS_EXPR,
					 TREE_TYPE (new_var), new_var,
					 unshare_expr (bias));
	      ref = fold_build2_loc (clause_loc, POINTER_PLUS_EXPR,
				     TREE_TYPE (ref), ref, bias);
	    }
	  new_var = fold_convert_loc (clause_loc, ptype, new_var);
	  ref = fold_convert_loc (clause_loc, ptype, ref);
	  tree m = create_tmp_var (ptype);
	  gimplify_assign (m, new_var, stmt_seqp);
	  new_var = m;
	  m = create_tmp_var (ptype);
	  gimplify_assign (m, ref, stmt_seqp);
	  ref = m;
	  gimplify_assign (i, build_int_cst (TREE_TYPE (v), 0), stmt_seqp);
	  tree body = create_artificial_label (UNKNOWN_LOCATION);
	  tree end = create_artificial_label (UNKNOWN_LOCATION);
	  gimple_seq_add_stmt (&sub_seq, gimple_build_label (body));
	  tree priv = build_simple_mem_ref_loc (clause_loc, new_var);
	  tree out = build_simple_mem_ref_loc (clause_loc, ref);
	  if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
	    {
	      /* User-defined reduction: splice in the lowered combiner,
		 binding the placeholders to the shared (OUT) and
		 private (PRIV) elements.  */
	      tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
	      tree decl_placeholder
		= OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c);
	      SET_DECL_VALUE_EXPR (placeholder, out);
	      DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
	      SET_DECL_VALUE_EXPR (decl_placeholder, priv);
	      DECL_HAS_VALUE_EXPR_P (decl_placeholder) = 1;
	      lower_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c), ctx);
	      gimple_seq_add_seq (&sub_seq,
				  OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c));
	      OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
	      OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) = NULL;
	      OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c) = NULL;
	    }
	  else
	    {
	      /* Built-in reduction: out = out CODE priv.  */
	      x = build2 (code, TREE_TYPE (out), out, priv);
	      out = unshare_expr (out);
	      gimplify_assign (out, x, &sub_seq);
	    }
	  /* Advance both element pointers and the index, loop while
	     I <= V.  */
	  gimple *g = gimple_build_assign (new_var, POINTER_PLUS_EXPR, new_var,
					  TYPE_SIZE_UNIT (TREE_TYPE (type)));
	  gimple_seq_add_stmt (&sub_seq, g);
	  g = gimple_build_assign (ref, POINTER_PLUS_EXPR, ref,
				   TYPE_SIZE_UNIT (TREE_TYPE (type)));
	  gimple_seq_add_stmt (&sub_seq, g);
	  g = gimple_build_assign (i, PLUS_EXPR, i,
				   build_int_cst (TREE_TYPE (i), 1));
	  gimple_seq_add_stmt (&sub_seq, g);
	  g = gimple_build_cond (LE_EXPR, i, v, body, end);
	  gimple_seq_add_stmt (&sub_seq, g);
	  gimple_seq_add_stmt (&sub_seq, gimple_build_label (end));
	}
      else if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
	{
	  /* Scalar user-defined reduction under the lock.  */
	  tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);

	  if (omp_is_reference (var)
	      && !useless_type_conversion_p (TREE_TYPE (placeholder),
					     TREE_TYPE (ref)))
	    ref = build_fold_addr_expr_loc (clause_loc, ref);
	  SET_DECL_VALUE_EXPR (placeholder, ref);
	  DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
	  lower_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c), ctx);
	  gimple_seq_add_seq (&sub_seq, OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c));
	  OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
	  OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) = NULL;
	}
      else
	{
	  /* Plain scalar reduction under the lock:
	     outer = outer CODE private.  */
	  x = build2 (code, TREE_TYPE (ref), ref, new_var);
	  ref = build_outer_var_ref (var, ctx);
	  gimplify_assign (ref, x, &sub_seq);
	}
    }

  /* Wrap all accumulated merge statements (and *CLIST, if given) in a
     GOMP_atomic_start/GOMP_atomic_end critical section.  */
  stmt = gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_ATOMIC_START),
			    0);
  gimple_seq_add_stmt (stmt_seqp, stmt);

  gimple_seq_add_seq (stmt_seqp, sub_seq);

  if (clist)
    {
      gimple_seq_add_seq (stmt_seqp, *clist);
      *clist = NULL;
    }

  stmt = gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_ATOMIC_END),
			    0);
  gimple_seq_add_stmt (stmt_seqp, stmt);
}
7229
7230
7231 /* Generate code to implement the COPYPRIVATE clauses. */
7232
7233 static void
7234 lower_copyprivate_clauses (tree clauses, gimple_seq *slist, gimple_seq *rlist,
7235 omp_context *ctx)
7236 {
7237 tree c;
7238
7239 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
7240 {
7241 tree var, new_var, ref, x;
7242 bool by_ref;
7243 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
7244
7245 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_COPYPRIVATE)
7246 continue;
7247
7248 var = OMP_CLAUSE_DECL (c);
7249 by_ref = use_pointer_for_field (var, NULL);
7250
7251 ref = build_sender_ref (var, ctx);
7252 x = new_var = lookup_decl_in_outer_ctx (var, ctx);
7253 if (by_ref)
7254 {
7255 x = build_fold_addr_expr_loc (clause_loc, new_var);
7256 x = fold_convert_loc (clause_loc, TREE_TYPE (ref), x);
7257 }
7258 gimplify_assign (ref, x, slist);
7259
7260 ref = build_receiver_ref (var, false, ctx);
7261 if (by_ref)
7262 {
7263 ref = fold_convert_loc (clause_loc,
7264 build_pointer_type (TREE_TYPE (new_var)),
7265 ref);
7266 ref = build_fold_indirect_ref_loc (clause_loc, ref);
7267 }
7268 if (omp_is_reference (var))
7269 {
7270 ref = fold_convert_loc (clause_loc, TREE_TYPE (new_var), ref);
7271 ref = build_simple_mem_ref_loc (clause_loc, ref);
7272 new_var = build_simple_mem_ref_loc (clause_loc, new_var);
7273 }
7274 x = lang_hooks.decls.omp_clause_assign_op (c, new_var, ref);
7275 gimplify_and_add (x, rlist);
7276 }
7277 }
7278
7279
/* Generate code to implement the clauses, FIRSTPRIVATE, COPYIN, LASTPRIVATE,
   and REDUCTION from the sender (aka parent) side.  Copy-in assignments
   into the sender record are appended to ILIST; copy-back assignments
   after the region are appended to OLIST.  */

static void
lower_send_clauses (tree clauses, gimple_seq *ilist, gimple_seq *olist,
		    omp_context *ctx)
{
  tree c, t;
  int ignored_looptemp = 0;
  bool is_taskloop = false;

  /* For taskloop, ignore first two _looptemp_ clauses, those are initialized
     by GOMP_taskloop.  */
  if (is_task_ctx (ctx) && gimple_omp_task_taskloop_p (ctx->stmt))
    {
      ignored_looptemp = 2;
      is_taskloop = true;
    }

  for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
    {
      tree val, ref, x, var;
      bool by_ref, do_in = false, do_out = false;
      location_t clause_loc = OMP_CLAUSE_LOCATION (c);

      /* First filter: decide which clause kinds need any sender-side
	 work at all; everything not listed is skipped.  */
      switch (OMP_CLAUSE_CODE (c))
	{
	case OMP_CLAUSE_PRIVATE:
	  if (OMP_CLAUSE_PRIVATE_OUTER_REF (c))
	    break;
	  continue;
	case OMP_CLAUSE_FIRSTPRIVATE:
	case OMP_CLAUSE_COPYIN:
	case OMP_CLAUSE_LASTPRIVATE:
	case OMP_CLAUSE_IN_REDUCTION:
	case OMP_CLAUSE__REDUCTEMP_:
	  break;
	case OMP_CLAUSE_REDUCTION:
	  if (is_task_ctx (ctx) || OMP_CLAUSE_REDUCTION_TASK (c))
	    continue;
	  break;
	case OMP_CLAUSE_SHARED:
	  if (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
	    break;
	  continue;
	case OMP_CLAUSE__LOOPTEMP_:
	  if (ignored_looptemp)
	    {
	      ignored_looptemp--;
	      continue;
	    }
	  break;
	default:
	  continue;
	}

      val = OMP_CLAUSE_DECL (c);
      if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
	   || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_IN_REDUCTION)
	  && TREE_CODE (val) == MEM_REF)
	{
	  /* Array-section reduction: strip down to the base decl.  */
	  val = TREE_OPERAND (val, 0);
	  if (TREE_CODE (val) == POINTER_PLUS_EXPR)
	    val = TREE_OPERAND (val, 0);
	  if (TREE_CODE (val) == INDIRECT_REF
	      || TREE_CODE (val) == ADDR_EXPR)
	    val = TREE_OPERAND (val, 0);
	  if (is_variable_sized (val))
	    continue;
	}

      /* For OMP_CLAUSE_SHARED_FIRSTPRIVATE, look beyond the
	 outer taskloop region.  */
      omp_context *ctx_for_o = ctx;
      if (is_taskloop
	  && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED
	  && OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
	ctx_for_o = ctx->outer;

      var = lookup_decl_in_outer_ctx (val, ctx_for_o);

      /* Globals generally need no marshalling; the narrow exception
	 below keeps certain task pointer/reference cases.  */
      if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_COPYIN
	  && is_global_var (var)
	  && (val == OMP_CLAUSE_DECL (c)
	      || !is_task_ctx (ctx)
	      || (TREE_CODE (TREE_TYPE (val)) != POINTER_TYPE
		  && (TREE_CODE (TREE_TYPE (val)) != REFERENCE_TYPE
		      || (TREE_CODE (TREE_TYPE (TREE_TYPE (val)))
			  != POINTER_TYPE)))))
	continue;

      t = omp_member_access_dummy_var (var);
      if (t)
	{
	  /* VAR stands in for a member access; substitute the real
	     expression, remapped to the outer context if needed.  */
	  var = DECL_VALUE_EXPR (var);
	  tree o = maybe_lookup_decl_in_outer_ctx (t, ctx_for_o);
	  if (o != t)
	    var = unshare_and_remap (var, t, o);
	  else
	    var = unshare_expr (var);
	}

      if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED)
	{
	  /* Handle taskloop firstprivate/lastprivate, where the
	     lastprivate on GIMPLE_OMP_TASK is represented as
	     OMP_CLAUSE_SHARED_FIRSTPRIVATE.  */
	  tree f = lookup_sfield ((splay_tree_key) &DECL_UID (val), ctx);
	  x = omp_build_component_ref (ctx->sender_decl, f);
	  if (use_pointer_for_field (val, ctx))
	    var = build_fold_addr_expr (var);
	  gimplify_assign (x, var, ilist);
	  DECL_ABSTRACT_ORIGIN (f) = NULL;
	  continue;
	}

      if (((OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION
	    && OMP_CLAUSE_CODE (c) != OMP_CLAUSE_IN_REDUCTION)
	   || val == OMP_CLAUSE_DECL (c))
	  && is_variable_sized (val))
	continue;
      by_ref = use_pointer_for_field (val, NULL);

      /* Second dispatch: per clause kind, decide whether the value is
	 copied in before the region (DO_IN), copied back out after it
	 (DO_OUT), or both.  */
      switch (OMP_CLAUSE_CODE (c))
	{
	case OMP_CLAUSE_FIRSTPRIVATE:
	  if (OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (c)
	      && !by_ref
	      && is_task_ctx (ctx))
	    TREE_NO_WARNING (var) = 1;
	  do_in = true;
	  break;

	case OMP_CLAUSE_PRIVATE:
	case OMP_CLAUSE_COPYIN:
	case OMP_CLAUSE__LOOPTEMP_:
	case OMP_CLAUSE__REDUCTEMP_:
	  do_in = true;
	  break;

	case OMP_CLAUSE_LASTPRIVATE:
	  if (by_ref || omp_is_reference (val))
	    {
	      if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
		continue;
	      do_in = true;
	    }
	  else
	    {
	      do_out = true;
	      if (lang_hooks.decls.omp_private_outer_ref (val))
		do_in = true;
	    }
	  break;

	case OMP_CLAUSE_REDUCTION:
	case OMP_CLAUSE_IN_REDUCTION:
	  do_in = true;
	  if (val == OMP_CLAUSE_DECL (c))
	    {
	      if (is_task_ctx (ctx))
		by_ref = use_pointer_for_field (val, ctx);
	      else
		do_out = !(by_ref || omp_is_reference (val));
	    }
	  else
	    by_ref = TREE_CODE (TREE_TYPE (val)) == ARRAY_TYPE;
	  break;

	default:
	  gcc_unreachable ();
	}

      if (do_in)
	{
	  ref = build_sender_ref (val, ctx);
	  x = by_ref ? build_fold_addr_expr_loc (clause_loc, var) : var;
	  gimplify_assign (ref, x, ilist);
	  if (is_task_ctx (ctx))
	    DECL_ABSTRACT_ORIGIN (TREE_OPERAND (ref, 1)) = NULL;
	}

      if (do_out)
	{
	  ref = build_sender_ref (val, ctx);
	  gimplify_assign (var, ref, olist);
	}
    }
}
7469
/* Generate code to implement SHARED from the sender (aka parent)
   side.  This is trickier, since GIMPLE_OMP_PARALLEL_CLAUSES doesn't
   list things that got automatically shared.  Instead, walk the fields
   of the communication record type and reconstruct the copies: copy-in
   assignments go on ILIST, copy-back assignments on OLIST.  */

static void
lower_send_shared_vars (gimple_seq *ilist, gimple_seq *olist, omp_context *ctx)
{
  tree var, ovar, nvar, t, f, x, record_type;

  if (ctx->record_type == NULL)
    return;

  /* Prefer the sender record type when one was built separately.  */
  record_type = ctx->srecord_type ? ctx->srecord_type : ctx->record_type;
  for (f = TYPE_FIELDS (record_type); f ; f = DECL_CHAIN (f))
    {
      /* Each interesting field records the original variable as its
	 abstract origin; skip fields without one.  */
      ovar = DECL_ABSTRACT_ORIGIN (f);
      if (!ovar || TREE_CODE (ovar) == FIELD_DECL)
	continue;

      nvar = maybe_lookup_decl (ovar, ctx);
      if (!nvar || !DECL_HAS_VALUE_EXPR_P (nvar))
	continue;

      /* If CTX is a nested parallel directive.  Find the immediately
	 enclosing parallel or workshare construct that contains a
	 mapping for OVAR.  */
      var = lookup_decl_in_outer_ctx (ovar, ctx);

      t = omp_member_access_dummy_var (var);
      if (t)
	{
	  /* VAR stands in for a member access; substitute the real
	     expression, remapped to the outer context if needed.  */
	  var = DECL_VALUE_EXPR (var);
	  tree o = maybe_lookup_decl_in_outer_ctx (t, ctx);
	  if (o != t)
	    var = unshare_and_remap (var, t, o);
	  else
	    var = unshare_expr (var);
	}

      if (use_pointer_for_field (ovar, ctx))
	{
	  /* Shared by address: store &VAR into the record.  */
	  x = build_sender_ref (ovar, ctx);
	  if (TREE_CODE (TREE_TYPE (f)) == ARRAY_TYPE
	      && TREE_TYPE (f) == TREE_TYPE (ovar))
	    {
	      gcc_assert (is_parallel_ctx (ctx)
			  && DECL_ARTIFICIAL (ovar));
	      /* _condtemp_ clause.  */
	      var = build_constructor (TREE_TYPE (x), NULL);
	    }
	  else
	    var = build_fold_addr_expr (var);
	  gimplify_assign (x, var, ilist);
	}
      else
	{
	  /* Shared by value: copy in before the region ...  */
	  x = build_sender_ref (ovar, ctx);
	  gimplify_assign (x, var, ilist);

	  /* ... and copy back out afterwards, unless that is
	     unnecessary or unsafe.  */
	  if (!TREE_READONLY (var)
	      /* We don't need to receive a new reference to a result
		 or parm decl.  In fact we may not store to it as we will
		 invalidate any pending RSO and generate wrong gimple
		 during inlining.  */
	      && !((TREE_CODE (var) == RESULT_DECL
		    || TREE_CODE (var) == PARM_DECL)
		   && DECL_BY_REFERENCE (var)))
	    {
	      x = build_sender_ref (ovar, ctx);
	      gimplify_assign (var, x, olist);
	    }
	}
    }
}
7544
7545 /* Emit an OpenACC head marker call, encapulating the partitioning and
7546 other information that must be processed by the target compiler.
7547 Return the maximum number of dimensions the associated loop might
7548 be partitioned over. */
7549
7550 static unsigned
7551 lower_oacc_head_mark (location_t loc, tree ddvar, tree clauses,
7552 gimple_seq *seq, omp_context *ctx)
7553 {
7554 unsigned levels = 0;
7555 unsigned tag = 0;
7556 tree gang_static = NULL_TREE;
7557 auto_vec<tree, 5> args;
7558
7559 args.quick_push (build_int_cst
7560 (integer_type_node, IFN_UNIQUE_OACC_HEAD_MARK));
7561 args.quick_push (ddvar);
7562 for (tree c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
7563 {
7564 switch (OMP_CLAUSE_CODE (c))
7565 {
7566 case OMP_CLAUSE_GANG:
7567 tag |= OLF_DIM_GANG;
7568 gang_static = OMP_CLAUSE_GANG_STATIC_EXPR (c);
7569 /* static:* is represented by -1, and we can ignore it, as
7570 scheduling is always static. */
7571 if (gang_static && integer_minus_onep (gang_static))
7572 gang_static = NULL_TREE;
7573 levels++;
7574 break;
7575
7576 case OMP_CLAUSE_WORKER:
7577 tag |= OLF_DIM_WORKER;
7578 levels++;
7579 break;
7580
7581 case OMP_CLAUSE_VECTOR:
7582 tag |= OLF_DIM_VECTOR;
7583 levels++;
7584 break;
7585
7586 case OMP_CLAUSE_SEQ:
7587 tag |= OLF_SEQ;
7588 break;
7589
7590 case OMP_CLAUSE_AUTO:
7591 tag |= OLF_AUTO;
7592 break;
7593
7594 case OMP_CLAUSE_INDEPENDENT:
7595 tag |= OLF_INDEPENDENT;
7596 break;
7597
7598 case OMP_CLAUSE_TILE:
7599 tag |= OLF_TILE;
7600 break;
7601
7602 default:
7603 continue;
7604 }
7605 }
7606
7607 if (gang_static)
7608 {
7609 if (DECL_P (gang_static))
7610 gang_static = build_outer_var_ref (gang_static, ctx);
7611 tag |= OLF_GANG_STATIC;
7612 }
7613
7614 /* In a parallel region, loops are implicitly INDEPENDENT. */
7615 omp_context *tgt = enclosing_target_ctx (ctx);
7616 if (!tgt || is_oacc_parallel_or_serial (tgt))
7617 tag |= OLF_INDEPENDENT;
7618
7619 if (tag & OLF_TILE)
7620 /* Tiling could use all 3 levels. */
7621 levels = 3;
7622 else
7623 {
7624 /* A loop lacking SEQ, GANG, WORKER and/or VECTOR could be AUTO.
7625 Ensure at least one level, or 2 for possible auto
7626 partitioning */
7627 bool maybe_auto = !(tag & (((GOMP_DIM_MASK (GOMP_DIM_MAX) - 1)
7628 << OLF_DIM_BASE) | OLF_SEQ));
7629
7630 if (levels < 1u + maybe_auto)
7631 levels = 1u + maybe_auto;
7632 }
7633
7634 args.quick_push (build_int_cst (integer_type_node, levels));
7635 args.quick_push (build_int_cst (integer_type_node, tag));
7636 if (gang_static)
7637 args.quick_push (gang_static);
7638
7639 gcall *call = gimple_build_call_internal_vec (IFN_UNIQUE, args);
7640 gimple_set_location (call, loc);
7641 gimple_set_lhs (call, ddvar);
7642 gimple_seq_add_stmt (seq, call);
7643
7644 return levels;
7645 }
7646
7647 /* Emit an OpenACC lopp head or tail marker to SEQ. LEVEL is the
7648 partitioning level of the enclosed region. */
7649
7650 static void
7651 lower_oacc_loop_marker (location_t loc, tree ddvar, bool head,
7652 tree tofollow, gimple_seq *seq)
7653 {
7654 int marker_kind = (head ? IFN_UNIQUE_OACC_HEAD_MARK
7655 : IFN_UNIQUE_OACC_TAIL_MARK);
7656 tree marker = build_int_cst (integer_type_node, marker_kind);
7657 int nargs = 2 + (tofollow != NULL_TREE);
7658 gcall *call = gimple_build_call_internal (IFN_UNIQUE, nargs,
7659 marker, ddvar, tofollow);
7660 gimple_set_location (call, loc);
7661 gimple_set_lhs (call, ddvar);
7662 gimple_seq_add_stmt (seq, call);
7663 }
7664
/* Generate the before and after OpenACC loop sequences.  CLAUSES are
   the loop clauses, from which we extract reductions.  Initialize
   HEAD and TAIL.  One fork/join pair is emitted per partitioning
   level; fork sequences are appended to HEAD in order while join
   sequences are prepended to TAIL, so the levels nest properly.  */

static void
lower_oacc_head_tail (location_t loc, tree clauses,
		      gimple_seq *head, gimple_seq *tail, omp_context *ctx)
{
  bool inner = false;
  tree ddvar = create_tmp_var (integer_type_node, ".data_dep");
  gimple_seq_add_stmt (head, gimple_build_assign (ddvar, integer_zero_node));

  unsigned count = lower_oacc_head_mark (loc, ddvar, clauses, head, ctx);
  tree fork_kind = build_int_cst (unsigned_type_node, IFN_UNIQUE_OACC_FORK);
  tree join_kind = build_int_cst (unsigned_type_node, IFN_UNIQUE_OACC_JOIN);

  gcc_assert (count);
  for (unsigned done = 1; count; count--, done++)
    {
      gimple_seq fork_seq = NULL;
      gimple_seq join_seq = NULL;

      /* The partitioning dimension is not known yet; -1 is a
	 placeholder filled in by the target compiler.  */
      tree place = build_int_cst (integer_type_node, -1);
      gcall *fork = gimple_build_call_internal (IFN_UNIQUE, 3,
						fork_kind, ddvar, place);
      gimple_set_location (fork, loc);
      gimple_set_lhs (fork, ddvar);

      gcall *join = gimple_build_call_internal (IFN_UNIQUE, 3,
						join_kind, ddvar, place);
      gimple_set_location (join, loc);
      gimple_set_lhs (join, ddvar);

      /* Mark the beginning of this level sequence.  */
      if (inner)
	lower_oacc_loop_marker (loc, ddvar, true,
				build_int_cst (integer_type_node, count),
				&fork_seq);
      lower_oacc_loop_marker (loc, ddvar, false,
			      build_int_cst (integer_type_node, done),
			      &join_seq);

      /* Wrap this level's reduction setup/teardown around the
	 fork/join calls.  */
      lower_oacc_reductions (loc, clauses, place, inner,
			     fork, join, &fork_seq, &join_seq, ctx);

      /* Append this level to head.  */
      gimple_seq_add_seq (head, fork_seq);
      /* Prepend it to tail.  */
      gimple_seq_add_seq (&join_seq, *tail);
      *tail = join_seq;

      inner = true;
    }

  /* Mark the end of the sequence.  */
  lower_oacc_loop_marker (loc, ddvar, true, NULL_TREE, head);
  lower_oacc_loop_marker (loc, ddvar, false, NULL_TREE, tail);
}
7723
7724 /* If exceptions are enabled, wrap the statements in BODY in a MUST_NOT_THROW
7725 catch handler and return it. This prevents programs from violating the
7726 structured block semantics with throws. */
7727
7728 static gimple_seq
7729 maybe_catch_exception (gimple_seq body)
7730 {
7731 gimple *g;
7732 tree decl;
7733
7734 if (!flag_exceptions)
7735 return body;
7736
7737 if (lang_hooks.eh_protect_cleanup_actions != NULL)
7738 decl = lang_hooks.eh_protect_cleanup_actions ();
7739 else
7740 decl = builtin_decl_explicit (BUILT_IN_TRAP);
7741
7742 g = gimple_build_eh_must_not_throw (decl);
7743 g = gimple_build_try (body, gimple_seq_alloc_with_stmt (g),
7744 GIMPLE_TRY_CATCH);
7745
7746 return gimple_seq_alloc_with_stmt (g);
7747 }
7748
7749 \f
7750 /* Routines to lower OMP directives into OMP-GIMPLE. */
7751
/* If ctx is a worksharing context inside of a cancellable parallel
   region and it isn't nowait, add lhs to its GIMPLE_OMP_RETURN
   and conditional branch to parallel's cancel_label to handle
   cancellation in the implicit barrier.  */

static void
maybe_add_implicit_barrier_cancel (omp_context *ctx, gimple *omp_return,
				   gimple_seq *body)
{
  gcc_assert (gimple_code (omp_return) == GIMPLE_OMP_RETURN);
  /* nowait means no implicit barrier, hence nothing to cancel at.  */
  if (gimple_omp_return_nowait_p (omp_return))
    return;
  /* Walk outward through enclosing contexts; taskgroups are looked
     through, and the walk stops at the first other construct that is
     not a cancellable parallel.  */
  for (omp_context *outer = ctx->outer; outer; outer = outer->outer)
    if (gimple_code (outer->stmt) == GIMPLE_OMP_PARALLEL
	&& outer->cancellable)
      {
	/* The GIMPLE_OMP_RETURN's lhs receives GOMP_cancel's boolean
	   result; branch to the parallel's cancel label when it is
	   true, otherwise fall through.  */
	tree fndecl = builtin_decl_explicit (BUILT_IN_GOMP_CANCEL);
	tree c_bool_type = TREE_TYPE (TREE_TYPE (fndecl));
	tree lhs = create_tmp_var (c_bool_type);
	gimple_omp_return_set_lhs (omp_return, lhs);
	tree fallthru_label = create_artificial_label (UNKNOWN_LOCATION);
	gimple *g = gimple_build_cond (NE_EXPR, lhs,
				       fold_convert (c_bool_type,
						     boolean_false_node),
				       outer->cancel_label, fallthru_label);
	gimple_seq_add_stmt (body, g);
	gimple_seq_add_stmt (body, gimple_build_label (fallthru_label));
      }
    else if (gimple_code (outer->stmt) != GIMPLE_OMP_TASKGROUP)
      return;
}
7783
7784 /* Find the first task_reduction or reduction clause or return NULL
7785 if there are none. */
7786
7787 static inline tree
7788 omp_task_reductions_find_first (tree clauses, enum tree_code code,
7789 enum omp_clause_code ccode)
7790 {
7791 while (1)
7792 {
7793 clauses = omp_find_clause (clauses, ccode);
7794 if (clauses == NULL_TREE)
7795 return NULL_TREE;
7796 if (ccode != OMP_CLAUSE_REDUCTION
7797 || code == OMP_TASKLOOP
7798 || OMP_CLAUSE_REDUCTION_TASK (clauses))
7799 return clauses;
7800 clauses = OMP_CLAUSE_CHAIN (clauses);
7801 }
7802 }
7803
7804 static void lower_omp_task_reductions (omp_context *, enum tree_code, tree,
7805 gimple_seq *, gimple_seq *);
7806
/* Lower the OpenMP sections directive in the current statement in GSI_P.
   CTX is the enclosing OMP context for the current statement.  */

static void
lower_omp_sections (gimple_stmt_iterator *gsi_p, omp_context *ctx)
{
  tree block, control;
  gimple_stmt_iterator tgsi;
  gomp_sections *stmt;
  gimple *t;
  gbind *new_stmt, *bind;
  gimple_seq ilist, dlist, olist, tred_dlist = NULL, clist = NULL, new_body;

  stmt = as_a <gomp_sections *> (gsi_stmt (*gsi_p));

  push_gimplify_context ();

  dlist = NULL;
  ilist = NULL;

  /* Task reductions: if present, add a _reductemp_ clause and emit the
     setup (into ILIST) and teardown (into TRED_DLIST) code.  */
  tree rclauses
    = omp_task_reductions_find_first (gimple_omp_sections_clauses (stmt),
				      OMP_SECTIONS, OMP_CLAUSE_REDUCTION);
  tree rtmp = NULL_TREE;
  if (rclauses)
    {
      tree type = build_pointer_type (pointer_sized_int_node);
      tree temp = create_tmp_var (type);
      tree c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__REDUCTEMP_);
      OMP_CLAUSE_DECL (c) = temp;
      OMP_CLAUSE_CHAIN (c) = gimple_omp_sections_clauses (stmt);
      gimple_omp_sections_set_clauses (stmt, c);
      lower_omp_task_reductions (ctx, OMP_SECTIONS,
				 gimple_omp_sections_clauses (stmt),
				 &ilist, &tred_dlist);
      rclauses = c;
      rtmp = make_ssa_name (type);
      gimple_seq_add_stmt (&ilist, gimple_build_assign (rtmp, temp));
    }

  tree *clauses_ptr = gimple_omp_sections_clauses_ptr (stmt);
  lower_lastprivate_conditional_clauses (clauses_ptr, ctx);

  lower_rec_input_clauses (gimple_omp_sections_clauses (stmt),
			   &ilist, &dlist, ctx, NULL);

  /* The control variable carries the currently executing section's
     number through the GIMPLE_OMP_CONTINUE below.  */
  control = create_tmp_var (unsigned_type_node, ".section");
  gimple_omp_sections_set_control (stmt, control);

  /* Lower each GIMPLE_OMP_SECTION body in turn, hoisting it next to
     its section statement; lastprivate handling is attached to the
     final section.  */
  new_body = gimple_omp_body (stmt);
  gimple_omp_set_body (stmt, NULL);
  tgsi = gsi_start (new_body);
  for (; !gsi_end_p (tgsi); gsi_next (&tgsi))
    {
      omp_context *sctx;
      gimple *sec_start;

      sec_start = gsi_stmt (tgsi);
      sctx = maybe_lookup_ctx (sec_start);
      gcc_assert (sctx);

      lower_omp (gimple_omp_body_ptr (sec_start), sctx);
      gsi_insert_seq_after (&tgsi, gimple_omp_body (sec_start),
			    GSI_CONTINUE_LINKING);
      gimple_omp_set_body (sec_start, NULL);

      if (gsi_one_before_end_p (tgsi))
	{
	  gimple_seq l = NULL;
	  lower_lastprivate_clauses (gimple_omp_sections_clauses (stmt), NULL,
				     &ilist, &l, &clist, ctx);
	  gsi_insert_seq_after (&tgsi, l, GSI_CONTINUE_LINKING);
	  gimple_omp_section_set_last (sec_start);
	}

      gsi_insert_after (&tgsi, gimple_build_omp_return (false),
			GSI_CONTINUE_LINKING);
    }

  block = make_node (BLOCK);
  bind = gimple_build_bind (NULL, new_body, block);

  olist = NULL;
  lower_reduction_clauses (gimple_omp_sections_clauses (stmt), &olist,
			   &clist, ctx);
  if (clist)
    {
      /* CLIST statements were not emitted inside the reduction critical
	 section; wrap them in their own GOMP_atomic_start/end pair.  */
      tree fndecl = builtin_decl_explicit (BUILT_IN_GOMP_ATOMIC_START);
      gcall *g = gimple_build_call (fndecl, 0);
      gimple_seq_add_stmt (&olist, g);
      gimple_seq_add_seq (&olist, clist);
      fndecl = builtin_decl_explicit (BUILT_IN_GOMP_ATOMIC_END);
      g = gimple_build_call (fndecl, 0);
      gimple_seq_add_stmt (&olist, g);
    }

  block = make_node (BLOCK);
  new_stmt = gimple_build_bind (NULL, NULL, block);
  gsi_replace (gsi_p, new_stmt, true);

  pop_gimplify_context (new_stmt);
  gimple_bind_append_vars (new_stmt, ctx->block_vars);
  BLOCK_VARS (block) = gimple_bind_vars (bind);
  if (BLOCK_VARS (block))
    TREE_USED (block) = 1;

  /* Assemble the replacement body:
     ILIST, the sections statement and switch, the bound section
     bodies, a continue statement, reductions (OLIST), the optional
     cancel label, destructors (DLIST), then the closing return and
     task-reduction teardown (TRED_DLIST).  */
  new_body = NULL;
  gimple_seq_add_seq (&new_body, ilist);
  gimple_seq_add_stmt (&new_body, stmt);
  gimple_seq_add_stmt (&new_body, gimple_build_omp_sections_switch ());
  gimple_seq_add_stmt (&new_body, bind);

  t = gimple_build_omp_continue (control, control);
  gimple_seq_add_stmt (&new_body, t);

  gimple_seq_add_seq (&new_body, olist);
  if (ctx->cancellable)
    gimple_seq_add_stmt (&new_body, gimple_build_label (ctx->cancel_label));
  gimple_seq_add_seq (&new_body, dlist);

  new_body = maybe_catch_exception (new_body);

  bool nowait = omp_find_clause (gimple_omp_sections_clauses (stmt),
				 OMP_CLAUSE_NOWAIT) != NULL_TREE;
  t = gimple_build_omp_return (nowait);
  gimple_seq_add_stmt (&new_body, t);
  gimple_seq_add_seq (&new_body, tred_dlist);
  maybe_add_implicit_barrier_cancel (ctx, t, &new_body);

  if (rclauses)
    OMP_CLAUSE_DECL (rclauses) = rtmp;

  gimple_bind_set_body (new_stmt, new_body);
}
7941
7942
7943 /* A subroutine of lower_omp_single. Expand the simple form of
7944 a GIMPLE_OMP_SINGLE, without a copyprivate clause:
7945
7946 if (GOMP_single_start ())
7947 BODY;
7948 [ GOMP_barrier (); ] -> unless 'nowait' is present.
7949
7950 FIXME. It may be better to delay expanding the logic of this until
7951 pass_expand_omp. The expanded logic may make the job more difficult
7952 to a synchronization analysis pass. */
7953
7954 static void
7955 lower_omp_single_simple (gomp_single *single_stmt, gimple_seq *pre_p)
7956 {
7957 location_t loc = gimple_location (single_stmt);
7958 tree tlabel = create_artificial_label (loc);
7959 tree flabel = create_artificial_label (loc);
7960 gimple *call, *cond;
7961 tree lhs, decl;
7962
7963 decl = builtin_decl_explicit (BUILT_IN_GOMP_SINGLE_START);
7964 lhs = create_tmp_var (TREE_TYPE (TREE_TYPE (decl)));
7965 call = gimple_build_call (decl, 0);
7966 gimple_call_set_lhs (call, lhs);
7967 gimple_seq_add_stmt (pre_p, call);
7968
7969 cond = gimple_build_cond (EQ_EXPR, lhs,
7970 fold_convert_loc (loc, TREE_TYPE (lhs),
7971 boolean_true_node),
7972 tlabel, flabel);
7973 gimple_seq_add_stmt (pre_p, cond);
7974 gimple_seq_add_stmt (pre_p, gimple_build_label (tlabel));
7975 gimple_seq_add_seq (pre_p, gimple_omp_body (single_stmt));
7976 gimple_seq_add_stmt (pre_p, gimple_build_label (flabel));
7977 }
7978
7979
7980 /* A subroutine of lower_omp_single. Expand the simple form of
7981 a GIMPLE_OMP_SINGLE, with a copyprivate clause:
7982
7983 #pragma omp single copyprivate (a, b, c)
7984
7985 Create a new structure to hold copies of 'a', 'b' and 'c' and emit:
7986
7987 {
7988 if ((copyout_p = GOMP_single_copy_start ()) == NULL)
7989 {
7990 BODY;
7991 copyout.a = a;
7992 copyout.b = b;
7993 copyout.c = c;
7994 GOMP_single_copy_end (&copyout);
7995 }
7996 else
7997 {
7998 a = copyout_p->a;
7999 b = copyout_p->b;
8000 c = copyout_p->c;
8001 }
8002 GOMP_barrier ();
8003 }
8004
8005 FIXME. It may be better to delay expanding the logic of this until
8006 pass_expand_omp. The expanded logic may make the job more difficult
8007 to a synchronization analysis pass. */
8008
static void
lower_omp_single_copy (gomp_single *single_stmt, gimple_seq *pre_p,
		       omp_context *ctx)
{
  tree ptr_type, t, l0, l1, l2, bfn_decl;
  gimple_seq copyin_seq;
  location_t loc = gimple_location (single_stmt);

  /* The record the executing thread fills in and broadcasts
     (".omp_copy_o" = copy-out).  */
  ctx->sender_decl = create_tmp_var (ctx->record_type, ".omp_copy_o");

  /* The pointer the other threads receive it through
     (".omp_copy_i" = copy-in).  */
  ptr_type = build_pointer_type (ctx->record_type);
  ctx->receiver_decl = create_tmp_var (ptr_type, ".omp_copy_i");

  /* l0: executing thread's path; l1: copy-in path; l2: join.  */
  l0 = create_artificial_label (loc);
  l1 = create_artificial_label (loc);
  l2 = create_artificial_label (loc);

  /* receiver = (ptr_type) GOMP_single_copy_start ();  NULL is returned
     to the thread that should execute the body.  */
  bfn_decl = builtin_decl_explicit (BUILT_IN_GOMP_SINGLE_COPY_START);
  t = build_call_expr_loc (loc, bfn_decl, 0);
  t = fold_convert_loc (loc, ptr_type, t);
  gimplify_assign (ctx->receiver_decl, t, pre_p);

  /* if (receiver == NULL) goto l0; else goto l1;  */
  t = build2 (EQ_EXPR, boolean_type_node, ctx->receiver_decl,
	      build_int_cst (ptr_type, 0));
  t = build3 (COND_EXPR, void_type_node, t,
	      build_and_jump (&l0), build_and_jump (&l1));
  gimplify_and_add (t, pre_p);

  gimple_seq_add_stmt (pre_p, gimple_build_label (l0));

  /* Executing thread: run the body ...  */
  gimple_seq_add_seq (pre_p, gimple_omp_body (single_stmt));

  /* ... and build both directions of the copy: the sender-side stores
     into .omp_copy_o are added to PRE_P here, the receiver-side loads
     are collected in COPYIN_SEQ for emission at l1 below.  */
  copyin_seq = NULL;
  lower_copyprivate_clauses (gimple_omp_single_clauses (single_stmt), pre_p,
			     &copyin_seq, ctx);

  /* GOMP_single_copy_end (&.omp_copy_o) publishes the record.  */
  t = build_fold_addr_expr_loc (loc, ctx->sender_decl);
  bfn_decl = builtin_decl_explicit (BUILT_IN_GOMP_SINGLE_COPY_END);
  t = build_call_expr_loc (loc, bfn_decl, 1, t);
  gimplify_and_add (t, pre_p);

  /* Executing thread skips the copy-in code.  */
  t = build_and_jump (&l2);
  gimplify_and_add (t, pre_p);

  gimple_seq_add_stmt (pre_p, gimple_build_label (l1));

  /* Non-executing threads: copy the broadcast values in.  */
  gimple_seq_add_seq (pre_p, copyin_seq);

  gimple_seq_add_stmt (pre_p, gimple_build_label (l2));
}
8059
8060
8061 /* Expand code for an OpenMP single directive. */
8062
static void
lower_omp_single (gimple_stmt_iterator *gsi_p, omp_context *ctx)
{
  tree block;
  gomp_single *single_stmt = as_a <gomp_single *> (gsi_stmt (*gsi_p));
  gbind *bind;
  gimple_seq bind_body, bind_body_tail = NULL, dlist;

  push_gimplify_context ();

  /* Replace the GIMPLE_OMP_SINGLE in the stream with a bind whose body
     we build below.  */
  block = make_node (BLOCK);
  bind = gimple_build_bind (NULL, NULL, block);
  gsi_replace (gsi_p, bind, true);
  bind_body = NULL;
  dlist = NULL;
  /* DLIST collects the destructor/cleanup code for privatized vars.  */
  lower_rec_input_clauses (gimple_omp_single_clauses (single_stmt),
			   &bind_body, &dlist, ctx, NULL);
  lower_omp (gimple_omp_body_ptr (single_stmt), ctx);

  gimple_seq_add_stmt (&bind_body, single_stmt);

  /* With copyprivate clauses a broadcast record exists (ctx->record_type);
     choose the copy variant, otherwise the plain GOMP_single_start form.  */
  if (ctx->record_type)
    lower_omp_single_copy (single_stmt, &bind_body, ctx);
  else
    lower_omp_single_simple (single_stmt, &bind_body);

  /* The body has been inlined into BIND_BODY by the helpers above.  */
  gimple_omp_set_body (single_stmt, NULL);

  gimple_seq_add_seq (&bind_body, dlist);

  bind_body = maybe_catch_exception (bind_body);

  bool nowait = omp_find_clause (gimple_omp_single_clauses (single_stmt),
				 OMP_CLAUSE_NOWAIT) != NULL_TREE;
  gimple *g = gimple_build_omp_return (nowait);
  gimple_seq_add_stmt (&bind_body_tail, g);
  maybe_add_implicit_barrier_cancel (ctx, g, &bind_body_tail);
  if (ctx->record_type)
    {
      /* Clobber the broadcast record after the return/barrier so its
	 storage can be reused.  */
      gimple_stmt_iterator gsi = gsi_start (bind_body_tail);
      tree clobber = build_clobber (ctx->record_type);
      gsi_insert_after (&gsi, gimple_build_assign (ctx->sender_decl,
						   clobber), GSI_SAME_STMT);
    }
  gimple_seq_add_seq (&bind_body, bind_body_tail);
  gimple_bind_set_body (bind, bind_body);

  pop_gimplify_context (bind);

  gimple_bind_append_vars (bind, ctx->block_vars);
  BLOCK_VARS (block) = ctx->block_vars;
  if (BLOCK_VARS (block))
    TREE_USED (block) = 1;
}
8117
8118
8119 /* Expand code for an OpenMP master directive. */
8120
8121 static void
8122 lower_omp_master (gimple_stmt_iterator *gsi_p, omp_context *ctx)
8123 {
8124 tree block, lab = NULL, x, bfn_decl;
8125 gimple *stmt = gsi_stmt (*gsi_p);
8126 gbind *bind;
8127 location_t loc = gimple_location (stmt);
8128 gimple_seq tseq;
8129
8130 push_gimplify_context ();
8131
8132 block = make_node (BLOCK);
8133 bind = gimple_build_bind (NULL, NULL, block);
8134 gsi_replace (gsi_p, bind, true);
8135 gimple_bind_add_stmt (bind, stmt);
8136
8137 bfn_decl = builtin_decl_explicit (BUILT_IN_OMP_GET_THREAD_NUM);
8138 x = build_call_expr_loc (loc, bfn_decl, 0);
8139 x = build2 (EQ_EXPR, boolean_type_node, x, integer_zero_node);
8140 x = build3 (COND_EXPR, void_type_node, x, NULL, build_and_jump (&lab));
8141 tseq = NULL;
8142 gimplify_and_add (x, &tseq);
8143 gimple_bind_add_seq (bind, tseq);
8144
8145 lower_omp (gimple_omp_body_ptr (stmt), ctx);
8146 gimple_omp_set_body (stmt, maybe_catch_exception (gimple_omp_body (stmt)));
8147 gimple_bind_add_seq (bind, gimple_omp_body (stmt));
8148 gimple_omp_set_body (stmt, NULL);
8149
8150 gimple_bind_add_stmt (bind, gimple_build_label (lab));
8151
8152 gimple_bind_add_stmt (bind, gimple_build_omp_return (true));
8153
8154 pop_gimplify_context (bind);
8155
8156 gimple_bind_append_vars (bind, ctx->block_vars);
8157 BLOCK_VARS (block) = ctx->block_vars;
8158 }
8159
/* Helper function for lower_omp_task_reductions.  For a specific PASS,
   find the next clause that should be processed in it, or return false
   if all have been processed already.  */
8163
8164 static inline bool
8165 omp_task_reduction_iterate (int pass, enum tree_code code,
8166 enum omp_clause_code ccode, tree *c, tree *decl,
8167 tree *type, tree *next)
8168 {
8169 for (; *c; *c = omp_find_clause (OMP_CLAUSE_CHAIN (*c), ccode))
8170 {
8171 if (ccode == OMP_CLAUSE_REDUCTION
8172 && code != OMP_TASKLOOP
8173 && !OMP_CLAUSE_REDUCTION_TASK (*c))
8174 continue;
8175 *decl = OMP_CLAUSE_DECL (*c);
8176 *type = TREE_TYPE (*decl);
8177 if (TREE_CODE (*decl) == MEM_REF)
8178 {
8179 if (pass != 1)
8180 continue;
8181 }
8182 else
8183 {
8184 if (omp_is_reference (*decl))
8185 *type = TREE_TYPE (*type);
8186 if (pass != (!TREE_CONSTANT (TYPE_SIZE_UNIT (*type))))
8187 continue;
8188 }
8189 *next = omp_find_clause (OMP_CLAUSE_CHAIN (*c), ccode);
8190 return true;
8191 }
8192 *decl = NULL_TREE;
8193 *type = NULL_TREE;
8194 *next = NULL_TREE;
8195 return false;
8196 }
8197
8198 /* Lower task_reduction and reduction clauses (the latter unless CODE is
8199 OMP_TASKGROUP only with task modifier). Register mapping of those in
8200 START sequence and reducing them and unregister them in the END sequence. */
8201
static void
lower_omp_task_reductions (omp_context *ctx, enum tree_code code, tree clauses,
			   gimple_seq *start, gimple_seq *end)
{
  enum omp_clause_code ccode
    = (code == OMP_TASKGROUP
       ? OMP_CLAUSE_TASK_REDUCTION : OMP_CLAUSE_REDUCTION);
  tree cancellable = NULL_TREE;
  clauses = omp_task_reductions_find_first (clauses, code, ccode);
  if (clauses == NULL_TREE)
    return;
  if (code == OMP_FOR || code == OMP_SECTIONS)
    {
      /* For worksharing constructs nested (possibly through taskgroups)
	 in a cancellable parallel, note cancellability; the placeholder
	 error_mark_node is replaced by the _REDUCTEMP_ decl below.  */
      for (omp_context *outer = ctx->outer; outer; outer = outer->outer)
	if (gimple_code (outer->stmt) == GIMPLE_OMP_PARALLEL
	    && outer->cancellable)
	  {
	    cancellable = error_mark_node;
	    break;
	  }
	else if (gimple_code (outer->stmt) != GIMPLE_OMP_TASKGROUP)
	  break;
    }
  /* Build a record type with one {value, init-flag bool} field pair per
     reduction clause; each thread gets an instance of this record.  */
  tree record_type = lang_hooks.types.make_type (RECORD_TYPE);
  tree *last = &TYPE_FIELDS (record_type);
  unsigned cnt = 0;
  if (cancellable)
    {
      /* Cancellable constructs additionally reserve a pointer + int pair
	 at the start of the record.  */
      tree field = build_decl (UNKNOWN_LOCATION, FIELD_DECL, NULL_TREE,
			       ptr_type_node);
      tree ifield = build_decl (UNKNOWN_LOCATION, FIELD_DECL, NULL_TREE,
				integer_type_node);
      *last = field;
      DECL_CHAIN (field) = ifield;
      last = &DECL_CHAIN (ifield);
      DECL_CONTEXT (field) = record_type;
      if (TYPE_ALIGN (record_type) < DECL_ALIGN (field))
	SET_TYPE_ALIGN (record_type, DECL_ALIGN (field));
      DECL_CONTEXT (ifield) = record_type;
      if (TYPE_ALIGN (record_type) < DECL_ALIGN (ifield))
	SET_TYPE_ALIGN (record_type, DECL_ALIGN (ifield));
    }
  /* Two passes, in the order defined by omp_task_reduction_iterate:
     pass 0 lays out constant-sized non-MEM_REF reductions, pass 1 the
     rest, so the later walk over TYPE_FIELDS can mirror this order.  */
  for (int pass = 0; pass < 2; pass++)
    {
      tree decl, type, next;
      for (tree c = clauses;
	   omp_task_reduction_iterate (pass, code, ccode,
				       &c, &decl, &type, &next); c = next)
	{
	  ++cnt;
	  tree new_type = type;
	  if (ctx->outer)
	    new_type = remap_type (type, &ctx->outer->cb);
	  tree field
	    = build_decl (OMP_CLAUSE_LOCATION (c), FIELD_DECL,
			  DECL_P (decl) ? DECL_NAME (decl) : NULL_TREE,
			  new_type);
	  if (DECL_P (decl) && type == TREE_TYPE (decl))
	    {
	      SET_DECL_ALIGN (field, DECL_ALIGN (decl));
	      DECL_USER_ALIGN (field) = DECL_USER_ALIGN (decl);
	      TREE_THIS_VOLATILE (field) = TREE_THIS_VOLATILE (decl);
	    }
	  else
	    SET_DECL_ALIGN (field, TYPE_ALIGN (type));
	  DECL_CONTEXT (field) = record_type;
	  if (TYPE_ALIGN (record_type) < DECL_ALIGN (field))
	    SET_TYPE_ALIGN (record_type, DECL_ALIGN (field));
	  *last = field;
	  last = &DECL_CHAIN (field);
	  /* The bool right after the value records whether this thread's
	     private copy has been initialized.  */
	  tree bfield
	    = build_decl (OMP_CLAUSE_LOCATION (c), FIELD_DECL, NULL_TREE,
			  boolean_type_node);
	  DECL_CONTEXT (bfield) = record_type;
	  if (TYPE_ALIGN (record_type) < DECL_ALIGN (bfield))
	    SET_TYPE_ALIGN (record_type, DECL_ALIGN (bfield));
	  *last = bfield;
	  last = &DECL_CHAIN (bfield);
	}
    }
  *last = NULL_TREE;
  layout_type (record_type);

  /* Build up an array which registers with the runtime all the reductions
     and deregisters them at the end.  Format documented in libgomp/task.c.  */
  tree atype = build_array_type_nelts (pointer_sized_int_node, 7 + cnt * 3);
  tree avar = create_tmp_var_raw (atype);
  gimple_add_tmp_var (avar);
  TREE_ADDRESSABLE (avar) = 1;
  /* [0]: number of reductions.  */
  tree r = build4 (ARRAY_REF, pointer_sized_int_node, avar, size_zero_node,
		   NULL_TREE, NULL_TREE);
  tree t = build_int_cst (pointer_sized_int_node, cnt);
  gimple_seq_add_stmt (start, gimple_build_assign (r, t));
  gimple_seq seq = NULL;
  /* [1]: per-thread chunk size = sizeof (record) rounded up to a
     cacheline multiple.  */
  tree sz = fold_convert (pointer_sized_int_node,
			  TYPE_SIZE_UNIT (record_type));
  int cachesz = 64;
  sz = fold_build2 (PLUS_EXPR, pointer_sized_int_node, sz,
		    build_int_cst (pointer_sized_int_node, cachesz - 1));
  sz = fold_build2 (BIT_AND_EXPR, pointer_sized_int_node, sz,
		    build_int_cst (pointer_sized_int_node, ~(cachesz - 1)));
  ctx->task_reductions.create (1 + cnt);
  ctx->task_reduction_map = new hash_map<tree, unsigned>;
  ctx->task_reductions.quick_push (TREE_CODE (sz) == INTEGER_CST
				   ? sz : NULL_TREE);
  sz = force_gimple_operand (sz, &seq, true, NULL_TREE);
  gimple_seq_add_seq (start, seq);
  r = build4 (ARRAY_REF, pointer_sized_int_node, avar, size_one_node,
	      NULL_TREE, NULL_TREE);
  gimple_seq_add_stmt (start, gimple_build_assign (r, sz));
  /* [2]: required alignment.  */
  r = build4 (ARRAY_REF, pointer_sized_int_node, avar, size_int (2),
	      NULL_TREE, NULL_TREE);
  t = build_int_cst (pointer_sized_int_node,
		     MAX (TYPE_ALIGN_UNIT (record_type), (unsigned) cachesz));
  gimple_seq_add_stmt (start, gimple_build_assign (r, t));
  /* [3] and [4]: initialized to -1 and 0; runtime-owned slots (see
     libgomp/task.c for their exact meaning).  */
  r = build4 (ARRAY_REF, pointer_sized_int_node, avar, size_int (3),
	      NULL_TREE, NULL_TREE);
  t = build_int_cst (pointer_sized_int_node, -1);
  gimple_seq_add_stmt (start, gimple_build_assign (r, t));
  r = build4 (ARRAY_REF, pointer_sized_int_node, avar, size_int (4),
	      NULL_TREE, NULL_TREE);
  t = build_int_cst (pointer_sized_int_node, 0);
  gimple_seq_add_stmt (start, gimple_build_assign (r, t));

  /* In end, build a loop that iterates from 0 to < omp_get_num_threads ()
     and for each task reduction checks a bool right after the private variable
     within that thread's chunk; if the bool is clear, it hasn't been
     initialized and thus isn't going to be reduced nor destructed, otherwise
     reduce and destruct it.  */
  tree idx = create_tmp_var (size_type_node);
  gimple_seq_add_stmt (end, gimple_build_assign (idx, size_zero_node));
  tree num_thr_sz = create_tmp_var (size_type_node);
  tree lab1 = create_artificial_label (UNKNOWN_LOCATION);
  tree lab2 = create_artificial_label (UNKNOWN_LOCATION);
  tree lab3 = NULL_TREE;
  gimple *g;
  if (code == OMP_FOR || code == OMP_SECTIONS)
    {
      /* For worksharing constructs, only perform it in the master thread,
	 with the exception of cancelled implicit barriers - then only handle
	 the current thread.  */
      tree lab4 = create_artificial_label (UNKNOWN_LOCATION);
      t = builtin_decl_explicit (BUILT_IN_OMP_GET_THREAD_NUM);
      tree thr_num = create_tmp_var (integer_type_node);
      g = gimple_build_call (t, 0);
      gimple_call_set_lhs (g, thr_num);
      gimple_seq_add_stmt (end, g);
      if (cancellable)
	{
	  tree c;
	  tree lab5 = create_artificial_label (UNKNOWN_LOCATION);
	  tree lab6 = create_artificial_label (UNKNOWN_LOCATION);
	  lab3 = create_artificial_label (UNKNOWN_LOCATION);
	  if (code == OMP_FOR)
	    c = gimple_omp_for_clauses (ctx->stmt);
	  else /* if (code == OMP_SECTIONS) */
	    c = gimple_omp_sections_clauses (ctx->stmt);
	  /* Replace the error_mark_node placeholder with the actual
	     _REDUCTEMP_ decl; non-zero there means cancelled.  */
	  c = OMP_CLAUSE_DECL (omp_find_clause (c, OMP_CLAUSE__REDUCTEMP_));
	  cancellable = c;
	  g = gimple_build_cond (NE_EXPR, c, build_zero_cst (TREE_TYPE (c)),
				 lab5, lab6);
	  gimple_seq_add_stmt (end, g);
	  gimple_seq_add_stmt (end, gimple_build_label (lab5));
	  /* Cancelled: process only this thread's chunk, i.e. iterate
	     idx in [thr_num, thr_num + 1).  */
	  g = gimple_build_assign (idx, NOP_EXPR, thr_num);
	  gimple_seq_add_stmt (end, g);
	  g = gimple_build_assign (num_thr_sz, PLUS_EXPR, idx,
				   build_one_cst (TREE_TYPE (idx)));
	  gimple_seq_add_stmt (end, g);
	  gimple_seq_add_stmt (end, gimple_build_goto (lab3));
	  gimple_seq_add_stmt (end, gimple_build_label (lab6));
	}
      g = gimple_build_cond (NE_EXPR, thr_num, integer_zero_node, lab2, lab4);
      gimple_seq_add_stmt (end, g);
      gimple_seq_add_stmt (end, gimple_build_label (lab4));
    }
  /* Compute the loop bound NUM_THR_SZ: omp_get_num_threads (), or for
     parallel the count stored in the _REDUCTEMP_ temporary.  */
  if (code != OMP_PARALLEL)
    {
      t = builtin_decl_explicit (BUILT_IN_OMP_GET_NUM_THREADS);
      tree num_thr = create_tmp_var (integer_type_node);
      g = gimple_build_call (t, 0);
      gimple_call_set_lhs (g, num_thr);
      gimple_seq_add_stmt (end, g);
      g = gimple_build_assign (num_thr_sz, NOP_EXPR, num_thr);
      gimple_seq_add_stmt (end, g);
      if (cancellable)
	gimple_seq_add_stmt (end, gimple_build_label (lab3));
    }
  else
    {
      tree c = omp_find_clause (gimple_omp_parallel_clauses (ctx->stmt),
				OMP_CLAUSE__REDUCTEMP_);
      t = fold_convert (pointer_sized_int_node, OMP_CLAUSE_DECL (c));
      t = fold_convert (size_type_node, t);
      gimplify_assign (num_thr_sz, t, end);
    }
  /* DATA walks the per-thread chunks; it starts at avar[2] and is
     advanced by SZ each iteration (see end of function).  */
  t = build4 (ARRAY_REF, pointer_sized_int_node, avar, size_int (2),
	      NULL_TREE, NULL_TREE);
  tree data = create_tmp_var (pointer_sized_int_node);
  gimple_seq_add_stmt (end, gimple_build_assign (data, t));
  gimple_seq_add_stmt (end, gimple_build_label (lab1));
  tree ptr;
  if (TREE_CODE (TYPE_SIZE_UNIT (record_type)) == INTEGER_CST)
    ptr = create_tmp_var (build_pointer_type (record_type));
  else
    ptr = create_tmp_var (ptr_type_node);
  gimple_seq_add_stmt (end, gimple_build_assign (ptr, NOP_EXPR, data));

  /* Walk the clauses again in the same two-pass order as above, filling
     the register-array entries in START and emitting the per-thread
     check/reduce/destruct code in END; FIELD tracks the matching record
     field pair.  */
  tree field = TYPE_FIELDS (record_type);
  cnt = 0;
  if (cancellable)
    field = DECL_CHAIN (DECL_CHAIN (field));
  for (int pass = 0; pass < 2; pass++)
    {
      tree decl, type, next;
      for (tree c = clauses;
	   omp_task_reduction_iterate (pass, code, ccode,
				       &c, &decl, &type, &next); c = next)
	{
	  tree var = decl, ref;
	  if (TREE_CODE (decl) == MEM_REF)
	    {
	      /* Array-section reduction: peel the MEM_REF down to the
		 underlying base decl.  */
	      var = TREE_OPERAND (var, 0);
	      if (TREE_CODE (var) == POINTER_PLUS_EXPR)
		var = TREE_OPERAND (var, 0);
	      tree v = var;
	      if (TREE_CODE (var) == ADDR_EXPR)
		var = TREE_OPERAND (var, 0);
	      else if (TREE_CODE (var) == INDIRECT_REF)
		var = TREE_OPERAND (var, 0);
	      tree orig_var = var;
	      if (is_variable_sized (var))
		{
		  gcc_assert (DECL_HAS_VALUE_EXPR_P (var));
		  var = DECL_VALUE_EXPR (var);
		  gcc_assert (TREE_CODE (var) == INDIRECT_REF);
		  var = TREE_OPERAND (var, 0);
		  gcc_assert (DECL_P (var));
		}
	      t = ref = maybe_lookup_decl_in_outer_ctx (var, ctx);
	      if (orig_var != var)
		gcc_assert (TREE_CODE (v) == ADDR_EXPR);
	      else if (TREE_CODE (v) == ADDR_EXPR)
		t = build_fold_addr_expr (t);
	      else if (TREE_CODE (v) == INDIRECT_REF)
		t = build_fold_indirect_ref (t);
	      /* Re-apply the offsets the MEM_REF carried.  */
	      if (TREE_CODE (TREE_OPERAND (decl, 0)) == POINTER_PLUS_EXPR)
		{
		  tree b = TREE_OPERAND (TREE_OPERAND (decl, 0), 1);
		  b = maybe_lookup_decl_in_outer_ctx (b, ctx);
		  t = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (t), t, b);
		}
	      if (!integer_zerop (TREE_OPERAND (decl, 1)))
		t = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (t), t,
				 fold_convert (size_type_node,
					       TREE_OPERAND (decl, 1)));
	    }
	  else
	    {
	      t = ref = maybe_lookup_decl_in_outer_ctx (var, ctx);
	      if (!omp_is_reference (decl))
		t = build_fold_addr_expr (t);
	    }
	  /* avar[7 + cnt*3]: address of the original (shared) variable.  */
	  t = fold_convert (pointer_sized_int_node, t);
	  seq = NULL;
	  t = force_gimple_operand (t, &seq, true, NULL_TREE);
	  gimple_seq_add_seq (start, seq);
	  r = build4 (ARRAY_REF, pointer_sized_int_node, avar,
		      size_int (7 + cnt * 3), NULL_TREE, NULL_TREE);
	  gimple_seq_add_stmt (start, gimple_build_assign (r, t));
	  /* avar[7 + cnt*3 + 1]: offset of this reduction's field within
	     the per-thread record.  */
	  t = unshare_expr (byte_position (field));
	  t = fold_convert (pointer_sized_int_node, t);
	  ctx->task_reduction_map->put (c, cnt);
	  ctx->task_reductions.quick_push (TREE_CODE (t) == INTEGER_CST
					   ? t : NULL_TREE);
	  seq = NULL;
	  t = force_gimple_operand (t, &seq, true, NULL_TREE);
	  gimple_seq_add_seq (start, seq);
	  r = build4 (ARRAY_REF, pointer_sized_int_node, avar,
		      size_int (7 + cnt * 3 + 1), NULL_TREE, NULL_TREE);
	  gimple_seq_add_stmt (start, gimple_build_assign (r, t));

	  /* COND tests the init-flag bool next to the private value.  */
	  tree bfield = DECL_CHAIN (field);
	  tree cond;
	  if (code == OMP_PARALLEL || code == OMP_FOR || code == OMP_SECTIONS)
	    /* In parallel or worksharing all threads unconditionally
	       initialize all their task reduction private variables.  */
	    cond = boolean_true_node;
	  else if (TREE_TYPE (ptr) == ptr_type_node)
	    {
	      cond = build2 (POINTER_PLUS_EXPR, ptr_type_node, ptr,
			     unshare_expr (byte_position (bfield)));
	      seq = NULL;
	      cond = force_gimple_operand (cond, &seq, true, NULL_TREE);
	      gimple_seq_add_seq (end, seq);
	      tree pbool = build_pointer_type (TREE_TYPE (bfield));
	      cond = build2 (MEM_REF, TREE_TYPE (bfield), cond,
			     build_int_cst (pbool, 0));
	    }
	  else
	    cond = build3 (COMPONENT_REF, TREE_TYPE (bfield),
			   build_simple_mem_ref (ptr), bfield, NULL_TREE);
	  tree lab3 = create_artificial_label (UNKNOWN_LOCATION);
	  tree lab4 = create_artificial_label (UNKNOWN_LOCATION);
	  tree condv = create_tmp_var (boolean_type_node);
	  gimple_seq_add_stmt (end, gimple_build_assign (condv, cond));
	  g = gimple_build_cond (NE_EXPR, condv, boolean_false_node,
				 lab3, lab4);
	  gimple_seq_add_stmt (end, g);
	  gimple_seq_add_stmt (end, gimple_build_label (lab3));
	  if (cancellable && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) == NULL_TREE)
	    {
	      /* If this reduction doesn't need destruction and parallel
		 has been cancelled, there is nothing to do for this
		 reduction, so jump around the merge operation.  */
	      tree lab5 = create_artificial_label (UNKNOWN_LOCATION);
	      g = gimple_build_cond (NE_EXPR, cancellable,
				     build_zero_cst (TREE_TYPE (cancellable)),
				     lab4, lab5);
	      gimple_seq_add_stmt (end, g);
	      gimple_seq_add_stmt (end, gimple_build_label (lab5));
	    }

	  /* NEW_VAR: this thread's private copy within the record.  */
	  tree new_var;
	  if (TREE_TYPE (ptr) == ptr_type_node)
	    {
	      new_var = build2 (POINTER_PLUS_EXPR, ptr_type_node, ptr,
				unshare_expr (byte_position (field)));
	      seq = NULL;
	      new_var = force_gimple_operand (new_var, &seq, true, NULL_TREE);
	      gimple_seq_add_seq (end, seq);
	      tree pbool = build_pointer_type (TREE_TYPE (field));
	      new_var = build2 (MEM_REF, TREE_TYPE (field), new_var,
				build_int_cst (pbool, 0));
	    }
	  else
	    new_var = build3 (COMPONENT_REF, TREE_TYPE (field),
			      build_simple_mem_ref (ptr), field, NULL_TREE);

	  enum tree_code rcode = OMP_CLAUSE_REDUCTION_CODE (c);
	  if (TREE_CODE (decl) != MEM_REF && omp_is_reference (decl))
	    ref = build_simple_mem_ref (ref);
	  /* reduction(-:var) sums up the partial results, so it acts
	     identically to reduction(+:var).  */
	  if (rcode == MINUS_EXPR)
	    rcode = PLUS_EXPR;
	  if (TREE_CODE (decl) == MEM_REF)
	    {
	      /* Array-section reduction: emit an element-by-element loop
		 merging the private array into the original.  */
	      tree type = TREE_TYPE (new_var);
	      tree v = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
	      tree i = create_tmp_var (TREE_TYPE (v));
	      tree ptype = build_pointer_type (TREE_TYPE (type));
	      if (DECL_P (v))
		{
		  v = maybe_lookup_decl_in_outer_ctx (v, ctx);
		  tree vv = create_tmp_var (TREE_TYPE (v));
		  gimplify_assign (vv, v, start);
		  v = vv;
		}
	      ref = build4 (ARRAY_REF, pointer_sized_int_node, avar,
			    size_int (7 + cnt * 3), NULL_TREE, NULL_TREE);
	      new_var = build_fold_addr_expr (new_var);
	      new_var = fold_convert (ptype, new_var);
	      ref = fold_convert (ptype, ref);
	      tree m = create_tmp_var (ptype);
	      gimplify_assign (m, new_var, end);
	      new_var = m;
	      m = create_tmp_var (ptype);
	      gimplify_assign (m, ref, end);
	      ref = m;
	      gimplify_assign (i, build_int_cst (TREE_TYPE (v), 0), end);
	      tree body = create_artificial_label (UNKNOWN_LOCATION);
	      tree endl = create_artificial_label (UNKNOWN_LOCATION);
	      gimple_seq_add_stmt (end, gimple_build_label (body));
	      tree priv = build_simple_mem_ref (new_var);
	      tree out = build_simple_mem_ref (ref);
	      if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
		{
		  /* User-defined reduction: splice in the combiner with
		     the placeholders bound to out/priv.  */
		  tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
		  tree decl_placeholder
		    = OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c);
		  tree lab6 = NULL_TREE;
		  if (cancellable)
		    {
		      /* If this reduction needs destruction and parallel
			 has been cancelled, jump around the merge operation
			 to the destruction.  */
		      tree lab5 = create_artificial_label (UNKNOWN_LOCATION);
		      lab6 = create_artificial_label (UNKNOWN_LOCATION);
		      tree zero = build_zero_cst (TREE_TYPE (cancellable));
		      g = gimple_build_cond (NE_EXPR, cancellable, zero,
					     lab6, lab5);
		      gimple_seq_add_stmt (end, g);
		      gimple_seq_add_stmt (end, gimple_build_label (lab5));
		    }
		  SET_DECL_VALUE_EXPR (placeholder, out);
		  DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
		  SET_DECL_VALUE_EXPR (decl_placeholder, priv);
		  DECL_HAS_VALUE_EXPR_P (decl_placeholder) = 1;
		  lower_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c), ctx);
		  gimple_seq_add_seq (end,
				      OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c));
		  OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
		  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_TASK_REDUCTION)
		    {
		      OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) = NULL;
		      OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c) = NULL;
		    }
		  if (cancellable)
		    gimple_seq_add_stmt (end, gimple_build_label (lab6));
		  tree x = lang_hooks.decls.omp_clause_dtor (c, priv);
		  if (x)
		    {
		      gimple_seq tseq = NULL;
		      gimplify_stmt (&x, &tseq);
		      gimple_seq_add_seq (end, tseq);
		    }
		}
	      else
		{
		  /* Built-in operator: out = out RCODE priv.  */
		  tree x = build2 (rcode, TREE_TYPE (out), out, priv);
		  out = unshare_expr (out);
		  gimplify_assign (out, x, end);
		}
	      /* Advance both element pointers and the index, loop while
		 i <= max index.  */
	      gimple *g
		= gimple_build_assign (new_var, POINTER_PLUS_EXPR, new_var,
				       TYPE_SIZE_UNIT (TREE_TYPE (type)));
	      gimple_seq_add_stmt (end, g);
	      g = gimple_build_assign (ref, POINTER_PLUS_EXPR, ref,
				       TYPE_SIZE_UNIT (TREE_TYPE (type)));
	      gimple_seq_add_stmt (end, g);
	      g = gimple_build_assign (i, PLUS_EXPR, i,
				       build_int_cst (TREE_TYPE (i), 1));
	      gimple_seq_add_stmt (end, g);
	      g = gimple_build_cond (LE_EXPR, i, v, body, endl);
	      gimple_seq_add_stmt (end, g);
	      gimple_seq_add_stmt (end, gimple_build_label (endl));
	    }
	  else if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
	    {
	      /* Scalar user-defined reduction.  */
	      tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
	      tree oldv = NULL_TREE;
	      tree lab6 = NULL_TREE;
	      if (cancellable)
		{
		  /* If this reduction needs destruction and parallel
		     has been cancelled, jump around the merge operation
		     to the destruction.  */
		  tree lab5 = create_artificial_label (UNKNOWN_LOCATION);
		  lab6 = create_artificial_label (UNKNOWN_LOCATION);
		  tree zero = build_zero_cst (TREE_TYPE (cancellable));
		  g = gimple_build_cond (NE_EXPR, cancellable, zero,
					 lab6, lab5);
		  gimple_seq_add_stmt (end, g);
		  gimple_seq_add_stmt (end, gimple_build_label (lab5));
		}
	      if (omp_is_reference (decl)
		  && !useless_type_conversion_p (TREE_TYPE (placeholder),
						 TREE_TYPE (ref)))
		ref = build_fold_addr_expr_loc (OMP_CLAUSE_LOCATION (c), ref);
	      ref = build_fold_addr_expr_loc (OMP_CLAUSE_LOCATION (c), ref);
	      tree refv = create_tmp_var (TREE_TYPE (ref));
	      gimplify_assign (refv, ref, end);
	      ref = build_simple_mem_ref_loc (OMP_CLAUSE_LOCATION (c), refv);
	      SET_DECL_VALUE_EXPR (placeholder, ref);
	      DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
	      /* Temporarily redirect DECL's lowered copy to the private
		 record slot while lowering the merge; restored below.  */
	      tree d = maybe_lookup_decl (decl, ctx);
	      gcc_assert (d);
	      if (DECL_HAS_VALUE_EXPR_P (d))
		oldv = DECL_VALUE_EXPR (d);
	      if (omp_is_reference (var))
		{
		  tree v = fold_convert (TREE_TYPE (d),
					 build_fold_addr_expr (new_var));
		  SET_DECL_VALUE_EXPR (d, v);
		}
	      else
		SET_DECL_VALUE_EXPR (d, new_var);
	      DECL_HAS_VALUE_EXPR_P (d) = 1;
	      lower_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c), ctx);
	      if (oldv)
		SET_DECL_VALUE_EXPR (d, oldv);
	      else
		{
		  SET_DECL_VALUE_EXPR (d, NULL_TREE);
		  DECL_HAS_VALUE_EXPR_P (d) = 0;
		}
	      gimple_seq_add_seq (end, OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c));
	      OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
	      if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_TASK_REDUCTION)
		OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) = NULL;
	      if (cancellable)
		gimple_seq_add_stmt (end, gimple_build_label (lab6));
	      tree x = lang_hooks.decls.omp_clause_dtor (c, new_var);
	      if (x)
		{
		  gimple_seq tseq = NULL;
		  gimplify_stmt (&x, &tseq);
		  gimple_seq_add_seq (end, tseq);
		}
	    }
	  else
	    {
	      /* Scalar built-in operator: ref = ref RCODE new_var.  */
	      tree x = build2 (rcode, TREE_TYPE (ref), ref, new_var);
	      ref = unshare_expr (ref);
	      gimplify_assign (ref, x, end);
	    }
	  gimple_seq_add_stmt (end, gimple_build_label (lab4));
	  ++cnt;
	  field = DECL_CHAIN (bfield);
	}
    }

  /* Register the array with the runtime in START: taskgroup constructs
     call the runtime directly, the others publish the array's address
     through their _REDUCTEMP_ clause.  */
  if (code == OMP_TASKGROUP)
    {
      t = builtin_decl_explicit (BUILT_IN_GOMP_TASKGROUP_REDUCTION_REGISTER);
      g = gimple_build_call (t, 1, build_fold_addr_expr (avar));
      gimple_seq_add_stmt (start, g);
    }
  else
    {
      tree c;
      if (code == OMP_FOR)
	c = gimple_omp_for_clauses (ctx->stmt);
      else if (code == OMP_SECTIONS)
	c = gimple_omp_sections_clauses (ctx->stmt);
      else
	c = gimple_omp_taskreg_clauses (ctx->stmt);
      c = omp_find_clause (c, OMP_CLAUSE__REDUCTEMP_);
      t = fold_convert (TREE_TYPE (OMP_CLAUSE_DECL (c)),
			build_fold_addr_expr (avar));
      gimplify_assign (OMP_CLAUSE_DECL (c), t, start);
    }

  /* Close the per-thread loop in END: advance DATA/IDX and iterate.  */
  gimple_seq_add_stmt (end, gimple_build_assign (data, PLUS_EXPR, data, sz));
  gimple_seq_add_stmt (end, gimple_build_assign (idx, PLUS_EXPR, idx,
						 size_one_node));
  g = gimple_build_cond (NE_EXPR, idx, num_thr_sz, lab1, lab2);
  gimple_seq_add_stmt (end, g);
  gimple_seq_add_stmt (end, gimple_build_label (lab2));
  /* Unregister with the runtime and clobber the array.  */
  if (code == OMP_FOR || code == OMP_SECTIONS)
    {
      enum built_in_function bfn
	= BUILT_IN_GOMP_WORKSHARE_TASK_REDUCTION_UNREGISTER;
      t = builtin_decl_explicit (bfn);
      tree c_bool_type = TREE_VALUE (TYPE_ARG_TYPES (TREE_TYPE (t)));
      tree arg;
      if (cancellable)
	{
	  arg = create_tmp_var (c_bool_type);
	  gimple_seq_add_stmt (end, gimple_build_assign (arg, NOP_EXPR,
							 cancellable));
	}
      else
	arg = build_int_cst (c_bool_type, 0);
      g = gimple_build_call (t, 1, arg);
    }
  else
    {
      t = builtin_decl_explicit (BUILT_IN_GOMP_TASKGROUP_REDUCTION_UNREGISTER);
      g = gimple_build_call (t, 1, build_fold_addr_expr (avar));
    }
  gimple_seq_add_stmt (end, g);
  t = build_constructor (atype, NULL);
  TREE_THIS_VOLATILE (t) = 1;
  gimple_seq_add_stmt (end, gimple_build_assign (avar, t));
}
8768
8769 /* Expand code for an OpenMP taskgroup directive. */
8770
8771 static void
8772 lower_omp_taskgroup (gimple_stmt_iterator *gsi_p, omp_context *ctx)
8773 {
8774 gimple *stmt = gsi_stmt (*gsi_p);
8775 gcall *x;
8776 gbind *bind;
8777 gimple_seq dseq = NULL;
8778 tree block = make_node (BLOCK);
8779
8780 bind = gimple_build_bind (NULL, NULL, block);
8781 gsi_replace (gsi_p, bind, true);
8782 gimple_bind_add_stmt (bind, stmt);
8783
8784 push_gimplify_context ();
8785
8786 x = gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_TASKGROUP_START),
8787 0);
8788 gimple_bind_add_stmt (bind, x);
8789
8790 lower_omp_task_reductions (ctx, OMP_TASKGROUP,
8791 gimple_omp_taskgroup_clauses (stmt),
8792 gimple_bind_body_ptr (bind), &dseq);
8793
8794 lower_omp (gimple_omp_body_ptr (stmt), ctx);
8795 gimple_bind_add_seq (bind, gimple_omp_body (stmt));
8796 gimple_omp_set_body (stmt, NULL);
8797
8798 gimple_bind_add_stmt (bind, gimple_build_omp_return (true));
8799 gimple_bind_add_seq (bind, dseq);
8800
8801 pop_gimplify_context (bind);
8802
8803 gimple_bind_append_vars (bind, ctx->block_vars);
8804 BLOCK_VARS (block) = ctx->block_vars;
8805 }
8806
8807
8808 /* Fold the OMP_ORDERED_CLAUSES for the OMP_ORDERED in STMT if possible. */
8809
static void
lower_omp_ordered_clauses (gimple_stmt_iterator *gsi_p, gomp_ordered *ord_stmt,
			   omp_context *ctx)
{
  struct omp_for_data fd;
  /* Folding only applies when the ordered construct is nested directly
     inside a GIMPLE_OMP_FOR; otherwise there is nothing to fold against.  */
  if (!ctx->outer || gimple_code (ctx->outer->stmt) != GIMPLE_OMP_FOR)
    return;

  unsigned int len = gimple_omp_for_collapse (ctx->outer->stmt);
  struct omp_for_data_loop *loops = XALLOCAVEC (struct omp_for_data_loop, len);
  omp_extract_for_data (as_a <gomp_for *> (ctx->outer->stmt), &fd, loops);
  /* Without an ordered(N) clause on the enclosing loop there are no
     cross-iteration dependences to fold.  */
  if (!fd.ordered)
    return;

  tree *list_p = gimple_omp_ordered_clauses_ptr (ord_stmt);
  tree c = gimple_omp_ordered_clauses (ord_stmt);
  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND
      && OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SINK)
    {
      /* Merge depend clauses from multiple adjacent
	 #pragma omp ordered depend(sink:...) constructs
	 into one #pragma omp ordered depend(sink:...), so that
	 we can optimize them together.  */
      gimple_stmt_iterator gsi = *gsi_p;
      gsi_next (&gsi);
      while (!gsi_end_p (gsi))
	{
	  gimple *stmt = gsi_stmt (gsi);
	  /* Debug stmts and nops between the ordered constructs do not
	     break adjacency; skip over them.  */
	  if (is_gimple_debug (stmt)
	      || gimple_code (stmt) == GIMPLE_NOP)
	    {
	      gsi_next (&gsi);
	      continue;
	    }
	  if (gimple_code (stmt) != GIMPLE_OMP_ORDERED)
	    break;
	  gomp_ordered *ord_stmt2 = as_a <gomp_ordered *> (stmt);
	  c = gimple_omp_ordered_clauses (ord_stmt2);
	  /* Only merge ordered constructs that are themselves pure
	     depend(sink:...) forms.  */
	  if (c == NULL_TREE
	      || OMP_CLAUSE_CODE (c) != OMP_CLAUSE_DEPEND
	      || OMP_CLAUSE_DEPEND_KIND (c) != OMP_CLAUSE_DEPEND_SINK)
	    break;
	  /* Splice the second construct's clause chain onto the end of
	     ORD_STMT's list and delete the now-empty construct.  */
	  while (*list_p)
	    list_p = &OMP_CLAUSE_CHAIN (*list_p);
	  *list_p = c;
	  gsi_remove (&gsi, true);
	}
    }

  /* Canonicalize sink dependence clauses into one folded clause if
     possible.

     The basic algorithm is to create a sink vector whose first
     element is the GCD of all the first elements, and whose remaining
     elements are the minimum of the subsequent columns.

     We ignore dependence vectors whose first element is zero because
     such dependencies are known to be executed by the same thread.

     We take into account the direction of the loop, so a minimum
     becomes a maximum if the loop is iterating forwards.  We also
     ignore sink clauses where the loop direction is unknown, or where
     the offsets are clearly invalid because they are not a multiple
     of the loop increment.

     For example:

	#pragma omp for ordered(2)
	for (i=0; i < N; ++i)
	  for (j=0; j < M; ++j)
	    {
	      #pragma omp ordered \
		depend(sink:i-8,j-2) \
		depend(sink:i,j-1) \	// Completely ignored because i+0.
		depend(sink:i-4,j-3) \
		depend(sink:i-6,j-4)
	      #pragma omp ordered depend(source)
	    }

     Folded clause is:

	depend(sink:-gcd(8,4,6),-min(2,3,4))
	  -or-
	depend(sink:-2,-2)
  */

  /* FIXME: Computing GCD's where the first element is zero is
     non-trivial in the presence of collapsed loops.  Do this later.  */
  if (fd.collapse > 1)
    return;

  /* folded_deps[0 .. len-1] holds the folded vector being built;
     folded_deps[len .. 2*len-2] temporarily holds columns 1..len-1 of the
     clause currently being examined.  */
  wide_int *folded_deps = XALLOCAVEC (wide_int, 2 * len - 1);

  /* wide_int is not a POD so it must be default-constructed.  */
  for (unsigned i = 0; i != 2 * len - 1; ++i)
    new (static_cast<void*>(folded_deps + i)) wide_int ();

  /* The clause whose vector is currently lexically latest; it survives
     the pruning below and receives the folded offsets.  */
  tree folded_dep = NULL_TREE;
  /* TRUE if the first dimension's offset is negative.  */
  bool neg_offset_p = false;

  list_p = gimple_omp_ordered_clauses_ptr (ord_stmt);
  unsigned int i;
  while ((c = *list_p) != NULL)
    {
      bool remove = false;

      gcc_assert (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND);
      if (OMP_CLAUSE_DEPEND_KIND (c) != OMP_CLAUSE_DEPEND_SINK)
	goto next_ordered_clause;

      tree vec;
      /* Walk the TREE_LIST of (offset, iterator) pairs, one per loop
	 dimension.  */
      for (vec = OMP_CLAUSE_DECL (c), i = 0;
	   vec && TREE_CODE (vec) == TREE_LIST;
	   vec = TREE_CHAIN (vec), ++i)
	{
	  gcc_assert (i < len);

	  /* omp_extract_for_data has canonicalized the condition.  */
	  gcc_assert (fd.loops[i].cond_code == LT_EXPR
		      || fd.loops[i].cond_code == GT_EXPR);
	  bool forward = fd.loops[i].cond_code == LT_EXPR;
	  bool maybe_lexically_later = true;

	  /* While the committee makes up its mind, bail if we have any
	     non-constant steps.  */
	  if (TREE_CODE (fd.loops[i].step) != INTEGER_CST)
	    goto lower_omp_ordered_ret;

	  tree itype = TREE_TYPE (TREE_VALUE (vec));
	  /* Pointer iterators use sizetype arithmetic.  */
	  if (POINTER_TYPE_P (itype))
	    itype = sizetype;
	  wide_int offset = wide_int::from (wi::to_wide (TREE_PURPOSE (vec)),
					    TYPE_PRECISION (itype),
					    TYPE_SIGN (itype));

	  /* Ignore invalid offsets that are not multiples of the step.  */
	  if (!wi::multiple_of_p (wi::abs (offset),
				  wi::abs (wi::to_wide (fd.loops[i].step)),
				  UNSIGNED))
	    {
	      warning_at (OMP_CLAUSE_LOCATION (c), 0,
			  "ignoring sink clause with offset that is not "
			  "a multiple of the loop step");
	      remove = true;
	      goto next_ordered_clause;
	    }

	  /* Calculate the first dimension.  The first dimension of
	     the folded dependency vector is the GCD of the first
	     elements, while ignoring any first elements whose offset
	     is 0.  */
	  if (i == 0)
	    {
	      /* Ignore dependence vectors whose first dimension is 0.  */
	      if (offset == 0)
		{
		  remove = true;
		  goto next_ordered_clause;
		}
	      else
		{
		  /* A sink must refer to an earlier iteration, i.e. the
		     offset must point against the iteration direction.  */
		  if (!TYPE_UNSIGNED (itype) && (forward ^ wi::neg_p (offset)))
		    {
		      error_at (OMP_CLAUSE_LOCATION (c),
				"first offset must be in opposite direction "
				"of loop iterations");
		      goto lower_omp_ordered_ret;
		    }
		  /* Normalize so the magnitude is positive for the GCD;
		     the sign is restored at the end via NEG_OFFSET_P.  */
		  if (forward)
		    offset = -offset;
		  neg_offset_p = forward;
		  /* Initialize the first time around.  */
		  if (folded_dep == NULL_TREE)
		    {
		      folded_dep = c;
		      folded_deps[0] = offset;
		    }
		  else
		    folded_deps[0] = wi::gcd (folded_deps[0],
					      offset, UNSIGNED);
		}
	    }
	  /* Calculate minimum for the remaining dimensions.  */
	  else
	    {
	      folded_deps[len + i - 1] = offset;
	      if (folded_dep == c)
		folded_deps[i] = offset;
	      else if (maybe_lexically_later
		       && !wi::eq_p (folded_deps[i], offset))
		{
		  if (forward ^ wi::gts_p (folded_deps[i], offset))
		    {
		      /* C is lexically later than the current champion;
			 adopt its columns seen so far (saved in the upper
			 half of FOLDED_DEPS).  */
		      unsigned int j;
		      folded_dep = c;
		      for (j = 1; j <= i; j++)
			folded_deps[j] = folded_deps[len + j - 1];
		    }
		  else
		    maybe_lexically_later = false;
		}
	    }
	}
      gcc_assert (i == len);

      /* The clause has been fully folded into FOLDED_DEPS; drop it from
	 the list (FOLDED_DEP itself is re-linked below).  */
      remove = true;

    next_ordered_clause:
      if (remove)
	*list_p = OMP_CLAUSE_CHAIN (c);
      else
	list_p = &OMP_CLAUSE_CHAIN (c);
    }

  if (folded_dep)
    {
      /* Restore the sign stripped before computing the GCD.  */
      if (neg_offset_p)
	folded_deps[0] = -folded_deps[0];

      tree itype = TREE_TYPE (TREE_VALUE (OMP_CLAUSE_DECL (folded_dep)));
      if (POINTER_TYPE_P (itype))
	itype = sizetype;

      /* Store the folded first-dimension offset back into the surviving
	 clause and put it at the head of ORD_STMT's clause list.  */
      TREE_PURPOSE (OMP_CLAUSE_DECL (folded_dep))
	= wide_int_to_tree (itype, folded_deps[0]);
      OMP_CLAUSE_CHAIN (folded_dep) = gimple_omp_ordered_clauses (ord_stmt);
      *gimple_omp_ordered_clauses_ptr (ord_stmt) = folded_dep;
    }

 lower_omp_ordered_ret:

  /* Ordered without clauses is #pragma omp ordered threads, while we want
     a nop instead if we remove all clauses.  */
  if (gimple_omp_ordered_clauses (ord_stmt) == NULL_TREE)
    gsi_replace (gsi_p, gimple_build_nop (), true);
}
9047
9048
9049 /* Expand code for an OpenMP ordered directive. */
9050
static void
lower_omp_ordered (gimple_stmt_iterator *gsi_p, omp_context *ctx)
{
  tree block;
  gimple *stmt = gsi_stmt (*gsi_p), *g;
  gomp_ordered *ord_stmt = as_a <gomp_ordered *> (stmt);
  gcall *x;
  gbind *bind;
  /* SIMD variant: the construct appears inside a simd loop and is lowered
     to internal functions matched up later by the vectorizer.  */
  bool simd = omp_find_clause (gimple_omp_ordered_clauses (ord_stmt),
			       OMP_CLAUSE_SIMD);
  /* FIXME: this should check presence of OMP_CLAUSE__SIMT_ on the enclosing
     loop.  */
  bool maybe_simt
    = simd && omp_maybe_offloaded_ctx (ctx) && omp_max_simt_vf () > 1;
  bool threads = omp_find_clause (gimple_omp_ordered_clauses (ord_stmt),
				  OMP_CLAUSE_THREADS);

  if (omp_find_clause (gimple_omp_ordered_clauses (ord_stmt),
		       OMP_CLAUSE_DEPEND))
    {
      /* FIXME: This needs to be moved to the expansion to verify various
	 conditions only testable on cfg with dominators computed, and also
	 all the depend clauses to be merged still might need to be available
	 for the runtime checks.  */
      if (0)
	lower_omp_ordered_clauses (gsi_p, ord_stmt, ctx);
      return;
    }

  push_gimplify_context ();

  /* Wrap the construct in a GIMPLE_BIND so start/end calls and the lowered
     body share one block scope.  */
  block = make_node (BLOCK);
  bind = gimple_build_bind (NULL, NULL, block);
  gsi_replace (gsi_p, bind, true);
  gimple_bind_add_stmt (bind, stmt);

  if (simd)
    {
      /* The THREADS flag is passed as a constant argument so the
	 matching _END call below can be paired up.  */
      x = gimple_build_call_internal (IFN_GOMP_SIMD_ORDERED_START, 1,
				      build_int_cst (NULL_TREE, threads));
      cfun->has_simduid_loops = true;
    }
  else
    x = gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_ORDERED_START),
			   0);
  gimple_bind_add_stmt (bind, x);

  tree counter = NULL_TREE, test = NULL_TREE, body = NULL_TREE;
  if (maybe_simt)
    {
      /* For SIMT execution, serialize the lanes: loop over lane numbers,
	 letting one lane at a time execute the body.  */
      counter = create_tmp_var (integer_type_node);
      g = gimple_build_call_internal (IFN_GOMP_SIMT_LANE, 0);
      gimple_call_set_lhs (g, counter);
      gimple_bind_add_stmt (bind, g);

      body = create_artificial_label (UNKNOWN_LOCATION);
      test = create_artificial_label (UNKNOWN_LOCATION);
      gimple_bind_add_stmt (bind, gimple_build_label (body));

      tree simt_pred = create_tmp_var (integer_type_node);
      g = gimple_build_call_internal (IFN_GOMP_SIMT_ORDERED_PRED, 1, counter);
      gimple_call_set_lhs (g, simt_pred);
      gimple_bind_add_stmt (bind, g);

      tree t = create_artificial_label (UNKNOWN_LOCATION);
      /* Only the lane whose predicate is zero runs the body this trip.  */
      g = gimple_build_cond (EQ_EXPR, simt_pred, integer_zero_node, t, test);
      gimple_bind_add_stmt (bind, g);

      gimple_bind_add_stmt (bind, gimple_build_label (t));
    }
  lower_omp (gimple_omp_body_ptr (stmt), ctx);
  gimple_omp_set_body (stmt, maybe_catch_exception (gimple_omp_body (stmt)));
  gimple_bind_add_seq (bind, gimple_omp_body (stmt));
  gimple_omp_set_body (stmt, NULL);

  if (maybe_simt)
    {
      /* Loop epilogue: decrement the lane counter and use a SIMT vote to
	 decide whether any lane still has to execute the body.  */
      gimple_bind_add_stmt (bind, gimple_build_label (test));
      g = gimple_build_assign (counter, MINUS_EXPR, counter, integer_one_node);
      gimple_bind_add_stmt (bind, g);

      tree c = build2 (GE_EXPR, boolean_type_node, counter, integer_zero_node);
      tree nonneg = create_tmp_var (integer_type_node);
      gimple_seq tseq = NULL;
      gimplify_assign (nonneg, fold_convert (integer_type_node, c), &tseq);
      gimple_bind_add_seq (bind, tseq);

      g = gimple_build_call_internal (IFN_GOMP_SIMT_VOTE_ANY, 1, nonneg);
      gimple_call_set_lhs (g, nonneg);
      gimple_bind_add_stmt (bind, g);

      tree end = create_artificial_label (UNKNOWN_LOCATION);
      g = gimple_build_cond (NE_EXPR, nonneg, integer_zero_node, body, end);
      gimple_bind_add_stmt (bind, g);

      gimple_bind_add_stmt (bind, gimple_build_label (end));
    }
  if (simd)
    x = gimple_build_call_internal (IFN_GOMP_SIMD_ORDERED_END, 1,
				    build_int_cst (NULL_TREE, threads));
  else
    x = gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_ORDERED_END),
			   0);
  gimple_bind_add_stmt (bind, x);

  gimple_bind_add_stmt (bind, gimple_build_omp_return (true));

  pop_gimplify_context (bind);

  gimple_bind_append_vars (bind, ctx->block_vars);
  BLOCK_VARS (block) = gimple_bind_vars (bind);
}
9163
9164
9165 /* Expand code for an OpenMP scan directive and the structured block
9166 before the scan directive. */
9167
static void
lower_omp_scan (gimple_stmt_iterator *gsi_p, omp_context *ctx)
{
  gimple *stmt = gsi_stmt (*gsi_p);
  /* The GIMPLE_OMP_SCAN with clauses separates the input phase from the
     scan phase; the clause-less one marks the other end of the pair.  */
  bool has_clauses
    = gimple_omp_scan_clauses (as_a <gomp_scan *> (stmt)) != NULL;
  tree lane = NULL_TREE;
  /* Statements to emit before the structured block being lowered.  */
  gimple_seq before = NULL;
  /* OCTX is the enclosing worksharing/simd loop context.  */
  omp_context *octx = ctx->outer;
  gcc_assert (octx);
  if (octx->scan_exclusive && !has_clauses)
    {
      gimple_stmt_iterator gsi2 = *gsi_p;
      gsi_next (&gsi2);
      gimple *stmt2 = gsi_stmt (gsi2);
      /* For exclusive scan, swap GIMPLE_OMP_SCAN without clauses
	 with following GIMPLE_OMP_SCAN with clauses, so that input_phase,
	 the one with exclusive clause(s), comes first.  */
      if (stmt2
	  && gimple_code (stmt2) == GIMPLE_OMP_SCAN
	  && gimple_omp_scan_clauses (as_a <gomp_scan *> (stmt2)) != NULL)
	{
	  gsi_remove (gsi_p, false);
	  gsi_insert_after (gsi_p, stmt, GSI_SAME_STMT);
	  ctx = maybe_lookup_ctx (stmt2);
	  gcc_assert (ctx);
	  /* Recurse on the swapped-in scan, now first in the sequence.  */
	  lower_omp_scan (gsi_p, ctx);
	  return;
	}
    }

  /* Whether the block preceding this scan directive is the input phase
     (computes into the reduction var) or the scan phase (uses it).  */
  bool input_phase = has_clauses ^ octx->scan_inclusive;
  bool is_simd = (gimple_code (octx->stmt) == GIMPLE_OMP_FOR
		  && gimple_omp_for_kind (octx->stmt) == GF_OMP_FOR_KIND_SIMD);
  bool is_for = (gimple_code (octx->stmt) == GIMPLE_OMP_FOR
		 && gimple_omp_for_kind (octx->stmt) == GF_OMP_FOR_KIND_FOR
		 && !gimple_omp_for_combined_p (octx->stmt));
  bool is_for_simd = is_simd && gimple_omp_for_combined_into_p (octx->stmt);
  if (is_for_simd && octx->for_simd_scan_phase)
    is_simd = false;
  if (is_simd)
    if (tree c = omp_find_clause (gimple_omp_for_clauses (octx->stmt),
				  OMP_CLAUSE__SIMDUID_))
      {
	tree uid = OMP_CLAUSE__SIMDUID__DECL (c);
	lane = create_tmp_var (unsigned_type_node);
	/* Second argument encodes the phase for the vectorizer:
	   1 input phase, 2 inclusive scan phase, 3 exclusive scan phase.  */
	tree t = build_int_cst (integer_type_node,
				input_phase ? 1
				: octx->scan_inclusive ? 2 : 3);
	gimple *g
	  = gimple_build_call_internal (IFN_GOMP_SIMD_LANE, 2, uid, t);
	gimple_call_set_lhs (g, lane);
	gimple_seq_add_stmt (&before, g);
      }

  if (is_simd || is_for)
    {
      for (tree c = gimple_omp_for_clauses (octx->stmt);
	   c; c = OMP_CLAUSE_CHAIN (c))
	if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
	    && OMP_CLAUSE_REDUCTION_INSCAN (c))
	  {
	    location_t clause_loc = OMP_CLAUSE_LOCATION (c);
	    tree var = OMP_CLAUSE_DECL (c);
	    tree new_var = lookup_decl (var, octx);
	    tree val = new_var;
	    /* var2: the accumulator (outer var or "omp simd array" slot);
	       var3: separate identity-element copy for UDRs, if any;
	       var4: previous-value temporary used by exclusive scan;
	       lane0: index expression for lane 0 of a simd array.  */
	    tree var2 = NULL_TREE;
	    tree var3 = NULL_TREE;
	    tree var4 = NULL_TREE;
	    tree lane0 = NULL_TREE;
	    tree new_vard = new_var;
	    if (omp_is_reference (var))
	      {
		new_var = build_simple_mem_ref_loc (clause_loc, new_var);
		val = new_var;
	      }
	    if (DECL_HAS_VALUE_EXPR_P (new_vard))
	      {
		val = DECL_VALUE_EXPR (new_vard);
		if (new_vard != new_var)
		  {
		    gcc_assert (TREE_CODE (val) == ADDR_EXPR);
		    val = TREE_OPERAND (val, 0);
		  }
		if (TREE_CODE (val) == ARRAY_REF
		    && VAR_P (TREE_OPERAND (val, 0)))
		  {
		    tree v = TREE_OPERAND (val, 0);
		    if (lookup_attribute ("omp simd array",
					  DECL_ATTRIBUTES (v)))
		      {
			/* Per-lane storage: redirect the element index to
			   the current LANE and remember the original.  */
			val = unshare_expr (val);
			lane0 = TREE_OPERAND (val, 1);
			TREE_OPERAND (val, 1) = lane;
			var2 = lookup_decl (v, octx);
			if (octx->scan_exclusive)
			  var4 = lookup_decl (var2, octx);
			if (input_phase
			    && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
			  var3 = maybe_lookup_decl (var4 ? var4 : var2, octx);
			if (!input_phase)
			  {
			    var2 = build4 (ARRAY_REF, TREE_TYPE (val),
					   var2, lane, NULL_TREE, NULL_TREE);
			    TREE_THIS_NOTRAP (var2) = 1;
			    if (octx->scan_exclusive)
			      {
				var4 = build4 (ARRAY_REF, TREE_TYPE (val),
					       var4, lane, NULL_TREE,
					       NULL_TREE);
				TREE_THIS_NOTRAP (var4) = 1;
			      }
			  }
			else
			  var2 = val;
		      }
		  }
		gcc_assert (var2);
	      }
	    else
	      {
		var2 = build_outer_var_ref (var, octx);
		if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
		  {
		    var3 = maybe_lookup_decl (new_vard, octx);
		    if (var3 == new_vard || var3 == NULL_TREE)
		      var3 = NULL_TREE;
		    else if (is_simd && octx->scan_exclusive && !input_phase)
		      {
			var4 = maybe_lookup_decl (var3, octx);
			if (var4 == var3 || var4 == NULL_TREE)
			  {
			    if (TREE_ADDRESSABLE (TREE_TYPE (new_var)))
			      {
				var4 = var3;
				var3 = NULL_TREE;
			      }
			    else
			      var4 = NULL_TREE;
			  }
		      }
		  }
		if (is_simd
		    && octx->scan_exclusive
		    && !input_phase
		    && var4 == NULL_TREE)
		  /* Exclusive scan needs somewhere to stash the previous
		     value; create a temporary if none was mapped.  */
		  var4 = create_tmp_var (TREE_TYPE (val));
	      }
	    if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
	      {
		/* User-defined reduction: lower the recorded init/merge
		   sequences with placeholder/val rebound appropriately.  */
		tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
		if (input_phase)
		  {
		    if (var3)
		      {
			/* If we've added a separate identity element
			   variable, copy it over into val.  */
			tree x = lang_hooks.decls.omp_clause_assign_op (c, val,
									var3);
			gimplify_and_add (x, &before);
		      }
		    else if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c))
		      {
			/* Otherwise, assign to it the identity element.  */
			gimple_seq tseq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c);
			if (is_for)
			  tseq = copy_gimple_seq_and_replace_locals (tseq);
			tree ref = build_outer_var_ref (var, octx);
			tree x = (DECL_HAS_VALUE_EXPR_P (new_vard)
				  ? DECL_VALUE_EXPR (new_vard) : NULL_TREE);
			if (x)
			  {
			    if (new_vard != new_var)
			      val = build_fold_addr_expr_loc (clause_loc, val);
			    SET_DECL_VALUE_EXPR (new_vard, val);
			  }
			SET_DECL_VALUE_EXPR (placeholder, ref);
			DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
			lower_omp (&tseq, octx);
			/* Restore the value exprs clobbered above.  */
			if (x)
			  SET_DECL_VALUE_EXPR (new_vard, x);
			SET_DECL_VALUE_EXPR (placeholder, NULL_TREE);
			DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
			gimple_seq_add_seq (&before, tseq);
			if (is_simd)
			  OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL;
		      }
		  }
		else if (is_simd)
		  {
		    tree x;
		    if (octx->scan_exclusive)
		      {
			/* Save the pre-merge value for exclusive scan.  */
			tree v4 = unshare_expr (var4);
			tree v2 = unshare_expr (var2);
			x = lang_hooks.decls.omp_clause_assign_op (c, v4, v2);
			gimplify_and_add (x, &before);
		      }
		    gimple_seq tseq = OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c);
		    x = (DECL_HAS_VALUE_EXPR_P (new_vard)
			 ? DECL_VALUE_EXPR (new_vard) : NULL_TREE);
		    tree vexpr = val;
		    if (x && new_vard != new_var)
		      vexpr = build_fold_addr_expr_loc (clause_loc, val);
		    if (x)
		      SET_DECL_VALUE_EXPR (new_vard, vexpr);
		    SET_DECL_VALUE_EXPR (placeholder, var2);
		    DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
		    lower_omp (&tseq, octx);
		    gimple_seq_add_seq (&before, tseq);
		    OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
		    if (x)
		      SET_DECL_VALUE_EXPR (new_vard, x);
		    SET_DECL_VALUE_EXPR (placeholder, NULL_TREE);
		    DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
		    if (octx->scan_inclusive)
		      {
			/* Inclusive scan: user code sees the value merged
			   so far including this iteration.  */
			x = lang_hooks.decls.omp_clause_assign_op (c, val,
								   var2);
			gimplify_and_add (x, &before);
		      }
		    else if (lane0 == NULL_TREE)
		      {
			/* Exclusive scan: hand back the saved previous
			   value instead.  */
			x = lang_hooks.decls.omp_clause_assign_op (c, val,
								   var4);
			gimplify_and_add (x, &before);
		      }
		  }
	      }
	    else
	      {
		if (input_phase)
		  {
		    /* input phase.  Set val to initializer before
		       the body.  */
		    tree x = omp_reduction_init (c, TREE_TYPE (new_var));
		    gimplify_assign (val, x, &before);
		  }
		else if (is_simd)
		  {
		    /* scan phase.  */
		    enum tree_code code = OMP_CLAUSE_REDUCTION_CODE (c);
		    /* Subtraction reductions accumulate with addition.  */
		    if (code == MINUS_EXPR)
		      code = PLUS_EXPR;

		    tree x = build2 (code, TREE_TYPE (var2),
				     unshare_expr (var2), unshare_expr (val));
		    if (octx->scan_inclusive)
		      {
			gimplify_assign (unshare_expr (var2), x, &before);
			gimplify_assign (val, var2, &before);
		      }
		    else
		      {
			gimplify_assign (unshare_expr (var4),
					 unshare_expr (var2), &before);
			gimplify_assign (var2, x, &before);
			if (lane0 == NULL_TREE)
			  gimplify_assign (val, var4, &before);
		      }
		  }
	      }
	    if (octx->scan_exclusive && !input_phase && lane0)
	      {
		/* Redirect later references to the saved lane-0 slot of
		   the previous-value array.  */
		tree vexpr = unshare_expr (var4);
		TREE_OPERAND (vexpr, 1) = lane0;
		if (new_vard != new_var)
		  vexpr = build_fold_addr_expr_loc (clause_loc, vexpr);
		SET_DECL_VALUE_EXPR (new_vard, vexpr);
	      }
	  }
    }
  if (is_simd && !is_for_simd)
    {
      /* Plain simd: splice the (already lowered) body and the generated
	 BEFORE sequence in place of the scan directive.  */
      gsi_insert_seq_after (gsi_p, gimple_omp_body (stmt), GSI_SAME_STMT);
      gsi_insert_seq_after (gsi_p, before, GSI_SAME_STMT);
      gsi_replace (gsi_p, gimple_build_nop (), true);
      return;
    }
  lower_omp (gimple_omp_body_ptr (stmt), octx);
  if (before)
    {
      gimple_stmt_iterator gsi = gsi_start_1 (gimple_omp_body_ptr (stmt));
      gsi_insert_seq_before (&gsi, before, GSI_SAME_STMT);
    }
}
9454
9455
9456 /* Gimplify a GIMPLE_OMP_CRITICAL statement. This is a relatively simple
9457 substitution of a couple of function calls. But in the NAMED case,
9458 requires that languages coordinate a symbol name. It is therefore
9459 best put here in common code. */
9460
9461 static GTY(()) hash_map<tree, tree> *critical_name_mutexes;
9462
static void
lower_omp_critical (gimple_stmt_iterator *gsi_p, omp_context *ctx)
{
  tree block;
  tree name, lock, unlock;
  gomp_critical *stmt = as_a <gomp_critical *> (gsi_stmt (*gsi_p));
  gbind *bind;
  location_t loc = gimple_location (stmt);
  gimple_seq tbody;

  name = gimple_omp_critical_name (stmt);
  if (name)
    {
      tree decl;

      /* Lazily created map from critical-section name to the shared
	 mutex variable; GTY so it survives across garbage collections.  */
      if (!critical_name_mutexes)
	critical_name_mutexes = hash_map<tree, tree>::create_ggc (10);

      tree *n = critical_name_mutexes->get (name);
      if (n == NULL)
	{
	  char *new_str;

	  decl = create_tmp_var_raw (ptr_type_node);

	  /* The symbol name is a fixed prefix plus the user's critical
	     name; DECL_COMMON lets identically-named mutexes from
	     different translation units merge at link time.  */
	  new_str = ACONCAT ((".gomp_critical_user_",
			      IDENTIFIER_POINTER (name), NULL));
	  DECL_NAME (decl) = get_identifier (new_str);
	  TREE_PUBLIC (decl) = 1;
	  TREE_STATIC (decl) = 1;
	  DECL_COMMON (decl) = 1;
	  DECL_ARTIFICIAL (decl) = 1;
	  DECL_IGNORED_P (decl) = 1;

	  varpool_node::finalize_decl (decl);

	  critical_name_mutexes->put (name, decl);
	}
      else
	decl = *n;

      /* If '#pragma omp critical' is inside offloaded region or
	 inside function marked as offloadable, the symbol must be
	 marked as offloadable too.  */
      omp_context *octx;
      if (cgraph_node::get (current_function_decl)->offloadable)
	varpool_node::get_create (decl)->offloadable = 1;
      else
	for (octx = ctx->outer; octx; octx = octx->outer)
	  if (is_gimple_omp_offloaded (octx->stmt))
	    {
	      varpool_node::get_create (decl)->offloadable = 1;
	      break;
	    }

      /* Named critical sections lock the per-name mutex.  */
      lock = builtin_decl_explicit (BUILT_IN_GOMP_CRITICAL_NAME_START);
      lock = build_call_expr_loc (loc, lock, 1,
				  build_fold_addr_expr_loc (loc, decl));

      unlock = builtin_decl_explicit (BUILT_IN_GOMP_CRITICAL_NAME_END);
      unlock = build_call_expr_loc (loc, unlock, 1,
				    build_fold_addr_expr_loc (loc, decl));
    }
  else
    {
      /* The unnamed critical section uses the runtime's single global
	 mutex, taking no arguments.  */
      lock = builtin_decl_explicit (BUILT_IN_GOMP_CRITICAL_START);
      lock = build_call_expr_loc (loc, lock, 0);

      unlock = builtin_decl_explicit (BUILT_IN_GOMP_CRITICAL_END);
      unlock = build_call_expr_loc (loc, unlock, 0);
    }

  push_gimplify_context ();

  /* Emit: lock; <lowered body, wrapped for EH>; unlock; inside one bind.  */
  block = make_node (BLOCK);
  bind = gimple_build_bind (NULL, NULL, block);
  gsi_replace (gsi_p, bind, true);
  gimple_bind_add_stmt (bind, stmt);

  tbody = gimple_bind_body (bind);
  gimplify_and_add (lock, &tbody);
  gimple_bind_set_body (bind, tbody);

  lower_omp (gimple_omp_body_ptr (stmt), ctx);
  gimple_omp_set_body (stmt, maybe_catch_exception (gimple_omp_body (stmt)));
  gimple_bind_add_seq (bind, gimple_omp_body (stmt));
  gimple_omp_set_body (stmt, NULL);

  tbody = gimple_bind_body (bind);
  gimplify_and_add (unlock, &tbody);
  gimple_bind_set_body (bind, tbody);

  gimple_bind_add_stmt (bind, gimple_build_omp_return (true));

  pop_gimplify_context (bind);
  gimple_bind_append_vars (bind, ctx->block_vars);
  BLOCK_VARS (block) = gimple_bind_vars (bind);
}
9561
9562 /* A subroutine of lower_omp_for. Generate code to emit the predicate
9563 for a lastprivate clause. Given a loop control predicate of (V
9564 cond N2), we gate the clause on (!(V cond N2)). The lowered form
9565 is appended to *DLIST, iterator initialization is appended to
9566 *BODY_P. *CLIST is for lastprivate(conditional:) code that needs
9567 to be emitted in a critical section. */
9568
static void
lower_omp_for_lastprivate (struct omp_for_data *fd, gimple_seq *body_p,
			   gimple_seq *dlist, gimple_seq *clist,
			   struct omp_context *ctx)
{
  tree clauses, cond, vinit;
  enum tree_code cond_code;
  gimple_seq stmts;

  /* Invert the loop condition: lastprivate fires when (V cond N2)
     no longer holds.  */
  cond_code = fd->loop.cond_code;
  cond_code = cond_code == LT_EXPR ? GE_EXPR : LE_EXPR;

  /* When possible, use a strict equality expression.  This can let VRP
     type optimizations deduce the value and remove a copy.  */
  if (tree_fits_shwi_p (fd->loop.step))
    {
      HOST_WIDE_INT step = tree_to_shwi (fd->loop.step);
      if (step == 1 || step == -1)
	cond_code = EQ_EXPR;
    }

  tree n2 = fd->loop.n2;
  if (fd->collapse > 1
      && TREE_CODE (n2) != INTEGER_CST
      && gimple_omp_for_combined_into_p (fd->for_stmt))
    {
      /* For combined constructs the real upper bound lives in a
	 _looptemp_ clause on the enclosing taskreg construct; find it.  */
      struct omp_context *taskreg_ctx = NULL;
      if (gimple_code (ctx->outer->stmt) == GIMPLE_OMP_FOR)
	{
	  gomp_for *gfor = as_a <gomp_for *> (ctx->outer->stmt);
	  if (gimple_omp_for_kind (gfor) == GF_OMP_FOR_KIND_FOR
	      || gimple_omp_for_kind (gfor) == GF_OMP_FOR_KIND_DISTRIBUTE)
	    {
	      if (gimple_omp_for_combined_into_p (gfor))
		{
		  gcc_assert (ctx->outer->outer
			      && is_parallel_ctx (ctx->outer->outer));
		  taskreg_ctx = ctx->outer->outer;
		}
	      else
		{
		  /* Outer loop not combined further: recompute its bound
		     directly.  */
		  struct omp_for_data outer_fd;
		  omp_extract_for_data (gfor, &outer_fd, NULL);
		  n2 = fold_convert (TREE_TYPE (n2), outer_fd.loop.n2);
		}
	    }
	  else if (gimple_omp_for_kind (gfor) == GF_OMP_FOR_KIND_TASKLOOP)
	    taskreg_ctx = ctx->outer->outer;
	}
      else if (is_taskreg_ctx (ctx->outer))
	taskreg_ctx = ctx->outer;
      if (taskreg_ctx)
	{
	  int i;
	  tree taskreg_clauses
	    = gimple_omp_taskreg_clauses (taskreg_ctx->stmt);
	  tree innerc = omp_find_clause (taskreg_clauses,
					 OMP_CLAUSE__LOOPTEMP_);
	  gcc_assert (innerc);
	  /* Skip over the _looptemp_ clauses used for the collapsed loop
	     bounds; some non-rectangular loops carry four extra temps.  */
	  int count = fd->collapse;
	  if (fd->non_rect
	      && fd->last_nonrect == fd->first_nonrect + 1)
	    if (tree v = gimple_omp_for_index (fd->for_stmt, fd->last_nonrect))
	      if (!TYPE_UNSIGNED (TREE_TYPE (v)))
		count += 4;
	  for (i = 0; i < count; i++)
	    {
	      innerc = omp_find_clause (OMP_CLAUSE_CHAIN (innerc),
					OMP_CLAUSE__LOOPTEMP_);
	      gcc_assert (innerc);
	    }
	  innerc = omp_find_clause (OMP_CLAUSE_CHAIN (innerc),
				    OMP_CLAUSE__LOOPTEMP_);
	  if (innerc)
	    n2 = fold_convert (TREE_TYPE (n2),
			       lookup_decl (OMP_CLAUSE_DECL (innerc),
					    taskreg_ctx));
	}
    }
  cond = build2 (cond_code, boolean_type_node, fd->loop.v, n2);

  clauses = gimple_omp_for_clauses (fd->for_stmt);
  stmts = NULL;
  lower_lastprivate_clauses (clauses, cond, body_p, &stmts, clist, ctx);
  if (!gimple_seq_empty_p (stmts))
    {
      gimple_seq_add_seq (&stmts, *dlist);
      *dlist = stmts;

      /* Optimize: v = 0; is usually cheaper than v = some_other_constant.  */
      vinit = fd->loop.n1;
      if (cond_code == EQ_EXPR
	  && tree_fits_shwi_p (fd->loop.n2)
	  && ! integer_zerop (fd->loop.n2))
	vinit = build_int_cst (TREE_TYPE (fd->loop.v), 0);
      else
	vinit = unshare_expr (vinit);

      /* Initialize the iterator variable, so that threads that don't execute
	 any iterations don't execute the lastprivate clauses by accident.  */
      gimplify_assign (fd->loop.v, vinit, body_p);
    }
}
9672
9673 /* Callback for walk_gimple_seq. Find #pragma omp scan statement. */
9674
9675 static tree
9676 omp_find_scan (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
9677 struct walk_stmt_info *wi)
9678 {
9679 gimple *stmt = gsi_stmt (*gsi_p);
9680
9681 *handled_ops_p = true;
9682 switch (gimple_code (stmt))
9683 {
9684 WALK_SUBSTMTS;
9685
9686 case GIMPLE_OMP_FOR:
9687 if (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_SIMD
9688 && gimple_omp_for_combined_into_p (stmt))
9689 *handled_ops_p = false;
9690 break;
9691
9692 case GIMPLE_OMP_SCAN:
9693 *(gimple_stmt_iterator *) (wi->info) = *gsi_p;
9694 return integer_zero_node;
9695 default:
9696 break;
9697 }
9698 return NULL;
9699 }
9700
9701 /* Helper function for lower_omp_for, add transformations for a worksharing
9702 loop with scan directives inside of it.
9703 For worksharing loop not combined with simd, transform:
9704 #pragma omp for reduction(inscan,+:r) private(i)
9705 for (i = 0; i < n; i = i + 1)
9706 {
9707 {
9708 update (r);
9709 }
9710 #pragma omp scan inclusive(r)
9711 {
9712 use (r);
9713 }
9714 }
9715
9716 into two worksharing loops + code to merge results:
9717
9718 num_threads = omp_get_num_threads ();
9719 thread_num = omp_get_thread_num ();
9720 if (thread_num == 0) goto <D.2099>; else goto <D.2100>;
9721 <D.2099>:
9722 var2 = r;
9723 goto <D.2101>;
9724 <D.2100>:
9725 // For UDRs this is UDR init, or if ctors are needed, copy from
9726 // var3 that has been constructed to contain the neutral element.
9727 var2 = 0;
9728 <D.2101>:
9729 ivar = 0;
9730 // The _scantemp_ clauses will arrange for rpriva to be initialized to
9731 // a shared array with num_threads elements and rprivb to a local array
9732 // number of elements equal to the number of (contiguous) iterations the
9733 // current thread will perform. controlb and controlp variables are
9734 // temporaries to handle deallocation of rprivb at the end of second
9735 // GOMP_FOR.
9736 #pragma omp for _scantemp_(rpriva) _scantemp_(rprivb) _scantemp_(controlb) \
9737 _scantemp_(controlp) reduction(inscan,+:r) private(i) nowait
9738 for (i = 0; i < n; i = i + 1)
9739 {
9740 {
9741 // For UDRs this is UDR init or copy from var3.
9742 r = 0;
9743 // This is the input phase from user code.
9744 update (r);
9745 }
9746 {
9747 // For UDRs this is UDR merge.
9748 var2 = var2 + r;
9749 // Rather than handing it over to the user, save to local thread's
9750 // array.
9751 rprivb[ivar] = var2;
9752 // For exclusive scan, the above two statements are swapped.
9753 ivar = ivar + 1;
9754 }
9755 }
9756 // And remember the final value from this thread's into the shared
9757 // rpriva array.
9758 rpriva[(sizetype) thread_num] = var2;
9759 // If more than one thread, compute using Work-Efficient prefix sum
9760 // the inclusive parallel scan of the rpriva array.
9761 if (num_threads > 1) goto <D.2102>; else goto <D.2103>;
9762 <D.2102>:
9763 GOMP_barrier ();
9764 down = 0;
9765 k = 1;
9766 num_threadsu = (unsigned int) num_threads;
9767 thread_numup1 = (unsigned int) thread_num + 1;
9768 <D.2108>:
9769 twok = k << 1;
9770 if (twok > num_threadsu) goto <D.2110>; else goto <D.2111>;
9771 <D.2110>:
9772 down = 4294967295;
9773 k = k >> 1;
9774 if (k == num_threadsu) goto <D.2112>; else goto <D.2111>;
9775 <D.2112>:
9776 k = k >> 1;
9777 <D.2111>:
9778 twok = k << 1;
9779 cplx = .MUL_OVERFLOW (thread_nump1, twok);
9780 mul = REALPART_EXPR <cplx>;
9781 ovf = IMAGPART_EXPR <cplx>;
9782 if (ovf == 0) goto <D.2116>; else goto <D.2117>;
9783 <D.2116>:
9784 andv = k & down;
9785 andvm1 = andv + 4294967295;
9786 l = mul + andvm1;
9787 if (l < num_threadsu) goto <D.2120>; else goto <D.2117>;
9788 <D.2120>:
9789 // For UDRs this is UDR merge, performed using var2 variable as temporary,
9790 // i.e. var2 = rpriva[l - k]; UDR merge (var2, rpriva[l]); rpriva[l] = var2;
9791 rpriva[l] = rpriva[l - k] + rpriva[l];
9792 <D.2117>:
9793 if (down == 0) goto <D.2121>; else goto <D.2122>;
9794 <D.2121>:
9795 k = k << 1;
9796 goto <D.2123>;
9797 <D.2122>:
9798 k = k >> 1;
9799 <D.2123>:
9800 GOMP_barrier ();
9801 if (k != 0) goto <D.2108>; else goto <D.2103>;
9802 <D.2103>:
9803 if (thread_num == 0) goto <D.2124>; else goto <D.2125>;
9804 <D.2124>:
9805 // For UDRs this is UDR init or copy from var3.
9806 var2 = 0;
9807 goto <D.2126>;
9808 <D.2125>:
9809 var2 = rpriva[thread_num - 1];
9810 <D.2126>:
9811 ivar = 0;
9812 #pragma omp for _scantemp_(controlb) _scantemp_(controlp) \
9813 reduction(inscan,+:r) private(i)
9814 for (i = 0; i < n; i = i + 1)
9815 {
9816 {
9817 // For UDRs, this is r = var2; UDR merge (r, rprivb[ivar]);
9818 r = var2 + rprivb[ivar];
9819 }
9820 {
9821 // This is the scan phase from user code.
9822 use (r);
9823 // Plus a bump of the iterator.
9824 ivar = ivar + 1;
9825 }
9826 } */
9827
/* Helper for lower_omp_for: lower a worksharing loop whose body contains
   a scan directive with inscan reduction clause(s) into the two-pass form
   sketched in the comment above — an input-phase loop filling per-iteration
   and per-thread temporaries, a cross-thread combination step, and a second
   (scan-phase) loop over a copy of the body.  BODY_P receives the lowered
   statements, DLIST the destructor/finalization sequence, STMT is the
   GIMPLE_OMP_FOR being lowered, FD its extracted bounds, CTX its context.  */
9828 static void
9829 lower_omp_for_scan (gimple_seq *body_p, gimple_seq *dlist, gomp_for *stmt,
9830 struct omp_for_data *fd, omp_context *ctx)
9831 {
9832 bool is_for_simd = gimple_omp_for_combined_p (stmt);
9833 gcc_assert (ctx->scan_inclusive || ctx->scan_exclusive);
9834
9835 gimple_seq body = gimple_omp_body (stmt);
/* Locate the GIMPLE_OMP_SCAN inside the loop body.  The statement before
   it (input_stmt1) holds the input phase, the scan statement itself holds
   the scan phase.  */
9836 gimple_stmt_iterator input1_gsi = gsi_none ();
9837 struct walk_stmt_info wi;
9838 memset (&wi, 0, sizeof (wi));
9839 wi.val_only = true;
9840 wi.info = (void *) &input1_gsi;
9841 walk_gimple_seq_mod (&body, omp_find_scan, NULL, &wi);
9842 gcc_assert (!gsi_end_p (input1_gsi));
9843
9844 gimple *input_stmt1 = gsi_stmt (input1_gsi);
9845 gimple_stmt_iterator gsi = input1_gsi;
9846 gsi_next (&gsi);
9847 gimple_stmt_iterator scan1_gsi = gsi;
9848 gimple *scan_stmt1 = gsi_stmt (gsi);
9849 gcc_assert (scan_stmt1 && gimple_code (scan_stmt1) == GIMPLE_OMP_SCAN);
9850
/* Detach the phase bodies so the copy below doesn't duplicate them;
   input_body is restored onto the original, scan_body goes to the copy.  */
9851 gimple_seq input_body = gimple_omp_body (input_stmt1);
9852 gimple_seq scan_body = gimple_omp_body (scan_stmt1);
9853 gimple_omp_set_body (input_stmt1, NULL);
9854 gimple_omp_set_body (scan_stmt1, NULL);
9855 gimple_omp_set_body (stmt, NULL);
9856
/* Duplicate the loop for the second pass: the original STMT keeps the
   input phase, NEW_STMT (with locals remapped) will run the scan phase.  */
9857 gomp_for *new_stmt = as_a <gomp_for *> (gimple_copy (stmt));
9858 gimple_seq new_body = copy_gimple_seq_and_replace_locals (body);
9859 gimple_omp_set_body (stmt, body);
9860 gimple_omp_set_body (input_stmt1, input_body);
9861
/* Find the (copied) scan statement in the duplicated body as well.  */
9862 gimple_stmt_iterator input2_gsi = gsi_none ();
9863 memset (&wi, 0, sizeof (wi));
9864 wi.val_only = true;
9865 wi.info = (void *) &input2_gsi;
9866 walk_gimple_seq_mod (&new_body, omp_find_scan, NULL, &wi);
9867 gcc_assert (!gsi_end_p (input2_gsi));
9868
9869 gimple *input_stmt2 = gsi_stmt (input2_gsi);
9870 gsi = input2_gsi;
9871 gsi_next (&gsi);
9872 gimple_stmt_iterator scan2_gsi = gsi;
9873 gimple *scan_stmt2 = gsi_stmt (gsi);
9874 gcc_assert (scan_stmt2 && gimple_code (scan_stmt2) == GIMPLE_OMP_SCAN);
9875 gimple_omp_set_body (scan_stmt2, scan_body);
9876
/* For a combined for simd, additionally locate the scan statements nested
   inside the inner simd's input-phase and scan-phase bodies, and the simd
   contexts whose privatized copies (var5/var6 below) are used.  */
9877 gimple_stmt_iterator input3_gsi = gsi_none ();
9878 gimple_stmt_iterator scan3_gsi = gsi_none ();
9879 gimple_stmt_iterator input4_gsi = gsi_none ();
9880 gimple_stmt_iterator scan4_gsi = gsi_none ();
9881 gimple *input_stmt3 = NULL, *scan_stmt3 = NULL;
9882 gimple *input_stmt4 = NULL, *scan_stmt4 = NULL;
9883 omp_context *input_simd_ctx = NULL, *scan_simd_ctx = NULL;
9884 if (is_for_simd)
9885 {
9886 memset (&wi, 0, sizeof (wi));
9887 wi.val_only = true;
9888 wi.info = (void *) &input3_gsi;
9889 walk_gimple_seq_mod (&input_body, omp_find_scan, NULL, &wi);
9890 gcc_assert (!gsi_end_p (input3_gsi));
9891
9892 input_stmt3 = gsi_stmt (input3_gsi);
9893 gsi = input3_gsi;
9894 gsi_next (&gsi);
9895 scan3_gsi = gsi;
9896 scan_stmt3 = gsi_stmt (gsi);
9897 gcc_assert (scan_stmt3 && gimple_code (scan_stmt3) == GIMPLE_OMP_SCAN);
9898
9899 memset (&wi, 0, sizeof (wi));
9900 wi.val_only = true;
9901 wi.info = (void *) &input4_gsi;
9902 walk_gimple_seq_mod (&scan_body, omp_find_scan, NULL, &wi);
9903 gcc_assert (!gsi_end_p (input4_gsi));
9904
9905 input_stmt4 = gsi_stmt (input4_gsi);
9906 gsi = input4_gsi;
9907 gsi_next (&gsi);
9908 scan4_gsi = gsi;
9909 scan_stmt4 = gsi_stmt (gsi);
9910 gcc_assert (scan_stmt4 && gimple_code (scan_stmt4) == GIMPLE_OMP_SCAN);
9911
9912 input_simd_ctx = maybe_lookup_ctx (input_stmt3)->outer;
9913 scan_simd_ctx = maybe_lookup_ctx (input_stmt4)->outer;
9914 }
9915
/* Query the team size and this thread's number at runtime.  */
9916 tree num_threads = create_tmp_var (integer_type_node);
9917 tree thread_num = create_tmp_var (integer_type_node);
9918 tree nthreads_decl = builtin_decl_explicit (BUILT_IN_OMP_GET_NUM_THREADS);
9919 tree threadnum_decl = builtin_decl_explicit (BUILT_IN_OMP_GET_THREAD_NUM);
9920 gimple *g = gimple_build_call (nthreads_decl, 0);
9921 gimple_call_set_lhs (g, num_threads);
9922 gimple_seq_add_stmt (body_p, g);
9923 g = gimple_build_call (threadnum_decl, 0);
9924 gimple_call_set_lhs (g, thread_num);
9925 gimple_seq_add_stmt (body_p, g);
9926
/* ivar counts this thread's iterations (indexes rprivb[]); k and l are
   the distance and element index used by the combination loop below.  */
9927 tree ivar = create_tmp_var (sizetype);
9928 tree new_clauses1 = NULL_TREE, new_clauses2 = NULL_TREE;
9929 tree *cp1 = &new_clauses1, *cp2 = &new_clauses2;
9930 tree k = create_tmp_var (unsigned_type_node);
9931 tree l = create_tmp_var (unsigned_type_node);
9932
/* For each inscan reduction clause, build the helper arrays and the
   statement sequences that are stitched into the emitted code below:
   thr01/thrn1 = pre-first-loop init (thread 0 / others), scan1 = scan-phase
   store in pass one, mdlist = publish partial result, thr02/thrn2 =
   pre-second-loop init, input2 = input phase of pass two, reduc = one
   combination step, last = final write-back, clist = constructors.  */
9933 gimple_seq clist = NULL, mdlist = NULL;
9934 gimple_seq thr01_list = NULL, thrn1_list = NULL;
9935 gimple_seq thr02_list = NULL, thrn2_list = NULL;
9936 gimple_seq scan1_list = NULL, input2_list = NULL;
9937 gimple_seq last_list = NULL, reduc_list = NULL;
9938 for (tree c = gimple_omp_for_clauses (stmt); c; c = OMP_CLAUSE_CHAIN (c))
9939 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
9940 && OMP_CLAUSE_REDUCTION_INSCAN (c))
9941 {
9942 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
9943 tree var = OMP_CLAUSE_DECL (c);
9944 tree new_var = lookup_decl (var, ctx);
9945 tree var3 = NULL_TREE;
9946 tree new_vard = new_var;
9947 if (omp_is_reference (var))
9948 new_var = build_simple_mem_ref_loc (clause_loc, new_var);
9949 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
9950 {
9951 var3 = maybe_lookup_decl (new_vard, ctx);
9952 if (var3 == new_vard)
9953 var3 = NULL_TREE;
9954 }
9955
/* rpriva[]: one element per thread, holding that thread's partial result
   (see rpriva in the comment above this function).  */
9956 tree ptype = build_pointer_type (TREE_TYPE (new_var));
9957 tree rpriva = create_tmp_var (ptype);
9958 tree nc = build_omp_clause (clause_loc, OMP_CLAUSE__SCANTEMP_);
9959 OMP_CLAUSE_DECL (nc) = rpriva;
9960 *cp1 = nc;
9961 cp1 = &OMP_CLAUSE_CHAIN (nc);
9962
/* rprivb[]: one element per iteration; _SCANTEMP__ALLOC marks it for
   runtime allocation.  */
9963 tree rprivb = create_tmp_var (ptype);
9964 nc = build_omp_clause (clause_loc, OMP_CLAUSE__SCANTEMP_);
9965 OMP_CLAUSE_DECL (nc) = rprivb;
9966 OMP_CLAUSE__SCANTEMP__ALLOC (nc) = 1;
9967 *cp1 = nc;
9968 cp1 = &OMP_CLAUSE_CHAIN (nc);
9969
/* var2: the thread-local running accumulator.  */
9970 tree var2 = create_tmp_var_raw (TREE_TYPE (new_var));
9971 if (new_vard != new_var)
9972 TREE_ADDRESSABLE (var2) = 1;
9973 gimple_add_tmp_var (var2);
9974
/* Precompute the array references used in the sequences below:
   rpriva[thread_num], rpriva[thread_num - 1], rpriva[l],
   rpriva[l - k] and rprivb[ivar].  */
9975 tree x = fold_convert_loc (clause_loc, sizetype, thread_num);
9976 x = fold_build2_loc (clause_loc, MULT_EXPR, sizetype, x,
9977 TYPE_SIZE_UNIT (TREE_TYPE (ptype)));
9978 x = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (rpriva), rpriva, x);
9979 tree rpriva_ref = build_simple_mem_ref_loc (clause_loc, x);
9980
9981 x = fold_build2_loc (clause_loc, PLUS_EXPR, integer_type_node,
9982 thread_num, integer_minus_one_node);
9983 x = fold_convert_loc (clause_loc, sizetype, x);
9984 x = fold_build2_loc (clause_loc, MULT_EXPR, sizetype, x,
9985 TYPE_SIZE_UNIT (TREE_TYPE (ptype)));
9986 x = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (rpriva), rpriva, x);
9987 tree rprivam1_ref = build_simple_mem_ref_loc (clause_loc, x);
9988
9989 x = fold_convert_loc (clause_loc, sizetype, l);
9990 x = fold_build2_loc (clause_loc, MULT_EXPR, sizetype, x,
9991 TYPE_SIZE_UNIT (TREE_TYPE (ptype)));
9992 x = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (rpriva), rpriva, x);
9993 tree rprival_ref = build_simple_mem_ref_loc (clause_loc, x);
9994
9995 x = fold_build2_loc (clause_loc, MINUS_EXPR, unsigned_type_node, l, k);
9996 x = fold_convert_loc (clause_loc, sizetype, x);
9997 x = fold_build2_loc (clause_loc, MULT_EXPR, sizetype, x,
9998 TYPE_SIZE_UNIT (TREE_TYPE (ptype)));
9999 x = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (rpriva), rpriva, x);
10000 tree rprivalmk_ref = build_simple_mem_ref_loc (clause_loc, x);
10001
10002 x = fold_build2_loc (clause_loc, MULT_EXPR, sizetype, ivar,
10003 TYPE_SIZE_UNIT (TREE_TYPE (ptype)));
10004 x = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (rprivb), rprivb, x);
10005 tree rprivb_ref = build_simple_mem_ref_loc (clause_loc, x);
10006
/* var4 is the location initialized before and published after each pass;
   for simd it is the privatized var, otherwise the accumulator var2.
   var5/var6 are the inner simd-context copies for each phase.  */
10007 tree var4 = is_for_simd ? new_var : var2;
10008 tree var5 = NULL_TREE, var6 = NULL_TREE;
10009 if (is_for_simd)
10010 {
10011 var5 = lookup_decl (var, input_simd_ctx);
10012 var6 = lookup_decl (var, scan_simd_ctx);
10013 if (new_vard != new_var)
10014 {
10015 var5 = build_simple_mem_ref_loc (clause_loc, var5);
10016 var6 = build_simple_mem_ref_loc (clause_loc, var6);
10017 }
10018 }
/* User-defined reduction: build everything through the language hooks and
   the GIMPLE_INIT/GIMPLE_MERGE sequences recorded on the clause, swapping
   DECL_VALUE_EXPRs of the placeholder/decl around each lowering.  */
10019 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
10020 {
10021 tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
10022 tree val = var2;
10023
10024 x = lang_hooks.decls.omp_clause_default_ctor
10025 (c, var2, build_outer_var_ref (var, ctx));
10026 if (x)
10027 gimplify_and_add (x, &clist);
10028
10029 x = build_outer_var_ref (var, ctx);
10030 x = lang_hooks.decls.omp_clause_assign_op (c, unshare_expr (var4),
10031 x);
10032 gimplify_and_add (x, &thr01_list);
10033
/* Save any existing DECL_VALUE_EXPR of the decl so it can be restored
   after the temporary redirections below.  */
10034 tree y = (DECL_HAS_VALUE_EXPR_P (new_vard)
10035 ? DECL_VALUE_EXPR (new_vard) : NULL_TREE);
10036 if (var3)
10037 {
10038 x = unshare_expr (var4);
10039 x = lang_hooks.decls.omp_clause_assign_op (c, x, var3);
10040 gimplify_and_add (x, &thrn1_list);
10041 x = unshare_expr (var4);
10042 x = lang_hooks.decls.omp_clause_assign_op (c, x, var3);
10043 gimplify_and_add (x, &thr02_list);
10044 }
10045 else if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c))
10046 {
10047 /* Otherwise, assign to it the identity element. */
10048 gimple_seq tseq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c);
10049 tseq = copy_gimple_seq_and_replace_locals (tseq);
10050 if (!is_for_simd)
10051 {
10052 if (new_vard != new_var)
10053 val = build_fold_addr_expr_loc (clause_loc, val);
10054 SET_DECL_VALUE_EXPR (new_vard, val);
10055 DECL_HAS_VALUE_EXPR_P (new_vard) = 1;
10056 }
10057 SET_DECL_VALUE_EXPR (placeholder, error_mark_node);
10058 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
10059 lower_omp (&tseq, ctx);
10060 gimple_seq_add_seq (&thrn1_list, tseq);
10061 tseq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c);
10062 lower_omp (&tseq, ctx);
10063 gimple_seq_add_seq (&thr02_list, tseq);
10064 SET_DECL_VALUE_EXPR (placeholder, NULL_TREE);
10065 DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
10066 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL;
10067 if (y)
10068 SET_DECL_VALUE_EXPR (new_vard, y);
10069 else
10070 {
10071 DECL_HAS_VALUE_EXPR_P (new_vard) = 0;
10072 SET_DECL_VALUE_EXPR (new_vard, NULL_TREE);
10073 }
10074 }
10075
10076 x = unshare_expr (var4);
10077 x = lang_hooks.decls.omp_clause_assign_op (c, x, rprivam1_ref);
10078 gimplify_and_add (x, &thrn2_list);
10079
/* Pass one, scan phase: store into rprivb[ivar] before (exclusive) or
   after (inclusive) merging the accumulator.  */
10080 if (is_for_simd)
10081 {
10082 x = unshare_expr (rprivb_ref);
10083 x = lang_hooks.decls.omp_clause_assign_op (c, x, var5);
10084 gimplify_and_add (x, &scan1_list);
10085 }
10086 else
10087 {
10088 if (ctx->scan_exclusive)
10089 {
10090 x = unshare_expr (rprivb_ref);
10091 x = lang_hooks.decls.omp_clause_assign_op (c, x, var2);
10092 gimplify_and_add (x, &scan1_list);
10093 }
10094
10095 gimple_seq tseq = OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c);
10096 tseq = copy_gimple_seq_and_replace_locals (tseq);
10097 SET_DECL_VALUE_EXPR (placeholder, var2);
10098 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
10099 lower_omp (&tseq, ctx);
10100 gimple_seq_add_seq (&scan1_list, tseq);
10101
10102 if (ctx->scan_inclusive)
10103 {
10104 x = unshare_expr (rprivb_ref);
10105 x = lang_hooks.decls.omp_clause_assign_op (c, x, var2);
10106 gimplify_and_add (x, &scan1_list);
10107 }
10108 }
10109
/* After pass one, publish this thread's partial result to rpriva[].  */
10110 x = unshare_expr (rpriva_ref);
10111 x = lang_hooks.decls.omp_clause_assign_op (c, x,
10112 unshare_expr (var4));
10113 gimplify_and_add (x, &mdlist);
10114
/* Pass two, input phase: merge the saved per-iteration value into the
   running value seen by the user's scan-phase code.  */
10115 x = unshare_expr (is_for_simd ? var6 : new_var);
10116 x = lang_hooks.decls.omp_clause_assign_op (c, x, var4);
10117 gimplify_and_add (x, &input2_list);
10118
10119 val = rprivb_ref;
10120 if (new_vard != new_var)
10121 val = build_fold_addr_expr_loc (clause_loc, val);
10122
10123 gimple_seq tseq = OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c);
10124 tseq = copy_gimple_seq_and_replace_locals (tseq);
10125 SET_DECL_VALUE_EXPR (new_vard, val);
10126 DECL_HAS_VALUE_EXPR_P (new_vard) = 1;
10127 if (is_for_simd)
10128 {
10129 SET_DECL_VALUE_EXPR (placeholder, var6);
10130 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
10131 }
10132 else
10133 DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
10134 lower_omp (&tseq, ctx);
10135 if (y)
10136 SET_DECL_VALUE_EXPR (new_vard, y);
10137 else
10138 {
10139 DECL_HAS_VALUE_EXPR_P (new_vard) = 0;
10140 SET_DECL_VALUE_EXPR (new_vard, NULL_TREE);
10141 }
10142 if (!is_for_simd)
10143 {
10144 SET_DECL_VALUE_EXPR (placeholder, new_var);
10145 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
10146 lower_omp (&tseq, ctx);
10147 }
10148 gimple_seq_add_seq (&input2_list, tseq);
10149
/* Final write-back of the total into the original list item (executed
   by the last thread only; see new_dlist at the end).  */
10150 x = build_outer_var_ref (var, ctx);
10151 x = lang_hooks.decls.omp_clause_assign_op (c, x, rpriva_ref);
10152 gimplify_and_add (x, &last_list);
10153
/* One combination step: rpriva[l] = merge (rpriva[l - k], rpriva[l]),
   staged through var2.  */
10154 x = lang_hooks.decls.omp_clause_assign_op (c, var2, rprivalmk_ref);
10155 gimplify_and_add (x, &reduc_list);
10156 tseq = OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c);
10157 tseq = copy_gimple_seq_and_replace_locals (tseq);
10158 val = rprival_ref;
10159 if (new_vard != new_var)
10160 val = build_fold_addr_expr_loc (clause_loc, val);
10161 SET_DECL_VALUE_EXPR (new_vard, val);
10162 DECL_HAS_VALUE_EXPR_P (new_vard) = 1;
10163 SET_DECL_VALUE_EXPR (placeholder, var2);
10164 lower_omp (&tseq, ctx);
10165 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
10166 SET_DECL_VALUE_EXPR (placeholder, NULL_TREE);
10167 DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
10168 if (y)
10169 SET_DECL_VALUE_EXPR (new_vard, y);
10170 else
10171 {
10172 DECL_HAS_VALUE_EXPR_P (new_vard) = 0;
10173 SET_DECL_VALUE_EXPR (new_vard, NULL_TREE);
10174 }
10175 gimple_seq_add_seq (&reduc_list, tseq);
10176 x = lang_hooks.decls.omp_clause_assign_op (c, rprival_ref, var2);
10177 gimplify_and_add (x, &reduc_list);
10178
10179 x = lang_hooks.decls.omp_clause_dtor (c, var2);
10180 if (x)
10181 gimplify_and_add (x, dlist);
10182 }
/* Simple (built-in operator) reduction: same sequences built with plain
   assignments instead of language hooks.  */
10183 else
10184 {
10185 x = build_outer_var_ref (var, ctx);
10186 gimplify_assign (unshare_expr (var4), x, &thr01_list);
10187
10188 x = omp_reduction_init (c, TREE_TYPE (new_var));
10189 gimplify_assign (unshare_expr (var4), unshare_expr (x),
10190 &thrn1_list);
10191 gimplify_assign (unshare_expr (var4), x, &thr02_list);
10192
10193 gimplify_assign (unshare_expr (var4), rprivam1_ref, &thrn2_list);
10194
/* A minus reduction combines its partial results with addition.  */
10195 enum tree_code code = OMP_CLAUSE_REDUCTION_CODE (c);
10196 if (code == MINUS_EXPR)
10197 code = PLUS_EXPR;
10198
10199 if (is_for_simd)
10200 gimplify_assign (unshare_expr (rprivb_ref), var5, &scan1_list);
10201 else
10202 {
10203 if (ctx->scan_exclusive)
10204 gimplify_assign (unshare_expr (rprivb_ref), var2,
10205 &scan1_list);
10206 x = build2 (code, TREE_TYPE (new_var), var2, new_var);
10207 gimplify_assign (var2, x, &scan1_list);
10208 if (ctx->scan_inclusive)
10209 gimplify_assign (unshare_expr (rprivb_ref), var2,
10210 &scan1_list);
10211 }
10212
10213 gimplify_assign (unshare_expr (rpriva_ref), unshare_expr (var4),
10214 &mdlist);
10215
10216 x = build2 (code, TREE_TYPE (new_var), var4, rprivb_ref);
10217 gimplify_assign (is_for_simd ? var6 : new_var, x, &input2_list);
10218
10219 gimplify_assign (build_outer_var_ref (var, ctx), rpriva_ref,
10220 &last_list);
10221
10222 x = build2 (code, TREE_TYPE (new_var), rprivalmk_ref,
10223 unshare_expr (rprival_ref));
10224 gimplify_assign (rprival_ref, x, &reduc_list);
10225 }
10226 }
10227
/* Bump the per-thread iteration counter at the end of the scan phase in
   both passes.  */
10228 g = gimple_build_assign (ivar, PLUS_EXPR, ivar, size_one_node);
10229 gimple_seq_add_stmt (&scan1_list, g);
10230 g = gimple_build_assign (ivar, PLUS_EXPR, ivar, size_one_node);
10231 gimple_seq_add_stmt (gimple_omp_body_ptr (is_for_simd
10232 ? scan_stmt4 : scan_stmt2), g);
10233
/* Control temporaries, marked _SCANTEMP__CONTROL, shared by both loops;
   presumably used by the runtime to coordinate the two passes — see the
   expanders of OMP_CLAUSE__SCANTEMP_ for their exact semantics.  */
10234 tree controlb = create_tmp_var (boolean_type_node);
10235 tree controlp = create_tmp_var (ptr_type_node);
10236 tree nc = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__SCANTEMP_);
10237 OMP_CLAUSE_DECL (nc) = controlb;
10238 OMP_CLAUSE__SCANTEMP__CONTROL (nc) = 1;
10239 *cp1 = nc;
10240 cp1 = &OMP_CLAUSE_CHAIN (nc);
10241 nc = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__SCANTEMP_);
10242 OMP_CLAUSE_DECL (nc) = controlp;
10243 OMP_CLAUSE__SCANTEMP__CONTROL (nc) = 1;
10244 *cp1 = nc;
10245 cp1 = &OMP_CLAUSE_CHAIN (nc);
10246 nc = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__SCANTEMP_);
10247 OMP_CLAUSE_DECL (nc) = controlb;
10248 OMP_CLAUSE__SCANTEMP__CONTROL (nc) = 1;
10249 *cp2 = nc;
10250 cp2 = &OMP_CLAUSE_CHAIN (nc);
10251 nc = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__SCANTEMP_);
10252 OMP_CLAUSE_DECL (nc) = controlp;
10253 OMP_CLAUSE__SCANTEMP__CONTROL (nc) = 1;
10254 *cp2 = nc;
10255 cp2 = &OMP_CLAUSE_CHAIN (nc);
10256
/* Prepend the new _scantemp_ clauses to each loop's clause chain.  */
10257 *cp1 = gimple_omp_for_clauses (stmt);
10258 gimple_omp_for_set_clauses (stmt, new_clauses1);
10259 *cp2 = gimple_omp_for_clauses (new_stmt);
10260 gimple_omp_for_set_clauses (new_stmt, new_clauses2);
10261
/* Splice the built sequences into the scan statements, then replace each
   input/scan statement with its body (the markers are no longer needed).  */
10262 if (is_for_simd)
10263 {
10264 gimple_seq_add_seq (gimple_omp_body_ptr (scan_stmt3), scan1_list);
10265 gimple_seq_add_seq (gimple_omp_body_ptr (input_stmt4), input2_list);
10266
10267 gsi_insert_seq_after (&input3_gsi, gimple_omp_body (input_stmt3),
10268 GSI_SAME_STMT);
10269 gsi_remove (&input3_gsi, true);
10270 gsi_insert_seq_after (&scan3_gsi, gimple_omp_body (scan_stmt3),
10271 GSI_SAME_STMT);
10272 gsi_remove (&scan3_gsi, true);
10273 gsi_insert_seq_after (&input4_gsi, gimple_omp_body (input_stmt4),
10274 GSI_SAME_STMT);
10275 gsi_remove (&input4_gsi, true);
10276 gsi_insert_seq_after (&scan4_gsi, gimple_omp_body (scan_stmt4),
10277 GSI_SAME_STMT);
10278 gsi_remove (&scan4_gsi, true);
10279 }
10280 else
10281 {
10282 gimple_omp_set_body (scan_stmt1, scan1_list);
10283 gimple_omp_set_body (input_stmt2, input2_list);
10284 }
10285
10286 gsi_insert_seq_after (&input1_gsi, gimple_omp_body (input_stmt1),
10287 GSI_SAME_STMT);
10288 gsi_remove (&input1_gsi, true);
10289 gsi_insert_seq_after (&scan1_gsi, gimple_omp_body (scan_stmt1),
10290 GSI_SAME_STMT);
10291 gsi_remove (&scan1_gsi, true);
10292 gsi_insert_seq_after (&input2_gsi, gimple_omp_body (input_stmt2),
10293 GSI_SAME_STMT);
10294 gsi_remove (&input2_gsi, true);
10295 gsi_insert_seq_after (&scan2_gsi, gimple_omp_body (scan_stmt2),
10296 GSI_SAME_STMT);
10297 gsi_remove (&scan2_gsi, true);
10298
10299 gimple_seq_add_seq (body_p, clist);
10300
/* Emit: if (thread_num == 0) init from original list item (thr01_list);
   else init from identity/var3 (thrn1_list).  */
10301 tree lab1 = create_artificial_label (UNKNOWN_LOCATION);
10302 tree lab2 = create_artificial_label (UNKNOWN_LOCATION);
10303 tree lab3 = create_artificial_label (UNKNOWN_LOCATION);
10304 g = gimple_build_cond (EQ_EXPR, thread_num, integer_zero_node, lab1, lab2);
10305 gimple_seq_add_stmt (body_p, g);
10306 g = gimple_build_label (lab1);
10307 gimple_seq_add_stmt (body_p, g);
10308 gimple_seq_add_seq (body_p, thr01_list);
10309 g = gimple_build_goto (lab3);
10310 gimple_seq_add_stmt (body_p, g);
10311 g = gimple_build_label (lab2);
10312 gimple_seq_add_stmt (body_p, g);
10313 gimple_seq_add_seq (body_p, thrn1_list);
10314 g = gimple_build_label (lab3);
10315 gimple_seq_add_stmt (body_p, g);
10316
/* First pass: the input-phase loop (nowait; mdlist publishes the
   per-thread partial result afterwards).  */
10317 g = gimple_build_assign (ivar, size_zero_node);
10318 gimple_seq_add_stmt (body_p, g);
10319
10320 gimple_seq_add_stmt (body_p, stmt);
10321 gimple_seq_add_seq (body_p, body);
10322 gimple_seq_add_stmt (body_p, gimple_build_omp_continue (fd->loop.v,
10323 fd->loop.v));
10324
10325 g = gimple_build_omp_return (true);
10326 gimple_seq_add_stmt (body_p, g);
10327 gimple_seq_add_seq (body_p, mdlist);
10328
/* If there is more than one thread, combine the per-thread partials in
   rpriva[] into prefix results: k doubles while walking up, then (once
   down is set to all-ones) halves walking back down, with a barrier
   between steps and reduc_list applied at each in-range index l.  */
10329 lab1 = create_artificial_label (UNKNOWN_LOCATION);
10330 lab2 = create_artificial_label (UNKNOWN_LOCATION);
10331 g = gimple_build_cond (GT_EXPR, num_threads, integer_one_node, lab1, lab2);
10332 gimple_seq_add_stmt (body_p, g);
10333 g = gimple_build_label (lab1);
10334 gimple_seq_add_stmt (body_p, g);
10335
10336 g = omp_build_barrier (NULL);
10337 gimple_seq_add_stmt (body_p, g);
10338
10339 tree down = create_tmp_var (unsigned_type_node);
10340 g = gimple_build_assign (down, build_zero_cst (unsigned_type_node));
10341 gimple_seq_add_stmt (body_p, g);
10342
10343 g = gimple_build_assign (k, build_one_cst (unsigned_type_node));
10344 gimple_seq_add_stmt (body_p, g);
10345
10346 tree num_threadsu = create_tmp_var (unsigned_type_node);
10347 g = gimple_build_assign (num_threadsu, NOP_EXPR, num_threads);
10348 gimple_seq_add_stmt (body_p, g);
10349
10350 tree thread_numu = create_tmp_var (unsigned_type_node);
10351 g = gimple_build_assign (thread_numu, NOP_EXPR, thread_num);
10352 gimple_seq_add_stmt (body_p, g);
10353
10354 tree thread_nump1 = create_tmp_var (unsigned_type_node);
10355 g = gimple_build_assign (thread_nump1, PLUS_EXPR, thread_numu,
10356 build_int_cst (unsigned_type_node, 1));
10357 gimple_seq_add_stmt (body_p, g);
10358
/* lab3 is the head of the combination loop.  */
10359 lab3 = create_artificial_label (UNKNOWN_LOCATION);
10360 g = gimple_build_label (lab3);
10361 gimple_seq_add_stmt (body_p, g);
10362
10363 tree twok = create_tmp_var (unsigned_type_node);
10364 g = gimple_build_assign (twok, LSHIFT_EXPR, k, integer_one_node);
10365 gimple_seq_add_stmt (body_p, g);
10366
/* Once 2*k exceeds the thread count, switch to the downward phase.  */
10367 tree lab4 = create_artificial_label (UNKNOWN_LOCATION);
10368 tree lab5 = create_artificial_label (UNKNOWN_LOCATION);
10369 tree lab6 = create_artificial_label (UNKNOWN_LOCATION);
10370 g = gimple_build_cond (GT_EXPR, twok, num_threadsu, lab4, lab5);
10371 gimple_seq_add_stmt (body_p, g);
10372 g = gimple_build_label (lab4);
10373 gimple_seq_add_stmt (body_p, g);
10374 g = gimple_build_assign (down, build_all_ones_cst (unsigned_type_node));
10375 gimple_seq_add_stmt (body_p, g);
10376 g = gimple_build_assign (k, RSHIFT_EXPR, k, integer_one_node);
10377 gimple_seq_add_stmt (body_p, g);
10378
10379 g = gimple_build_cond (EQ_EXPR, k, num_threadsu, lab6, lab5);
10380 gimple_seq_add_stmt (body_p, g);
10381 g = gimple_build_label (lab6);
10382 gimple_seq_add_stmt (body_p, g);
10383
10384 g = gimple_build_assign (k, RSHIFT_EXPR, k, integer_one_node);
10385 gimple_seq_add_stmt (body_p, g);
10386
10387 g = gimple_build_label (lab5);
10388 gimple_seq_add_stmt (body_p, g);
10389
10390 g = gimple_build_assign (twok, LSHIFT_EXPR, k, integer_one_node);
10391 gimple_seq_add_stmt (body_p, g);
10392
/* Compute l = (thread_num + 1) * 2k + ((k & down) - 1) with an overflow
   check; on overflow this thread does no work in this step.  */
10393 tree cplx = create_tmp_var (build_complex_type (unsigned_type_node, false));
10394 g = gimple_build_call_internal (IFN_MUL_OVERFLOW, 2, thread_nump1, twok);
10395 gimple_call_set_lhs (g, cplx);
10396 gimple_seq_add_stmt (body_p, g);
10397 tree mul = create_tmp_var (unsigned_type_node);
10398 g = gimple_build_assign (mul, REALPART_EXPR,
10399 build1 (REALPART_EXPR, unsigned_type_node, cplx));
10400 gimple_seq_add_stmt (body_p, g);
10401 tree ovf = create_tmp_var (unsigned_type_node);
10402 g = gimple_build_assign (ovf, IMAGPART_EXPR,
10403 build1 (IMAGPART_EXPR, unsigned_type_node, cplx));
10404 gimple_seq_add_stmt (body_p, g);
10405
10406 tree lab7 = create_artificial_label (UNKNOWN_LOCATION);
10407 tree lab8 = create_artificial_label (UNKNOWN_LOCATION);
10408 g = gimple_build_cond (EQ_EXPR, ovf, build_zero_cst (unsigned_type_node),
10409 lab7, lab8);
10410 gimple_seq_add_stmt (body_p, g);
10411 g = gimple_build_label (lab7);
10412 gimple_seq_add_stmt (body_p, g);
10413
10414 tree andv = create_tmp_var (unsigned_type_node);
10415 g = gimple_build_assign (andv, BIT_AND_EXPR, k, down);
10416 gimple_seq_add_stmt (body_p, g);
10417 tree andvm1 = create_tmp_var (unsigned_type_node);
10418 g = gimple_build_assign (andvm1, PLUS_EXPR, andv,
10419 build_minus_one_cst (unsigned_type_node));
10420 gimple_seq_add_stmt (body_p, g);
10421
10422 g = gimple_build_assign (l, PLUS_EXPR, mul, andvm1);
10423 gimple_seq_add_stmt (body_p, g);
10424
/* If l is in range, perform the combination step for this thread.  */
10425 tree lab9 = create_artificial_label (UNKNOWN_LOCATION);
10426 g = gimple_build_cond (LT_EXPR, l, num_threadsu, lab9, lab8);
10427 gimple_seq_add_stmt (body_p, g);
10428 g = gimple_build_label (lab9);
10429 gimple_seq_add_stmt (body_p, g);
10430 gimple_seq_add_seq (body_p, reduc_list);
10431 g = gimple_build_label (lab8);
10432 gimple_seq_add_stmt (body_p, g);
10433
/* Advance k: double on the way up (down == 0), halve on the way down.  */
10434 tree lab10 = create_artificial_label (UNKNOWN_LOCATION);
10435 tree lab11 = create_artificial_label (UNKNOWN_LOCATION);
10436 tree lab12 = create_artificial_label (UNKNOWN_LOCATION);
10437 g = gimple_build_cond (EQ_EXPR, down, build_zero_cst (unsigned_type_node),
10438 lab10, lab11);
10439 gimple_seq_add_stmt (body_p, g);
10440 g = gimple_build_label (lab10);
10441 gimple_seq_add_stmt (body_p, g);
10442 g = gimple_build_assign (k, LSHIFT_EXPR, k, integer_one_node);
10443 gimple_seq_add_stmt (body_p, g);
10444 g = gimple_build_goto (lab12);
10445 gimple_seq_add_stmt (body_p, g);
10446 g = gimple_build_label (lab11);
10447 gimple_seq_add_stmt (body_p, g);
10448 g = gimple_build_assign (k, RSHIFT_EXPR, k, integer_one_node);
10449 gimple_seq_add_stmt (body_p, g);
10450 g = gimple_build_label (lab12);
10451 gimple_seq_add_stmt (body_p, g);
10452
10453 g = omp_build_barrier (NULL);
10454 gimple_seq_add_stmt (body_p, g);
10455
/* Loop while k != 0 (k reaches zero after the final downward halving).  */
10456 g = gimple_build_cond (NE_EXPR, k, build_zero_cst (unsigned_type_node),
10457 lab3, lab2);
10458 gimple_seq_add_stmt (body_p, g);
10459
10460 g = gimple_build_label (lab2);
10461 gimple_seq_add_stmt (body_p, g);
10462
/* Before the second pass: thread 0 re-initializes var4 (thr02_list),
   the other threads load rpriva[thread_num - 1] (thrn2_list).  */
10463 lab1 = create_artificial_label (UNKNOWN_LOCATION);
10464 lab2 = create_artificial_label (UNKNOWN_LOCATION);
10465 lab3 = create_artificial_label (UNKNOWN_LOCATION);
10466 g = gimple_build_cond (EQ_EXPR, thread_num, integer_zero_node, lab1, lab2);
10467 gimple_seq_add_stmt (body_p, g);
10468 g = gimple_build_label (lab1);
10469 gimple_seq_add_stmt (body_p, g);
10470 gimple_seq_add_seq (body_p, thr02_list);
10471 g = gimple_build_goto (lab3);
10472 gimple_seq_add_stmt (body_p, g);
10473 g = gimple_build_label (lab2);
10474 gimple_seq_add_stmt (body_p, g);
10475 gimple_seq_add_seq (body_p, thrn2_list);
10476 g = gimple_build_label (lab3);
10477 gimple_seq_add_stmt (body_p, g);
10478
/* Second pass: the scan-phase loop over the copied body.  */
10479 g = gimple_build_assign (ivar, size_zero_node);
10480 gimple_seq_add_stmt (body_p, g);
10481 gimple_seq_add_stmt (body_p, new_stmt);
10482 gimple_seq_add_seq (body_p, new_body);
10483
/* In the destructor list, have the last thread copy its rpriva[] element
   back into the original list item (last_list), ahead of the existing
   destructor sequence.  */
10484 gimple_seq new_dlist = NULL;
10485 lab1 = create_artificial_label (UNKNOWN_LOCATION);
10486 lab2 = create_artificial_label (UNKNOWN_LOCATION);
10487 tree num_threadsm1 = create_tmp_var (integer_type_node);
10488 g = gimple_build_assign (num_threadsm1, PLUS_EXPR, num_threads,
10489 integer_minus_one_node);
10490 gimple_seq_add_stmt (&new_dlist, g);
10491 g = gimple_build_cond (EQ_EXPR, thread_num, num_threadsm1, lab1, lab2);
10492 gimple_seq_add_stmt (&new_dlist, g);
10493 g = gimple_build_label (lab1);
10494 gimple_seq_add_stmt (&new_dlist, g);
10495 gimple_seq_add_seq (&new_dlist, last_list);
10496 g = gimple_build_label (lab2);
10497 gimple_seq_add_stmt (&new_dlist, g);
10498 gimple_seq_add_seq (&new_dlist, *dlist);
10499 *dlist = new_dlist;
10500 }
10501
10502 /* Lower code for an OMP loop directive. */
10503
10504 static void
10505 lower_omp_for (gimple_stmt_iterator *gsi_p, omp_context *ctx)
10506 {
10507 tree *rhs_p, block;
10508 struct omp_for_data fd, *fdp = NULL;
10509 gomp_for *stmt = as_a <gomp_for *> (gsi_stmt (*gsi_p));
10510 gbind *new_stmt;
10511 gimple_seq omp_for_body, body, dlist, tred_ilist = NULL, tred_dlist = NULL;
10512 gimple_seq cnt_list = NULL, clist = NULL;
10513 gimple_seq oacc_head = NULL, oacc_tail = NULL;
10514 size_t i;
10515
10516 push_gimplify_context ();
10517
10518 lower_omp (gimple_omp_for_pre_body_ptr (stmt), ctx);
10519
10520 block = make_node (BLOCK);
10521 new_stmt = gimple_build_bind (NULL, NULL, block);
10522 /* Replace at gsi right away, so that 'stmt' is no member
10523 of a sequence anymore as we're going to add to a different
10524 one below. */
10525 gsi_replace (gsi_p, new_stmt, true);
10526
10527 /* Move declaration of temporaries in the loop body before we make
10528 it go away. */
10529 omp_for_body = gimple_omp_body (stmt);
10530 if (!gimple_seq_empty_p (omp_for_body)
10531 && gimple_code (gimple_seq_first_stmt (omp_for_body)) == GIMPLE_BIND)
10532 {
10533 gbind *inner_bind
10534 = as_a <gbind *> (gimple_seq_first_stmt (omp_for_body));
10535 tree vars = gimple_bind_vars (inner_bind);
10536 gimple_bind_append_vars (new_stmt, vars);
10537 /* bind_vars/BLOCK_VARS are being moved to new_stmt/block, don't
10538 keep them on the inner_bind and it's block. */
10539 gimple_bind_set_vars (inner_bind, NULL_TREE);
10540 if (gimple_bind_block (inner_bind))
10541 BLOCK_VARS (gimple_bind_block (inner_bind)) = NULL_TREE;
10542 }
10543
10544 if (gimple_omp_for_combined_into_p (stmt))
10545 {
10546 omp_extract_for_data (stmt, &fd, NULL);
10547 fdp = &fd;
10548
10549 /* We need two temporaries with fd.loop.v type (istart/iend)
10550 and then (fd.collapse - 1) temporaries with the same
10551 type for count2 ... countN-1 vars if not constant. */
10552 size_t count = 2;
10553 tree type = fd.iter_type;
10554 if (fd.collapse > 1
10555 && TREE_CODE (fd.loop.n2) != INTEGER_CST)
10556 count += fd.collapse - 1;
10557 size_t count2 = 0;
10558 tree type2 = NULL_TREE;
10559 bool taskreg_for
10560 = (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_FOR
10561 || gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_TASKLOOP);
10562 tree outerc = NULL, *pc = gimple_omp_for_clauses_ptr (stmt);
10563 tree simtc = NULL;
10564 tree clauses = *pc;
10565 if (fd.collapse > 1
10566 && fd.non_rect
10567 && fd.last_nonrect == fd.first_nonrect + 1
10568 && TREE_CODE (fd.loop.n2) != INTEGER_CST)
10569 if (tree v = gimple_omp_for_index (stmt, fd.last_nonrect))
10570 if (!TYPE_UNSIGNED (TREE_TYPE (v)))
10571 {
10572 v = gimple_omp_for_index (stmt, fd.first_nonrect);
10573 type2 = TREE_TYPE (v);
10574 count++;
10575 count2 = 3;
10576 }
10577 if (taskreg_for)
10578 outerc
10579 = omp_find_clause (gimple_omp_taskreg_clauses (ctx->outer->stmt),
10580 OMP_CLAUSE__LOOPTEMP_);
10581 if (ctx->simt_stmt)
10582 simtc = omp_find_clause (gimple_omp_for_clauses (ctx->simt_stmt),
10583 OMP_CLAUSE__LOOPTEMP_);
10584 for (i = 0; i < count + count2; i++)
10585 {
10586 tree temp;
10587 if (taskreg_for)
10588 {
10589 gcc_assert (outerc);
10590 temp = lookup_decl (OMP_CLAUSE_DECL (outerc), ctx->outer);
10591 outerc = omp_find_clause (OMP_CLAUSE_CHAIN (outerc),
10592 OMP_CLAUSE__LOOPTEMP_);
10593 }
10594 else
10595 {
10596 /* If there are 2 adjacent SIMD stmts, one with _simt_
10597 clause, another without, make sure they have the same
10598 decls in _looptemp_ clauses, because the outer stmt
10599 they are combined into will look up just one inner_stmt. */
10600 if (ctx->simt_stmt)
10601 temp = OMP_CLAUSE_DECL (simtc);
10602 else
10603 temp = create_tmp_var (i >= count ? type2 : type);
10604 insert_decl_map (&ctx->outer->cb, temp, temp);
10605 }
10606 *pc = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__LOOPTEMP_);
10607 OMP_CLAUSE_DECL (*pc) = temp;
10608 pc = &OMP_CLAUSE_CHAIN (*pc);
10609 if (ctx->simt_stmt)
10610 simtc = omp_find_clause (OMP_CLAUSE_CHAIN (simtc),
10611 OMP_CLAUSE__LOOPTEMP_);
10612 }
10613 *pc = clauses;
10614 }
10615
10616 /* The pre-body and input clauses go before the lowered GIMPLE_OMP_FOR. */
10617 dlist = NULL;
10618 body = NULL;
10619 tree rclauses
10620 = omp_task_reductions_find_first (gimple_omp_for_clauses (stmt), OMP_FOR,
10621 OMP_CLAUSE_REDUCTION);
10622 tree rtmp = NULL_TREE;
10623 if (rclauses)
10624 {
10625 tree type = build_pointer_type (pointer_sized_int_node);
10626 tree temp = create_tmp_var (type);
10627 tree c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__REDUCTEMP_);
10628 OMP_CLAUSE_DECL (c) = temp;
10629 OMP_CLAUSE_CHAIN (c) = gimple_omp_for_clauses (stmt);
10630 gimple_omp_for_set_clauses (stmt, c);
10631 lower_omp_task_reductions (ctx, OMP_FOR,
10632 gimple_omp_for_clauses (stmt),
10633 &tred_ilist, &tred_dlist);
10634 rclauses = c;
10635 rtmp = make_ssa_name (type);
10636 gimple_seq_add_stmt (&body, gimple_build_assign (rtmp, temp));
10637 }
10638
10639 lower_lastprivate_conditional_clauses (gimple_omp_for_clauses_ptr (stmt),
10640 ctx);
10641
10642 lower_rec_input_clauses (gimple_omp_for_clauses (stmt), &body, &dlist, ctx,
10643 fdp);
10644 gimple_seq_add_seq (rclauses ? &tred_ilist : &body,
10645 gimple_omp_for_pre_body (stmt));
10646
10647 lower_omp (gimple_omp_body_ptr (stmt), ctx);
10648
10649 /* Lower the header expressions. At this point, we can assume that
10650 the header is of the form:
10651
10652 #pragma omp for (V = VAL1; V {<|>|<=|>=} VAL2; V = V [+-] VAL3)
10653
10654 We just need to make sure that VAL1, VAL2 and VAL3 are lowered
10655 using the .omp_data_s mapping, if needed. */
10656 for (i = 0; i < gimple_omp_for_collapse (stmt); i++)
10657 {
10658 rhs_p = gimple_omp_for_initial_ptr (stmt, i);
10659 if (TREE_CODE (*rhs_p) == TREE_VEC)
10660 {
10661 if (!is_gimple_min_invariant (TREE_VEC_ELT (*rhs_p, 1)))
10662 TREE_VEC_ELT (*rhs_p, 1)
10663 = get_formal_tmp_var (TREE_VEC_ELT (*rhs_p, 1), &cnt_list);
10664 if (!is_gimple_min_invariant (TREE_VEC_ELT (*rhs_p, 2)))
10665 TREE_VEC_ELT (*rhs_p, 2)
10666 = get_formal_tmp_var (TREE_VEC_ELT (*rhs_p, 2), &cnt_list);
10667 }
10668 else if (!is_gimple_min_invariant (*rhs_p))
10669 *rhs_p = get_formal_tmp_var (*rhs_p, &cnt_list);
10670 else if (TREE_CODE (*rhs_p) == ADDR_EXPR)
10671 recompute_tree_invariant_for_addr_expr (*rhs_p);
10672
10673 rhs_p = gimple_omp_for_final_ptr (stmt, i);
10674 if (TREE_CODE (*rhs_p) == TREE_VEC)
10675 {
10676 if (!is_gimple_min_invariant (TREE_VEC_ELT (*rhs_p, 1)))
10677 TREE_VEC_ELT (*rhs_p, 1)
10678 = get_formal_tmp_var (TREE_VEC_ELT (*rhs_p, 1), &cnt_list);
10679 if (!is_gimple_min_invariant (TREE_VEC_ELT (*rhs_p, 2)))
10680 TREE_VEC_ELT (*rhs_p, 2)
10681 = get_formal_tmp_var (TREE_VEC_ELT (*rhs_p, 2), &cnt_list);
10682 }
10683 else if (!is_gimple_min_invariant (*rhs_p))
10684 *rhs_p = get_formal_tmp_var (*rhs_p, &cnt_list);
10685 else if (TREE_CODE (*rhs_p) == ADDR_EXPR)
10686 recompute_tree_invariant_for_addr_expr (*rhs_p);
10687
10688 rhs_p = &TREE_OPERAND (gimple_omp_for_incr (stmt, i), 1);
10689 if (!is_gimple_min_invariant (*rhs_p))
10690 *rhs_p = get_formal_tmp_var (*rhs_p, &cnt_list);
10691 }
10692 if (rclauses)
10693 gimple_seq_add_seq (&tred_ilist, cnt_list);
10694 else
10695 gimple_seq_add_seq (&body, cnt_list);
10696
10697 /* Once lowered, extract the bounds and clauses. */
10698 omp_extract_for_data (stmt, &fd, NULL);
10699
10700 if (is_gimple_omp_oacc (ctx->stmt)
10701 && !ctx_in_oacc_kernels_region (ctx))
10702 lower_oacc_head_tail (gimple_location (stmt),
10703 gimple_omp_for_clauses (stmt),
10704 &oacc_head, &oacc_tail, ctx);
10705
10706 /* Add OpenACC partitioning and reduction markers just before the loop. */
10707 if (oacc_head)
10708 gimple_seq_add_seq (&body, oacc_head);
10709
10710 lower_omp_for_lastprivate (&fd, &body, &dlist, &clist, ctx);
10711
10712 if (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_FOR)
10713 for (tree c = gimple_omp_for_clauses (stmt); c; c = OMP_CLAUSE_CHAIN (c))
10714 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
10715 && !OMP_CLAUSE_LINEAR_NO_COPYIN (c))
10716 {
10717 OMP_CLAUSE_DECL (c) = lookup_decl (OMP_CLAUSE_DECL (c), ctx);
10718 if (DECL_P (OMP_CLAUSE_LINEAR_STEP (c)))
10719 OMP_CLAUSE_LINEAR_STEP (c)
10720 = maybe_lookup_decl_in_outer_ctx (OMP_CLAUSE_LINEAR_STEP (c),
10721 ctx);
10722 }
10723
10724 if ((ctx->scan_inclusive || ctx->scan_exclusive)
10725 && gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_FOR)
10726 lower_omp_for_scan (&body, &dlist, stmt, &fd, ctx);
10727 else
10728 {
10729 gimple_seq_add_stmt (&body, stmt);
10730 gimple_seq_add_seq (&body, gimple_omp_body (stmt));
10731 }
10732
10733 gimple_seq_add_stmt (&body, gimple_build_omp_continue (fd.loop.v,
10734 fd.loop.v));
10735
10736 /* After the loop, add exit clauses. */
10737 lower_reduction_clauses (gimple_omp_for_clauses (stmt), &body, &clist, ctx);
10738
10739 if (clist)
10740 {
10741 tree fndecl = builtin_decl_explicit (BUILT_IN_GOMP_ATOMIC_START);
10742 gcall *g = gimple_build_call (fndecl, 0);
10743 gimple_seq_add_stmt (&body, g);
10744 gimple_seq_add_seq (&body, clist);
10745 fndecl = builtin_decl_explicit (BUILT_IN_GOMP_ATOMIC_END);
10746 g = gimple_build_call (fndecl, 0);
10747 gimple_seq_add_stmt (&body, g);
10748 }
10749
10750 if (ctx->cancellable)
10751 gimple_seq_add_stmt (&body, gimple_build_label (ctx->cancel_label));
10752
10753 gimple_seq_add_seq (&body, dlist);
10754
10755 if (rclauses)
10756 {
10757 gimple_seq_add_seq (&tred_ilist, body);
10758 body = tred_ilist;
10759 }
10760
10761 body = maybe_catch_exception (body);
10762
10763 /* Region exit marker goes at the end of the loop body. */
10764 gimple *g = gimple_build_omp_return (fd.have_nowait);
10765 gimple_seq_add_stmt (&body, g);
10766
10767 gimple_seq_add_seq (&body, tred_dlist);
10768
10769 maybe_add_implicit_barrier_cancel (ctx, g, &body);
10770
10771 if (rclauses)
10772 OMP_CLAUSE_DECL (rclauses) = rtmp;
10773
10774 /* Add OpenACC joining and reduction markers just after the loop. */
10775 if (oacc_tail)
10776 gimple_seq_add_seq (&body, oacc_tail);
10777
10778 pop_gimplify_context (new_stmt);
10779
10780 gimple_bind_append_vars (new_stmt, ctx->block_vars);
10781 maybe_remove_omp_member_access_dummy_vars (new_stmt);
10782 BLOCK_VARS (block) = gimple_bind_vars (new_stmt);
10783 if (BLOCK_VARS (block))
10784 TREE_USED (block) = 1;
10785
10786 gimple_bind_set_body (new_stmt, body);
10787 gimple_omp_set_body (stmt, NULL);
10788 gimple_omp_for_set_pre_body (stmt, NULL);
10789 }
10790
10791 /* Callback for walk_stmts. Check if the current statement only contains
10792 GIMPLE_OMP_FOR or GIMPLE_OMP_SECTIONS. */
10793
10794 static tree
10795 check_combined_parallel (gimple_stmt_iterator *gsi_p,
10796 bool *handled_ops_p,
10797 struct walk_stmt_info *wi)
10798 {
10799 int *info = (int *) wi->info;
10800 gimple *stmt = gsi_stmt (*gsi_p);
10801
10802 *handled_ops_p = true;
10803 switch (gimple_code (stmt))
10804 {
10805 WALK_SUBSTMTS;
10806
10807 case GIMPLE_DEBUG:
10808 break;
10809 case GIMPLE_OMP_FOR:
10810 case GIMPLE_OMP_SECTIONS:
10811 *info = *info == 0 ? 1 : -1;
10812 break;
10813 default:
10814 *info = -1;
10815 break;
10816 }
10817 return NULL;
10818 }
10819
struct omp_taskcopy_context
{
  /* This field must be at the beginning, as we do "inheritance": Some
     callback functions for tree-inline.c (e.g., omp_copy_decl)
     receive a copy_body_data pointer that is up-casted to an
     omp_context pointer.  */
  copy_body_data cb;
  /* Lowering context of the task construct; used by the copy-decl
     callback to consult the sender-record field map (sfield_map).  */
  omp_context *ctx;
};
10829
10830 static tree
10831 task_copyfn_copy_decl (tree var, copy_body_data *cb)
10832 {
10833 struct omp_taskcopy_context *tcctx = (struct omp_taskcopy_context *) cb;
10834
10835 if (splay_tree_lookup (tcctx->ctx->sfield_map, (splay_tree_key) var))
10836 return create_tmp_var (TREE_TYPE (var));
10837
10838 return var;
10839 }
10840
10841 static tree
10842 task_copyfn_remap_type (struct omp_taskcopy_context *tcctx, tree orig_type)
10843 {
10844 tree name, new_fields = NULL, type, f;
10845
10846 type = lang_hooks.types.make_type (RECORD_TYPE);
10847 name = DECL_NAME (TYPE_NAME (orig_type));
10848 name = build_decl (gimple_location (tcctx->ctx->stmt),
10849 TYPE_DECL, name, type);
10850 TYPE_NAME (type) = name;
10851
10852 for (f = TYPE_FIELDS (orig_type); f ; f = TREE_CHAIN (f))
10853 {
10854 tree new_f = copy_node (f);
10855 DECL_CONTEXT (new_f) = type;
10856 TREE_TYPE (new_f) = remap_type (TREE_TYPE (f), &tcctx->cb);
10857 TREE_CHAIN (new_f) = new_fields;
10858 walk_tree (&DECL_SIZE (new_f), copy_tree_body_r, &tcctx->cb, NULL);
10859 walk_tree (&DECL_SIZE_UNIT (new_f), copy_tree_body_r, &tcctx->cb, NULL);
10860 walk_tree (&DECL_FIELD_OFFSET (new_f), copy_tree_body_r,
10861 &tcctx->cb, NULL);
10862 new_fields = new_f;
10863 tcctx->cb.decl_map->put (f, new_f);
10864 }
10865 TYPE_FIELDS (type) = nreverse (new_fields);
10866 layout_type (type);
10867 return type;
10868 }
10869
/* Create task copyfn: populate the copy function recorded on TASK_STMT
   (gimple_omp_task_copy_fn).  The function takes two arguments -- a
   pointer to the task's own data record (ARG) and a pointer to the
   sender record filled in by the parent (SARG) -- and copies each
   mapped variable's field from *SARG into *ARG.  The body is built as
   a GENERIC statement list inside a BIND_EXPR; CTX is the lowering
   context of the task construct.  */

static void
create_task_copyfn (gomp_task *task_stmt, omp_context *ctx)
{
  struct function *child_cfun;
  tree child_fn, t, c, src, dst, f, sf, arg, sarg, decl;
  tree record_type, srecord_type, bind, list;
  bool record_needs_remap = false, srecord_needs_remap = false;
  splay_tree_node n;
  struct omp_taskcopy_context tcctx;
  location_t loc = gimple_location (task_stmt);
  size_t looptempno = 0;

  child_fn = gimple_omp_task_copy_fn (task_stmt);
  child_cfun = DECL_STRUCT_FUNCTION (child_fn);
  gcc_assert (child_cfun->cfg == NULL);
  DECL_SAVED_TREE (child_fn) = alloc_stmt_list ();

  /* Reset DECL_CONTEXT on function arguments.  */
  for (t = DECL_ARGUMENTS (child_fn); t; t = DECL_CHAIN (t))
    DECL_CONTEXT (t) = child_fn;

  /* Populate the function.  */
  push_gimplify_context ();
  push_cfun (child_cfun);

  bind = build3 (BIND_EXPR, void_type_node, NULL, NULL, NULL);
  TREE_SIDE_EFFECTS (bind) = 1;
  list = NULL;
  DECL_SAVED_TREE (child_fn) = bind;
  DECL_SOURCE_LOCATION (child_fn) = gimple_location (task_stmt);

  /* Remap src and dst argument types if needed: types that are
     variably modified in the source function reference decls that do
     not exist inside the copy function.  */
  record_type = ctx->record_type;
  srecord_type = ctx->srecord_type;
  for (f = TYPE_FIELDS (record_type); f ; f = DECL_CHAIN (f))
    if (variably_modified_type_p (TREE_TYPE (f), ctx->cb.src_fn))
      {
	record_needs_remap = true;
	break;
      }
  for (f = TYPE_FIELDS (srecord_type); f ; f = DECL_CHAIN (f))
    if (variably_modified_type_p (TREE_TYPE (f), ctx->cb.src_fn))
      {
	srecord_needs_remap = true;
	break;
      }

  if (record_needs_remap || srecord_needs_remap)
    {
      /* Set up a copy_body_data context whose decl_map records how the
	 record/srecord fields were remapped; later passes consult it.  */
      memset (&tcctx, '\0', sizeof (tcctx));
      tcctx.cb.src_fn = ctx->cb.src_fn;
      tcctx.cb.dst_fn = child_fn;
      tcctx.cb.src_node = cgraph_node::get (tcctx.cb.src_fn);
      gcc_checking_assert (tcctx.cb.src_node);
      tcctx.cb.dst_node = tcctx.cb.src_node;
      tcctx.cb.src_cfun = ctx->cb.src_cfun;
      tcctx.cb.copy_decl = task_copyfn_copy_decl;
      tcctx.cb.eh_lp_nr = 0;
      tcctx.cb.transform_call_graph_edges = CB_CGE_MOVE;
      tcctx.cb.decl_map = new hash_map<tree, tree>;
      tcctx.ctx = ctx;

      if (record_needs_remap)
	record_type = task_copyfn_remap_type (&tcctx, record_type);
      if (srecord_needs_remap)
	srecord_type = task_copyfn_remap_type (&tcctx, srecord_type);
    }
  else
    tcctx.cb.decl_map = NULL;

  /* Retarget the two pointer arguments at the (possibly remapped)
     record types.  ARG points at the task's data, SARG at the
     sender's.  */
  arg = DECL_ARGUMENTS (child_fn);
  TREE_TYPE (arg) = build_pointer_type (record_type);
  sarg = DECL_CHAIN (arg);
  TREE_TYPE (sarg) = build_pointer_type (srecord_type);

  /* First pass: initialize temporaries used in record_type and srecord_type
     sizes and field offsets.  */
  if (tcctx.cb.decl_map)
    for (c = gimple_omp_task_clauses (task_stmt); c; c = OMP_CLAUSE_CHAIN (c))
      if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
	{
	  tree *p;

	  decl = OMP_CLAUSE_DECL (c);
	  p = tcctx.cb.decl_map->get (decl);
	  if (p == NULL)
	    continue;
	  /* Load the size/offset temporary's value from the sender
	     record so the remapped types lay out correctly.  */
	  n = splay_tree_lookup (ctx->sfield_map, (splay_tree_key) decl);
	  sf = (tree) n->value;
	  sf = *tcctx.cb.decl_map->get (sf);
	  src = build_simple_mem_ref_loc (loc, sarg);
	  src = omp_build_component_ref (src, sf);
	  t = build2 (MODIFY_EXPR, TREE_TYPE (*p), *p, src);
	  append_to_statement_list (t, &list);
	}

  /* Second pass: copy shared var pointers and copy construct non-VLA
     firstprivate vars.  */
  for (c = gimple_omp_task_clauses (task_stmt); c; c = OMP_CLAUSE_CHAIN (c))
    switch (OMP_CLAUSE_CODE (c))
      {
	splay_tree_key key;
      case OMP_CLAUSE_SHARED:
	decl = OMP_CLAUSE_DECL (c);
	/* Shared-firstprivate decls are keyed by the address of their
	   DECL_UID rather than the decl itself.  */
	key = (splay_tree_key) decl;
	if (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
	  key = (splay_tree_key) &DECL_UID (decl);
	n = splay_tree_lookup (ctx->field_map, key);
	if (n == NULL)
	  break;
	f = (tree) n->value;
	if (tcctx.cb.decl_map)
	  f = *tcctx.cb.decl_map->get (f);
	n = splay_tree_lookup (ctx->sfield_map, key);
	sf = (tree) n->value;
	if (tcctx.cb.decl_map)
	  sf = *tcctx.cb.decl_map->get (sf);
	/* Plain bitwise copy of the field: (*arg).f = (*sarg).sf.  */
	src = build_simple_mem_ref_loc (loc, sarg);
	src = omp_build_component_ref (src, sf);
	dst = build_simple_mem_ref_loc (loc, arg);
	dst = omp_build_component_ref (dst, f);
	t = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
	append_to_statement_list (t, &list);
	break;
      case OMP_CLAUSE_REDUCTION:
      case OMP_CLAUSE_IN_REDUCTION:
	decl = OMP_CLAUSE_DECL (c);
	/* Array-section reductions wrap the base decl in a MEM_REF,
	   possibly via POINTER_PLUS_EXPR / INDIRECT_REF / ADDR_EXPR;
	   strip down to the underlying decl for the map lookups.  */
	if (TREE_CODE (decl) == MEM_REF)
	  {
	    decl = TREE_OPERAND (decl, 0);
	    if (TREE_CODE (decl) == POINTER_PLUS_EXPR)
	      decl = TREE_OPERAND (decl, 0);
	    if (TREE_CODE (decl) == INDIRECT_REF
		|| TREE_CODE (decl) == ADDR_EXPR)
	      decl = TREE_OPERAND (decl, 0);
	  }
	key = (splay_tree_key) decl;
	n = splay_tree_lookup (ctx->field_map, key);
	if (n == NULL)
	  break;
	f = (tree) n->value;
	if (tcctx.cb.decl_map)
	  f = *tcctx.cb.decl_map->get (f);
	n = splay_tree_lookup (ctx->sfield_map, key);
	sf = (tree) n->value;
	if (tcctx.cb.decl_map)
	  sf = *tcctx.cb.decl_map->get (sf);
	src = build_simple_mem_ref_loc (loc, sarg);
	src = omp_build_component_ref (src, sf);
	/* For a stripped reference-to-pointer base, load through one
	   extra level of indirection.  */
	if (decl != OMP_CLAUSE_DECL (c)
	    && TREE_CODE (TREE_TYPE (decl)) == REFERENCE_TYPE
	    && TREE_CODE (TREE_TYPE (TREE_TYPE (decl))) == POINTER_TYPE)
	  src = build_simple_mem_ref_loc (loc, src);
	dst = build_simple_mem_ref_loc (loc, arg);
	dst = omp_build_component_ref (dst, f);
	t = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
	append_to_statement_list (t, &list);
	break;
      case OMP_CLAUSE__LOOPTEMP_:
	/* Fields for first two _looptemp_ clauses are initialized by
	   GOMP_taskloop*, the rest are handled like firstprivate.  */
	if (looptempno < 2)
	  {
	    looptempno++;
	    break;
	  }
	/* FALLTHRU */
      case OMP_CLAUSE__REDUCTEMP_:
      case OMP_CLAUSE_FIRSTPRIVATE:
	decl = OMP_CLAUSE_DECL (c);
	/* VLA firstprivates are deferred to the last pass below.  */
	if (is_variable_sized (decl))
	  break;
	n = splay_tree_lookup (ctx->field_map, (splay_tree_key) decl);
	if (n == NULL)
	  break;
	f = (tree) n->value;
	if (tcctx.cb.decl_map)
	  f = *tcctx.cb.decl_map->get (f);
	n = splay_tree_lookup (ctx->sfield_map, (splay_tree_key) decl);
	if (n != NULL)
	  {
	    sf = (tree) n->value;
	    if (tcctx.cb.decl_map)
	      sf = *tcctx.cb.decl_map->get (sf);
	    src = build_simple_mem_ref_loc (loc, sarg);
	    src = omp_build_component_ref (src, sf);
	    if (use_pointer_for_field (decl, NULL) || omp_is_reference (decl))
	      src = build_simple_mem_ref_loc (loc, src);
	  }
	else
	  /* No sender field: read straight from the decl.  */
	  src = decl;
	dst = build_simple_mem_ref_loc (loc, arg);
	dst = omp_build_component_ref (dst, f);
	if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_FIRSTPRIVATE)
	  t = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
	else
	  /* Firstprivate may need a language-specific copy
	     constructor rather than a bitwise copy.  */
	  t = lang_hooks.decls.omp_clause_copy_ctor (c, dst, src);
	append_to_statement_list (t, &list);
	break;
      case OMP_CLAUSE_PRIVATE:
	if (! OMP_CLAUSE_PRIVATE_OUTER_REF (c))
	  break;
	decl = OMP_CLAUSE_DECL (c);
	n = splay_tree_lookup (ctx->field_map, (splay_tree_key) decl);
	f = (tree) n->value;
	if (tcctx.cb.decl_map)
	  f = *tcctx.cb.decl_map->get (f);
	n = splay_tree_lookup (ctx->sfield_map, (splay_tree_key) decl);
	if (n != NULL)
	  {
	    sf = (tree) n->value;
	    if (tcctx.cb.decl_map)
	      sf = *tcctx.cb.decl_map->get (sf);
	    src = build_simple_mem_ref_loc (loc, sarg);
	    src = omp_build_component_ref (src, sf);
	    if (use_pointer_for_field (decl, NULL))
	      src = build_simple_mem_ref_loc (loc, src);
	  }
	else
	  src = decl;
	dst = build_simple_mem_ref_loc (loc, arg);
	dst = omp_build_component_ref (dst, f);
	t = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
	append_to_statement_list (t, &list);
	break;
      default:
	break;
      }

  /* Last pass: handle VLA firstprivates.  */
  if (tcctx.cb.decl_map)
    for (c = gimple_omp_task_clauses (task_stmt); c; c = OMP_CLAUSE_CHAIN (c))
      if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
	{
	  tree ind, ptr, df;

	  decl = OMP_CLAUSE_DECL (c);
	  if (!is_variable_sized (decl))
	    continue;
	  n = splay_tree_lookup (ctx->field_map, (splay_tree_key) decl);
	  if (n == NULL)
	    continue;
	  f = (tree) n->value;
	  f = *tcctx.cb.decl_map->get (f);
	  /* A VLA decl's value expr is *ptrvar; copy-construct the
	     data through the sender's pointer field, then store the
	     destination address into the task's pointer field.  */
	  gcc_assert (DECL_HAS_VALUE_EXPR_P (decl));
	  ind = DECL_VALUE_EXPR (decl);
	  gcc_assert (TREE_CODE (ind) == INDIRECT_REF);
	  gcc_assert (DECL_P (TREE_OPERAND (ind, 0)));
	  n = splay_tree_lookup (ctx->sfield_map,
				 (splay_tree_key) TREE_OPERAND (ind, 0));
	  sf = (tree) n->value;
	  sf = *tcctx.cb.decl_map->get (sf);
	  src = build_simple_mem_ref_loc (loc, sarg);
	  src = omp_build_component_ref (src, sf);
	  src = build_simple_mem_ref_loc (loc, src);
	  dst = build_simple_mem_ref_loc (loc, arg);
	  dst = omp_build_component_ref (dst, f);
	  t = lang_hooks.decls.omp_clause_copy_ctor (c, dst, src);
	  append_to_statement_list (t, &list);
	  n = splay_tree_lookup (ctx->field_map,
				 (splay_tree_key) TREE_OPERAND (ind, 0));
	  df = (tree) n->value;
	  df = *tcctx.cb.decl_map->get (df);
	  ptr = build_simple_mem_ref_loc (loc, arg);
	  ptr = omp_build_component_ref (ptr, df);
	  t = build2 (MODIFY_EXPR, TREE_TYPE (ptr), ptr,
		      build_fold_addr_expr_loc (loc, dst));
	  append_to_statement_list (t, &list);
	}

  t = build1 (RETURN_EXPR, void_type_node, NULL);
  append_to_statement_list (t, &list);

  if (tcctx.cb.decl_map)
    delete tcctx.cb.decl_map;
  pop_gimplify_context (NULL);
  BIND_EXPR_BODY (bind) = list;
  pop_cfun ();
}
11151
/* Lower the OMP_CLAUSE_DEPEND clauses in *PCLAUSES into a flat array of
   pointers.  Statements initializing the array are appended to ISEQ and
   a clobber killing it afterwards to OSEQ; a new OMP_CLAUSE_DEPEND of
   kind OMP_CLAUSE_DEPEND_LAST holding the array's address is prepended
   to *PCLAUSES.

   Array layout, original format (only in/out/inout kinds present):
     [0] total count, [1] out/inout count, [2...] addresses.
   Extended format (any mutexinoutset or depobj present):
     [0] 0 (format marker), [1] total, [2] out/inout count,
     [3] mutexinoutset count, [4] in count, [5...] addresses.
   Addresses are grouped by kind: out/inout, mutexinoutset, in, depobj.  */

static void
lower_depend_clauses (tree *pclauses, gimple_seq *iseq, gimple_seq *oseq)
{
  tree c, clauses;
  gimple *g;
  /* cnt[0] = out/inout, cnt[1] = mutexinoutset, cnt[2] = in,
     cnt[3] = depobj.  IDX is the first slot holding an address.  */
  size_t cnt[4] = { 0, 0, 0, 0 }, idx = 2, i;

  clauses = omp_find_clause (*pclauses, OMP_CLAUSE_DEPEND);
  gcc_assert (clauses);
  /* Count the dependences of each kind.  */
  for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
    if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND)
      switch (OMP_CLAUSE_DEPEND_KIND (c))
	{
	case OMP_CLAUSE_DEPEND_LAST:
	  /* Lowering already done at gimplification.  */
	  return;
	case OMP_CLAUSE_DEPEND_IN:
	  cnt[2]++;
	  break;
	case OMP_CLAUSE_DEPEND_OUT:
	case OMP_CLAUSE_DEPEND_INOUT:
	  cnt[0]++;
	  break;
	case OMP_CLAUSE_DEPEND_MUTEXINOUTSET:
	  cnt[1]++;
	  break;
	case OMP_CLAUSE_DEPEND_DEPOBJ:
	  cnt[3]++;
	  break;
	case OMP_CLAUSE_DEPEND_SOURCE:
	case OMP_CLAUSE_DEPEND_SINK:
	  /* FALLTHRU */
	default:
	  gcc_unreachable ();
	}
  /* Switch to the extended format if the new kinds are used.  */
  if (cnt[1] || cnt[3])
    idx = 5;
  size_t total = cnt[0] + cnt[1] + cnt[2] + cnt[3];
  tree type = build_array_type_nelts (ptr_type_node, total + idx);
  tree array = create_tmp_var (type);
  TREE_ADDRESSABLE (array) = 1;
  tree r = build4 (ARRAY_REF, ptr_type_node, array, size_int (0), NULL_TREE,
		   NULL_TREE);
  if (idx == 5)
    {
      /* Extended format: array[0] = 0 marks the layout, the total
	 count moves to array[1].  */
      g = gimple_build_assign (r, build_int_cst (ptr_type_node, 0));
      gimple_seq_add_stmt (iseq, g);
      r = build4 (ARRAY_REF, ptr_type_node, array, size_int (1), NULL_TREE,
		  NULL_TREE);
    }
  g = gimple_build_assign (r, build_int_cst (ptr_type_node, total));
  gimple_seq_add_stmt (iseq, g);
  /* Emit the per-kind counts: only out/inout for the original format;
     out/inout, mutexinoutset and in for the extended one.  */
  for (i = 0; i < (idx == 5 ? 3 : 1); i++)
    {
      r = build4 (ARRAY_REF, ptr_type_node, array,
		  size_int (i + 1 + (idx == 5)), NULL_TREE, NULL_TREE);
      g = gimple_build_assign (r, build_int_cst (ptr_type_node, cnt[i]));
      gimple_seq_add_stmt (iseq, g);
    }
  /* Store the dependence addresses, grouped by kind in the order
     out/inout, mutexinoutset, in, depobj.  */
  for (i = 0; i < 4; i++)
    {
      if (cnt[i] == 0)
	continue;
      for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
	if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_DEPEND)
	  continue;
	else
	  {
	    /* Only handle clauses of the kind selected by I in this
	       iteration of the outer loop.  */
	    switch (OMP_CLAUSE_DEPEND_KIND (c))
	      {
	      case OMP_CLAUSE_DEPEND_IN:
		if (i != 2)
		  continue;
		break;
	      case OMP_CLAUSE_DEPEND_OUT:
	      case OMP_CLAUSE_DEPEND_INOUT:
		if (i != 0)
		  continue;
		break;
	      case OMP_CLAUSE_DEPEND_MUTEXINOUTSET:
		if (i != 1)
		  continue;
		break;
	      case OMP_CLAUSE_DEPEND_DEPOBJ:
		if (i != 3)
		  continue;
		break;
	      default:
		gcc_unreachable ();
	      }
	    tree t = OMP_CLAUSE_DECL (c);
	    t = fold_convert (ptr_type_node, t);
	    gimplify_expr (&t, iseq, NULL, is_gimple_val, fb_rvalue);
	    r = build4 (ARRAY_REF, ptr_type_node, array, size_int (idx++),
			NULL_TREE, NULL_TREE);
	    g = gimple_build_assign (r, t);
	    gimple_seq_add_stmt (iseq, g);
	  }
    }
  /* Prepend a DEPEND_LAST clause carrying the array's address so the
     expander can pass it on.  */
  c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE_DEPEND);
  OMP_CLAUSE_DEPEND_KIND (c) = OMP_CLAUSE_DEPEND_LAST;
  OMP_CLAUSE_DECL (c) = build_fold_addr_expr (array);
  OMP_CLAUSE_CHAIN (c) = *pclauses;
  *pclauses = c;
  /* The array is dead after the construct.  */
  tree clobber = build_clobber (type);
  g = gimple_build_assign (array, clobber);
  gimple_seq_add_stmt (oseq, g);
}
11260
/* Lower the OpenMP parallel or task directive in the current statement
   in GSI_P.  CTX holds context information for the directive.  Builds
   the sender data block (.omp_data_o), lowers the clauses and body, and
   replaces the statement with a bind wrapping the send/receive code
   around it (nested inside a depend/task-reduction bind if needed).  */

static void
lower_omp_taskreg (gimple_stmt_iterator *gsi_p, omp_context *ctx)
{
  tree clauses;
  tree child_fn, t;
  gimple *stmt = gsi_stmt (*gsi_p);
  gbind *par_bind, *bind, *dep_bind = NULL;
  gimple_seq par_body;
  location_t loc = gimple_location (stmt);

  clauses = gimple_omp_taskreg_clauses (stmt);
  if (gimple_code (stmt) == GIMPLE_OMP_TASK
      && gimple_omp_task_taskwait_p (stmt))
    {
      /* A taskwait-with-depend task has no body to lower.  */
      par_bind = NULL;
      par_body = NULL;
    }
  else
    {
      par_bind
	= as_a <gbind *> (gimple_seq_first_stmt (gimple_omp_body (stmt)));
      par_body = gimple_bind_body (par_bind);
    }
  child_fn = ctx->cb.dst_fn;
  if (gimple_code (stmt) == GIMPLE_OMP_PARALLEL
      && !gimple_omp_parallel_combined_p (stmt))
    {
      struct walk_stmt_info wi;
      int ws_num = 0;

      /* Mark the parallel combined if its body contains exactly one
	 worksharing construct and nothing else.  */
      memset (&wi, 0, sizeof (wi));
      wi.info = &ws_num;
      wi.val_only = true;
      walk_gimple_seq (par_body, check_combined_parallel, NULL, &wi);
      if (ws_num == 1)
	gimple_omp_parallel_set_combined_p (stmt, true);
    }
  /* Lower depend clauses of a task into the flat-array form; the
     resulting sequences will bracket the construct below.  */
  gimple_seq dep_ilist = NULL;
  gimple_seq dep_olist = NULL;
  if (gimple_code (stmt) == GIMPLE_OMP_TASK
      && omp_find_clause (clauses, OMP_CLAUSE_DEPEND))
    {
      push_gimplify_context ();
      dep_bind = gimple_build_bind (NULL, NULL, make_node (BLOCK));
      lower_depend_clauses (gimple_omp_task_clauses_ptr (stmt),
			    &dep_ilist, &dep_olist);
    }

  if (gimple_code (stmt) == GIMPLE_OMP_TASK
      && gimple_omp_task_taskwait_p (stmt))
    {
      /* For taskwait-with-depend, only the depend sequences need to
	 surround the statement; no body or data environment exists.  */
      if (dep_bind)
	{
	  gsi_replace (gsi_p, dep_bind, true);
	  gimple_bind_add_seq (dep_bind, dep_ilist);
	  gimple_bind_add_stmt (dep_bind, stmt);
	  gimple_bind_add_seq (dep_bind, dep_olist);
	  pop_gimplify_context (dep_bind);
	}
      return;
    }

  /* A sender record implies firstprivate copying that may need a
     separate copy function.  */
  if (ctx->srecord_type)
    create_task_copyfn (as_a <gomp_task *> (stmt), ctx);

  /* Task reductions on taskloop tasks and _reductemp_ on parallels
     need setup/teardown sequences outside the construct.  */
  gimple_seq tskred_ilist = NULL;
  gimple_seq tskred_olist = NULL;
  if ((is_task_ctx (ctx)
       && gimple_omp_task_taskloop_p (ctx->stmt)
       && omp_find_clause (gimple_omp_task_clauses (ctx->stmt),
			   OMP_CLAUSE_REDUCTION))
      || (is_parallel_ctx (ctx)
	  && omp_find_clause (gimple_omp_parallel_clauses (stmt),
			      OMP_CLAUSE__REDUCTEMP_)))
    {
      if (dep_bind == NULL)
	{
	  push_gimplify_context ();
	  dep_bind = gimple_build_bind (NULL, NULL, make_node (BLOCK));
	}
      lower_omp_task_reductions (ctx, is_task_ctx (ctx) ? OMP_TASKLOOP
							: OMP_PARALLEL,
				 gimple_omp_taskreg_clauses (ctx->stmt),
				 &tskred_ilist, &tskred_olist);
    }

  push_gimplify_context ();

  gimple_seq par_olist = NULL;
  gimple_seq par_ilist = NULL;
  gimple_seq par_rlist = NULL;
  lower_rec_input_clauses (clauses, &par_ilist, &par_olist, ctx, NULL);
  lower_omp (&par_body, ctx);
  if (gimple_code (stmt) != GIMPLE_OMP_TASK)
    lower_reduction_clauses (clauses, &par_rlist, NULL, ctx);

  /* Declare all the variables created by mapping and the variables
     declared in the scope of the parallel body.  */
  record_vars_into (ctx->block_vars, child_fn);
  maybe_remove_omp_member_access_dummy_vars (par_bind);
  record_vars_into (gimple_bind_vars (par_bind), child_fn);

  if (ctx->record_type)
    {
      /* Create the sender block passed to the outlined function.  */
      ctx->sender_decl
	= create_tmp_var (ctx->srecord_type ? ctx->srecord_type
			  : ctx->record_type, ".omp_data_o");
      DECL_NAMELESS (ctx->sender_decl) = 1;
      TREE_ADDRESSABLE (ctx->sender_decl) = 1;
      gimple_omp_taskreg_set_data_arg (stmt, ctx->sender_decl);
    }

  gimple_seq olist = NULL;
  gimple_seq ilist = NULL;
  lower_send_clauses (clauses, &ilist, &olist, ctx);
  lower_send_shared_vars (&ilist, &olist, ctx);

  if (ctx->record_type)
    {
      /* The sender block is dead after the construct.  */
      tree clobber = build_clobber (TREE_TYPE (ctx->sender_decl));
      gimple_seq_add_stmt (&olist, gimple_build_assign (ctx->sender_decl,
							clobber));
    }

  /* Once all the expansions are done, sequence all the different
     fragments inside gimple_omp_body.  */

  gimple_seq new_body = NULL;

  if (ctx->record_type)
    {
      t = build_fold_addr_expr_loc (loc, ctx->sender_decl);
      /* fixup_child_record_type might have changed receiver_decl's type.  */
      t = fold_convert_loc (loc, TREE_TYPE (ctx->receiver_decl), t);
      gimple_seq_add_stmt (&new_body,
			   gimple_build_assign (ctx->receiver_decl, t));
    }

  gimple_seq_add_seq (&new_body, par_ilist);
  gimple_seq_add_seq (&new_body, par_body);
  gimple_seq_add_seq (&new_body, par_rlist);
  if (ctx->cancellable)
    gimple_seq_add_stmt (&new_body, gimple_build_label (ctx->cancel_label));
  gimple_seq_add_seq (&new_body, par_olist);
  new_body = maybe_catch_exception (new_body);
  if (gimple_code (stmt) == GIMPLE_OMP_TASK)
    gimple_seq_add_stmt (&new_body,
			 gimple_build_omp_continue (integer_zero_node,
						    integer_zero_node));
  gimple_seq_add_stmt (&new_body, gimple_build_omp_return (false));
  gimple_omp_set_body (stmt, new_body);

  /* Wrap the construct plus its send sequences in a bind, reusing the
     body bind's BLOCK when one exists and no depend bind intervenes.  */
  if (dep_bind && gimple_bind_block (par_bind) == NULL_TREE)
    bind = gimple_build_bind (NULL, NULL, make_node (BLOCK));
  else
    bind = gimple_build_bind (NULL, NULL, gimple_bind_block (par_bind));
  gsi_replace (gsi_p, dep_bind ? dep_bind : bind, true);
  gimple_bind_add_seq (bind, ilist);
  gimple_bind_add_stmt (bind, stmt);
  gimple_bind_add_seq (bind, olist);

  pop_gimplify_context (NULL);

  if (dep_bind)
    {
      /* Nest the construct inside the depend/task-reduction bind:
	 depend setup, reduction setup, construct, reduction teardown,
	 depend teardown.  */
      gimple_bind_add_seq (dep_bind, dep_ilist);
      gimple_bind_add_seq (dep_bind, tskred_ilist);
      gimple_bind_add_stmt (dep_bind, bind);
      gimple_bind_add_seq (dep_bind, tskred_olist);
      gimple_bind_add_seq (dep_bind, dep_olist);
      pop_gimplify_context (dep_bind);
    }
}
11437
11438 /* Lower the GIMPLE_OMP_TARGET in the current statement
11439 in GSI_P. CTX holds context information for the directive. */
11440
11441 static void
11442 lower_omp_target (gimple_stmt_iterator *gsi_p, omp_context *ctx)
11443 {
11444 tree clauses;
11445 tree child_fn, t, c;
11446 gomp_target *stmt = as_a <gomp_target *> (gsi_stmt (*gsi_p));
11447 gbind *tgt_bind, *bind, *dep_bind = NULL;
11448 gimple_seq tgt_body, olist, ilist, fplist, new_body;
11449 location_t loc = gimple_location (stmt);
11450 bool offloaded, data_region;
11451 unsigned int map_cnt = 0;
11452
11453 offloaded = is_gimple_omp_offloaded (stmt);
11454 switch (gimple_omp_target_kind (stmt))
11455 {
11456 case GF_OMP_TARGET_KIND_REGION:
11457 case GF_OMP_TARGET_KIND_UPDATE:
11458 case GF_OMP_TARGET_KIND_ENTER_DATA:
11459 case GF_OMP_TARGET_KIND_EXIT_DATA:
11460 case GF_OMP_TARGET_KIND_OACC_PARALLEL:
11461 case GF_OMP_TARGET_KIND_OACC_KERNELS:
11462 case GF_OMP_TARGET_KIND_OACC_SERIAL:
11463 case GF_OMP_TARGET_KIND_OACC_UPDATE:
11464 case GF_OMP_TARGET_KIND_OACC_ENTER_EXIT_DATA:
11465 case GF_OMP_TARGET_KIND_OACC_DECLARE:
11466 data_region = false;
11467 break;
11468 case GF_OMP_TARGET_KIND_DATA:
11469 case GF_OMP_TARGET_KIND_OACC_DATA:
11470 case GF_OMP_TARGET_KIND_OACC_HOST_DATA:
11471 data_region = true;
11472 break;
11473 default:
11474 gcc_unreachable ();
11475 }
11476
11477 clauses = gimple_omp_target_clauses (stmt);
11478
11479 gimple_seq dep_ilist = NULL;
11480 gimple_seq dep_olist = NULL;
11481 if (omp_find_clause (clauses, OMP_CLAUSE_DEPEND))
11482 {
11483 push_gimplify_context ();
11484 dep_bind = gimple_build_bind (NULL, NULL, make_node (BLOCK));
11485 lower_depend_clauses (gimple_omp_target_clauses_ptr (stmt),
11486 &dep_ilist, &dep_olist);
11487 }
11488
11489 tgt_bind = NULL;
11490 tgt_body = NULL;
11491 if (offloaded)
11492 {
11493 tgt_bind = gimple_seq_first_stmt_as_a_bind (gimple_omp_body (stmt));
11494 tgt_body = gimple_bind_body (tgt_bind);
11495 }
11496 else if (data_region)
11497 tgt_body = gimple_omp_body (stmt);
11498 child_fn = ctx->cb.dst_fn;
11499
11500 push_gimplify_context ();
11501 fplist = NULL;
11502
11503 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
11504 switch (OMP_CLAUSE_CODE (c))
11505 {
11506 tree var, x;
11507
11508 default:
11509 break;
11510 case OMP_CLAUSE_MAP:
11511 #if CHECKING_P
11512 /* First check what we're prepared to handle in the following. */
11513 switch (OMP_CLAUSE_MAP_KIND (c))
11514 {
11515 case GOMP_MAP_ALLOC:
11516 case GOMP_MAP_TO:
11517 case GOMP_MAP_FROM:
11518 case GOMP_MAP_TOFROM:
11519 case GOMP_MAP_POINTER:
11520 case GOMP_MAP_TO_PSET:
11521 case GOMP_MAP_DELETE:
11522 case GOMP_MAP_RELEASE:
11523 case GOMP_MAP_ALWAYS_TO:
11524 case GOMP_MAP_ALWAYS_FROM:
11525 case GOMP_MAP_ALWAYS_TOFROM:
11526 case GOMP_MAP_FIRSTPRIVATE_POINTER:
11527 case GOMP_MAP_FIRSTPRIVATE_REFERENCE:
11528 case GOMP_MAP_STRUCT:
11529 case GOMP_MAP_ALWAYS_POINTER:
11530 case GOMP_MAP_ATTACH:
11531 case GOMP_MAP_DETACH:
11532 break;
11533 case GOMP_MAP_IF_PRESENT:
11534 case GOMP_MAP_FORCE_ALLOC:
11535 case GOMP_MAP_FORCE_TO:
11536 case GOMP_MAP_FORCE_FROM:
11537 case GOMP_MAP_FORCE_TOFROM:
11538 case GOMP_MAP_FORCE_PRESENT:
11539 case GOMP_MAP_FORCE_DEVICEPTR:
11540 case GOMP_MAP_DEVICE_RESIDENT:
11541 case GOMP_MAP_LINK:
11542 case GOMP_MAP_FORCE_DETACH:
11543 gcc_assert (is_gimple_omp_oacc (stmt));
11544 break;
11545 default:
11546 gcc_unreachable ();
11547 }
11548 #endif
11549 /* FALLTHRU */
11550 case OMP_CLAUSE_TO:
11551 case OMP_CLAUSE_FROM:
11552 oacc_firstprivate:
11553 var = OMP_CLAUSE_DECL (c);
11554 if (!DECL_P (var))
11555 {
11556 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_MAP
11557 || (!OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c)
11558 && (OMP_CLAUSE_MAP_KIND (c)
11559 != GOMP_MAP_FIRSTPRIVATE_POINTER)))
11560 map_cnt++;
11561 continue;
11562 }
11563
11564 if (DECL_SIZE (var)
11565 && TREE_CODE (DECL_SIZE (var)) != INTEGER_CST)
11566 {
11567 tree var2 = DECL_VALUE_EXPR (var);
11568 gcc_assert (TREE_CODE (var2) == INDIRECT_REF);
11569 var2 = TREE_OPERAND (var2, 0);
11570 gcc_assert (DECL_P (var2));
11571 var = var2;
11572 }
11573
11574 if (offloaded
11575 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
11576 && (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER
11577 || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_REFERENCE))
11578 {
11579 if (TREE_CODE (TREE_TYPE (var)) == ARRAY_TYPE)
11580 {
11581 if (is_global_var (maybe_lookup_decl_in_outer_ctx (var, ctx))
11582 && varpool_node::get_create (var)->offloadable)
11583 continue;
11584
11585 tree type = build_pointer_type (TREE_TYPE (var));
11586 tree new_var = lookup_decl (var, ctx);
11587 x = create_tmp_var_raw (type, get_name (new_var));
11588 gimple_add_tmp_var (x);
11589 x = build_simple_mem_ref (x);
11590 SET_DECL_VALUE_EXPR (new_var, x);
11591 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
11592 }
11593 continue;
11594 }
11595
11596 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
11597 && (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ATTACH
11598 || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_DETACH)
11599 && is_omp_target (stmt))
11600 {
11601 gcc_assert (maybe_lookup_field (c, ctx));
11602 map_cnt++;
11603 continue;
11604 }
11605
11606 if (!maybe_lookup_field (var, ctx))
11607 continue;
11608
11609 /* Don't remap compute constructs' reduction variables, because the
11610 intermediate result must be local to each gang. */
11611 if (offloaded && !(OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
11612 && OMP_CLAUSE_MAP_IN_REDUCTION (c)))
11613 {
11614 x = build_receiver_ref (var, true, ctx);
11615 tree new_var = lookup_decl (var, ctx);
11616
11617 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
11618 && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_POINTER
11619 && !OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c)
11620 && TREE_CODE (TREE_TYPE (var)) == ARRAY_TYPE)
11621 x = build_simple_mem_ref (x);
11622 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
11623 {
11624 gcc_assert (is_gimple_omp_oacc (ctx->stmt));
11625 if (omp_is_reference (new_var)
11626 && (TREE_CODE (TREE_TYPE (new_var)) != POINTER_TYPE
11627 || DECL_BY_REFERENCE (var)))
11628 {
11629 /* Create a local object to hold the instance
11630 value. */
11631 tree type = TREE_TYPE (TREE_TYPE (new_var));
11632 const char *id = IDENTIFIER_POINTER (DECL_NAME (new_var));
11633 tree inst = create_tmp_var (type, id);
11634 gimplify_assign (inst, fold_indirect_ref (x), &fplist);
11635 x = build_fold_addr_expr (inst);
11636 }
11637 gimplify_assign (new_var, x, &fplist);
11638 }
11639 else if (DECL_P (new_var))
11640 {
11641 SET_DECL_VALUE_EXPR (new_var, x);
11642 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
11643 }
11644 else
11645 gcc_unreachable ();
11646 }
11647 map_cnt++;
11648 break;
11649
11650 case OMP_CLAUSE_FIRSTPRIVATE:
11651 if (is_oacc_parallel_or_serial (ctx))
11652 goto oacc_firstprivate;
11653 map_cnt++;
11654 var = OMP_CLAUSE_DECL (c);
11655 if (!omp_is_reference (var)
11656 && !is_gimple_reg_type (TREE_TYPE (var)))
11657 {
11658 tree new_var = lookup_decl (var, ctx);
11659 if (is_variable_sized (var))
11660 {
11661 tree pvar = DECL_VALUE_EXPR (var);
11662 gcc_assert (TREE_CODE (pvar) == INDIRECT_REF);
11663 pvar = TREE_OPERAND (pvar, 0);
11664 gcc_assert (DECL_P (pvar));
11665 tree new_pvar = lookup_decl (pvar, ctx);
11666 x = build_fold_indirect_ref (new_pvar);
11667 TREE_THIS_NOTRAP (x) = 1;
11668 }
11669 else
11670 x = build_receiver_ref (var, true, ctx);
11671 SET_DECL_VALUE_EXPR (new_var, x);
11672 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
11673 }
11674 break;
11675
11676 case OMP_CLAUSE_PRIVATE:
11677 if (is_gimple_omp_oacc (ctx->stmt))
11678 break;
11679 var = OMP_CLAUSE_DECL (c);
11680 if (is_variable_sized (var))
11681 {
11682 tree new_var = lookup_decl (var, ctx);
11683 tree pvar = DECL_VALUE_EXPR (var);
11684 gcc_assert (TREE_CODE (pvar) == INDIRECT_REF);
11685 pvar = TREE_OPERAND (pvar, 0);
11686 gcc_assert (DECL_P (pvar));
11687 tree new_pvar = lookup_decl (pvar, ctx);
11688 x = build_fold_indirect_ref (new_pvar);
11689 TREE_THIS_NOTRAP (x) = 1;
11690 SET_DECL_VALUE_EXPR (new_var, x);
11691 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
11692 }
11693 break;
11694
11695 case OMP_CLAUSE_USE_DEVICE_PTR:
11696 case OMP_CLAUSE_USE_DEVICE_ADDR:
11697 case OMP_CLAUSE_IS_DEVICE_PTR:
11698 var = OMP_CLAUSE_DECL (c);
11699 map_cnt++;
11700 if (is_variable_sized (var))
11701 {
11702 tree new_var = lookup_decl (var, ctx);
11703 tree pvar = DECL_VALUE_EXPR (var);
11704 gcc_assert (TREE_CODE (pvar) == INDIRECT_REF);
11705 pvar = TREE_OPERAND (pvar, 0);
11706 gcc_assert (DECL_P (pvar));
11707 tree new_pvar = lookup_decl (pvar, ctx);
11708 x = build_fold_indirect_ref (new_pvar);
11709 TREE_THIS_NOTRAP (x) = 1;
11710 SET_DECL_VALUE_EXPR (new_var, x);
11711 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
11712 }
11713 else if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_USE_DEVICE_ADDR
11714 && !omp_is_reference (var)
11715 && !omp_is_allocatable_or_ptr (var)
11716 && !lang_hooks.decls.omp_array_data (var, true))
11717 || TREE_CODE (TREE_TYPE (var)) == ARRAY_TYPE)
11718 {
11719 tree new_var = lookup_decl (var, ctx);
11720 tree type = build_pointer_type (TREE_TYPE (var));
11721 x = create_tmp_var_raw (type, get_name (new_var));
11722 gimple_add_tmp_var (x);
11723 x = build_simple_mem_ref (x);
11724 SET_DECL_VALUE_EXPR (new_var, x);
11725 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
11726 }
11727 else
11728 {
11729 tree new_var = lookup_decl (var, ctx);
11730 x = create_tmp_var_raw (TREE_TYPE (new_var), get_name (new_var));
11731 gimple_add_tmp_var (x);
11732 SET_DECL_VALUE_EXPR (new_var, x);
11733 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
11734 }
11735 break;
11736 }
11737
11738 if (offloaded)
11739 {
11740 target_nesting_level++;
11741 lower_omp (&tgt_body, ctx);
11742 target_nesting_level--;
11743 }
11744 else if (data_region)
11745 lower_omp (&tgt_body, ctx);
11746
11747 if (offloaded)
11748 {
11749 /* Declare all the variables created by mapping and the variables
11750 declared in the scope of the target body. */
11751 record_vars_into (ctx->block_vars, child_fn);
11752 maybe_remove_omp_member_access_dummy_vars (tgt_bind);
11753 record_vars_into (gimple_bind_vars (tgt_bind), child_fn);
11754 }
11755
11756 olist = NULL;
11757 ilist = NULL;
11758 if (ctx->record_type)
11759 {
11760 ctx->sender_decl
11761 = create_tmp_var (ctx->record_type, ".omp_data_arr");
11762 DECL_NAMELESS (ctx->sender_decl) = 1;
11763 TREE_ADDRESSABLE (ctx->sender_decl) = 1;
11764 t = make_tree_vec (3);
11765 TREE_VEC_ELT (t, 0) = ctx->sender_decl;
11766 TREE_VEC_ELT (t, 1)
11767 = create_tmp_var (build_array_type_nelts (size_type_node, map_cnt),
11768 ".omp_data_sizes");
11769 DECL_NAMELESS (TREE_VEC_ELT (t, 1)) = 1;
11770 TREE_ADDRESSABLE (TREE_VEC_ELT (t, 1)) = 1;
11771 TREE_STATIC (TREE_VEC_ELT (t, 1)) = 1;
11772 tree tkind_type = short_unsigned_type_node;
11773 int talign_shift = 8;
11774 TREE_VEC_ELT (t, 2)
11775 = create_tmp_var (build_array_type_nelts (tkind_type, map_cnt),
11776 ".omp_data_kinds");
11777 DECL_NAMELESS (TREE_VEC_ELT (t, 2)) = 1;
11778 TREE_ADDRESSABLE (TREE_VEC_ELT (t, 2)) = 1;
11779 TREE_STATIC (TREE_VEC_ELT (t, 2)) = 1;
11780 gimple_omp_target_set_data_arg (stmt, t);
11781
11782 vec<constructor_elt, va_gc> *vsize;
11783 vec<constructor_elt, va_gc> *vkind;
11784 vec_alloc (vsize, map_cnt);
11785 vec_alloc (vkind, map_cnt);
11786 unsigned int map_idx = 0;
11787
11788 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
11789 switch (OMP_CLAUSE_CODE (c))
11790 {
11791 tree ovar, nc, s, purpose, var, x, type;
11792 unsigned int talign;
11793
11794 default:
11795 break;
11796
11797 case OMP_CLAUSE_MAP:
11798 case OMP_CLAUSE_TO:
11799 case OMP_CLAUSE_FROM:
11800 oacc_firstprivate_map:
11801 nc = c;
11802 ovar = OMP_CLAUSE_DECL (c);
11803 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
11804 && (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER
11805 || (OMP_CLAUSE_MAP_KIND (c)
11806 == GOMP_MAP_FIRSTPRIVATE_REFERENCE)))
11807 break;
11808 if (!DECL_P (ovar))
11809 {
11810 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
11811 && OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c))
11812 {
11813 gcc_checking_assert (OMP_CLAUSE_DECL (OMP_CLAUSE_CHAIN (c))
11814 == get_base_address (ovar));
11815 nc = OMP_CLAUSE_CHAIN (c);
11816 ovar = OMP_CLAUSE_DECL (nc);
11817 }
11818 else
11819 {
11820 tree x = build_sender_ref (ovar, ctx);
11821 tree v
11822 = build_fold_addr_expr_with_type (ovar, ptr_type_node);
11823 gimplify_assign (x, v, &ilist);
11824 nc = NULL_TREE;
11825 }
11826 }
11827 else
11828 {
11829 if (DECL_SIZE (ovar)
11830 && TREE_CODE (DECL_SIZE (ovar)) != INTEGER_CST)
11831 {
11832 tree ovar2 = DECL_VALUE_EXPR (ovar);
11833 gcc_assert (TREE_CODE (ovar2) == INDIRECT_REF);
11834 ovar2 = TREE_OPERAND (ovar2, 0);
11835 gcc_assert (DECL_P (ovar2));
11836 ovar = ovar2;
11837 }
11838 if (!maybe_lookup_field (ovar, ctx)
11839 && !(OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
11840 && (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ATTACH
11841 || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_DETACH)))
11842 continue;
11843 }
11844
11845 talign = TYPE_ALIGN_UNIT (TREE_TYPE (ovar));
11846 if (DECL_P (ovar) && DECL_ALIGN_UNIT (ovar) > talign)
11847 talign = DECL_ALIGN_UNIT (ovar);
11848
11849 if (nc
11850 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
11851 && (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ATTACH
11852 || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_DETACH)
11853 && is_omp_target (stmt))
11854 {
11855 var = lookup_decl_in_outer_ctx (ovar, ctx);
11856 x = build_sender_ref (c, ctx);
11857 gimplify_assign (x, build_fold_addr_expr (var), &ilist);
11858 }
11859 else if (nc)
11860 {
11861 var = lookup_decl_in_outer_ctx (ovar, ctx);
11862 x = build_sender_ref (ovar, ctx);
11863
11864 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
11865 && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_POINTER
11866 && !OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c)
11867 && TREE_CODE (TREE_TYPE (ovar)) == ARRAY_TYPE)
11868 {
11869 gcc_assert (offloaded);
11870 tree avar
11871 = create_tmp_var (TREE_TYPE (TREE_TYPE (x)));
11872 mark_addressable (avar);
11873 gimplify_assign (avar, build_fold_addr_expr (var), &ilist);
11874 talign = DECL_ALIGN_UNIT (avar);
11875 avar = build_fold_addr_expr (avar);
11876 gimplify_assign (x, avar, &ilist);
11877 }
11878 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
11879 {
11880 gcc_assert (is_gimple_omp_oacc (ctx->stmt));
11881 if (!omp_is_reference (var))
11882 {
11883 if (is_gimple_reg (var)
11884 && OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (c))
11885 TREE_NO_WARNING (var) = 1;
11886 var = build_fold_addr_expr (var);
11887 }
11888 else
11889 talign = TYPE_ALIGN_UNIT (TREE_TYPE (TREE_TYPE (ovar)));
11890 gimplify_assign (x, var, &ilist);
11891 }
11892 else if (is_gimple_reg (var))
11893 {
11894 gcc_assert (offloaded);
11895 tree avar = create_tmp_var (TREE_TYPE (var));
11896 mark_addressable (avar);
11897 enum gomp_map_kind map_kind = OMP_CLAUSE_MAP_KIND (c);
11898 if (GOMP_MAP_COPY_TO_P (map_kind)
11899 || map_kind == GOMP_MAP_POINTER
11900 || map_kind == GOMP_MAP_TO_PSET
11901 || map_kind == GOMP_MAP_FORCE_DEVICEPTR)
11902 {
11903 /* If we need to initialize a temporary
11904 with VAR because it is not addressable, and
11905 the variable hasn't been initialized yet, then
11906 we'll get a warning for the store to avar.
11907 Don't warn in that case, the mapping might
11908 be implicit. */
11909 TREE_NO_WARNING (var) = 1;
11910 gimplify_assign (avar, var, &ilist);
11911 }
11912 avar = build_fold_addr_expr (avar);
11913 gimplify_assign (x, avar, &ilist);
11914 if ((GOMP_MAP_COPY_FROM_P (map_kind)
11915 || map_kind == GOMP_MAP_FORCE_DEVICEPTR)
11916 && !TYPE_READONLY (TREE_TYPE (var)))
11917 {
11918 x = unshare_expr (x);
11919 x = build_simple_mem_ref (x);
11920 gimplify_assign (var, x, &olist);
11921 }
11922 }
11923 else
11924 {
 11925 			    /* While MAP is handled explicitly by the FE,
 11926 			       for 'target update', only the identifier is passed.  */
11927 if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FROM
11928 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_TO)
11929 && (omp_is_allocatable_or_ptr (var)
11930 && omp_check_optional_argument (var, false)))
11931 var = build_fold_indirect_ref (var);
11932 else if ((OMP_CLAUSE_CODE (c) != OMP_CLAUSE_FROM
11933 && OMP_CLAUSE_CODE (c) != OMP_CLAUSE_TO)
11934 || (!omp_is_allocatable_or_ptr (var)
11935 && !omp_check_optional_argument (var, false)))
11936 var = build_fold_addr_expr (var);
11937 gimplify_assign (x, var, &ilist);
11938 }
11939 }
11940 s = NULL_TREE;
11941 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
11942 {
11943 gcc_checking_assert (is_gimple_omp_oacc (ctx->stmt));
11944 s = TREE_TYPE (ovar);
11945 if (TREE_CODE (s) == REFERENCE_TYPE
11946 || omp_check_optional_argument (ovar, false))
11947 s = TREE_TYPE (s);
11948 s = TYPE_SIZE_UNIT (s);
11949 }
11950 else
11951 s = OMP_CLAUSE_SIZE (c);
11952 if (s == NULL_TREE)
11953 s = TYPE_SIZE_UNIT (TREE_TYPE (ovar));
11954 s = fold_convert (size_type_node, s);
11955 purpose = size_int (map_idx++);
11956 CONSTRUCTOR_APPEND_ELT (vsize, purpose, s);
11957 if (TREE_CODE (s) != INTEGER_CST)
11958 TREE_STATIC (TREE_VEC_ELT (t, 1)) = 0;
11959
11960 unsigned HOST_WIDE_INT tkind, tkind_zero;
11961 switch (OMP_CLAUSE_CODE (c))
11962 {
11963 case OMP_CLAUSE_MAP:
11964 tkind = OMP_CLAUSE_MAP_KIND (c);
11965 tkind_zero = tkind;
11966 if (OMP_CLAUSE_MAP_MAYBE_ZERO_LENGTH_ARRAY_SECTION (c))
11967 switch (tkind)
11968 {
11969 case GOMP_MAP_ALLOC:
11970 case GOMP_MAP_IF_PRESENT:
11971 case GOMP_MAP_TO:
11972 case GOMP_MAP_FROM:
11973 case GOMP_MAP_TOFROM:
11974 case GOMP_MAP_ALWAYS_TO:
11975 case GOMP_MAP_ALWAYS_FROM:
11976 case GOMP_MAP_ALWAYS_TOFROM:
11977 case GOMP_MAP_RELEASE:
11978 case GOMP_MAP_FORCE_TO:
11979 case GOMP_MAP_FORCE_FROM:
11980 case GOMP_MAP_FORCE_TOFROM:
11981 case GOMP_MAP_FORCE_PRESENT:
11982 tkind_zero = GOMP_MAP_ZERO_LEN_ARRAY_SECTION;
11983 break;
11984 case GOMP_MAP_DELETE:
11985 tkind_zero = GOMP_MAP_DELETE_ZERO_LEN_ARRAY_SECTION;
11986 default:
11987 break;
11988 }
11989 if (tkind_zero != tkind)
11990 {
11991 if (integer_zerop (s))
11992 tkind = tkind_zero;
11993 else if (integer_nonzerop (s))
11994 tkind_zero = tkind;
11995 }
11996 break;
11997 case OMP_CLAUSE_FIRSTPRIVATE:
11998 gcc_checking_assert (is_gimple_omp_oacc (ctx->stmt));
11999 tkind = GOMP_MAP_TO;
12000 tkind_zero = tkind;
12001 break;
12002 case OMP_CLAUSE_TO:
12003 tkind = GOMP_MAP_TO;
12004 tkind_zero = tkind;
12005 break;
12006 case OMP_CLAUSE_FROM:
12007 tkind = GOMP_MAP_FROM;
12008 tkind_zero = tkind;
12009 break;
12010 default:
12011 gcc_unreachable ();
12012 }
12013 gcc_checking_assert (tkind
12014 < (HOST_WIDE_INT_C (1U) << talign_shift));
12015 gcc_checking_assert (tkind_zero
12016 < (HOST_WIDE_INT_C (1U) << talign_shift));
12017 talign = ceil_log2 (talign);
12018 tkind |= talign << talign_shift;
12019 tkind_zero |= talign << talign_shift;
12020 gcc_checking_assert (tkind
12021 <= tree_to_uhwi (TYPE_MAX_VALUE (tkind_type)));
12022 gcc_checking_assert (tkind_zero
12023 <= tree_to_uhwi (TYPE_MAX_VALUE (tkind_type)));
12024 if (tkind == tkind_zero)
12025 x = build_int_cstu (tkind_type, tkind);
12026 else
12027 {
12028 TREE_STATIC (TREE_VEC_ELT (t, 2)) = 0;
12029 x = build3 (COND_EXPR, tkind_type,
12030 fold_build2 (EQ_EXPR, boolean_type_node,
12031 unshare_expr (s), size_zero_node),
12032 build_int_cstu (tkind_type, tkind_zero),
12033 build_int_cstu (tkind_type, tkind));
12034 }
12035 CONSTRUCTOR_APPEND_ELT (vkind, purpose, x);
12036 if (nc && nc != c)
12037 c = nc;
12038 break;
12039
12040 case OMP_CLAUSE_FIRSTPRIVATE:
12041 if (is_oacc_parallel_or_serial (ctx))
12042 goto oacc_firstprivate_map;
12043 ovar = OMP_CLAUSE_DECL (c);
12044 if (omp_is_reference (ovar))
12045 talign = TYPE_ALIGN_UNIT (TREE_TYPE (TREE_TYPE (ovar)));
12046 else
12047 talign = DECL_ALIGN_UNIT (ovar);
12048 var = lookup_decl_in_outer_ctx (ovar, ctx);
12049 x = build_sender_ref (ovar, ctx);
12050 tkind = GOMP_MAP_FIRSTPRIVATE;
12051 type = TREE_TYPE (ovar);
12052 if (omp_is_reference (ovar))
12053 type = TREE_TYPE (type);
12054 if ((INTEGRAL_TYPE_P (type)
12055 && TYPE_PRECISION (type) <= POINTER_SIZE)
12056 || TREE_CODE (type) == POINTER_TYPE)
12057 {
12058 tkind = GOMP_MAP_FIRSTPRIVATE_INT;
12059 tree t = var;
12060 if (omp_is_reference (var))
12061 t = build_simple_mem_ref (var);
12062 else if (OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (c))
12063 TREE_NO_WARNING (var) = 1;
12064 if (TREE_CODE (type) != POINTER_TYPE)
12065 t = fold_convert (pointer_sized_int_node, t);
12066 t = fold_convert (TREE_TYPE (x), t);
12067 gimplify_assign (x, t, &ilist);
12068 }
12069 else if (omp_is_reference (var))
12070 gimplify_assign (x, var, &ilist);
12071 else if (is_gimple_reg (var))
12072 {
12073 tree avar = create_tmp_var (TREE_TYPE (var));
12074 mark_addressable (avar);
12075 if (OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (c))
12076 TREE_NO_WARNING (var) = 1;
12077 gimplify_assign (avar, var, &ilist);
12078 avar = build_fold_addr_expr (avar);
12079 gimplify_assign (x, avar, &ilist);
12080 }
12081 else
12082 {
12083 var = build_fold_addr_expr (var);
12084 gimplify_assign (x, var, &ilist);
12085 }
12086 if (tkind == GOMP_MAP_FIRSTPRIVATE_INT)
12087 s = size_int (0);
12088 else if (omp_is_reference (ovar))
12089 s = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ovar)));
12090 else
12091 s = TYPE_SIZE_UNIT (TREE_TYPE (ovar));
12092 s = fold_convert (size_type_node, s);
12093 purpose = size_int (map_idx++);
12094 CONSTRUCTOR_APPEND_ELT (vsize, purpose, s);
12095 if (TREE_CODE (s) != INTEGER_CST)
12096 TREE_STATIC (TREE_VEC_ELT (t, 1)) = 0;
12097
12098 gcc_checking_assert (tkind
12099 < (HOST_WIDE_INT_C (1U) << talign_shift));
12100 talign = ceil_log2 (talign);
12101 tkind |= talign << talign_shift;
12102 gcc_checking_assert (tkind
12103 <= tree_to_uhwi (TYPE_MAX_VALUE (tkind_type)));
12104 CONSTRUCTOR_APPEND_ELT (vkind, purpose,
12105 build_int_cstu (tkind_type, tkind));
12106 break;
12107
12108 case OMP_CLAUSE_USE_DEVICE_PTR:
12109 case OMP_CLAUSE_USE_DEVICE_ADDR:
12110 case OMP_CLAUSE_IS_DEVICE_PTR:
12111 ovar = OMP_CLAUSE_DECL (c);
12112 var = lookup_decl_in_outer_ctx (ovar, ctx);
12113
12114 if (lang_hooks.decls.omp_array_data (ovar, true))
12115 {
12116 tkind = (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_IS_DEVICE_PTR
12117 ? GOMP_MAP_USE_DEVICE_PTR : GOMP_MAP_FIRSTPRIVATE_INT);
12118 x = build_sender_ref ((splay_tree_key) &DECL_NAME (ovar), ctx);
12119 }
12120 else if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_IS_DEVICE_PTR)
12121 {
12122 tkind = GOMP_MAP_USE_DEVICE_PTR;
12123 x = build_sender_ref ((splay_tree_key) &DECL_UID (ovar), ctx);
12124 }
12125 else
12126 {
12127 tkind = GOMP_MAP_FIRSTPRIVATE_INT;
12128 x = build_sender_ref (ovar, ctx);
12129 }
12130
12131 if (is_gimple_omp_oacc (ctx->stmt))
12132 {
12133 gcc_assert (tkind == GOMP_MAP_USE_DEVICE_PTR);
12134
12135 if (OMP_CLAUSE_USE_DEVICE_PTR_IF_PRESENT (c))
12136 tkind = GOMP_MAP_USE_DEVICE_PTR_IF_PRESENT;
12137 }
12138
12139 type = TREE_TYPE (ovar);
12140 if (lang_hooks.decls.omp_array_data (ovar, true))
12141 var = lang_hooks.decls.omp_array_data (ovar, false);
12142 else if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_USE_DEVICE_ADDR
12143 && !omp_is_reference (ovar)
12144 && !omp_is_allocatable_or_ptr (ovar))
12145 || TREE_CODE (type) == ARRAY_TYPE)
12146 var = build_fold_addr_expr (var);
12147 else
12148 {
12149 if (omp_is_reference (ovar)
12150 || omp_check_optional_argument (ovar, false)
12151 || omp_is_allocatable_or_ptr (ovar))
12152 {
12153 type = TREE_TYPE (type);
12154 if (TREE_CODE (type) != ARRAY_TYPE
12155 && ((OMP_CLAUSE_CODE (c) != OMP_CLAUSE_USE_DEVICE_ADDR
12156 && !omp_is_allocatable_or_ptr (ovar))
12157 || (omp_is_reference (ovar)
12158 && omp_is_allocatable_or_ptr (ovar))))
12159 var = build_simple_mem_ref (var);
12160 var = fold_convert (TREE_TYPE (x), var);
12161 }
12162 }
12163 tree present;
12164 present = omp_check_optional_argument (ovar, true);
12165 if (present)
12166 {
12167 tree null_label = create_artificial_label (UNKNOWN_LOCATION);
12168 tree notnull_label = create_artificial_label (UNKNOWN_LOCATION);
12169 tree opt_arg_label = create_artificial_label (UNKNOWN_LOCATION);
12170 tree new_x = unshare_expr (x);
12171 gimplify_expr (&present, &ilist, NULL, is_gimple_val,
12172 fb_rvalue);
12173 gcond *cond = gimple_build_cond_from_tree (present,
12174 notnull_label,
12175 null_label);
12176 gimple_seq_add_stmt (&ilist, cond);
12177 gimple_seq_add_stmt (&ilist, gimple_build_label (null_label));
12178 gimplify_assign (new_x, null_pointer_node, &ilist);
12179 gimple_seq_add_stmt (&ilist, gimple_build_goto (opt_arg_label));
12180 gimple_seq_add_stmt (&ilist,
12181 gimple_build_label (notnull_label));
12182 gimplify_assign (x, var, &ilist);
12183 gimple_seq_add_stmt (&ilist,
12184 gimple_build_label (opt_arg_label));
12185 }
12186 else
12187 gimplify_assign (x, var, &ilist);
12188 s = size_int (0);
12189 purpose = size_int (map_idx++);
12190 CONSTRUCTOR_APPEND_ELT (vsize, purpose, s);
12191 gcc_checking_assert (tkind
12192 < (HOST_WIDE_INT_C (1U) << talign_shift));
12193 gcc_checking_assert (tkind
12194 <= tree_to_uhwi (TYPE_MAX_VALUE (tkind_type)));
12195 CONSTRUCTOR_APPEND_ELT (vkind, purpose,
12196 build_int_cstu (tkind_type, tkind));
12197 break;
12198 }
12199
12200 gcc_assert (map_idx == map_cnt);
12201
12202 DECL_INITIAL (TREE_VEC_ELT (t, 1))
12203 = build_constructor (TREE_TYPE (TREE_VEC_ELT (t, 1)), vsize);
12204 DECL_INITIAL (TREE_VEC_ELT (t, 2))
12205 = build_constructor (TREE_TYPE (TREE_VEC_ELT (t, 2)), vkind);
12206 for (int i = 1; i <= 2; i++)
12207 if (!TREE_STATIC (TREE_VEC_ELT (t, i)))
12208 {
12209 gimple_seq initlist = NULL;
12210 force_gimple_operand (build1 (DECL_EXPR, void_type_node,
12211 TREE_VEC_ELT (t, i)),
12212 &initlist, true, NULL_TREE);
12213 gimple_seq_add_seq (&ilist, initlist);
12214
12215 tree clobber = build_clobber (TREE_TYPE (TREE_VEC_ELT (t, i)));
12216 gimple_seq_add_stmt (&olist,
12217 gimple_build_assign (TREE_VEC_ELT (t, i),
12218 clobber));
12219 }
12220
12221 tree clobber = build_clobber (ctx->record_type);
12222 gimple_seq_add_stmt (&olist, gimple_build_assign (ctx->sender_decl,
12223 clobber));
12224 }
12225
12226 /* Once all the expansions are done, sequence all the different
12227 fragments inside gimple_omp_body. */
12228
12229 new_body = NULL;
12230
12231 if (offloaded
12232 && ctx->record_type)
12233 {
12234 t = build_fold_addr_expr_loc (loc, ctx->sender_decl);
12235 /* fixup_child_record_type might have changed receiver_decl's type. */
12236 t = fold_convert_loc (loc, TREE_TYPE (ctx->receiver_decl), t);
12237 gimple_seq_add_stmt (&new_body,
12238 gimple_build_assign (ctx->receiver_decl, t));
12239 }
12240 gimple_seq_add_seq (&new_body, fplist);
12241
12242 if (offloaded || data_region)
12243 {
12244 tree prev = NULL_TREE;
12245 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
12246 switch (OMP_CLAUSE_CODE (c))
12247 {
12248 tree var, x;
12249 default:
12250 break;
12251 case OMP_CLAUSE_FIRSTPRIVATE:
12252 if (is_gimple_omp_oacc (ctx->stmt))
12253 break;
12254 var = OMP_CLAUSE_DECL (c);
12255 if (omp_is_reference (var)
12256 || is_gimple_reg_type (TREE_TYPE (var)))
12257 {
12258 tree new_var = lookup_decl (var, ctx);
12259 tree type;
12260 type = TREE_TYPE (var);
12261 if (omp_is_reference (var))
12262 type = TREE_TYPE (type);
12263 if ((INTEGRAL_TYPE_P (type)
12264 && TYPE_PRECISION (type) <= POINTER_SIZE)
12265 || TREE_CODE (type) == POINTER_TYPE)
12266 {
12267 x = build_receiver_ref (var, false, ctx);
12268 if (TREE_CODE (type) != POINTER_TYPE)
12269 x = fold_convert (pointer_sized_int_node, x);
12270 x = fold_convert (type, x);
12271 gimplify_expr (&x, &new_body, NULL, is_gimple_val,
12272 fb_rvalue);
12273 if (omp_is_reference (var))
12274 {
12275 tree v = create_tmp_var_raw (type, get_name (var));
12276 gimple_add_tmp_var (v);
12277 TREE_ADDRESSABLE (v) = 1;
12278 gimple_seq_add_stmt (&new_body,
12279 gimple_build_assign (v, x));
12280 x = build_fold_addr_expr (v);
12281 }
12282 gimple_seq_add_stmt (&new_body,
12283 gimple_build_assign (new_var, x));
12284 }
12285 else
12286 {
12287 x = build_receiver_ref (var, !omp_is_reference (var), ctx);
12288 gimplify_expr (&x, &new_body, NULL, is_gimple_val,
12289 fb_rvalue);
12290 gimple_seq_add_stmt (&new_body,
12291 gimple_build_assign (new_var, x));
12292 }
12293 }
12294 else if (is_variable_sized (var))
12295 {
12296 tree pvar = DECL_VALUE_EXPR (var);
12297 gcc_assert (TREE_CODE (pvar) == INDIRECT_REF);
12298 pvar = TREE_OPERAND (pvar, 0);
12299 gcc_assert (DECL_P (pvar));
12300 tree new_var = lookup_decl (pvar, ctx);
12301 x = build_receiver_ref (var, false, ctx);
12302 gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
12303 gimple_seq_add_stmt (&new_body,
12304 gimple_build_assign (new_var, x));
12305 }
12306 break;
12307 case OMP_CLAUSE_PRIVATE:
12308 if (is_gimple_omp_oacc (ctx->stmt))
12309 break;
12310 var = OMP_CLAUSE_DECL (c);
12311 if (omp_is_reference (var))
12312 {
12313 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
12314 tree new_var = lookup_decl (var, ctx);
12315 x = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (new_var)));
12316 if (TREE_CONSTANT (x))
12317 {
12318 x = create_tmp_var_raw (TREE_TYPE (TREE_TYPE (new_var)),
12319 get_name (var));
12320 gimple_add_tmp_var (x);
12321 TREE_ADDRESSABLE (x) = 1;
12322 x = build_fold_addr_expr_loc (clause_loc, x);
12323 }
12324 else
12325 break;
12326
12327 x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x);
12328 gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
12329 gimple_seq_add_stmt (&new_body,
12330 gimple_build_assign (new_var, x));
12331 }
12332 break;
12333 case OMP_CLAUSE_USE_DEVICE_PTR:
12334 case OMP_CLAUSE_USE_DEVICE_ADDR:
12335 case OMP_CLAUSE_IS_DEVICE_PTR:
12336 tree new_var;
12337 gimple_seq assign_body;
12338 bool is_array_data;
12339 bool do_optional_check;
12340 assign_body = NULL;
12341 do_optional_check = false;
12342 var = OMP_CLAUSE_DECL (c);
12343 is_array_data = lang_hooks.decls.omp_array_data (var, true) != NULL;
12344
12345 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_IS_DEVICE_PTR)
12346 x = build_sender_ref (is_array_data
12347 ? (splay_tree_key) &DECL_NAME (var)
12348 : (splay_tree_key) &DECL_UID (var), ctx);
12349 else
12350 x = build_receiver_ref (var, false, ctx);
12351
12352 if (is_array_data)
12353 {
12354 bool is_ref = omp_is_reference (var);
12355 do_optional_check = true;
12356 /* First, we copy the descriptor data from the host; then
12357 we update its data to point to the target address. */
12358 new_var = lookup_decl (var, ctx);
12359 new_var = DECL_VALUE_EXPR (new_var);
12360 tree v = new_var;
12361
12362 if (is_ref)
12363 {
12364 var = build_fold_indirect_ref (var);
12365 gimplify_expr (&var, &assign_body, NULL, is_gimple_val,
12366 fb_rvalue);
12367 v = create_tmp_var_raw (TREE_TYPE (var), get_name (var));
12368 gimple_add_tmp_var (v);
12369 TREE_ADDRESSABLE (v) = 1;
12370 gimple_seq_add_stmt (&assign_body,
12371 gimple_build_assign (v, var));
12372 tree rhs = build_fold_addr_expr (v);
12373 gimple_seq_add_stmt (&assign_body,
12374 gimple_build_assign (new_var, rhs));
12375 }
12376 else
12377 gimple_seq_add_stmt (&assign_body,
12378 gimple_build_assign (new_var, var));
12379
12380 tree v2 = lang_hooks.decls.omp_array_data (unshare_expr (v), false);
12381 gcc_assert (v2);
12382 gimplify_expr (&x, &assign_body, NULL, is_gimple_val, fb_rvalue);
12383 gimple_seq_add_stmt (&assign_body,
12384 gimple_build_assign (v2, x));
12385 }
12386 else if (is_variable_sized (var))
12387 {
12388 tree pvar = DECL_VALUE_EXPR (var);
12389 gcc_assert (TREE_CODE (pvar) == INDIRECT_REF);
12390 pvar = TREE_OPERAND (pvar, 0);
12391 gcc_assert (DECL_P (pvar));
12392 new_var = lookup_decl (pvar, ctx);
12393 gimplify_expr (&x, &assign_body, NULL, is_gimple_val, fb_rvalue);
12394 gimple_seq_add_stmt (&assign_body,
12395 gimple_build_assign (new_var, x));
12396 }
12397 else if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_USE_DEVICE_ADDR
12398 && !omp_is_reference (var)
12399 && !omp_is_allocatable_or_ptr (var))
12400 || TREE_CODE (TREE_TYPE (var)) == ARRAY_TYPE)
12401 {
12402 new_var = lookup_decl (var, ctx);
12403 new_var = DECL_VALUE_EXPR (new_var);
12404 gcc_assert (TREE_CODE (new_var) == MEM_REF);
12405 new_var = TREE_OPERAND (new_var, 0);
12406 gcc_assert (DECL_P (new_var));
12407 gimplify_expr (&x, &assign_body, NULL, is_gimple_val, fb_rvalue);
12408 gimple_seq_add_stmt (&assign_body,
12409 gimple_build_assign (new_var, x));
12410 }
12411 else
12412 {
12413 tree type = TREE_TYPE (var);
12414 new_var = lookup_decl (var, ctx);
12415 if (omp_is_reference (var))
12416 {
12417 type = TREE_TYPE (type);
12418 if (TREE_CODE (type) != ARRAY_TYPE
12419 && (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_USE_DEVICE_ADDR
12420 || (omp_is_reference (var)
12421 && omp_is_allocatable_or_ptr (var))))
12422 {
12423 tree v = create_tmp_var_raw (type, get_name (var));
12424 gimple_add_tmp_var (v);
12425 TREE_ADDRESSABLE (v) = 1;
12426 x = fold_convert (type, x);
12427 gimplify_expr (&x, &assign_body, NULL, is_gimple_val,
12428 fb_rvalue);
12429 gimple_seq_add_stmt (&assign_body,
12430 gimple_build_assign (v, x));
12431 x = build_fold_addr_expr (v);
12432 do_optional_check = true;
12433 }
12434 }
12435 new_var = DECL_VALUE_EXPR (new_var);
12436 x = fold_convert (TREE_TYPE (new_var), x);
12437 gimplify_expr (&x, &assign_body, NULL, is_gimple_val, fb_rvalue);
12438 gimple_seq_add_stmt (&assign_body,
12439 gimple_build_assign (new_var, x));
12440 }
12441 tree present;
12442 present = (do_optional_check
12443 ? omp_check_optional_argument (OMP_CLAUSE_DECL (c), true)
12444 : NULL_TREE);
12445 if (present)
12446 {
12447 tree null_label = create_artificial_label (UNKNOWN_LOCATION);
12448 tree notnull_label = create_artificial_label (UNKNOWN_LOCATION);
12449 tree opt_arg_label = create_artificial_label (UNKNOWN_LOCATION);
12450 glabel *null_glabel = gimple_build_label (null_label);
12451 glabel *notnull_glabel = gimple_build_label (notnull_label);
12452 ggoto *opt_arg_ggoto = gimple_build_goto (opt_arg_label);
12453 gimplify_expr (&x, &new_body, NULL, is_gimple_val,
12454 fb_rvalue);
12455 gimplify_expr (&present, &new_body, NULL, is_gimple_val,
12456 fb_rvalue);
12457 gcond *cond = gimple_build_cond_from_tree (present,
12458 notnull_label,
12459 null_label);
12460 gimple_seq_add_stmt (&new_body, cond);
12461 gimple_seq_add_stmt (&new_body, null_glabel);
12462 gimplify_assign (new_var, null_pointer_node, &new_body);
12463 gimple_seq_add_stmt (&new_body, opt_arg_ggoto);
12464 gimple_seq_add_stmt (&new_body, notnull_glabel);
12465 gimple_seq_add_seq (&new_body, assign_body);
12466 gimple_seq_add_stmt (&new_body,
12467 gimple_build_label (opt_arg_label));
12468 }
12469 else
12470 gimple_seq_add_seq (&new_body, assign_body);
12471 break;
12472 }
12473 /* Handle GOMP_MAP_FIRSTPRIVATE_{POINTER,REFERENCE} in second pass,
12474 so that firstprivate vars holding OMP_CLAUSE_SIZE if needed
12475 are already handled. Similarly OMP_CLAUSE_PRIVATE for VLAs
12476 or references to VLAs. */
12477 for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
12478 switch (OMP_CLAUSE_CODE (c))
12479 {
12480 tree var;
12481 default:
12482 break;
12483 case OMP_CLAUSE_MAP:
12484 if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER
12485 || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_REFERENCE)
12486 {
12487 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
12488 poly_int64 offset = 0;
12489 gcc_assert (prev);
12490 var = OMP_CLAUSE_DECL (c);
12491 if (DECL_P (var)
12492 && TREE_CODE (TREE_TYPE (var)) == ARRAY_TYPE
12493 && is_global_var (maybe_lookup_decl_in_outer_ctx (var,
12494 ctx))
12495 && varpool_node::get_create (var)->offloadable)
12496 break;
12497 if (TREE_CODE (var) == INDIRECT_REF
12498 && TREE_CODE (TREE_OPERAND (var, 0)) == COMPONENT_REF)
12499 var = TREE_OPERAND (var, 0);
12500 if (TREE_CODE (var) == COMPONENT_REF)
12501 {
12502 var = get_addr_base_and_unit_offset (var, &offset);
12503 gcc_assert (var != NULL_TREE && DECL_P (var));
12504 }
12505 else if (DECL_SIZE (var)
12506 && TREE_CODE (DECL_SIZE (var)) != INTEGER_CST)
12507 {
12508 tree var2 = DECL_VALUE_EXPR (var);
12509 gcc_assert (TREE_CODE (var2) == INDIRECT_REF);
12510 var2 = TREE_OPERAND (var2, 0);
12511 gcc_assert (DECL_P (var2));
12512 var = var2;
12513 }
12514 tree new_var = lookup_decl (var, ctx), x;
12515 tree type = TREE_TYPE (new_var);
12516 bool is_ref;
12517 if (TREE_CODE (OMP_CLAUSE_DECL (c)) == INDIRECT_REF
12518 && (TREE_CODE (TREE_OPERAND (OMP_CLAUSE_DECL (c), 0))
12519 == COMPONENT_REF))
12520 {
12521 type = TREE_TYPE (TREE_OPERAND (OMP_CLAUSE_DECL (c), 0));
12522 is_ref = true;
12523 new_var = build2 (MEM_REF, type,
12524 build_fold_addr_expr (new_var),
12525 build_int_cst (build_pointer_type (type),
12526 offset));
12527 }
12528 else if (TREE_CODE (OMP_CLAUSE_DECL (c)) == COMPONENT_REF)
12529 {
12530 type = TREE_TYPE (OMP_CLAUSE_DECL (c));
12531 is_ref = TREE_CODE (type) == REFERENCE_TYPE;
12532 new_var = build2 (MEM_REF, type,
12533 build_fold_addr_expr (new_var),
12534 build_int_cst (build_pointer_type (type),
12535 offset));
12536 }
12537 else
12538 is_ref = omp_is_reference (var);
12539 if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_REFERENCE)
12540 is_ref = false;
12541 bool ref_to_array = false;
12542 if (is_ref)
12543 {
12544 type = TREE_TYPE (type);
12545 if (TREE_CODE (type) == ARRAY_TYPE)
12546 {
12547 type = build_pointer_type (type);
12548 ref_to_array = true;
12549 }
12550 }
12551 else if (TREE_CODE (type) == ARRAY_TYPE)
12552 {
12553 tree decl2 = DECL_VALUE_EXPR (new_var);
12554 gcc_assert (TREE_CODE (decl2) == MEM_REF);
12555 decl2 = TREE_OPERAND (decl2, 0);
12556 gcc_assert (DECL_P (decl2));
12557 new_var = decl2;
12558 type = TREE_TYPE (new_var);
12559 }
12560 x = build_receiver_ref (OMP_CLAUSE_DECL (prev), false, ctx);
12561 x = fold_convert_loc (clause_loc, type, x);
12562 if (!integer_zerop (OMP_CLAUSE_SIZE (c)))
12563 {
12564 tree bias = OMP_CLAUSE_SIZE (c);
12565 if (DECL_P (bias))
12566 bias = lookup_decl (bias, ctx);
12567 bias = fold_convert_loc (clause_loc, sizetype, bias);
12568 bias = fold_build1_loc (clause_loc, NEGATE_EXPR, sizetype,
12569 bias);
12570 x = fold_build2_loc (clause_loc, POINTER_PLUS_EXPR,
12571 TREE_TYPE (x), x, bias);
12572 }
12573 if (ref_to_array)
12574 x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x);
12575 gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
12576 if (is_ref && !ref_to_array)
12577 {
12578 tree t = create_tmp_var_raw (type, get_name (var));
12579 gimple_add_tmp_var (t);
12580 TREE_ADDRESSABLE (t) = 1;
12581 gimple_seq_add_stmt (&new_body,
12582 gimple_build_assign (t, x));
12583 x = build_fold_addr_expr_loc (clause_loc, t);
12584 }
12585 gimple_seq_add_stmt (&new_body,
12586 gimple_build_assign (new_var, x));
12587 prev = NULL_TREE;
12588 }
12589 else if (OMP_CLAUSE_CHAIN (c)
12590 && OMP_CLAUSE_CODE (OMP_CLAUSE_CHAIN (c))
12591 == OMP_CLAUSE_MAP
12592 && (OMP_CLAUSE_MAP_KIND (OMP_CLAUSE_CHAIN (c))
12593 == GOMP_MAP_FIRSTPRIVATE_POINTER
12594 || (OMP_CLAUSE_MAP_KIND (OMP_CLAUSE_CHAIN (c))
12595 == GOMP_MAP_FIRSTPRIVATE_REFERENCE)))
12596 prev = c;
12597 break;
12598 case OMP_CLAUSE_PRIVATE:
12599 var = OMP_CLAUSE_DECL (c);
12600 if (is_variable_sized (var))
12601 {
12602 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
12603 tree new_var = lookup_decl (var, ctx);
12604 tree pvar = DECL_VALUE_EXPR (var);
12605 gcc_assert (TREE_CODE (pvar) == INDIRECT_REF);
12606 pvar = TREE_OPERAND (pvar, 0);
12607 gcc_assert (DECL_P (pvar));
12608 tree new_pvar = lookup_decl (pvar, ctx);
12609 tree atmp = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
12610 tree al = size_int (DECL_ALIGN (var));
12611 tree x = TYPE_SIZE_UNIT (TREE_TYPE (new_var));
12612 x = build_call_expr_loc (clause_loc, atmp, 2, x, al);
12613 x = fold_convert_loc (clause_loc, TREE_TYPE (new_pvar), x);
12614 gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
12615 gimple_seq_add_stmt (&new_body,
12616 gimple_build_assign (new_pvar, x));
12617 }
12618 else if (omp_is_reference (var) && !is_gimple_omp_oacc (ctx->stmt))
12619 {
12620 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
12621 tree new_var = lookup_decl (var, ctx);
12622 tree x = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (new_var)));
12623 if (TREE_CONSTANT (x))
12624 break;
12625 else
12626 {
12627 tree atmp
12628 = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
12629 tree rtype = TREE_TYPE (TREE_TYPE (new_var));
12630 tree al = size_int (TYPE_ALIGN (rtype));
12631 x = build_call_expr_loc (clause_loc, atmp, 2, x, al);
12632 }
12633
12634 x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x);
12635 gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
12636 gimple_seq_add_stmt (&new_body,
12637 gimple_build_assign (new_var, x));
12638 }
12639 break;
12640 }
12641
12642 gimple_seq fork_seq = NULL;
12643 gimple_seq join_seq = NULL;
12644
12645 if (is_oacc_parallel_or_serial (ctx))
12646 {
12647 /* If there are reductions on the offloaded region itself, treat
12648 them as a dummy GANG loop. */
12649 tree level = build_int_cst (integer_type_node, GOMP_DIM_GANG);
12650
12651 lower_oacc_reductions (gimple_location (ctx->stmt), clauses, level,
12652 false, NULL, NULL, &fork_seq, &join_seq, ctx);
12653 }
12654
12655 gimple_seq_add_seq (&new_body, fork_seq);
12656 gimple_seq_add_seq (&new_body, tgt_body);
12657 gimple_seq_add_seq (&new_body, join_seq);
12658
12659 if (offloaded)
12660 new_body = maybe_catch_exception (new_body);
12661
12662 gimple_seq_add_stmt (&new_body, gimple_build_omp_return (false));
12663 gimple_omp_set_body (stmt, new_body);
12664 }
12665
12666 bind = gimple_build_bind (NULL, NULL,
12667 tgt_bind ? gimple_bind_block (tgt_bind)
12668 : NULL_TREE);
12669 gsi_replace (gsi_p, dep_bind ? dep_bind : bind, true);
12670 gimple_bind_add_seq (bind, ilist);
12671 gimple_bind_add_stmt (bind, stmt);
12672 gimple_bind_add_seq (bind, olist);
12673
12674 pop_gimplify_context (NULL);
12675
12676 if (dep_bind)
12677 {
12678 gimple_bind_add_seq (dep_bind, dep_ilist);
12679 gimple_bind_add_stmt (dep_bind, bind);
12680 gimple_bind_add_seq (dep_bind, dep_olist);
12681 pop_gimplify_context (dep_bind);
12682 }
12683 }
12684
/* Expand code for an OpenMP teams directive.  Rewrites the GIMPLE_OMP_TEAMS
   statement at *GSI_P (whose scanning context is CTX) into a GIMPLE_BIND
   that evaluates the num_teams/thread_limit clause expressions, calls
   GOMP_teams, and then runs the lowered teams body.  */

static void
lower_omp_teams (gimple_stmt_iterator *gsi_p, omp_context *ctx)
{
  gomp_teams *teams_stmt = as_a <gomp_teams *> (gsi_stmt (*gsi_p));
  push_gimplify_context ();

  /* Replace the teams statement with a bind; the original statement is
     re-added into BIND_BODY below, followed by the runtime call.  */
  tree block = make_node (BLOCK);
  gbind *bind = gimple_build_bind (NULL, NULL, block);
  gsi_replace (gsi_p, bind, true);
  gimple_seq bind_body = NULL;
  gimple_seq dlist = NULL;
  gimple_seq olist = NULL;

  /* Evaluate the num_teams clause; 0 means "let the runtime decide".  */
  tree num_teams = omp_find_clause (gimple_omp_teams_clauses (teams_stmt),
				    OMP_CLAUSE_NUM_TEAMS);
  if (num_teams == NULL_TREE)
    num_teams = build_int_cst (unsigned_type_node, 0);
  else
    {
      num_teams = OMP_CLAUSE_NUM_TEAMS_EXPR (num_teams);
      num_teams = fold_convert (unsigned_type_node, num_teams);
      gimplify_expr (&num_teams, &bind_body, NULL, is_gimple_val, fb_rvalue);
    }
  /* Likewise for thread_limit; 0 again means "runtime default".  */
  tree thread_limit = omp_find_clause (gimple_omp_teams_clauses (teams_stmt),
				       OMP_CLAUSE_THREAD_LIMIT);
  if (thread_limit == NULL_TREE)
    thread_limit = build_int_cst (unsigned_type_node, 0);
  else
    {
      thread_limit = OMP_CLAUSE_THREAD_LIMIT_EXPR (thread_limit);
      thread_limit = fold_convert (unsigned_type_node, thread_limit);
      gimplify_expr (&thread_limit, &bind_body, NULL, is_gimple_val,
		     fb_rvalue);
    }

  /* Lower data-sharing clauses and the body itself.  */
  lower_rec_input_clauses (gimple_omp_teams_clauses (teams_stmt),
			   &bind_body, &dlist, ctx, NULL);
  lower_omp (gimple_omp_body_ptr (teams_stmt), ctx);
  lower_reduction_clauses (gimple_omp_teams_clauses (teams_stmt), &olist,
			   NULL, ctx);
  gimple_seq_add_stmt (&bind_body, teams_stmt);

  /* Emit the GOMP_teams (num_teams, thread_limit) runtime call just after
     the (now clause-less) teams statement.  */
  location_t loc = gimple_location (teams_stmt);
  tree decl = builtin_decl_explicit (BUILT_IN_GOMP_TEAMS);
  gimple *call = gimple_build_call (decl, 2, num_teams, thread_limit);
  gimple_set_location (call, loc);
  gimple_seq_add_stmt (&bind_body, call);

  /* Move the teams body after the call, followed by reduction epilogue
     (OLIST), destructor sequence (DLIST) and the region-closing return.  */
  gimple_seq_add_seq (&bind_body, gimple_omp_body (teams_stmt));
  gimple_omp_set_body (teams_stmt, NULL);
  gimple_seq_add_seq (&bind_body, olist);
  gimple_seq_add_seq (&bind_body, dlist);
  gimple_seq_add_stmt (&bind_body, gimple_build_omp_return (true));
  gimple_bind_set_body (bind, bind_body);

  pop_gimplify_context (bind);

  gimple_bind_append_vars (bind, ctx->block_vars);
  BLOCK_VARS (block) = ctx->block_vars;
  if (BLOCK_VARS (block))
    TREE_USED (block) = 1;
}
12749
12750 /* Callback for lower_omp_1. Return non-NULL if *tp needs to be
12751 regimplified. If DATA is non-NULL, lower_omp_1 is outside
12752 of OMP context, but with task_shared_vars set. */
12753
12754 static tree
12755 lower_omp_regimplify_p (tree *tp, int *walk_subtrees,
12756 void *data)
12757 {
12758 tree t = *tp;
12759
12760 /* Any variable with DECL_VALUE_EXPR needs to be regimplified. */
12761 if (VAR_P (t) && data == NULL && DECL_HAS_VALUE_EXPR_P (t))
12762 return t;
12763
12764 if (task_shared_vars
12765 && DECL_P (t)
12766 && bitmap_bit_p (task_shared_vars, DECL_UID (t)))
12767 return t;
12768
12769 /* If a global variable has been privatized, TREE_CONSTANT on
12770 ADDR_EXPR might be wrong. */
12771 if (data == NULL && TREE_CODE (t) == ADDR_EXPR)
12772 recompute_tree_invariant_for_addr_expr (t);
12773
12774 *walk_subtrees = !IS_TYPE_OR_DECL_P (t);
12775 return NULL_TREE;
12776 }
12777
/* Data to be communicated between lower_omp_regimplify_operands and
   lower_omp_regimplify_operands_p.  */

struct lower_omp_regimplify_operands_data
{
  /* OMP context of the statement being regimplified.  */
  omp_context *ctx;
  /* Flat vector of (saved DECL_VALUE_EXPR, var) pairs for the
     omp_member_access_dummy_var decls whose DECL_VALUE_EXPRs were
     temporarily remapped; used to restore them afterwards.  */
  vec<tree> *decls;
};
12786
12787 /* Helper function for lower_omp_regimplify_operands. Find
12788 omp_member_access_dummy_var vars and adjust temporarily their
12789 DECL_VALUE_EXPRs if needed. */
12790
12791 static tree
12792 lower_omp_regimplify_operands_p (tree *tp, int *walk_subtrees,
12793 void *data)
12794 {
12795 tree t = omp_member_access_dummy_var (*tp);
12796 if (t)
12797 {
12798 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
12799 lower_omp_regimplify_operands_data *ldata
12800 = (lower_omp_regimplify_operands_data *) wi->info;
12801 tree o = maybe_lookup_decl (t, ldata->ctx);
12802 if (o != t)
12803 {
12804 ldata->decls->safe_push (DECL_VALUE_EXPR (*tp));
12805 ldata->decls->safe_push (*tp);
12806 tree v = unshare_and_remap (DECL_VALUE_EXPR (*tp), t, o);
12807 SET_DECL_VALUE_EXPR (*tp, v);
12808 }
12809 }
12810 *walk_subtrees = !IS_TYPE_OR_DECL_P (*tp);
12811 return NULL_TREE;
12812 }
12813
12814 /* Wrapper around gimple_regimplify_operands that adjusts DECL_VALUE_EXPRs
12815 of omp_member_access_dummy_var vars during regimplification. */
12816
12817 static void
12818 lower_omp_regimplify_operands (omp_context *ctx, gimple *stmt,
12819 gimple_stmt_iterator *gsi_p)
12820 {
12821 auto_vec<tree, 10> decls;
12822 if (ctx)
12823 {
12824 struct walk_stmt_info wi;
12825 memset (&wi, '\0', sizeof (wi));
12826 struct lower_omp_regimplify_operands_data data;
12827 data.ctx = ctx;
12828 data.decls = &decls;
12829 wi.info = &data;
12830 walk_gimple_op (stmt, lower_omp_regimplify_operands_p, &wi);
12831 }
12832 gimple_regimplify_operands (stmt, gsi_p);
12833 while (!decls.is_empty ())
12834 {
12835 tree t = decls.pop ();
12836 tree v = decls.pop ();
12837 SET_DECL_VALUE_EXPR (t, v);
12838 }
12839 }
12840
/* Lower the statement at *GSI_P, which is nested inside OMP context CTX
   (NULL when outside any OMP region).  Recurses into the bodies of
   structured statements, dispatches each OMP construct to its dedicated
   lowering routine, rewrites cancellation-related builtin calls, and
   regimplifies operands invalidated by privatization.  */

static void
lower_omp_1 (gimple_stmt_iterator *gsi_p, omp_context *ctx)
{
  gimple *stmt = gsi_stmt (*gsi_p);
  struct walk_stmt_info wi;
  gcall *call_stmt;

  if (gimple_has_location (stmt))
    input_location = gimple_location (stmt);

  /* WI is only passed to lower_omp_regimplify_p when CTX is NULL; its
     non-NULL address signals "outside OMP context" to the callback.  */
  if (task_shared_vars)
    memset (&wi, '\0', sizeof (wi));

  /* If we have issued syntax errors, avoid doing any heavy lifting.
     Just replace the OMP directives with a NOP to avoid
     confusing RTL expansion.  */
  if (seen_error () && is_gimple_omp (stmt))
    {
      gsi_replace (gsi_p, gimple_build_nop (), true);
      return;
    }

  switch (gimple_code (stmt))
    {
    case GIMPLE_COND:
      {
	gcond *cond_stmt = as_a <gcond *> (stmt);
	/* Regimplify the condition operands if privatization may have
	   invalidated them (DECL_VALUE_EXPRs or task-shared vars).  */
	if ((ctx || task_shared_vars)
	    && (walk_tree (gimple_cond_lhs_ptr (cond_stmt),
			   lower_omp_regimplify_p,
			   ctx ? NULL : &wi, NULL)
		|| walk_tree (gimple_cond_rhs_ptr (cond_stmt),
			      lower_omp_regimplify_p,
			      ctx ? NULL : &wi, NULL)))
	  lower_omp_regimplify_operands (ctx, cond_stmt, gsi_p);
      }
      break;
    case GIMPLE_CATCH:
      lower_omp (gimple_catch_handler_ptr (as_a <gcatch *> (stmt)), ctx);
      break;
    case GIMPLE_EH_FILTER:
      lower_omp (gimple_eh_filter_failure_ptr (stmt), ctx);
      break;
    case GIMPLE_TRY:
      lower_omp (gimple_try_eval_ptr (stmt), ctx);
      lower_omp (gimple_try_cleanup_ptr (stmt), ctx);
      break;
    case GIMPLE_TRANSACTION:
      lower_omp (gimple_transaction_body_ptr (as_a <gtransaction *> (stmt)),
		 ctx);
      break;
    case GIMPLE_BIND:
      lower_omp (gimple_bind_body_ptr (as_a <gbind *> (stmt)), ctx);
      maybe_remove_omp_member_access_dummy_vars (as_a <gbind *> (stmt));
      break;
    case GIMPLE_OMP_PARALLEL:
    case GIMPLE_OMP_TASK:
      ctx = maybe_lookup_ctx (stmt);
      gcc_assert (ctx);
      /* Create the label that cancellation checks will branch to.  */
      if (ctx->cancellable)
	ctx->cancel_label = create_artificial_label (UNKNOWN_LOCATION);
      lower_omp_taskreg (gsi_p, ctx);
      break;
    case GIMPLE_OMP_FOR:
      ctx = maybe_lookup_ctx (stmt);
      gcc_assert (ctx);
      if (ctx->cancellable)
	ctx->cancel_label = create_artificial_label (UNKNOWN_LOCATION);
      lower_omp_for (gsi_p, ctx);
      break;
    case GIMPLE_OMP_SECTIONS:
      ctx = maybe_lookup_ctx (stmt);
      gcc_assert (ctx);
      if (ctx->cancellable)
	ctx->cancel_label = create_artificial_label (UNKNOWN_LOCATION);
      lower_omp_sections (gsi_p, ctx);
      break;
    case GIMPLE_OMP_SINGLE:
      ctx = maybe_lookup_ctx (stmt);
      gcc_assert (ctx);
      lower_omp_single (gsi_p, ctx);
      break;
    case GIMPLE_OMP_MASTER:
      ctx = maybe_lookup_ctx (stmt);
      gcc_assert (ctx);
      lower_omp_master (gsi_p, ctx);
      break;
    case GIMPLE_OMP_TASKGROUP:
      ctx = maybe_lookup_ctx (stmt);
      gcc_assert (ctx);
      lower_omp_taskgroup (gsi_p, ctx);
      break;
    case GIMPLE_OMP_ORDERED:
      ctx = maybe_lookup_ctx (stmt);
      gcc_assert (ctx);
      lower_omp_ordered (gsi_p, ctx);
      break;
    case GIMPLE_OMP_SCAN:
      ctx = maybe_lookup_ctx (stmt);
      gcc_assert (ctx);
      lower_omp_scan (gsi_p, ctx);
      break;
    case GIMPLE_OMP_CRITICAL:
      ctx = maybe_lookup_ctx (stmt);
      gcc_assert (ctx);
      lower_omp_critical (gsi_p, ctx);
      break;
    case GIMPLE_OMP_ATOMIC_LOAD:
      /* Only the RHS address of an atomic load can contain decls that
	 need regimplification after privatization.  */
      if ((ctx || task_shared_vars)
	  && walk_tree (gimple_omp_atomic_load_rhs_ptr (
			  as_a <gomp_atomic_load *> (stmt)),
			lower_omp_regimplify_p, ctx ? NULL : &wi, NULL))
	lower_omp_regimplify_operands (ctx, stmt, gsi_p);
      break;
    case GIMPLE_OMP_TARGET:
      ctx = maybe_lookup_ctx (stmt);
      gcc_assert (ctx);
      lower_omp_target (gsi_p, ctx);
      break;
    case GIMPLE_OMP_TEAMS:
      ctx = maybe_lookup_ctx (stmt);
      gcc_assert (ctx);
      /* Host teams constructs are lowered like parallel/task regions;
	 only offloaded teams get the GOMP_teams treatment.  */
      if (gimple_omp_teams_host (as_a <gomp_teams *> (stmt)))
	lower_omp_taskreg (gsi_p, ctx);
      else
	lower_omp_teams (gsi_p, ctx);
      break;
    case GIMPLE_CALL:
      tree fndecl;
      call_stmt = as_a <gcall *> (stmt);
      fndecl = gimple_call_fndecl (call_stmt);
      if (fndecl
	  && fndecl_built_in_p (fndecl, BUILT_IN_NORMAL))
	switch (DECL_FUNCTION_CODE (fndecl))
	  {
	  case BUILT_IN_GOMP_BARRIER:
	    if (ctx == NULL)
	      break;
	    /* FALLTHRU */
	  case BUILT_IN_GOMP_CANCEL:
	  case BUILT_IN_GOMP_CANCELLATION_POINT:
	    omp_context *cctx;
	    cctx = ctx;
	    /* Cancellation applies to the construct enclosing a
	       section, not the section itself.  */
	    if (gimple_code (cctx->stmt) == GIMPLE_OMP_SECTION)
	      cctx = cctx->outer;
	    gcc_assert (gimple_call_lhs (call_stmt) == NULL_TREE);
	    if (!cctx->cancellable)
	      {
		/* In a non-cancellable region a cancellation point is a
		   no-op; barriers and cancel calls stay as they are.  */
		if (DECL_FUNCTION_CODE (fndecl)
		    == BUILT_IN_GOMP_CANCELLATION_POINT)
		  {
		    stmt = gimple_build_nop ();
		    gsi_replace (gsi_p, stmt, false);
		  }
		break;
	      }
	    /* In a cancellable region, use the cancellation-aware
	       barrier which reports whether cancellation happened.  */
	    if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_GOMP_BARRIER)
	      {
		fndecl = builtin_decl_explicit (BUILT_IN_GOMP_BARRIER_CANCEL);
		gimple_call_set_fndecl (call_stmt, fndecl);
		gimple_call_set_fntype (call_stmt, TREE_TYPE (fndecl));
	      }
	    /* Capture the call's result and branch to the region's
	       cancel label if it signals cancellation.  */
	    tree lhs;
	    lhs = create_tmp_var (TREE_TYPE (TREE_TYPE (fndecl)));
	    gimple_call_set_lhs (call_stmt, lhs);
	    tree fallthru_label;
	    fallthru_label = create_artificial_label (UNKNOWN_LOCATION);
	    gimple *g;
	    g = gimple_build_label (fallthru_label);
	    gsi_insert_after (gsi_p, g, GSI_SAME_STMT);
	    g = gimple_build_cond (NE_EXPR, lhs,
				   fold_convert (TREE_TYPE (lhs),
						 boolean_false_node),
				   cctx->cancel_label, fallthru_label);
	    gsi_insert_after (gsi_p, g, GSI_SAME_STMT);
	    break;
	  default:
	    break;
	  }
      goto regimplify;

    case GIMPLE_ASSIGN:
      /* For lastprivate (conditional:) handling, record in the construct's
	 _condtemp_ iteration variable each store into a tracked decl, so
	 the last conditional store wins.  */
      for (omp_context *up = ctx; up; up = up->outer)
	{
	  if (gimple_code (up->stmt) == GIMPLE_OMP_ORDERED
	      || gimple_code (up->stmt) == GIMPLE_OMP_CRITICAL
	      || gimple_code (up->stmt) == GIMPLE_OMP_TASKGROUP
	      || gimple_code (up->stmt) == GIMPLE_OMP_SECTION
	      || gimple_code (up->stmt) == GIMPLE_OMP_SCAN
	      || (gimple_code (up->stmt) == GIMPLE_OMP_TARGET
		  && (gimple_omp_target_kind (up->stmt)
		      == GF_OMP_TARGET_KIND_DATA)))
	    continue;
	  else if (!up->lastprivate_conditional_map)
	    break;
	  tree lhs = get_base_address (gimple_assign_lhs (stmt));
	  if (TREE_CODE (lhs) == MEM_REF
	      && DECL_P (TREE_OPERAND (lhs, 0))
	      && TREE_CODE (TREE_TYPE (TREE_OPERAND (lhs,
						     0))) == REFERENCE_TYPE)
	    lhs = TREE_OPERAND (lhs, 0);
	  if (DECL_P (lhs))
	    if (tree *v = up->lastprivate_conditional_map->get (lhs))
	      {
		tree clauses;
		if (up->combined_into_simd_safelen1)
		  {
		    up = up->outer;
		    if (gimple_code (up->stmt) == GIMPLE_OMP_SCAN)
		      up = up->outer;
		  }
		if (gimple_code (up->stmt) == GIMPLE_OMP_FOR)
		  clauses = gimple_omp_for_clauses (up->stmt);
		else
		  clauses = gimple_omp_sections_clauses (up->stmt);
		tree c = omp_find_clause (clauses, OMP_CLAUSE__CONDTEMP_);
		if (!OMP_CLAUSE__CONDTEMP__ITER (c))
		  c = omp_find_clause (OMP_CLAUSE_CHAIN (c),
				       OMP_CLAUSE__CONDTEMP_);
		gcc_assert (OMP_CLAUSE__CONDTEMP__ITER (c));
		gimple *g = gimple_build_assign (*v, OMP_CLAUSE_DECL (c));
		gsi_insert_after (gsi_p, g, GSI_SAME_STMT);
	      }
	}
      /* FALLTHRU */

    default:
    regimplify:
      if ((ctx || task_shared_vars)
	  && walk_gimple_op (stmt, lower_omp_regimplify_p,
			     ctx ? NULL : &wi))
	{
	  /* Just remove clobbers, this should happen only if we have
	     "privatized" local addressable variables in SIMD regions,
	     the clobber isn't needed in that case and gimplifying address
	     of the ARRAY_REF into a pointer and creating MEM_REF based
	     clobber would create worse code than we get with the clobber
	     dropped.  */
	  if (gimple_clobber_p (stmt))
	    {
	      gsi_replace (gsi_p, gimple_build_nop (), true);
	      break;
	    }
	  lower_omp_regimplify_operands (ctx, stmt, gsi_p);
	}
      break;
    }
}
13089
13090 static void
13091 lower_omp (gimple_seq *body, omp_context *ctx)
13092 {
13093 location_t saved_location = input_location;
13094 gimple_stmt_iterator gsi;
13095 for (gsi = gsi_start (*body); !gsi_end_p (gsi); gsi_next (&gsi))
13096 lower_omp_1 (&gsi, ctx);
13097 /* During gimplification, we haven't folded statments inside offloading
13098 or taskreg regions (gimplify.c:maybe_fold_stmt); do that now. */
13099 if (target_nesting_level || taskreg_nesting_level)
13100 for (gsi = gsi_start (*body); !gsi_end_p (gsi); gsi_next (&gsi))
13101 fold_stmt (&gsi);
13102 input_location = saved_location;
13103 }
13104
/* Main entry point.  Scans the current function's body for OMP regions,
   builds their omp_context records, and then lowers all of them.
   Returns 0 (no additional TODO flags).  */

static unsigned int
execute_lower_omp (void)
{
  gimple_seq body;
  int i;
  omp_context *ctx;

  /* This pass always runs, to provide PROP_gimple_lomp.
     But often, there is nothing to do.  */
  if (flag_openacc == 0 && flag_openmp == 0
      && flag_openmp_simd == 0)
    return 0;

  all_contexts = splay_tree_new (splay_tree_compare_pointers, 0,
				 delete_omp_context);

  body = gimple_body (current_function_decl);

  /* Phase 1: scan the IL and create an omp_context per region.  */
  scan_omp (&body, NULL);
  gcc_assert (taskreg_nesting_level == 0);
  FOR_EACH_VEC_ELT (taskreg_contexts, i, ctx)
    finish_taskreg_scan (ctx);
  taskreg_contexts.release ();

  /* Phase 2: lower the regions, if any were found.  */
  if (all_contexts->root)
    {
      if (task_shared_vars)
	push_gimplify_context ();
      lower_omp (&body, NULL);
      if (task_shared_vars)
	pop_gimplify_context (NULL);
    }

  if (all_contexts)
    {
      splay_tree_delete (all_contexts);
      all_contexts = NULL;
    }
  BITMAP_FREE (task_shared_vars);
  BITMAP_FREE (global_nonaddressable_vars);

  /* If current function is a method, remove artificial dummy VAR_DECL created
     for non-static data member privatization, they aren't needed for
     debuginfo nor anything else, have been already replaced everywhere in the
     IL and cause problems with LTO.  */
  if (DECL_ARGUMENTS (current_function_decl)
      && DECL_ARTIFICIAL (DECL_ARGUMENTS (current_function_decl))
      && (TREE_CODE (TREE_TYPE (DECL_ARGUMENTS (current_function_decl)))
	  == POINTER_TYPE))
    remove_member_access_dummy_vars (DECL_INITIAL (current_function_decl));
  return 0;
}
13159
namespace {

/* Pass descriptor for the "omplower" pass; requires any GIMPLE and
   provides both the host and device lowered-OMP properties.  */
const pass_data pass_data_lower_omp =
{
  GIMPLE_PASS, /* type */
  "omplower", /* name */
  OPTGROUP_OMP, /* optinfo_flags */
  TV_NONE, /* tv_id */
  PROP_gimple_any, /* properties_required */
  PROP_gimple_lomp | PROP_gimple_lomp_dev, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

/* The OMP lowering pass itself; unconditionally gated (no gate method),
   see the flag checks at the top of execute_lower_omp.  */
class pass_lower_omp : public gimple_opt_pass
{
public:
  pass_lower_omp (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_lower_omp, ctxt)
  {}

  /* opt_pass methods: */
  virtual unsigned int execute (function *) { return execute_lower_omp (); }

}; // class pass_lower_omp

} // anon namespace
13188
13189 gimple_opt_pass *
13190 make_pass_lower_omp (gcc::context *ctxt)
13191 {
13192 return new pass_lower_omp (ctxt);
13193 }
13194 \f
/* The following is a utility to diagnose structured block violations.
   It is not part of the "omplower" pass, as that's invoked too late.  It
   should be invoked by the respective front ends after gimplification.  */

/* Map from each LABEL_DECL to the innermost enclosing OMP construct
   (a gimple statement, NULL if none); populated by diagnose_sb_1 and
   queried by diagnose_sb_2.  */
static splay_tree all_labels;
13200
/* Check for mismatched contexts and generate an error if needed.  Return
   true if an error is detected.  BRANCH_CTX is the innermost OMP construct
   enclosing the branch at *GSI_P, LABEL_CTX the one enclosing its target
   label; either may be NULL for "no enclosing construct".  On error the
   offending branch is replaced by a NOP.  */

static bool
diagnose_sb_0 (gimple_stmt_iterator *gsi_p,
	       gimple *branch_ctx, gimple *label_ctx)
{
  gcc_checking_assert (!branch_ctx || is_gimple_omp (branch_ctx));
  gcc_checking_assert (!label_ctx || is_gimple_omp (label_ctx));

  /* Same construct (or both outside any construct): no violation.  */
  if (label_ctx == branch_ctx)
    return false;

  /* Pick the spec name for the diagnostic: OpenACC if either end of the
     branch is inside an OpenACC construct, OpenMP otherwise.  */
  const char* kind = NULL;

  if (flag_openacc)
    {
      if ((branch_ctx && is_gimple_omp_oacc (branch_ctx))
	  || (label_ctx && is_gimple_omp_oacc (label_ctx)))
	{
	  gcc_checking_assert (kind == NULL);
	  kind = "OpenACC";
	}
    }
  if (kind == NULL)
    {
      gcc_checking_assert (flag_openmp || flag_openmp_simd);
      kind = "OpenMP";
    }

  /* Previously we kept track of the label's entire context in diagnose_sb_[12]
     so we could traverse it and issue a correct "exit" or "enter" error
     message upon a structured block violation.

     We built the context by building a list with tree_cons'ing, but there is
     no easy counterpart in gimple tuples.  It seems like far too much work
     for issuing exit/enter error messages.  If someone really misses the
     distinct error message... patches welcome.  */

#if 0
  /* Try to avoid confusing the user by producing and error message
     with correct "exit" or "enter" verbiage.  We prefer "exit"
     unless we can show that LABEL_CTX is nested within BRANCH_CTX.  */
  if (branch_ctx == NULL)
    exit_p = false;
  else
    {
      while (label_ctx)
	{
	  if (TREE_VALUE (label_ctx) == branch_ctx)
	    {
	      exit_p = false;
	      break;
	    }
	  label_ctx = TREE_CHAIN (label_ctx);
	}
    }

  if (exit_p)
    error ("invalid exit from %s structured block", kind);
  else
    error ("invalid entry to %s structured block", kind);
#endif

  /* If it's obvious we have an invalid entry, be specific about the error.  */
  if (branch_ctx == NULL)
    error ("invalid entry to %s structured block", kind);
  else
    {
      /* Otherwise, be vague and lazy, but efficient.  */
      error ("invalid branch to/from %s structured block", kind);
    }

  /* Remove the invalid branch so later passes don't trip over it.  */
  gsi_replace (gsi_p, gimple_build_nop (), false);
  return true;
}
13277
/* Pass 1: Create a minimal tree of structured blocks, and record
   where each label is found.  Walk callback: WI->info carries the
   innermost enclosing OMP construct (NULL if none); each label seen is
   recorded in ALL_LABELS together with that construct.  */

static tree
diagnose_sb_1 (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
	       struct walk_stmt_info *wi)
{
  gimple *context = (gimple *) wi->info;
  gimple *inner_context;
  gimple *stmt = gsi_stmt (*gsi_p);

  *handled_ops_p = true;

  switch (gimple_code (stmt))
    {
    WALK_SUBSTMTS;

    case GIMPLE_OMP_PARALLEL:
    case GIMPLE_OMP_TASK:
    case GIMPLE_OMP_SECTIONS:
    case GIMPLE_OMP_SINGLE:
    case GIMPLE_OMP_SECTION:
    case GIMPLE_OMP_MASTER:
    case GIMPLE_OMP_ORDERED:
    case GIMPLE_OMP_SCAN:
    case GIMPLE_OMP_CRITICAL:
    case GIMPLE_OMP_TARGET:
    case GIMPLE_OMP_TEAMS:
    case GIMPLE_OMP_TASKGROUP:
      /* The minimal context here is just the current OMP construct.  */
      inner_context = stmt;
      wi->info = inner_context;
      walk_gimple_seq (gimple_omp_body (stmt), diagnose_sb_1, NULL, wi);
      wi->info = context;
      break;

    case GIMPLE_OMP_FOR:
      inner_context = stmt;
      wi->info = inner_context;
      /* gimple_omp_for_{index,initial,final} are all DECLs; no need to
	 walk them.  */
      walk_gimple_seq (gimple_omp_for_pre_body (stmt),
		       diagnose_sb_1, NULL, wi);
      walk_gimple_seq (gimple_omp_body (stmt), diagnose_sb_1, NULL, wi);
      wi->info = context;
      break;

    case GIMPLE_LABEL:
      /* Record this label's enclosing construct for pass 2 lookups.  */
      splay_tree_insert (all_labels,
			 (splay_tree_key) gimple_label_label (
					    as_a <glabel *> (stmt)),
			 (splay_tree_value) context);
      break;

    default:
      break;
    }

  return NULL_TREE;
}
13338
/* Pass 2: Check each branch and see if its context differs from that of
   the destination label's context.  Walk callback: WI->info carries the
   innermost enclosing OMP construct; every branching statement is compared
   against the recorded context of each of its target labels via
   diagnose_sb_0, which issues the diagnostic.  */

static tree
diagnose_sb_2 (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
	       struct walk_stmt_info *wi)
{
  gimple *context = (gimple *) wi->info;
  splay_tree_node n;
  gimple *stmt = gsi_stmt (*gsi_p);

  *handled_ops_p = true;

  switch (gimple_code (stmt))
    {
    WALK_SUBSTMTS;

    case GIMPLE_OMP_PARALLEL:
    case GIMPLE_OMP_TASK:
    case GIMPLE_OMP_SECTIONS:
    case GIMPLE_OMP_SINGLE:
    case GIMPLE_OMP_SECTION:
    case GIMPLE_OMP_MASTER:
    case GIMPLE_OMP_ORDERED:
    case GIMPLE_OMP_SCAN:
    case GIMPLE_OMP_CRITICAL:
    case GIMPLE_OMP_TARGET:
    case GIMPLE_OMP_TEAMS:
    case GIMPLE_OMP_TASKGROUP:
      /* Descend with this construct as the new enclosing context.  */
      wi->info = stmt;
      walk_gimple_seq_mod (gimple_omp_body_ptr (stmt), diagnose_sb_2, NULL, wi);
      wi->info = context;
      break;

    case GIMPLE_OMP_FOR:
      wi->info = stmt;
      /* gimple_omp_for_{index,initial,final} are all DECLs; no need to
	 walk them.  */
      walk_gimple_seq_mod (gimple_omp_for_pre_body_ptr (stmt),
			   diagnose_sb_2, NULL, wi);
      walk_gimple_seq_mod (gimple_omp_body_ptr (stmt), diagnose_sb_2, NULL, wi);
      wi->info = context;
      break;

    case GIMPLE_COND:
      {
	gcond *cond_stmt = as_a <gcond *> (stmt);
	/* Check both the true and false edges of the conditional.  */
	tree lab = gimple_cond_true_label (cond_stmt);
	if (lab)
	  {
	    n = splay_tree_lookup (all_labels,
				   (splay_tree_key) lab);
	    diagnose_sb_0 (gsi_p, context,
			   n ? (gimple *) n->value : NULL);
	  }
	lab = gimple_cond_false_label (cond_stmt);
	if (lab)
	  {
	    n = splay_tree_lookup (all_labels,
				   (splay_tree_key) lab);
	    diagnose_sb_0 (gsi_p, context,
			   n ? (gimple *) n->value : NULL);
	  }
      }
      break;

    case GIMPLE_GOTO:
      {
	tree lab = gimple_goto_dest (stmt);
	/* Computed gotos can't be checked here.  */
	if (TREE_CODE (lab) != LABEL_DECL)
	  break;

	n = splay_tree_lookup (all_labels, (splay_tree_key) lab);
	diagnose_sb_0 (gsi_p, context, n ? (gimple *) n->value : NULL);
      }
      break;

    case GIMPLE_SWITCH:
      {
	gswitch *switch_stmt = as_a <gswitch *> (stmt);
	unsigned int i;
	/* Check every case label; stop at the first violation, as
	   diagnose_sb_0 has then replaced the switch with a NOP.  */
	for (i = 0; i < gimple_switch_num_labels (switch_stmt); ++i)
	  {
	    tree lab = CASE_LABEL (gimple_switch_label (switch_stmt, i));
	    n = splay_tree_lookup (all_labels, (splay_tree_key) lab);
	    if (n && diagnose_sb_0 (gsi_p, context, (gimple *) n->value))
	      break;
	  }
      }
      break;

    case GIMPLE_RETURN:
      /* A return inside any OMP construct exits it — always invalid.  */
      diagnose_sb_0 (gsi_p, context, NULL);
      break;

    default:
      break;
    }

  return NULL_TREE;
}
13440
13441 static unsigned int
13442 diagnose_omp_structured_block_errors (void)
13443 {
13444 struct walk_stmt_info wi;
13445 gimple_seq body = gimple_body (current_function_decl);
13446
13447 all_labels = splay_tree_new (splay_tree_compare_pointers, 0, 0);
13448
13449 memset (&wi, 0, sizeof (wi));
13450 walk_gimple_seq (body, diagnose_sb_1, NULL, &wi);
13451
13452 memset (&wi, 0, sizeof (wi));
13453 wi.want_locations = true;
13454 walk_gimple_seq_mod (&body, diagnose_sb_2, NULL, &wi);
13455
13456 gimple_set_body (current_function_decl, body);
13457
13458 splay_tree_delete (all_labels);
13459 all_labels = NULL;
13460
13461 return 0;
13462 }
13463
namespace {

/* Pass descriptor for the structured-block diagnostic pass; runs on any
   GIMPLE and provides no properties of its own.  */
const pass_data pass_data_diagnose_omp_blocks =
{
  GIMPLE_PASS, /* type */
  "*diagnose_omp_blocks", /* name */
  OPTGROUP_OMP, /* optinfo_flags */
  TV_NONE, /* tv_id */
  PROP_gimple_any, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

/* Diagnostic pass, gated on any of the OpenACC/OpenMP/OpenMP-simd
   options being enabled.  */
class pass_diagnose_omp_blocks : public gimple_opt_pass
{
public:
  pass_diagnose_omp_blocks (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_diagnose_omp_blocks, ctxt)
  {}

  /* opt_pass methods: */
  virtual bool gate (function *)
  {
    return flag_openacc || flag_openmp || flag_openmp_simd;
  }
  virtual unsigned int execute (function *)
  {
    return diagnose_omp_structured_block_errors ();
  }

}; // class pass_diagnose_omp_blocks

} // anon namespace
13499
13500 gimple_opt_pass *
13501 make_pass_diagnose_omp_blocks (gcc::context *ctxt)
13502 {
13503 return new pass_diagnose_omp_blocks (ctxt);
13504 }
13505 \f
13506
13507 #include "gt-omp-low.h"