openmp: Add support for non-rect simd and improve collapsed simd support
[gcc.git] / gcc / omp-low.c
1 /* Lowering pass for OMP directives. Converts OMP directives into explicit
2 calls to the runtime library (libgomp), data marshalling to implement data
3 sharing and copying clauses, offloading to accelerators, and more.
4
5 Contributed by Diego Novillo <dnovillo@redhat.com>
6
7 Copyright (C) 2005-2020 Free Software Foundation, Inc.
8
9 This file is part of GCC.
10
11 GCC is free software; you can redistribute it and/or modify it under
12 the terms of the GNU General Public License as published by the Free
13 Software Foundation; either version 3, or (at your option) any later
14 version.
15
16 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
17 WARRANTY; without even the implied warranty of MERCHANTABILITY or
18 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
19 for more details.
20
21 You should have received a copy of the GNU General Public License
22 along with GCC; see the file COPYING3. If not see
23 <http://www.gnu.org/licenses/>. */
24
25 #include "config.h"
26 #include "system.h"
27 #include "coretypes.h"
28 #include "backend.h"
29 #include "target.h"
30 #include "tree.h"
31 #include "gimple.h"
32 #include "tree-pass.h"
33 #include "ssa.h"
34 #include "cgraph.h"
35 #include "pretty-print.h"
36 #include "diagnostic-core.h"
37 #include "fold-const.h"
38 #include "stor-layout.h"
39 #include "internal-fn.h"
40 #include "gimple-fold.h"
41 #include "gimplify.h"
42 #include "gimple-iterator.h"
43 #include "gimplify-me.h"
44 #include "gimple-walk.h"
45 #include "tree-iterator.h"
46 #include "tree-inline.h"
47 #include "langhooks.h"
48 #include "tree-dfa.h"
49 #include "tree-ssa.h"
50 #include "splay-tree.h"
51 #include "omp-general.h"
52 #include "omp-low.h"
53 #include "gimple-low.h"
54 #include "alloc-pool.h"
55 #include "symbol-summary.h"
56 #include "tree-nested.h"
57 #include "context.h"
58 #include "gomp-constants.h"
59 #include "gimple-pretty-print.h"
60 #include "stringpool.h"
61 #include "attribs.h"
62
63 /* Lowering of OMP parallel and workshare constructs proceeds in two
64 phases. The first phase scans the function looking for OMP statements
65 and then for variables that must be replaced to satisfy data sharing
66 clauses. The second phase expands code for the constructs, as well as
67 re-gimplifying things when variables have been replaced with complex
68 expressions.
69
70 Final code generation is done by pass_expand_omp. The flowgraph is
71 scanned for regions which are then moved to a new
72 function, to be invoked by the thread library, or offloaded. */
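
/* For illustration, a minimal sketch (hand-written here, not the exact
   generated code) of how a construct such as

       int x = 1;
       #pragma omp parallel shared (x)
       foo (&x);

   is outlined:

       struct .omp_data_s { int *x; };

       static void bar._omp_fn.0 (struct .omp_data_s *.omp_data_i)
       {
         foo (.omp_data_i->x);
       }

   The parent fills in a local .omp_data_o object (here storing &x, as
   x is addressable) and passes bar._omp_fn.0 plus the object's address
   to GOMP_parallel.  Scalars that are provably not accessible elsewhere
   may instead be passed by copy-in/copy-out; see use_pointer_for_field
   below.  */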
73
74 /* Context structure. Used to store information about each parallel
75 directive in the code. */
76
77 struct omp_context
78 {
79 /* This field must be at the beginning, as we do "inheritance": Some
80 callback functions for tree-inline.c (e.g., omp_copy_decl)
81 receive a copy_body_data pointer that is cast back to an
82 omp_context pointer. */
83 copy_body_data cb;
84
85 /* The tree of contexts corresponding to the encountered constructs. */
86 struct omp_context *outer;
87 gimple *stmt;
88
89 /* Map variables to fields in a structure that allows communication
90 between sending and receiving threads. */
91 splay_tree field_map;
92 tree record_type;
93 tree sender_decl;
94 tree receiver_decl;
95
96 /* These are used just by task contexts, if the task firstprivate fn is
97 needed. srecord_type is used to communicate from the thread
98 that encountered the task construct to the task firstprivate fn;
99 record_type is allocated by GOMP_task, initialized by the task
100 firstprivate fn and passed to the task body fn. */
101 splay_tree sfield_map;
102 tree srecord_type;
103
104 /* A chain of variables to add to the top-level block surrounding the
105 construct. In the case of a parallel, this is in the child function. */
106 tree block_vars;
107
108 /* Label to which GOMP_cancel{,lation_point} and explicit and implicit
109 barriers should jump during the omplower pass. */
110 tree cancel_label;
111
112 /* The sibling GIMPLE_OMP_FOR simd with _simt_ clause or NULL
113 otherwise. */
114 gimple *simt_stmt;
115
116 /* For task reductions registered in this context, a vector containing
117 the length of the private copies block (if constant, otherwise NULL)
118 and then offsets (if constant, otherwise NULL) for each entry. */
119 vec<tree> task_reductions;
120
121 /* A hash map from the reduction clauses to the registered array
122 elts. */
123 hash_map<tree, unsigned> *task_reduction_map;
124
125 /* And a hash map from the lastprivate(conditional:) variables to their
126 corresponding tracking loop iteration variables. */
127 hash_map<tree, tree> *lastprivate_conditional_map;
128
129 /* A tree_list of the reduction clauses in this context. This is
130 only used for checking the consistency of OpenACC reduction
131 clauses in scan_omp_for and is not guaranteed to contain a valid
132 value outside of this function. */
133 tree local_reduction_clauses;
134
135 /* A tree_list of the reduction clauses in outer contexts. This is
136 only used for checking the consistency of OpenACC reduction
137 clauses in scan_omp_for and is not guaranteed to contain a valid
138 value outside of this function. */
139 tree outer_reduction_clauses;
140
141 /* Nesting depth of this context. Used to beautify error messages re
142 invalid gotos. The outermost ctx is depth 1, with depth 0 being
143 reserved for the main body of the function. */
144 int depth;
145
146 /* True if this parallel directive is nested within another. */
147 bool is_nested;
148
149 /* True if this construct can be cancelled. */
150 bool cancellable;
151
152 /* True if lower_omp_1 should look up lastprivate conditional in parent
153 context. */
154 bool combined_into_simd_safelen1;
155
156 /* True if there is a nested scan context with an inclusive clause. */
157 bool scan_inclusive;
158
159 /* True if there is a nested scan context with an exclusive clause. */
160 bool scan_exclusive;
161
162 /* True in the second simd loop of for simd with inscan reductions. */
163 bool for_simd_scan_phase;
164
165 /* True if there is an order(concurrent) clause on the construct. */
166 bool order_concurrent;
167
168 /* True if there is a bind clause on the construct (i.e. a loop construct). */
169 bool loop_p;
170 };
171
172 static splay_tree all_contexts;
173 static int taskreg_nesting_level;
174 static int target_nesting_level;
175 static bitmap task_shared_vars;
176 static bitmap global_nonaddressable_vars;
177 static vec<omp_context *> taskreg_contexts;
178
179 static void scan_omp (gimple_seq *, omp_context *);
180 static tree scan_omp_1_op (tree *, int *, void *);
181
182 #define WALK_SUBSTMTS \
183 case GIMPLE_BIND: \
184 case GIMPLE_TRY: \
185 case GIMPLE_CATCH: \
186 case GIMPLE_EH_FILTER: \
187 case GIMPLE_TRANSACTION: \
188 /* The sub-statements for these should be walked. */ \
189 *handled_ops_p = false; \
190 break;
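
/* Typical use in a walk_gimple_seq callback (see omp_find_combined_for
   below):

     switch (gimple_code (stmt))
       {
       WALK_SUBSTMTS;

       case GIMPLE_OMP_FOR:
         ...
       default:
         break;
       }
*/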
191
192 /* Return true if CTX corresponds to an OpenACC 'parallel' or 'serial'
193 region. */
194
195 static bool
196 is_oacc_parallel_or_serial (omp_context *ctx)
197 {
198 enum gimple_code outer_type = gimple_code (ctx->stmt);
199 return ((outer_type == GIMPLE_OMP_TARGET)
200 && ((gimple_omp_target_kind (ctx->stmt)
201 == GF_OMP_TARGET_KIND_OACC_PARALLEL)
202 || (gimple_omp_target_kind (ctx->stmt)
203 == GF_OMP_TARGET_KIND_OACC_SERIAL)));
204 }
205
206 /* Return true if CTX corresponds to an OpenACC 'kernels' region. */
207
208 static bool
209 is_oacc_kernels (omp_context *ctx)
210 {
211 enum gimple_code outer_type = gimple_code (ctx->stmt);
212 return ((outer_type == GIMPLE_OMP_TARGET)
213 && (gimple_omp_target_kind (ctx->stmt)
214 == GF_OMP_TARGET_KIND_OACC_KERNELS));
215 }
216
217 /* If DECL is the artificial dummy VAR_DECL created for non-static
218 data member privatization, return the underlying "this" parameter,
219 otherwise return NULL. */
220
221 tree
222 omp_member_access_dummy_var (tree decl)
223 {
224 if (!VAR_P (decl)
225 || !DECL_ARTIFICIAL (decl)
226 || !DECL_IGNORED_P (decl)
227 || !DECL_HAS_VALUE_EXPR_P (decl)
228 || !lang_hooks.decls.omp_disregard_value_expr (decl, false))
229 return NULL_TREE;
230
231 tree v = DECL_VALUE_EXPR (decl);
232 if (TREE_CODE (v) != COMPONENT_REF)
233 return NULL_TREE;
234
235 while (1)
236 switch (TREE_CODE (v))
237 {
238 case COMPONENT_REF:
239 case MEM_REF:
240 case INDIRECT_REF:
241 CASE_CONVERT:
242 case POINTER_PLUS_EXPR:
243 v = TREE_OPERAND (v, 0);
244 continue;
245 case PARM_DECL:
246 if (DECL_CONTEXT (v) == current_function_decl
247 && DECL_ARTIFICIAL (v)
248 && TREE_CODE (TREE_TYPE (v)) == POINTER_TYPE)
249 return v;
250 return NULL_TREE;
251 default:
252 return NULL_TREE;
253 }
254 }
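
/* As a sketch of the expected shape (derived from the checks above):
   for a C++ non-static data member used in a clause such as

     #pragma omp parallel firstprivate (n)   // int n; member of *this

   the front end creates an artificial, ignored VAR_DECL whose
   DECL_VALUE_EXPR is roughly this->n; for such a decl this function
   returns the artificial "this" PARM_DECL, and NULL_TREE otherwise.  */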
255
256 /* Helper for unshare_and_remap, called through walk_tree. */
257
258 static tree
259 unshare_and_remap_1 (tree *tp, int *walk_subtrees, void *data)
260 {
261 tree *pair = (tree *) data;
262 if (*tp == pair[0])
263 {
264 *tp = unshare_expr (pair[1]);
265 *walk_subtrees = 0;
266 }
267 else if (IS_TYPE_OR_DECL_P (*tp))
268 *walk_subtrees = 0;
269 return NULL_TREE;
270 }
271
272 /* Return unshare_expr (X) with all occurrences of FROM
273 replaced with TO. */
274
275 static tree
276 unshare_and_remap (tree x, tree from, tree to)
277 {
278 tree pair[2] = { from, to };
279 x = unshare_expr (x);
280 walk_tree (&x, unshare_and_remap_1, pair, NULL);
281 return x;
282 }
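
/* E.g. with X being the value expr this->n, FROM the dummy var's
   underlying "this" and TO its remapped copy (call it t), the result
   is a fresh t->n tree; see the use in build_outer_var_ref below.  */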
283
284 /* Convenience function for calling scan_omp_1_op on tree operands. */
285
286 static inline tree
287 scan_omp_op (tree *tp, omp_context *ctx)
288 {
289 struct walk_stmt_info wi;
290
291 memset (&wi, 0, sizeof (wi));
292 wi.info = ctx;
293 wi.want_locations = true;
294
295 return walk_tree (tp, scan_omp_1_op, &wi, NULL);
296 }
297
298 static void lower_omp (gimple_seq *, omp_context *);
299 static tree lookup_decl_in_outer_ctx (tree, omp_context *);
300 static tree maybe_lookup_decl_in_outer_ctx (tree, omp_context *);
301
302 /* Return true if CTX is for an omp parallel. */
303
304 static inline bool
305 is_parallel_ctx (omp_context *ctx)
306 {
307 return gimple_code (ctx->stmt) == GIMPLE_OMP_PARALLEL;
308 }
309
310
311 /* Return true if CTX is for an omp task. */
312
313 static inline bool
314 is_task_ctx (omp_context *ctx)
315 {
316 return gimple_code (ctx->stmt) == GIMPLE_OMP_TASK;
317 }
318
319
320 /* Return true if CTX is for an omp taskloop. */
321
322 static inline bool
323 is_taskloop_ctx (omp_context *ctx)
324 {
325 return gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
326 && gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_TASKLOOP;
327 }
328
329
330 /* Return true if CTX is for a host omp teams. */
331
332 static inline bool
333 is_host_teams_ctx (omp_context *ctx)
334 {
335 return gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS
336 && gimple_omp_teams_host (as_a <gomp_teams *> (ctx->stmt));
337 }
338
339 /* Return true if CTX is for an omp parallel or omp task or host omp teams
340 (the last one is strictly not a task region in OpenMP speak, but we
341 need to treat it similarly). */
342
343 static inline bool
344 is_taskreg_ctx (omp_context *ctx)
345 {
346 return is_parallel_ctx (ctx) || is_task_ctx (ctx) || is_host_teams_ctx (ctx);
347 }
348
349 /* Return true if EXPR is variable sized. */
350
351 static inline bool
352 is_variable_sized (const_tree expr)
353 {
354 return !TREE_CONSTANT (TYPE_SIZE_UNIT (TREE_TYPE (expr)));
355 }
356
357 /* Look up variables. The "maybe" form
358 allows the variable not to have been entered; otherwise we
359 assert that the variable must have been entered. */
360
361 static inline tree
362 lookup_decl (tree var, omp_context *ctx)
363 {
364 tree *n = ctx->cb.decl_map->get (var);
365 return *n;
366 }
367
368 static inline tree
369 maybe_lookup_decl (const_tree var, omp_context *ctx)
370 {
371 tree *n = ctx->cb.decl_map->get (const_cast<tree> (var));
372 return n ? *n : NULL_TREE;
373 }
374
375 static inline tree
376 lookup_field (tree var, omp_context *ctx)
377 {
378 splay_tree_node n;
379 n = splay_tree_lookup (ctx->field_map, (splay_tree_key) var);
380 return (tree) n->value;
381 }
382
383 static inline tree
384 lookup_sfield (splay_tree_key key, omp_context *ctx)
385 {
386 splay_tree_node n;
387 n = splay_tree_lookup (ctx->sfield_map
388 ? ctx->sfield_map : ctx->field_map, key);
389 return (tree) n->value;
390 }
391
392 static inline tree
393 lookup_sfield (tree var, omp_context *ctx)
394 {
395 return lookup_sfield ((splay_tree_key) var, ctx);
396 }
397
398 static inline tree
399 maybe_lookup_field (splay_tree_key key, omp_context *ctx)
400 {
401 splay_tree_node n;
402 n = splay_tree_lookup (ctx->field_map, key);
403 return n ? (tree) n->value : NULL_TREE;
404 }
405
406 static inline tree
407 maybe_lookup_field (tree var, omp_context *ctx)
408 {
409 return maybe_lookup_field ((splay_tree_key) var, ctx);
410 }
411
412 /* Return true if DECL should be copied by pointer. SHARED_CTX is
413 the parallel context if DECL is to be shared. */
414
415 static bool
416 use_pointer_for_field (tree decl, omp_context *shared_ctx)
417 {
418 if (AGGREGATE_TYPE_P (TREE_TYPE (decl))
419 || TYPE_ATOMIC (TREE_TYPE (decl)))
420 return true;
421
422 /* We can only use copy-in/copy-out semantics for shared variables
423 when we know the value is not accessible from an outer scope. */
424 if (shared_ctx)
425 {
426 gcc_assert (!is_gimple_omp_oacc (shared_ctx->stmt));
427
428 /* ??? Trivially accessible from anywhere. But why would we even
429 be passing an address in this case? Should we simply assert
430 this to be false, or should we have a cleanup pass that removes
431 these from the list of mappings? */
432 if (is_global_var (maybe_lookup_decl_in_outer_ctx (decl, shared_ctx)))
433 return true;
434
435 /* For variables with DECL_HAS_VALUE_EXPR_P set, we cannot tell
436 without analyzing the expression whether or not its location
437 is accessible to anyone else. In the case of nested parallel
438 regions it certainly may be. */
439 if (TREE_CODE (decl) != RESULT_DECL && DECL_HAS_VALUE_EXPR_P (decl))
440 return true;
441
442 /* Do not use copy-in/copy-out for variables that have their
443 address taken. */
444 if (is_global_var (decl))
445 {
446 /* For file scope vars, track whether we've seen them as
447 non-addressable initially and in that case, keep the same
448 answer for the duration of the pass, even when they are made
449 addressable later on e.g. through reduction expansion. Global
450 variables which weren't addressable before the pass will not
451 have their privatized copies address taken. See PR91216. */
452 if (!TREE_ADDRESSABLE (decl))
453 {
454 if (!global_nonaddressable_vars)
455 global_nonaddressable_vars = BITMAP_ALLOC (NULL);
456 bitmap_set_bit (global_nonaddressable_vars, DECL_UID (decl));
457 }
458 else if (!global_nonaddressable_vars
459 || !bitmap_bit_p (global_nonaddressable_vars,
460 DECL_UID (decl)))
461 return true;
462 }
463 else if (TREE_ADDRESSABLE (decl))
464 return true;
465
466 /* lower_send_shared_vars only uses copy-in, but not copy-out
467 for these. */
468 if (TREE_READONLY (decl)
469 || ((TREE_CODE (decl) == RESULT_DECL
470 || TREE_CODE (decl) == PARM_DECL)
471 && DECL_BY_REFERENCE (decl)))
472 return false;
473
474 /* Disallow copy-in/out in nested parallel if
475 decl is shared in outer parallel, otherwise
476 each thread could store the shared variable
477 in its own copy-in location, making the
478 variable no longer really shared. */
479 if (shared_ctx->is_nested)
480 {
481 omp_context *up;
482
483 for (up = shared_ctx->outer; up; up = up->outer)
484 if ((is_taskreg_ctx (up)
485 || (gimple_code (up->stmt) == GIMPLE_OMP_TARGET
486 && is_gimple_omp_offloaded (up->stmt)))
487 && maybe_lookup_decl (decl, up))
488 break;
489
490 if (up)
491 {
492 tree c;
493
494 if (gimple_code (up->stmt) == GIMPLE_OMP_TARGET)
495 {
496 for (c = gimple_omp_target_clauses (up->stmt);
497 c; c = OMP_CLAUSE_CHAIN (c))
498 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
499 && OMP_CLAUSE_DECL (c) == decl)
500 break;
501 }
502 else
503 for (c = gimple_omp_taskreg_clauses (up->stmt);
504 c; c = OMP_CLAUSE_CHAIN (c))
505 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED
506 && OMP_CLAUSE_DECL (c) == decl)
507 break;
508
509 if (c)
510 goto maybe_mark_addressable_and_ret;
511 }
512 }
513
514 /* For tasks avoid using copy-in/out. As tasks can be
515 deferred or executed in a different thread, when GOMP_task
516 returns, the task hasn't necessarily terminated. */
517 if (is_task_ctx (shared_ctx))
518 {
519 tree outer;
520 maybe_mark_addressable_and_ret:
521 outer = maybe_lookup_decl_in_outer_ctx (decl, shared_ctx);
522 if (is_gimple_reg (outer) && !omp_member_access_dummy_var (outer))
523 {
524 /* Taking address of OUTER in lower_send_shared_vars
525 might need regimplification of everything that uses the
526 variable. */
527 if (!task_shared_vars)
528 task_shared_vars = BITMAP_ALLOC (NULL);
529 bitmap_set_bit (task_shared_vars, DECL_UID (outer));
530 TREE_ADDRESSABLE (outer) = 1;
531 }
532 return true;
533 }
534 }
535
536 return false;
537 }
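
/* A sketch of the common cases decided above: a local

     int i;
     #pragma omp parallel shared (i)

   whose address is never taken can use copy-in/copy-out (result
   false), while aggregates, addressable variables and variables
   shared into a task are passed by pointer (result true).  */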
538
539 /* Construct a new automatic decl similar to VAR. */
540
541 static tree
542 omp_copy_decl_2 (tree var, tree name, tree type, omp_context *ctx)
543 {
544 tree copy = copy_var_decl (var, name, type);
545
546 DECL_CONTEXT (copy) = current_function_decl;
547 DECL_CHAIN (copy) = ctx->block_vars;
548 /* If VAR is listed in task_shared_vars, it means it wasn't
549 originally addressable and is addressable just because a task
550 needs to take its address. The privatized copies of that var
551 don't need to have their address taken. */
552 if (TREE_ADDRESSABLE (var)
553 && ((task_shared_vars
554 && bitmap_bit_p (task_shared_vars, DECL_UID (var)))
555 || (global_nonaddressable_vars
556 && bitmap_bit_p (global_nonaddressable_vars, DECL_UID (var)))))
557 TREE_ADDRESSABLE (copy) = 0;
558 ctx->block_vars = copy;
559
560 return copy;
561 }
562
563 static tree
564 omp_copy_decl_1 (tree var, omp_context *ctx)
565 {
566 return omp_copy_decl_2 (var, DECL_NAME (var), TREE_TYPE (var), ctx);
567 }
568
569 /* Build COMPONENT_REF and set TREE_THIS_VOLATILE and TREE_READONLY on it
570 as appropriate. */
571 static tree
572 omp_build_component_ref (tree obj, tree field)
573 {
574 tree ret = build3 (COMPONENT_REF, TREE_TYPE (field), obj, field, NULL);
575 if (TREE_THIS_VOLATILE (field))
576 TREE_THIS_VOLATILE (ret) |= 1;
577 if (TREE_READONLY (field))
578 TREE_READONLY (ret) |= 1;
579 return ret;
580 }
581
582 /* Build tree nodes to access the field for VAR on the receiver side. */
583
584 static tree
585 build_receiver_ref (tree var, bool by_ref, omp_context *ctx)
586 {
587 tree x, field = lookup_field (var, ctx);
588
589 /* If the receiver record type was remapped in the child function,
590 remap the field into the new record type. */
591 x = maybe_lookup_field (field, ctx);
592 if (x != NULL)
593 field = x;
594
595 x = build_simple_mem_ref (ctx->receiver_decl);
596 TREE_THIS_NOTRAP (x) = 1;
597 x = omp_build_component_ref (x, field);
598 if (by_ref)
599 {
600 x = build_simple_mem_ref (x);
601 TREE_THIS_NOTRAP (x) = 1;
602 }
603
604 return x;
605 }
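
/* I.e. in the child function the access is roughly

     .omp_data_i->var       // !BY_REF: the value itself was sent
     *.omp_data_i->var      // BY_REF: a pointer to it was sent

   modulo the field remapping handled above (illustrative sketch).  */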
606
607 /* Build tree nodes to access VAR in the scope outer to CTX. In the case
608 of a parallel, this is a component reference; for workshare constructs
609 this is some variable. */
610
611 static tree
612 build_outer_var_ref (tree var, omp_context *ctx,
613 enum omp_clause_code code = OMP_CLAUSE_ERROR)
614 {
615 tree x;
616 omp_context *outer = ctx->outer;
617 while (outer && gimple_code (outer->stmt) == GIMPLE_OMP_TASKGROUP)
618 outer = outer->outer;
619
620 if (is_global_var (maybe_lookup_decl_in_outer_ctx (var, ctx)))
621 x = var;
622 else if (is_variable_sized (var))
623 {
624 x = TREE_OPERAND (DECL_VALUE_EXPR (var), 0);
625 x = build_outer_var_ref (x, ctx, code);
626 x = build_simple_mem_ref (x);
627 }
628 else if (is_taskreg_ctx (ctx))
629 {
630 bool by_ref = use_pointer_for_field (var, NULL);
631 x = build_receiver_ref (var, by_ref, ctx);
632 }
633 else if ((gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
634 && gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_SIMD)
635 || ctx->loop_p
636 || (code == OMP_CLAUSE_PRIVATE
637 && (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
638 || gimple_code (ctx->stmt) == GIMPLE_OMP_SECTIONS
639 || gimple_code (ctx->stmt) == GIMPLE_OMP_SINGLE)))
640 {
641 /* #pragma omp simd isn't a worksharing construct, and can reference
642 even private vars in its linear etc. clauses.
643 Similarly for OMP_CLAUSE_PRIVATE with outer ref, that can refer
644 to private vars in all worksharing constructs. */
645 x = NULL_TREE;
646 if (outer && is_taskreg_ctx (outer))
647 x = lookup_decl (var, outer);
648 else if (outer)
649 x = maybe_lookup_decl_in_outer_ctx (var, ctx);
650 if (x == NULL_TREE)
651 x = var;
652 }
653 else if (code == OMP_CLAUSE_LASTPRIVATE && is_taskloop_ctx (ctx))
654 {
655 gcc_assert (outer);
656 splay_tree_node n
657 = splay_tree_lookup (outer->field_map,
658 (splay_tree_key) &DECL_UID (var));
659 if (n == NULL)
660 {
661 if (is_global_var (maybe_lookup_decl_in_outer_ctx (var, outer)))
662 x = var;
663 else
664 x = lookup_decl (var, outer);
665 }
666 else
667 {
668 tree field = (tree) n->value;
669 /* If the receiver record type was remapped in the child function,
670 remap the field into the new record type. */
671 x = maybe_lookup_field (field, outer);
672 if (x != NULL)
673 field = x;
674
675 x = build_simple_mem_ref (outer->receiver_decl);
676 x = omp_build_component_ref (x, field);
677 if (use_pointer_for_field (var, outer))
678 x = build_simple_mem_ref (x);
679 }
680 }
681 else if (outer)
682 x = lookup_decl (var, outer);
683 else if (omp_is_reference (var))
684 /* This can happen with orphaned constructs. If var is a reference,
685 it may be shared and as such valid. */
686 x = var;
687 else if (omp_member_access_dummy_var (var))
688 x = var;
689 else
690 gcc_unreachable ();
691
692 if (x == var)
693 {
694 tree t = omp_member_access_dummy_var (var);
695 if (t)
696 {
697 x = DECL_VALUE_EXPR (var);
698 tree o = maybe_lookup_decl_in_outer_ctx (t, ctx);
699 if (o != t)
700 x = unshare_and_remap (x, t, o);
701 else
702 x = unshare_expr (x);
703 }
704 }
705
706 if (omp_is_reference (var))
707 x = build_simple_mem_ref (x);
708
709 return x;
710 }
711
712 /* Build tree nodes to access the field for VAR on the sender side. */
713
714 static tree
715 build_sender_ref (splay_tree_key key, omp_context *ctx)
716 {
717 tree field = lookup_sfield (key, ctx);
718 return omp_build_component_ref (ctx->sender_decl, field);
719 }
720
721 static tree
722 build_sender_ref (tree var, omp_context *ctx)
723 {
724 return build_sender_ref ((splay_tree_key) var, ctx);
725 }
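
/* The sender-side counterpart of build_receiver_ref: the parent
   function initializes the local .omp_data_o object, roughly

     .omp_data_o.var = var;     // or = &var when passed by reference

   before invoking the child function (illustrative sketch; the actual
   stores are emitted in lower_send_shared_vars and friends).  */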
726
727 /* Add a new field for VAR inside the structure CTX->SENDER_DECL.
728 The bits of MASK select the field maps and the lookup key used. */
729
730 static void
731 install_var_field (tree var, bool by_ref, int mask, omp_context *ctx)
732 {
733 tree field, type, sfield = NULL_TREE;
734 splay_tree_key key = (splay_tree_key) var;
735
736 if ((mask & 16) != 0)
737 {
738 key = (splay_tree_key) &DECL_NAME (var);
739 gcc_checking_assert (key != (splay_tree_key) var);
740 }
741 if ((mask & 8) != 0)
742 {
743 key = (splay_tree_key) &DECL_UID (var);
744 gcc_checking_assert (key != (splay_tree_key) var);
745 }
746 gcc_assert ((mask & 1) == 0
747 || !splay_tree_lookup (ctx->field_map, key));
748 gcc_assert ((mask & 2) == 0 || !ctx->sfield_map
749 || !splay_tree_lookup (ctx->sfield_map, key));
750 gcc_assert ((mask & 3) == 3
751 || !is_gimple_omp_oacc (ctx->stmt));
752
753 type = TREE_TYPE (var);
754 if ((mask & 16) != 0)
755 type = lang_hooks.decls.omp_array_data (var, true);
756
757 /* Prevent redeclaring the var in the split-off function with a restrict
758 pointer type. Note that we only clear the type itself; restrict
759 qualifiers in the pointed-to type will be ignored by points-to analysis. */
760 if (POINTER_TYPE_P (type)
761 && TYPE_RESTRICT (type))
762 type = build_qualified_type (type, TYPE_QUALS (type) & ~TYPE_QUAL_RESTRICT);
763
764 if (mask & 4)
765 {
766 gcc_assert (TREE_CODE (type) == ARRAY_TYPE);
767 type = build_pointer_type (build_pointer_type (type));
768 }
769 else if (by_ref)
770 type = build_pointer_type (type);
771 else if ((mask & 3) == 1 && omp_is_reference (var))
772 type = TREE_TYPE (type);
773
774 field = build_decl (DECL_SOURCE_LOCATION (var),
775 FIELD_DECL, DECL_NAME (var), type);
776
777 /* Remember what variable this field was created for. This does have a
778 side effect of making dwarf2out ignore this member, so for helpful
779 debugging we clear it later in delete_omp_context. */
780 DECL_ABSTRACT_ORIGIN (field) = var;
781 if ((mask & 16) == 0 && type == TREE_TYPE (var))
782 {
783 SET_DECL_ALIGN (field, DECL_ALIGN (var));
784 DECL_USER_ALIGN (field) = DECL_USER_ALIGN (var);
785 TREE_THIS_VOLATILE (field) = TREE_THIS_VOLATILE (var);
786 }
787 else
788 SET_DECL_ALIGN (field, TYPE_ALIGN (type));
789
790 if ((mask & 3) == 3)
791 {
792 insert_field_into_struct (ctx->record_type, field);
793 if (ctx->srecord_type)
794 {
795 sfield = build_decl (DECL_SOURCE_LOCATION (var),
796 FIELD_DECL, DECL_NAME (var), type);
797 DECL_ABSTRACT_ORIGIN (sfield) = var;
798 SET_DECL_ALIGN (sfield, DECL_ALIGN (field));
799 DECL_USER_ALIGN (sfield) = DECL_USER_ALIGN (field);
800 TREE_THIS_VOLATILE (sfield) = TREE_THIS_VOLATILE (field);
801 insert_field_into_struct (ctx->srecord_type, sfield);
802 }
803 }
804 else
805 {
806 if (ctx->srecord_type == NULL_TREE)
807 {
808 tree t;
809
810 ctx->srecord_type = lang_hooks.types.make_type (RECORD_TYPE);
811 ctx->sfield_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
812 for (t = TYPE_FIELDS (ctx->record_type); t ; t = TREE_CHAIN (t))
813 {
814 sfield = build_decl (DECL_SOURCE_LOCATION (t),
815 FIELD_DECL, DECL_NAME (t), TREE_TYPE (t));
816 DECL_ABSTRACT_ORIGIN (sfield) = DECL_ABSTRACT_ORIGIN (t);
817 insert_field_into_struct (ctx->srecord_type, sfield);
818 splay_tree_insert (ctx->sfield_map,
819 (splay_tree_key) DECL_ABSTRACT_ORIGIN (t),
820 (splay_tree_value) sfield);
821 }
822 }
823 sfield = field;
824 insert_field_into_struct ((mask & 1) ? ctx->record_type
825 : ctx->srecord_type, field);
826 }
827
828 if (mask & 1)
829 splay_tree_insert (ctx->field_map, key, (splay_tree_value) field);
830 if ((mask & 2) && ctx->sfield_map)
831 splay_tree_insert (ctx->sfield_map, key, (splay_tree_value) sfield);
832 }
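
/* A reader's summary of the MASK bits, as derived from the code above:
   1 enters the field into field_map / the receiver record_type, 2 into
   sfield_map / the sender srecord_type, 4 builds a pointer-to-pointer
   field for arrays, 8 keys the mapping off &DECL_UID (var), and 16
   keys it off &DECL_NAME (var) using the omp_array_data hook type
   (Fortran array descriptors).  E.g. the common

     install_var_field (decl, by_ref, 3, ctx);

   creates both the receiver and the sender field for DECL.  */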
833
834 static tree
835 install_var_local (tree var, omp_context *ctx)
836 {
837 tree new_var = omp_copy_decl_1 (var, ctx);
838 insert_decl_map (&ctx->cb, var, new_var);
839 return new_var;
840 }
841
842 /* Adjust the replacement for DECL in CTX for the new context. This means
843 copying the DECL_VALUE_EXPR, and fixing up the type. */
844
845 static void
846 fixup_remapped_decl (tree decl, omp_context *ctx, bool private_debug)
847 {
848 tree new_decl, size;
849
850 new_decl = lookup_decl (decl, ctx);
851
852 TREE_TYPE (new_decl) = remap_type (TREE_TYPE (decl), &ctx->cb);
853
854 if ((!TREE_CONSTANT (DECL_SIZE (new_decl)) || private_debug)
855 && DECL_HAS_VALUE_EXPR_P (decl))
856 {
857 tree ve = DECL_VALUE_EXPR (decl);
858 walk_tree (&ve, copy_tree_body_r, &ctx->cb, NULL);
859 SET_DECL_VALUE_EXPR (new_decl, ve);
860 DECL_HAS_VALUE_EXPR_P (new_decl) = 1;
861 }
862
863 if (!TREE_CONSTANT (DECL_SIZE (new_decl)))
864 {
865 size = remap_decl (DECL_SIZE (decl), &ctx->cb);
866 if (size == error_mark_node)
867 size = TYPE_SIZE (TREE_TYPE (new_decl));
868 DECL_SIZE (new_decl) = size;
869
870 size = remap_decl (DECL_SIZE_UNIT (decl), &ctx->cb);
871 if (size == error_mark_node)
872 size = TYPE_SIZE_UNIT (TREE_TYPE (new_decl));
873 DECL_SIZE_UNIT (new_decl) = size;
874 }
875 }
876
877 /* The callback for remap_decl. Search all containing contexts for a
878 mapping of the variable; this avoids having to duplicate the splay
879 tree ahead of time. We know a mapping doesn't already exist in the
880 given context. Create new mappings to implement default semantics. */
881
882 static tree
883 omp_copy_decl (tree var, copy_body_data *cb)
884 {
885 omp_context *ctx = (omp_context *) cb;
886 tree new_var;
887
888 if (TREE_CODE (var) == LABEL_DECL)
889 {
890 if (FORCED_LABEL (var) || DECL_NONLOCAL (var))
891 return var;
892 new_var = create_artificial_label (DECL_SOURCE_LOCATION (var));
893 DECL_CONTEXT (new_var) = current_function_decl;
894 insert_decl_map (&ctx->cb, var, new_var);
895 return new_var;
896 }
897
898 while (!is_taskreg_ctx (ctx))
899 {
900 ctx = ctx->outer;
901 if (ctx == NULL)
902 return var;
903 new_var = maybe_lookup_decl (var, ctx);
904 if (new_var)
905 return new_var;
906 }
907
908 if (is_global_var (var) || decl_function_context (var) != ctx->cb.src_fn)
909 return var;
910
911 return error_mark_node;
912 }
913
914 /* Create a new context, with OUTER_CTX being the surrounding context. */
915
916 static omp_context *
917 new_omp_context (gimple *stmt, omp_context *outer_ctx)
918 {
919 omp_context *ctx = XCNEW (omp_context);
920
921 splay_tree_insert (all_contexts, (splay_tree_key) stmt,
922 (splay_tree_value) ctx);
923 ctx->stmt = stmt;
924
925 if (outer_ctx)
926 {
927 ctx->outer = outer_ctx;
928 ctx->cb = outer_ctx->cb;
929 ctx->cb.block = NULL;
930 ctx->depth = outer_ctx->depth + 1;
931 }
932 else
933 {
934 ctx->cb.src_fn = current_function_decl;
935 ctx->cb.dst_fn = current_function_decl;
936 ctx->cb.src_node = cgraph_node::get (current_function_decl);
937 gcc_checking_assert (ctx->cb.src_node);
938 ctx->cb.dst_node = ctx->cb.src_node;
939 ctx->cb.src_cfun = cfun;
940 ctx->cb.copy_decl = omp_copy_decl;
941 ctx->cb.eh_lp_nr = 0;
942 ctx->cb.transform_call_graph_edges = CB_CGE_MOVE;
943 ctx->cb.adjust_array_error_bounds = true;
944 ctx->cb.dont_remap_vla_if_no_change = true;
945 ctx->depth = 1;
946 }
947
948 ctx->cb.decl_map = new hash_map<tree, tree>;
949
950 return ctx;
951 }
952
953 static gimple_seq maybe_catch_exception (gimple_seq);
954
955 /* Finalize task copyfn. */
956
957 static void
958 finalize_task_copyfn (gomp_task *task_stmt)
959 {
960 struct function *child_cfun;
961 tree child_fn;
962 gimple_seq seq = NULL, new_seq;
963 gbind *bind;
964
965 child_fn = gimple_omp_task_copy_fn (task_stmt);
966 if (child_fn == NULL_TREE)
967 return;
968
969 child_cfun = DECL_STRUCT_FUNCTION (child_fn);
970 DECL_STRUCT_FUNCTION (child_fn)->curr_properties = cfun->curr_properties;
971
972 push_cfun (child_cfun);
973 bind = gimplify_body (child_fn, false);
974 gimple_seq_add_stmt (&seq, bind);
975 new_seq = maybe_catch_exception (seq);
976 if (new_seq != seq)
977 {
978 bind = gimple_build_bind (NULL, new_seq, NULL);
979 seq = NULL;
980 gimple_seq_add_stmt (&seq, bind);
981 }
982 gimple_set_body (child_fn, seq);
983 pop_cfun ();
984
985 /* Inform the callgraph about the new function. */
986 cgraph_node *node = cgraph_node::get_create (child_fn);
987 node->parallelized_function = 1;
988 cgraph_node::add_new_function (child_fn, false);
989 }
990
991 /* Destroy an omp_context data structure. Called through the splay tree
992 value delete callback. */
993
994 static void
995 delete_omp_context (splay_tree_value value)
996 {
997 omp_context *ctx = (omp_context *) value;
998
999 delete ctx->cb.decl_map;
1000
1001 if (ctx->field_map)
1002 splay_tree_delete (ctx->field_map);
1003 if (ctx->sfield_map)
1004 splay_tree_delete (ctx->sfield_map);
1005
1006 /* We hijacked DECL_ABSTRACT_ORIGIN earlier. We need to clear it before
1007 it produces corrupt debug information. */
1008 if (ctx->record_type)
1009 {
1010 tree t;
1011 for (t = TYPE_FIELDS (ctx->record_type); t ; t = DECL_CHAIN (t))
1012 DECL_ABSTRACT_ORIGIN (t) = NULL;
1013 }
1014 if (ctx->srecord_type)
1015 {
1016 tree t;
1017 for (t = TYPE_FIELDS (ctx->srecord_type); t ; t = DECL_CHAIN (t))
1018 DECL_ABSTRACT_ORIGIN (t) = NULL;
1019 }
1020
1021 if (is_task_ctx (ctx))
1022 finalize_task_copyfn (as_a <gomp_task *> (ctx->stmt));
1023
1024 if (ctx->task_reduction_map)
1025 {
1026 ctx->task_reductions.release ();
1027 delete ctx->task_reduction_map;
1028 }
1029
1030 delete ctx->lastprivate_conditional_map;
1031
1032 XDELETE (ctx);
1033 }
1034
1035 /* Fix up RECEIVER_DECL with a type that has been remapped to the child
1036 context. */
1037
1038 static void
1039 fixup_child_record_type (omp_context *ctx)
1040 {
1041 tree f, type = ctx->record_type;
1042
1043 if (!ctx->receiver_decl)
1044 return;
1045 /* ??? It isn't sufficient to just call remap_type here, because
1046 variably_modified_type_p doesn't work the way we expect for
1047 record types. Testing each field for whether it needs remapping
1048 and creating a new record by hand works, however. */
1049 for (f = TYPE_FIELDS (type); f ; f = DECL_CHAIN (f))
1050 if (variably_modified_type_p (TREE_TYPE (f), ctx->cb.src_fn))
1051 break;
1052 if (f)
1053 {
1054 tree name, new_fields = NULL;
1055
1056 type = lang_hooks.types.make_type (RECORD_TYPE);
1057 name = DECL_NAME (TYPE_NAME (ctx->record_type));
1058 name = build_decl (DECL_SOURCE_LOCATION (ctx->receiver_decl),
1059 TYPE_DECL, name, type);
1060 TYPE_NAME (type) = name;
1061
1062 for (f = TYPE_FIELDS (ctx->record_type); f ; f = DECL_CHAIN (f))
1063 {
1064 tree new_f = copy_node (f);
1065 DECL_CONTEXT (new_f) = type;
1066 TREE_TYPE (new_f) = remap_type (TREE_TYPE (f), &ctx->cb);
1067 DECL_CHAIN (new_f) = new_fields;
1068 walk_tree (&DECL_SIZE (new_f), copy_tree_body_r, &ctx->cb, NULL);
1069 walk_tree (&DECL_SIZE_UNIT (new_f), copy_tree_body_r,
1070 &ctx->cb, NULL);
1071 walk_tree (&DECL_FIELD_OFFSET (new_f), copy_tree_body_r,
1072 &ctx->cb, NULL);
1073 new_fields = new_f;
1074
1075 /* Arrange to be able to look up the receiver field
1076 given the sender field. */
1077 splay_tree_insert (ctx->field_map, (splay_tree_key) f,
1078 (splay_tree_value) new_f);
1079 }
1080 TYPE_FIELDS (type) = nreverse (new_fields);
1081 layout_type (type);
1082 }
1083
1084 /* In a target region we never modify any of the pointers in *.omp_data_i,
1085 so attempt to help the optimizers. */
1086 if (is_gimple_omp_offloaded (ctx->stmt))
1087 type = build_qualified_type (type, TYPE_QUAL_CONST);
1088
1089 TREE_TYPE (ctx->receiver_decl)
1090 = build_qualified_type (build_reference_type (type), TYPE_QUAL_RESTRICT);
1091 }
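
/* E.g. if some field has a variably modified type such as int[n], the
   rebuild above makes its size expressions refer to the child
   function's remapped copy of n, so that the receiver record lays out
   correctly in the child (illustrative sketch).  */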
1092
1093 /* Instantiate decls as necessary in CTX to satisfy the data sharing
1094 specified by CLAUSES. */
1095
1096 static void
1097 scan_sharing_clauses (tree clauses, omp_context *ctx)
1098 {
1099 tree c, decl;
1100 bool scan_array_reductions = false;
1101
1102 for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
1103 {
1104 bool by_ref;
1105
1106 switch (OMP_CLAUSE_CODE (c))
1107 {
1108 case OMP_CLAUSE_PRIVATE:
1109 decl = OMP_CLAUSE_DECL (c);
1110 if (OMP_CLAUSE_PRIVATE_OUTER_REF (c))
1111 goto do_private;
1112 else if (!is_variable_sized (decl))
1113 install_var_local (decl, ctx);
1114 break;
1115
1116 case OMP_CLAUSE_SHARED:
1117 decl = OMP_CLAUSE_DECL (c);
1118 /* Ignore shared directives in teams construct inside of
1119 target construct. */
1120 if (gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS
1121 && !is_host_teams_ctx (ctx))
1122 {
1123 /* Global variables don't need to be copied,
1124 the receiver side will use them directly. */
1125 tree odecl = maybe_lookup_decl_in_outer_ctx (decl, ctx);
1126 if (is_global_var (odecl))
1127 break;
1128 insert_decl_map (&ctx->cb, decl, odecl);
1129 break;
1130 }
1131 gcc_assert (is_taskreg_ctx (ctx));
1132 gcc_assert (!COMPLETE_TYPE_P (TREE_TYPE (decl))
1133 || !is_variable_sized (decl));
1134 /* Global variables don't need to be copied,
1135 the receiver side will use them directly. */
1136 if (is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx)))
1137 break;
1138 if (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
1139 {
1140 use_pointer_for_field (decl, ctx);
1141 break;
1142 }
1143 by_ref = use_pointer_for_field (decl, NULL);
1144 if ((! TREE_READONLY (decl) && !OMP_CLAUSE_SHARED_READONLY (c))
1145 || TREE_ADDRESSABLE (decl)
1146 || by_ref
1147 || omp_is_reference (decl))
1148 {
1149 by_ref = use_pointer_for_field (decl, ctx);
1150 install_var_field (decl, by_ref, 3, ctx);
1151 install_var_local (decl, ctx);
1152 break;
1153 }
1154 /* We don't need to copy const scalar vars back. */
1155 OMP_CLAUSE_SET_CODE (c, OMP_CLAUSE_FIRSTPRIVATE);
1156 goto do_private;
1157
1158 case OMP_CLAUSE_REDUCTION:
1159 if (is_oacc_parallel_or_serial (ctx) || is_oacc_kernels (ctx))
1160 ctx->local_reduction_clauses
1161 = tree_cons (NULL, c, ctx->local_reduction_clauses);
1162 /* FALLTHRU */
1163
1164 case OMP_CLAUSE_IN_REDUCTION:
1165 decl = OMP_CLAUSE_DECL (c);
1166 if (TREE_CODE (decl) == MEM_REF)
1167 {
1168 tree t = TREE_OPERAND (decl, 0);
1169 if (TREE_CODE (t) == POINTER_PLUS_EXPR)
1170 t = TREE_OPERAND (t, 0);
1171 if (TREE_CODE (t) == INDIRECT_REF
1172 || TREE_CODE (t) == ADDR_EXPR)
1173 t = TREE_OPERAND (t, 0);
1174 install_var_local (t, ctx);
1175 if (is_taskreg_ctx (ctx)
1176 && (!is_global_var (maybe_lookup_decl_in_outer_ctx (t, ctx))
1177 || (is_task_ctx (ctx)
1178 && (TREE_CODE (TREE_TYPE (t)) == POINTER_TYPE
1179 || (TREE_CODE (TREE_TYPE (t)) == REFERENCE_TYPE
1180 && (TREE_CODE (TREE_TYPE (TREE_TYPE (t)))
1181 == POINTER_TYPE)))))
1182 && !is_variable_sized (t)
1183 && (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION
1184 || (!OMP_CLAUSE_REDUCTION_TASK (c)
1185 && !is_task_ctx (ctx))))
1186 {
1187 by_ref = use_pointer_for_field (t, NULL);
1188 if (is_task_ctx (ctx)
1189 && TREE_CODE (TREE_TYPE (t)) == REFERENCE_TYPE
1190 && TREE_CODE (TREE_TYPE (TREE_TYPE (t))) == POINTER_TYPE)
1191 {
1192 install_var_field (t, false, 1, ctx);
1193 install_var_field (t, by_ref, 2, ctx);
1194 }
1195 else
1196 install_var_field (t, by_ref, 3, ctx);
1197 }
1198 break;
1199 }
1200 if (is_task_ctx (ctx)
1201 || (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
1202 && OMP_CLAUSE_REDUCTION_TASK (c)
1203 && is_parallel_ctx (ctx)))
1204 {
1205 /* Global variables don't need to be copied,
1206 the receiver side will use them directly. */
1207 if (!is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx)))
1208 {
1209 by_ref = use_pointer_for_field (decl, ctx);
1210 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_IN_REDUCTION)
1211 install_var_field (decl, by_ref, 3, ctx);
1212 }
1213 install_var_local (decl, ctx);
1214 break;
1215 }
1216 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
1217 && OMP_CLAUSE_REDUCTION_TASK (c))
1218 {
1219 install_var_local (decl, ctx);
1220 break;
1221 }
1222 goto do_private;
1223
1224 case OMP_CLAUSE_LASTPRIVATE:
1225 /* Let the corresponding firstprivate clause create
1226 the variable. */
1227 if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
1228 break;
1229 /* FALLTHRU */
1230
1231 case OMP_CLAUSE_FIRSTPRIVATE:
1232 case OMP_CLAUSE_LINEAR:
1233 decl = OMP_CLAUSE_DECL (c);
1234 do_private:
1235 if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE
1236 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_IS_DEVICE_PTR)
1237 && is_gimple_omp_offloaded (ctx->stmt))
1238 {
1239 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
1240 install_var_field (decl, !omp_is_reference (decl), 3, ctx);
1241 else if (TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
1242 install_var_field (decl, true, 3, ctx);
1243 else
1244 install_var_field (decl, false, 3, ctx);
1245 }
1246 if (is_variable_sized (decl))
1247 {
1248 if (is_task_ctx (ctx))
1249 install_var_field (decl, false, 1, ctx);
1250 break;
1251 }
1252 else if (is_taskreg_ctx (ctx))
1253 {
1254 bool global
1255 = is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx));
1256 by_ref = use_pointer_for_field (decl, NULL);
1257
1258 if (is_task_ctx (ctx)
1259 && (global || by_ref || omp_is_reference (decl)))
1260 {
1261 install_var_field (decl, false, 1, ctx);
1262 if (!global)
1263 install_var_field (decl, by_ref, 2, ctx);
1264 }
1265 else if (!global)
1266 install_var_field (decl, by_ref, 3, ctx);
1267 }
1268 install_var_local (decl, ctx);
1269 break;
1270
1271 case OMP_CLAUSE_USE_DEVICE_PTR:
1272 case OMP_CLAUSE_USE_DEVICE_ADDR:
1273 decl = OMP_CLAUSE_DECL (c);
1274
1275 /* Fortran array descriptors. */
1276 if (lang_hooks.decls.omp_array_data (decl, true))
1277 install_var_field (decl, false, 19, ctx);
1278 else if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_USE_DEVICE_ADDR
1279 && !omp_is_reference (decl)
1280 && !omp_is_allocatable_or_ptr (decl))
1281 || TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
1282 install_var_field (decl, true, 11, ctx);
1283 else
1284 install_var_field (decl, false, 11, ctx);
1285 if (DECL_SIZE (decl)
1286 && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
1287 {
1288 tree decl2 = DECL_VALUE_EXPR (decl);
1289 gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
1290 decl2 = TREE_OPERAND (decl2, 0);
1291 gcc_assert (DECL_P (decl2));
1292 install_var_local (decl2, ctx);
1293 }
1294 install_var_local (decl, ctx);
1295 break;
1296
1297 case OMP_CLAUSE_IS_DEVICE_PTR:
1298 decl = OMP_CLAUSE_DECL (c);
1299 goto do_private;
1300
1301 case OMP_CLAUSE__LOOPTEMP_:
1302 case OMP_CLAUSE__REDUCTEMP_:
1303 gcc_assert (is_taskreg_ctx (ctx));
1304 decl = OMP_CLAUSE_DECL (c);
1305 install_var_field (decl, false, 3, ctx);
1306 install_var_local (decl, ctx);
1307 break;
1308
1309 case OMP_CLAUSE_COPYPRIVATE:
1310 case OMP_CLAUSE_COPYIN:
1311 decl = OMP_CLAUSE_DECL (c);
1312 by_ref = use_pointer_for_field (decl, NULL);
1313 install_var_field (decl, by_ref, 3, ctx);
1314 break;
1315
1316 case OMP_CLAUSE_FINAL:
1317 case OMP_CLAUSE_IF:
1318 case OMP_CLAUSE_NUM_THREADS:
1319 case OMP_CLAUSE_NUM_TEAMS:
1320 case OMP_CLAUSE_THREAD_LIMIT:
1321 case OMP_CLAUSE_DEVICE:
1322 case OMP_CLAUSE_SCHEDULE:
1323 case OMP_CLAUSE_DIST_SCHEDULE:
1324 case OMP_CLAUSE_DEPEND:
1325 case OMP_CLAUSE_PRIORITY:
1326 case OMP_CLAUSE_GRAINSIZE:
1327 case OMP_CLAUSE_NUM_TASKS:
1328 case OMP_CLAUSE_NUM_GANGS:
1329 case OMP_CLAUSE_NUM_WORKERS:
1330 case OMP_CLAUSE_VECTOR_LENGTH:
1331 if (ctx->outer)
1332 scan_omp_op (&OMP_CLAUSE_OPERAND (c, 0), ctx->outer);
1333 break;
1334
1335 case OMP_CLAUSE_TO:
1336 case OMP_CLAUSE_FROM:
1337 case OMP_CLAUSE_MAP:
1338 if (ctx->outer)
1339 scan_omp_op (&OMP_CLAUSE_SIZE (c), ctx->outer);
1340 decl = OMP_CLAUSE_DECL (c);
1341 /* Global variables with the "omp declare target" attribute
1342 don't need to be copied, the receiver side will use them
1343 directly. However, global variables with the "omp declare target link"
1344 attribute need to be copied, as do maps with the ALWAYS modifier. */
1345 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
1346 && DECL_P (decl)
1347 && ((OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_FIRSTPRIVATE_POINTER
1348 && (OMP_CLAUSE_MAP_KIND (c)
1349 != GOMP_MAP_FIRSTPRIVATE_REFERENCE))
1350 || TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
1351 && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_ALWAYS_TO
1352 && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_ALWAYS_FROM
1353 && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_ALWAYS_TOFROM
1354 && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_TO_PSET
1355 && is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx))
1356 && varpool_node::get_create (decl)->offloadable
1357 && !lookup_attribute ("omp declare target link",
1358 DECL_ATTRIBUTES (decl)))
1359 break;
1360 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
1361 && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_POINTER)
1362 {
1363 /* Ignore GOMP_MAP_POINTER kind for arrays in regions that are
1364 not offloaded; there is nothing to map for those. */
1365 if (!is_gimple_omp_offloaded (ctx->stmt)
1366 && !POINTER_TYPE_P (TREE_TYPE (decl))
1367 && !OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c))
1368 break;
1369 }
1370 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
1371 && (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER
1372 || (OMP_CLAUSE_MAP_KIND (c)
1373 == GOMP_MAP_FIRSTPRIVATE_REFERENCE)))
1374 {
1375 if (TREE_CODE (decl) == COMPONENT_REF
1376 || (TREE_CODE (decl) == INDIRECT_REF
1377 && TREE_CODE (TREE_OPERAND (decl, 0)) == COMPONENT_REF
1378 && (TREE_CODE (TREE_TYPE (TREE_OPERAND (decl, 0)))
1379 == REFERENCE_TYPE)))
1380 break;
1381 if (DECL_SIZE (decl)
1382 && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
1383 {
1384 tree decl2 = DECL_VALUE_EXPR (decl);
1385 gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
1386 decl2 = TREE_OPERAND (decl2, 0);
1387 gcc_assert (DECL_P (decl2));
1388 install_var_local (decl2, ctx);
1389 }
1390 install_var_local (decl, ctx);
1391 break;
1392 }
1393 if (DECL_P (decl))
1394 {
1395 if (DECL_SIZE (decl)
1396 && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
1397 {
1398 tree decl2 = DECL_VALUE_EXPR (decl);
1399 gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
1400 decl2 = TREE_OPERAND (decl2, 0);
1401 gcc_assert (DECL_P (decl2));
1402 install_var_field (decl2, true, 3, ctx);
1403 install_var_local (decl2, ctx);
1404 install_var_local (decl, ctx);
1405 }
1406 else
1407 {
1408 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
1409 && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_POINTER
1410 && !OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c)
1411 && TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
1412 install_var_field (decl, true, 7, ctx);
1413 else
1414 install_var_field (decl, true, 3, ctx);
1415 if (is_gimple_omp_offloaded (ctx->stmt)
1416 && !OMP_CLAUSE_MAP_IN_REDUCTION (c))
1417 install_var_local (decl, ctx);
1418 }
1419 }
1420 else
1421 {
1422 tree base = get_base_address (decl);
1423 tree nc = OMP_CLAUSE_CHAIN (c);
1424 if (DECL_P (base)
1425 && nc != NULL_TREE
1426 && OMP_CLAUSE_CODE (nc) == OMP_CLAUSE_MAP
1427 && OMP_CLAUSE_DECL (nc) == base
1428 && OMP_CLAUSE_MAP_KIND (nc) == GOMP_MAP_POINTER
1429 && integer_zerop (OMP_CLAUSE_SIZE (nc)))
1430 {
1431 OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c) = 1;
1432 OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (nc) = 1;
1433 }
1434 else
1435 {
1436 if (ctx->outer)
1437 {
1438 scan_omp_op (&OMP_CLAUSE_DECL (c), ctx->outer);
1439 decl = OMP_CLAUSE_DECL (c);
1440 }
1441 gcc_assert (!splay_tree_lookup (ctx->field_map,
1442 (splay_tree_key) decl));
1443 tree field
1444 = build_decl (OMP_CLAUSE_LOCATION (c),
1445 FIELD_DECL, NULL_TREE, ptr_type_node);
1446 SET_DECL_ALIGN (field, TYPE_ALIGN (ptr_type_node));
1447 insert_field_into_struct (ctx->record_type, field);
1448 splay_tree_insert (ctx->field_map, (splay_tree_key) decl,
1449 (splay_tree_value) field);
1450 }
1451 }
1452 break;
1453
1454 case OMP_CLAUSE_ORDER:
1455 ctx->order_concurrent = true;
1456 break;
1457
1458 case OMP_CLAUSE_BIND:
1459 ctx->loop_p = true;
1460 break;
1461
1462 case OMP_CLAUSE_NOWAIT:
1463 case OMP_CLAUSE_ORDERED:
1464 case OMP_CLAUSE_COLLAPSE:
1465 case OMP_CLAUSE_UNTIED:
1466 case OMP_CLAUSE_MERGEABLE:
1467 case OMP_CLAUSE_PROC_BIND:
1468 case OMP_CLAUSE_SAFELEN:
1469 case OMP_CLAUSE_SIMDLEN:
1470 case OMP_CLAUSE_THREADS:
1471 case OMP_CLAUSE_SIMD:
1472 case OMP_CLAUSE_NOGROUP:
1473 case OMP_CLAUSE_DEFAULTMAP:
1474 case OMP_CLAUSE_ASYNC:
1475 case OMP_CLAUSE_WAIT:
1476 case OMP_CLAUSE_GANG:
1477 case OMP_CLAUSE_WORKER:
1478 case OMP_CLAUSE_VECTOR:
1479 case OMP_CLAUSE_INDEPENDENT:
1480 case OMP_CLAUSE_AUTO:
1481 case OMP_CLAUSE_SEQ:
1482 case OMP_CLAUSE_TILE:
1483 case OMP_CLAUSE__SIMT_:
1484 case OMP_CLAUSE_DEFAULT:
1485 case OMP_CLAUSE_NONTEMPORAL:
1486 case OMP_CLAUSE_IF_PRESENT:
1487 case OMP_CLAUSE_FINALIZE:
1488 case OMP_CLAUSE_TASK_REDUCTION:
1489 break;
1490
1491 case OMP_CLAUSE_ALIGNED:
1492 decl = OMP_CLAUSE_DECL (c);
1493 if (is_global_var (decl)
1494 && TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
1495 install_var_local (decl, ctx);
1496 break;
1497
1498 case OMP_CLAUSE__CONDTEMP_:
1499 decl = OMP_CLAUSE_DECL (c);
1500 if (is_parallel_ctx (ctx))
1501 {
1502 install_var_field (decl, false, 3, ctx);
1503 install_var_local (decl, ctx);
1504 }
1505 else if (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
1506 && gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_SIMD
1507 && !OMP_CLAUSE__CONDTEMP__ITER (c))
1508 install_var_local (decl, ctx);
1509 break;
1510
1511 case OMP_CLAUSE__CACHE_:
1512 default:
1513 gcc_unreachable ();
1514 }
1515 }
1516
1517 for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
1518 {
1519 switch (OMP_CLAUSE_CODE (c))
1520 {
1521 case OMP_CLAUSE_LASTPRIVATE:
1522 /* Let the corresponding firstprivate clause create
1523 the variable. */
1524 if (OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c))
1525 scan_array_reductions = true;
1526 if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
1527 break;
1528 /* FALLTHRU */
1529
1530 case OMP_CLAUSE_FIRSTPRIVATE:
1531 case OMP_CLAUSE_PRIVATE:
1532 case OMP_CLAUSE_LINEAR:
1533 case OMP_CLAUSE_IS_DEVICE_PTR:
1534 decl = OMP_CLAUSE_DECL (c);
1535 if (is_variable_sized (decl))
1536 {
1537 if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE
1538 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_IS_DEVICE_PTR)
1539 && is_gimple_omp_offloaded (ctx->stmt))
1540 {
1541 tree decl2 = DECL_VALUE_EXPR (decl);
1542 gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
1543 decl2 = TREE_OPERAND (decl2, 0);
1544 gcc_assert (DECL_P (decl2));
1545 install_var_local (decl2, ctx);
1546 fixup_remapped_decl (decl2, ctx, false);
1547 }
1548 install_var_local (decl, ctx);
1549 }
1550 fixup_remapped_decl (decl, ctx,
1551 OMP_CLAUSE_CODE (c) == OMP_CLAUSE_PRIVATE
1552 && OMP_CLAUSE_PRIVATE_DEBUG (c));
1553 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
1554 && OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c))
1555 scan_array_reductions = true;
1556 break;
1557
1558 case OMP_CLAUSE_REDUCTION:
1559 case OMP_CLAUSE_IN_REDUCTION:
1560 decl = OMP_CLAUSE_DECL (c);
1561 if (TREE_CODE (decl) != MEM_REF)
1562 {
1563 if (is_variable_sized (decl))
1564 install_var_local (decl, ctx);
1565 fixup_remapped_decl (decl, ctx, false);
1566 }
1567 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
1568 scan_array_reductions = true;
1569 break;
1570
1571 case OMP_CLAUSE_TASK_REDUCTION:
1572 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
1573 scan_array_reductions = true;
1574 break;
1575
1576 case OMP_CLAUSE_SHARED:
1577 /* Ignore shared directives in teams construct inside of
1578 target construct. */
1579 if (gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS
1580 && !is_host_teams_ctx (ctx))
1581 break;
1582 decl = OMP_CLAUSE_DECL (c);
1583 if (is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx)))
1584 break;
1585 if (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
1586 {
1587 if (is_global_var (maybe_lookup_decl_in_outer_ctx (decl,
1588 ctx->outer)))
1589 break;
1590 bool by_ref = use_pointer_for_field (decl, ctx);
1591 install_var_field (decl, by_ref, 11, ctx);
1592 break;
1593 }
1594 fixup_remapped_decl (decl, ctx, false);
1595 break;
1596
1597 case OMP_CLAUSE_MAP:
1598 if (!is_gimple_omp_offloaded (ctx->stmt))
1599 break;
1600 decl = OMP_CLAUSE_DECL (c);
1601 if (DECL_P (decl)
1602 && ((OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_FIRSTPRIVATE_POINTER
1603 && (OMP_CLAUSE_MAP_KIND (c)
1604 != GOMP_MAP_FIRSTPRIVATE_REFERENCE))
1605 || TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
1606 && is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx))
1607 && varpool_node::get_create (decl)->offloadable)
1608 break;
1609 if (DECL_P (decl))
1610 {
1611 if ((OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_POINTER
1612 || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER)
1613 && TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE
1614 && !COMPLETE_TYPE_P (TREE_TYPE (decl)))
1615 {
1616 tree new_decl = lookup_decl (decl, ctx);
1617 TREE_TYPE (new_decl)
1618 = remap_type (TREE_TYPE (decl), &ctx->cb);
1619 }
1620 else if (DECL_SIZE (decl)
1621 && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
1622 {
1623 tree decl2 = DECL_VALUE_EXPR (decl);
1624 gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
1625 decl2 = TREE_OPERAND (decl2, 0);
1626 gcc_assert (DECL_P (decl2));
1627 fixup_remapped_decl (decl2, ctx, false);
1628 fixup_remapped_decl (decl, ctx, true);
1629 }
1630 else
1631 fixup_remapped_decl (decl, ctx, false);
1632 }
1633 break;
1634
1635 case OMP_CLAUSE_COPYPRIVATE:
1636 case OMP_CLAUSE_COPYIN:
1637 case OMP_CLAUSE_DEFAULT:
1638 case OMP_CLAUSE_IF:
1639 case OMP_CLAUSE_NUM_THREADS:
1640 case OMP_CLAUSE_NUM_TEAMS:
1641 case OMP_CLAUSE_THREAD_LIMIT:
1642 case OMP_CLAUSE_DEVICE:
1643 case OMP_CLAUSE_SCHEDULE:
1644 case OMP_CLAUSE_DIST_SCHEDULE:
1645 case OMP_CLAUSE_NOWAIT:
1646 case OMP_CLAUSE_ORDERED:
1647 case OMP_CLAUSE_COLLAPSE:
1648 case OMP_CLAUSE_UNTIED:
1649 case OMP_CLAUSE_FINAL:
1650 case OMP_CLAUSE_MERGEABLE:
1651 case OMP_CLAUSE_PROC_BIND:
1652 case OMP_CLAUSE_SAFELEN:
1653 case OMP_CLAUSE_SIMDLEN:
1654 case OMP_CLAUSE_ALIGNED:
1655 case OMP_CLAUSE_DEPEND:
1656 case OMP_CLAUSE__LOOPTEMP_:
1657 case OMP_CLAUSE__REDUCTEMP_:
1658 case OMP_CLAUSE_TO:
1659 case OMP_CLAUSE_FROM:
1660 case OMP_CLAUSE_PRIORITY:
1661 case OMP_CLAUSE_GRAINSIZE:
1662 case OMP_CLAUSE_NUM_TASKS:
1663 case OMP_CLAUSE_THREADS:
1664 case OMP_CLAUSE_SIMD:
1665 case OMP_CLAUSE_NOGROUP:
1666 case OMP_CLAUSE_DEFAULTMAP:
1667 case OMP_CLAUSE_ORDER:
1668 case OMP_CLAUSE_BIND:
1669 case OMP_CLAUSE_USE_DEVICE_PTR:
1670 case OMP_CLAUSE_USE_DEVICE_ADDR:
1671 case OMP_CLAUSE_NONTEMPORAL:
1672 case OMP_CLAUSE_ASYNC:
1673 case OMP_CLAUSE_WAIT:
1674 case OMP_CLAUSE_NUM_GANGS:
1675 case OMP_CLAUSE_NUM_WORKERS:
1676 case OMP_CLAUSE_VECTOR_LENGTH:
1677 case OMP_CLAUSE_GANG:
1678 case OMP_CLAUSE_WORKER:
1679 case OMP_CLAUSE_VECTOR:
1680 case OMP_CLAUSE_INDEPENDENT:
1681 case OMP_CLAUSE_AUTO:
1682 case OMP_CLAUSE_SEQ:
1683 case OMP_CLAUSE_TILE:
1684 case OMP_CLAUSE__SIMT_:
1685 case OMP_CLAUSE_IF_PRESENT:
1686 case OMP_CLAUSE_FINALIZE:
1687 case OMP_CLAUSE__CONDTEMP_:
1688 break;
1689
1690 case OMP_CLAUSE__CACHE_:
1691 default:
1692 gcc_unreachable ();
1693 }
1694 }
1695
1696 gcc_checking_assert (!scan_array_reductions
1697 || !is_gimple_omp_oacc (ctx->stmt));
1698 if (scan_array_reductions)
1699 {
1700 for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
1701 if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
1702 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_IN_REDUCTION
1703 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_TASK_REDUCTION)
1704 && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
1705 {
1706 scan_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c), ctx);
1707 scan_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c), ctx);
1708 }
1709 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
1710 && OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c))
1711 scan_omp (&OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c), ctx);
1712 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
1713 && OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c))
1714 scan_omp (&OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c), ctx);
1715 }
1716 }
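
/* As a concrete example (a sketch of the effect, not verbatim dumps),
   for

     int x, y;
     #pragma omp parallel firstprivate (x) shared (y)

   the first loop above installs a by-value field for x and, when y
   must live in memory, a pointer field for y (mask 3 in both cases),
   together with local replacement decls; the second loop then fixes
   up the remapped decls' types and value exprs.  */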
1717
1718 /* Create a new name for the omp child function. Returns an identifier. */
1719
1720 static tree
1721 create_omp_child_function_name (bool task_copy)
1722 {
1723 return clone_function_name_numbered (current_function_decl,
1724 task_copy ? "_omp_cpyfn" : "_omp_fn");
1725 }
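
/* E.g. inside a function foo this produces identifiers such as
   foo._omp_fn.0, foo._omp_fn.1, ... or foo._omp_cpyfn.4 for task copy
   functions; the counter is shared with other function clones.  */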
1726
1727 /* Return true if CTX may belong to offloaded code: either if current function
1728 is offloaded, or any enclosing context corresponds to a target region. */
1729
1730 static bool
1731 omp_maybe_offloaded_ctx (omp_context *ctx)
1732 {
1733 if (cgraph_node::get (current_function_decl)->offloadable)
1734 return true;
1735 for (; ctx; ctx = ctx->outer)
1736 if (is_gimple_omp_offloaded (ctx->stmt))
1737 return true;
1738 return false;
1739 }
1740
1741 /* Build a decl for the omp child function. It won't contain a body
1742 yet, just the bare decl. */
1743
1744 static void
1745 create_omp_child_function (omp_context *ctx, bool task_copy)
1746 {
1747 tree decl, type, name, t;
1748
1749 name = create_omp_child_function_name (task_copy);
1750 if (task_copy)
1751 type = build_function_type_list (void_type_node, ptr_type_node,
1752 ptr_type_node, NULL_TREE);
1753 else
1754 type = build_function_type_list (void_type_node, ptr_type_node, NULL_TREE);
1755
1756 decl = build_decl (gimple_location (ctx->stmt), FUNCTION_DECL, name, type);
1757
1758 gcc_checking_assert (!is_gimple_omp_oacc (ctx->stmt)
1759 || !task_copy);
1760 if (!task_copy)
1761 ctx->cb.dst_fn = decl;
1762 else
1763 gimple_omp_task_set_copy_fn (ctx->stmt, decl);
1764
1765 TREE_STATIC (decl) = 1;
1766 TREE_USED (decl) = 1;
1767 DECL_ARTIFICIAL (decl) = 1;
1768 DECL_IGNORED_P (decl) = 0;
1769 TREE_PUBLIC (decl) = 0;
1770 DECL_UNINLINABLE (decl) = 1;
1771 DECL_EXTERNAL (decl) = 0;
1772 DECL_CONTEXT (decl) = NULL_TREE;
1773 DECL_INITIAL (decl) = make_node (BLOCK);
1774 BLOCK_SUPERCONTEXT (DECL_INITIAL (decl)) = decl;
1775 DECL_ATTRIBUTES (decl) = DECL_ATTRIBUTES (current_function_decl);
1776 /* Remove omp declare simd attribute from the new attributes. */
1777 if (tree a = lookup_attribute ("omp declare simd", DECL_ATTRIBUTES (decl)))
1778 {
1779 while (tree a2 = lookup_attribute ("omp declare simd", TREE_CHAIN (a)))
1780 a = a2;
1781 a = TREE_CHAIN (a);
1782 for (tree *p = &DECL_ATTRIBUTES (decl); *p != a;)
1783 if (is_attribute_p ("omp declare simd", get_attribute_name (*p)))
1784 *p = TREE_CHAIN (*p);
1785 else
1786 {
1787 tree chain = TREE_CHAIN (*p);
1788 *p = copy_node (*p);
1789 p = &TREE_CHAIN (*p);
1790 *p = chain;
1791 }
1792 }
1793 DECL_FUNCTION_SPECIFIC_OPTIMIZATION (decl)
1794 = DECL_FUNCTION_SPECIFIC_OPTIMIZATION (current_function_decl);
1795 DECL_FUNCTION_SPECIFIC_TARGET (decl)
1796 = DECL_FUNCTION_SPECIFIC_TARGET (current_function_decl);
1797 DECL_FUNCTION_VERSIONED (decl)
1798 = DECL_FUNCTION_VERSIONED (current_function_decl);
1799
1800 if (omp_maybe_offloaded_ctx (ctx))
1801 {
1802 cgraph_node::get_create (decl)->offloadable = 1;
1803 if (ENABLE_OFFLOADING)
1804 g->have_offload = true;
1805 }
1806
1807 if (cgraph_node::get_create (decl)->offloadable
1808 && !lookup_attribute ("omp declare target",
1809 DECL_ATTRIBUTES (current_function_decl)))
1810 {
1811 const char *target_attr = (is_gimple_omp_offloaded (ctx->stmt)
1812 ? "omp target entrypoint"
1813 : "omp declare target");
1814 DECL_ATTRIBUTES (decl)
1815 = tree_cons (get_identifier (target_attr),
1816 NULL_TREE, DECL_ATTRIBUTES (decl));
1817 }
1818
1819 t = build_decl (DECL_SOURCE_LOCATION (decl),
1820 RESULT_DECL, NULL_TREE, void_type_node);
1821 DECL_ARTIFICIAL (t) = 1;
1822 DECL_IGNORED_P (t) = 1;
1823 DECL_CONTEXT (t) = decl;
1824 DECL_RESULT (decl) = t;
1825
1826 tree data_name = get_identifier (".omp_data_i");
1827 t = build_decl (DECL_SOURCE_LOCATION (decl), PARM_DECL, data_name,
1828 ptr_type_node);
1829 DECL_ARTIFICIAL (t) = 1;
1830 DECL_NAMELESS (t) = 1;
1831 DECL_ARG_TYPE (t) = ptr_type_node;
1832 DECL_CONTEXT (t) = current_function_decl;
1833 TREE_USED (t) = 1;
1834 TREE_READONLY (t) = 1;
1835 DECL_ARGUMENTS (decl) = t;
1836 if (!task_copy)
1837 ctx->receiver_decl = t;
1838 else
1839 {
1840 t = build_decl (DECL_SOURCE_LOCATION (decl),
1841 PARM_DECL, get_identifier (".omp_data_o"),
1842 ptr_type_node);
1843 DECL_ARTIFICIAL (t) = 1;
1844 DECL_NAMELESS (t) = 1;
1845 DECL_ARG_TYPE (t) = ptr_type_node;
1846 DECL_CONTEXT (t) = current_function_decl;
1847 TREE_USED (t) = 1;
1848 TREE_ADDRESSABLE (t) = 1;
1849 DECL_CHAIN (t) = DECL_ARGUMENTS (decl);
1850 DECL_ARGUMENTS (decl) = t;
1851 }
1852
1853 /* Allocate memory for the function structure. The call to
1854 allocate_struct_function (via push_struct_function) clobbers
1855 CFUN, so we need to restore it afterward. */
1856 push_struct_function (decl);
1857 cfun->function_end_locus = gimple_location (ctx->stmt);
1858 init_tree_ssa (cfun);
1859 pop_cfun ();
1860 }
1861
1862 /* Callback for walk_gimple_seq. Check if the combined parallel
1863 contains an OMP_FOR for which gimple_omp_for_combined_into_p is true. */
1864
1865 tree
1866 omp_find_combined_for (gimple_stmt_iterator *gsi_p,
1867 bool *handled_ops_p,
1868 struct walk_stmt_info *wi)
1869 {
1870 gimple *stmt = gsi_stmt (*gsi_p);
1871
1872 *handled_ops_p = true;
1873 switch (gimple_code (stmt))
1874 {
1875 WALK_SUBSTMTS;
1876
1877 case GIMPLE_OMP_FOR:
1878 if (gimple_omp_for_combined_into_p (stmt)
1879 && gimple_omp_for_kind (stmt)
1880 == *(const enum gf_mask *) (wi->info))
1881 {
1882 wi->info = stmt;
1883 return integer_zero_node;
1884 }
1885 break;
1886 default:
1887 break;
1888 }
1889 return NULL;
1890 }
1891
1892 /* Add _LOOPTEMP_/_REDUCTEMP_ clauses on OpenMP parallel or task. */
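/* As an illustrative sketch (exact counts depend on the loop): for a
   combined

	#pragma omp parallel for collapse(2) lastprivate(x)

   with a non-constant iteration count this adds _looptemp_ clauses for
   the istart/iend pair, for the count of the second loop, and one more
   for the total number of iterations; a non-collapsed combined loop
   gets just the istart/iend pair.  */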
1893
1894 static void
1895 add_taskreg_looptemp_clauses (enum gf_mask msk, gimple *stmt,
1896 omp_context *outer_ctx)
1897 {
1898 struct walk_stmt_info wi;
1899
1900 memset (&wi, 0, sizeof (wi));
1901 wi.val_only = true;
1902 wi.info = (void *) &msk;
1903 walk_gimple_seq (gimple_omp_body (stmt), omp_find_combined_for, NULL, &wi);
1904 if (wi.info != (void *) &msk)
1905 {
1906 gomp_for *for_stmt = as_a <gomp_for *> ((gimple *) wi.info);
1907 struct omp_for_data fd;
1908 omp_extract_for_data (for_stmt, &fd, NULL);
1909 /* We need two temporaries with fd.loop.v type (istart/iend)
1910 and then (fd.collapse - 1) temporaries with the same
1911 type for count2 ... countN-1 vars if not constant. */
1912 size_t count = 2, i;
1913 tree type = fd.iter_type;
1914 if (fd.collapse > 1
1915 && TREE_CODE (fd.loop.n2) != INTEGER_CST)
1916 {
1917 count += fd.collapse - 1;
1918 /* If there are lastprivate clauses on the inner
1919 GIMPLE_OMP_FOR, add one more temporary for the total number
1920 of iterations (product of count1 ... countN-1). */
1921 if (omp_find_clause (gimple_omp_for_clauses (for_stmt),
1922 OMP_CLAUSE_LASTPRIVATE))
1923 count++;
1924 else if (msk == GF_OMP_FOR_KIND_FOR
1925 && omp_find_clause (gimple_omp_parallel_clauses (stmt),
1926 OMP_CLAUSE_LASTPRIVATE))
1927 count++;
1928 }
1929 for (i = 0; i < count; i++)
1930 {
1931 tree temp = create_tmp_var (type);
1932 tree c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__LOOPTEMP_);
1933 insert_decl_map (&outer_ctx->cb, temp, temp);
1934 OMP_CLAUSE_DECL (c) = temp;
1935 OMP_CLAUSE_CHAIN (c) = gimple_omp_taskreg_clauses (stmt);
1936 gimple_omp_taskreg_set_clauses (stmt, c);
1937 }
1938 }
1939 if (msk == GF_OMP_FOR_KIND_TASKLOOP
1940 && omp_find_clause (gimple_omp_task_clauses (stmt),
1941 OMP_CLAUSE_REDUCTION))
1942 {
1943 tree type = build_pointer_type (pointer_sized_int_node);
1944 tree temp = create_tmp_var (type);
1945 tree c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__REDUCTEMP_);
1946 insert_decl_map (&outer_ctx->cb, temp, temp);
1947 OMP_CLAUSE_DECL (c) = temp;
1948 OMP_CLAUSE_CHAIN (c) = gimple_omp_task_clauses (stmt);
1949 gimple_omp_task_set_clauses (stmt, c);
1950 }
1951 }
1952
1953 /* Scan an OpenMP parallel directive. */
1954
1955 static void
1956 scan_omp_parallel (gimple_stmt_iterator *gsi, omp_context *outer_ctx)
1957 {
1958 omp_context *ctx;
1959 tree name;
1960 gomp_parallel *stmt = as_a <gomp_parallel *> (gsi_stmt (*gsi));
1961
1962 /* Ignore parallel directives with empty bodies, unless there
1963 are copyin clauses. */
1964 if (optimize > 0
1965 && empty_body_p (gimple_omp_body (stmt))
1966 && omp_find_clause (gimple_omp_parallel_clauses (stmt),
1967 OMP_CLAUSE_COPYIN) == NULL)
1968 {
1969 gsi_replace (gsi, gimple_build_nop (), false);
1970 return;
1971 }
1972
1973 if (gimple_omp_parallel_combined_p (stmt))
1974 add_taskreg_looptemp_clauses (GF_OMP_FOR_KIND_FOR, stmt, outer_ctx);
1975 for (tree c = omp_find_clause (gimple_omp_parallel_clauses (stmt),
1976 OMP_CLAUSE_REDUCTION);
1977 c; c = omp_find_clause (OMP_CLAUSE_CHAIN (c), OMP_CLAUSE_REDUCTION))
1978 if (OMP_CLAUSE_REDUCTION_TASK (c))
1979 {
1980 tree type = build_pointer_type (pointer_sized_int_node);
1981 tree temp = create_tmp_var (type);
1982 tree c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__REDUCTEMP_);
1983 if (outer_ctx)
1984 insert_decl_map (&outer_ctx->cb, temp, temp);
1985 OMP_CLAUSE_DECL (c) = temp;
1986 OMP_CLAUSE_CHAIN (c) = gimple_omp_parallel_clauses (stmt);
1987 gimple_omp_parallel_set_clauses (stmt, c);
1988 break;
1989 }
1990 else if (OMP_CLAUSE_CHAIN (c) == NULL_TREE)
1991 break;
1992
1993 ctx = new_omp_context (stmt, outer_ctx);
1994 taskreg_contexts.safe_push (ctx);
1995 if (taskreg_nesting_level > 1)
1996 ctx->is_nested = true;
1997 ctx->field_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
1998 ctx->record_type = lang_hooks.types.make_type (RECORD_TYPE);
1999 name = create_tmp_var_name (".omp_data_s");
2000 name = build_decl (gimple_location (stmt),
2001 TYPE_DECL, name, ctx->record_type);
2002 DECL_ARTIFICIAL (name) = 1;
2003 DECL_NAMELESS (name) = 1;
2004 TYPE_NAME (ctx->record_type) = name;
2005 TYPE_ARTIFICIAL (ctx->record_type) = 1;
2006 create_omp_child_function (ctx, false);
2007 gimple_omp_parallel_set_child_fn (stmt, ctx->cb.dst_fn);
2008
2009 scan_sharing_clauses (gimple_omp_parallel_clauses (stmt), ctx);
2010 scan_omp (gimple_omp_body_ptr (stmt), ctx);
2011
2012 if (TYPE_FIELDS (ctx->record_type) == NULL)
2013 ctx->record_type = ctx->receiver_decl = NULL;
2014 }
2015
2016 /* Scan an OpenMP task directive. */
2017
2018 static void
2019 scan_omp_task (gimple_stmt_iterator *gsi, omp_context *outer_ctx)
2020 {
2021 omp_context *ctx;
2022 tree name, t;
2023 gomp_task *stmt = as_a <gomp_task *> (gsi_stmt (*gsi));
2024
2025 /* Ignore task directives with empty bodies, unless they have a depend
2026 clause. */
2027 if (optimize > 0
2028 && gimple_omp_body (stmt)
2029 && empty_body_p (gimple_omp_body (stmt))
2030 && !omp_find_clause (gimple_omp_task_clauses (stmt), OMP_CLAUSE_DEPEND))
2031 {
2032 gsi_replace (gsi, gimple_build_nop (), false);
2033 return;
2034 }
2035
2036 if (gimple_omp_task_taskloop_p (stmt))
2037 add_taskreg_looptemp_clauses (GF_OMP_FOR_KIND_TASKLOOP, stmt, outer_ctx);
2038
2039 ctx = new_omp_context (stmt, outer_ctx);
2040
2041 if (gimple_omp_task_taskwait_p (stmt))
2042 {
2043 scan_sharing_clauses (gimple_omp_task_clauses (stmt), ctx);
2044 return;
2045 }
2046
2047 taskreg_contexts.safe_push (ctx);
2048 if (taskreg_nesting_level > 1)
2049 ctx->is_nested = true;
2050 ctx->field_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
2051 ctx->record_type = lang_hooks.types.make_type (RECORD_TYPE);
2052 name = create_tmp_var_name (".omp_data_s");
2053 name = build_decl (gimple_location (stmt),
2054 TYPE_DECL, name, ctx->record_type);
2055 DECL_ARTIFICIAL (name) = 1;
2056 DECL_NAMELESS (name) = 1;
2057 TYPE_NAME (ctx->record_type) = name;
2058 TYPE_ARTIFICIAL (ctx->record_type) = 1;
2059 create_omp_child_function (ctx, false);
2060 gimple_omp_task_set_child_fn (stmt, ctx->cb.dst_fn);
2061
2062 scan_sharing_clauses (gimple_omp_task_clauses (stmt), ctx);
2063
2064 if (ctx->srecord_type)
2065 {
2066 name = create_tmp_var_name (".omp_data_a");
2067 name = build_decl (gimple_location (stmt),
2068 TYPE_DECL, name, ctx->srecord_type);
2069 DECL_ARTIFICIAL (name) = 1;
2070 DECL_NAMELESS (name) = 1;
2071 TYPE_NAME (ctx->srecord_type) = name;
2072 TYPE_ARTIFICIAL (ctx->srecord_type) = 1;
2073 create_omp_child_function (ctx, true);
2074 }
2075
2076 scan_omp (gimple_omp_body_ptr (stmt), ctx);
2077
2078 if (TYPE_FIELDS (ctx->record_type) == NULL)
2079 {
2080 ctx->record_type = ctx->receiver_decl = NULL;
2081 t = build_int_cst (long_integer_type_node, 0);
2082 gimple_omp_task_set_arg_size (stmt, t);
2083 t = build_int_cst (long_integer_type_node, 1);
2084 gimple_omp_task_set_arg_align (stmt, t);
2085 }
2086 }
2087
2088 /* Helper function for finish_taskreg_scan, called through walk_tree.
2089 If maybe_lookup_decl_in_outer_ctx returns a replacement for some
2090 tree, substitute it in the expression. */
2091
2092 static tree
2093 finish_taskreg_remap (tree *tp, int *walk_subtrees, void *data)
2094 {
2095 if (VAR_P (*tp))
2096 {
2097 omp_context *ctx = (omp_context *) data;
2098 tree t = maybe_lookup_decl_in_outer_ctx (*tp, ctx);
2099 if (t != *tp)
2100 {
2101 if (DECL_HAS_VALUE_EXPR_P (t))
2102 t = unshare_expr (DECL_VALUE_EXPR (t));
2103 *tp = t;
2104 }
2105 *walk_subtrees = 0;
2106 }
2107 else if (IS_TYPE_OR_DECL_P (*tp))
2108 *walk_subtrees = 0;
2109 return NULL_TREE;
2110 }
2111
2112 /* If any decls have been made addressable during scan_omp,
2113 adjust their fields if needed, and layout record types
2114 of parallel/task constructs. */
2115
2116 static void
2117 finish_taskreg_scan (omp_context *ctx)
2118 {
2119 if (ctx->record_type == NULL_TREE)
2120 return;
2121
2122 /* If any task_shared_vars were needed, verify for all
2123 OMP_CLAUSE_SHARED clauses on GIMPLE_OMP_{PARALLEL,TASK,TEAMS}
2124 statements whether use_pointer_for_field has changed
2125 because of that. If it did, update the field types now. */
2126 if (task_shared_vars)
2127 {
2128 tree c;
2129
2130 for (c = gimple_omp_taskreg_clauses (ctx->stmt);
2131 c; c = OMP_CLAUSE_CHAIN (c))
2132 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED
2133 && !OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
2134 {
2135 tree decl = OMP_CLAUSE_DECL (c);
2136
2137 /* Global variables don't need to be copied,
2138 the receiver side will use them directly. */
2139 if (is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx)))
2140 continue;
2141 if (!bitmap_bit_p (task_shared_vars, DECL_UID (decl))
2142 || !use_pointer_for_field (decl, ctx))
2143 continue;
2144 tree field = lookup_field (decl, ctx);
2145 if (TREE_CODE (TREE_TYPE (field)) == POINTER_TYPE
2146 && TREE_TYPE (TREE_TYPE (field)) == TREE_TYPE (decl))
2147 continue;
2148 TREE_TYPE (field) = build_pointer_type (TREE_TYPE (decl));
2149 TREE_THIS_VOLATILE (field) = 0;
2150 DECL_USER_ALIGN (field) = 0;
2151 SET_DECL_ALIGN (field, TYPE_ALIGN (TREE_TYPE (field)));
2152 if (TYPE_ALIGN (ctx->record_type) < DECL_ALIGN (field))
2153 SET_TYPE_ALIGN (ctx->record_type, DECL_ALIGN (field));
2154 if (ctx->srecord_type)
2155 {
2156 tree sfield = lookup_sfield (decl, ctx);
2157 TREE_TYPE (sfield) = TREE_TYPE (field);
2158 TREE_THIS_VOLATILE (sfield) = 0;
2159 DECL_USER_ALIGN (sfield) = 0;
2160 SET_DECL_ALIGN (sfield, DECL_ALIGN (field));
2161 if (TYPE_ALIGN (ctx->srecord_type) < DECL_ALIGN (sfield))
2162 SET_TYPE_ALIGN (ctx->srecord_type, DECL_ALIGN (sfield));
2163 }
2164 }
2165 }
2166
2167 if (gimple_code (ctx->stmt) == GIMPLE_OMP_PARALLEL)
2168 {
2169 tree clauses = gimple_omp_parallel_clauses (ctx->stmt);
2170 tree c = omp_find_clause (clauses, OMP_CLAUSE__REDUCTEMP_);
2171 if (c)
2172 {
2173 /* Move the _reductemp_ clause first. GOMP_parallel_reductions
2174 expects to find it at the start of data. */
2175 tree f = lookup_field (OMP_CLAUSE_DECL (c), ctx);
2176 tree *p = &TYPE_FIELDS (ctx->record_type);
2177 while (*p)
2178 if (*p == f)
2179 {
2180 *p = DECL_CHAIN (*p);
2181 break;
2182 }
2183 else
2184 p = &DECL_CHAIN (*p);
2185 DECL_CHAIN (f) = TYPE_FIELDS (ctx->record_type);
2186 TYPE_FIELDS (ctx->record_type) = f;
2187 }
2188 layout_type (ctx->record_type);
2189 fixup_child_record_type (ctx);
2190 }
2191 else if (gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS)
2192 {
2193 layout_type (ctx->record_type);
2194 fixup_child_record_type (ctx);
2195 }
2196 else
2197 {
2198 location_t loc = gimple_location (ctx->stmt);
2199 tree *p, vla_fields = NULL_TREE, *q = &vla_fields;
2200 /* Move VLA fields to the end. */
2201 p = &TYPE_FIELDS (ctx->record_type);
2202 while (*p)
2203 if (!TYPE_SIZE_UNIT (TREE_TYPE (*p))
2204 || ! TREE_CONSTANT (TYPE_SIZE_UNIT (TREE_TYPE (*p))))
2205 {
2206 *q = *p;
2207 *p = TREE_CHAIN (*p);
2208 TREE_CHAIN (*q) = NULL_TREE;
2209 q = &TREE_CHAIN (*q);
2210 }
2211 else
2212 p = &DECL_CHAIN (*p);
2213 *p = vla_fields;
2214 if (gimple_omp_task_taskloop_p (ctx->stmt))
2215 {
2216 /* Move the fields corresponding to the first and second
2217 _looptemp_ clauses to the front. These are filled in by
2218 GOMP_taskloop and thus need to be in specific positions. */
2219 tree clauses = gimple_omp_task_clauses (ctx->stmt);
2220 tree c1 = omp_find_clause (clauses, OMP_CLAUSE__LOOPTEMP_);
2221 tree c2 = omp_find_clause (OMP_CLAUSE_CHAIN (c1),
2222 OMP_CLAUSE__LOOPTEMP_);
2223 tree c3 = omp_find_clause (clauses, OMP_CLAUSE__REDUCTEMP_);
2224 tree f1 = lookup_field (OMP_CLAUSE_DECL (c1), ctx);
2225 tree f2 = lookup_field (OMP_CLAUSE_DECL (c2), ctx);
2226 tree f3 = c3 ? lookup_field (OMP_CLAUSE_DECL (c3), ctx) : NULL_TREE;
2227 p = &TYPE_FIELDS (ctx->record_type);
2228 while (*p)
2229 if (*p == f1 || *p == f2 || *p == f3)
2230 *p = DECL_CHAIN (*p);
2231 else
2232 p = &DECL_CHAIN (*p);
2233 DECL_CHAIN (f1) = f2;
2234 if (c3)
2235 {
2236 DECL_CHAIN (f2) = f3;
2237 DECL_CHAIN (f3) = TYPE_FIELDS (ctx->record_type);
2238 }
2239 else
2240 DECL_CHAIN (f2) = TYPE_FIELDS (ctx->record_type);
2241 TYPE_FIELDS (ctx->record_type) = f1;
2242 if (ctx->srecord_type)
2243 {
2244 f1 = lookup_sfield (OMP_CLAUSE_DECL (c1), ctx);
2245 f2 = lookup_sfield (OMP_CLAUSE_DECL (c2), ctx);
2246 if (c3)
2247 f3 = lookup_sfield (OMP_CLAUSE_DECL (c3), ctx);
2248 p = &TYPE_FIELDS (ctx->srecord_type);
2249 while (*p)
2250 if (*p == f1 || *p == f2 || *p == f3)
2251 *p = DECL_CHAIN (*p);
2252 else
2253 p = &DECL_CHAIN (*p);
2254 DECL_CHAIN (f1) = f2;
2255 DECL_CHAIN (f2) = TYPE_FIELDS (ctx->srecord_type);
2256 if (c3)
2257 {
2258 DECL_CHAIN (f2) = f3;
2259 DECL_CHAIN (f3) = TYPE_FIELDS (ctx->srecord_type);
2260 }
2261 else
2262 DECL_CHAIN (f2) = TYPE_FIELDS (ctx->srecord_type);
2263 TYPE_FIELDS (ctx->srecord_type) = f1;
2264 }
2265 }
2266 layout_type (ctx->record_type);
2267 fixup_child_record_type (ctx);
2268 if (ctx->srecord_type)
2269 layout_type (ctx->srecord_type);
2270 tree t = fold_convert_loc (loc, long_integer_type_node,
2271 TYPE_SIZE_UNIT (ctx->record_type));
2272 if (TREE_CODE (t) != INTEGER_CST)
2273 {
2274 t = unshare_expr (t);
2275 walk_tree (&t, finish_taskreg_remap, ctx, NULL);
2276 }
2277 gimple_omp_task_set_arg_size (ctx->stmt, t);
2278 t = build_int_cst (long_integer_type_node,
2279 TYPE_ALIGN_UNIT (ctx->record_type));
2280 gimple_omp_task_set_arg_align (ctx->stmt, t);
2281 }
2282 }
2283
2284 /* Find the enclosing offload context. */
2285
2286 static omp_context *
2287 enclosing_target_ctx (omp_context *ctx)
2288 {
2289 for (; ctx; ctx = ctx->outer)
2290 if (gimple_code (ctx->stmt) == GIMPLE_OMP_TARGET)
2291 break;
2292
2293 return ctx;
2294 }
2295
2296 /* Return true if ctx is part of an oacc kernels region. */
2297
2298 static bool
2299 ctx_in_oacc_kernels_region (omp_context *ctx)
2300 {
2301 for (; ctx != NULL; ctx = ctx->outer)
2302 {
2303 gimple *stmt = ctx->stmt;
2304 if (gimple_code (stmt) == GIMPLE_OMP_TARGET
2305 && gimple_omp_target_kind (stmt) == GF_OMP_TARGET_KIND_OACC_KERNELS)
2306 return true;
2307 }
2308
2309 return false;
2310 }
2311
2312 /* Check the parallelism clauses inside a kernels region.
2313 Until kernels handling moves to use the same loop indirection
2314 scheme as parallel, we need to do this checking early. */
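/* For example (illustrative), within a kernels region

	#pragma acc loop gang
	  ...
	  #pragma acc loop gang

   the inner loop is rejected for reusing the gang parallelism of the
   containing loop; seq/auto conflicts are diagnosed likewise.  */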
2315
2316 static unsigned
2317 check_oacc_kernel_gwv (gomp_for *stmt, omp_context *ctx)
2318 {
2319 bool checking = true;
2320 unsigned outer_mask = 0;
2321 unsigned this_mask = 0;
2322 bool has_seq = false, has_auto = false;
2323
2324 if (ctx->outer)
2325 outer_mask = check_oacc_kernel_gwv (NULL, ctx->outer);
2326 if (!stmt)
2327 {
2328 checking = false;
2329 if (gimple_code (ctx->stmt) != GIMPLE_OMP_FOR)
2330 return outer_mask;
2331 stmt = as_a <gomp_for *> (ctx->stmt);
2332 }
2333
2334 for (tree c = gimple_omp_for_clauses (stmt); c; c = OMP_CLAUSE_CHAIN (c))
2335 {
2336 switch (OMP_CLAUSE_CODE (c))
2337 {
2338 case OMP_CLAUSE_GANG:
2339 this_mask |= GOMP_DIM_MASK (GOMP_DIM_GANG);
2340 break;
2341 case OMP_CLAUSE_WORKER:
2342 this_mask |= GOMP_DIM_MASK (GOMP_DIM_WORKER);
2343 break;
2344 case OMP_CLAUSE_VECTOR:
2345 this_mask |= GOMP_DIM_MASK (GOMP_DIM_VECTOR);
2346 break;
2347 case OMP_CLAUSE_SEQ:
2348 has_seq = true;
2349 break;
2350 case OMP_CLAUSE_AUTO:
2351 has_auto = true;
2352 break;
2353 default:
2354 break;
2355 }
2356 }
2357
2358 if (checking)
2359 {
2360 if (has_seq && (this_mask || has_auto))
2361 error_at (gimple_location (stmt), "%<seq%> overrides other"
2362 " OpenACC loop specifiers");
2363 else if (has_auto && this_mask)
2364 error_at (gimple_location (stmt), "%<auto%> conflicts with other"
2365 " OpenACC loop specifiers");
2366
2367 if (this_mask & outer_mask)
2368 error_at (gimple_location (stmt), "inner loop uses same"
2369 " OpenACC parallelism as containing loop");
2370 }
2371
2372 return outer_mask | this_mask;
2373 }
2374
2375 /* Scan a GIMPLE_OMP_FOR. */
2376
2377 static omp_context *
2378 scan_omp_for (gomp_for *stmt, omp_context *outer_ctx)
2379 {
2380 omp_context *ctx;
2381 size_t i;
2382 tree clauses = gimple_omp_for_clauses (stmt);
2383
2384 ctx = new_omp_context (stmt, outer_ctx);
2385
2386 if (is_gimple_omp_oacc (stmt))
2387 {
2388 omp_context *tgt = enclosing_target_ctx (outer_ctx);
2389
2390 if (!tgt || is_oacc_parallel_or_serial (tgt))
2391 for (tree c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
2392 {
2393 char const *check = NULL;
2394
2395 switch (OMP_CLAUSE_CODE (c))
2396 {
2397 case OMP_CLAUSE_GANG:
2398 check = "gang";
2399 break;
2400
2401 case OMP_CLAUSE_WORKER:
2402 check = "worker";
2403 break;
2404
2405 case OMP_CLAUSE_VECTOR:
2406 check = "vector";
2407 break;
2408
2409 default:
2410 break;
2411 }
2412
2413 if (check && OMP_CLAUSE_OPERAND (c, 0))
2414 error_at (gimple_location (stmt),
2415 "argument not permitted on %qs clause in"
2416 " OpenACC %<parallel%> or %<serial%>", check);
2417 }
2418
2419 if (tgt && is_oacc_kernels (tgt))
2420 {
2421 /* Strip out reductions, as they are not handled yet. */
2422 tree *prev_ptr = &clauses;
2423
2424 while (tree probe = *prev_ptr)
2425 {
2426 tree *next_ptr = &OMP_CLAUSE_CHAIN (probe);
2427
2428 if (OMP_CLAUSE_CODE (probe) == OMP_CLAUSE_REDUCTION)
2429 *prev_ptr = *next_ptr;
2430 else
2431 prev_ptr = next_ptr;
2432 }
2433
2434 gimple_omp_for_set_clauses (stmt, clauses);
2435 check_oacc_kernel_gwv (stmt, ctx);
2436 }
2437
2438 /* Collect all variables named in reductions on this loop. Ensure
2439 that, if this loop has a reduction on some variable v, and there is
2440 a reduction on v somewhere in an outer context, then there is a
2441 reduction on v on all intervening loops as well. */
2442 tree local_reduction_clauses = NULL;
2443 for (tree c = gimple_omp_for_clauses (stmt); c; c = OMP_CLAUSE_CHAIN (c))
2444 {
2445 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION)
2446 local_reduction_clauses
2447 = tree_cons (NULL, c, local_reduction_clauses);
2448 }
2449 if (ctx->outer_reduction_clauses == NULL && ctx->outer != NULL)
2450 ctx->outer_reduction_clauses
2451 = chainon (unshare_expr (ctx->outer->local_reduction_clauses),
2452 ctx->outer->outer_reduction_clauses);
2453 tree outer_reduction_clauses = ctx->outer_reduction_clauses;
2454 tree local_iter = local_reduction_clauses;
2455 for (; local_iter; local_iter = TREE_CHAIN (local_iter))
2456 {
2457 tree local_clause = TREE_VALUE (local_iter);
2458 tree local_var = OMP_CLAUSE_DECL (local_clause);
2459 tree_code local_op = OMP_CLAUSE_REDUCTION_CODE (local_clause);
2460 bool have_outer_reduction = false;
2461 tree ctx_iter = outer_reduction_clauses;
2462 for (; ctx_iter; ctx_iter = TREE_CHAIN (ctx_iter))
2463 {
2464 tree outer_clause = TREE_VALUE (ctx_iter);
2465 tree outer_var = OMP_CLAUSE_DECL (outer_clause);
2466 tree_code outer_op = OMP_CLAUSE_REDUCTION_CODE (outer_clause);
2467 if (outer_var == local_var && outer_op != local_op)
2468 {
2469 warning_at (OMP_CLAUSE_LOCATION (local_clause), 0,
2470 "conflicting reduction operations for %qE",
2471 local_var);
2472 inform (OMP_CLAUSE_LOCATION (outer_clause),
2473 "location of the previous reduction for %qE",
2474 outer_var);
2475 }
2476 if (outer_var == local_var)
2477 {
2478 have_outer_reduction = true;
2479 break;
2480 }
2481 }
2482 if (have_outer_reduction)
2483 {
2484 /* There is a reduction on outer_var both on this loop and on
2485 some enclosing loop. Walk up the context tree until such a
2486 loop with a reduction on outer_var is found, and complain
2487 about all intervening loops that do not have such a
2488 reduction. */
2489 struct omp_context *curr_loop = ctx->outer;
2490 bool found = false;
2491 while (curr_loop != NULL)
2492 {
2493 tree curr_iter = curr_loop->local_reduction_clauses;
2494 for (; curr_iter; curr_iter = TREE_CHAIN (curr_iter))
2495 {
2496 tree curr_clause = TREE_VALUE (curr_iter);
2497 tree curr_var = OMP_CLAUSE_DECL (curr_clause);
2498 if (curr_var == local_var)
2499 {
2500 found = true;
2501 break;
2502 }
2503 }
2504 if (!found)
2505 warning_at (gimple_location (curr_loop->stmt), 0,
2506 "nested loop in reduction needs "
2507 "reduction clause for %qE",
2508 local_var);
2509 else
2510 break;
2511 curr_loop = curr_loop->outer;
2512 }
2513 }
2514 }
2515 ctx->local_reduction_clauses = local_reduction_clauses;
2516 ctx->outer_reduction_clauses
2517 = chainon (unshare_expr (ctx->local_reduction_clauses),
2518 ctx->outer_reduction_clauses);
2519 }
2520
2521 scan_sharing_clauses (clauses, ctx);
2522
2523 scan_omp (gimple_omp_for_pre_body_ptr (stmt), ctx);
2524 for (i = 0; i < gimple_omp_for_collapse (stmt); i++)
2525 {
2526 scan_omp_op (gimple_omp_for_index_ptr (stmt, i), ctx);
2527 scan_omp_op (gimple_omp_for_initial_ptr (stmt, i), ctx);
2528 scan_omp_op (gimple_omp_for_final_ptr (stmt, i), ctx);
2529 scan_omp_op (gimple_omp_for_incr_ptr (stmt, i), ctx);
2530 }
2531 scan_omp (gimple_omp_body_ptr (stmt), ctx);
2532 return ctx;
2533 }
2534
2535 /* Duplicate #pragma omp simd, one for SIMT, another one for SIMD. */
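/* A sketch of the emitted dispatch (names illustrative):

	cond = IFN_GOMP_USE_SIMT ();
	if (cond != 0) goto lab1; else goto lab2;
	lab1: #pragma omp simd _simt_ ...	(the SIMT copy)
	      goto lab3;
	lab2: #pragma omp simd ...		(the original, for SIMD)
	lab3:

   The IFN_GOMP_USE_SIMT call is resolved to a constant once the
   target is known, and the dead alternative is removed.  */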
2536
2537 static void
2538 scan_omp_simd (gimple_stmt_iterator *gsi, gomp_for *stmt,
2539 omp_context *outer_ctx)
2540 {
2541 gbind *bind = gimple_build_bind (NULL, NULL, NULL);
2542 gsi_replace (gsi, bind, false);
2543 gimple_seq seq = NULL;
2544 gimple *g = gimple_build_call_internal (IFN_GOMP_USE_SIMT, 0);
2545 tree cond = create_tmp_var_raw (integer_type_node);
2546 DECL_CONTEXT (cond) = current_function_decl;
2547 DECL_SEEN_IN_BIND_EXPR_P (cond) = 1;
2548 gimple_bind_set_vars (bind, cond);
2549 gimple_call_set_lhs (g, cond);
2550 gimple_seq_add_stmt (&seq, g);
2551 tree lab1 = create_artificial_label (UNKNOWN_LOCATION);
2552 tree lab2 = create_artificial_label (UNKNOWN_LOCATION);
2553 tree lab3 = create_artificial_label (UNKNOWN_LOCATION);
2554 g = gimple_build_cond (NE_EXPR, cond, integer_zero_node, lab1, lab2);
2555 gimple_seq_add_stmt (&seq, g);
2556 g = gimple_build_label (lab1);
2557 gimple_seq_add_stmt (&seq, g);
2558 gimple_seq new_seq = copy_gimple_seq_and_replace_locals (stmt);
2559 gomp_for *new_stmt = as_a <gomp_for *> (new_seq);
2560 tree clause = build_omp_clause (gimple_location (stmt), OMP_CLAUSE__SIMT_);
2561 OMP_CLAUSE_CHAIN (clause) = gimple_omp_for_clauses (new_stmt);
2562 gimple_omp_for_set_clauses (new_stmt, clause);
2563 gimple_seq_add_stmt (&seq, new_stmt);
2564 g = gimple_build_goto (lab3);
2565 gimple_seq_add_stmt (&seq, g);
2566 g = gimple_build_label (lab2);
2567 gimple_seq_add_stmt (&seq, g);
2568 gimple_seq_add_stmt (&seq, stmt);
2569 g = gimple_build_label (lab3);
2570 gimple_seq_add_stmt (&seq, g);
2571 gimple_bind_set_body (bind, seq);
2572 update_stmt (bind);
2573 scan_omp_for (new_stmt, outer_ctx);
2574 scan_omp_for (stmt, outer_ctx)->simt_stmt = new_stmt;
2575 }
2576
2577 static tree omp_find_scan (gimple_stmt_iterator *, bool *,
2578 struct walk_stmt_info *);
2579 static omp_context *maybe_lookup_ctx (gimple *);
2580
2581 /* Duplicate #pragma omp simd, one for the scan input phase loop and one
2582 for the scan phase loop. */
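/* E.g. (illustrative), a loop such as

	#pragma omp simd reduction (inscan, +:r)
	for (...) { r += a[i]; #pragma omp scan inclusive (r) b[i] = r; }

   is duplicated here: the original becomes the input phase loop and
   the copy the scan phase loop, each wrapped in a GIMPLE_OMP_SCAN
   region.  */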
2583
2584 static void
2585 scan_omp_simd_scan (gimple_stmt_iterator *gsi, gomp_for *stmt,
2586 omp_context *outer_ctx)
2587 {
2588 /* The only change between inclusive and exclusive scan will be
2589 within the first simd loop, so just use inclusive in the
2590 worksharing loop. */
2591 outer_ctx->scan_inclusive = true;
2592 tree c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE_INCLUSIVE);
2593 OMP_CLAUSE_DECL (c) = integer_zero_node;
2594
2595 gomp_scan *input_stmt = gimple_build_omp_scan (NULL, NULL_TREE);
2596 gomp_scan *scan_stmt = gimple_build_omp_scan (NULL, c);
2597 gsi_replace (gsi, input_stmt, false);
2598 gimple_seq input_body = NULL;
2599 gimple_seq_add_stmt (&input_body, stmt);
2600 gsi_insert_after (gsi, scan_stmt, GSI_NEW_STMT);
2601
2602 gimple_stmt_iterator input1_gsi = gsi_none ();
2603 struct walk_stmt_info wi;
2604 memset (&wi, 0, sizeof (wi));
2605 wi.val_only = true;
2606 wi.info = (void *) &input1_gsi;
2607 walk_gimple_seq_mod (gimple_omp_body_ptr (stmt), omp_find_scan, NULL, &wi);
2608 gcc_assert (!gsi_end_p (input1_gsi));
2609
2610 gimple *input_stmt1 = gsi_stmt (input1_gsi);
2611 gsi_next (&input1_gsi);
2612 gimple *scan_stmt1 = gsi_stmt (input1_gsi);
2613 gcc_assert (scan_stmt1 && gimple_code (scan_stmt1) == GIMPLE_OMP_SCAN);
2614 c = gimple_omp_scan_clauses (as_a <gomp_scan *> (scan_stmt1));
2615 if (c && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_EXCLUSIVE)
2616 std::swap (input_stmt1, scan_stmt1);
2617
2618 gimple_seq input_body1 = gimple_omp_body (input_stmt1);
2619 gimple_omp_set_body (input_stmt1, NULL);
2620
2621 gimple_seq scan_body = copy_gimple_seq_and_replace_locals (stmt);
2622 gomp_for *new_stmt = as_a <gomp_for *> (scan_body);
2623
2624 gimple_omp_set_body (input_stmt1, input_body1);
2625 gimple_omp_set_body (scan_stmt1, NULL);
2626
2627 gimple_stmt_iterator input2_gsi = gsi_none ();
2628 memset (&wi, 0, sizeof (wi));
2629 wi.val_only = true;
2630 wi.info = (void *) &input2_gsi;
2631 walk_gimple_seq_mod (gimple_omp_body_ptr (new_stmt), omp_find_scan,
2632 NULL, &wi);
2633 gcc_assert (!gsi_end_p (input2_gsi));
2634
2635 gimple *input_stmt2 = gsi_stmt (input2_gsi);
2636 gsi_next (&input2_gsi);
2637 gimple *scan_stmt2 = gsi_stmt (input2_gsi);
2638 gcc_assert (scan_stmt2 && gimple_code (scan_stmt2) == GIMPLE_OMP_SCAN);
2639 if (c && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_EXCLUSIVE)
2640 std::swap (input_stmt2, scan_stmt2);
2641
2642 gimple_omp_set_body (input_stmt2, NULL);
2643
2644 gimple_omp_set_body (input_stmt, input_body);
2645 gimple_omp_set_body (scan_stmt, scan_body);
2646
2647 omp_context *ctx = new_omp_context (input_stmt, outer_ctx);
2648 scan_omp (gimple_omp_body_ptr (input_stmt), ctx);
2649
2650 ctx = new_omp_context (scan_stmt, outer_ctx);
2651 scan_omp (gimple_omp_body_ptr (scan_stmt), ctx);
2652
2653 maybe_lookup_ctx (new_stmt)->for_simd_scan_phase = true;
2654 }
2655
2656 /* Scan an OpenMP sections directive. */
2657
2658 static void
2659 scan_omp_sections (gomp_sections *stmt, omp_context *outer_ctx)
2660 {
2661 omp_context *ctx;
2662
2663 ctx = new_omp_context (stmt, outer_ctx);
2664 scan_sharing_clauses (gimple_omp_sections_clauses (stmt), ctx);
2665 scan_omp (gimple_omp_body_ptr (stmt), ctx);
2666 }
2667
2668 /* Scan an OpenMP single directive. */
2669
2670 static void
2671 scan_omp_single (gomp_single *stmt, omp_context *outer_ctx)
2672 {
2673 omp_context *ctx;
2674 tree name;
2675
2676 ctx = new_omp_context (stmt, outer_ctx);
2677 ctx->field_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
2678 ctx->record_type = lang_hooks.types.make_type (RECORD_TYPE);
2679 name = create_tmp_var_name (".omp_copy_s");
2680 name = build_decl (gimple_location (stmt),
2681 TYPE_DECL, name, ctx->record_type);
2682 TYPE_NAME (ctx->record_type) = name;
2683
2684 scan_sharing_clauses (gimple_omp_single_clauses (stmt), ctx);
2685 scan_omp (gimple_omp_body_ptr (stmt), ctx);
2686
2687 if (TYPE_FIELDS (ctx->record_type) == NULL)
2688 ctx->record_type = NULL;
2689 else
2690 layout_type (ctx->record_type);
2691 }
2692
2693 /* Scan a GIMPLE_OMP_TARGET. */
2694
2695 static void
2696 scan_omp_target (gomp_target *stmt, omp_context *outer_ctx)
2697 {
2698 omp_context *ctx;
2699 tree name;
2700 bool offloaded = is_gimple_omp_offloaded (stmt);
2701 tree clauses = gimple_omp_target_clauses (stmt);
2702
2703 ctx = new_omp_context (stmt, outer_ctx);
2704 ctx->field_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
2705 ctx->record_type = lang_hooks.types.make_type (RECORD_TYPE);
2706 name = create_tmp_var_name (".omp_data_t");
2707 name = build_decl (gimple_location (stmt),
2708 TYPE_DECL, name, ctx->record_type);
2709 DECL_ARTIFICIAL (name) = 1;
2710 DECL_NAMELESS (name) = 1;
2711 TYPE_NAME (ctx->record_type) = name;
2712 TYPE_ARTIFICIAL (ctx->record_type) = 1;
2713
2714 if (offloaded)
2715 {
2716 create_omp_child_function (ctx, false);
2717 gimple_omp_target_set_child_fn (stmt, ctx->cb.dst_fn);
2718 }
2719
2720 scan_sharing_clauses (clauses, ctx);
2721 scan_omp (gimple_omp_body_ptr (stmt), ctx);
2722
2723 if (TYPE_FIELDS (ctx->record_type) == NULL)
2724 ctx->record_type = ctx->receiver_decl = NULL;
2725 else
2726 {
2727 TYPE_FIELDS (ctx->record_type)
2728 = nreverse (TYPE_FIELDS (ctx->record_type));
2729 if (flag_checking)
2730 {
2731 unsigned int align = DECL_ALIGN (TYPE_FIELDS (ctx->record_type));
2732 for (tree field = TYPE_FIELDS (ctx->record_type);
2733 field;
2734 field = DECL_CHAIN (field))
2735 gcc_assert (DECL_ALIGN (field) == align);
2736 }
2737 layout_type (ctx->record_type);
2738 if (offloaded)
2739 fixup_child_record_type (ctx);
2740 }
2741 }
2742
2743 /* Scan an OpenMP teams directive. */
2744
2745 static void
2746 scan_omp_teams (gomp_teams *stmt, omp_context *outer_ctx)
2747 {
2748 omp_context *ctx = new_omp_context (stmt, outer_ctx);
2749
2750 if (!gimple_omp_teams_host (stmt))
2751 {
2752 scan_sharing_clauses (gimple_omp_teams_clauses (stmt), ctx);
2753 scan_omp (gimple_omp_body_ptr (stmt), ctx);
2754 return;
2755 }
2756 taskreg_contexts.safe_push (ctx);
2757 gcc_assert (taskreg_nesting_level == 1);
2758 ctx->field_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
2759 ctx->record_type = lang_hooks.types.make_type (RECORD_TYPE);
2760 tree name = create_tmp_var_name (".omp_data_s");
2761 name = build_decl (gimple_location (stmt),
2762 TYPE_DECL, name, ctx->record_type);
2763 DECL_ARTIFICIAL (name) = 1;
2764 DECL_NAMELESS (name) = 1;
2765 TYPE_NAME (ctx->record_type) = name;
2766 TYPE_ARTIFICIAL (ctx->record_type) = 1;
2767 create_omp_child_function (ctx, false);
2768 gimple_omp_teams_set_child_fn (stmt, ctx->cb.dst_fn);
2769
2770 scan_sharing_clauses (gimple_omp_teams_clauses (stmt), ctx);
2771 scan_omp (gimple_omp_body_ptr (stmt), ctx);
2772
2773 if (TYPE_FIELDS (ctx->record_type) == NULL)
2774 ctx->record_type = ctx->receiver_decl = NULL;
2775 }
2776
2777 /* Check nesting restrictions. */
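/* For example (illustrative), the checks below reject

	#pragma omp simd
	for (i = 0; i < n; i++)
	  {
	    #pragma omp for
	    ...
	  }

   because only ordered simd, simd, loop and atomic constructs may be
   nested inside a simd region.  */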
2778 static bool
2779 check_omp_nesting_restrictions (gimple *stmt, omp_context *ctx)
2780 {
2781 tree c;
2782
2783 /* No nesting of non-OpenACC STMT (that is, an OpenMP one, or a GOMP builtin)
2784 inside an OpenACC CTX. */
2785 if (!(is_gimple_omp (stmt)
2786 && is_gimple_omp_oacc (stmt))
2787 /* Except for atomic codes that we share with OpenMP. */
2788 && !(gimple_code (stmt) == GIMPLE_OMP_ATOMIC_LOAD
2789 || gimple_code (stmt) == GIMPLE_OMP_ATOMIC_STORE))
2790 {
2791 if (oacc_get_fn_attrib (cfun->decl) != NULL)
2792 {
2793 error_at (gimple_location (stmt),
2794 "non-OpenACC construct inside of OpenACC routine");
2795 return false;
2796 }
2797 else
2798 for (omp_context *octx = ctx; octx != NULL; octx = octx->outer)
2799 if (is_gimple_omp (octx->stmt)
2800 && is_gimple_omp_oacc (octx->stmt))
2801 {
2802 error_at (gimple_location (stmt),
2803 "non-OpenACC construct inside of OpenACC region");
2804 return false;
2805 }
2806 }
2807
2808 if (ctx != NULL)
2809 {
2810 if (gimple_code (ctx->stmt) == GIMPLE_OMP_SCAN
2811 && ctx->outer
2812 && gimple_code (ctx->outer->stmt) == GIMPLE_OMP_FOR)
2813 ctx = ctx->outer;
2814 if (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
2815 && gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_SIMD
2816 && !ctx->loop_p)
2817 {
2818 c = NULL_TREE;
2819 if (ctx->order_concurrent
2820 && (gimple_code (stmt) == GIMPLE_OMP_ORDERED
2821 || gimple_code (stmt) == GIMPLE_OMP_ATOMIC_LOAD
2822 || gimple_code (stmt) == GIMPLE_OMP_ATOMIC_STORE))
2823 {
2824 error_at (gimple_location (stmt),
2825 "OpenMP constructs other than %<parallel%>, %<loop%>"
2826 " or %<simd%> may not be nested inside a region with"
2827 " the %<order(concurrent)%> clause");
2828 return false;
2829 }
2830 if (gimple_code (stmt) == GIMPLE_OMP_ORDERED)
2831 {
2832 c = gimple_omp_ordered_clauses (as_a <gomp_ordered *> (stmt));
2833 if (omp_find_clause (c, OMP_CLAUSE_SIMD))
2834 {
2835 if (omp_find_clause (c, OMP_CLAUSE_THREADS)
2836 && (ctx->outer == NULL
2837 || !gimple_omp_for_combined_into_p (ctx->stmt)
2838 || gimple_code (ctx->outer->stmt) != GIMPLE_OMP_FOR
2839 || (gimple_omp_for_kind (ctx->outer->stmt)
2840 != GF_OMP_FOR_KIND_FOR)
2841 || !gimple_omp_for_combined_p (ctx->outer->stmt)))
2842 {
2843 error_at (gimple_location (stmt),
2844 "%<ordered simd threads%> must be closely "
2845 "nested inside of %<for simd%> region");
2846 return false;
2847 }
2848 return true;
2849 }
2850 }
2851 else if (gimple_code (stmt) == GIMPLE_OMP_ATOMIC_LOAD
2852 || gimple_code (stmt) == GIMPLE_OMP_ATOMIC_STORE
2853 || gimple_code (stmt) == GIMPLE_OMP_SCAN)
2854 return true;
2855 else if (gimple_code (stmt) == GIMPLE_OMP_FOR
2856 && gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_SIMD)
2857 return true;
2858 error_at (gimple_location (stmt),
2859 "OpenMP constructs other than "
2860 "%<ordered simd%>, %<simd%>, %<loop%> or %<atomic%> may "
2861 "not be nested inside %<simd%> region");
2862 return false;
2863 }
2864 else if (gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS)
2865 {
2866 if ((gimple_code (stmt) != GIMPLE_OMP_FOR
2867 || (gimple_omp_for_kind (stmt) != GF_OMP_FOR_KIND_DISTRIBUTE
2868 && omp_find_clause (gimple_omp_for_clauses (stmt),
2869 OMP_CLAUSE_BIND) == NULL_TREE))
2870 && gimple_code (stmt) != GIMPLE_OMP_PARALLEL)
2871 {
2872 error_at (gimple_location (stmt),
2873 "only %<distribute%>, %<parallel%> or %<loop%> "
2874 "regions are allowed to be strictly nested inside "
2875 "%<teams%> region");
2876 return false;
2877 }
2878 }
2879 else if (ctx->order_concurrent
2880 && gimple_code (stmt) != GIMPLE_OMP_PARALLEL
2881 && (gimple_code (stmt) != GIMPLE_OMP_FOR
2882 || gimple_omp_for_kind (stmt) != GF_OMP_FOR_KIND_SIMD)
2883 && gimple_code (stmt) != GIMPLE_OMP_SCAN)
2884 {
2885 if (ctx->loop_p)
2886 error_at (gimple_location (stmt),
2887 "OpenMP constructs other than %<parallel%>, %<loop%> or "
2888 "%<simd%> may not be nested inside a %<loop%> region");
2889 else
2890 error_at (gimple_location (stmt),
2891 "OpenMP constructs other than %<parallel%>, %<loop%> or "
2892 "%<simd%> may not be nested inside a region with "
2893 "the %<order(concurrent)%> clause");
2894 return false;
2895 }
2896 }
2897 switch (gimple_code (stmt))
2898 {
2899 case GIMPLE_OMP_FOR:
2900 if (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_SIMD)
2901 return true;
2902 if (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_DISTRIBUTE)
2903 {
2904 if (ctx != NULL && gimple_code (ctx->stmt) != GIMPLE_OMP_TEAMS)
2905 {
2906 error_at (gimple_location (stmt),
2907 "%<distribute%> region must be strictly nested "
2908 "inside %<teams%> construct");
2909 return false;
2910 }
2911 return true;
2912 }
2913 /* We split taskloop into a task with a nested taskloop in it. */
2914 if (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_TASKLOOP)
2915 return true;
2916 /* For now, hope this will change and loop bind(parallel) will not
2917 be allowed in lots of contexts. */
2918 if (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_FOR
2919 && omp_find_clause (gimple_omp_for_clauses (stmt), OMP_CLAUSE_BIND))
2920 return true;
2921 if (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_OACC_LOOP)
2922 {
2923 bool ok = false;
2924
2925 if (ctx)
2926 switch (gimple_code (ctx->stmt))
2927 {
2928 case GIMPLE_OMP_FOR:
2929 ok = (gimple_omp_for_kind (ctx->stmt)
2930 == GF_OMP_FOR_KIND_OACC_LOOP);
2931 break;
2932
2933 case GIMPLE_OMP_TARGET:
2934 switch (gimple_omp_target_kind (ctx->stmt))
2935 {
2936 case GF_OMP_TARGET_KIND_OACC_PARALLEL:
2937 case GF_OMP_TARGET_KIND_OACC_KERNELS:
2938 case GF_OMP_TARGET_KIND_OACC_SERIAL:
2939 ok = true;
2940 break;
2941
2942 default:
2943 break;
2944 }
2945
2946 default:
2947 break;
2948 }
2949 else if (oacc_get_fn_attrib (current_function_decl))
2950 ok = true;
2951 if (!ok)
2952 {
2953 error_at (gimple_location (stmt),
2954 "OpenACC loop directive must be associated with"
2955 " an OpenACC compute region");
2956 return false;
2957 }
2958 }
2959 /* FALLTHRU */
2960 case GIMPLE_CALL:
2961 if (is_gimple_call (stmt)
2962 && (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
2963 == BUILT_IN_GOMP_CANCEL
2964 || DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
2965 == BUILT_IN_GOMP_CANCELLATION_POINT))
2966 {
2967 const char *bad = NULL;
2968 const char *kind = NULL;
2969 const char *construct
2970 = (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
2971 == BUILT_IN_GOMP_CANCEL)
2972 ? "cancel"
2973 : "cancellation point";
2974 if (ctx == NULL)
2975 {
2976 error_at (gimple_location (stmt), "orphaned %qs construct",
2977 construct);
2978 return false;
2979 }
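/* The first call argument selects the construct being cancelled;
   the values 1, 2, 4 and 8 correspond to GOMP_CANCEL_PARALLEL,
   GOMP_CANCEL_LOOP, GOMP_CANCEL_SECTIONS and GOMP_CANCEL_TASKGROUP
   from gomp-constants.h.  */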
2980 switch (tree_fits_shwi_p (gimple_call_arg (stmt, 0))
2981 ? tree_to_shwi (gimple_call_arg (stmt, 0))
2982 : 0)
2983 {
2984 case 1:
2985 if (gimple_code (ctx->stmt) != GIMPLE_OMP_PARALLEL)
2986 bad = "parallel";
2987 else if (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
2988 == BUILT_IN_GOMP_CANCEL
2989 && !integer_zerop (gimple_call_arg (stmt, 1)))
2990 ctx->cancellable = true;
2991 kind = "parallel";
2992 break;
2993 case 2:
2994 if (gimple_code (ctx->stmt) != GIMPLE_OMP_FOR
2995 || gimple_omp_for_kind (ctx->stmt) != GF_OMP_FOR_KIND_FOR)
2996 bad = "for";
2997 else if (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
2998 == BUILT_IN_GOMP_CANCEL
2999 && !integer_zerop (gimple_call_arg (stmt, 1)))
3000 {
3001 ctx->cancellable = true;
3002 if (omp_find_clause (gimple_omp_for_clauses (ctx->stmt),
3003 OMP_CLAUSE_NOWAIT))
3004 warning_at (gimple_location (stmt), 0,
3005 "%<cancel for%> inside "
3006 "%<nowait%> for construct");
3007 if (omp_find_clause (gimple_omp_for_clauses (ctx->stmt),
3008 OMP_CLAUSE_ORDERED))
3009 warning_at (gimple_location (stmt), 0,
3010 "%<cancel for%> inside "
3011 "%<ordered%> for construct");
3012 }
3013 kind = "for";
3014 break;
3015 case 4:
3016 if (gimple_code (ctx->stmt) != GIMPLE_OMP_SECTIONS
3017 && gimple_code (ctx->stmt) != GIMPLE_OMP_SECTION)
3018 bad = "sections";
3019 else if (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
3020 == BUILT_IN_GOMP_CANCEL
3021 && !integer_zerop (gimple_call_arg (stmt, 1)))
3022 {
3023 if (gimple_code (ctx->stmt) == GIMPLE_OMP_SECTIONS)
3024 {
3025 ctx->cancellable = true;
3026 if (omp_find_clause (gimple_omp_sections_clauses
3027 (ctx->stmt),
3028 OMP_CLAUSE_NOWAIT))
3029 warning_at (gimple_location (stmt), 0,
3030 "%<cancel sections%> inside "
3031 "%<nowait%> sections construct");
3032 }
3033 else
3034 {
3035 gcc_assert (ctx->outer
3036 && gimple_code (ctx->outer->stmt)
3037 == GIMPLE_OMP_SECTIONS);
3038 ctx->outer->cancellable = true;
3039 if (omp_find_clause (gimple_omp_sections_clauses
3040 (ctx->outer->stmt),
3041 OMP_CLAUSE_NOWAIT))
3042 warning_at (gimple_location (stmt), 0,
3043 "%<cancel sections%> inside "
3044 "%<nowait%> sections construct");
3045 }
3046 }
3047 kind = "sections";
3048 break;
3049 case 8:
3050 if (!is_task_ctx (ctx)
3051 && (!is_taskloop_ctx (ctx)
3052 || ctx->outer == NULL
3053 || !is_task_ctx (ctx->outer)))
3054 bad = "task";
3055 else
3056 {
3057 for (omp_context *octx = ctx->outer;
3058 octx; octx = octx->outer)
3059 {
3060 switch (gimple_code (octx->stmt))
3061 {
3062 case GIMPLE_OMP_TASKGROUP:
3063 break;
3064 case GIMPLE_OMP_TARGET:
3065 if (gimple_omp_target_kind (octx->stmt)
3066 != GF_OMP_TARGET_KIND_REGION)
3067 continue;
3068 /* FALLTHRU */
3069 case GIMPLE_OMP_PARALLEL:
3070 case GIMPLE_OMP_TEAMS:
3071 error_at (gimple_location (stmt),
3072 "%<%s taskgroup%> construct not closely "
3073 "nested inside of %<taskgroup%> region",
3074 construct);
3075 return false;
3076 case GIMPLE_OMP_TASK:
3077 if (gimple_omp_task_taskloop_p (octx->stmt)
3078 && octx->outer
3079 && is_taskloop_ctx (octx->outer))
3080 {
3081 tree clauses
3082 = gimple_omp_for_clauses (octx->outer->stmt);
3083 if (!omp_find_clause (clauses, OMP_CLAUSE_NOGROUP))
3084 break;
3085 }
3086 continue;
3087 default:
3088 continue;
3089 }
3090 break;
3091 }
3092 ctx->cancellable = true;
3093 }
3094 kind = "taskgroup";
3095 break;
3096 default:
3097 error_at (gimple_location (stmt), "invalid arguments");
3098 return false;
3099 }
3100 if (bad)
3101 {
3102 error_at (gimple_location (stmt),
3103 "%<%s %s%> construct not closely nested inside of %qs",
3104 construct, kind, bad);
3105 return false;
3106 }
3107 }
3108 /* FALLTHRU */
3109 case GIMPLE_OMP_SECTIONS:
3110 case GIMPLE_OMP_SINGLE:
3111 for (; ctx != NULL; ctx = ctx->outer)
3112 switch (gimple_code (ctx->stmt))
3113 {
3114 case GIMPLE_OMP_FOR:
3115 if (gimple_omp_for_kind (ctx->stmt) != GF_OMP_FOR_KIND_FOR
3116 && gimple_omp_for_kind (ctx->stmt) != GF_OMP_FOR_KIND_TASKLOOP)
3117 break;
3118 /* FALLTHRU */
3119 case GIMPLE_OMP_SECTIONS:
3120 case GIMPLE_OMP_SINGLE:
3121 case GIMPLE_OMP_ORDERED:
3122 case GIMPLE_OMP_MASTER:
3123 case GIMPLE_OMP_TASK:
3124 case GIMPLE_OMP_CRITICAL:
3125 if (is_gimple_call (stmt))
3126 {
3127 if (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
3128 != BUILT_IN_GOMP_BARRIER)
3129 return true;
3130 error_at (gimple_location (stmt),
3131 "barrier region may not be closely nested inside "
3132 "of work-sharing, %<loop%>, %<critical%>, "
3133 "%<ordered%>, %<master%>, explicit %<task%> or "
3134 "%<taskloop%> region");
3135 return false;
3136 }
3137 error_at (gimple_location (stmt),
3138 "work-sharing region may not be closely nested inside "
3139 "of work-sharing, %<loop%>, %<critical%>, %<ordered%>, "
3140 "%<master%>, explicit %<task%> or %<taskloop%> region");
3141 return false;
3142 case GIMPLE_OMP_PARALLEL:
3143 case GIMPLE_OMP_TEAMS:
3144 return true;
3145 case GIMPLE_OMP_TARGET:
3146 if (gimple_omp_target_kind (ctx->stmt)
3147 == GF_OMP_TARGET_KIND_REGION)
3148 return true;
3149 break;
3150 default:
3151 break;
3152 }
3153 break;
3154 case GIMPLE_OMP_MASTER:
3155 for (; ctx != NULL; ctx = ctx->outer)
3156 switch (gimple_code (ctx->stmt))
3157 {
3158 case GIMPLE_OMP_FOR:
3159 if (gimple_omp_for_kind (ctx->stmt) != GF_OMP_FOR_KIND_FOR
3160 && gimple_omp_for_kind (ctx->stmt) != GF_OMP_FOR_KIND_TASKLOOP)
3161 break;
3162 /* FALLTHRU */
3163 case GIMPLE_OMP_SECTIONS:
3164 case GIMPLE_OMP_SINGLE:
3165 case GIMPLE_OMP_TASK:
3166 error_at (gimple_location (stmt),
3167 "%<master%> region may not be closely nested inside "
3168 "of work-sharing, %<loop%>, explicit %<task%> or "
3169 "%<taskloop%> region");
3170 return false;
3171 case GIMPLE_OMP_PARALLEL:
3172 case GIMPLE_OMP_TEAMS:
3173 return true;
3174 case GIMPLE_OMP_TARGET:
3175 if (gimple_omp_target_kind (ctx->stmt)
3176 == GF_OMP_TARGET_KIND_REGION)
3177 return true;
3178 break;
3179 default:
3180 break;
3181 }
3182 break;
3183 case GIMPLE_OMP_TASK:
3184 for (c = gimple_omp_task_clauses (stmt); c; c = OMP_CLAUSE_CHAIN (c))
3185 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND
3186 && (OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SOURCE
3187 || OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SINK))
3188 {
3189 enum omp_clause_depend_kind kind = OMP_CLAUSE_DEPEND_KIND (c);
3190 error_at (OMP_CLAUSE_LOCATION (c),
3191 "%<depend(%s)%> is only allowed in %<omp ordered%>",
3192 kind == OMP_CLAUSE_DEPEND_SOURCE ? "source" : "sink");
3193 return false;
3194 }
3195 break;
3196 case GIMPLE_OMP_ORDERED:
3197 for (c = gimple_omp_ordered_clauses (as_a <gomp_ordered *> (stmt));
3198 c; c = OMP_CLAUSE_CHAIN (c))
3199 {
3200 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_DEPEND)
3201 {
3202 gcc_assert (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_THREADS
3203 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SIMD);
3204 continue;
3205 }
3206 enum omp_clause_depend_kind kind = OMP_CLAUSE_DEPEND_KIND (c);
3207 if (kind == OMP_CLAUSE_DEPEND_SOURCE
3208 || kind == OMP_CLAUSE_DEPEND_SINK)
3209 {
3210 tree oclause;
3211 /* Look for a containing ordered(N) loop. */
3212 if (ctx == NULL
3213 || gimple_code (ctx->stmt) != GIMPLE_OMP_FOR
3214 || (oclause
3215 = omp_find_clause (gimple_omp_for_clauses (ctx->stmt),
3216 OMP_CLAUSE_ORDERED)) == NULL_TREE)
3217 {
3218 error_at (OMP_CLAUSE_LOCATION (c),
3219 "%<ordered%> construct with %<depend%> clause "
3220 "must be closely nested inside an %<ordered%> "
3221 "loop");
3222 return false;
3223 }
3224 else if (OMP_CLAUSE_ORDERED_EXPR (oclause) == NULL_TREE)
3225 {
3226 error_at (OMP_CLAUSE_LOCATION (c),
3227 "%<ordered%> construct with %<depend%> clause "
3228 "must be closely nested inside a loop with "
3229 "%<ordered%> clause with a parameter");
3230 return false;
3231 }
3232 }
3233 else
3234 {
3235 error_at (OMP_CLAUSE_LOCATION (c),
3236 "invalid depend kind in omp %<ordered%> %<depend%>");
3237 return false;
3238 }
3239 }
3240 c = gimple_omp_ordered_clauses (as_a <gomp_ordered *> (stmt));
3241 if (omp_find_clause (c, OMP_CLAUSE_SIMD))
3242 {
3243 /* ordered simd must be closely nested inside of a simd region,
3244 and a simd region must not encounter constructs other than
3245 ordered simd; therefore ordered simd may be either orphaned,
3246 or ctx->stmt must be simd. The latter case has already been
3247 handled earlier. */
3248 if (ctx != NULL)
3249 {
3250 error_at (gimple_location (stmt),
3251 "%<ordered%> %<simd%> must be closely nested inside "
3252 "%<simd%> region");
3253 return false;
3254 }
3255 }
3256 for (; ctx != NULL; ctx = ctx->outer)
3257 switch (gimple_code (ctx->stmt))
3258 {
3259 case GIMPLE_OMP_CRITICAL:
3260 case GIMPLE_OMP_TASK:
3261 case GIMPLE_OMP_ORDERED:
3262 ordered_in_taskloop:
3263 error_at (gimple_location (stmt),
3264 "%<ordered%> region may not be closely nested inside "
3265 "of %<critical%>, %<ordered%>, explicit %<task%> or "
3266 "%<taskloop%> region");
3267 return false;
3268 case GIMPLE_OMP_FOR:
3269 if (gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_TASKLOOP)
3270 goto ordered_in_taskloop;
3271 tree o;
3272 o = omp_find_clause (gimple_omp_for_clauses (ctx->stmt),
3273 OMP_CLAUSE_ORDERED);
3274 if (o == NULL)
3275 {
3276 error_at (gimple_location (stmt),
3277 "%<ordered%> region must be closely nested inside "
3278 "a loop region with an %<ordered%> clause");
3279 return false;
3280 }
3281 if (OMP_CLAUSE_ORDERED_EXPR (o) != NULL_TREE
3282 && omp_find_clause (c, OMP_CLAUSE_DEPEND) == NULL_TREE)
3283 {
3284 error_at (gimple_location (stmt),
3285 "%<ordered%> region without %<depend%> clause may "
3286 "not be closely nested inside a loop region with "
3287 "an %<ordered%> clause with a parameter");
3288 return false;
3289 }
3290 return true;
3291 case GIMPLE_OMP_TARGET:
3292 if (gimple_omp_target_kind (ctx->stmt)
3293 != GF_OMP_TARGET_KIND_REGION)
3294 break;
3295 /* FALLTHRU */
3296 case GIMPLE_OMP_PARALLEL:
3297 case GIMPLE_OMP_TEAMS:
3298 error_at (gimple_location (stmt),
3299 "%<ordered%> region must be closely nested inside "
3300 "a loop region with an %<ordered%> clause");
3301 return false;
3302 default:
3303 break;
3304 }
3305 break;
3306 case GIMPLE_OMP_CRITICAL:
3307 {
3308 tree this_stmt_name
3309 = gimple_omp_critical_name (as_a <gomp_critical *> (stmt));
3310 for (; ctx != NULL; ctx = ctx->outer)
3311 if (gomp_critical *other_crit
3312 = dyn_cast <gomp_critical *> (ctx->stmt))
3313 if (this_stmt_name == gimple_omp_critical_name (other_crit))
3314 {
3315 error_at (gimple_location (stmt),
3316 "%<critical%> region may not be nested inside "
3317 "a %<critical%> region with the same name");
3318 return false;
3319 }
3320 }
3321 break;
3322 case GIMPLE_OMP_TEAMS:
3323 if (ctx == NULL)
3324 break;
3325 else if (gimple_code (ctx->stmt) != GIMPLE_OMP_TARGET
3326 || (gimple_omp_target_kind (ctx->stmt)
3327 != GF_OMP_TARGET_KIND_REGION))
3328 {
3329 /* A teams construct can appear either strictly nested inside of a
3330 target construct with no intervening stmts, or can be encountered
3331 only by an initial task (so it must not appear inside any OpenMP
3332 construct). */
3333 error_at (gimple_location (stmt),
3334 "%<teams%> construct must be closely nested inside of "
3335 "%<target%> construct or not nested in any OpenMP "
3336 "construct");
3337 return false;
3338 }
3339 break;
3340 case GIMPLE_OMP_TARGET:
3341 for (c = gimple_omp_target_clauses (stmt); c; c = OMP_CLAUSE_CHAIN (c))
3342 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND
3343 && (OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SOURCE
3344 || OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SINK))
3345 {
3346 enum omp_clause_depend_kind kind = OMP_CLAUSE_DEPEND_KIND (c);
3347 error_at (OMP_CLAUSE_LOCATION (c),
3348 "%<depend(%s)%> is only allowed in %<omp ordered%>",
3349 kind == OMP_CLAUSE_DEPEND_SOURCE ? "source" : "sink");
3350 return false;
3351 }
3352 if (is_gimple_omp_offloaded (stmt)
3353 && oacc_get_fn_attrib (cfun->decl) != NULL)
3354 {
3355 error_at (gimple_location (stmt),
3356 "OpenACC region inside of OpenACC routine, nested "
3357 "parallelism not supported yet");
3358 return false;
3359 }
3360 for (; ctx != NULL; ctx = ctx->outer)
3361 {
3362 if (gimple_code (ctx->stmt) != GIMPLE_OMP_TARGET)
3363 {
3364 if (is_gimple_omp (stmt)
3365 && is_gimple_omp_oacc (stmt)
3366 && is_gimple_omp (ctx->stmt))
3367 {
3368 error_at (gimple_location (stmt),
3369 "OpenACC construct inside of non-OpenACC region");
3370 return false;
3371 }
3372 continue;
3373 }
3374
3375 const char *stmt_name, *ctx_stmt_name;
3376 switch (gimple_omp_target_kind (stmt))
3377 {
3378 case GF_OMP_TARGET_KIND_REGION: stmt_name = "target"; break;
3379 case GF_OMP_TARGET_KIND_DATA: stmt_name = "target data"; break;
3380 case GF_OMP_TARGET_KIND_UPDATE: stmt_name = "target update"; break;
3381 case GF_OMP_TARGET_KIND_ENTER_DATA:
3382 stmt_name = "target enter data"; break;
3383 case GF_OMP_TARGET_KIND_EXIT_DATA:
3384 stmt_name = "target exit data"; break;
3385 case GF_OMP_TARGET_KIND_OACC_PARALLEL: stmt_name = "parallel"; break;
3386 case GF_OMP_TARGET_KIND_OACC_KERNELS: stmt_name = "kernels"; break;
3387 case GF_OMP_TARGET_KIND_OACC_SERIAL: stmt_name = "serial"; break;
3388 case GF_OMP_TARGET_KIND_OACC_DATA: stmt_name = "data"; break;
3389 case GF_OMP_TARGET_KIND_OACC_UPDATE: stmt_name = "update"; break;
3390 case GF_OMP_TARGET_KIND_OACC_ENTER_EXIT_DATA:
3391 stmt_name = "enter/exit data"; break;
3392 case GF_OMP_TARGET_KIND_OACC_DECLARE: stmt_name = "declare"; break;
3393 case GF_OMP_TARGET_KIND_OACC_HOST_DATA: stmt_name = "host_data";
3394 break;
3395 default: gcc_unreachable ();
3396 }
3397 switch (gimple_omp_target_kind (ctx->stmt))
3398 {
3399 case GF_OMP_TARGET_KIND_REGION: ctx_stmt_name = "target"; break;
3400 case GF_OMP_TARGET_KIND_DATA: ctx_stmt_name = "target data"; break;
3401 case GF_OMP_TARGET_KIND_OACC_PARALLEL:
3402 ctx_stmt_name = "parallel"; break;
3403 case GF_OMP_TARGET_KIND_OACC_KERNELS:
3404 ctx_stmt_name = "kernels"; break;
3405 case GF_OMP_TARGET_KIND_OACC_SERIAL:
3406 ctx_stmt_name = "serial"; break;
3407 case GF_OMP_TARGET_KIND_OACC_DATA: ctx_stmt_name = "data"; break;
3408 case GF_OMP_TARGET_KIND_OACC_HOST_DATA:
3409 ctx_stmt_name = "host_data"; break;
3410 default: gcc_unreachable ();
3411 }
3412
3413 /* OpenACC/OpenMP mismatch? */
3414 if (is_gimple_omp_oacc (stmt)
3415 != is_gimple_omp_oacc (ctx->stmt))
3416 {
3417 error_at (gimple_location (stmt),
3418 "%s %qs construct inside of %s %qs region",
3419 (is_gimple_omp_oacc (stmt)
3420 ? "OpenACC" : "OpenMP"), stmt_name,
3421 (is_gimple_omp_oacc (ctx->stmt)
3422 ? "OpenACC" : "OpenMP"), ctx_stmt_name);
3423 return false;
3424 }
3425 if (is_gimple_omp_offloaded (ctx->stmt))
3426 {
3427 /* No GIMPLE_OMP_TARGET inside offloaded OpenACC CTX. */
3428 if (is_gimple_omp_oacc (ctx->stmt))
3429 {
3430 error_at (gimple_location (stmt),
3431 "%qs construct inside of %qs region",
3432 stmt_name, ctx_stmt_name);
3433 return false;
3434 }
3435 else
3436 {
3437 warning_at (gimple_location (stmt), 0,
3438 "%qs construct inside of %qs region",
3439 stmt_name, ctx_stmt_name);
3440 }
3441 }
3442 }
3443 break;
3444 default:
3445 break;
3446 }
3447 return true;
3448 }
3449
3450
3451 /* Helper function for scan_omp.
3452
3453 Callback for walk_tree or operators in walk_gimple_stmt used to
3454 scan for OMP directives in TP. */
3455
3456 static tree
3457 scan_omp_1_op (tree *tp, int *walk_subtrees, void *data)
3458 {
3459 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
3460 omp_context *ctx = (omp_context *) wi->info;
3461 tree t = *tp;
3462
3463 switch (TREE_CODE (t))
3464 {
3465 case VAR_DECL:
3466 case PARM_DECL:
3467 case LABEL_DECL:
3468 case RESULT_DECL:
3469 if (ctx)
3470 {
3471 tree repl = remap_decl (t, &ctx->cb);
3472 gcc_checking_assert (TREE_CODE (repl) != ERROR_MARK);
3473 *tp = repl;
3474 }
3475 break;
3476
3477 default:
3478 if (ctx && TYPE_P (t))
3479 *tp = remap_type (t, &ctx->cb);
3480 else if (!DECL_P (t))
3481 {
3482 *walk_subtrees = 1;
3483 if (ctx)
3484 {
3485 tree tem = remap_type (TREE_TYPE (t), &ctx->cb);
3486 if (tem != TREE_TYPE (t))
3487 {
3488 if (TREE_CODE (t) == INTEGER_CST)
3489 *tp = wide_int_to_tree (tem, wi::to_wide (t));
3490 else
3491 TREE_TYPE (t) = tem;
3492 }
3493 }
3494 }
3495 break;
3496 }
3497
3498 return NULL_TREE;
3499 }
3500
3501 /* Return true if FNDECL is a setjmp or a longjmp. */
3502
3503 static bool
3504 setjmp_or_longjmp_p (const_tree fndecl)
3505 {
3506 if (fndecl_built_in_p (fndecl, BUILT_IN_SETJMP)
3507 || fndecl_built_in_p (fndecl, BUILT_IN_LONGJMP))
3508 return true;
3509
3510 tree declname = DECL_NAME (fndecl);
3511 if (!declname
3512 || (DECL_CONTEXT (fndecl) != NULL_TREE
3513 && TREE_CODE (DECL_CONTEXT (fndecl)) != TRANSLATION_UNIT_DECL)
3514 || !TREE_PUBLIC (fndecl))
3515 return false;
3516
3517 const char *name = IDENTIFIER_POINTER (declname);
3518 return !strcmp (name, "setjmp") || !strcmp (name, "longjmp");
3519 }
3520
3521 /* Return true if FNDECL is an omp_* runtime API call. */
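/* E.g. this matches omp_get_num_threads and its Fortran variant
   omp_get_num_threads_, and for names in the last section of the
   table below also the 8-byte-integer variant, e.g.
   omp_get_team_size_8_ (examples only; the table is authoritative).  */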
3522
3523 static bool
3524 omp_runtime_api_call (const_tree fndecl)
3525 {
3526 tree declname = DECL_NAME (fndecl);
3527 if (!declname
3528 || (DECL_CONTEXT (fndecl) != NULL_TREE
3529 && TREE_CODE (DECL_CONTEXT (fndecl)) != TRANSLATION_UNIT_DECL)
3530 || !TREE_PUBLIC (fndecl))
3531 return false;
3532
3533 const char *name = IDENTIFIER_POINTER (declname);
3534 if (strncmp (name, "omp_", 4) != 0)
3535 return false;
3536
3537 static const char *omp_runtime_apis[] =
3538 {
3539 /* This array has 3 sections. First omp_* calls that don't
3540 have any suffixes. */
3541 "target_alloc",
3542 "target_associate_ptr",
3543 "target_disassociate_ptr",
3544 "target_free",
3545 "target_is_present",
3546 "target_memcpy",
3547 "target_memcpy_rect",
3548 NULL,
3549 /* Now omp_* calls that are available as omp_* and omp_*_. */
3550 "capture_affinity",
3551 "destroy_lock",
3552 "destroy_nest_lock",
3553 "display_affinity",
3554 "get_active_level",
3555 "get_affinity_format",
3556 "get_cancellation",
3557 "get_default_device",
3558 "get_dynamic",
3559 "get_initial_device",
3560 "get_level",
3561 "get_max_active_levels",
3562 "get_max_task_priority",
3563 "get_max_threads",
3564 "get_nested",
3565 "get_num_devices",
3566 "get_num_places",
3567 "get_num_procs",
3568 "get_num_teams",
3569 "get_num_threads",
3570 "get_partition_num_places",
3571 "get_place_num",
3572 "get_proc_bind",
3573 "get_team_num",
3574 "get_thread_limit",
3575 "get_thread_num",
3576 "get_wtick",
3577 "get_wtime",
3578 "in_final",
3579 "in_parallel",
3580 "init_lock",
3581 "init_nest_lock",
3582 "is_initial_device",
3583 "pause_resource",
3584 "pause_resource_all",
3585 "set_affinity_format",
3586 "set_lock",
3587 "set_nest_lock",
3588 "test_lock",
3589 "test_nest_lock",
3590 "unset_lock",
3591 "unset_nest_lock",
3592 NULL,
3593 /* And finally calls available as omp_*, omp_*_ and omp_*_8_. */
3594 "get_ancestor_thread_num",
3595 "get_partition_place_nums",
3596 "get_place_num_procs",
3597 "get_place_proc_ids",
3598 "get_schedule",
3599 "get_team_size",
3600 "set_default_device",
3601 "set_dynamic",
3602 "set_max_active_levels",
3603 "set_nested",
3604 "set_num_threads",
3605 "set_schedule"
3606 };
3607
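  /* MODE below selects which suffix forms are accepted: 0 allows only
     the bare name, 1 additionally allows a trailing "_" (the Fortran
     form), and 2 also allows "_8_".  E.g. "omp_get_team_size",
     "omp_get_team_size_" and "omp_get_team_size_8_" all match, while
     "omp_target_alloc_" does not.  */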
3608 int mode = 0;
3609 for (unsigned i = 0; i < ARRAY_SIZE (omp_runtime_apis); i++)
3610 {
3611 if (omp_runtime_apis[i] == NULL)
3612 {
3613 mode++;
3614 continue;
3615 }
3616 size_t len = strlen (omp_runtime_apis[i]);
3617 if (strncmp (name + 4, omp_runtime_apis[i], len) == 0
3618 && (name[4 + len] == '\0'
3619 || (mode > 0
3620 && name[4 + len] == '_'
3621 && (name[4 + len + 1] == '\0'
3622 || (mode > 1
3623 && strcmp (name + 4 + len + 1, "8_") == 0)))))
3624 return true;
3625 }
3626 return false;
3627 }
3628
3629 /* Helper function for scan_omp.
3630
3631 Callback for walk_gimple_stmt used to scan for OMP directives in
3632 the current statement in GSI. */
3633
3634 static tree
3635 scan_omp_1_stmt (gimple_stmt_iterator *gsi, bool *handled_ops_p,
3636 struct walk_stmt_info *wi)
3637 {
3638 gimple *stmt = gsi_stmt (*gsi);
3639 omp_context *ctx = (omp_context *) wi->info;
3640
3641 if (gimple_has_location (stmt))
3642 input_location = gimple_location (stmt);
3643
3644 /* Check the nesting restrictions. */
3645 bool remove = false;
3646 if (is_gimple_omp (stmt))
3647 remove = !check_omp_nesting_restrictions (stmt, ctx);
3648 else if (is_gimple_call (stmt))
3649 {
3650 tree fndecl = gimple_call_fndecl (stmt);
3651 if (fndecl)
3652 {
3653 if (ctx
3654 && gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
3655 && gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_SIMD
3656 && setjmp_or_longjmp_p (fndecl)
3657 && !ctx->loop_p)
3658 {
3659 remove = true;
3660 error_at (gimple_location (stmt),
3661 "setjmp/longjmp inside %<simd%> construct");
3662 }
3663 else if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
3664 switch (DECL_FUNCTION_CODE (fndecl))
3665 {
3666 case BUILT_IN_GOMP_BARRIER:
3667 case BUILT_IN_GOMP_CANCEL:
3668 case BUILT_IN_GOMP_CANCELLATION_POINT:
3669 case BUILT_IN_GOMP_TASKYIELD:
3670 case BUILT_IN_GOMP_TASKWAIT:
3671 case BUILT_IN_GOMP_TASKGROUP_START:
3672 case BUILT_IN_GOMP_TASKGROUP_END:
3673 remove = !check_omp_nesting_restrictions (stmt, ctx);
3674 break;
3675 default:
3676 break;
3677 }
3678 else if (ctx)
3679 {
3680 omp_context *octx = ctx;
3681 if (gimple_code (ctx->stmt) == GIMPLE_OMP_SCAN && ctx->outer)
3682 octx = ctx->outer;
3683 if (octx->order_concurrent && omp_runtime_api_call (fndecl))
3684 {
3685 remove = true;
3686 error_at (gimple_location (stmt),
3687 "OpenMP runtime API call %qD in a region with "
3688 "%<order(concurrent)%> clause", fndecl);
3689 }
3690 }
3691 }
3692 }
3693 if (remove)
3694 {
3695 stmt = gimple_build_nop ();
3696 gsi_replace (gsi, stmt, false);
3697 }
3698
3699 *handled_ops_p = true;
3700
3701 switch (gimple_code (stmt))
3702 {
3703 case GIMPLE_OMP_PARALLEL:
3704 taskreg_nesting_level++;
3705 scan_omp_parallel (gsi, ctx);
3706 taskreg_nesting_level--;
3707 break;
3708
3709 case GIMPLE_OMP_TASK:
3710 taskreg_nesting_level++;
3711 scan_omp_task (gsi, ctx);
3712 taskreg_nesting_level--;
3713 break;
3714
3715 case GIMPLE_OMP_FOR:
3716 if ((gimple_omp_for_kind (as_a <gomp_for *> (stmt))
3717 == GF_OMP_FOR_KIND_SIMD)
3718 && gimple_omp_for_combined_into_p (stmt)
3719 && gimple_code (ctx->stmt) != GIMPLE_OMP_SCAN)
3720 {
3721 tree clauses = gimple_omp_for_clauses (as_a <gomp_for *> (stmt));
3722 tree c = omp_find_clause (clauses, OMP_CLAUSE_REDUCTION);
3723 if (c && OMP_CLAUSE_REDUCTION_INSCAN (c) && !seen_error ())
3724 {
3725 scan_omp_simd_scan (gsi, as_a <gomp_for *> (stmt), ctx);
3726 break;
3727 }
3728 }
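      /* If the simd loop may end up offloaded and the target supports
	 SIMT, scan_omp_simd arranges for both a SIMT and a non-SIMT
	 variant of the loop, with the choice made later; otherwise scan
	 it as a regular worksharing loop.  */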
3729 if ((gimple_omp_for_kind (as_a <gomp_for *> (stmt))
3730 == GF_OMP_FOR_KIND_SIMD)
3731 && omp_maybe_offloaded_ctx (ctx)
3732 && omp_max_simt_vf ()
3733 && gimple_omp_for_collapse (stmt) == 1)
3734 scan_omp_simd (gsi, as_a <gomp_for *> (stmt), ctx);
3735 else
3736 scan_omp_for (as_a <gomp_for *> (stmt), ctx);
3737 break;
3738
3739 case GIMPLE_OMP_SECTIONS:
3740 scan_omp_sections (as_a <gomp_sections *> (stmt), ctx);
3741 break;
3742
3743 case GIMPLE_OMP_SINGLE:
3744 scan_omp_single (as_a <gomp_single *> (stmt), ctx);
3745 break;
3746
3747 case GIMPLE_OMP_SCAN:
3748 if (tree clauses = gimple_omp_scan_clauses (as_a <gomp_scan *> (stmt)))
3749 {
3750 if (OMP_CLAUSE_CODE (clauses) == OMP_CLAUSE_INCLUSIVE)
3751 ctx->scan_inclusive = true;
3752 else if (OMP_CLAUSE_CODE (clauses) == OMP_CLAUSE_EXCLUSIVE)
3753 ctx->scan_exclusive = true;
3754 }
3755 /* FALLTHRU */
3756 case GIMPLE_OMP_SECTION:
3757 case GIMPLE_OMP_MASTER:
3758 case GIMPLE_OMP_ORDERED:
3759 case GIMPLE_OMP_CRITICAL:
3760 ctx = new_omp_context (stmt, ctx);
3761 scan_omp (gimple_omp_body_ptr (stmt), ctx);
3762 break;
3763
3764 case GIMPLE_OMP_TASKGROUP:
3765 ctx = new_omp_context (stmt, ctx);
3766 scan_sharing_clauses (gimple_omp_taskgroup_clauses (stmt), ctx);
3767 scan_omp (gimple_omp_body_ptr (stmt), ctx);
3768 break;
3769
3770 case GIMPLE_OMP_TARGET:
3771 if (is_gimple_omp_offloaded (stmt))
3772 {
3773 taskreg_nesting_level++;
3774 scan_omp_target (as_a <gomp_target *> (stmt), ctx);
3775 taskreg_nesting_level--;
3776 }
3777 else
3778 scan_omp_target (as_a <gomp_target *> (stmt), ctx);
3779 break;
3780
3781 case GIMPLE_OMP_TEAMS:
3782 if (gimple_omp_teams_host (as_a <gomp_teams *> (stmt)))
3783 {
3784 taskreg_nesting_level++;
3785 scan_omp_teams (as_a <gomp_teams *> (stmt), ctx);
3786 taskreg_nesting_level--;
3787 }
3788 else
3789 scan_omp_teams (as_a <gomp_teams *> (stmt), ctx);
3790 break;
3791
3792 case GIMPLE_BIND:
3793 {
3794 tree var;
3795
3796 *handled_ops_p = false;
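	  /* Map the bind's local variables to themselves, so that
	     remap_decl in scan_omp_1_op leaves them alone.  */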
3797 if (ctx)
3798 for (var = gimple_bind_vars (as_a <gbind *> (stmt));
3799 var ;
3800 var = DECL_CHAIN (var))
3801 insert_decl_map (&ctx->cb, var, var);
3802 }
3803 break;
3804 default:
3805 *handled_ops_p = false;
3806 break;
3807 }
3808
3809 return NULL_TREE;
3810 }
3811
3812
3813 /* Scan all the statements starting at the current statement. CTX
3814 contains context information about the OMP directives and
3815 clauses found during the scan. */
3816
3817 static void
3818 scan_omp (gimple_seq *body_p, omp_context *ctx)
3819 {
3820 location_t saved_location;
3821 struct walk_stmt_info wi;
3822
3823 memset (&wi, 0, sizeof (wi));
3824 wi.info = ctx;
3825 wi.want_locations = true;
3826
3827 saved_location = input_location;
3828 walk_gimple_seq_mod (body_p, scan_omp_1_stmt, scan_omp_1_op, &wi);
3829 input_location = saved_location;
3830 }
3831 \f
3832 /* Re-gimplification and code generation routines. */
3833
3834 /* Remove omp_member_access_dummy_var variables from gimple_bind_vars
3835 of BIND if in a method. */
3836
3837 static void
3838 maybe_remove_omp_member_access_dummy_vars (gbind *bind)
3839 {
3840 if (DECL_ARGUMENTS (current_function_decl)
3841 && DECL_ARTIFICIAL (DECL_ARGUMENTS (current_function_decl))
3842 && (TREE_CODE (TREE_TYPE (DECL_ARGUMENTS (current_function_decl)))
3843 == POINTER_TYPE))
3844 {
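      /* An artificial first argument of pointer type is the "this"
	 parameter, i.e. the current function is a method.  */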
3845 tree vars = gimple_bind_vars (bind);
3846 for (tree *pvar = &vars; *pvar; )
3847 if (omp_member_access_dummy_var (*pvar))
3848 *pvar = DECL_CHAIN (*pvar);
3849 else
3850 pvar = &DECL_CHAIN (*pvar);
3851 gimple_bind_set_vars (bind, vars);
3852 }
3853 }
3854
3855 /* Remove omp_member_access_dummy_var variables from BLOCK_VARS of
3856    BLOCK and its subblocks.  */
3857
3858 static void
3859 remove_member_access_dummy_vars (tree block)
3860 {
3861 for (tree *pvar = &BLOCK_VARS (block); *pvar; )
3862 if (omp_member_access_dummy_var (*pvar))
3863 *pvar = DECL_CHAIN (*pvar);
3864 else
3865 pvar = &DECL_CHAIN (*pvar);
3866
3867 for (block = BLOCK_SUBBLOCKS (block); block; block = BLOCK_CHAIN (block))
3868 remove_member_access_dummy_vars (block);
3869 }
3870
3871 /* If a context was created for STMT when it was scanned, return it. */
3872
3873 static omp_context *
3874 maybe_lookup_ctx (gimple *stmt)
3875 {
3876 splay_tree_node n;
3877 n = splay_tree_lookup (all_contexts, (splay_tree_key) stmt);
3878 return n ? (omp_context *) n->value : NULL;
3879 }
3880
3881
3882 /* Find the mapping for DECL in CTX or the immediately enclosing
3883 context that has a mapping for DECL.
3884
3885 If CTX is a nested parallel directive, we may have to use the decl
3886 mappings created in CTX's parent context. Suppose that we have the
3887    following parallel nesting (variable UIDs shown for clarity):
3888
3889 iD.1562 = 0;
3890 #omp parallel shared(iD.1562) -> outer parallel
3891 iD.1562 = iD.1562 + 1;
3892
3893 #omp parallel shared (iD.1562) -> inner parallel
3894 iD.1562 = iD.1562 - 1;
3895
3896 Each parallel structure will create a distinct .omp_data_s structure
3897 for copying iD.1562 in/out of the directive:
3898
3899 outer parallel .omp_data_s.1.i -> iD.1562
3900 inner parallel .omp_data_s.2.i -> iD.1562
3901
3902 A shared variable mapping will produce a copy-out operation before
3903 the parallel directive and a copy-in operation after it. So, in
3904 this case we would have:
3905
3906 iD.1562 = 0;
3907 .omp_data_o.1.i = iD.1562;
3908 #omp parallel shared(iD.1562) -> outer parallel
3909 .omp_data_i.1 = &.omp_data_o.1
3910 .omp_data_i.1->i = .omp_data_i.1->i + 1;
3911
3912 .omp_data_o.2.i = iD.1562; -> **
3913 #omp parallel shared(iD.1562) -> inner parallel
3914 .omp_data_i.2 = &.omp_data_o.2
3915 .omp_data_i.2->i = .omp_data_i.2->i - 1;
3916
3917
3918 ** This is a problem. The symbol iD.1562 cannot be referenced
3919 inside the body of the outer parallel region. But since we are
3920 emitting this copy operation while expanding the inner parallel
3921 directive, we need to access the CTX structure of the outer
3922 parallel directive to get the correct mapping:
3923
3924 .omp_data_o.2.i = .omp_data_i.1->i
3925
3926 Since there may be other workshare or parallel directives enclosing
3927 the parallel directive, it may be necessary to walk up the context
3928 parent chain. This is not a problem in general because nested
3929 parallelism happens only rarely. */
3930
3931 static tree
3932 lookup_decl_in_outer_ctx (tree decl, omp_context *ctx)
3933 {
3934 tree t;
3935 omp_context *up;
3936
3937 for (up = ctx->outer, t = NULL; up && t == NULL; up = up->outer)
3938 t = maybe_lookup_decl (decl, up);
3939
3940 gcc_assert (!ctx->is_nested || t || is_global_var (decl));
3941
3942 return t ? t : decl;
3943 }
3944
3945
3946 /* Similar to lookup_decl_in_outer_ctx, but return DECL if not found
3947 in outer contexts. */
3948
3949 static tree
3950 maybe_lookup_decl_in_outer_ctx (tree decl, omp_context *ctx)
3951 {
3952 tree t = NULL;
3953 omp_context *up;
3954
3955 for (up = ctx->outer, t = NULL; up && t == NULL; up = up->outer)
3956 t = maybe_lookup_decl (decl, up);
3957
3958 return t ? t : decl;
3959 }
3960
3961
3962 /* Construct the initialization value for reduction operation OP. */
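   /* E.g. reduction(+:x) initializes the private copy to 0,
      reduction(*:x) to 1, reduction(&:x) to ~0 and reduction(max:x)
      to the minimum representable value of TYPE.  */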
3963
3964 tree
3965 omp_reduction_init_op (location_t loc, enum tree_code op, tree type)
3966 {
3967 switch (op)
3968 {
3969 case PLUS_EXPR:
3970 case MINUS_EXPR:
3971 case BIT_IOR_EXPR:
3972 case BIT_XOR_EXPR:
3973 case TRUTH_OR_EXPR:
3974 case TRUTH_ORIF_EXPR:
3975 case TRUTH_XOR_EXPR:
3976 case NE_EXPR:
3977 return build_zero_cst (type);
3978
3979 case MULT_EXPR:
3980 case TRUTH_AND_EXPR:
3981 case TRUTH_ANDIF_EXPR:
3982 case EQ_EXPR:
3983 return fold_convert_loc (loc, type, integer_one_node);
3984
3985 case BIT_AND_EXPR:
3986 return fold_convert_loc (loc, type, integer_minus_one_node);
3987
3988 case MAX_EXPR:
3989 if (SCALAR_FLOAT_TYPE_P (type))
3990 {
3991 REAL_VALUE_TYPE max, min;
3992 if (HONOR_INFINITIES (type))
3993 {
3994 real_inf (&max);
3995 real_arithmetic (&min, NEGATE_EXPR, &max, NULL);
3996 }
3997 else
3998 real_maxval (&min, 1, TYPE_MODE (type));
3999 return build_real (type, min);
4000 }
4001 else if (POINTER_TYPE_P (type))
4002 {
4003 wide_int min
4004 = wi::min_value (TYPE_PRECISION (type), TYPE_SIGN (type));
4005 return wide_int_to_tree (type, min);
4006 }
4007 else
4008 {
4009 gcc_assert (INTEGRAL_TYPE_P (type));
4010 return TYPE_MIN_VALUE (type);
4011 }
4012
4013 case MIN_EXPR:
4014 if (SCALAR_FLOAT_TYPE_P (type))
4015 {
4016 REAL_VALUE_TYPE max;
4017 if (HONOR_INFINITIES (type))
4018 real_inf (&max);
4019 else
4020 real_maxval (&max, 0, TYPE_MODE (type));
4021 return build_real (type, max);
4022 }
4023 else if (POINTER_TYPE_P (type))
4024 {
4025 wide_int max
4026 = wi::max_value (TYPE_PRECISION (type), TYPE_SIGN (type));
4027 return wide_int_to_tree (type, max);
4028 }
4029 else
4030 {
4031 gcc_assert (INTEGRAL_TYPE_P (type));
4032 return TYPE_MAX_VALUE (type);
4033 }
4034
4035 default:
4036 gcc_unreachable ();
4037 }
4038 }
4039
4040 /* Construct the initialization value for reduction CLAUSE. */
4041
4042 tree
4043 omp_reduction_init (tree clause, tree type)
4044 {
4045 return omp_reduction_init_op (OMP_CLAUSE_LOCATION (clause),
4046 OMP_CLAUSE_REDUCTION_CODE (clause), type);
4047 }
4048
4049 /* Return alignment to be assumed for var in CLAUSE, which should be
4050 OMP_CLAUSE_ALIGNED. */
4051
4052 static tree
4053 omp_clause_aligned_alignment (tree clause)
4054 {
4055 if (OMP_CLAUSE_ALIGNED_ALIGNMENT (clause))
4056 return OMP_CLAUSE_ALIGNED_ALIGNMENT (clause);
4057
4058   /* Otherwise return an implementation-defined alignment.  */
4059 unsigned int al = 1;
4060 opt_scalar_mode mode_iter;
4061 auto_vector_modes modes;
4062 targetm.vectorize.autovectorize_vector_modes (&modes, true);
4063 static enum mode_class classes[]
4064 = { MODE_INT, MODE_VECTOR_INT, MODE_FLOAT, MODE_VECTOR_FLOAT };
4065 for (int i = 0; i < 4; i += 2)
4066     /* The i += 2 step above restricts this walk to the scalar classes; classes[i + 1] is the corresponding vector class.  */
4067 FOR_EACH_MODE_IN_CLASS (mode_iter, classes[i])
4068 {
4069 scalar_mode mode = mode_iter.require ();
4070 machine_mode vmode = targetm.vectorize.preferred_simd_mode (mode);
4071 if (GET_MODE_CLASS (vmode) != classes[i + 1])
4072 continue;
4073 machine_mode alt_vmode;
4074 for (unsigned int j = 0; j < modes.length (); ++j)
4075 if (related_vector_mode (modes[j], mode).exists (&alt_vmode)
4076 && known_ge (GET_MODE_SIZE (alt_vmode), GET_MODE_SIZE (vmode)))
4077 vmode = alt_vmode;
4078
4079 tree type = lang_hooks.types.type_for_mode (mode, 1);
4080 if (type == NULL_TREE || TYPE_MODE (type) != mode)
4081 continue;
4082 type = build_vector_type_for_mode (type, vmode);
4083 if (TYPE_MODE (type) != vmode)
4084 continue;
4085 if (TYPE_ALIGN_UNIT (type) > al)
4086 al = TYPE_ALIGN_UNIT (type);
4087 }
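  /* AL is now the largest unit alignment among the preferred vector
     types; e.g. on a target whose widest vectors are 128-bit this
     would typically be 16 (illustrative, target-dependent value).  */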
4088 return build_int_cst (integer_type_node, al);
4089 }
4090
4091
4092 /* This structure is part of the interface between lower_rec_simd_input_clauses
4093 and lower_rec_input_clauses. */
4094
4095 class omplow_simd_context {
4096 public:
4097 omplow_simd_context () { memset (this, 0, sizeof (*this)); }
4098 tree idx;
4099 tree lane;
4100 tree lastlane;
4101 vec<tree, va_heap> simt_eargs;
4102 gimple_seq simt_dlist;
4103 poly_uint64_pod max_vf;
4104 bool is_simt;
4105 };
4106
4107 /* Helper function of lower_rec_input_clauses, used for #pragma omp simd
4108 privatization. */
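/* A rough sketch of the scheme: each privatized VAR gets a backing
   array D.n[max_vf] marked with the "omp simd array" attribute.  Uses
   of VAR in the loop body are redirected via DECL_VALUE_EXPR to
   LVAR = D.n[lane], where LANE is produced by GOMP_SIMD_LANE, while
   IVAR = D.n[idx] is used by the initialization and cleanup loops
   emitted around the simd loop.  On SIMT targets a plain per-lane
   variable is used instead.  */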
4109
4110 static bool
4111 lower_rec_simd_input_clauses (tree new_var, omp_context *ctx,
4112 omplow_simd_context *sctx, tree &ivar,
4113 tree &lvar, tree *rvar = NULL,
4114 tree *rvar2 = NULL)
4115 {
4116 if (known_eq (sctx->max_vf, 0U))
4117 {
4118 sctx->max_vf = sctx->is_simt ? omp_max_simt_vf () : omp_max_vf ();
4119 if (maybe_gt (sctx->max_vf, 1U))
4120 {
4121 tree c = omp_find_clause (gimple_omp_for_clauses (ctx->stmt),
4122 OMP_CLAUSE_SAFELEN);
4123 if (c)
4124 {
4125 poly_uint64 safe_len;
4126 if (!poly_int_tree_p (OMP_CLAUSE_SAFELEN_EXPR (c), &safe_len)
4127 || maybe_lt (safe_len, 1U))
4128 sctx->max_vf = 1;
4129 else
4130 sctx->max_vf = lower_bound (sctx->max_vf, safe_len);
4131 }
4132 }
4133 if (maybe_gt (sctx->max_vf, 1U))
4134 {
4135 sctx->idx = create_tmp_var (unsigned_type_node);
4136 sctx->lane = create_tmp_var (unsigned_type_node);
4137 }
4138 }
4139 if (known_eq (sctx->max_vf, 1U))
4140 return false;
4141
4142 if (sctx->is_simt)
4143 {
4144 if (is_gimple_reg (new_var))
4145 {
4146 ivar = lvar = new_var;
4147 return true;
4148 }
4149 tree type = TREE_TYPE (new_var), ptype = build_pointer_type (type);
4150 ivar = lvar = create_tmp_var (type);
4151 TREE_ADDRESSABLE (ivar) = 1;
4152 DECL_ATTRIBUTES (ivar) = tree_cons (get_identifier ("omp simt private"),
4153 NULL, DECL_ATTRIBUTES (ivar));
4154 sctx->simt_eargs.safe_push (build1 (ADDR_EXPR, ptype, ivar));
4155 tree clobber = build_clobber (type);
4156 gimple *g = gimple_build_assign (ivar, clobber);
4157 gimple_seq_add_stmt (&sctx->simt_dlist, g);
4158 }
4159 else
4160 {
4161 tree atype = build_array_type_nelts (TREE_TYPE (new_var), sctx->max_vf);
4162 tree avar = create_tmp_var_raw (atype);
4163 if (TREE_ADDRESSABLE (new_var))
4164 TREE_ADDRESSABLE (avar) = 1;
4165 DECL_ATTRIBUTES (avar)
4166 = tree_cons (get_identifier ("omp simd array"), NULL,
4167 DECL_ATTRIBUTES (avar));
4168 gimple_add_tmp_var (avar);
4169 tree iavar = avar;
4170 if (rvar && !ctx->for_simd_scan_phase)
4171 {
4172 /* For inscan reductions, create another array temporary,
4173 which will hold the reduced value. */
4174 iavar = create_tmp_var_raw (atype);
4175 if (TREE_ADDRESSABLE (new_var))
4176 TREE_ADDRESSABLE (iavar) = 1;
4177 DECL_ATTRIBUTES (iavar)
4178 = tree_cons (get_identifier ("omp simd array"), NULL,
4179 tree_cons (get_identifier ("omp simd inscan"), NULL,
4180 DECL_ATTRIBUTES (iavar)));
4181 gimple_add_tmp_var (iavar);
4182 ctx->cb.decl_map->put (avar, iavar);
4183 if (sctx->lastlane == NULL_TREE)
4184 sctx->lastlane = create_tmp_var (unsigned_type_node);
4185 *rvar = build4 (ARRAY_REF, TREE_TYPE (new_var), iavar,
4186 sctx->lastlane, NULL_TREE, NULL_TREE);
4187 TREE_THIS_NOTRAP (*rvar) = 1;
4188
4189 if (ctx->scan_exclusive)
4190 {
4191 /* And for exclusive scan yet another one, which will
4192 hold the value during the scan phase. */
4193 tree savar = create_tmp_var_raw (atype);
4194 if (TREE_ADDRESSABLE (new_var))
4195 TREE_ADDRESSABLE (savar) = 1;
4196 DECL_ATTRIBUTES (savar)
4197 = tree_cons (get_identifier ("omp simd array"), NULL,
4198 tree_cons (get_identifier ("omp simd inscan "
4199 "exclusive"), NULL,
4200 DECL_ATTRIBUTES (savar)));
4201 gimple_add_tmp_var (savar);
4202 ctx->cb.decl_map->put (iavar, savar);
4203 *rvar2 = build4 (ARRAY_REF, TREE_TYPE (new_var), savar,
4204 sctx->idx, NULL_TREE, NULL_TREE);
4205 TREE_THIS_NOTRAP (*rvar2) = 1;
4206 }
4207 }
4208 ivar = build4 (ARRAY_REF, TREE_TYPE (new_var), iavar, sctx->idx,
4209 NULL_TREE, NULL_TREE);
4210 lvar = build4 (ARRAY_REF, TREE_TYPE (new_var), avar, sctx->lane,
4211 NULL_TREE, NULL_TREE);
4212 TREE_THIS_NOTRAP (ivar) = 1;
4213 TREE_THIS_NOTRAP (lvar) = 1;
4214 }
4215 if (DECL_P (new_var))
4216 {
4217 SET_DECL_VALUE_EXPR (new_var, lvar);
4218 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
4219 }
4220 return true;
4221 }
4222
4223 /* Helper function of lower_rec_input_clauses.  For a reference used in
4224    a simd reduction, create the underlying variable it will point to.  */
4225
4226 static void
4227 handle_simd_reference (location_t loc, tree new_vard, gimple_seq *ilist)
4228 {
4229 tree z = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (new_vard)));
4230 if (TREE_CONSTANT (z))
4231 {
4232 z = create_tmp_var_raw (TREE_TYPE (TREE_TYPE (new_vard)),
4233 get_name (new_vard));
4234 gimple_add_tmp_var (z);
4235 TREE_ADDRESSABLE (z) = 1;
4236 z = build_fold_addr_expr_loc (loc, z);
4237 gimplify_assign (new_vard, z, ilist);
4238 }
4239 }
4240
4241 /* Helper function for lower_rec_input_clauses.  Emit into the ILIST
4242    sequence code that computes (type) (tskred_temp[idx]).  */
4243
4244 static tree
4245 task_reduction_read (gimple_seq *ilist, tree tskred_temp, tree type,
4246 unsigned idx)
4247 {
4248 unsigned HOST_WIDE_INT sz
4249 = tree_to_uhwi (TYPE_SIZE_UNIT (pointer_sized_int_node));
4250 tree r = build2 (MEM_REF, pointer_sized_int_node,
4251 tskred_temp, build_int_cst (TREE_TYPE (tskred_temp),
4252 idx * sz));
4253 tree v = create_tmp_var (pointer_sized_int_node);
4254 gimple *g = gimple_build_assign (v, r);
4255 gimple_seq_add_stmt (ilist, g);
4256 if (!useless_type_conversion_p (type, pointer_sized_int_node))
4257 {
4258 v = create_tmp_var (type);
4259 g = gimple_build_assign (v, NOP_EXPR, gimple_assign_lhs (g));
4260 gimple_seq_add_stmt (ilist, g);
4261 }
4262 return v;
4263 }
4264
4265 /* Generate code to implement the input clauses, FIRSTPRIVATE and COPYIN,
4266 from the receiver (aka child) side and initializers for REFERENCE_TYPE
4267 private variables. Initialization statements go in ILIST, while calls
4268 to destructors go in DLIST. */
4269
4270 static void
4271 lower_rec_input_clauses (tree clauses, gimple_seq *ilist, gimple_seq *dlist,
4272 omp_context *ctx, struct omp_for_data *fd)
4273 {
4274 tree c, copyin_seq, x, ptr;
4275 bool copyin_by_ref = false;
4276 bool lastprivate_firstprivate = false;
4277 bool reduction_omp_orig_ref = false;
4278 int pass;
4279 bool is_simd = (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
4280 && gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_SIMD);
4281 omplow_simd_context sctx = omplow_simd_context ();
4282 tree simt_lane = NULL_TREE, simtrec = NULL_TREE;
4283 tree ivar = NULL_TREE, lvar = NULL_TREE, uid = NULL_TREE;
4284 gimple_seq llist[4] = { };
4285 tree nonconst_simd_if = NULL_TREE;
4286
4287 copyin_seq = NULL;
4288 sctx.is_simt = is_simd && omp_find_clause (clauses, OMP_CLAUSE__SIMT_);
4289
4290 /* Set max_vf=1 (which will later enforce safelen=1) in simd loops
4291 with data sharing clauses referencing variable sized vars. That
4292 is unnecessarily hard to support and very unlikely to result in
4293 vectorized code anyway. */
4294 if (is_simd)
4295 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
4296 switch (OMP_CLAUSE_CODE (c))
4297 {
4298 case OMP_CLAUSE_LINEAR:
4299 if (OMP_CLAUSE_LINEAR_ARRAY (c))
4300 sctx.max_vf = 1;
4301 /* FALLTHRU */
4302 case OMP_CLAUSE_PRIVATE:
4303 case OMP_CLAUSE_FIRSTPRIVATE:
4304 case OMP_CLAUSE_LASTPRIVATE:
4305 if (is_variable_sized (OMP_CLAUSE_DECL (c)))
4306 sctx.max_vf = 1;
4307 else if (omp_is_reference (OMP_CLAUSE_DECL (c)))
4308 {
4309 tree rtype = TREE_TYPE (TREE_TYPE (OMP_CLAUSE_DECL (c)));
4310 if (!TREE_CONSTANT (TYPE_SIZE_UNIT (rtype)))
4311 sctx.max_vf = 1;
4312 }
4313 break;
4314 case OMP_CLAUSE_REDUCTION:
4315 case OMP_CLAUSE_IN_REDUCTION:
4316 if (TREE_CODE (OMP_CLAUSE_DECL (c)) == MEM_REF
4317 || is_variable_sized (OMP_CLAUSE_DECL (c)))
4318 sctx.max_vf = 1;
4319 else if (omp_is_reference (OMP_CLAUSE_DECL (c)))
4320 {
4321 tree rtype = TREE_TYPE (TREE_TYPE (OMP_CLAUSE_DECL (c)));
4322 if (!TREE_CONSTANT (TYPE_SIZE_UNIT (rtype)))
4323 sctx.max_vf = 1;
4324 }
4325 break;
4326 case OMP_CLAUSE_IF:
4327 if (integer_zerop (OMP_CLAUSE_IF_EXPR (c)))
4328 sctx.max_vf = 1;
4329 else if (TREE_CODE (OMP_CLAUSE_IF_EXPR (c)) != INTEGER_CST)
4330 nonconst_simd_if = OMP_CLAUSE_IF_EXPR (c);
4331 break;
4332 case OMP_CLAUSE_SIMDLEN:
4333 if (integer_onep (OMP_CLAUSE_SIMDLEN_EXPR (c)))
4334 sctx.max_vf = 1;
4335 break;
4336 case OMP_CLAUSE__CONDTEMP_:
4337 /* FIXME: lastprivate(conditional:) not handled for SIMT yet. */
4338 if (sctx.is_simt)
4339 sctx.max_vf = 1;
4340 break;
4341 default:
4342 continue;
4343 }
4344
4345 /* Add a placeholder for simduid. */
4346 if (sctx.is_simt && maybe_ne (sctx.max_vf, 1U))
4347 sctx.simt_eargs.safe_push (NULL_TREE);
4348
4349 unsigned task_reduction_cnt = 0;
4350 unsigned task_reduction_cntorig = 0;
4351 unsigned task_reduction_cnt_full = 0;
4352 unsigned task_reduction_cntorig_full = 0;
4353 unsigned task_reduction_other_cnt = 0;
4354 tree tskred_atype = NULL_TREE, tskred_avar = NULL_TREE;
4355 tree tskred_base = NULL_TREE, tskred_temp = NULL_TREE;
4356   /* Do all the fixed sized types in the first pass, and the variable sized
4357      types in the second pass.  This makes sure that the scalar arguments to
4358      the variable sized types are processed before we use them in the
4359      variable sized operations.  For task reductions we use 4 passes: in the
4360      first two we ignore them, in the third we gather arguments for the
4361      GOMP_task_reduction_remap call, and in the last pass we actually handle
4362      the task reductions.  */
4363 for (pass = 0; pass < ((task_reduction_cnt || task_reduction_other_cnt)
4364 ? 4 : 2); ++pass)
4365 {
4366 if (pass == 2 && task_reduction_cnt)
4367 {
4368 tskred_atype
4369 = build_array_type_nelts (ptr_type_node, task_reduction_cnt
4370 + task_reduction_cntorig);
4371 tskred_avar = create_tmp_var_raw (tskred_atype);
4372 gimple_add_tmp_var (tskred_avar);
4373 TREE_ADDRESSABLE (tskred_avar) = 1;
4374 task_reduction_cnt_full = task_reduction_cnt;
4375 task_reduction_cntorig_full = task_reduction_cntorig;
4376 }
4377 else if (pass == 3 && task_reduction_cnt)
4378 {
4379 x = builtin_decl_explicit (BUILT_IN_GOMP_TASK_REDUCTION_REMAP);
4380 gimple *g
4381 = gimple_build_call (x, 3, size_int (task_reduction_cnt),
4382 size_int (task_reduction_cntorig),
4383 build_fold_addr_expr (tskred_avar));
4384 gimple_seq_add_stmt (ilist, g);
4385 }
4386 if (pass == 3 && task_reduction_other_cnt)
4387 {
4388	    /* For reduction clauses, build
4389	       tskred_base = (void *) tskred_temp[2]
4390			     + omp_get_thread_num () * tskred_temp[1]
4391	       or, if tskred_temp[1] is known to be constant, use that
4392	       constant directly.  This is the start of the private
4393	       reduction copy block for the current thread.  */
4394 tree v = create_tmp_var (integer_type_node);
4395 x = builtin_decl_explicit (BUILT_IN_OMP_GET_THREAD_NUM);
4396 gimple *g = gimple_build_call (x, 0);
4397 gimple_call_set_lhs (g, v);
4398 gimple_seq_add_stmt (ilist, g);
4399 c = omp_find_clause (clauses, OMP_CLAUSE__REDUCTEMP_);
4400 tskred_temp = OMP_CLAUSE_DECL (c);
4401 if (is_taskreg_ctx (ctx))
4402 tskred_temp = lookup_decl (tskred_temp, ctx);
4403 tree v2 = create_tmp_var (sizetype);
4404 g = gimple_build_assign (v2, NOP_EXPR, v);
4405 gimple_seq_add_stmt (ilist, g);
4406 if (ctx->task_reductions[0])
4407 v = fold_convert (sizetype, ctx->task_reductions[0]);
4408 else
4409 v = task_reduction_read (ilist, tskred_temp, sizetype, 1);
4410 tree v3 = create_tmp_var (sizetype);
4411 g = gimple_build_assign (v3, MULT_EXPR, v2, v);
4412 gimple_seq_add_stmt (ilist, g);
4413 v = task_reduction_read (ilist, tskred_temp, ptr_type_node, 2);
4414 tskred_base = create_tmp_var (ptr_type_node);
4415 g = gimple_build_assign (tskred_base, POINTER_PLUS_EXPR, v, v3);
4416 gimple_seq_add_stmt (ilist, g);
4417 }
4418 task_reduction_cnt = 0;
4419 task_reduction_cntorig = 0;
4420 task_reduction_other_cnt = 0;
4421 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
4422 {
4423 enum omp_clause_code c_kind = OMP_CLAUSE_CODE (c);
4424 tree var, new_var;
4425 bool by_ref;
4426 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
4427 bool task_reduction_p = false;
4428 bool task_reduction_needs_orig_p = false;
4429 tree cond = NULL_TREE;
4430
4431 switch (c_kind)
4432 {
4433 case OMP_CLAUSE_PRIVATE:
4434 if (OMP_CLAUSE_PRIVATE_DEBUG (c))
4435 continue;
4436 break;
4437 case OMP_CLAUSE_SHARED:
4438 /* Ignore shared directives in teams construct inside
4439 of target construct. */
4440 if (gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS
4441 && !is_host_teams_ctx (ctx))
4442 continue;
4443 if (maybe_lookup_decl (OMP_CLAUSE_DECL (c), ctx) == NULL)
4444 {
4445 gcc_assert (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c)
4446 || is_global_var (OMP_CLAUSE_DECL (c)));
4447 continue;
4448 }
4449 case OMP_CLAUSE_FIRSTPRIVATE:
4450 case OMP_CLAUSE_COPYIN:
4451 break;
4452 case OMP_CLAUSE_LINEAR:
4453 if (!OMP_CLAUSE_LINEAR_NO_COPYIN (c)
4454 && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c))
4455 lastprivate_firstprivate = true;
4456 break;
4457 case OMP_CLAUSE_REDUCTION:
4458 case OMP_CLAUSE_IN_REDUCTION:
4459 if (is_task_ctx (ctx) || OMP_CLAUSE_REDUCTION_TASK (c))
4460 {
4461 task_reduction_p = true;
4462 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION)
4463 {
4464 task_reduction_other_cnt++;
4465 if (pass == 2)
4466 continue;
4467 }
4468 else
4469 task_reduction_cnt++;
4470 if (OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c))
4471 {
4472 var = OMP_CLAUSE_DECL (c);
4473 /* If var is a global variable that isn't privatized
4474 in outer contexts, we don't need to look up the
4475 original address, it is always the address of the
4476 global variable itself. */
4477 if (!DECL_P (var)
4478 || omp_is_reference (var)
4479 || !is_global_var
4480 (maybe_lookup_decl_in_outer_ctx (var, ctx)))
4481 {
4482 task_reduction_needs_orig_p = true;
4483 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION)
4484 task_reduction_cntorig++;
4485 }
4486 }
4487 }
4488 else if (OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c))
4489 reduction_omp_orig_ref = true;
4490 break;
4491 case OMP_CLAUSE__REDUCTEMP_:
4492 if (!is_taskreg_ctx (ctx))
4493 continue;
4494 /* FALLTHRU */
4495 case OMP_CLAUSE__LOOPTEMP_:
4496 /* Handle _looptemp_/_reductemp_ clauses only on
4497 parallel/task. */
4498 if (fd)
4499 continue;
4500 break;
4501 case OMP_CLAUSE_LASTPRIVATE:
4502 if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
4503 {
4504 lastprivate_firstprivate = true;
4505 if (pass != 0 || is_taskloop_ctx (ctx))
4506 continue;
4507 }
4508	      /* Even without a corresponding firstprivate, if the
4509		 decl is a Fortran allocatable, it needs an outer var
4510		 reference.  */
4511 else if (pass == 0
4512 && lang_hooks.decls.omp_private_outer_ref
4513 (OMP_CLAUSE_DECL (c)))
4514 lastprivate_firstprivate = true;
4515 break;
4516 case OMP_CLAUSE_ALIGNED:
4517 if (pass != 1)
4518 continue;
4519 var = OMP_CLAUSE_DECL (c);
4520 if (TREE_CODE (TREE_TYPE (var)) == POINTER_TYPE
4521 && !is_global_var (var))
4522 {
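	      /* For a local pointer VAR this emits, e.g. for
		 aligned(p : 32), effectively
		   p = (T *) __builtin_assume_aligned (p, 32);  */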
4523 new_var = maybe_lookup_decl (var, ctx);
4524 if (new_var == NULL_TREE)
4525 new_var = maybe_lookup_decl_in_outer_ctx (var, ctx);
4526 x = builtin_decl_explicit (BUILT_IN_ASSUME_ALIGNED);
4527 tree alarg = omp_clause_aligned_alignment (c);
4528 alarg = fold_convert_loc (clause_loc, size_type_node, alarg);
4529 x = build_call_expr_loc (clause_loc, x, 2, new_var, alarg);
4530 x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x);
4531 x = build2 (MODIFY_EXPR, TREE_TYPE (new_var), new_var, x);
4532 gimplify_and_add (x, ilist);
4533 }
4534 else if (TREE_CODE (TREE_TYPE (var)) == ARRAY_TYPE
4535 && is_global_var (var))
4536 {
4537 tree ptype = build_pointer_type (TREE_TYPE (var)), t, t2;
4538 new_var = lookup_decl (var, ctx);
4539 t = maybe_lookup_decl_in_outer_ctx (var, ctx);
4540 t = build_fold_addr_expr_loc (clause_loc, t);
4541 t2 = builtin_decl_explicit (BUILT_IN_ASSUME_ALIGNED);
4542 tree alarg = omp_clause_aligned_alignment (c);
4543 alarg = fold_convert_loc (clause_loc, size_type_node, alarg);
4544 t = build_call_expr_loc (clause_loc, t2, 2, t, alarg);
4545 t = fold_convert_loc (clause_loc, ptype, t);
4546 x = create_tmp_var (ptype);
4547 t = build2 (MODIFY_EXPR, ptype, x, t);
4548 gimplify_and_add (t, ilist);
4549 t = build_simple_mem_ref_loc (clause_loc, x);
4550 SET_DECL_VALUE_EXPR (new_var, t);
4551 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
4552 }
4553 continue;
4554 case OMP_CLAUSE__CONDTEMP_:
4555 if (is_parallel_ctx (ctx)
4556 || (is_simd && !OMP_CLAUSE__CONDTEMP__ITER (c)))
4557 break;
4558 continue;
4559 default:
4560 continue;
4561 }
4562
4563 if (task_reduction_p != (pass >= 2))
4564 continue;
4565
4566 new_var = var = OMP_CLAUSE_DECL (c);
4567 if ((c_kind == OMP_CLAUSE_REDUCTION
4568 || c_kind == OMP_CLAUSE_IN_REDUCTION)
4569 && TREE_CODE (var) == MEM_REF)
4570 {
4571 var = TREE_OPERAND (var, 0);
4572 if (TREE_CODE (var) == POINTER_PLUS_EXPR)
4573 var = TREE_OPERAND (var, 0);
4574 if (TREE_CODE (var) == INDIRECT_REF
4575 || TREE_CODE (var) == ADDR_EXPR)
4576 var = TREE_OPERAND (var, 0);
4577 if (is_variable_sized (var))
4578 {
4579 gcc_assert (DECL_HAS_VALUE_EXPR_P (var));
4580 var = DECL_VALUE_EXPR (var);
4581 gcc_assert (TREE_CODE (var) == INDIRECT_REF);
4582 var = TREE_OPERAND (var, 0);
4583 gcc_assert (DECL_P (var));
4584 }
4585 new_var = var;
4586 }
4587 if (c_kind != OMP_CLAUSE_COPYIN)
4588 new_var = lookup_decl (var, ctx);
4589
4590 if (c_kind == OMP_CLAUSE_SHARED || c_kind == OMP_CLAUSE_COPYIN)
4591 {
4592 if (pass != 0)
4593 continue;
4594 }
4595 /* C/C++ array section reductions. */
4596 else if ((c_kind == OMP_CLAUSE_REDUCTION
4597 || c_kind == OMP_CLAUSE_IN_REDUCTION)
4598 && var != OMP_CLAUSE_DECL (c))
4599 {
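	      /* Here OMP_CLAUSE_DECL (c) is a MEM_REF describing the
		 array section, e.g. roughly MEM_REF <&a, bias> for
		 reduction(+:a[2:n]); TYPE below is the ARRAY_TYPE of
		 the section and V its last index.  */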
4600 if (pass == 0)
4601 continue;
4602
4603 tree bias = TREE_OPERAND (OMP_CLAUSE_DECL (c), 1);
4604 tree orig_var = TREE_OPERAND (OMP_CLAUSE_DECL (c), 0);
4605
4606 if (TREE_CODE (orig_var) == POINTER_PLUS_EXPR)
4607 {
4608 tree b = TREE_OPERAND (orig_var, 1);
4609 b = maybe_lookup_decl (b, ctx);
4610 if (b == NULL)
4611 {
4612 b = TREE_OPERAND (orig_var, 1);
4613 b = maybe_lookup_decl_in_outer_ctx (b, ctx);
4614 }
4615 if (integer_zerop (bias))
4616 bias = b;
4617 else
4618 {
4619 bias = fold_convert_loc (clause_loc,
4620 TREE_TYPE (b), bias);
4621 bias = fold_build2_loc (clause_loc, PLUS_EXPR,
4622 TREE_TYPE (b), b, bias);
4623 }
4624 orig_var = TREE_OPERAND (orig_var, 0);
4625 }
4626 if (pass == 2)
4627 {
4628 tree out = maybe_lookup_decl_in_outer_ctx (var, ctx);
4629 if (is_global_var (out)
4630 && TREE_CODE (TREE_TYPE (out)) != POINTER_TYPE
4631 && (TREE_CODE (TREE_TYPE (out)) != REFERENCE_TYPE
4632 || (TREE_CODE (TREE_TYPE (TREE_TYPE (out)))
4633 != POINTER_TYPE)))
4634 x = var;
4635 else
4636 {
4637 bool by_ref = use_pointer_for_field (var, NULL);
4638 x = build_receiver_ref (var, by_ref, ctx);
4639 if (TREE_CODE (TREE_TYPE (var)) == REFERENCE_TYPE
4640 && (TREE_CODE (TREE_TYPE (TREE_TYPE (var)))
4641 == POINTER_TYPE))
4642 x = build_fold_addr_expr (x);
4643 }
4644 if (TREE_CODE (orig_var) == INDIRECT_REF)
4645 x = build_simple_mem_ref (x);
4646 else if (TREE_CODE (orig_var) == ADDR_EXPR)
4647 {
4648 if (var == TREE_OPERAND (orig_var, 0))
4649 x = build_fold_addr_expr (x);
4650 }
4651 bias = fold_convert (sizetype, bias);
4652 x = fold_convert (ptr_type_node, x);
4653 x = fold_build2_loc (clause_loc, POINTER_PLUS_EXPR,
4654 TREE_TYPE (x), x, bias);
4655 unsigned cnt = task_reduction_cnt - 1;
4656 if (!task_reduction_needs_orig_p)
4657 cnt += (task_reduction_cntorig_full
4658 - task_reduction_cntorig);
4659 else
4660 cnt = task_reduction_cntorig - 1;
4661 tree r = build4 (ARRAY_REF, ptr_type_node, tskred_avar,
4662 size_int (cnt), NULL_TREE, NULL_TREE);
4663 gimplify_assign (r, x, ilist);
4664 continue;
4665 }
4666
4667 if (TREE_CODE (orig_var) == INDIRECT_REF
4668 || TREE_CODE (orig_var) == ADDR_EXPR)
4669 orig_var = TREE_OPERAND (orig_var, 0);
4670 tree d = OMP_CLAUSE_DECL (c);
4671 tree type = TREE_TYPE (d);
4672 gcc_assert (TREE_CODE (type) == ARRAY_TYPE);
4673 tree v = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
4674 const char *name = get_name (orig_var);
4675 if (pass == 3)
4676 {
4677 tree xv = create_tmp_var (ptr_type_node);
4678 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION)
4679 {
4680 unsigned cnt = task_reduction_cnt - 1;
4681 if (!task_reduction_needs_orig_p)
4682 cnt += (task_reduction_cntorig_full
4683 - task_reduction_cntorig);
4684 else
4685 cnt = task_reduction_cntorig - 1;
4686 x = build4 (ARRAY_REF, ptr_type_node, tskred_avar,
4687 size_int (cnt), NULL_TREE, NULL_TREE);
4688
4689 gimple *g = gimple_build_assign (xv, x);
4690 gimple_seq_add_stmt (ilist, g);
4691 }
4692 else
4693 {
4694 unsigned int idx = *ctx->task_reduction_map->get (c);
4695 tree off;
4696 if (ctx->task_reductions[1 + idx])
4697 off = fold_convert (sizetype,
4698 ctx->task_reductions[1 + idx]);
4699 else
4700 off = task_reduction_read (ilist, tskred_temp, sizetype,
4701 7 + 3 * idx + 1);
4702 gimple *g = gimple_build_assign (xv, POINTER_PLUS_EXPR,
4703 tskred_base, off);
4704 gimple_seq_add_stmt (ilist, g);
4705 }
4706 x = fold_convert (build_pointer_type (boolean_type_node),
4707 xv);
4708 if (TREE_CONSTANT (v))
4709 x = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (x), x,
4710 TYPE_SIZE_UNIT (type));
4711 else
4712 {
4713 tree t = maybe_lookup_decl (v, ctx);
4714 if (t)
4715 v = t;
4716 else
4717 v = maybe_lookup_decl_in_outer_ctx (v, ctx);
4718 gimplify_expr (&v, ilist, NULL, is_gimple_val,
4719 fb_rvalue);
4720 t = fold_build2_loc (clause_loc, PLUS_EXPR,
4721 TREE_TYPE (v), v,
4722 build_int_cst (TREE_TYPE (v), 1));
4723 t = fold_build2_loc (clause_loc, MULT_EXPR,
4724 TREE_TYPE (v), t,
4725 TYPE_SIZE_UNIT (TREE_TYPE (type)));
4726 x = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (x), x, t);
4727 }
4728 cond = create_tmp_var (TREE_TYPE (x));
4729 gimplify_assign (cond, x, ilist);
4730 x = xv;
4731 }
4732 else if (TREE_CONSTANT (v))
4733 {
4734 x = create_tmp_var_raw (type, name);
4735 gimple_add_tmp_var (x);
4736 TREE_ADDRESSABLE (x) = 1;
4737 x = build_fold_addr_expr_loc (clause_loc, x);
4738 }
4739 else
4740 {
4741 tree atmp
4742 = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
4743 tree t = maybe_lookup_decl (v, ctx);
4744 if (t)
4745 v = t;
4746 else
4747 v = maybe_lookup_decl_in_outer_ctx (v, ctx);
4748 gimplify_expr (&v, ilist, NULL, is_gimple_val, fb_rvalue);
4749 t = fold_build2_loc (clause_loc, PLUS_EXPR,
4750 TREE_TYPE (v), v,
4751 build_int_cst (TREE_TYPE (v), 1));
4752 t = fold_build2_loc (clause_loc, MULT_EXPR,
4753 TREE_TYPE (v), t,
4754 TYPE_SIZE_UNIT (TREE_TYPE (type)));
4755 tree al = size_int (TYPE_ALIGN (TREE_TYPE (type)));
4756 x = build_call_expr_loc (clause_loc, atmp, 2, t, al);
4757 }
4758
4759 tree ptype = build_pointer_type (TREE_TYPE (type));
4760 x = fold_convert_loc (clause_loc, ptype, x);
4761 tree y = create_tmp_var (ptype, name);
4762 gimplify_assign (y, x, ilist);
4763 x = y;
4764 tree yb = y;
4765
4766 if (!integer_zerop (bias))
4767 {
4768 bias = fold_convert_loc (clause_loc, pointer_sized_int_node,
4769 bias);
4770 yb = fold_convert_loc (clause_loc, pointer_sized_int_node,
4771 x);
4772 yb = fold_build2_loc (clause_loc, MINUS_EXPR,
4773 pointer_sized_int_node, yb, bias);
4774 x = fold_convert_loc (clause_loc, TREE_TYPE (x), yb);
4775 yb = create_tmp_var (ptype, name);
4776 gimplify_assign (yb, x, ilist);
4777 x = yb;
4778 }
4779
4780 d = TREE_OPERAND (d, 0);
4781 if (TREE_CODE (d) == POINTER_PLUS_EXPR)
4782 d = TREE_OPERAND (d, 0);
4783 if (TREE_CODE (d) == ADDR_EXPR)
4784 {
4785 if (orig_var != var)
4786 {
4787 gcc_assert (is_variable_sized (orig_var));
4788 x = fold_convert_loc (clause_loc, TREE_TYPE (new_var),
4789 x);
4790 gimplify_assign (new_var, x, ilist);
4791 tree new_orig_var = lookup_decl (orig_var, ctx);
4792 tree t = build_fold_indirect_ref (new_var);
4793 DECL_IGNORED_P (new_var) = 0;
4794 TREE_THIS_NOTRAP (t) = 1;
4795 SET_DECL_VALUE_EXPR (new_orig_var, t);
4796 DECL_HAS_VALUE_EXPR_P (new_orig_var) = 1;
4797 }
4798 else
4799 {
4800 x = build2 (MEM_REF, TREE_TYPE (new_var), x,
4801 build_int_cst (ptype, 0));
4802 SET_DECL_VALUE_EXPR (new_var, x);
4803 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
4804 }
4805 }
4806 else
4807 {
4808 gcc_assert (orig_var == var);
4809 if (TREE_CODE (d) == INDIRECT_REF)
4810 {
4811 x = create_tmp_var (ptype, name);
4812 TREE_ADDRESSABLE (x) = 1;
4813 gimplify_assign (x, yb, ilist);
4814 x = build_fold_addr_expr_loc (clause_loc, x);
4815 }
4816 x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x);
4817 gimplify_assign (new_var, x, ilist);
4818 }
4819 /* GOMP_taskgroup_reduction_register memsets the whole
4820 array to zero. If the initializer is zero, we don't
4821 need to initialize it again, just mark it as ever
4822 used unconditionally, i.e. cond = true. */
4823 if (cond
4824 && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) == NULL_TREE
4825 && initializer_zerop (omp_reduction_init (c,
4826 TREE_TYPE (type))))
4827 {
4828 gimple *g = gimple_build_assign (build_simple_mem_ref (cond),
4829 boolean_true_node);
4830 gimple_seq_add_stmt (ilist, g);
4831 continue;
4832 }
4833 tree end = create_artificial_label (UNKNOWN_LOCATION);
4834 if (cond)
4835 {
4836 gimple *g;
4837 if (!is_parallel_ctx (ctx))
4838 {
4839 tree condv = create_tmp_var (boolean_type_node);
4840 g = gimple_build_assign (condv,
4841 build_simple_mem_ref (cond));
4842 gimple_seq_add_stmt (ilist, g);
4843 tree lab1 = create_artificial_label (UNKNOWN_LOCATION);
4844 g = gimple_build_cond (NE_EXPR, condv,
4845 boolean_false_node, end, lab1);
4846 gimple_seq_add_stmt (ilist, g);
4847 gimple_seq_add_stmt (ilist, gimple_build_label (lab1));
4848 }
4849 g = gimple_build_assign (build_simple_mem_ref (cond),
4850 boolean_true_node);
4851 gimple_seq_add_stmt (ilist, g);
4852 }
4853
4854 tree y1 = create_tmp_var (ptype);
4855 gimplify_assign (y1, y, ilist);
4856 tree i2 = NULL_TREE, y2 = NULL_TREE;
4857 tree body2 = NULL_TREE, end2 = NULL_TREE;
4858 tree y3 = NULL_TREE, y4 = NULL_TREE;
4859 if (task_reduction_needs_orig_p)
4860 {
4861 y3 = create_tmp_var (ptype);
4862 tree ref;
4863 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION)
4864 ref = build4 (ARRAY_REF, ptr_type_node, tskred_avar,
4865 size_int (task_reduction_cnt_full
4866 + task_reduction_cntorig - 1),
4867 NULL_TREE, NULL_TREE);
4868 else
4869 {
4870 unsigned int idx = *ctx->task_reduction_map->get (c);
4871 ref = task_reduction_read (ilist, tskred_temp, ptype,
4872 7 + 3 * idx);
4873 }
4874 gimplify_assign (y3, ref, ilist);
4875 }
4876 else if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) || is_simd)
4877 {
4878 if (pass != 3)
4879 {
4880 y2 = create_tmp_var (ptype);
4881 gimplify_assign (y2, y, ilist);
4882 }
4883 if (is_simd || OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c))
4884 {
4885 tree ref = build_outer_var_ref (var, ctx);
4886		      /* For references, build_outer_var_ref already performs this.  */
4887 if (TREE_CODE (d) == INDIRECT_REF)
4888 gcc_assert (omp_is_reference (var));
4889 else if (TREE_CODE (d) == ADDR_EXPR)
4890 ref = build_fold_addr_expr (ref);
4891 else if (omp_is_reference (var))
4892 ref = build_fold_addr_expr (ref);
4893 ref = fold_convert_loc (clause_loc, ptype, ref);
4894 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c)
4895 && OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c))
4896 {
4897 y3 = create_tmp_var (ptype);
4898 gimplify_assign (y3, unshare_expr (ref), ilist);
4899 }
4900 if (is_simd)
4901 {
4902 y4 = create_tmp_var (ptype);
4903 gimplify_assign (y4, ref, dlist);
4904 }
4905 }
4906 }
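	      /* Now emit the element-wise initialization loop,
		 effectively
		   for (i = 0; i <= v; i++, y1 += elt_size)
		     initialize (*y1);
		 and, where needed, a matching merge loop into DLIST.  */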
4907 tree i = create_tmp_var (TREE_TYPE (v));
4908 gimplify_assign (i, build_int_cst (TREE_TYPE (v), 0), ilist);
4909 tree body = create_artificial_label (UNKNOWN_LOCATION);
4910 gimple_seq_add_stmt (ilist, gimple_build_label (body));
4911 if (y2)
4912 {
4913 i2 = create_tmp_var (TREE_TYPE (v));
4914 gimplify_assign (i2, build_int_cst (TREE_TYPE (v), 0), dlist);
4915 body2 = create_artificial_label (UNKNOWN_LOCATION);
4916 end2 = create_artificial_label (UNKNOWN_LOCATION);
4917 gimple_seq_add_stmt (dlist, gimple_build_label (body2));
4918 }
4919 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
4920 {
4921 tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
4922 tree decl_placeholder
4923 = OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c);
4924 SET_DECL_VALUE_EXPR (decl_placeholder,
4925 build_simple_mem_ref (y1));
4926 DECL_HAS_VALUE_EXPR_P (decl_placeholder) = 1;
4927 SET_DECL_VALUE_EXPR (placeholder,
4928 y3 ? build_simple_mem_ref (y3)
4929 : error_mark_node);
4930 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
4931 x = lang_hooks.decls.omp_clause_default_ctor
4932 (c, build_simple_mem_ref (y1),
4933 y3 ? build_simple_mem_ref (y3) : NULL_TREE);
4934 if (x)
4935 gimplify_and_add (x, ilist);
4936 if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c))
4937 {
4938 gimple_seq tseq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c);
4939 lower_omp (&tseq, ctx);
4940 gimple_seq_add_seq (ilist, tseq);
4941 }
4942 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL;
4943 if (is_simd)
4944 {
4945 SET_DECL_VALUE_EXPR (decl_placeholder,
4946 build_simple_mem_ref (y2));
4947 SET_DECL_VALUE_EXPR (placeholder,
4948 build_simple_mem_ref (y4));
4949 gimple_seq tseq = OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c);
4950 lower_omp (&tseq, ctx);
4951 gimple_seq_add_seq (dlist, tseq);
4952 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
4953 }
4954 DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
4955 DECL_HAS_VALUE_EXPR_P (decl_placeholder) = 0;
4956 if (y2)
4957 {
4958 x = lang_hooks.decls.omp_clause_dtor
4959 (c, build_simple_mem_ref (y2));
4960 if (x)
4961 gimplify_and_add (x, dlist);
4962 }
4963 }
4964 else
4965 {
4966 x = omp_reduction_init (c, TREE_TYPE (type));
4967 enum tree_code code = OMP_CLAUSE_REDUCTION_CODE (c);
4968
4969 /* reduction(-:var) sums up the partial results, so it
4970 acts identically to reduction(+:var). */
4971 if (code == MINUS_EXPR)
4972 code = PLUS_EXPR;
4973
4974 gimplify_assign (build_simple_mem_ref (y1), x, ilist);
4975 if (is_simd)
4976 {
4977 x = build2 (code, TREE_TYPE (type),
4978 build_simple_mem_ref (y4),
4979 build_simple_mem_ref (y2));
4980 gimplify_assign (build_simple_mem_ref (y4), x, dlist);
4981 }
4982 }
4983 gimple *g
4984 = gimple_build_assign (y1, POINTER_PLUS_EXPR, y1,
4985 TYPE_SIZE_UNIT (TREE_TYPE (type)));
4986 gimple_seq_add_stmt (ilist, g);
4987 if (y3)
4988 {
4989 g = gimple_build_assign (y3, POINTER_PLUS_EXPR, y3,
4990 TYPE_SIZE_UNIT (TREE_TYPE (type)));
4991 gimple_seq_add_stmt (ilist, g);
4992 }
4993 g = gimple_build_assign (i, PLUS_EXPR, i,
4994 build_int_cst (TREE_TYPE (i), 1));
4995 gimple_seq_add_stmt (ilist, g);
4996 g = gimple_build_cond (LE_EXPR, i, v, body, end);
4997 gimple_seq_add_stmt (ilist, g);
4998 gimple_seq_add_stmt (ilist, gimple_build_label (end));
4999 if (y2)
5000 {
5001 g = gimple_build_assign (y2, POINTER_PLUS_EXPR, y2,
5002 TYPE_SIZE_UNIT (TREE_TYPE (type)));
5003 gimple_seq_add_stmt (dlist, g);
5004 if (y4)
5005 {
5006 g = gimple_build_assign
5007 (y4, POINTER_PLUS_EXPR, y4,
5008 TYPE_SIZE_UNIT (TREE_TYPE (type)));
5009 gimple_seq_add_stmt (dlist, g);
5010 }
5011 g = gimple_build_assign (i2, PLUS_EXPR, i2,
5012 build_int_cst (TREE_TYPE (i2), 1));
5013 gimple_seq_add_stmt (dlist, g);
5014 g = gimple_build_cond (LE_EXPR, i2, v, body2, end2);
5015 gimple_seq_add_stmt (dlist, g);
5016 gimple_seq_add_stmt (dlist, gimple_build_label (end2));
5017 }
5018 continue;
5019 }
5020 else if (pass == 2)
5021 {
5022 if (is_global_var (maybe_lookup_decl_in_outer_ctx (var, ctx)))
5023 x = var;
5024 else
5025 {
5026 bool by_ref = use_pointer_for_field (var, ctx);
5027 x = build_receiver_ref (var, by_ref, ctx);
5028 }
5029 if (!omp_is_reference (var))
5030 x = build_fold_addr_expr (x);
5031 x = fold_convert (ptr_type_node, x);
5032 unsigned cnt = task_reduction_cnt - 1;
5033 if (!task_reduction_needs_orig_p)
5034 cnt += task_reduction_cntorig_full - task_reduction_cntorig;
5035 else
5036 cnt = task_reduction_cntorig - 1;
5037 tree r = build4 (ARRAY_REF, ptr_type_node, tskred_avar,
5038 size_int (cnt), NULL_TREE, NULL_TREE);
5039 gimplify_assign (r, x, ilist);
5040 continue;
5041 }
5042 else if (pass == 3)
5043 {
5044 tree type = TREE_TYPE (new_var);
5045 if (!omp_is_reference (var))
5046 type = build_pointer_type (type);
5047 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION)
5048 {
5049 unsigned cnt = task_reduction_cnt - 1;
5050 if (!task_reduction_needs_orig_p)
5051 cnt += (task_reduction_cntorig_full
5052 - task_reduction_cntorig);
5053 else
5054 cnt = task_reduction_cntorig - 1;
5055 x = build4 (ARRAY_REF, ptr_type_node, tskred_avar,
5056 size_int (cnt), NULL_TREE, NULL_TREE);
5057 }
5058 else
5059 {
5060 unsigned int idx = *ctx->task_reduction_map->get (c);
5061 tree off;
5062 if (ctx->task_reductions[1 + idx])
5063 off = fold_convert (sizetype,
5064 ctx->task_reductions[1 + idx]);
5065 else
5066 off = task_reduction_read (ilist, tskred_temp, sizetype,
5067 7 + 3 * idx + 1);
5068 x = fold_build2 (POINTER_PLUS_EXPR, ptr_type_node,
5069 tskred_base, off);
5070 }
5071 x = fold_convert (type, x);
5072 tree t;
5073 if (omp_is_reference (var))
5074 {
5075 gimplify_assign (new_var, x, ilist);
5076 t = new_var;
5077 new_var = build_simple_mem_ref (new_var);
5078 }
5079 else
5080 {
5081 t = create_tmp_var (type);
5082 gimplify_assign (t, x, ilist);
5083 SET_DECL_VALUE_EXPR (new_var, build_simple_mem_ref (t));
5084 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
5085 }
5086 t = fold_convert (build_pointer_type (boolean_type_node), t);
5087 t = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (t), t,
5088 TYPE_SIZE_UNIT (TREE_TYPE (type)));
5089 cond = create_tmp_var (TREE_TYPE (t));
5090 gimplify_assign (cond, t, ilist);
5091 }
5092 else if (is_variable_sized (var))
5093 {
5094 /* For variable sized types, we need to allocate the
5095 actual storage here. Call alloca and store the
5096 result in the pointer decl that we created elsewhere. */
5097 if (pass == 0)
5098 continue;
5099
5100 if (c_kind != OMP_CLAUSE_FIRSTPRIVATE || !is_task_ctx (ctx))
5101 {
5102 gcall *stmt;
5103 tree tmp, atmp;
5104
5105 ptr = DECL_VALUE_EXPR (new_var);
5106 gcc_assert (TREE_CODE (ptr) == INDIRECT_REF);
5107 ptr = TREE_OPERAND (ptr, 0);
5108 gcc_assert (DECL_P (ptr));
5109 x = TYPE_SIZE_UNIT (TREE_TYPE (new_var));
5110
5111		  /* void *tmp = __builtin_alloca_with_align (size, align)  */
5112 atmp = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
5113 stmt = gimple_build_call (atmp, 2, x,
5114 size_int (DECL_ALIGN (var)));
5115 tmp = create_tmp_var_raw (ptr_type_node);
5116 gimple_add_tmp_var (tmp);
5117 gimple_call_set_lhs (stmt, tmp);
5118
5119 gimple_seq_add_stmt (ilist, stmt);
5120
5121 x = fold_convert_loc (clause_loc, TREE_TYPE (ptr), tmp);
5122 gimplify_assign (ptr, x, ilist);
5123 }
5124 }
5125 else if (omp_is_reference (var)
5126 && (c_kind != OMP_CLAUSE_FIRSTPRIVATE
5127 || !OMP_CLAUSE_FIRSTPRIVATE_NO_REFERENCE (c)))
5128 {
5129 /* For references that are being privatized for Fortran,
5130 allocate new backing storage for the new pointer
5131 variable. This allows us to avoid changing all the
5132 code that expects a pointer to something that expects
5133 a direct variable. */
5134 if (pass == 0)
5135 continue;
5136
5137 x = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (new_var)));
5138 if (c_kind == OMP_CLAUSE_FIRSTPRIVATE && is_task_ctx (ctx))
5139 {
5140 x = build_receiver_ref (var, false, ctx);
5141 x = build_fold_addr_expr_loc (clause_loc, x);
5142 }
5143 else if (TREE_CONSTANT (x))
5144 {
5145		  /* For a reduction in a SIMD loop, defer adding the
5146		     initialization of the reference, because if we decide
5147		     to use a SIMD array for it, the initialization could
5148		     cause an expansion ICE.  Ditto for other privatization clauses.  */
5149 if (is_simd)
5150 x = NULL_TREE;
5151 else
5152 {
5153 x = create_tmp_var_raw (TREE_TYPE (TREE_TYPE (new_var)),
5154 get_name (var));
5155 gimple_add_tmp_var (x);
5156 TREE_ADDRESSABLE (x) = 1;
5157 x = build_fold_addr_expr_loc (clause_loc, x);
5158 }
5159 }
5160 else
5161 {
5162 tree atmp
5163 = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
5164 tree rtype = TREE_TYPE (TREE_TYPE (new_var));
5165 tree al = size_int (TYPE_ALIGN (rtype));
5166 x = build_call_expr_loc (clause_loc, atmp, 2, x, al);
5167 }
5168
5169 if (x)
5170 {
5171 x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x);
5172 gimplify_assign (new_var, x, ilist);
5173 }
5174
5175 new_var = build_simple_mem_ref_loc (clause_loc, new_var);
5176 }
5177 else if ((c_kind == OMP_CLAUSE_REDUCTION
5178 || c_kind == OMP_CLAUSE_IN_REDUCTION)
5179 && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
5180 {
5181 if (pass == 0)
5182 continue;
5183 }
5184 else if (pass != 0)
5185 continue;
5186
5187 switch (OMP_CLAUSE_CODE (c))
5188 {
5189 case OMP_CLAUSE_SHARED:
5190 /* Ignore shared directives in teams construct inside
5191 target construct. */
5192 if (gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS
5193 && !is_host_teams_ctx (ctx))
5194 continue;
5195 /* Shared global vars are just accessed directly. */
5196 if (is_global_var (new_var))
5197 break;
5198 /* For taskloop firstprivate/lastprivate, represented
5199 as firstprivate and shared clause on the task, new_var
5200 is the firstprivate var. */
5201 if (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
5202 break;
5203 /* Set up the DECL_VALUE_EXPR for shared variables now. This
5204 needs to be delayed until after fixup_child_record_type so
5205 that we get the correct type during the dereference. */
5206 by_ref = use_pointer_for_field (var, ctx);
5207 x = build_receiver_ref (var, by_ref, ctx);
5208 SET_DECL_VALUE_EXPR (new_var, x);
5209 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
5210
5211 /* ??? If VAR is not passed by reference, and the variable
5212 hasn't been initialized yet, then we'll get a warning for
5213 the store into the omp_data_s structure. Ideally, we'd be
5214 able to notice this and not store anything at all, but
5215 we're generating code too early. Suppress the warning. */
5216 if (!by_ref)
5217 TREE_NO_WARNING (var) = 1;
5218 break;
5219
5220 case OMP_CLAUSE__CONDTEMP_:
5221 if (is_parallel_ctx (ctx))
5222 {
5223 x = build_receiver_ref (var, false, ctx);
5224 SET_DECL_VALUE_EXPR (new_var, x);
5225 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
5226 }
5227 else if (is_simd && !OMP_CLAUSE__CONDTEMP__ITER (c))
5228 {
5229 x = build_zero_cst (TREE_TYPE (var));
5230 goto do_private;
5231 }
5232 break;
5233
5234 case OMP_CLAUSE_LASTPRIVATE:
5235 if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
5236 break;
5237 /* FALLTHRU */
5238
5239 case OMP_CLAUSE_PRIVATE:
5240 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_PRIVATE)
5241 x = build_outer_var_ref (var, ctx);
5242 else if (OMP_CLAUSE_PRIVATE_OUTER_REF (c))
5243 {
5244 if (is_task_ctx (ctx))
5245 x = build_receiver_ref (var, false, ctx);
5246 else
5247 x = build_outer_var_ref (var, ctx, OMP_CLAUSE_PRIVATE);
5248 }
5249 else
5250 x = NULL;
5251 do_private:
5252 tree nx;
5253 bool copy_ctor;
5254 copy_ctor = false;
5255 nx = unshare_expr (new_var);
5256 if (is_simd
5257 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
5258 && OMP_CLAUSE_LASTPRIVATE_LOOP_IV (c))
5259 copy_ctor = true;
5260 if (copy_ctor)
5261 nx = lang_hooks.decls.omp_clause_copy_ctor (c, nx, x);
5262 else
5263 nx = lang_hooks.decls.omp_clause_default_ctor (c, nx, x);
5264 if (is_simd)
5265 {
5266 tree y = lang_hooks.decls.omp_clause_dtor (c, new_var);
5267 if ((TREE_ADDRESSABLE (new_var) || nx || y
5268 || (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
5269 && (gimple_omp_for_collapse (ctx->stmt) != 1
5270 || (gimple_omp_for_index (ctx->stmt, 0)
5271 != new_var)))
5272 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE__CONDTEMP_
5273 || omp_is_reference (var))
5274 && lower_rec_simd_input_clauses (new_var, ctx, &sctx,
5275 ivar, lvar))
5276 {
5277 if (omp_is_reference (var))
5278 {
5279 gcc_assert (TREE_CODE (new_var) == MEM_REF);
5280 tree new_vard = TREE_OPERAND (new_var, 0);
5281 gcc_assert (DECL_P (new_vard));
5282 SET_DECL_VALUE_EXPR (new_vard,
5283 build_fold_addr_expr (lvar));
5284 DECL_HAS_VALUE_EXPR_P (new_vard) = 1;
5285 }
5286
5287 if (nx)
5288 {
5289 tree iv = unshare_expr (ivar);
5290 if (copy_ctor)
5291 x = lang_hooks.decls.omp_clause_copy_ctor (c, iv,
5292 x);
5293 else
5294 x = lang_hooks.decls.omp_clause_default_ctor (c,
5295 iv,
5296 x);
5297 }
5298 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE__CONDTEMP_)
5299 {
5300 x = build2 (MODIFY_EXPR, TREE_TYPE (ivar),
5301 unshare_expr (ivar), x);
5302 nx = x;
5303 }
5304 if (nx && x)
5305 gimplify_and_add (x, &llist[0]);
5306 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
5307 && OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c))
5308 {
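		      /* lastprivate(conditional:) bookkeeping: per lane,
			 if the iteration number recorded in the condtemp
			 array beats the best seen so far (T), copy that
			 lane's value to ORIG_V, effectively
			   if (t < condtemp[idx])
			     { t = condtemp[idx]; orig_v = ivar; }  */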
5309 tree v = new_var;
5310 if (!DECL_P (v))
5311 {
5312 gcc_assert (TREE_CODE (v) == MEM_REF);
5313 v = TREE_OPERAND (v, 0);
5314 gcc_assert (DECL_P (v));
5315 }
5316 v = *ctx->lastprivate_conditional_map->get (v);
5317 tree t = create_tmp_var (TREE_TYPE (v));
5318 tree z = build_zero_cst (TREE_TYPE (v));
5319 tree orig_v
5320 = build_outer_var_ref (var, ctx,
5321 OMP_CLAUSE_LASTPRIVATE);
5322 gimple_seq_add_stmt (dlist,
5323 gimple_build_assign (t, z));
5324 gcc_assert (DECL_HAS_VALUE_EXPR_P (v));
5325 tree civar = DECL_VALUE_EXPR (v);
5326 gcc_assert (TREE_CODE (civar) == ARRAY_REF);
5327 civar = unshare_expr (civar);
5328 TREE_OPERAND (civar, 1) = sctx.idx;
5329 x = build2 (MODIFY_EXPR, TREE_TYPE (t), t,
5330 unshare_expr (civar));
5331 x = build2 (COMPOUND_EXPR, TREE_TYPE (orig_v), x,
5332 build2 (MODIFY_EXPR, TREE_TYPE (orig_v),
5333 orig_v, unshare_expr (ivar)));
5334 tree cond = build2 (LT_EXPR, boolean_type_node, t,
5335 civar);
5336 x = build3 (COND_EXPR, void_type_node, cond, x,
5337 void_node);
5338 gimple_seq tseq = NULL;
5339 gimplify_and_add (x, &tseq);
5340 if (ctx->outer)
5341 lower_omp (&tseq, ctx->outer);
5342 gimple_seq_add_seq (&llist[1], tseq);
5343 }
5344 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
5345 && ctx->for_simd_scan_phase)
5346 {
5347 x = unshare_expr (ivar);
5348 tree orig_v
5349 = build_outer_var_ref (var, ctx,
5350 OMP_CLAUSE_LASTPRIVATE);
5351 x = lang_hooks.decls.omp_clause_assign_op (c, x,
5352 orig_v);
5353 gimplify_and_add (x, &llist[0]);
5354 }
5355 if (y)
5356 {
5357 y = lang_hooks.decls.omp_clause_dtor (c, ivar);
5358 if (y)
5359 gimplify_and_add (y, &llist[1]);
5360 }
5361 break;
5362 }
5363 if (omp_is_reference (var))
5364 {
5365 gcc_assert (TREE_CODE (new_var) == MEM_REF);
5366 tree new_vard = TREE_OPERAND (new_var, 0);
5367 gcc_assert (DECL_P (new_vard));
5368 tree type = TREE_TYPE (TREE_TYPE (new_vard));
5369 x = TYPE_SIZE_UNIT (type);
5370 if (TREE_CONSTANT (x))
5371 {
5372 x = create_tmp_var_raw (type, get_name (var));
5373 gimple_add_tmp_var (x);
5374 TREE_ADDRESSABLE (x) = 1;
5375 x = build_fold_addr_expr_loc (clause_loc, x);
5376 x = fold_convert_loc (clause_loc,
5377 TREE_TYPE (new_vard), x);
5378 gimplify_assign (new_vard, x, ilist);
5379 }
5380 }
5381 }
5382 if (nx)
5383 gimplify_and_add (nx, ilist);
5384 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
5385 && is_simd
5386 && ctx->for_simd_scan_phase)
5387 {
5388 tree orig_v = build_outer_var_ref (var, ctx,
5389 OMP_CLAUSE_LASTPRIVATE);
5390 x = lang_hooks.decls.omp_clause_assign_op (c, new_var,
5391 orig_v);
5392 gimplify_and_add (x, ilist);
5393 }
5394 /* FALLTHRU */
5395
5396 do_dtor:
5397 x = lang_hooks.decls.omp_clause_dtor (c, new_var);
5398 if (x)
5399 gimplify_and_add (x, dlist);
5400 break;
5401
5402 case OMP_CLAUSE_LINEAR:
5403 if (!OMP_CLAUSE_LINEAR_NO_COPYIN (c))
5404 goto do_firstprivate;
5405 if (OMP_CLAUSE_LINEAR_NO_COPYOUT (c))
5406 x = NULL;
5407 else
5408 x = build_outer_var_ref (var, ctx);
5409 goto do_private;
5410
5411 case OMP_CLAUSE_FIRSTPRIVATE:
5412 if (is_task_ctx (ctx))
5413 {
5414 if ((omp_is_reference (var)
5415 && !OMP_CLAUSE_FIRSTPRIVATE_NO_REFERENCE (c))
5416 || is_variable_sized (var))
5417 goto do_dtor;
5418 else if (is_global_var (maybe_lookup_decl_in_outer_ctx (var,
5419 ctx))
5420 || use_pointer_for_field (var, NULL))
5421 {
5422 x = build_receiver_ref (var, false, ctx);
5423 SET_DECL_VALUE_EXPR (new_var, x);
5424 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
5425 goto do_dtor;
5426 }
5427 }
5428 if (OMP_CLAUSE_FIRSTPRIVATE_NO_REFERENCE (c)
5429 && omp_is_reference (var))
5430 {
5431 x = build_outer_var_ref (var, ctx);
5432 gcc_assert (TREE_CODE (x) == MEM_REF
5433 && integer_zerop (TREE_OPERAND (x, 1)));
5434 x = TREE_OPERAND (x, 0);
5435 x = lang_hooks.decls.omp_clause_copy_ctor
5436 (c, unshare_expr (new_var), x);
5437 gimplify_and_add (x, ilist);
5438 goto do_dtor;
5439 }
5440 do_firstprivate:
5441 x = build_outer_var_ref (var, ctx);
5442 if (is_simd)
5443 {
5444 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
5445 && gimple_omp_for_combined_into_p (ctx->stmt))
5446 {
5447 tree t = OMP_CLAUSE_LINEAR_STEP (c);
5448 tree stept = TREE_TYPE (t);
5449 tree ct = omp_find_clause (clauses,
5450 OMP_CLAUSE__LOOPTEMP_);
5451 gcc_assert (ct);
5452 tree l = OMP_CLAUSE_DECL (ct);
5453 tree n1 = fd->loop.n1;
5454 tree step = fd->loop.step;
5455 tree itype = TREE_TYPE (l);
5456 if (POINTER_TYPE_P (itype))
5457 itype = signed_type_for (itype);
5458 l = fold_build2 (MINUS_EXPR, itype, l, n1);
5459 if (TYPE_UNSIGNED (itype)
5460 && fd->loop.cond_code == GT_EXPR)
5461 l = fold_build2 (TRUNC_DIV_EXPR, itype,
5462 fold_build1 (NEGATE_EXPR, itype, l),
5463 fold_build1 (NEGATE_EXPR,
5464 itype, step));
5465 else
5466 l = fold_build2 (TRUNC_DIV_EXPR, itype, l, step);
5467 t = fold_build2 (MULT_EXPR, stept,
5468 fold_convert (stept, l), t);
5469
5470 if (OMP_CLAUSE_LINEAR_ARRAY (c))
5471 {
5472 if (omp_is_reference (var))
5473 {
5474 gcc_assert (TREE_CODE (new_var) == MEM_REF);
5475 tree new_vard = TREE_OPERAND (new_var, 0);
5476 gcc_assert (DECL_P (new_vard));
5477 tree type = TREE_TYPE (TREE_TYPE (new_vard));
5478 nx = TYPE_SIZE_UNIT (type);
5479 if (TREE_CONSTANT (nx))
5480 {
5481 nx = create_tmp_var_raw (type,
5482 get_name (var));
5483 gimple_add_tmp_var (nx);
5484 TREE_ADDRESSABLE (nx) = 1;
5485 nx = build_fold_addr_expr_loc (clause_loc,
5486 nx);
5487 nx = fold_convert_loc (clause_loc,
5488 TREE_TYPE (new_vard),
5489 nx);
5490 gimplify_assign (new_vard, nx, ilist);
5491 }
5492 }
5493
5494 x = lang_hooks.decls.omp_clause_linear_ctor
5495 (c, new_var, x, t);
5496 gimplify_and_add (x, ilist);
5497 goto do_dtor;
5498 }
5499
5500 if (POINTER_TYPE_P (TREE_TYPE (x)))
5501 x = fold_build2 (POINTER_PLUS_EXPR,
5502 TREE_TYPE (x), x, t);
5503 else
5504 x = fold_build2 (PLUS_EXPR, TREE_TYPE (x), x, t);
5505 }
5506
5507 if ((OMP_CLAUSE_CODE (c) != OMP_CLAUSE_LINEAR
5508 || TREE_ADDRESSABLE (new_var)
5509 || omp_is_reference (var))
5510 && lower_rec_simd_input_clauses (new_var, ctx, &sctx,
5511 ivar, lvar))
5512 {
5513 if (omp_is_reference (var))
5514 {
5515 gcc_assert (TREE_CODE (new_var) == MEM_REF);
5516 tree new_vard = TREE_OPERAND (new_var, 0);
5517 gcc_assert (DECL_P (new_vard));
5518 SET_DECL_VALUE_EXPR (new_vard,
5519 build_fold_addr_expr (lvar));
5520 DECL_HAS_VALUE_EXPR_P (new_vard) = 1;
5521 }
5522 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR)
5523 {
5524 tree iv = create_tmp_var (TREE_TYPE (new_var));
5525 x = lang_hooks.decls.omp_clause_copy_ctor (c, iv, x);
5526 gimplify_and_add (x, ilist);
5527 gimple_stmt_iterator gsi
5528 = gsi_start_1 (gimple_omp_body_ptr (ctx->stmt));
5529 gassign *g
5530 = gimple_build_assign (unshare_expr (lvar), iv);
5531 gsi_insert_before_without_update (&gsi, g,
5532 GSI_SAME_STMT);
5533 tree t = OMP_CLAUSE_LINEAR_STEP (c);
5534 enum tree_code code = PLUS_EXPR;
5535 if (POINTER_TYPE_P (TREE_TYPE (new_var)))
5536 code = POINTER_PLUS_EXPR;
5537 g = gimple_build_assign (iv, code, iv, t);
5538 gsi_insert_before_without_update (&gsi, g,
5539 GSI_SAME_STMT);
5540 break;
5541 }
5542 x = lang_hooks.decls.omp_clause_copy_ctor
5543 (c, unshare_expr (ivar), x);
5544 gimplify_and_add (x, &llist[0]);
5545 x = lang_hooks.decls.omp_clause_dtor (c, ivar);
5546 if (x)
5547 gimplify_and_add (x, &llist[1]);
5548 break;
5549 }
5550 if (omp_is_reference (var))
5551 {
5552 gcc_assert (TREE_CODE (new_var) == MEM_REF);
5553 tree new_vard = TREE_OPERAND (new_var, 0);
5554 gcc_assert (DECL_P (new_vard));
5555 tree type = TREE_TYPE (TREE_TYPE (new_vard));
5556 nx = TYPE_SIZE_UNIT (type);
5557 if (TREE_CONSTANT (nx))
5558 {
5559 nx = create_tmp_var_raw (type, get_name (var));
5560 gimple_add_tmp_var (nx);
5561 TREE_ADDRESSABLE (nx) = 1;
5562 nx = build_fold_addr_expr_loc (clause_loc, nx);
5563 nx = fold_convert_loc (clause_loc,
5564 TREE_TYPE (new_vard), nx);
5565 gimplify_assign (new_vard, nx, ilist);
5566 }
5567 }
5568 }
5569 x = lang_hooks.decls.omp_clause_copy_ctor
5570 (c, unshare_expr (new_var), x);
5571 gimplify_and_add (x, ilist);
5572 goto do_dtor;
5573
5574 case OMP_CLAUSE__LOOPTEMP_:
5575 case OMP_CLAUSE__REDUCTEMP_:
5576 gcc_assert (is_taskreg_ctx (ctx));
5577 x = build_outer_var_ref (var, ctx);
5578 x = build2 (MODIFY_EXPR, TREE_TYPE (new_var), new_var, x);
5579 gimplify_and_add (x, ilist);
5580 break;
5581
5582 case OMP_CLAUSE_COPYIN:
5583 by_ref = use_pointer_for_field (var, NULL);
5584 x = build_receiver_ref (var, by_ref, ctx);
5585 x = lang_hooks.decls.omp_clause_assign_op (c, new_var, x);
5586 append_to_statement_list (x, &copyin_seq);
5587 copyin_by_ref |= by_ref;
5588 break;
5589
5590 case OMP_CLAUSE_REDUCTION:
5591 case OMP_CLAUSE_IN_REDUCTION:
5592 /* OpenACC reductions are initialized using the
5593 GOACC_REDUCTION internal function. */
5594 if (is_gimple_omp_oacc (ctx->stmt))
5595 break;
5596 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
5597 {
5598 tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
5599 gimple *tseq;
5600 tree ptype = TREE_TYPE (placeholder);
5601 if (cond)
5602 {
5603 x = error_mark_node;
5604 if (OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c)
5605 && !task_reduction_needs_orig_p)
5606 x = var;
5607 else if (OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c))
5608 {
5609 tree pptype = build_pointer_type (ptype);
5610 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION)
5611 x = build4 (ARRAY_REF, ptr_type_node, tskred_avar,
5612 size_int (task_reduction_cnt_full
5613 + task_reduction_cntorig - 1),
5614 NULL_TREE, NULL_TREE);
5615 else
5616 {
5617 unsigned int idx
5618 = *ctx->task_reduction_map->get (c);
5619 x = task_reduction_read (ilist, tskred_temp,
5620 pptype, 7 + 3 * idx);
5621 }
5622 x = fold_convert (pptype, x);
5623 x = build_simple_mem_ref (x);
5624 }
5625 }
5626 else
5627 {
5628 x = build_outer_var_ref (var, ctx);
5629
5630 if (omp_is_reference (var)
5631 && !useless_type_conversion_p (ptype, TREE_TYPE (x)))
5632 x = build_fold_addr_expr_loc (clause_loc, x);
5633 }
5634 SET_DECL_VALUE_EXPR (placeholder, x);
5635 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
5636 tree new_vard = new_var;
5637 if (omp_is_reference (var))
5638 {
5639 gcc_assert (TREE_CODE (new_var) == MEM_REF);
5640 new_vard = TREE_OPERAND (new_var, 0);
5641 gcc_assert (DECL_P (new_vard));
5642 }
5643 tree rvar = NULL_TREE, *rvarp = NULL, rvar2 = NULL_TREE;
5644 if (is_simd
5645 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
5646 && OMP_CLAUSE_REDUCTION_INSCAN (c))
5647 rvarp = &rvar;
5648 if (is_simd
5649 && lower_rec_simd_input_clauses (new_var, ctx, &sctx,
5650 ivar, lvar, rvarp,
5651 &rvar2))
5652 {
5653 if (new_vard == new_var)
5654 {
5655 gcc_assert (DECL_VALUE_EXPR (new_var) == lvar);
5656 SET_DECL_VALUE_EXPR (new_var, ivar);
5657 }
5658 else
5659 {
5660 SET_DECL_VALUE_EXPR (new_vard,
5661 build_fold_addr_expr (ivar));
5662 DECL_HAS_VALUE_EXPR_P (new_vard) = 1;
5663 }
5664 x = lang_hooks.decls.omp_clause_default_ctor
5665 (c, unshare_expr (ivar),
5666 build_outer_var_ref (var, ctx));
5667 if (rvarp && ctx->for_simd_scan_phase)
5668 {
5669 if (x)
5670 gimplify_and_add (x, &llist[0]);
5671 x = lang_hooks.decls.omp_clause_dtor (c, ivar);
5672 if (x)
5673 gimplify_and_add (x, &llist[1]);
5674 break;
5675 }
5676 else if (rvarp)
5677 {
5678 if (x)
5679 {
5680 gimplify_and_add (x, &llist[0]);
5681
5682 tree ivar2 = unshare_expr (lvar);
5683 TREE_OPERAND (ivar2, 1) = sctx.idx;
5684 x = lang_hooks.decls.omp_clause_default_ctor
5685 (c, ivar2, build_outer_var_ref (var, ctx));
5686 gimplify_and_add (x, &llist[0]);
5687
5688 if (rvar2)
5689 {
5690 x = lang_hooks.decls.omp_clause_default_ctor
5691 (c, unshare_expr (rvar2),
5692 build_outer_var_ref (var, ctx));
5693 gimplify_and_add (x, &llist[0]);
5694 }
5695
5696 /* For types that need construction, add another
5697 private var which will be default constructed
5698 and optionally initialized with
5699 OMP_CLAUSE_REDUCTION_GIMPLE_INIT, since in the
5700 loop we want to assign this value instead of
5701 constructing and destructing it in each
5702 iteration. */
5703 tree nv = create_tmp_var_raw (TREE_TYPE (ivar));
5704 gimple_add_tmp_var (nv);
5705 ctx->cb.decl_map->put (TREE_OPERAND (rvar2
5706 ? rvar2
5707 : ivar, 0),
5708 nv);
5709 x = lang_hooks.decls.omp_clause_default_ctor
5710 (c, nv, build_outer_var_ref (var, ctx));
5711 gimplify_and_add (x, ilist);
5712
5713 if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c))
5714 {
5715 tseq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c);
5716 x = DECL_VALUE_EXPR (new_vard);
5717 tree vexpr = nv;
5718 if (new_vard != new_var)
5719 vexpr = build_fold_addr_expr (nv);
5720 SET_DECL_VALUE_EXPR (new_vard, vexpr);
5721 lower_omp (&tseq, ctx);
5722 SET_DECL_VALUE_EXPR (new_vard, x);
5723 gimple_seq_add_seq (ilist, tseq);
5724 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL;
5725 }
5726
5727 x = lang_hooks.decls.omp_clause_dtor (c, nv);
5728 if (x)
5729 gimplify_and_add (x, dlist);
5730 }
5731
5732 tree ref = build_outer_var_ref (var, ctx);
5733 x = unshare_expr (ivar);
5734 x = lang_hooks.decls.omp_clause_assign_op (c, x,
5735 ref);
5736 gimplify_and_add (x, &llist[0]);
5737
5738 ref = build_outer_var_ref (var, ctx);
5739 x = lang_hooks.decls.omp_clause_assign_op (c, ref,
5740 rvar);
5741 gimplify_and_add (x, &llist[3]);
5742
5743 DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
5744 if (new_vard == new_var)
5745 SET_DECL_VALUE_EXPR (new_var, lvar);
5746 else
5747 SET_DECL_VALUE_EXPR (new_vard,
5748 build_fold_addr_expr (lvar));
5749
5750 x = lang_hooks.decls.omp_clause_dtor (c, ivar);
5751 if (x)
5752 gimplify_and_add (x, &llist[1]);
5753
5754 tree ivar2 = unshare_expr (lvar);
5755 TREE_OPERAND (ivar2, 1) = sctx.idx;
5756 x = lang_hooks.decls.omp_clause_dtor (c, ivar2);
5757 if (x)
5758 gimplify_and_add (x, &llist[1]);
5759
5760 if (rvar2)
5761 {
5762 x = lang_hooks.decls.omp_clause_dtor (c, rvar2);
5763 if (x)
5764 gimplify_and_add (x, &llist[1]);
5765 }
5766 break;
5767 }
5768 if (x)
5769 gimplify_and_add (x, &llist[0]);
5770 if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c))
5771 {
5772 tseq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c);
5773 lower_omp (&tseq, ctx);
5774 gimple_seq_add_seq (&llist[0], tseq);
5775 }
5776 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL;
5777 tseq = OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c);
5778 lower_omp (&tseq, ctx);
5779 gimple_seq_add_seq (&llist[1], tseq);
5780 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
5781 DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
5782 if (new_vard == new_var)
5783 SET_DECL_VALUE_EXPR (new_var, lvar);
5784 else
5785 SET_DECL_VALUE_EXPR (new_vard,
5786 build_fold_addr_expr (lvar));
5787 x = lang_hooks.decls.omp_clause_dtor (c, ivar);
5788 if (x)
5789 gimplify_and_add (x, &llist[1]);
5790 break;
5791 }
5792 /* If this is a reference to a constant-size reduction var
5793 with a placeholder, we haven't emitted the initializer
5794 for it because that is undesirable if SIMD arrays are used.
5795 But if they aren't used, we need to emit the deferred
5796 initialization now. */
5797 else if (omp_is_reference (var) && is_simd)
5798 handle_simd_reference (clause_loc, new_vard, ilist);
5799
5800 tree lab2 = NULL_TREE;
5801 if (cond)
5802 {
5803 gimple *g;
5804 if (!is_parallel_ctx (ctx))
5805 {
5806 tree condv = create_tmp_var (boolean_type_node);
5807 tree m = build_simple_mem_ref (cond);
5808 g = gimple_build_assign (condv, m);
5809 gimple_seq_add_stmt (ilist, g);
5810 tree lab1
5811 = create_artificial_label (UNKNOWN_LOCATION);
5812 lab2 = create_artificial_label (UNKNOWN_LOCATION);
5813 g = gimple_build_cond (NE_EXPR, condv,
5814 boolean_false_node,
5815 lab2, lab1);
5816 gimple_seq_add_stmt (ilist, g);
5817 gimple_seq_add_stmt (ilist,
5818 gimple_build_label (lab1));
5819 }
5820 g = gimple_build_assign (build_simple_mem_ref (cond),
5821 boolean_true_node);
5822 gimple_seq_add_stmt (ilist, g);
5823 }
5824 x = lang_hooks.decls.omp_clause_default_ctor
5825 (c, unshare_expr (new_var),
5826 cond ? NULL_TREE
5827 : build_outer_var_ref (var, ctx));
5828 if (x)
5829 gimplify_and_add (x, ilist);
5830
5831 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
5832 && OMP_CLAUSE_REDUCTION_INSCAN (c))
5833 {
5834 if (ctx->for_simd_scan_phase)
5835 goto do_dtor;
5836 if (x || (!is_simd
5837 && OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c)))
5838 {
5839 tree nv = create_tmp_var_raw (TREE_TYPE (new_var));
5840 gimple_add_tmp_var (nv);
5841 ctx->cb.decl_map->put (new_vard, nv);
5842 x = lang_hooks.decls.omp_clause_default_ctor
5843 (c, nv, build_outer_var_ref (var, ctx));
5844 if (x)
5845 gimplify_and_add (x, ilist);
5846 if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c))
5847 {
5848 tseq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c);
5849 tree vexpr = nv;
5850 if (new_vard != new_var)
5851 vexpr = build_fold_addr_expr (nv);
5852 SET_DECL_VALUE_EXPR (new_vard, vexpr);
5853 DECL_HAS_VALUE_EXPR_P (new_vard) = 1;
5854 lower_omp (&tseq, ctx);
5855 SET_DECL_VALUE_EXPR (new_vard, NULL_TREE);
5856 DECL_HAS_VALUE_EXPR_P (new_vard) = 0;
5857 gimple_seq_add_seq (ilist, tseq);
5858 }
5859 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL;
5860 if (is_simd && ctx->scan_exclusive)
5861 {
5862 tree nv2
5863 = create_tmp_var_raw (TREE_TYPE (new_var));
5864 gimple_add_tmp_var (nv2);
5865 ctx->cb.decl_map->put (nv, nv2);
5866 x = lang_hooks.decls.omp_clause_default_ctor
5867 (c, nv2, build_outer_var_ref (var, ctx));
5868 gimplify_and_add (x, ilist);
5869 x = lang_hooks.decls.omp_clause_dtor (c, nv2);
5870 if (x)
5871 gimplify_and_add (x, dlist);
5872 }
5873 x = lang_hooks.decls.omp_clause_dtor (c, nv);
5874 if (x)
5875 gimplify_and_add (x, dlist);
5876 }
5877 else if (is_simd
5878 && ctx->scan_exclusive
5879 && TREE_ADDRESSABLE (TREE_TYPE (new_var)))
5880 {
5881 tree nv2 = create_tmp_var_raw (TREE_TYPE (new_var));
5882 gimple_add_tmp_var (nv2);
5883 ctx->cb.decl_map->put (new_vard, nv2);
5884 x = lang_hooks.decls.omp_clause_dtor (c, nv2);
5885 if (x)
5886 gimplify_and_add (x, dlist);
5887 }
5888 DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
5889 goto do_dtor;
5890 }
5891
5892 if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c))
5893 {
5894 tseq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c);
5895 lower_omp (&tseq, ctx);
5896 gimple_seq_add_seq (ilist, tseq);
5897 }
5898 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL;
5899 if (is_simd)
5900 {
5901 tseq = OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c);
5902 lower_omp (&tseq, ctx);
5903 gimple_seq_add_seq (dlist, tseq);
5904 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
5905 }
5906 DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
5907 if (cond)
5908 {
5909 if (lab2)
5910 gimple_seq_add_stmt (ilist, gimple_build_label (lab2));
5911 break;
5912 }
5913 goto do_dtor;
5914 }
5915 else
5916 {
5917 x = omp_reduction_init (c, TREE_TYPE (new_var));
5918 gcc_assert (TREE_CODE (TREE_TYPE (new_var)) != ARRAY_TYPE);
5919 enum tree_code code = OMP_CLAUSE_REDUCTION_CODE (c);
5920
5921 if (cond)
5922 {
5923 gimple *g;
5924 tree lab2 = NULL_TREE;
5925 /* GOMP_taskgroup_reduction_register memsets the whole
5926 array to zero. If the initializer is zero, we don't
5927 need to initialize it again; just mark it as ever
5928 used unconditionally, i.e. cond = true. */
5929 if (initializer_zerop (x))
5930 {
5931 g = gimple_build_assign (build_simple_mem_ref (cond),
5932 boolean_true_node);
5933 gimple_seq_add_stmt (ilist, g);
5934 break;
5935 }
5936
5937 /* Otherwise, emit
5938 if (!cond) { cond = true; new_var = x; } */
5939 if (!is_parallel_ctx (ctx))
5940 {
5941 tree condv = create_tmp_var (boolean_type_node);
5942 tree m = build_simple_mem_ref (cond);
5943 g = gimple_build_assign (condv, m);
5944 gimple_seq_add_stmt (ilist, g);
5945 tree lab1
5946 = create_artificial_label (UNKNOWN_LOCATION);
5947 lab2 = create_artificial_label (UNKNOWN_LOCATION);
5948 g = gimple_build_cond (NE_EXPR, condv,
5949 boolean_false_node,
5950 lab2, lab1);
5951 gimple_seq_add_stmt (ilist, g);
5952 gimple_seq_add_stmt (ilist,
5953 gimple_build_label (lab1));
5954 }
5955 g = gimple_build_assign (build_simple_mem_ref (cond),
5956 boolean_true_node);
5957 gimple_seq_add_stmt (ilist, g);
5958 gimplify_assign (new_var, x, ilist);
5959 if (lab2)
5960 gimple_seq_add_stmt (ilist, gimple_build_label (lab2));
5961 break;
5962 }
5963
5964 /* reduction(-:var) sums up the partial results, so it
5965 acts identically to reduction(+:var). */
5966 if (code == MINUS_EXPR)
5967 code = PLUS_EXPR;
5968
5969 tree new_vard = new_var;
5970 if (is_simd && omp_is_reference (var))
5971 {
5972 gcc_assert (TREE_CODE (new_var) == MEM_REF);
5973 new_vard = TREE_OPERAND (new_var, 0);
5974 gcc_assert (DECL_P (new_vard));
5975 }
5976 tree rvar = NULL_TREE, *rvarp = NULL, rvar2 = NULL_TREE;
5977 if (is_simd
5978 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
5979 && OMP_CLAUSE_REDUCTION_INSCAN (c))
5980 rvarp = &rvar;
5981 if (is_simd
5982 && lower_rec_simd_input_clauses (new_var, ctx, &sctx,
5983 ivar, lvar, rvarp,
5984 &rvar2))
5985 {
5986 if (new_vard != new_var)
5987 {
5988 SET_DECL_VALUE_EXPR (new_vard,
5989 build_fold_addr_expr (lvar));
5990 DECL_HAS_VALUE_EXPR_P (new_vard) = 1;
5991 }
5992
5993 tree ref = build_outer_var_ref (var, ctx);
5994
5995 if (rvarp)
5996 {
5997 if (ctx->for_simd_scan_phase)
5998 break;
5999 gimplify_assign (ivar, ref, &llist[0]);
6000 ref = build_outer_var_ref (var, ctx);
6001 gimplify_assign (ref, rvar, &llist[3]);
6002 break;
6003 }
6004
6005 gimplify_assign (unshare_expr (ivar), x, &llist[0]);
6006
6007 if (sctx.is_simt)
6008 {
6009 if (!simt_lane)
6010 simt_lane = create_tmp_var (unsigned_type_node);
6011 x = build_call_expr_internal_loc
6012 (UNKNOWN_LOCATION, IFN_GOMP_SIMT_XCHG_BFLY,
6013 TREE_TYPE (ivar), 2, ivar, simt_lane);
6014 x = build2 (code, TREE_TYPE (ivar), ivar, x);
6015 gimplify_assign (ivar, x, &llist[2]);
6016 }
6017 x = build2 (code, TREE_TYPE (ref), ref, ivar);
6018 ref = build_outer_var_ref (var, ctx);
6019 gimplify_assign (ref, x, &llist[1]);
6020
6021 }
6022 else
6023 {
6024 if (omp_is_reference (var) && is_simd)
6025 handle_simd_reference (clause_loc, new_vard, ilist);
6026 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
6027 && OMP_CLAUSE_REDUCTION_INSCAN (c))
6028 break;
6029 gimplify_assign (new_var, x, ilist);
6030 if (is_simd)
6031 {
6032 tree ref = build_outer_var_ref (var, ctx);
6033
6034 x = build2 (code, TREE_TYPE (ref), ref, new_var);
6035 ref = build_outer_var_ref (var, ctx);
6036 gimplify_assign (ref, x, dlist);
6037 }
6038 }
6039 }
6040 break;
6041
6042 default:
6043 gcc_unreachable ();
6044 }
6045 }
6046 }
6047 if (tskred_avar)
6048 {
6049 tree clobber = build_clobber (TREE_TYPE (tskred_avar));
6050 gimple_seq_add_stmt (ilist, gimple_build_assign (tskred_avar, clobber));
6051 }
6052
6053 if (known_eq (sctx.max_vf, 1U))
6054 {
6055 sctx.is_simt = false;
6056 if (ctx->lastprivate_conditional_map)
6057 {
6058 if (gimple_omp_for_combined_into_p (ctx->stmt))
6059 {
6060 /* Signal to lower_omp_1 that it should use the parent context. */
6061 ctx->combined_into_simd_safelen1 = true;
6062 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
6063 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
6064 && OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c))
6065 {
6066 tree o = lookup_decl (OMP_CLAUSE_DECL (c), ctx);
6067 omp_context *outer = ctx->outer;
6068 if (gimple_code (outer->stmt) == GIMPLE_OMP_SCAN)
6069 outer = outer->outer;
6070 tree *v = ctx->lastprivate_conditional_map->get (o);
6071 tree po = lookup_decl (OMP_CLAUSE_DECL (c), outer);
6072 tree *pv = outer->lastprivate_conditional_map->get (po);
6073 *v = *pv;
6074 }
6075 }
6076 else
6077 {
6078 /* When not vectorized, treat lastprivate(conditional:) like
6079 normal lastprivate, as there will be just one simd lane
6080 writing the privatized variable. */
6081 delete ctx->lastprivate_conditional_map;
6082 ctx->lastprivate_conditional_map = NULL;
6083 }
6084 }
6085 }
6086
6087 if (nonconst_simd_if)
6088 {
6089 if (sctx.lane == NULL_TREE)
6090 {
6091 sctx.idx = create_tmp_var (unsigned_type_node);
6092 sctx.lane = create_tmp_var (unsigned_type_node);
6093 }
6094 /* FIXME: For now. */
6095 sctx.is_simt = false;
6096 }
6097
6098 if (sctx.lane || sctx.is_simt)
6099 {
6100 uid = create_tmp_var (ptr_type_node, "simduid");
6101 /* We don't want uninit warnings on simduid; it is always uninitialized,
6102 as we use it not for its value but only for its DECL_UID. */
6103 TREE_NO_WARNING (uid) = 1;
6104 c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__SIMDUID_);
6105 OMP_CLAUSE__SIMDUID__DECL (c) = uid;
6106 OMP_CLAUSE_CHAIN (c) = gimple_omp_for_clauses (ctx->stmt);
6107 gimple_omp_for_set_clauses (ctx->stmt, c);
6108 }
6109 /* Emit calls denoting privatized variables and initializing a pointer to
6110 the structure that holds private variables as fields after the ompdevlow pass. */
6111 if (sctx.is_simt)
6112 {
6113 sctx.simt_eargs[0] = uid;
6114 gimple *g
6115 = gimple_build_call_internal_vec (IFN_GOMP_SIMT_ENTER, sctx.simt_eargs);
6116 gimple_call_set_lhs (g, uid);
6117 gimple_seq_add_stmt (ilist, g);
6118 sctx.simt_eargs.release ();
6119
6120 simtrec = create_tmp_var (ptr_type_node, ".omp_simt");
6121 g = gimple_build_call_internal (IFN_GOMP_SIMT_ENTER_ALLOC, 1, uid);
6122 gimple_call_set_lhs (g, simtrec);
6123 gimple_seq_add_stmt (ilist, g);
6124 }
6125 if (sctx.lane)
6126 {
6127 gimple *g = gimple_build_call_internal (IFN_GOMP_SIMD_LANE,
6128 2 + (nonconst_simd_if != NULL),
6129 uid, integer_zero_node,
6130 nonconst_simd_if);
6131 gimple_call_set_lhs (g, sctx.lane);
6132 gimple_stmt_iterator gsi = gsi_start_1 (gimple_omp_body_ptr (ctx->stmt));
6133 gsi_insert_before_without_update (&gsi, g, GSI_SAME_STMT);
6134 g = gimple_build_assign (sctx.lane, INTEGER_CST,
6135 build_int_cst (unsigned_type_node, 0));
6136 gimple_seq_add_stmt (ilist, g);
6137 if (sctx.lastlane)
6138 {
6139 g = gimple_build_call_internal (IFN_GOMP_SIMD_LAST_LANE,
6140 2, uid, sctx.lane);
6141 gimple_call_set_lhs (g, sctx.lastlane);
6142 gimple_seq_add_stmt (dlist, g);
6143 gimple_seq_add_seq (dlist, llist[3]);
6144 }
6145 /* Emit reductions across SIMT lanes in log_2(simt_vf) steps. */
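/* Conceptually, the emitted sequence is
     simt_vf = GOMP_SIMT_VF ();
     for (simt_lane = 1; simt_lane < simt_vf; simt_lane <<= 1)
       var = var OP GOMP_SIMT_XCHG_BFLY (var, simt_lane);
   with llist[2] supplying the OP/XCHG_BFLY body recorded earlier.  */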
6146 if (llist[2])
6147 {
6148 tree simt_vf = create_tmp_var (unsigned_type_node);
6149 g = gimple_build_call_internal (IFN_GOMP_SIMT_VF, 0);
6150 gimple_call_set_lhs (g, simt_vf);
6151 gimple_seq_add_stmt (dlist, g);
6152
6153 tree t = build_int_cst (unsigned_type_node, 1);
6154 g = gimple_build_assign (simt_lane, INTEGER_CST, t);
6155 gimple_seq_add_stmt (dlist, g);
6156
6157 t = build_int_cst (unsigned_type_node, 0);
6158 g = gimple_build_assign (sctx.idx, INTEGER_CST, t);
6159 gimple_seq_add_stmt (dlist, g);
6160
6161 tree body = create_artificial_label (UNKNOWN_LOCATION);
6162 tree header = create_artificial_label (UNKNOWN_LOCATION);
6163 tree end = create_artificial_label (UNKNOWN_LOCATION);
6164 gimple_seq_add_stmt (dlist, gimple_build_goto (header));
6165 gimple_seq_add_stmt (dlist, gimple_build_label (body));
6166
6167 gimple_seq_add_seq (dlist, llist[2]);
6168
6169 g = gimple_build_assign (simt_lane, LSHIFT_EXPR, simt_lane, integer_one_node);
6170 gimple_seq_add_stmt (dlist, g);
6171
6172 gimple_seq_add_stmt (dlist, gimple_build_label (header));
6173 g = gimple_build_cond (LT_EXPR, simt_lane, simt_vf, body, end);
6174 gimple_seq_add_stmt (dlist, g);
6175
6176 gimple_seq_add_stmt (dlist, gimple_build_label (end));
6177 }
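/* Likewise, wrap the per-lane privatization (llist[0]) and destruction
   (llist[1]) sequences in a loop over all lanes; roughly
     vf = GOMP_SIMD_VF (simduid);
     for (idx = 0; idx < vf; idx++)
       <llist[i] statements>;
   emitted into ilist for i == 0 and into dlist for i == 1.  */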
6178 for (int i = 0; i < 2; i++)
6179 if (llist[i])
6180 {
6181 tree vf = create_tmp_var (unsigned_type_node);
6182 g = gimple_build_call_internal (IFN_GOMP_SIMD_VF, 1, uid);
6183 gimple_call_set_lhs (g, vf);
6184 gimple_seq *seq = i == 0 ? ilist : dlist;
6185 gimple_seq_add_stmt (seq, g);
6186 tree t = build_int_cst (unsigned_type_node, 0);
6187 g = gimple_build_assign (sctx.idx, INTEGER_CST, t);
6188 gimple_seq_add_stmt (seq, g);
6189 tree body = create_artificial_label (UNKNOWN_LOCATION);
6190 tree header = create_artificial_label (UNKNOWN_LOCATION);
6191 tree end = create_artificial_label (UNKNOWN_LOCATION);
6192 gimple_seq_add_stmt (seq, gimple_build_goto (header));
6193 gimple_seq_add_stmt (seq, gimple_build_label (body));
6194 gimple_seq_add_seq (seq, llist[i]);
6195 t = build_int_cst (unsigned_type_node, 1);
6196 g = gimple_build_assign (sctx.idx, PLUS_EXPR, sctx.idx, t);
6197 gimple_seq_add_stmt (seq, g);
6198 gimple_seq_add_stmt (seq, gimple_build_label (header));
6199 g = gimple_build_cond (LT_EXPR, sctx.idx, vf, body, end);
6200 gimple_seq_add_stmt (seq, g);
6201 gimple_seq_add_stmt (seq, gimple_build_label (end));
6202 }
6203 }
6204 if (sctx.is_simt)
6205 {
6206 gimple_seq_add_seq (dlist, sctx.simt_dlist);
6207 gimple *g
6208 = gimple_build_call_internal (IFN_GOMP_SIMT_EXIT, 1, simtrec);
6209 gimple_seq_add_stmt (dlist, g);
6210 }
6211
6212 /* The copyin sequence is not to be executed by the main thread, since
6213 that would result in self-copies. Perhaps not observable for scalars,
6214 but it certainly is for C++ operator=. */
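/* I.e. emit
     if (omp_get_thread_num () != 0)
       <copyin_seq>;
   so that only the non-master threads perform the copies.  */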
6215 if (copyin_seq)
6216 {
6217 x = build_call_expr (builtin_decl_explicit (BUILT_IN_OMP_GET_THREAD_NUM),
6218 0);
6219 x = build2 (NE_EXPR, boolean_type_node, x,
6220 build_int_cst (TREE_TYPE (x), 0));
6221 x = build3 (COND_EXPR, void_type_node, x, copyin_seq, NULL);
6222 gimplify_and_add (x, ilist);
6223 }
6224
6225 /* If any copyin variable is passed by reference, we must ensure the
6226 master thread doesn't modify it before it is copied over in all
6227 threads. Similarly, for variables in both firstprivate and
6228 lastprivate clauses we need to ensure that the lastprivate copying
6229 happens after the firstprivate copying in all threads. And similarly
6230 for UDRs if the initializer expression refers to omp_orig. */
6231 if (copyin_by_ref || lastprivate_firstprivate
6232 || (reduction_omp_orig_ref
6233 && !ctx->scan_inclusive
6234 && !ctx->scan_exclusive))
6235 {
6236 /* Don't add any barrier for #pragma omp simd or
6237 #pragma omp distribute. */
6238 if (!is_task_ctx (ctx)
6239 && (gimple_code (ctx->stmt) != GIMPLE_OMP_FOR
6240 || gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_FOR))
6241 gimple_seq_add_stmt (ilist, omp_build_barrier (NULL_TREE));
6242 }
6243
6244 /* If max_vf is non-zero, then we can use only a vectorization factor
6245 up to the max_vf we chose. So stick it into the safelen clause. */
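/* E.g. if we settled on sctx.max_vf == 16 and there is either no safelen
   clause or an existing safelen(32) that is larger, prepend safelen(16)
   so the vectorizer cannot choose a bigger vectorization factor.  */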
6246 if (maybe_ne (sctx.max_vf, 0U))
6247 {
6248 tree c = omp_find_clause (gimple_omp_for_clauses (ctx->stmt),
6249 OMP_CLAUSE_SAFELEN);
6250 poly_uint64 safe_len;
6251 if (c == NULL_TREE
6252 || (poly_int_tree_p (OMP_CLAUSE_SAFELEN_EXPR (c), &safe_len)
6253 && maybe_gt (safe_len, sctx.max_vf)))
6254 {
6255 c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE_SAFELEN);
6256 OMP_CLAUSE_SAFELEN_EXPR (c) = build_int_cst (integer_type_node,
6257 sctx.max_vf);
6258 OMP_CLAUSE_CHAIN (c) = gimple_omp_for_clauses (ctx->stmt);
6259 gimple_omp_for_set_clauses (ctx->stmt, c);
6260 }
6261 }
6262 }
6263
6264 /* Create temporary variables for lastprivate(conditional:) implementation
6265 in context CTX with CLAUSES. */
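/* E.g. given
     #pragma omp for lastprivate(conditional: x)
     for (i = 0; i < N; i++)
       if (a[i])
         x = i;
   x must receive the value from the lexically last iteration that
   actually assigned it, so an iteration-counter temporary (_condtemp_)
   is created here and used to remember which write happened last.  */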
6266
6267 static void
6268 lower_lastprivate_conditional_clauses (tree *clauses, omp_context *ctx)
6269 {
6270 tree iter_type = NULL_TREE;
6271 tree cond_ptr = NULL_TREE;
6272 tree iter_var = NULL_TREE;
6273 bool is_simd = (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
6274 && gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_SIMD);
6275 tree next = *clauses;
6276 for (tree c = *clauses; c; c = OMP_CLAUSE_CHAIN (c))
6277 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
6278 && OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c))
6279 {
6280 if (is_simd)
6281 {
6282 tree cc = omp_find_clause (next, OMP_CLAUSE__CONDTEMP_);
6283 gcc_assert (cc);
6284 if (iter_type == NULL_TREE)
6285 {
6286 iter_type = TREE_TYPE (OMP_CLAUSE_DECL (cc));
6287 iter_var = create_tmp_var_raw (iter_type);
6288 DECL_CONTEXT (iter_var) = current_function_decl;
6289 DECL_SEEN_IN_BIND_EXPR_P (iter_var) = 1;
6290 DECL_CHAIN (iter_var) = ctx->block_vars;
6291 ctx->block_vars = iter_var;
6292 tree c3
6293 = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__CONDTEMP_);
6294 OMP_CLAUSE__CONDTEMP__ITER (c3) = 1;
6295 OMP_CLAUSE_DECL (c3) = iter_var;
6296 OMP_CLAUSE_CHAIN (c3) = *clauses;
6297 *clauses = c3;
6298 ctx->lastprivate_conditional_map = new hash_map<tree, tree>;
6299 }
6300 next = OMP_CLAUSE_CHAIN (cc);
6301 tree o = lookup_decl (OMP_CLAUSE_DECL (c), ctx);
6302 tree v = lookup_decl (OMP_CLAUSE_DECL (cc), ctx);
6303 ctx->lastprivate_conditional_map->put (o, v);
6304 continue;
6305 }
6306 if (iter_type == NULL)
6307 {
6308 if (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR)
6309 {
6310 struct omp_for_data fd;
6311 omp_extract_for_data (as_a <gomp_for *> (ctx->stmt), &fd,
6312 NULL);
6313 iter_type = unsigned_type_for (fd.iter_type);
6314 }
6315 else if (gimple_code (ctx->stmt) == GIMPLE_OMP_SECTIONS)
6316 iter_type = unsigned_type_node;
6317 tree c2 = omp_find_clause (*clauses, OMP_CLAUSE__CONDTEMP_);
6318 if (c2)
6319 {
6320 cond_ptr
6321 = lookup_decl_in_outer_ctx (OMP_CLAUSE_DECL (c2), ctx);
6322 OMP_CLAUSE_DECL (c2) = cond_ptr;
6323 }
6324 else
6325 {
6326 cond_ptr = create_tmp_var_raw (build_pointer_type (iter_type));
6327 DECL_CONTEXT (cond_ptr) = current_function_decl;
6328 DECL_SEEN_IN_BIND_EXPR_P (cond_ptr) = 1;
6329 DECL_CHAIN (cond_ptr) = ctx->block_vars;
6330 ctx->block_vars = cond_ptr;
6331 c2 = build_omp_clause (UNKNOWN_LOCATION,
6332 OMP_CLAUSE__CONDTEMP_);
6333 OMP_CLAUSE_DECL (c2) = cond_ptr;
6334 OMP_CLAUSE_CHAIN (c2) = *clauses;
6335 *clauses = c2;
6336 }
6337 iter_var = create_tmp_var_raw (iter_type);
6338 DECL_CONTEXT (iter_var) = current_function_decl;
6339 DECL_SEEN_IN_BIND_EXPR_P (iter_var) = 1;
6340 DECL_CHAIN (iter_var) = ctx->block_vars;
6341 ctx->block_vars = iter_var;
6342 tree c3
6343 = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__CONDTEMP_);
6344 OMP_CLAUSE__CONDTEMP__ITER (c3) = 1;
6345 OMP_CLAUSE_DECL (c3) = iter_var;
6346 OMP_CLAUSE_CHAIN (c3) = OMP_CLAUSE_CHAIN (c2);
6347 OMP_CLAUSE_CHAIN (c2) = c3;
6348 ctx->lastprivate_conditional_map = new hash_map<tree, tree>;
6349 }
6350 tree v = create_tmp_var_raw (iter_type);
6351 DECL_CONTEXT (v) = current_function_decl;
6352 DECL_SEEN_IN_BIND_EXPR_P (v) = 1;
6353 DECL_CHAIN (v) = ctx->block_vars;
6354 ctx->block_vars = v;
6355 tree o = lookup_decl (OMP_CLAUSE_DECL (c), ctx);
6356 ctx->lastprivate_conditional_map->put (o, v);
6357 }
6358 }
6359
6360
6361 /* Generate code to implement the LASTPRIVATE clauses. This is used for
6362 both parallel and workshare constructs. PREDICATE may be NULL if it's
6363 always true. BODY_P is the sequence in which to insert early
6364 initialization if needed, STMT_LIST is where the non-conditional
6365 lastprivate handling goes, and CSTMT_LIST is a sequence that needs to be
6366 run in a critical section. */
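/* E.g. for
     #pragma omp for lastprivate(x)
   this emits, guarded by PREDICATE (conceptually "this was the last
   iteration"),
     if (<predicate>)
       <original> x = <private> x;  */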
6367
6368 static void
6369 lower_lastprivate_clauses (tree clauses, tree predicate, gimple_seq *body_p,
6370 gimple_seq *stmt_list, gimple_seq *cstmt_list,
6371 omp_context *ctx)
6372 {
6373 tree x, c, label = NULL, orig_clauses = clauses;
6374 bool par_clauses = false;
6375 tree simduid = NULL, lastlane = NULL, simtcond = NULL, simtlast = NULL;
6376 unsigned HOST_WIDE_INT conditional_off = 0;
6377 gimple_seq post_stmt_list = NULL;
6378
6379 /* Early exit if there are no lastprivate or linear clauses. */
6380 for (; clauses ; clauses = OMP_CLAUSE_CHAIN (clauses))
6381 if (OMP_CLAUSE_CODE (clauses) == OMP_CLAUSE_LASTPRIVATE
6382 || (OMP_CLAUSE_CODE (clauses) == OMP_CLAUSE_LINEAR
6383 && !OMP_CLAUSE_LINEAR_NO_COPYOUT (clauses)))
6384 break;
6385 if (clauses == NULL)
6386 {
6387 /* If this was a workshare clause, see if it had been combined
6388 with its parallel. In that case, look for the clauses on the
6389 parallel statement itself. */
6390 if (is_parallel_ctx (ctx))
6391 return;
6392
6393 ctx = ctx->outer;
6394 if (ctx == NULL || !is_parallel_ctx (ctx))
6395 return;
6396
6397 clauses = omp_find_clause (gimple_omp_parallel_clauses (ctx->stmt),
6398 OMP_CLAUSE_LASTPRIVATE);
6399 if (clauses == NULL)
6400 return;
6401 par_clauses = true;
6402 }
6403
6404 bool maybe_simt = false;
6405 if (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
6406 && gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_SIMD)
6407 {
6408 maybe_simt = omp_find_clause (orig_clauses, OMP_CLAUSE__SIMT_);
6409 simduid = omp_find_clause (orig_clauses, OMP_CLAUSE__SIMDUID_);
6410 if (simduid)
6411 simduid = OMP_CLAUSE__SIMDUID__DECL (simduid);
6412 }
6413
6414 if (predicate)
6415 {
6416 gcond *stmt;
6417 tree label_true, arm1, arm2;
6418 enum tree_code pred_code = TREE_CODE (predicate);
6419
6420 label = create_artificial_label (UNKNOWN_LOCATION);
6421 label_true = create_artificial_label (UNKNOWN_LOCATION);
6422 if (TREE_CODE_CLASS (pred_code) == tcc_comparison)
6423 {
6424 arm1 = TREE_OPERAND (predicate, 0);
6425 arm2 = TREE_OPERAND (predicate, 1);
6426 gimplify_expr (&arm1, stmt_list, NULL, is_gimple_val, fb_rvalue);
6427 gimplify_expr (&arm2, stmt_list, NULL, is_gimple_val, fb_rvalue);
6428 }
6429 else
6430 {
6431 arm1 = predicate;
6432 gimplify_expr (&arm1, stmt_list, NULL, is_gimple_val, fb_rvalue);
6433 arm2 = boolean_false_node;
6434 pred_code = NE_EXPR;
6435 }
6436 if (maybe_simt)
6437 {
6438 c = build2 (pred_code, boolean_type_node, arm1, arm2);
6439 c = fold_convert (integer_type_node, c);
6440 simtcond = create_tmp_var (integer_type_node);
6441 gimplify_assign (simtcond, c, stmt_list);
6442 gcall *g = gimple_build_call_internal (IFN_GOMP_SIMT_VOTE_ANY,
6443 1, simtcond);
6444 c = create_tmp_var (integer_type_node);
6445 gimple_call_set_lhs (g, c);
6446 gimple_seq_add_stmt (stmt_list, g);
6447 stmt = gimple_build_cond (NE_EXPR, c, integer_zero_node,
6448 label_true, label);
6449 }
6450 else
6451 stmt = gimple_build_cond (pred_code, arm1, arm2, label_true, label);
6452 gimple_seq_add_stmt (stmt_list, stmt);
6453 gimple_seq_add_stmt (stmt_list, gimple_build_label (label_true));
6454 }
6455
6456 tree cond_ptr = NULL_TREE;
6457 for (c = clauses; c ;)
6458 {
6459 tree var, new_var;
6460 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
6461 gimple_seq *this_stmt_list = stmt_list;
6462 tree lab2 = NULL_TREE;
6463
6464 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
6465 && OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c)
6466 && ctx->lastprivate_conditional_map
6467 && !ctx->combined_into_simd_safelen1)
6468 {
6469 gcc_assert (body_p);
6470 if (simduid)
6471 goto next;
6472 if (cond_ptr == NULL_TREE)
6473 {
6474 cond_ptr = omp_find_clause (orig_clauses, OMP_CLAUSE__CONDTEMP_);
6475 cond_ptr = OMP_CLAUSE_DECL (cond_ptr);
6476 }
6477 tree type = TREE_TYPE (TREE_TYPE (cond_ptr));
6478 tree o = lookup_decl (OMP_CLAUSE_DECL (c), ctx);
6479 tree v = *ctx->lastprivate_conditional_map->get (o);
6480 gimplify_assign (v, build_zero_cst (type), body_p);
6481 this_stmt_list = cstmt_list;
6482 tree mem;
6483 if (POINTER_TYPE_P (TREE_TYPE (cond_ptr)))
6484 {
6485 mem = build2 (MEM_REF, type, cond_ptr,
6486 build_int_cst (TREE_TYPE (cond_ptr),
6487 conditional_off));
6488 conditional_off += tree_to_uhwi (TYPE_SIZE_UNIT (type));
6489 }
6490 else
6491 mem = build4 (ARRAY_REF, type, cond_ptr,
6492 size_int (conditional_off++), NULL_TREE, NULL_TREE);
6493 tree mem2 = copy_node (mem);
6494 gimple_seq seq = NULL;
6495 mem = force_gimple_operand (mem, &seq, true, NULL_TREE);
6496 gimple_seq_add_seq (this_stmt_list, seq);
6497 tree lab1 = create_artificial_label (UNKNOWN_LOCATION);
6498 lab2 = create_artificial_label (UNKNOWN_LOCATION);
6499 gimple *g = gimple_build_cond (GT_EXPR, v, mem, lab1, lab2);
6500 gimple_seq_add_stmt (this_stmt_list, g);
6501 gimple_seq_add_stmt (this_stmt_list, gimple_build_label (lab1));
6502 gimplify_assign (mem2, v, this_stmt_list);
6503 }
6504 else if (predicate
6505 && ctx->combined_into_simd_safelen1
6506 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
6507 && OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c)
6508 && ctx->lastprivate_conditional_map)
6509 this_stmt_list = &post_stmt_list;
6510
6511 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
6512 || (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
6513 && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c)))
6514 {
6515 var = OMP_CLAUSE_DECL (c);
6516 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
6517 && OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c)
6518 && is_taskloop_ctx (ctx))
6519 {
6520 gcc_checking_assert (ctx->outer && is_task_ctx (ctx->outer));
6521 new_var = lookup_decl (var, ctx->outer);
6522 }
6523 else
6524 {
6525 new_var = lookup_decl (var, ctx);
6526 /* Avoid uninitialized warnings for lastprivate and
6527 for linear iterators. */
6528 if (predicate
6529 && (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
6530 || OMP_CLAUSE_LINEAR_NO_COPYIN (c)))
6531 TREE_NO_WARNING (new_var) = 1;
6532 }
6533
6534 if (!maybe_simt && simduid && DECL_HAS_VALUE_EXPR_P (new_var))
6535 {
6536 tree val = DECL_VALUE_EXPR (new_var);
6537 if (TREE_CODE (val) == ARRAY_REF
6538 && VAR_P (TREE_OPERAND (val, 0))
6539 && lookup_attribute ("omp simd array",
6540 DECL_ATTRIBUTES (TREE_OPERAND (val,
6541 0))))
6542 {
6543 if (lastlane == NULL)
6544 {
6545 lastlane = create_tmp_var (unsigned_type_node);
6546 gcall *g
6547 = gimple_build_call_internal (IFN_GOMP_SIMD_LAST_LANE,
6548 2, simduid,
6549 TREE_OPERAND (val, 1));
6550 gimple_call_set_lhs (g, lastlane);
6551 gimple_seq_add_stmt (this_stmt_list, g);
6552 }
6553 new_var = build4 (ARRAY_REF, TREE_TYPE (val),
6554 TREE_OPERAND (val, 0), lastlane,
6555 NULL_TREE, NULL_TREE);
6556 TREE_THIS_NOTRAP (new_var) = 1;
6557 }
6558 }
6559 else if (maybe_simt)
6560 {
6561 tree val = (DECL_HAS_VALUE_EXPR_P (new_var)
6562 ? DECL_VALUE_EXPR (new_var)
6563 : new_var);
6564 if (simtlast == NULL)
6565 {
6566 simtlast = create_tmp_var (unsigned_type_node);
6567 gcall *g = gimple_build_call_internal
6568 (IFN_GOMP_SIMT_LAST_LANE, 1, simtcond);
6569 gimple_call_set_lhs (g, simtlast);
6570 gimple_seq_add_stmt (this_stmt_list, g);
6571 }
6572 x = build_call_expr_internal_loc
6573 (UNKNOWN_LOCATION, IFN_GOMP_SIMT_XCHG_IDX,
6574 TREE_TYPE (val), 2, val, simtlast);
6575 new_var = unshare_expr (new_var);
6576 gimplify_assign (new_var, x, this_stmt_list);
6577 new_var = unshare_expr (new_var);
6578 }
6579
6580 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
6581 && OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c))
6582 {
6583 lower_omp (&OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c), ctx);
6584 gimple_seq_add_seq (this_stmt_list,
6585 OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c));
6586 OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c) = NULL;
6587 }
6588 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
6589 && OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c))
6590 {
6591 lower_omp (&OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c), ctx);
6592 gimple_seq_add_seq (this_stmt_list,
6593 OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c));
6594 OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c) = NULL;
6595 }
6596
6597 x = NULL_TREE;
6598 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
6599 && OMP_CLAUSE_LASTPRIVATE_LOOP_IV (c)
6600 && is_taskloop_ctx (ctx))
6601 {
6602 tree ovar = maybe_lookup_decl_in_outer_ctx (var,
6603 ctx->outer->outer);
6604 if (is_global_var (ovar))
6605 x = ovar;
6606 }
6607 if (!x)
6608 x = build_outer_var_ref (var, ctx, OMP_CLAUSE_LASTPRIVATE);
6609 if (omp_is_reference (var))
6610 new_var = build_simple_mem_ref_loc (clause_loc, new_var);
6611 x = lang_hooks.decls.omp_clause_assign_op (c, x, new_var);
6612 gimplify_and_add (x, this_stmt_list);
6613
6614 if (lab2)
6615 gimple_seq_add_stmt (this_stmt_list, gimple_build_label (lab2));
6616 }
6617
6618 next:
6619 c = OMP_CLAUSE_CHAIN (c);
6620 if (c == NULL && !par_clauses)
6621 {
6622 /* If this was a workshare clause, see if it had been combined
6623 with its parallel. In that case, continue looking for the
6624 clauses also on the parallel statement itself. */
6625 if (is_parallel_ctx (ctx))
6626 break;
6627
6628 ctx = ctx->outer;
6629 if (ctx == NULL || !is_parallel_ctx (ctx))
6630 break;
6631
6632 c = omp_find_clause (gimple_omp_parallel_clauses (ctx->stmt),
6633 OMP_CLAUSE_LASTPRIVATE);
6634 par_clauses = true;
6635 }
6636 }
6637
6638 if (label)
6639 gimple_seq_add_stmt (stmt_list, gimple_build_label (label));
6640 gimple_seq_add_seq (stmt_list, post_stmt_list);
6641 }
6642
6643 /* Lower the OpenACC reductions of CLAUSES for compute axis LEVEL
6644 (which might be a placeholder). INNER is true if this is an inner
6645 axis of a multi-axis loop. FORK and JOIN are (optional) fork and
6646 join markers. Generate the before-loop forking sequence in
6647 FORK_SEQ and the after-loop joining sequence in JOIN_SEQ. The
6648 general form of these sequences is
6649
6650 GOACC_REDUCTION_SETUP
6651 GOACC_FORK
6652 GOACC_REDUCTION_INIT
6653 ...
6654 GOACC_REDUCTION_FINI
6655 GOACC_JOIN
6656 GOACC_REDUCTION_TEARDOWN. */
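/* E.g. for
     #pragma acc parallel loop reduction(+:r)
   each of the four GOACC_REDUCTION markers above becomes an
   IFN_GOACC_REDUCTION internal call whose first argument selects
   SETUP/INIT/FINI/TEARDOWN; the later device lowering expands those
   calls according to the gang/worker/vector level being reduced.  */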
6657
6658 static void
6659 lower_oacc_reductions (location_t loc, tree clauses, tree level, bool inner,
6660 gcall *fork, gcall *join, gimple_seq *fork_seq,
6661 gimple_seq *join_seq, omp_context *ctx)
6662 {
6663 gimple_seq before_fork = NULL;
6664 gimple_seq after_fork = NULL;
6665 gimple_seq before_join = NULL;
6666 gimple_seq after_join = NULL;
6667 tree init_code = NULL_TREE, fini_code = NULL_TREE,
6668 setup_code = NULL_TREE, teardown_code = NULL_TREE;
6669 unsigned offset = 0;
6670
6671 for (tree c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
6672 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION)
6673 {
6674 tree orig = OMP_CLAUSE_DECL (c);
6675 tree var = maybe_lookup_decl (orig, ctx);
6676 tree ref_to_res = NULL_TREE;
6677 tree incoming, outgoing, v1, v2, v3;
6678 bool is_private = false;
6679
6680 enum tree_code rcode = OMP_CLAUSE_REDUCTION_CODE (c);
6681 if (rcode == MINUS_EXPR)
6682 rcode = PLUS_EXPR;
6683 else if (rcode == TRUTH_ANDIF_EXPR)
6684 rcode = BIT_AND_EXPR;
6685 else if (rcode == TRUTH_ORIF_EXPR)
6686 rcode = BIT_IOR_EXPR;
6687 tree op = build_int_cst (unsigned_type_node, rcode);
6688
6689 if (!var)
6690 var = orig;
6691
6692 incoming = outgoing = var;
6693
6694 if (!inner)
6695 {
6696 /* See if an outer construct also reduces this variable. */
6697 omp_context *outer = ctx;
6698
6699 while (omp_context *probe = outer->outer)
6700 {
6701 enum gimple_code type = gimple_code (probe->stmt);
6702 tree cls;
6703
6704 switch (type)
6705 {
6706 case GIMPLE_OMP_FOR:
6707 cls = gimple_omp_for_clauses (probe->stmt);
6708 break;
6709
6710 case GIMPLE_OMP_TARGET:
6711 if ((gimple_omp_target_kind (probe->stmt)
6712 != GF_OMP_TARGET_KIND_OACC_PARALLEL)
6713 && (gimple_omp_target_kind (probe->stmt)
6714 != GF_OMP_TARGET_KIND_OACC_SERIAL))
6715 goto do_lookup;
6716
6717 cls = gimple_omp_target_clauses (probe->stmt);
6718 break;
6719
6720 default:
6721 goto do_lookup;
6722 }
6723
6724 outer = probe;
6725 for (; cls; cls = OMP_CLAUSE_CHAIN (cls))
6726 if (OMP_CLAUSE_CODE (cls) == OMP_CLAUSE_REDUCTION
6727 && orig == OMP_CLAUSE_DECL (cls))
6728 {
6729 incoming = outgoing = lookup_decl (orig, probe);
6730 goto has_outer_reduction;
6731 }
6732 else if ((OMP_CLAUSE_CODE (cls) == OMP_CLAUSE_FIRSTPRIVATE
6733 || OMP_CLAUSE_CODE (cls) == OMP_CLAUSE_PRIVATE)
6734 && orig == OMP_CLAUSE_DECL (cls))
6735 {
6736 is_private = true;
6737 goto do_lookup;
6738 }
6739 }
6740
6741 do_lookup:
6742 /* This is the outermost construct with this reduction;
6743 see if there's a mapping for it. */
6744 if (gimple_code (outer->stmt) == GIMPLE_OMP_TARGET
6745 && maybe_lookup_field (orig, outer) && !is_private)
6746 {
6747 ref_to_res = build_receiver_ref (orig, false, outer);
6748 if (omp_is_reference (orig))
6749 ref_to_res = build_simple_mem_ref (ref_to_res);
6750
6751 tree type = TREE_TYPE (var);
6752 if (POINTER_TYPE_P (type))
6753 type = TREE_TYPE (type);
6754
6755 outgoing = var;
6756 incoming = omp_reduction_init_op (loc, rcode, type);
6757 }
6758 else
6759 {
6760 /* Try to look up the reduction var in enclosing contexts;
6761 use the original if no mapping is found. */
6762 tree t = NULL_TREE;
6763 omp_context *c = ctx->outer;
6764 while (c && !t)
6765 {
6766 t = maybe_lookup_decl (orig, c);
6767 c = c->outer;
6768 }
6769 incoming = outgoing = (t ? t : orig);
6770 }
6771
6772 has_outer_reduction:;
6773 }
6774
6775 if (!ref_to_res)
6776 ref_to_res = integer_zero_node;
6777
6778 if (omp_is_reference (orig))
6779 {
6780 tree type = TREE_TYPE (var);
6781 const char *id = IDENTIFIER_POINTER (DECL_NAME (var));
6782
6783 if (!inner)
6784 {
6785 tree x = create_tmp_var (TREE_TYPE (type), id);
6786 gimplify_assign (var, build_fold_addr_expr (x), fork_seq);
6787 }
6788
6789 v1 = create_tmp_var (type, id);
6790 v2 = create_tmp_var (type, id);
6791 v3 = create_tmp_var (type, id);
6792
6793 gimplify_assign (v1, var, fork_seq);
6794 gimplify_assign (v2, var, fork_seq);
6795 gimplify_assign (v3, var, fork_seq);
6796
6797 var = build_simple_mem_ref (var);
6798 v1 = build_simple_mem_ref (v1);
6799 v2 = build_simple_mem_ref (v2);
6800 v3 = build_simple_mem_ref (v3);
6801 outgoing = build_simple_mem_ref (outgoing);
6802
6803 if (!TREE_CONSTANT (incoming))
6804 incoming = build_simple_mem_ref (incoming);
6805 }
6806 else
6807 v1 = v2 = v3 = var;
6808
6809 /* Determine the position in the reduction buffer, which may be
6810 used by the target. The parser has ensured that this is not a
6811 variable-sized type. */
6812 fixed_size_mode mode
6813 = as_a <fixed_size_mode> (TYPE_MODE (TREE_TYPE (var)));
6814 unsigned align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
6815 offset = (offset + align - 1) & ~(align - 1);
6816 tree off = build_int_cst (sizetype, offset);
6817 offset += GET_MODE_SIZE (mode);
6818
6819 if (!init_code)
6820 {
6821 init_code = build_int_cst (integer_type_node,
6822 IFN_GOACC_REDUCTION_INIT);
6823 fini_code = build_int_cst (integer_type_node,
6824 IFN_GOACC_REDUCTION_FINI);
6825 setup_code = build_int_cst (integer_type_node,
6826 IFN_GOACC_REDUCTION_SETUP);
6827 teardown_code = build_int_cst (integer_type_node,
6828 IFN_GOACC_REDUCTION_TEARDOWN);
6829 }
6830
6831 tree setup_call
6832 = build_call_expr_internal_loc (loc, IFN_GOACC_REDUCTION,
6833 TREE_TYPE (var), 6, setup_code,
6834 unshare_expr (ref_to_res),
6835 incoming, level, op, off);
6836 tree init_call
6837 = build_call_expr_internal_loc (loc, IFN_GOACC_REDUCTION,
6838 TREE_TYPE (var), 6, init_code,
6839 unshare_expr (ref_to_res),
6840 v1, level, op, off);
6841 tree fini_call
6842 = build_call_expr_internal_loc (loc, IFN_GOACC_REDUCTION,
6843 TREE_TYPE (var), 6, fini_code,
6844 unshare_expr (ref_to_res),
6845 v2, level, op, off);
6846 tree teardown_call
6847 = build_call_expr_internal_loc (loc, IFN_GOACC_REDUCTION,
6848 TREE_TYPE (var), 6, teardown_code,
6849 ref_to_res, v3, level, op, off);
6850
6851 gimplify_assign (v1, setup_call, &before_fork);
6852 gimplify_assign (v2, init_call, &after_fork);
6853 gimplify_assign (v3, fini_call, &before_join);
6854 gimplify_assign (outgoing, teardown_call, &after_join);
6855 }
6856
6857 /* Now stitch things together. */
6858 gimple_seq_add_seq (fork_seq, before_fork);
6859 if (fork)
6860 gimple_seq_add_stmt (fork_seq, fork);
6861 gimple_seq_add_seq (fork_seq, after_fork);
6862
6863 gimple_seq_add_seq (join_seq, before_join);
6864 if (join)
6865 gimple_seq_add_stmt (join_seq, join);
6866 gimple_seq_add_seq (join_seq, after_join);
6867 }
6868
6869 /* Generate code to implement the REDUCTION clauses and append it
6870 to STMT_SEQP. CLIST, if non-NULL, is a pointer to a sequence
6871 that should also be emitted inside the critical section; in
6872 that case clear *CLIST afterwards, otherwise leave it as is
6873 and let the caller emit it itself. */
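/* E.g. for
     #pragma omp parallel for reduction(+:s)
   with exactly one non-UDR, non-array reduction clause the merge is
   emitted as a relaxed atomic update, conceptually
     #pragma omp atomic
     s += s_priv;
   otherwise all partial results are combined between
   GOMP_atomic_start () and GOMP_atomic_end () calls.  */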
6874
6875 static void
6876 lower_reduction_clauses (tree clauses, gimple_seq *stmt_seqp,
6877 gimple_seq *clist, omp_context *ctx)
6878 {
6879 gimple_seq sub_seq = NULL;
6880 gimple *stmt;
6881 tree x, c;
6882 int count = 0;
6883
6884 /* OpenACC loop reductions are handled elsewhere. */
6885 if (is_gimple_omp_oacc (ctx->stmt))
6886 return;
6887
6888 /* SIMD reductions are handled in lower_rec_input_clauses. */
6889 if (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
6890 && gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_SIMD)
6891 return;
6892
6893 /* inscan reductions are handled elsewhere. */
6894 if (ctx->scan_inclusive || ctx->scan_exclusive)
6895 return;
6896
6897 /* First see if there is exactly one reduction clause. Use an OMP_ATOMIC
6898 update in that case; otherwise use a lock. */
6899 for (c = clauses; c && count < 2; c = OMP_CLAUSE_CHAIN (c))
6900 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
6901 && !OMP_CLAUSE_REDUCTION_TASK (c))
6902 {
6903 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c)
6904 || TREE_CODE (OMP_CLAUSE_DECL (c)) == MEM_REF)
6905 {
6906 /* Never use OMP_ATOMIC for array reductions or UDRs. */
6907 count = -1;
6908 break;
6909 }
6910 count++;
6911 }
6912
6913 if (count == 0)
6914 return;
6915
6916 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
6917 {
6918 tree var, ref, new_var, orig_var;
6919 enum tree_code code;
6920 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
6921
6922 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION
6923 || OMP_CLAUSE_REDUCTION_TASK (c))
6924 continue;
6925
6926 enum omp_clause_code ccode = OMP_CLAUSE_REDUCTION;
6927 orig_var = var = OMP_CLAUSE_DECL (c);
6928 if (TREE_CODE (var) == MEM_REF)
6929 {
6930 var = TREE_OPERAND (var, 0);
6931 if (TREE_CODE (var) == POINTER_PLUS_EXPR)
6932 var = TREE_OPERAND (var, 0);
6933 if (TREE_CODE (var) == ADDR_EXPR)
6934 var = TREE_OPERAND (var, 0);
6935 else
6936 {
6937 /* If this is a pointer- or reference-based array
6938 section, the var could be private in the outer
6939 context, e.g. on an orphaned loop construct. Pretend
6940 this is the private variable's outer reference. */
6941 ccode = OMP_CLAUSE_PRIVATE;
6942 if (TREE_CODE (var) == INDIRECT_REF)
6943 var = TREE_OPERAND (var, 0);
6944 }
6945 orig_var = var;
6946 if (is_variable_sized (var))
6947 {
6948 gcc_assert (DECL_HAS_VALUE_EXPR_P (var));
6949 var = DECL_VALUE_EXPR (var);
6950 gcc_assert (TREE_CODE (var) == INDIRECT_REF);
6951 var = TREE_OPERAND (var, 0);
6952 gcc_assert (DECL_P (var));
6953 }
6954 }
6955 new_var = lookup_decl (var, ctx);
6956 if (var == OMP_CLAUSE_DECL (c) && omp_is_reference (var))
6957 new_var = build_simple_mem_ref_loc (clause_loc, new_var);
6958 ref = build_outer_var_ref (var, ctx, ccode);
6959 code = OMP_CLAUSE_REDUCTION_CODE (c);
6960
6961 /* reduction(-:var) sums up the partial results, so it acts
6962 identically to reduction(+:var). */
6963 if (code == MINUS_EXPR)
6964 code = PLUS_EXPR;
6965
6966 if (count == 1)
6967 {
6968 tree addr = build_fold_addr_expr_loc (clause_loc, ref);
6969
6970 addr = save_expr (addr);
6971 ref = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (addr)), addr);
6972 x = fold_build2_loc (clause_loc, code, TREE_TYPE (ref), ref, new_var);
6973 x = build2 (OMP_ATOMIC, void_type_node, addr, x);
6974 OMP_ATOMIC_MEMORY_ORDER (x) = OMP_MEMORY_ORDER_RELAXED;
6975 gimplify_and_add (x, stmt_seqp);
6976 return;
6977 }
6978 else if (TREE_CODE (OMP_CLAUSE_DECL (c)) == MEM_REF)
6979 {
6980 tree d = OMP_CLAUSE_DECL (c);
6981 tree type = TREE_TYPE (d);
6982 tree v = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
6983 tree i = create_tmp_var (TREE_TYPE (v));
6984 tree ptype = build_pointer_type (TREE_TYPE (type));
6985 tree bias = TREE_OPERAND (d, 1);
6986 d = TREE_OPERAND (d, 0);
6987 if (TREE_CODE (d) == POINTER_PLUS_EXPR)
6988 {
6989 tree b = TREE_OPERAND (d, 1);
6990 b = maybe_lookup_decl (b, ctx);
6991 if (b == NULL)
6992 {
6993 b = TREE_OPERAND (d, 1);
6994 b = maybe_lookup_decl_in_outer_ctx (b, ctx);
6995 }
6996 if (integer_zerop (bias))
6997 bias = b;
6998 else
6999 {
7000 bias = fold_convert_loc (clause_loc, TREE_TYPE (b), bias);
7001 bias = fold_build2_loc (clause_loc, PLUS_EXPR,
7002 TREE_TYPE (b), b, bias);
7003 }
7004 d = TREE_OPERAND (d, 0);
7005 }
7006 /* For ref, build_outer_var_ref already performs this, so
7007 only new_var needs a dereference. */
7008 if (TREE_CODE (d) == INDIRECT_REF)
7009 {
7010 new_var = build_simple_mem_ref_loc (clause_loc, new_var);
7011 gcc_assert (omp_is_reference (var) && var == orig_var);
7012 }
7013 else if (TREE_CODE (d) == ADDR_EXPR)
7014 {
7015 if (orig_var == var)
7016 {
7017 new_var = build_fold_addr_expr (new_var);
7018 ref = build_fold_addr_expr (ref);
7019 }
7020 }
7021 else
7022 {
7023 gcc_assert (orig_var == var);
7024 if (omp_is_reference (var))
7025 ref = build_fold_addr_expr (ref);
7026 }
7027 if (DECL_P (v))
7028 {
7029 tree t = maybe_lookup_decl (v, ctx);
7030 if (t)
7031 v = t;
7032 else
7033 v = maybe_lookup_decl_in_outer_ctx (v, ctx);
7034 gimplify_expr (&v, stmt_seqp, NULL, is_gimple_val, fb_rvalue);
7035 }
7036 if (!integer_zerop (bias))
7037 {
7038 bias = fold_convert_loc (clause_loc, sizetype, bias);
7039 new_var = fold_build2_loc (clause_loc, POINTER_PLUS_EXPR,
7040 TREE_TYPE (new_var), new_var,
7041 unshare_expr (bias));
7042 ref = fold_build2_loc (clause_loc, POINTER_PLUS_EXPR,
7043 TREE_TYPE (ref), ref, bias);
7044 }
7045 new_var = fold_convert_loc (clause_loc, ptype, new_var);
7046 ref = fold_convert_loc (clause_loc, ptype, ref);
7047 tree m = create_tmp_var (ptype);
7048 gimplify_assign (m, new_var, stmt_seqp);
7049 new_var = m;
7050 m = create_tmp_var (ptype);
7051 gimplify_assign (m, ref, stmt_seqp);
7052 ref = m;
7053 gimplify_assign (i, build_int_cst (TREE_TYPE (v), 0), stmt_seqp);
7054 tree body = create_artificial_label (UNKNOWN_LOCATION);
7055 tree end = create_artificial_label (UNKNOWN_LOCATION);
7056 gimple_seq_add_stmt (&sub_seq, gimple_build_label (body));
7057 tree priv = build_simple_mem_ref_loc (clause_loc, new_var);
7058 tree out = build_simple_mem_ref_loc (clause_loc, ref);
7059 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
7060 {
7061 tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
7062 tree decl_placeholder
7063 = OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c);
7064 SET_DECL_VALUE_EXPR (placeholder, out);
7065 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
7066 SET_DECL_VALUE_EXPR (decl_placeholder, priv);
7067 DECL_HAS_VALUE_EXPR_P (decl_placeholder) = 1;
7068 lower_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c), ctx);
7069 gimple_seq_add_seq (&sub_seq,
7070 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c));
7071 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
7072 OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) = NULL;
7073 OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c) = NULL;
7074 }
7075 else
7076 {
7077 x = build2 (code, TREE_TYPE (out), out, priv);
7078 out = unshare_expr (out);
7079 gimplify_assign (out, x, &sub_seq);
7080 }
7081 gimple *g = gimple_build_assign (new_var, POINTER_PLUS_EXPR, new_var,
7082 TYPE_SIZE_UNIT (TREE_TYPE (type)));
7083 gimple_seq_add_stmt (&sub_seq, g);
7084 g = gimple_build_assign (ref, POINTER_PLUS_EXPR, ref,
7085 TYPE_SIZE_UNIT (TREE_TYPE (type)));
7086 gimple_seq_add_stmt (&sub_seq, g);
7087 g = gimple_build_assign (i, PLUS_EXPR, i,
7088 build_int_cst (TREE_TYPE (i), 1));
7089 gimple_seq_add_stmt (&sub_seq, g);
7090 g = gimple_build_cond (LE_EXPR, i, v, body, end);
7091 gimple_seq_add_stmt (&sub_seq, g);
7092 gimple_seq_add_stmt (&sub_seq, gimple_build_label (end));
7093 }
7094 else if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
7095 {
7096 tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
7097
7098 if (omp_is_reference (var)
7099 && !useless_type_conversion_p (TREE_TYPE (placeholder),
7100 TREE_TYPE (ref)))
7101 ref = build_fold_addr_expr_loc (clause_loc, ref);
7102 SET_DECL_VALUE_EXPR (placeholder, ref);
7103 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
7104 lower_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c), ctx);
7105 gimple_seq_add_seq (&sub_seq, OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c));
7106 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
7107 OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) = NULL;
7108 }
7109 else
7110 {
7111 x = build2 (code, TREE_TYPE (ref), ref, new_var);
7112 ref = build_outer_var_ref (var, ctx);
7113 gimplify_assign (ref, x, &sub_seq);
7114 }
7115 }
7116
7117 stmt = gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_ATOMIC_START),
7118 0);
7119 gimple_seq_add_stmt (stmt_seqp, stmt);
7120
7121 gimple_seq_add_seq (stmt_seqp, sub_seq);
7122
7123 if (clist)
7124 {
7125 gimple_seq_add_seq (stmt_seqp, *clist);
7126 *clist = NULL;
7127 }
7128
7129 stmt = gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_ATOMIC_END),
7130 0);
7131 gimple_seq_add_stmt (stmt_seqp, stmt);
7132 }
7133
7134
7135 /* Generate code to implement the COPYPRIVATE clauses. */
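/* A sketch of the intent (illustrative names; the exact GIMPLE differs):
   for

	#pragma omp single copyprivate (a)

   the sender side (SLIST) stores into the copy-out structure,

	.omp_copy_o.a = a;	// or &a when passed by reference

   and the receiver side (RLIST) loads from it,

	a = .omp_copy_i->a;	// dereferenced again when by reference

   using the sender/receiver decls created in lower_omp_single_copy.  */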
7136
7137 static void
7138 lower_copyprivate_clauses (tree clauses, gimple_seq *slist, gimple_seq *rlist,
7139 omp_context *ctx)
7140 {
7141 tree c;
7142
7143 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
7144 {
7145 tree var, new_var, ref, x;
7146 bool by_ref;
7147 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
7148
7149 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_COPYPRIVATE)
7150 continue;
7151
7152 var = OMP_CLAUSE_DECL (c);
7153 by_ref = use_pointer_for_field (var, NULL);
7154
7155 ref = build_sender_ref (var, ctx);
7156 x = new_var = lookup_decl_in_outer_ctx (var, ctx);
7157 if (by_ref)
7158 {
7159 x = build_fold_addr_expr_loc (clause_loc, new_var);
7160 x = fold_convert_loc (clause_loc, TREE_TYPE (ref), x);
7161 }
7162 gimplify_assign (ref, x, slist);
7163
7164 ref = build_receiver_ref (var, false, ctx);
7165 if (by_ref)
7166 {
7167 ref = fold_convert_loc (clause_loc,
7168 build_pointer_type (TREE_TYPE (new_var)),
7169 ref);
7170 ref = build_fold_indirect_ref_loc (clause_loc, ref);
7171 }
7172 if (omp_is_reference (var))
7173 {
7174 ref = fold_convert_loc (clause_loc, TREE_TYPE (new_var), ref);
7175 ref = build_simple_mem_ref_loc (clause_loc, ref);
7176 new_var = build_simple_mem_ref_loc (clause_loc, new_var);
7177 }
7178 x = lang_hooks.decls.omp_clause_assign_op (c, new_var, ref);
7179 gimplify_and_add (x, rlist);
7180 }
7181 }
7182
7183
7184 /* Generate code to implement the FIRSTPRIVATE, COPYIN, LASTPRIVATE and
7185 REDUCTION clauses from the sender (aka parent) side.  */
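/* For illustration (a sketch; actual field names are numbered), a
   firstprivate variable X is marshalled into the sender structure in
   ILIST before the region,

	.omp_data_o.x = x;	// or &x when passed by reference

   while an outgoing clause such as lastprivate copies the value back
   out of the same field in OLIST after the region,

	x = .omp_data_o.x;  */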
7186
7187 static void
7188 lower_send_clauses (tree clauses, gimple_seq *ilist, gimple_seq *olist,
7189 omp_context *ctx)
7190 {
7191 tree c, t;
7192 int ignored_looptemp = 0;
7193 bool is_taskloop = false;
7194
7195 /* For taskloop, ignore first two _looptemp_ clauses, those are initialized
7196 by GOMP_taskloop. */
7197 if (is_task_ctx (ctx) && gimple_omp_task_taskloop_p (ctx->stmt))
7198 {
7199 ignored_looptemp = 2;
7200 is_taskloop = true;
7201 }
7202
7203 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
7204 {
7205 tree val, ref, x, var;
7206 bool by_ref, do_in = false, do_out = false;
7207 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
7208
7209 switch (OMP_CLAUSE_CODE (c))
7210 {
7211 case OMP_CLAUSE_PRIVATE:
7212 if (OMP_CLAUSE_PRIVATE_OUTER_REF (c))
7213 break;
7214 continue;
7215 case OMP_CLAUSE_FIRSTPRIVATE:
7216 case OMP_CLAUSE_COPYIN:
7217 case OMP_CLAUSE_LASTPRIVATE:
7218 case OMP_CLAUSE_IN_REDUCTION:
7219 case OMP_CLAUSE__REDUCTEMP_:
7220 break;
7221 case OMP_CLAUSE_REDUCTION:
7222 if (is_task_ctx (ctx) || OMP_CLAUSE_REDUCTION_TASK (c))
7223 continue;
7224 break;
7225 case OMP_CLAUSE_SHARED:
7226 if (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
7227 break;
7228 continue;
7229 case OMP_CLAUSE__LOOPTEMP_:
7230 if (ignored_looptemp)
7231 {
7232 ignored_looptemp--;
7233 continue;
7234 }
7235 break;
7236 default:
7237 continue;
7238 }
7239
7240 val = OMP_CLAUSE_DECL (c);
7241 if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
7242 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_IN_REDUCTION)
7243 && TREE_CODE (val) == MEM_REF)
7244 {
7245 val = TREE_OPERAND (val, 0);
7246 if (TREE_CODE (val) == POINTER_PLUS_EXPR)
7247 val = TREE_OPERAND (val, 0);
7248 if (TREE_CODE (val) == INDIRECT_REF
7249 || TREE_CODE (val) == ADDR_EXPR)
7250 val = TREE_OPERAND (val, 0);
7251 if (is_variable_sized (val))
7252 continue;
7253 }
7254
7255 /* For OMP_CLAUSE_SHARED_FIRSTPRIVATE, look beyond the
7256 outer taskloop region. */
7257 omp_context *ctx_for_o = ctx;
7258 if (is_taskloop
7259 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED
7260 && OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
7261 ctx_for_o = ctx->outer;
7262
7263 var = lookup_decl_in_outer_ctx (val, ctx_for_o);
7264
7265 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_COPYIN
7266 && is_global_var (var)
7267 && (val == OMP_CLAUSE_DECL (c)
7268 || !is_task_ctx (ctx)
7269 || (TREE_CODE (TREE_TYPE (val)) != POINTER_TYPE
7270 && (TREE_CODE (TREE_TYPE (val)) != REFERENCE_TYPE
7271 || (TREE_CODE (TREE_TYPE (TREE_TYPE (val)))
7272 != POINTER_TYPE)))))
7273 continue;
7274
7275 t = omp_member_access_dummy_var (var);
7276 if (t)
7277 {
7278 var = DECL_VALUE_EXPR (var);
7279 tree o = maybe_lookup_decl_in_outer_ctx (t, ctx_for_o);
7280 if (o != t)
7281 var = unshare_and_remap (var, t, o);
7282 else
7283 var = unshare_expr (var);
7284 }
7285
7286 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED)
7287 {
7288 /* Handle taskloop firstprivate/lastprivate, where the
7289 lastprivate on GIMPLE_OMP_TASK is represented as
7290 OMP_CLAUSE_SHARED_FIRSTPRIVATE. */
7291 tree f = lookup_sfield ((splay_tree_key) &DECL_UID (val), ctx);
7292 x = omp_build_component_ref (ctx->sender_decl, f);
7293 if (use_pointer_for_field (val, ctx))
7294 var = build_fold_addr_expr (var);
7295 gimplify_assign (x, var, ilist);
7296 DECL_ABSTRACT_ORIGIN (f) = NULL;
7297 continue;
7298 }
7299
7300 if (((OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION
7301 && OMP_CLAUSE_CODE (c) != OMP_CLAUSE_IN_REDUCTION)
7302 || val == OMP_CLAUSE_DECL (c))
7303 && is_variable_sized (val))
7304 continue;
7305 by_ref = use_pointer_for_field (val, NULL);
7306
7307 switch (OMP_CLAUSE_CODE (c))
7308 {
7309 case OMP_CLAUSE_FIRSTPRIVATE:
7310 if (OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (c)
7311 && !by_ref
7312 && is_task_ctx (ctx))
7313 TREE_NO_WARNING (var) = 1;
7314 do_in = true;
7315 break;
7316
7317 case OMP_CLAUSE_PRIVATE:
7318 case OMP_CLAUSE_COPYIN:
7319 case OMP_CLAUSE__LOOPTEMP_:
7320 case OMP_CLAUSE__REDUCTEMP_:
7321 do_in = true;
7322 break;
7323
7324 case OMP_CLAUSE_LASTPRIVATE:
7325 if (by_ref || omp_is_reference (val))
7326 {
7327 if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
7328 continue;
7329 do_in = true;
7330 }
7331 else
7332 {
7333 do_out = true;
7334 if (lang_hooks.decls.omp_private_outer_ref (val))
7335 do_in = true;
7336 }
7337 break;
7338
7339 case OMP_CLAUSE_REDUCTION:
7340 case OMP_CLAUSE_IN_REDUCTION:
7341 do_in = true;
7342 if (val == OMP_CLAUSE_DECL (c))
7343 {
7344 if (is_task_ctx (ctx))
7345 by_ref = use_pointer_for_field (val, ctx);
7346 else
7347 do_out = !(by_ref || omp_is_reference (val));
7348 }
7349 else
7350 by_ref = TREE_CODE (TREE_TYPE (val)) == ARRAY_TYPE;
7351 break;
7352
7353 default:
7354 gcc_unreachable ();
7355 }
7356
7357 if (do_in)
7358 {
7359 ref = build_sender_ref (val, ctx);
7360 x = by_ref ? build_fold_addr_expr_loc (clause_loc, var) : var;
7361 gimplify_assign (ref, x, ilist);
7362 if (is_task_ctx (ctx))
7363 DECL_ABSTRACT_ORIGIN (TREE_OPERAND (ref, 1)) = NULL;
7364 }
7365
7366 if (do_out)
7367 {
7368 ref = build_sender_ref (val, ctx);
7369 gimplify_assign (var, ref, olist);
7370 }
7371 }
7372 }
7373
7374 /* Generate code to implement SHARED from the sender (aka parent)
7375 side. This is trickier, since GIMPLE_OMP_PARALLEL_CLAUSES doesn't
7376 list things that got automatically shared. */
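/* As a sketch (illustrative names): a shared variable X living in the
   parent is usually sent by address,

	.omp_data_o.x = &x;	// use_pointer_for_field case

   and otherwise by value, with a copy-back in OLIST after the region
   so writes made through the receiver side become visible,

	.omp_data_o.x = x;  ...  x = .omp_data_o.x;  */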
7377
7378 static void
7379 lower_send_shared_vars (gimple_seq *ilist, gimple_seq *olist, omp_context *ctx)
7380 {
7381 tree var, ovar, nvar, t, f, x, record_type;
7382
7383 if (ctx->record_type == NULL)
7384 return;
7385
7386 record_type = ctx->srecord_type ? ctx->srecord_type : ctx->record_type;
7387 for (f = TYPE_FIELDS (record_type); f ; f = DECL_CHAIN (f))
7388 {
7389 ovar = DECL_ABSTRACT_ORIGIN (f);
7390 if (!ovar || TREE_CODE (ovar) == FIELD_DECL)
7391 continue;
7392
7393 nvar = maybe_lookup_decl (ovar, ctx);
7394 if (!nvar || !DECL_HAS_VALUE_EXPR_P (nvar))
7395 continue;
7396
7397 /* If CTX is a nested parallel directive, find the immediately
7398 enclosing parallel or workshare construct that contains a
7399 mapping for OVAR.  */
7400 var = lookup_decl_in_outer_ctx (ovar, ctx);
7401
7402 t = omp_member_access_dummy_var (var);
7403 if (t)
7404 {
7405 var = DECL_VALUE_EXPR (var);
7406 tree o = maybe_lookup_decl_in_outer_ctx (t, ctx);
7407 if (o != t)
7408 var = unshare_and_remap (var, t, o);
7409 else
7410 var = unshare_expr (var);
7411 }
7412
7413 if (use_pointer_for_field (ovar, ctx))
7414 {
7415 x = build_sender_ref (ovar, ctx);
7416 if (TREE_CODE (TREE_TYPE (f)) == ARRAY_TYPE
7417 && TREE_TYPE (f) == TREE_TYPE (ovar))
7418 {
7419 gcc_assert (is_parallel_ctx (ctx)
7420 && DECL_ARTIFICIAL (ovar));
7421 /* _condtemp_ clause. */
7422 var = build_constructor (TREE_TYPE (x), NULL);
7423 }
7424 else
7425 var = build_fold_addr_expr (var);
7426 gimplify_assign (x, var, ilist);
7427 }
7428 else
7429 {
7430 x = build_sender_ref (ovar, ctx);
7431 gimplify_assign (x, var, ilist);
7432
7433 if (!TREE_READONLY (var)
7434 /* We don't need to receive a new reference to a result
7435 or parm decl.  In fact we must not store to it, as doing so would
7436 invalidate any pending RSO and generate wrong gimple
7437 during inlining. */
7438 && !((TREE_CODE (var) == RESULT_DECL
7439 || TREE_CODE (var) == PARM_DECL)
7440 && DECL_BY_REFERENCE (var)))
7441 {
7442 x = build_sender_ref (ovar, ctx);
7443 gimplify_assign (var, x, olist);
7444 }
7445 }
7446 }
7447 }
7448
7449 /* Emit an OpenACC head marker call, encapsulating the partitioning and
7450 other information that must be processed by the target compiler.
7451 Return the maximum number of dimensions the associated loop might
7452 be partitioned over. */
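/* The marker is a call to the IFN_UNIQUE internal function; roughly
   (a sketch, with DDVAR named .data_dep as created by the caller):

	.data_dep = .UNIQUE (OACC_HEAD_MARK, .data_dep, <levels>, <tag>
			     [, <gang static operand>]);

   where the static operand is present only for gang(static:N).  */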
7453
7454 static unsigned
7455 lower_oacc_head_mark (location_t loc, tree ddvar, tree clauses,
7456 gimple_seq *seq, omp_context *ctx)
7457 {
7458 unsigned levels = 0;
7459 unsigned tag = 0;
7460 tree gang_static = NULL_TREE;
7461 auto_vec<tree, 5> args;
7462
7463 args.quick_push (build_int_cst
7464 (integer_type_node, IFN_UNIQUE_OACC_HEAD_MARK));
7465 args.quick_push (ddvar);
7466 for (tree c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
7467 {
7468 switch (OMP_CLAUSE_CODE (c))
7469 {
7470 case OMP_CLAUSE_GANG:
7471 tag |= OLF_DIM_GANG;
7472 gang_static = OMP_CLAUSE_GANG_STATIC_EXPR (c);
7473 /* static:* is represented by -1, and we can ignore it, as
7474 scheduling is always static. */
7475 if (gang_static && integer_minus_onep (gang_static))
7476 gang_static = NULL_TREE;
7477 levels++;
7478 break;
7479
7480 case OMP_CLAUSE_WORKER:
7481 tag |= OLF_DIM_WORKER;
7482 levels++;
7483 break;
7484
7485 case OMP_CLAUSE_VECTOR:
7486 tag |= OLF_DIM_VECTOR;
7487 levels++;
7488 break;
7489
7490 case OMP_CLAUSE_SEQ:
7491 tag |= OLF_SEQ;
7492 break;
7493
7494 case OMP_CLAUSE_AUTO:
7495 tag |= OLF_AUTO;
7496 break;
7497
7498 case OMP_CLAUSE_INDEPENDENT:
7499 tag |= OLF_INDEPENDENT;
7500 break;
7501
7502 case OMP_CLAUSE_TILE:
7503 tag |= OLF_TILE;
7504 break;
7505
7506 default:
7507 continue;
7508 }
7509 }
7510
7511 if (gang_static)
7512 {
7513 if (DECL_P (gang_static))
7514 gang_static = build_outer_var_ref (gang_static, ctx);
7515 tag |= OLF_GANG_STATIC;
7516 }
7517
7518 /* In a parallel region, loops are implicitly INDEPENDENT. */
7519 omp_context *tgt = enclosing_target_ctx (ctx);
7520 if (!tgt || is_oacc_parallel_or_serial (tgt))
7521 tag |= OLF_INDEPENDENT;
7522
7523 if (tag & OLF_TILE)
7524 /* Tiling could use all 3 levels. */
7525 levels = 3;
7526 else
7527 {
7528 /* A loop lacking SEQ, GANG, WORKER and/or VECTOR could be AUTO.
7529 Ensure at least one level, or 2 for possible auto
7530 partitioning.  */
7531 bool maybe_auto = !(tag & (((GOMP_DIM_MASK (GOMP_DIM_MAX) - 1)
7532 << OLF_DIM_BASE) | OLF_SEQ));
7533
7534 if (levels < 1u + maybe_auto)
7535 levels = 1u + maybe_auto;
7536 }
7537
7538 args.quick_push (build_int_cst (integer_type_node, levels));
7539 args.quick_push (build_int_cst (integer_type_node, tag));
7540 if (gang_static)
7541 args.quick_push (gang_static);
7542
7543 gcall *call = gimple_build_call_internal_vec (IFN_UNIQUE, args);
7544 gimple_set_location (call, loc);
7545 gimple_set_lhs (call, ddvar);
7546 gimple_seq_add_stmt (seq, call);
7547
7548 return levels;
7549 }
7550
7551 /* Emit an OpenACC loop head or tail marker to SEQ.  TOFOLLOW, when
7552 non-NULL, is the partitioning level of the enclosed region.  */
7553
7554 static void
7555 lower_oacc_loop_marker (location_t loc, tree ddvar, bool head,
7556 tree tofollow, gimple_seq *seq)
7557 {
7558 int marker_kind = (head ? IFN_UNIQUE_OACC_HEAD_MARK
7559 : IFN_UNIQUE_OACC_TAIL_MARK);
7560 tree marker = build_int_cst (integer_type_node, marker_kind);
7561 int nargs = 2 + (tofollow != NULL_TREE);
7562 gcall *call = gimple_build_call_internal (IFN_UNIQUE, nargs,
7563 marker, ddvar, tofollow);
7564 gimple_set_location (call, loc);
7565 gimple_set_lhs (call, ddvar);
7566 gimple_seq_add_stmt (seq, call);
7567 }
7568
7569 /* Generate the before and after OpenACC loop sequences. CLAUSES are
7570 the loop clauses, from which we extract reductions. Initialize
7571 HEAD and TAIL. */
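/* Schematically (a sketch): each partitioning level contributes a
   FORK to HEAD and a matching JOIN to TAIL, in mirrored order so the
   levels nest,

	HEAD:  HEAD_MARK ... FORK (outer) ... FORK (inner) ... HEAD_MARK
	body
	TAIL:  TAIL_MARK ... JOIN (inner) ... JOIN (outer) ... TAIL_MARK

   with per-level reduction setup and teardown interleaved by
   lower_oacc_reductions.  */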
7572
7573 static void
7574 lower_oacc_head_tail (location_t loc, tree clauses,
7575 gimple_seq *head, gimple_seq *tail, omp_context *ctx)
7576 {
7577 bool inner = false;
7578 tree ddvar = create_tmp_var (integer_type_node, ".data_dep");
7579 gimple_seq_add_stmt (head, gimple_build_assign (ddvar, integer_zero_node));
7580
7581 unsigned count = lower_oacc_head_mark (loc, ddvar, clauses, head, ctx);
7582 tree fork_kind = build_int_cst (unsigned_type_node, IFN_UNIQUE_OACC_FORK);
7583 tree join_kind = build_int_cst (unsigned_type_node, IFN_UNIQUE_OACC_JOIN);
7584
7585 gcc_assert (count);
7586 for (unsigned done = 1; count; count--, done++)
7587 {
7588 gimple_seq fork_seq = NULL;
7589 gimple_seq join_seq = NULL;
7590
7591 tree place = build_int_cst (integer_type_node, -1);
7592 gcall *fork = gimple_build_call_internal (IFN_UNIQUE, 3,
7593 fork_kind, ddvar, place);
7594 gimple_set_location (fork, loc);
7595 gimple_set_lhs (fork, ddvar);
7596
7597 gcall *join = gimple_build_call_internal (IFN_UNIQUE, 3,
7598 join_kind, ddvar, place);
7599 gimple_set_location (join, loc);
7600 gimple_set_lhs (join, ddvar);
7601
7602 /* Mark the beginning of this level sequence. */
7603 if (inner)
7604 lower_oacc_loop_marker (loc, ddvar, true,
7605 build_int_cst (integer_type_node, count),
7606 &fork_seq);
7607 lower_oacc_loop_marker (loc, ddvar, false,
7608 build_int_cst (integer_type_node, done),
7609 &join_seq);
7610
7611 lower_oacc_reductions (loc, clauses, place, inner,
7612 fork, join, &fork_seq, &join_seq, ctx);
7613
7614 /* Append this level to head. */
7615 gimple_seq_add_seq (head, fork_seq);
7616 /* Prepend it to tail. */
7617 gimple_seq_add_seq (&join_seq, *tail);
7618 *tail = join_seq;
7619
7620 inner = true;
7621 }
7622
7623 /* Mark the end of the sequence. */
7624 lower_oacc_loop_marker (loc, ddvar, true, NULL_TREE, head);
7625 lower_oacc_loop_marker (loc, ddvar, false, NULL_TREE, tail);
7626 }
7627
7628 /* If exceptions are enabled, wrap the statements in BODY in a MUST_NOT_THROW
7629 catch handler and return it. This prevents programs from violating the
7630 structured block semantics with throws. */
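/* I.e. (a sketch) BODY becomes

	try { BODY }
	catch (...) { <must_not_throw handler> }

   where the handler typically calls std::terminate () in C++ and
   __builtin_trap () for languages without a protect-cleanup hook.  */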
7631
7632 static gimple_seq
7633 maybe_catch_exception (gimple_seq body)
7634 {
7635 gimple *g;
7636 tree decl;
7637
7638 if (!flag_exceptions)
7639 return body;
7640
7641 if (lang_hooks.eh_protect_cleanup_actions != NULL)
7642 decl = lang_hooks.eh_protect_cleanup_actions ();
7643 else
7644 decl = builtin_decl_explicit (BUILT_IN_TRAP);
7645
7646 g = gimple_build_eh_must_not_throw (decl);
7647 g = gimple_build_try (body, gimple_seq_alloc_with_stmt (g),
7648 GIMPLE_TRY_CATCH);
7649
7650 return gimple_seq_alloc_with_stmt (g);
7651 }
7652
7653 \f
7654 /* Routines to lower OMP directives into OMP-GIMPLE. */
7655
7656 /* If ctx is a worksharing context inside of a cancellable parallel
7657 region and it isn't nowait, add a lhs to its GIMPLE_OMP_RETURN
7658 and a conditional branch to the parallel's cancel_label to handle
7659 cancellation in the implicit barrier.  */
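/* A sketch of the emitted check (label names illustrative):

	lhs = GIMPLE_OMP_RETURN;	// barrier reports cancellation
	if (lhs != 0) goto <parallel cancel_label>;
	<fallthru_label>:  */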
7660
7661 static void
7662 maybe_add_implicit_barrier_cancel (omp_context *ctx, gimple *omp_return,
7663 gimple_seq *body)
7664 {
7665 gcc_assert (gimple_code (omp_return) == GIMPLE_OMP_RETURN);
7666 if (gimple_omp_return_nowait_p (omp_return))
7667 return;
7668 for (omp_context *outer = ctx->outer; outer; outer = outer->outer)
7669 if (gimple_code (outer->stmt) == GIMPLE_OMP_PARALLEL
7670 && outer->cancellable)
7671 {
7672 tree fndecl = builtin_decl_explicit (BUILT_IN_GOMP_CANCEL);
7673 tree c_bool_type = TREE_TYPE (TREE_TYPE (fndecl));
7674 tree lhs = create_tmp_var (c_bool_type);
7675 gimple_omp_return_set_lhs (omp_return, lhs);
7676 tree fallthru_label = create_artificial_label (UNKNOWN_LOCATION);
7677 gimple *g = gimple_build_cond (NE_EXPR, lhs,
7678 fold_convert (c_bool_type,
7679 boolean_false_node),
7680 outer->cancel_label, fallthru_label);
7681 gimple_seq_add_stmt (body, g);
7682 gimple_seq_add_stmt (body, gimple_build_label (fallthru_label));
7683 }
7684 else if (gimple_code (outer->stmt) != GIMPLE_OMP_TASKGROUP)
7685 return;
7686 }
7687
7688 /* Find the first task_reduction or reduction clause, or return NULL_TREE
7689 if there are none.  */
7690
7691 static inline tree
7692 omp_task_reductions_find_first (tree clauses, enum tree_code code,
7693 enum omp_clause_code ccode)
7694 {
7695 while (1)
7696 {
7697 clauses = omp_find_clause (clauses, ccode);
7698 if (clauses == NULL_TREE)
7699 return NULL_TREE;
7700 if (ccode != OMP_CLAUSE_REDUCTION
7701 || code == OMP_TASKLOOP
7702 || OMP_CLAUSE_REDUCTION_TASK (clauses))
7703 return clauses;
7704 clauses = OMP_CLAUSE_CHAIN (clauses);
7705 }
7706 }
7707
7708 static void lower_omp_task_reductions (omp_context *, enum tree_code, tree,
7709 gimple_seq *, gimple_seq *);
7710
7711 /* Lower the OpenMP sections directive in the current statement in GSI_P.
7712 CTX is the enclosing OMP context for the current statement. */
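/* The replacement body is assembled roughly as (a sketch):

	<ilist: privatization/reduction setup>
	GIMPLE_OMP_SECTIONS <.section control var>
	GIMPLE_OMP_SECTIONS_SWITCH
	<each lowered section body, ending in GIMPLE_OMP_RETURN>
	GIMPLE_OMP_CONTINUE
	<olist: reduction merges>  <dlist: destructors>
	GIMPLE_OMP_RETURN		// nowait if requested  */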
7713
7714 static void
7715 lower_omp_sections (gimple_stmt_iterator *gsi_p, omp_context *ctx)
7716 {
7717 tree block, control;
7718 gimple_stmt_iterator tgsi;
7719 gomp_sections *stmt;
7720 gimple *t;
7721 gbind *new_stmt, *bind;
7722 gimple_seq ilist, dlist, olist, tred_dlist = NULL, clist = NULL, new_body;
7723
7724 stmt = as_a <gomp_sections *> (gsi_stmt (*gsi_p));
7725
7726 push_gimplify_context ();
7727
7728 dlist = NULL;
7729 ilist = NULL;
7730
7731 tree rclauses
7732 = omp_task_reductions_find_first (gimple_omp_sections_clauses (stmt),
7733 OMP_SECTIONS, OMP_CLAUSE_REDUCTION);
7734 tree rtmp = NULL_TREE;
7735 if (rclauses)
7736 {
7737 tree type = build_pointer_type (pointer_sized_int_node);
7738 tree temp = create_tmp_var (type);
7739 tree c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__REDUCTEMP_);
7740 OMP_CLAUSE_DECL (c) = temp;
7741 OMP_CLAUSE_CHAIN (c) = gimple_omp_sections_clauses (stmt);
7742 gimple_omp_sections_set_clauses (stmt, c);
7743 lower_omp_task_reductions (ctx, OMP_SECTIONS,
7744 gimple_omp_sections_clauses (stmt),
7745 &ilist, &tred_dlist);
7746 rclauses = c;
7747 rtmp = make_ssa_name (type);
7748 gimple_seq_add_stmt (&ilist, gimple_build_assign (rtmp, temp));
7749 }
7750
7751 tree *clauses_ptr = gimple_omp_sections_clauses_ptr (stmt);
7752 lower_lastprivate_conditional_clauses (clauses_ptr, ctx);
7753
7754 lower_rec_input_clauses (gimple_omp_sections_clauses (stmt),
7755 &ilist, &dlist, ctx, NULL);
7756
7757 control = create_tmp_var (unsigned_type_node, ".section");
7758 gimple_omp_sections_set_control (stmt, control);
7759
7760 new_body = gimple_omp_body (stmt);
7761 gimple_omp_set_body (stmt, NULL);
7762 tgsi = gsi_start (new_body);
7763 for (; !gsi_end_p (tgsi); gsi_next (&tgsi))
7764 {
7765 omp_context *sctx;
7766 gimple *sec_start;
7767
7768 sec_start = gsi_stmt (tgsi);
7769 sctx = maybe_lookup_ctx (sec_start);
7770 gcc_assert (sctx);
7771
7772 lower_omp (gimple_omp_body_ptr (sec_start), sctx);
7773 gsi_insert_seq_after (&tgsi, gimple_omp_body (sec_start),
7774 GSI_CONTINUE_LINKING);
7775 gimple_omp_set_body (sec_start, NULL);
7776
7777 if (gsi_one_before_end_p (tgsi))
7778 {
7779 gimple_seq l = NULL;
7780 lower_lastprivate_clauses (gimple_omp_sections_clauses (stmt), NULL,
7781 &ilist, &l, &clist, ctx);
7782 gsi_insert_seq_after (&tgsi, l, GSI_CONTINUE_LINKING);
7783 gimple_omp_section_set_last (sec_start);
7784 }
7785
7786 gsi_insert_after (&tgsi, gimple_build_omp_return (false),
7787 GSI_CONTINUE_LINKING);
7788 }
7789
7790 block = make_node (BLOCK);
7791 bind = gimple_build_bind (NULL, new_body, block);
7792
7793 olist = NULL;
7794 lower_reduction_clauses (gimple_omp_sections_clauses (stmt), &olist,
7795 &clist, ctx);
7796 if (clist)
7797 {
7798 tree fndecl = builtin_decl_explicit (BUILT_IN_GOMP_ATOMIC_START);
7799 gcall *g = gimple_build_call (fndecl, 0);
7800 gimple_seq_add_stmt (&olist, g);
7801 gimple_seq_add_seq (&olist, clist);
7802 fndecl = builtin_decl_explicit (BUILT_IN_GOMP_ATOMIC_END);
7803 g = gimple_build_call (fndecl, 0);
7804 gimple_seq_add_stmt (&olist, g);
7805 }
7806
7807 block = make_node (BLOCK);
7808 new_stmt = gimple_build_bind (NULL, NULL, block);
7809 gsi_replace (gsi_p, new_stmt, true);
7810
7811 pop_gimplify_context (new_stmt);
7812 gimple_bind_append_vars (new_stmt, ctx->block_vars);
7813 BLOCK_VARS (block) = gimple_bind_vars (bind);
7814 if (BLOCK_VARS (block))
7815 TREE_USED (block) = 1;
7816
7817 new_body = NULL;
7818 gimple_seq_add_seq (&new_body, ilist);
7819 gimple_seq_add_stmt (&new_body, stmt);
7820 gimple_seq_add_stmt (&new_body, gimple_build_omp_sections_switch ());
7821 gimple_seq_add_stmt (&new_body, bind);
7822
7823 t = gimple_build_omp_continue (control, control);
7824 gimple_seq_add_stmt (&new_body, t);
7825
7826 gimple_seq_add_seq (&new_body, olist);
7827 if (ctx->cancellable)
7828 gimple_seq_add_stmt (&new_body, gimple_build_label (ctx->cancel_label));
7829 gimple_seq_add_seq (&new_body, dlist);
7830
7831 new_body = maybe_catch_exception (new_body);
7832
7833 bool nowait = omp_find_clause (gimple_omp_sections_clauses (stmt),
7834 OMP_CLAUSE_NOWAIT) != NULL_TREE;
7835 t = gimple_build_omp_return (nowait);
7836 gimple_seq_add_stmt (&new_body, t);
7837 gimple_seq_add_seq (&new_body, tred_dlist);
7838 maybe_add_implicit_barrier_cancel (ctx, t, &new_body);
7839
7840 if (rclauses)
7841 OMP_CLAUSE_DECL (rclauses) = rtmp;
7842
7843 gimple_bind_set_body (new_stmt, new_body);
7844 }
7845
7846
7847 /* A subroutine of lower_omp_single. Expand the simple form of
7848 a GIMPLE_OMP_SINGLE, without a copyprivate clause:
7849
7850 if (GOMP_single_start ())
7851 BODY;
7852 [ GOMP_barrier (); ] -> unless 'nowait' is present.
7853
7854 FIXME. It may be better to delay expanding the logic of this until
7855 pass_expand_omp. The expanded logic may make the job more difficult
7856 for a synchronization analysis pass.  */
7857
7858 static void
7859 lower_omp_single_simple (gomp_single *single_stmt, gimple_seq *pre_p)
7860 {
7861 location_t loc = gimple_location (single_stmt);
7862 tree tlabel = create_artificial_label (loc);
7863 tree flabel = create_artificial_label (loc);
7864 gimple *call, *cond;
7865 tree lhs, decl;
7866
7867 decl = builtin_decl_explicit (BUILT_IN_GOMP_SINGLE_START);
7868 lhs = create_tmp_var (TREE_TYPE (TREE_TYPE (decl)));
7869 call = gimple_build_call (decl, 0);
7870 gimple_call_set_lhs (call, lhs);
7871 gimple_seq_add_stmt (pre_p, call);
7872
7873 cond = gimple_build_cond (EQ_EXPR, lhs,
7874 fold_convert_loc (loc, TREE_TYPE (lhs),
7875 boolean_true_node),
7876 tlabel, flabel);
7877 gimple_seq_add_stmt (pre_p, cond);
7878 gimple_seq_add_stmt (pre_p, gimple_build_label (tlabel));
7879 gimple_seq_add_seq (pre_p, gimple_omp_body (single_stmt));
7880 gimple_seq_add_stmt (pre_p, gimple_build_label (flabel));
7881 }
7882
7883
7884 /* A subroutine of lower_omp_single. Expand the simple form of
7885 a GIMPLE_OMP_SINGLE, with a copyprivate clause:
7886
7887 #pragma omp single copyprivate (a, b, c)
7888
7889 Create a new structure to hold copies of 'a', 'b' and 'c' and emit:
7890
7891 {
7892 if ((copyout_p = GOMP_single_copy_start ()) == NULL)
7893 {
7894 BODY;
7895 copyout.a = a;
7896 copyout.b = b;
7897 copyout.c = c;
7898 GOMP_single_copy_end (&copyout);
7899 }
7900 else
7901 {
7902 a = copyout_p->a;
7903 b = copyout_p->b;
7904 c = copyout_p->c;
7905 }
7906 GOMP_barrier ();
7907 }
7908
7909 FIXME. It may be better to delay expanding the logic of this until
7910 pass_expand_omp. The expanded logic may make the job more difficult
7911 for a synchronization analysis pass.  */
7912
7913 static void
7914 lower_omp_single_copy (gomp_single *single_stmt, gimple_seq *pre_p,
7915 omp_context *ctx)
7916 {
7917 tree ptr_type, t, l0, l1, l2, bfn_decl;
7918 gimple_seq copyin_seq;
7919 location_t loc = gimple_location (single_stmt);
7920
7921 ctx->sender_decl = create_tmp_var (ctx->record_type, ".omp_copy_o");
7922
7923 ptr_type = build_pointer_type (ctx->record_type);
7924 ctx->receiver_decl = create_tmp_var (ptr_type, ".omp_copy_i");
7925
7926 l0 = create_artificial_label (loc);
7927 l1 = create_artificial_label (loc);
7928 l2 = create_artificial_label (loc);
7929
7930 bfn_decl = builtin_decl_explicit (BUILT_IN_GOMP_SINGLE_COPY_START);
7931 t = build_call_expr_loc (loc, bfn_decl, 0);
7932 t = fold_convert_loc (loc, ptr_type, t);
7933 gimplify_assign (ctx->receiver_decl, t, pre_p);
7934
7935 t = build2 (EQ_EXPR, boolean_type_node, ctx->receiver_decl,
7936 build_int_cst (ptr_type, 0));
7937 t = build3 (COND_EXPR, void_type_node, t,
7938 build_and_jump (&l0), build_and_jump (&l1));
7939 gimplify_and_add (t, pre_p);
7940
7941 gimple_seq_add_stmt (pre_p, gimple_build_label (l0));
7942
7943 gimple_seq_add_seq (pre_p, gimple_omp_body (single_stmt));
7944
7945 copyin_seq = NULL;
7946 lower_copyprivate_clauses (gimple_omp_single_clauses (single_stmt), pre_p,
7947 &copyin_seq, ctx);
7948
7949 t = build_fold_addr_expr_loc (loc, ctx->sender_decl);
7950 bfn_decl = builtin_decl_explicit (BUILT_IN_GOMP_SINGLE_COPY_END);
7951 t = build_call_expr_loc (loc, bfn_decl, 1, t);
7952 gimplify_and_add (t, pre_p);
7953
7954 t = build_and_jump (&l2);
7955 gimplify_and_add (t, pre_p);
7956
7957 gimple_seq_add_stmt (pre_p, gimple_build_label (l1));
7958
7959 gimple_seq_add_seq (pre_p, copyin_seq);
7960
7961 gimple_seq_add_stmt (pre_p, gimple_build_label (l2));
7962 }
7963
7964
7965 /* Expand code for an OpenMP single directive. */
7966
7967 static void
7968 lower_omp_single (gimple_stmt_iterator *gsi_p, omp_context *ctx)
7969 {
7970 tree block;
7971 gomp_single *single_stmt = as_a <gomp_single *> (gsi_stmt (*gsi_p));
7972 gbind *bind;
7973 gimple_seq bind_body, bind_body_tail = NULL, dlist;
7974
7975 push_gimplify_context ();
7976
7977 block = make_node (BLOCK);
7978 bind = gimple_build_bind (NULL, NULL, block);
7979 gsi_replace (gsi_p, bind, true);
7980 bind_body = NULL;
7981 dlist = NULL;
7982 lower_rec_input_clauses (gimple_omp_single_clauses (single_stmt),
7983 &bind_body, &dlist, ctx, NULL);
7984 lower_omp (gimple_omp_body_ptr (single_stmt), ctx);
7985
7986 gimple_seq_add_stmt (&bind_body, single_stmt);
7987
7988 if (ctx->record_type)
7989 lower_omp_single_copy (single_stmt, &bind_body, ctx);
7990 else
7991 lower_omp_single_simple (single_stmt, &bind_body);
7992
7993 gimple_omp_set_body (single_stmt, NULL);
7994
7995 gimple_seq_add_seq (&bind_body, dlist);
7996
7997 bind_body = maybe_catch_exception (bind_body);
7998
7999 bool nowait = omp_find_clause (gimple_omp_single_clauses (single_stmt),
8000 OMP_CLAUSE_NOWAIT) != NULL_TREE;
8001 gimple *g = gimple_build_omp_return (nowait);
8002 gimple_seq_add_stmt (&bind_body_tail, g);
8003 maybe_add_implicit_barrier_cancel (ctx, g, &bind_body_tail);
8004 if (ctx->record_type)
8005 {
8006 gimple_stmt_iterator gsi = gsi_start (bind_body_tail);
8007 tree clobber = build_clobber (ctx->record_type);
8008 gsi_insert_after (&gsi, gimple_build_assign (ctx->sender_decl,
8009 clobber), GSI_SAME_STMT);
8010 }
8011 gimple_seq_add_seq (&bind_body, bind_body_tail);
8012 gimple_bind_set_body (bind, bind_body);
8013
8014 pop_gimplify_context (bind);
8015
8016 gimple_bind_append_vars (bind, ctx->block_vars);
8017 BLOCK_VARS (block) = ctx->block_vars;
8018 if (BLOCK_VARS (block))
8019 TREE_USED (block) = 1;
8020 }
8021
8022
8023 /* Expand code for an OpenMP master directive. */
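/* I.e. (a sketch):

	if (omp_get_thread_num () != 0) goto lab;
	BODY;
     lab:  */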
8024
8025 static void
8026 lower_omp_master (gimple_stmt_iterator *gsi_p, omp_context *ctx)
8027 {
8028 tree block, lab = NULL, x, bfn_decl;
8029 gimple *stmt = gsi_stmt (*gsi_p);
8030 gbind *bind;
8031 location_t loc = gimple_location (stmt);
8032 gimple_seq tseq;
8033
8034 push_gimplify_context ();
8035
8036 block = make_node (BLOCK);
8037 bind = gimple_build_bind (NULL, NULL, block);
8038 gsi_replace (gsi_p, bind, true);
8039 gimple_bind_add_stmt (bind, stmt);
8040
8041 bfn_decl = builtin_decl_explicit (BUILT_IN_OMP_GET_THREAD_NUM);
8042 x = build_call_expr_loc (loc, bfn_decl, 0);
8043 x = build2 (EQ_EXPR, boolean_type_node, x, integer_zero_node);
8044 x = build3 (COND_EXPR, void_type_node, x, NULL, build_and_jump (&lab));
8045 tseq = NULL;
8046 gimplify_and_add (x, &tseq);
8047 gimple_bind_add_seq (bind, tseq);
8048
8049 lower_omp (gimple_omp_body_ptr (stmt), ctx);
8050 gimple_omp_set_body (stmt, maybe_catch_exception (gimple_omp_body (stmt)));
8051 gimple_bind_add_seq (bind, gimple_omp_body (stmt));
8052 gimple_omp_set_body (stmt, NULL);
8053
8054 gimple_bind_add_stmt (bind, gimple_build_label (lab));
8055
8056 gimple_bind_add_stmt (bind, gimple_build_omp_return (true));
8057
8058 pop_gimplify_context (bind);
8059
8060 gimple_bind_append_vars (bind, ctx->block_vars);
8061 BLOCK_VARS (block) = ctx->block_vars;
8062 }
8063
8064 /* Helper function for lower_omp_task_reductions. For a specific PASS
8065 find the next clause that should be processed, or return false
8066 if all have been processed already. */
8067
8068 static inline bool
8069 omp_task_reduction_iterate (int pass, enum tree_code code,
8070 enum omp_clause_code ccode, tree *c, tree *decl,
8071 tree *type, tree *next)
8072 {
8073 for (; *c; *c = omp_find_clause (OMP_CLAUSE_CHAIN (*c), ccode))
8074 {
8075 if (ccode == OMP_CLAUSE_REDUCTION
8076 && code != OMP_TASKLOOP
8077 && !OMP_CLAUSE_REDUCTION_TASK (*c))
8078 continue;
8079 *decl = OMP_CLAUSE_DECL (*c);
8080 *type = TREE_TYPE (*decl);
8081 if (TREE_CODE (*decl) == MEM_REF)
8082 {
8083 if (pass != 1)
8084 continue;
8085 }
8086 else
8087 {
8088 if (omp_is_reference (*decl))
8089 *type = TREE_TYPE (*type);
8090 if (pass != (!TREE_CONSTANT (TYPE_SIZE_UNIT (*type))))
8091 continue;
8092 }
8093 *next = omp_find_clause (OMP_CLAUSE_CHAIN (*c), ccode);
8094 return true;
8095 }
8096 *decl = NULL_TREE;
8097 *type = NULL_TREE;
8098 *next = NULL_TREE;
8099 return false;
8100 }
8101
8102 /* Lower task_reduction and reduction clauses (the latter, unless CODE is
8103 OMP_TASKGROUP, only those with the task modifier).  Register the mapping of
8104 those in the START sequence; reduce and unregister them in the END sequence.  */
8105
8106 static void
8107 lower_omp_task_reductions (omp_context *ctx, enum tree_code code, tree clauses,
8108 gimple_seq *start, gimple_seq *end)
8109 {
8110 enum omp_clause_code ccode
8111 = (code == OMP_TASKGROUP
8112 ? OMP_CLAUSE_TASK_REDUCTION : OMP_CLAUSE_REDUCTION);
8113 tree cancellable = NULL_TREE;
8114 clauses = omp_task_reductions_find_first (clauses, code, ccode);
8115 if (clauses == NULL_TREE)
8116 return;
8117 if (code == OMP_FOR || code == OMP_SECTIONS)
8118 {
8119 for (omp_context *outer = ctx->outer; outer; outer = outer->outer)
8120 if (gimple_code (outer->stmt) == GIMPLE_OMP_PARALLEL
8121 && outer->cancellable)
8122 {
8123 cancellable = error_mark_node;
8124 break;
8125 }
8126 else if (gimple_code (outer->stmt) != GIMPLE_OMP_TASKGROUP)
8127 break;
8128 }
8129 tree record_type = lang_hooks.types.make_type (RECORD_TYPE);
8130 tree *last = &TYPE_FIELDS (record_type);
8131 unsigned cnt = 0;
8132 if (cancellable)
8133 {
8134 tree field = build_decl (UNKNOWN_LOCATION, FIELD_DECL, NULL_TREE,
8135 ptr_type_node);
8136 tree ifield = build_decl (UNKNOWN_LOCATION, FIELD_DECL, NULL_TREE,
8137 integer_type_node);
8138 *last = field;
8139 DECL_CHAIN (field) = ifield;
8140 last = &DECL_CHAIN (ifield);
8141 DECL_CONTEXT (field) = record_type;
8142 if (TYPE_ALIGN (record_type) < DECL_ALIGN (field))
8143 SET_TYPE_ALIGN (record_type, DECL_ALIGN (field));
8144 DECL_CONTEXT (ifield) = record_type;
8145 if (TYPE_ALIGN (record_type) < DECL_ALIGN (ifield))
8146 SET_TYPE_ALIGN (record_type, DECL_ALIGN (ifield));
8147 }
8148 for (int pass = 0; pass < 2; pass++)
8149 {
8150 tree decl, type, next;
8151 for (tree c = clauses;
8152 omp_task_reduction_iterate (pass, code, ccode,
8153 &c, &decl, &type, &next); c = next)
8154 {
8155 ++cnt;
8156 tree new_type = type;
8157 if (ctx->outer)
8158 new_type = remap_type (type, &ctx->outer->cb);
8159 tree field
8160 = build_decl (OMP_CLAUSE_LOCATION (c), FIELD_DECL,
8161 DECL_P (decl) ? DECL_NAME (decl) : NULL_TREE,
8162 new_type);
8163 if (DECL_P (decl) && type == TREE_TYPE (decl))
8164 {
8165 SET_DECL_ALIGN (field, DECL_ALIGN (decl));
8166 DECL_USER_ALIGN (field) = DECL_USER_ALIGN (decl);
8167 TREE_THIS_VOLATILE (field) = TREE_THIS_VOLATILE (decl);
8168 }
8169 else
8170 SET_DECL_ALIGN (field, TYPE_ALIGN (type));
8171 DECL_CONTEXT (field) = record_type;
8172 if (TYPE_ALIGN (record_type) < DECL_ALIGN (field))
8173 SET_TYPE_ALIGN (record_type, DECL_ALIGN (field));
8174 *last = field;
8175 last = &DECL_CHAIN (field);
8176 tree bfield
8177 = build_decl (OMP_CLAUSE_LOCATION (c), FIELD_DECL, NULL_TREE,
8178 boolean_type_node);
8179 DECL_CONTEXT (bfield) = record_type;
8180 if (TYPE_ALIGN (record_type) < DECL_ALIGN (bfield))
8181 SET_TYPE_ALIGN (record_type, DECL_ALIGN (bfield));
8182 *last = bfield;
8183 last = &DECL_CHAIN (bfield);
8184 }
8185 }
8186 *last = NULL_TREE;
8187 layout_type (record_type);
8188
8189 /* Build up an array which registers with the runtime all the reductions
8190 and deregisters them at the end. Format documented in libgomp/task.c. */
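/* A sketch of the layout assembled below (libgomp/task.c is
   authoritative; slots not assigned here are filled by the runtime):

	avar[0] = <number of reductions>;
	avar[1] = <per-thread chunk size, rounded up to a cache line>;
	avar[2] = <alignment>;
	avar[3] = -1;  avar[4] = 0;
	avar[7 + i*3]	  = <address of the i-th reduced variable>;
	avar[7 + i*3 + 1] = <offset of its private copy in the record>;  */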
8191 tree atype = build_array_type_nelts (pointer_sized_int_node, 7 + cnt * 3);
8192 tree avar = create_tmp_var_raw (atype);
8193 gimple_add_tmp_var (avar);
8194 TREE_ADDRESSABLE (avar) = 1;
8195 tree r = build4 (ARRAY_REF, pointer_sized_int_node, avar, size_zero_node,
8196 NULL_TREE, NULL_TREE);
8197 tree t = build_int_cst (pointer_sized_int_node, cnt);
8198 gimple_seq_add_stmt (start, gimple_build_assign (r, t));
8199 gimple_seq seq = NULL;
8200 tree sz = fold_convert (pointer_sized_int_node,
8201 TYPE_SIZE_UNIT (record_type));
8202 int cachesz = 64;
8203 sz = fold_build2 (PLUS_EXPR, pointer_sized_int_node, sz,
8204 build_int_cst (pointer_sized_int_node, cachesz - 1));
8205 sz = fold_build2 (BIT_AND_EXPR, pointer_sized_int_node, sz,
8206 build_int_cst (pointer_sized_int_node, ~(cachesz - 1)));
8207 ctx->task_reductions.create (1 + cnt);
8208 ctx->task_reduction_map = new hash_map<tree, unsigned>;
8209 ctx->task_reductions.quick_push (TREE_CODE (sz) == INTEGER_CST
8210 ? sz : NULL_TREE);
8211 sz = force_gimple_operand (sz, &seq, true, NULL_TREE);
8212 gimple_seq_add_seq (start, seq);
8213 r = build4 (ARRAY_REF, pointer_sized_int_node, avar, size_one_node,
8214 NULL_TREE, NULL_TREE);
8215 gimple_seq_add_stmt (start, gimple_build_assign (r, sz));
8216 r = build4 (ARRAY_REF, pointer_sized_int_node, avar, size_int (2),
8217 NULL_TREE, NULL_TREE);
8218 t = build_int_cst (pointer_sized_int_node,
8219 MAX (TYPE_ALIGN_UNIT (record_type), (unsigned) cachesz));
8220 gimple_seq_add_stmt (start, gimple_build_assign (r, t));
8221 r = build4 (ARRAY_REF, pointer_sized_int_node, avar, size_int (3),
8222 NULL_TREE, NULL_TREE);
8223 t = build_int_cst (pointer_sized_int_node, -1);
8224 gimple_seq_add_stmt (start, gimple_build_assign (r, t));
8225 r = build4 (ARRAY_REF, pointer_sized_int_node, avar, size_int (4),
8226 NULL_TREE, NULL_TREE);
8227 t = build_int_cst (pointer_sized_int_node, 0);
8228 gimple_seq_add_stmt (start, gimple_build_assign (r, t));
8229
8230 /* In the END sequence, build a loop that iterates from 0 to
8231 omp_get_num_threads () - 1 and for each task reduction checks a bool right
8232 after the private variable within that thread's chunk; if the bool is clear,
8233 it hasn't been initialized and thus isn't going to be reduced nor destructed;
8234 otherwise reduce and destruct it.  */
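/* Roughly (a sketch; DATA advances by the padded chunk size SZ):

	for (idx = 0; idx < num_thr_sz; idx++, data += sz)
	  {
	    ptr = (<record type> *) data;
	    if (<bool after the private copy in *ptr>)
	      <merge the private copy into the original, then destruct>;
	  }  */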
8235 tree idx = create_tmp_var (size_type_node);
8236 gimple_seq_add_stmt (end, gimple_build_assign (idx, size_zero_node));
8237 tree num_thr_sz = create_tmp_var (size_type_node);
8238 tree lab1 = create_artificial_label (UNKNOWN_LOCATION);
8239 tree lab2 = create_artificial_label (UNKNOWN_LOCATION);
8240 tree lab3 = NULL_TREE;
8241 gimple *g;
8242 if (code == OMP_FOR || code == OMP_SECTIONS)
8243 {
8244 /* For worksharing constructs, only perform it in the master thread,
8245 with the exception of cancelled implicit barriers, where only
8246 the current thread is handled.  */
8247 tree lab4 = create_artificial_label (UNKNOWN_LOCATION);
8248 t = builtin_decl_explicit (BUILT_IN_OMP_GET_THREAD_NUM);
8249 tree thr_num = create_tmp_var (integer_type_node);
8250 g = gimple_build_call (t, 0);
8251 gimple_call_set_lhs (g, thr_num);
8252 gimple_seq_add_stmt (end, g);
8253 if (cancellable)
8254 {
8255 tree c;
8256 tree lab5 = create_artificial_label (UNKNOWN_LOCATION);
8257 tree lab6 = create_artificial_label (UNKNOWN_LOCATION);
8258 lab3 = create_artificial_label (UNKNOWN_LOCATION);
8259 if (code == OMP_FOR)
8260 c = gimple_omp_for_clauses (ctx->stmt);
8261 else /* if (code == OMP_SECTIONS) */
8262 c = gimple_omp_sections_clauses (ctx->stmt);
8263 c = OMP_CLAUSE_DECL (omp_find_clause (c, OMP_CLAUSE__REDUCTEMP_));
8264 cancellable = c;
8265 g = gimple_build_cond (NE_EXPR, c, build_zero_cst (TREE_TYPE (c)),
8266 lab5, lab6);
8267 gimple_seq_add_stmt (end, g);
8268 gimple_seq_add_stmt (end, gimple_build_label (lab5));
8269 g = gimple_build_assign (idx, NOP_EXPR, thr_num);
8270 gimple_seq_add_stmt (end, g);
8271 g = gimple_build_assign (num_thr_sz, PLUS_EXPR, idx,
8272 build_one_cst (TREE_TYPE (idx)));
8273 gimple_seq_add_stmt (end, g);
8274 gimple_seq_add_stmt (end, gimple_build_goto (lab3));
8275 gimple_seq_add_stmt (end, gimple_build_label (lab6));
8276 }
8277 g = gimple_build_cond (NE_EXPR, thr_num, integer_zero_node, lab2, lab4);
8278 gimple_seq_add_stmt (end, g);
8279 gimple_seq_add_stmt (end, gimple_build_label (lab4));
8280 }
8281 if (code != OMP_PARALLEL)
8282 {
8283 t = builtin_decl_explicit (BUILT_IN_OMP_GET_NUM_THREADS);
8284 tree num_thr = create_tmp_var (integer_type_node);
8285 g = gimple_build_call (t, 0);
8286 gimple_call_set_lhs (g, num_thr);
8287 gimple_seq_add_stmt (end, g);
8288 g = gimple_build_assign (num_thr_sz, NOP_EXPR, num_thr);
8289 gimple_seq_add_stmt (end, g);
8290 if (cancellable)
8291 gimple_seq_add_stmt (end, gimple_build_label (lab3));
8292 }
8293 else
8294 {
8295 tree c = omp_find_clause (gimple_omp_parallel_clauses (ctx->stmt),
8296 OMP_CLAUSE__REDUCTEMP_);
8297 t = fold_convert (pointer_sized_int_node, OMP_CLAUSE_DECL (c));
8298 t = fold_convert (size_type_node, t);
8299 gimplify_assign (num_thr_sz, t, end);
8300 }
8301 t = build4 (ARRAY_REF, pointer_sized_int_node, avar, size_int (2),
8302 NULL_TREE, NULL_TREE);
8303 tree data = create_tmp_var (pointer_sized_int_node);
8304 gimple_seq_add_stmt (end, gimple_build_assign (data, t));
8305 gimple_seq_add_stmt (end, gimple_build_label (lab1));
8306 tree ptr;
8307 if (TREE_CODE (TYPE_SIZE_UNIT (record_type)) == INTEGER_CST)
8308 ptr = create_tmp_var (build_pointer_type (record_type));
8309 else
8310 ptr = create_tmp_var (ptr_type_node);
8311 gimple_seq_add_stmt (end, gimple_build_assign (ptr, NOP_EXPR, data));
8312
8313 tree field = TYPE_FIELDS (record_type);
8314 cnt = 0;
8315 if (cancellable)
8316 field = DECL_CHAIN (DECL_CHAIN (field));
8317 for (int pass = 0; pass < 2; pass++)
8318 {
8319 tree decl, type, next;
8320 for (tree c = clauses;
8321 omp_task_reduction_iterate (pass, code, ccode,
8322 &c, &decl, &type, &next); c = next)
8323 {
8324 tree var = decl, ref;
8325 if (TREE_CODE (decl) == MEM_REF)
8326 {
8327 var = TREE_OPERAND (var, 0);
8328 if (TREE_CODE (var) == POINTER_PLUS_EXPR)
8329 var = TREE_OPERAND (var, 0);
8330 tree v = var;
8331 if (TREE_CODE (var) == ADDR_EXPR)
8332 var = TREE_OPERAND (var, 0);
8333 else if (TREE_CODE (var) == INDIRECT_REF)
8334 var = TREE_OPERAND (var, 0);
8335 tree orig_var = var;
8336 if (is_variable_sized (var))
8337 {
8338 gcc_assert (DECL_HAS_VALUE_EXPR_P (var));
8339 var = DECL_VALUE_EXPR (var);
8340 gcc_assert (TREE_CODE (var) == INDIRECT_REF);
8341 var = TREE_OPERAND (var, 0);
8342 gcc_assert (DECL_P (var));
8343 }
8344 t = ref = maybe_lookup_decl_in_outer_ctx (var, ctx);
8345 if (orig_var != var)
8346 gcc_assert (TREE_CODE (v) == ADDR_EXPR);
8347 else if (TREE_CODE (v) == ADDR_EXPR)
8348 t = build_fold_addr_expr (t);
8349 else if (TREE_CODE (v) == INDIRECT_REF)
8350 t = build_fold_indirect_ref (t);
8351 if (TREE_CODE (TREE_OPERAND (decl, 0)) == POINTER_PLUS_EXPR)
8352 {
8353 tree b = TREE_OPERAND (TREE_OPERAND (decl, 0), 1);
8354 b = maybe_lookup_decl_in_outer_ctx (b, ctx);
8355 t = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (t), t, b);
8356 }
8357 if (!integer_zerop (TREE_OPERAND (decl, 1)))
8358 t = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (t), t,
8359 fold_convert (size_type_node,
8360 TREE_OPERAND (decl, 1)));
8361 }
8362 else
8363 {
8364 t = ref = maybe_lookup_decl_in_outer_ctx (var, ctx);
8365 if (!omp_is_reference (decl))
8366 t = build_fold_addr_expr (t);
8367 }
8368 t = fold_convert (pointer_sized_int_node, t);
8369 seq = NULL;
8370 t = force_gimple_operand (t, &seq, true, NULL_TREE);
8371 gimple_seq_add_seq (start, seq);
8372 r = build4 (ARRAY_REF, pointer_sized_int_node, avar,
8373 size_int (7 + cnt * 3), NULL_TREE, NULL_TREE);
8374 gimple_seq_add_stmt (start, gimple_build_assign (r, t));
8375 t = unshare_expr (byte_position (field));
8376 t = fold_convert (pointer_sized_int_node, t);
8377 ctx->task_reduction_map->put (c, cnt);
8378 ctx->task_reductions.quick_push (TREE_CODE (t) == INTEGER_CST
8379 ? t : NULL_TREE);
8380 seq = NULL;
8381 t = force_gimple_operand (t, &seq, true, NULL_TREE);
8382 gimple_seq_add_seq (start, seq);
8383 r = build4 (ARRAY_REF, pointer_sized_int_node, avar,
8384 size_int (7 + cnt * 3 + 1), NULL_TREE, NULL_TREE);
8385 gimple_seq_add_stmt (start, gimple_build_assign (r, t));
8386
8387 tree bfield = DECL_CHAIN (field);
8388 tree cond;
8389 if (code == OMP_PARALLEL || code == OMP_FOR || code == OMP_SECTIONS)
8390 /* In parallel or worksharing all threads unconditionally
8391 initialize all their task reduction private variables. */
8392 cond = boolean_true_node;
8393 else if (TREE_TYPE (ptr) == ptr_type_node)
8394 {
8395 cond = build2 (POINTER_PLUS_EXPR, ptr_type_node, ptr,
8396 unshare_expr (byte_position (bfield)));
8397 seq = NULL;
8398 cond = force_gimple_operand (cond, &seq, true, NULL_TREE);
8399 gimple_seq_add_seq (end, seq);
8400 tree pbool = build_pointer_type (TREE_TYPE (bfield));
8401 cond = build2 (MEM_REF, TREE_TYPE (bfield), cond,
8402 build_int_cst (pbool, 0));
8403 }
8404 else
8405 cond = build3 (COMPONENT_REF, TREE_TYPE (bfield),
8406 build_simple_mem_ref (ptr), bfield, NULL_TREE);
8407 tree lab3 = create_artificial_label (UNKNOWN_LOCATION);
8408 tree lab4 = create_artificial_label (UNKNOWN_LOCATION);
8409 tree condv = create_tmp_var (boolean_type_node);
8410 gimple_seq_add_stmt (end, gimple_build_assign (condv, cond));
8411 g = gimple_build_cond (NE_EXPR, condv, boolean_false_node,
8412 lab3, lab4);
8413 gimple_seq_add_stmt (end, g);
8414 gimple_seq_add_stmt (end, gimple_build_label (lab3));
8415 if (cancellable && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) == NULL_TREE)
8416 {
8417 /* If this reduction doesn't need destruction and parallel
8418 has been cancelled, there is nothing to do for this
8419 reduction, so jump around the merge operation. */
8420 tree lab5 = create_artificial_label (UNKNOWN_LOCATION);
8421 g = gimple_build_cond (NE_EXPR, cancellable,
8422 build_zero_cst (TREE_TYPE (cancellable)),
8423 lab4, lab5);
8424 gimple_seq_add_stmt (end, g);
8425 gimple_seq_add_stmt (end, gimple_build_label (lab5));
8426 }
8427
8428 tree new_var;
8429 if (TREE_TYPE (ptr) == ptr_type_node)
8430 {
8431 new_var = build2 (POINTER_PLUS_EXPR, ptr_type_node, ptr,
8432 unshare_expr (byte_position (field)));
8433 seq = NULL;
8434 new_var = force_gimple_operand (new_var, &seq, true, NULL_TREE);
8435 gimple_seq_add_seq (end, seq);
8436 tree pbool = build_pointer_type (TREE_TYPE (field));
8437 new_var = build2 (MEM_REF, TREE_TYPE (field), new_var,
8438 build_int_cst (pbool, 0));
8439 }
8440 else
8441 new_var = build3 (COMPONENT_REF, TREE_TYPE (field),
8442 build_simple_mem_ref (ptr), field, NULL_TREE);
8443
8444 enum tree_code rcode = OMP_CLAUSE_REDUCTION_CODE (c);
8445 if (TREE_CODE (decl) != MEM_REF && omp_is_reference (decl))
8446 ref = build_simple_mem_ref (ref);
8447 /* reduction(-:var) sums up the partial results, so it acts
8448 identically to reduction(+:var). */
8449 if (rcode == MINUS_EXPR)
8450 rcode = PLUS_EXPR;
8451 if (TREE_CODE (decl) == MEM_REF)
8452 {
8453 tree type = TREE_TYPE (new_var);
8454 tree v = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
8455 tree i = create_tmp_var (TREE_TYPE (v));
8456 tree ptype = build_pointer_type (TREE_TYPE (type));
8457 if (DECL_P (v))
8458 {
8459 v = maybe_lookup_decl_in_outer_ctx (v, ctx);
8460 tree vv = create_tmp_var (TREE_TYPE (v));
8461 gimplify_assign (vv, v, start);
8462 v = vv;
8463 }
8464 ref = build4 (ARRAY_REF, pointer_sized_int_node, avar,
8465 size_int (7 + cnt * 3), NULL_TREE, NULL_TREE);
8466 new_var = build_fold_addr_expr (new_var);
8467 new_var = fold_convert (ptype, new_var);
8468 ref = fold_convert (ptype, ref);
8469 tree m = create_tmp_var (ptype);
8470 gimplify_assign (m, new_var, end);
8471 new_var = m;
8472 m = create_tmp_var (ptype);
8473 gimplify_assign (m, ref, end);
8474 ref = m;
8475 gimplify_assign (i, build_int_cst (TREE_TYPE (v), 0), end);
8476 tree body = create_artificial_label (UNKNOWN_LOCATION);
8477 tree endl = create_artificial_label (UNKNOWN_LOCATION);
8478 gimple_seq_add_stmt (end, gimple_build_label (body));
8479 tree priv = build_simple_mem_ref (new_var);
8480 tree out = build_simple_mem_ref (ref);
8481 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
8482 {
8483 tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
8484 tree decl_placeholder
8485 = OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c);
8486 tree lab6 = NULL_TREE;
8487 if (cancellable)
8488 {
8489 /* If this reduction needs destruction and parallel
8490 has been cancelled, jump around the merge operation
8491 to the destruction. */
8492 tree lab5 = create_artificial_label (UNKNOWN_LOCATION);
8493 lab6 = create_artificial_label (UNKNOWN_LOCATION);
8494 tree zero = build_zero_cst (TREE_TYPE (cancellable));
8495 g = gimple_build_cond (NE_EXPR, cancellable, zero,
8496 lab6, lab5);
8497 gimple_seq_add_stmt (end, g);
8498 gimple_seq_add_stmt (end, gimple_build_label (lab5));
8499 }
8500 SET_DECL_VALUE_EXPR (placeholder, out);
8501 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
8502 SET_DECL_VALUE_EXPR (decl_placeholder, priv);
8503 DECL_HAS_VALUE_EXPR_P (decl_placeholder) = 1;
8504 lower_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c), ctx);
8505 gimple_seq_add_seq (end,
8506 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c));
8507 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
8508 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_TASK_REDUCTION)
8509 {
8510 OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) = NULL;
8511 OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c) = NULL;
8512 }
8513 if (cancellable)
8514 gimple_seq_add_stmt (end, gimple_build_label (lab6));
8515 tree x = lang_hooks.decls.omp_clause_dtor (c, priv);
8516 if (x)
8517 {
8518 gimple_seq tseq = NULL;
8519 gimplify_stmt (&x, &tseq);
8520 gimple_seq_add_seq (end, tseq);
8521 }
8522 }
8523 else
8524 {
8525 tree x = build2 (rcode, TREE_TYPE (out), out, priv);
8526 out = unshare_expr (out);
8527 gimplify_assign (out, x, end);
8528 }
8529 gimple *g
8530 = gimple_build_assign (new_var, POINTER_PLUS_EXPR, new_var,
8531 TYPE_SIZE_UNIT (TREE_TYPE (type)));
8532 gimple_seq_add_stmt (end, g);
8533 g = gimple_build_assign (ref, POINTER_PLUS_EXPR, ref,
8534 TYPE_SIZE_UNIT (TREE_TYPE (type)));
8535 gimple_seq_add_stmt (end, g);
8536 g = gimple_build_assign (i, PLUS_EXPR, i,
8537 build_int_cst (TREE_TYPE (i), 1));
8538 gimple_seq_add_stmt (end, g);
8539 g = gimple_build_cond (LE_EXPR, i, v, body, endl);
8540 gimple_seq_add_stmt (end, g);
8541 gimple_seq_add_stmt (end, gimple_build_label (endl));
8542 }
8543 else if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
8544 {
8545 tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
8546 tree oldv = NULL_TREE;
8547 tree lab6 = NULL_TREE;
8548 if (cancellable)
8549 {
8550 /* If this reduction needs destruction and parallel
8551 has been cancelled, jump around the merge operation
8552 to the destruction. */
8553 tree lab5 = create_artificial_label (UNKNOWN_LOCATION);
8554 lab6 = create_artificial_label (UNKNOWN_LOCATION);
8555 tree zero = build_zero_cst (TREE_TYPE (cancellable));
8556 g = gimple_build_cond (NE_EXPR, cancellable, zero,
8557 lab6, lab5);
8558 gimple_seq_add_stmt (end, g);
8559 gimple_seq_add_stmt (end, gimple_build_label (lab5));
8560 }
8561 if (omp_is_reference (decl)
8562 && !useless_type_conversion_p (TREE_TYPE (placeholder),
8563 TREE_TYPE (ref)))
8564 ref = build_fold_addr_expr_loc (OMP_CLAUSE_LOCATION (c), ref);
8565 ref = build_fold_addr_expr_loc (OMP_CLAUSE_LOCATION (c), ref);
8566 tree refv = create_tmp_var (TREE_TYPE (ref));
8567 gimplify_assign (refv, ref, end);
8568 ref = build_simple_mem_ref_loc (OMP_CLAUSE_LOCATION (c), refv);
8569 SET_DECL_VALUE_EXPR (placeholder, ref);
8570 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
8571 tree d = maybe_lookup_decl (decl, ctx);
8572 gcc_assert (d);
8573 if (DECL_HAS_VALUE_EXPR_P (d))
8574 oldv = DECL_VALUE_EXPR (d);
8575 if (omp_is_reference (var))
8576 {
8577 tree v = fold_convert (TREE_TYPE (d),
8578 build_fold_addr_expr (new_var));
8579 SET_DECL_VALUE_EXPR (d, v);
8580 }
8581 else
8582 SET_DECL_VALUE_EXPR (d, new_var);
8583 DECL_HAS_VALUE_EXPR_P (d) = 1;
8584 lower_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c), ctx);
8585 if (oldv)
8586 SET_DECL_VALUE_EXPR (d, oldv);
8587 else
8588 {
8589 SET_DECL_VALUE_EXPR (d, NULL_TREE);
8590 DECL_HAS_VALUE_EXPR_P (d) = 0;
8591 }
8592 gimple_seq_add_seq (end, OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c));
8593 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
8594 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_TASK_REDUCTION)
8595 OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) = NULL;
8596 if (cancellable)
8597 gimple_seq_add_stmt (end, gimple_build_label (lab6));
8598 tree x = lang_hooks.decls.omp_clause_dtor (c, new_var);
8599 if (x)
8600 {
8601 gimple_seq tseq = NULL;
8602 gimplify_stmt (&x, &tseq);
8603 gimple_seq_add_seq (end, tseq);
8604 }
8605 }
8606 else
8607 {
8608 tree x = build2 (rcode, TREE_TYPE (ref), ref, new_var);
8609 ref = unshare_expr (ref);
8610 gimplify_assign (ref, x, end);
8611 }
8612 gimple_seq_add_stmt (end, gimple_build_label (lab4));
8613 ++cnt;
8614 field = DECL_CHAIN (bfield);
8615 }
8616 }
8617
8618 if (code == OMP_TASKGROUP)
8619 {
8620 t = builtin_decl_explicit (BUILT_IN_GOMP_TASKGROUP_REDUCTION_REGISTER);
8621 g = gimple_build_call (t, 1, build_fold_addr_expr (avar));
8622 gimple_seq_add_stmt (start, g);
8623 }
8624 else
8625 {
8626 tree c;
8627 if (code == OMP_FOR)
8628 c = gimple_omp_for_clauses (ctx->stmt);
8629 else if (code == OMP_SECTIONS)
8630 c = gimple_omp_sections_clauses (ctx->stmt);
8631 else
8632 c = gimple_omp_taskreg_clauses (ctx->stmt);
8633 c = omp_find_clause (c, OMP_CLAUSE__REDUCTEMP_);
8634 t = fold_convert (TREE_TYPE (OMP_CLAUSE_DECL (c)),
8635 build_fold_addr_expr (avar));
8636 gimplify_assign (OMP_CLAUSE_DECL (c), t, start);
8637 }
8638
8639 gimple_seq_add_stmt (end, gimple_build_assign (data, PLUS_EXPR, data, sz));
8640 gimple_seq_add_stmt (end, gimple_build_assign (idx, PLUS_EXPR, idx,
8641 size_one_node));
8642 g = gimple_build_cond (NE_EXPR, idx, num_thr_sz, lab1, lab2);
8643 gimple_seq_add_stmt (end, g);
8644 gimple_seq_add_stmt (end, gimple_build_label (lab2));
8645 if (code == OMP_FOR || code == OMP_SECTIONS)
8646 {
8647 enum built_in_function bfn
8648 = BUILT_IN_GOMP_WORKSHARE_TASK_REDUCTION_UNREGISTER;
8649 t = builtin_decl_explicit (bfn);
8650 tree c_bool_type = TREE_VALUE (TYPE_ARG_TYPES (TREE_TYPE (t)));
8651 tree arg;
8652 if (cancellable)
8653 {
8654 arg = create_tmp_var (c_bool_type);
8655 gimple_seq_add_stmt (end, gimple_build_assign (arg, NOP_EXPR,
8656 cancellable));
8657 }
8658 else
8659 arg = build_int_cst (c_bool_type, 0);
8660 g = gimple_build_call (t, 1, arg);
8661 }
8662 else
8663 {
8664 t = builtin_decl_explicit (BUILT_IN_GOMP_TASKGROUP_REDUCTION_UNREGISTER);
8665 g = gimple_build_call (t, 1, build_fold_addr_expr (avar));
8666 }
8667 gimple_seq_add_stmt (end, g);
8668 t = build_constructor (atype, NULL);
8669 TREE_THIS_VOLATILE (t) = 1;
8670 gimple_seq_add_stmt (end, gimple_build_assign (avar, t));
8671 }
8672
8673 /* Expand code for an OpenMP taskgroup directive. */
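/* I.e. (a sketch):

	GOMP_taskgroup_start ();
	<register task reductions, if any>
	BODY;
	GIMPLE_OMP_RETURN	// GOMP_taskgroup_end () during expansion
	<reduce and unregister task reductions>  */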
8674
8675 static void
8676 lower_omp_taskgroup (gimple_stmt_iterator *gsi_p, omp_context *ctx)
8677 {
8678 gimple *stmt = gsi_stmt (*gsi_p);
8679 gcall *x;
8680 gbind *bind;
8681 gimple_seq dseq = NULL;
8682 tree block = make_node (BLOCK);
8683
8684 bind = gimple_build_bind (NULL, NULL, block);
8685 gsi_replace (gsi_p, bind, true);
8686 gimple_bind_add_stmt (bind, stmt);
8687
8688 push_gimplify_context ();
8689
8690 x = gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_TASKGROUP_START),
8691 0);
8692 gimple_bind_add_stmt (bind, x);
8693
8694 lower_omp_task_reductions (ctx, OMP_TASKGROUP,
8695 gimple_omp_taskgroup_clauses (stmt),
8696 gimple_bind_body_ptr (bind), &dseq);
8697
8698 lower_omp (gimple_omp_body_ptr (stmt), ctx);
8699 gimple_bind_add_seq (bind, gimple_omp_body (stmt));
8700 gimple_omp_set_body (stmt, NULL);
8701
8702 gimple_bind_add_stmt (bind, gimple_build_omp_return (true));
8703 gimple_bind_add_seq (bind, dseq);
8704
8705 pop_gimplify_context (bind);
8706
8707 gimple_bind_append_vars (bind, ctx->block_vars);
8708 BLOCK_VARS (block) = ctx->block_vars;
8709 }
8710
8711
8712 /* Fold the OMP_ORDERED_CLAUSES for the OMP_ORDERED in STMT if possible. */
8713
8714 static void
8715 lower_omp_ordered_clauses (gimple_stmt_iterator *gsi_p, gomp_ordered *ord_stmt,
8716 omp_context *ctx)
8717 {
8718 struct omp_for_data fd;
8719 if (!ctx->outer || gimple_code (ctx->outer->stmt) != GIMPLE_OMP_FOR)
8720 return;
8721
8722 unsigned int len = gimple_omp_for_collapse (ctx->outer->stmt);
8723 struct omp_for_data_loop *loops = XALLOCAVEC (struct omp_for_data_loop, len);
8724 omp_extract_for_data (as_a <gomp_for *> (ctx->outer->stmt), &fd, loops);
8725 if (!fd.ordered)
8726 return;
8727
8728 tree *list_p = gimple_omp_ordered_clauses_ptr (ord_stmt);
8729 tree c = gimple_omp_ordered_clauses (ord_stmt);
8730 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND
8731 && OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SINK)
8732 {
8733 /* Merge depend clauses from multiple adjacent
8734 #pragma omp ordered depend(sink:...) constructs
8735 into one #pragma omp ordered depend(sink:...), so that
8736 we can optimize them together. */
8737 gimple_stmt_iterator gsi = *gsi_p;
8738 gsi_next (&gsi);
8739 while (!gsi_end_p (gsi))
8740 {
8741 gimple *stmt = gsi_stmt (gsi);
8742 if (is_gimple_debug (stmt)
8743 || gimple_code (stmt) == GIMPLE_NOP)
8744 {
8745 gsi_next (&gsi);
8746 continue;
8747 }
8748 if (gimple_code (stmt) != GIMPLE_OMP_ORDERED)
8749 break;
8750 gomp_ordered *ord_stmt2 = as_a <gomp_ordered *> (stmt);
8751 c = gimple_omp_ordered_clauses (ord_stmt2);
8752 if (c == NULL_TREE
8753 || OMP_CLAUSE_CODE (c) != OMP_CLAUSE_DEPEND
8754 || OMP_CLAUSE_DEPEND_KIND (c) != OMP_CLAUSE_DEPEND_SINK)
8755 break;
8756 while (*list_p)
8757 list_p = &OMP_CLAUSE_CHAIN (*list_p);
8758 *list_p = c;
8759 gsi_remove (&gsi, true);
8760 }
8761 }
8762
8763 /* Canonicalize sink dependence clauses into one folded clause if
8764 possible.
8765
8766 The basic algorithm is to create a sink vector whose first
8767 element is the GCD of all the first elements, and whose remaining
8768 elements are the minimum of the subsequent columns.
8769
8770 We ignore dependence vectors whose first element is zero because
8771 such dependencies are known to be executed by the same thread.
8772
8773 We take into account the direction of the loop: in a forward loop
8774 the offsets are negative, so a minimum iteration distance becomes a
8775 maximum of the offset values. We also ignore sink clauses where the
8776 loop direction is unknown, or where the offsets are clearly invalid
8777 because they are not a multiple of the loop increment.
8778
8779 For example:
8780
8781 #pragma omp for ordered(2)
8782 for (i=0; i < N; ++i)
8783 for (j=0; j < M; ++j)
8784 {
8785 #pragma omp ordered \
8786 depend(sink:i-8,j-2) \
8787 depend(sink:i,j-1) \ // Completely ignored because i+0.
8788 depend(sink:i-4,j-3) \
8789 depend(sink:i-6,j-4)
8790 #pragma omp ordered depend(source)
8791 }
8792
8793 Folded clause is:
8794
8795 depend(sink:-gcd(8,4,6),-min(2,3,4))
8796 -or-
8797 depend(sink:-2,-2)
8798 */
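/* As a standalone sketch of the fold (illustrative only, not code used
   by this pass), the example above amounts to the following, once the
   (i, j-1) vector whose first element is 0 has been dropped:

     static int gcd (int a, int b) { return b ? gcd (b, a % b) : a; }

     int vecs[3][2] = { { -8, -2 }, { -4, -3 }, { -6, -4 } };
     int first = -vecs[0][0], rest = vecs[0][1];
     for (int v = 1; v < 3; v++)
       {
         first = gcd (first, -vecs[v][0]);
         if (vecs[v][1] > rest)   // minimum distance == maximum value here
           rest = vecs[v][1];
       }
     // first == 2 and rest == -2, i.e. depend(sink: -2, -2).  */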
8799
8800 /* FIXME: Computing GCDs where the first element is zero is
8801 non-trivial in the presence of collapsed loops. Do this later. */
8802 if (fd.collapse > 1)
8803 return;
8804
8805 wide_int *folded_deps = XALLOCAVEC (wide_int, 2 * len - 1);
8806
8807 /* wide_int is not a POD so it must be default-constructed. */
8808 for (unsigned i = 0; i != 2 * len - 1; ++i)
8809 new (static_cast<void*>(folded_deps + i)) wide_int ();
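  /* Layout note: folded_deps[0 .. len-1] accumulates the folded vector
     being built, while folded_deps[len .. 2*len-2] temporarily holds
     dimensions 1 .. len-1 of the sink vector currently being examined.  */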
8810
8811 tree folded_dep = NULL_TREE;
8812 /* TRUE if the first dimension's offset is negative. */
8813 bool neg_offset_p = false;
8814
8815 list_p = gimple_omp_ordered_clauses_ptr (ord_stmt);
8816 unsigned int i;
8817 while ((c = *list_p) != NULL)
8818 {
8819 bool remove = false;
8820
8821 gcc_assert (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND);
8822 if (OMP_CLAUSE_DEPEND_KIND (c) != OMP_CLAUSE_DEPEND_SINK)
8823 goto next_ordered_clause;
8824
8825 tree vec;
8826 for (vec = OMP_CLAUSE_DECL (c), i = 0;
8827 vec && TREE_CODE (vec) == TREE_LIST;
8828 vec = TREE_CHAIN (vec), ++i)
8829 {
8830 gcc_assert (i < len);
8831
8832 /* omp_extract_for_data has canonicalized the condition. */
8833 gcc_assert (fd.loops[i].cond_code == LT_EXPR
8834 || fd.loops[i].cond_code == GT_EXPR);
8835 bool forward = fd.loops[i].cond_code == LT_EXPR;
8836 bool maybe_lexically_later = true;
8837
8838 /* While the committee makes up its mind, bail if we have any
8839 non-constant steps. */
8840 if (TREE_CODE (fd.loops[i].step) != INTEGER_CST)
8841 goto lower_omp_ordered_ret;
8842
8843 tree itype = TREE_TYPE (TREE_VALUE (vec));
8844 if (POINTER_TYPE_P (itype))
8845 itype = sizetype;
8846 wide_int offset = wide_int::from (wi::to_wide (TREE_PURPOSE (vec)),
8847 TYPE_PRECISION (itype),
8848 TYPE_SIGN (itype));
8849
8850 /* Ignore invalid offsets that are not multiples of the step. */
8851 if (!wi::multiple_of_p (wi::abs (offset),
8852 wi::abs (wi::to_wide (fd.loops[i].step)),
8853 UNSIGNED))
8854 {
8855 warning_at (OMP_CLAUSE_LOCATION (c), 0,
8856 "ignoring sink clause with offset that is not "
8857 "a multiple of the loop step");
8858 remove = true;
8859 goto next_ordered_clause;
8860 }
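	      /* E.g. the check above rejects depend(sink:i-2) for a loop
		 with step 4, since 2 is not a multiple of 4.  */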
8861
8862 /* Calculate the first dimension. The first dimension of
8863 the folded dependency vector is the GCD of the first
8864 elements, while ignoring any first elements whose offset
8865 is 0. */
8866 if (i == 0)
8867 {
8868 /* Ignore dependence vectors whose first dimension is 0. */
8869 if (offset == 0)
8870 {
8871 remove = true;
8872 goto next_ordered_clause;
8873 }
8874 else
8875 {
8876 if (!TYPE_UNSIGNED (itype) && (forward ^ wi::neg_p (offset)))
8877 {
8878 error_at (OMP_CLAUSE_LOCATION (c),
8879 "first offset must be in opposite direction "
8880 "of loop iterations");
8881 goto lower_omp_ordered_ret;
8882 }
8883 if (forward)
8884 offset = -offset;
8885 neg_offset_p = forward;
8886 /* Initialize the first time around. */
8887 if (folded_dep == NULL_TREE)
8888 {
8889 folded_dep = c;
8890 folded_deps[0] = offset;
8891 }
8892 else
8893 folded_deps[0] = wi::gcd (folded_deps[0],
8894 offset, UNSIGNED);
8895 }
8896 }
8897 /* Calculate minimum for the remaining dimensions. */
8898 else
8899 {
8900 folded_deps[len + i - 1] = offset;
8901 if (folded_dep == c)
8902 folded_deps[i] = offset;
8903 else if (maybe_lexically_later
8904 && !wi::eq_p (folded_deps[i], offset))
8905 {
8906 if (forward ^ wi::gts_p (folded_deps[i], offset))
8907 {
8908 unsigned int j;
8909 folded_dep = c;
8910 for (j = 1; j <= i; j++)
8911 folded_deps[j] = folded_deps[len + j - 1];
8912 }
8913 else
8914 maybe_lexically_later = false;
8915 }
8916 }
8917 }
8918 gcc_assert (i == len);
8919
8920 remove = true;
8921
8922 next_ordered_clause:
8923 if (remove)
8924 *list_p = OMP_CLAUSE_CHAIN (c);
8925 else
8926 list_p = &OMP_CLAUSE_CHAIN (c);
8927 }
8928
8929 if (folded_dep)
8930 {
8931 if (neg_offset_p)
8932 folded_deps[0] = -folded_deps[0];
8933
8934 tree itype = TREE_TYPE (TREE_VALUE (OMP_CLAUSE_DECL (folded_dep)));
8935 if (POINTER_TYPE_P (itype))
8936 itype = sizetype;
8937
8938 TREE_PURPOSE (OMP_CLAUSE_DECL (folded_dep))
8939 = wide_int_to_tree (itype, folded_deps[0]);
8940 OMP_CLAUSE_CHAIN (folded_dep) = gimple_omp_ordered_clauses (ord_stmt);
8941 *gimple_omp_ordered_clauses_ptr (ord_stmt) = folded_dep;
8942 }
8943
8944 lower_omp_ordered_ret:
8945
8946 /* Ordered without clauses means #pragma omp ordered threads, whereas
8947 we want a nop instead if we have removed all clauses. */
8948 if (gimple_omp_ordered_clauses (ord_stmt) == NULL_TREE)
8949 gsi_replace (gsi_p, gimple_build_nop (), true);
8950 }
8951
8952
8953 /* Expand code for an OpenMP ordered directive. */
8954
8955 static void
8956 lower_omp_ordered (gimple_stmt_iterator *gsi_p, omp_context *ctx)
8957 {
8958 tree block;
8959 gimple *stmt = gsi_stmt (*gsi_p), *g;
8960 gomp_ordered *ord_stmt = as_a <gomp_ordered *> (stmt);
8961 gcall *x;
8962 gbind *bind;
8963 bool simd = omp_find_clause (gimple_omp_ordered_clauses (ord_stmt),
8964 OMP_CLAUSE_SIMD);
8965 /* FIXME: this should check presence of OMP_CLAUSE__SIMT_ on the enclosing
8966 loop. */
8967 bool maybe_simt
8968 = simd && omp_maybe_offloaded_ctx (ctx) && omp_max_simt_vf () > 1;
8969 bool threads = omp_find_clause (gimple_omp_ordered_clauses (ord_stmt),
8970 OMP_CLAUSE_THREADS);
8971
8972 if (omp_find_clause (gimple_omp_ordered_clauses (ord_stmt),
8973 OMP_CLAUSE_DEPEND))
8974 {
8975 /* FIXME: This needs to be moved to the expansion pass to verify various
8976 conditions only testable on a CFG with dominators computed, and also
8977 all the depend clauses to be merged might still need to be available
8978 for the runtime checks. */
8979 if (0)
8980 lower_omp_ordered_clauses (gsi_p, ord_stmt, ctx);
8981 return;
8982 }
8983
8984 push_gimplify_context ();
8985
8986 block = make_node (BLOCK);
8987 bind = gimple_build_bind (NULL, NULL, block);
8988 gsi_replace (gsi_p, bind, true);
8989 gimple_bind_add_stmt (bind, stmt);
8990
8991 if (simd)
8992 {
8993 x = gimple_build_call_internal (IFN_GOMP_SIMD_ORDERED_START, 1,
8994 build_int_cst (NULL_TREE, threads));
8995 cfun->has_simduid_loops = true;
8996 }
8997 else
8998 x = gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_ORDERED_START),
8999 0);
9000 gimple_bind_add_stmt (bind, x);
9001
9002 tree counter = NULL_TREE, test = NULL_TREE, body = NULL_TREE;
9003 if (maybe_simt)
9004 {
9005 counter = create_tmp_var (integer_type_node);
9006 g = gimple_build_call_internal (IFN_GOMP_SIMT_LANE, 0);
9007 gimple_call_set_lhs (g, counter);
9008 gimple_bind_add_stmt (bind, g);
9009
9010 body = create_artificial_label (UNKNOWN_LOCATION);
9011 test = create_artificial_label (UNKNOWN_LOCATION);
9012 gimple_bind_add_stmt (bind, gimple_build_label (body));
9013
9014 tree simt_pred = create_tmp_var (integer_type_node);
9015 g = gimple_build_call_internal (IFN_GOMP_SIMT_ORDERED_PRED, 1, counter);
9016 gimple_call_set_lhs (g, simt_pred);
9017 gimple_bind_add_stmt (bind, g);
9018
9019 tree t = create_artificial_label (UNKNOWN_LOCATION);
9020 g = gimple_build_cond (EQ_EXPR, simt_pred, integer_zero_node, t, test);
9021 gimple_bind_add_stmt (bind, g);
9022
9023 gimple_bind_add_stmt (bind, gimple_build_label (t));
9024 }
9025 lower_omp (gimple_omp_body_ptr (stmt), ctx);
9026 gimple_omp_set_body (stmt, maybe_catch_exception (gimple_omp_body (stmt)));
9027 gimple_bind_add_seq (bind, gimple_omp_body (stmt));
9028 gimple_omp_set_body (stmt, NULL);
9029
9030 if (maybe_simt)
9031 {
9032 gimple_bind_add_stmt (bind, gimple_build_label (test));
9033 g = gimple_build_assign (counter, MINUS_EXPR, counter, integer_one_node);
9034 gimple_bind_add_stmt (bind, g);
9035
9036 tree c = build2 (GE_EXPR, boolean_type_node, counter, integer_zero_node);
9037 tree nonneg = create_tmp_var (integer_type_node);
9038 gimple_seq tseq = NULL;
9039 gimplify_assign (nonneg, fold_convert (integer_type_node, c), &tseq);
9040 gimple_bind_add_seq (bind, tseq);
9041
9042 g = gimple_build_call_internal (IFN_GOMP_SIMT_VOTE_ANY, 1, nonneg);
9043 gimple_call_set_lhs (g, nonneg);
9044 gimple_bind_add_stmt (bind, g);
9045
9046 tree end = create_artificial_label (UNKNOWN_LOCATION);
9047 g = gimple_build_cond (NE_EXPR, nonneg, integer_zero_node, body, end);
9048 gimple_bind_add_stmt (bind, g);
9049
9050 gimple_bind_add_stmt (bind, gimple_build_label (end));
9051 }
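  /* Schematically, the SIMT sequence built above is:

       counter = GOMP_SIMT_LANE ();
     body:
       if (GOMP_SIMT_ORDERED_PRED (counter) == 0)
	 <ordered body>;
     test:
       counter = counter - 1;
       if (GOMP_SIMT_VOTE_ANY (counter >= 0))
	 goto body;

     so the lanes execute the ordered body one at a time, in lane
     order.  */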
9052 if (simd)
9053 x = gimple_build_call_internal (IFN_GOMP_SIMD_ORDERED_END, 1,
9054 build_int_cst (NULL_TREE, threads));
9055 else
9056 x = gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_ORDERED_END),
9057 0);
9058 gimple_bind_add_stmt (bind, x);
9059
9060 gimple_bind_add_stmt (bind, gimple_build_omp_return (true));
9061
9062 pop_gimplify_context (bind);
9063
9064 gimple_bind_append_vars (bind, ctx->block_vars);
9065 BLOCK_VARS (block) = gimple_bind_vars (bind);
9066 }
9067
9068
9069 /* Expand code for an OpenMP scan directive and the structured block
9070 before the scan directive. */
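/* For example (illustrative user code):

     #pragma omp simd reduction (inscan, +:r)
     for (i = 0; i < n; i++)
       {
	 r += a[i];
	 #pragma omp scan inclusive (r)
	 b[i] = r;
       }

   The statements before the scan directive form the input phase and the
   statements after it form the scan phase.  */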
9071
9072 static void
9073 lower_omp_scan (gimple_stmt_iterator *gsi_p, omp_context *ctx)
9074 {
9075 gimple *stmt = gsi_stmt (*gsi_p);
9076 bool has_clauses
9077 = gimple_omp_scan_clauses (as_a <gomp_scan *> (stmt)) != NULL;
9078 tree lane = NULL_TREE;
9079 gimple_seq before = NULL;
9080 omp_context *octx = ctx->outer;
9081 gcc_assert (octx);
9082 if (octx->scan_exclusive && !has_clauses)
9083 {
9084 gimple_stmt_iterator gsi2 = *gsi_p;
9085 gsi_next (&gsi2);
9086 gimple *stmt2 = gsi_stmt (gsi2);
9087 /* For exclusive scan, swap the GIMPLE_OMP_SCAN without clauses
9088 with the following GIMPLE_OMP_SCAN with clauses, so that the
9089 input phase, the one with the exclusive clause(s), comes first. */
9090 if (stmt2
9091 && gimple_code (stmt2) == GIMPLE_OMP_SCAN
9092 && gimple_omp_scan_clauses (as_a <gomp_scan *> (stmt2)) != NULL)
9093 {
9094 gsi_remove (gsi_p, false);
9095 gsi_insert_after (gsi_p, stmt, GSI_SAME_STMT);
9096 ctx = maybe_lookup_ctx (stmt2);
9097 gcc_assert (ctx);
9098 lower_omp_scan (gsi_p, ctx);
9099 return;
9100 }
9101 }
9102
9103 bool input_phase = has_clauses ^ octx->scan_inclusive;
9104 bool is_simd = (gimple_code (octx->stmt) == GIMPLE_OMP_FOR
9105 && gimple_omp_for_kind (octx->stmt) == GF_OMP_FOR_KIND_SIMD);
9106 bool is_for = (gimple_code (octx->stmt) == GIMPLE_OMP_FOR
9107 && gimple_omp_for_kind (octx->stmt) == GF_OMP_FOR_KIND_FOR
9108 && !gimple_omp_for_combined_p (octx->stmt));
9109 bool is_for_simd = is_simd && gimple_omp_for_combined_into_p (octx->stmt);
9110 if (is_for_simd && octx->for_simd_scan_phase)
9111 is_simd = false;
9112 if (is_simd)
9113 if (tree c = omp_find_clause (gimple_omp_for_clauses (octx->stmt),
9114 OMP_CLAUSE__SIMDUID_))
9115 {
9116 tree uid = OMP_CLAUSE__SIMDUID__DECL (c);
9117 lane = create_tmp_var (unsigned_type_node);
9118 tree t = build_int_cst (integer_type_node,
9119 input_phase ? 1
9120 : octx->scan_inclusive ? 2 : 3);
9121 gimple *g
9122 = gimple_build_call_internal (IFN_GOMP_SIMD_LANE, 2, uid, t);
9123 gimple_call_set_lhs (g, lane);
9124 gimple_seq_add_stmt (&before, g);
9125 }
9126
9127 if (is_simd || is_for)
9128 {
9129 for (tree c = gimple_omp_for_clauses (octx->stmt);
9130 c; c = OMP_CLAUSE_CHAIN (c))
9131 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
9132 && OMP_CLAUSE_REDUCTION_INSCAN (c))
9133 {
9134 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
9135 tree var = OMP_CLAUSE_DECL (c);
9136 tree new_var = lookup_decl (var, octx);
9137 tree val = new_var;
9138 tree var2 = NULL_TREE;
9139 tree var3 = NULL_TREE;
9140 tree var4 = NULL_TREE;
9141 tree lane0 = NULL_TREE;
9142 tree new_vard = new_var;
9143 if (omp_is_reference (var))
9144 {
9145 new_var = build_simple_mem_ref_loc (clause_loc, new_var);
9146 val = new_var;
9147 }
9148 if (DECL_HAS_VALUE_EXPR_P (new_vard))
9149 {
9150 val = DECL_VALUE_EXPR (new_vard);
9151 if (new_vard != new_var)
9152 {
9153 gcc_assert (TREE_CODE (val) == ADDR_EXPR);
9154 val = TREE_OPERAND (val, 0);
9155 }
9156 if (TREE_CODE (val) == ARRAY_REF
9157 && VAR_P (TREE_OPERAND (val, 0)))
9158 {
9159 tree v = TREE_OPERAND (val, 0);
9160 if (lookup_attribute ("omp simd array",
9161 DECL_ATTRIBUTES (v)))
9162 {
9163 val = unshare_expr (val);
9164 lane0 = TREE_OPERAND (val, 1);
9165 TREE_OPERAND (val, 1) = lane;
9166 var2 = lookup_decl (v, octx);
9167 if (octx->scan_exclusive)
9168 var4 = lookup_decl (var2, octx);
9169 if (input_phase
9170 && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
9171 var3 = maybe_lookup_decl (var4 ? var4 : var2, octx);
9172 if (!input_phase)
9173 {
9174 var2 = build4 (ARRAY_REF, TREE_TYPE (val),
9175 var2, lane, NULL_TREE, NULL_TREE);
9176 TREE_THIS_NOTRAP (var2) = 1;
9177 if (octx->scan_exclusive)
9178 {
9179 var4 = build4 (ARRAY_REF, TREE_TYPE (val),
9180 var4, lane, NULL_TREE,
9181 NULL_TREE);
9182 TREE_THIS_NOTRAP (var4) = 1;
9183 }
9184 }
9185 else
9186 var2 = val;
9187 }
9188 }
9189 gcc_assert (var2);
9190 }
9191 else
9192 {
9193 var2 = build_outer_var_ref (var, octx);
9194 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
9195 {
9196 var3 = maybe_lookup_decl (new_vard, octx);
9197 if (var3 == new_vard || var3 == NULL_TREE)
9198 var3 = NULL_TREE;
9199 else if (is_simd && octx->scan_exclusive && !input_phase)
9200 {
9201 var4 = maybe_lookup_decl (var3, octx);
9202 if (var4 == var3 || var4 == NULL_TREE)
9203 {
9204 if (TREE_ADDRESSABLE (TREE_TYPE (new_var)))
9205 {
9206 var4 = var3;
9207 var3 = NULL_TREE;
9208 }
9209 else
9210 var4 = NULL_TREE;
9211 }
9212 }
9213 }
9214 if (is_simd
9215 && octx->scan_exclusive
9216 && !input_phase
9217 && var4 == NULL_TREE)
9218 var4 = create_tmp_var (TREE_TYPE (val));
9219 }
9220 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
9221 {
9222 tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
9223 if (input_phase)
9224 {
9225 if (var3)
9226 {
9227 /* If we've added a separate identity element
9228 variable, copy it over into val. */
9229 tree x = lang_hooks.decls.omp_clause_assign_op (c, val,
9230 var3);
9231 gimplify_and_add (x, &before);
9232 }
9233 else if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c))
9234 {
9235 /* Otherwise, assign to it the identity element. */
9236 gimple_seq tseq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c);
9237 if (is_for)
9238 tseq = copy_gimple_seq_and_replace_locals (tseq);
9239 tree ref = build_outer_var_ref (var, octx);
9240 tree x = (DECL_HAS_VALUE_EXPR_P (new_vard)
9241 ? DECL_VALUE_EXPR (new_vard) : NULL_TREE);
9242 if (x)
9243 {
9244 if (new_vard != new_var)
9245 val = build_fold_addr_expr_loc (clause_loc, val);
9246 SET_DECL_VALUE_EXPR (new_vard, val);
9247 }
9248 SET_DECL_VALUE_EXPR (placeholder, ref);
9249 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
9250 lower_omp (&tseq, octx);
9251 if (x)
9252 SET_DECL_VALUE_EXPR (new_vard, x);
9253 SET_DECL_VALUE_EXPR (placeholder, NULL_TREE);
9254 DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
9255 gimple_seq_add_seq (&before, tseq);
9256 if (is_simd)
9257 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL;
9258 }
9259 }
9260 else if (is_simd)
9261 {
9262 tree x;
9263 if (octx->scan_exclusive)
9264 {
9265 tree v4 = unshare_expr (var4);
9266 tree v2 = unshare_expr (var2);
9267 x = lang_hooks.decls.omp_clause_assign_op (c, v4, v2);
9268 gimplify_and_add (x, &before);
9269 }
9270 gimple_seq tseq = OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c);
9271 x = (DECL_HAS_VALUE_EXPR_P (new_vard)
9272 ? DECL_VALUE_EXPR (new_vard) : NULL_TREE);
9273 tree vexpr = val;
9274 if (x && new_vard != new_var)
9275 vexpr = build_fold_addr_expr_loc (clause_loc, val);
9276 if (x)
9277 SET_DECL_VALUE_EXPR (new_vard, vexpr);
9278 SET_DECL_VALUE_EXPR (placeholder, var2);
9279 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
9280 lower_omp (&tseq, octx);
9281 gimple_seq_add_seq (&before, tseq);
9282 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
9283 if (x)
9284 SET_DECL_VALUE_EXPR (new_vard, x);
9285 SET_DECL_VALUE_EXPR (placeholder, NULL_TREE);
9286 DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
9287 if (octx->scan_inclusive)
9288 {
9289 x = lang_hooks.decls.omp_clause_assign_op (c, val,
9290 var2);
9291 gimplify_and_add (x, &before);
9292 }
9293 else if (lane0 == NULL_TREE)
9294 {
9295 x = lang_hooks.decls.omp_clause_assign_op (c, val,
9296 var4);
9297 gimplify_and_add (x, &before);
9298 }
9299 }
9300 }
9301 else
9302 {
9303 if (input_phase)
9304 {
9305 /* Input phase. Set val to the initializer before
9306 the body. */
9307 tree x = omp_reduction_init (c, TREE_TYPE (new_var));
9308 gimplify_assign (val, x, &before);
9309 }
9310 else if (is_simd)
9311 {
9312 /* Scan phase. */
9313 enum tree_code code = OMP_CLAUSE_REDUCTION_CODE (c);
9314 if (code == MINUS_EXPR)
9315 code = PLUS_EXPR;
9316
9317 tree x = build2 (code, TREE_TYPE (var2),
9318 unshare_expr (var2), unshare_expr (val));
9319 if (octx->scan_inclusive)
9320 {
9321 gimplify_assign (unshare_expr (var2), x, &before);
9322 gimplify_assign (val, var2, &before);
9323 }
9324 else
9325 {
9326 gimplify_assign (unshare_expr (var4),
9327 unshare_expr (var2), &before);
9328 gimplify_assign (var2, x, &before);
9329 if (lane0 == NULL_TREE)
9330 gimplify_assign (val, var4, &before);
9331 }
9332 }
9333 }
9334 if (octx->scan_exclusive && !input_phase && lane0)
9335 {
9336 tree vexpr = unshare_expr (var4);
9337 TREE_OPERAND (vexpr, 1) = lane0;
9338 if (new_vard != new_var)
9339 vexpr = build_fold_addr_expr_loc (clause_loc, vexpr);
9340 SET_DECL_VALUE_EXPR (new_vard, vexpr);
9341 }
9342 }
9343 }
9344 if (is_simd && !is_for_simd)
9345 {
9346 gsi_insert_seq_after (gsi_p, gimple_omp_body (stmt), GSI_SAME_STMT);
9347 gsi_insert_seq_after (gsi_p, before, GSI_SAME_STMT);
9348 gsi_replace (gsi_p, gimple_build_nop (), true);
9349 return;
9350 }
9351 lower_omp (gimple_omp_body_ptr (stmt), octx);
9352 if (before)
9353 {
9354 gimple_stmt_iterator gsi = gsi_start_1 (gimple_omp_body_ptr (stmt));
9355 gsi_insert_seq_before (&gsi, before, GSI_SAME_STMT);
9356 }
9357 }
9358
9359
9360 /* Gimplify a GIMPLE_OMP_CRITICAL statement. This is a relatively simple
9361 substitution of a couple of function calls. But in the NAMED case,
9362 it requires that languages coordinate a symbol name. It is therefore
9363 best put here in common code. */
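/* Roughly (an illustrative sketch), "#pragma omp critical (foo) <body>"
   becomes

     GOMP_critical_name_start (&.gomp_critical_user_foo);
     <body>
     GOMP_critical_name_end (&.gomp_critical_user_foo);

   while the unnamed form uses GOMP_critical_start () and
   GOMP_critical_end () instead.  */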
9364
9365 static GTY(()) hash_map<tree, tree> *critical_name_mutexes;
9366
9367 static void
9368 lower_omp_critical (gimple_stmt_iterator *gsi_p, omp_context *ctx)
9369 {
9370 tree block;
9371 tree name, lock, unlock;
9372 gomp_critical *stmt = as_a <gomp_critical *> (gsi_stmt (*gsi_p));
9373 gbind *bind;
9374 location_t loc = gimple_location (stmt);
9375 gimple_seq tbody;
9376
9377 name = gimple_omp_critical_name (stmt);
9378 if (name)
9379 {
9380 tree decl;
9381
9382 if (!critical_name_mutexes)
9383 critical_name_mutexes = hash_map<tree, tree>::create_ggc (10);
9384
9385 tree *n = critical_name_mutexes->get (name);
9386 if (n == NULL)
9387 {
9388 char *new_str;
9389
9390 decl = create_tmp_var_raw (ptr_type_node);
9391
9392 new_str = ACONCAT ((".gomp_critical_user_",
9393 IDENTIFIER_POINTER (name), NULL));
9394 DECL_NAME (decl) = get_identifier (new_str);
9395 TREE_PUBLIC (decl) = 1;
9396 TREE_STATIC (decl) = 1;
9397 DECL_COMMON (decl) = 1;
9398 DECL_ARTIFICIAL (decl) = 1;
9399 DECL_IGNORED_P (decl) = 1;
9400
9401 varpool_node::finalize_decl (decl);
9402
9403 critical_name_mutexes->put (name, decl);
9404 }
9405 else
9406 decl = *n;
9407
9408 /* If '#pragma omp critical' is inside offloaded region or
9409 inside function marked as offloadable, the symbol must be
9410 marked as offloadable too. */
9411 omp_context *octx;
9412 if (cgraph_node::get (current_function_decl)->offloadable)
9413 varpool_node::get_create (decl)->offloadable = 1;
9414 else
9415 for (octx = ctx->outer; octx; octx = octx->outer)
9416 if (is_gimple_omp_offloaded (octx->stmt))
9417 {
9418 varpool_node::get_create (decl)->offloadable = 1;
9419 break;
9420 }
9421
9422 lock = builtin_decl_explicit (BUILT_IN_GOMP_CRITICAL_NAME_START);
9423 lock = build_call_expr_loc (loc, lock, 1,
9424 build_fold_addr_expr_loc (loc, decl));
9425
9426 unlock = builtin_decl_explicit (BUILT_IN_GOMP_CRITICAL_NAME_END);
9427 unlock = build_call_expr_loc (loc, unlock, 1,
9428 build_fold_addr_expr_loc (loc, decl));
9429 }
9430 else
9431 {
9432 lock = builtin_decl_explicit (BUILT_IN_GOMP_CRITICAL_START);
9433 lock = build_call_expr_loc (loc, lock, 0);
9434
9435 unlock = builtin_decl_explicit (BUILT_IN_GOMP_CRITICAL_END);
9436 unlock = build_call_expr_loc (loc, unlock, 0);
9437 }
9438
9439 push_gimplify_context ();
9440
9441 block = make_node (BLOCK);
9442 bind = gimple_build_bind (NULL, NULL, block);
9443 gsi_replace (gsi_p, bind, true);
9444 gimple_bind_add_stmt (bind, stmt);
9445
9446 tbody = gimple_bind_body (bind);
9447 gimplify_and_add (lock, &tbody);
9448 gimple_bind_set_body (bind, tbody);
9449
9450 lower_omp (gimple_omp_body_ptr (stmt), ctx);
9451 gimple_omp_set_body (stmt, maybe_catch_exception (gimple_omp_body (stmt)));
9452 gimple_bind_add_seq (bind, gimple_omp_body (stmt));
9453 gimple_omp_set_body (stmt, NULL);
9454
9455 tbody = gimple_bind_body (bind);
9456 gimplify_and_add (unlock, &tbody);
9457 gimple_bind_set_body (bind, tbody);
9458
9459 gimple_bind_add_stmt (bind, gimple_build_omp_return (true));
9460
9461 pop_gimplify_context (bind);
9462 gimple_bind_append_vars (bind, ctx->block_vars);
9463 BLOCK_VARS (block) = gimple_bind_vars (bind);
9464 }
9465
9466 /* A subroutine of lower_omp_for. Generate code to emit the predicate
9467 for a lastprivate clause. Given a loop control predicate of (V
9468 cond N2), we gate the clause on (!(V cond N2)). The lowered form
9469 is appended to *DLIST, iterator initialization is appended to
9470 *BODY_P. *CLIST is for lastprivate(conditional:) code that needs
9471 to be emitted in a critical section. */
9472
9473 static void
9474 lower_omp_for_lastprivate (struct omp_for_data *fd, gimple_seq *body_p,
9475 gimple_seq *dlist, gimple_seq *clist,
9476 struct omp_context *ctx)
9477 {
9478 tree clauses, cond, vinit;
9479 enum tree_code cond_code;
9480 gimple_seq stmts;
9481
9482 cond_code = fd->loop.cond_code;
9483 cond_code = cond_code == LT_EXPR ? GE_EXPR : LE_EXPR;
9484
9485 /* When possible, use a strict equality expression. This can let
9486 VRP-style optimizations deduce the value and remove a copy. */
9487 if (tree_fits_shwi_p (fd->loop.step))
9488 {
9489 HOST_WIDE_INT step = tree_to_shwi (fd->loop.step);
9490 if (step == 1 || step == -1)
9491 cond_code = EQ_EXPR;
9492 }
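  /* E.g. for "for (i = 0; i < n; i++)" the lastprivate guard then
     becomes i == n rather than i >= n.  */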
9493
9494 tree n2 = fd->loop.n2;
9495 if (fd->collapse > 1
9496 && TREE_CODE (n2) != INTEGER_CST
9497 && gimple_omp_for_combined_into_p (fd->for_stmt))
9498 {
9499 struct omp_context *taskreg_ctx = NULL;
9500 if (gimple_code (ctx->outer->stmt) == GIMPLE_OMP_FOR)
9501 {
9502 gomp_for *gfor = as_a <gomp_for *> (ctx->outer->stmt);
9503 if (gimple_omp_for_kind (gfor) == GF_OMP_FOR_KIND_FOR
9504 || gimple_omp_for_kind (gfor) == GF_OMP_FOR_KIND_DISTRIBUTE)
9505 {
9506 if (gimple_omp_for_combined_into_p (gfor))
9507 {
9508 gcc_assert (ctx->outer->outer
9509 && is_parallel_ctx (ctx->outer->outer));
9510 taskreg_ctx = ctx->outer->outer;
9511 }
9512 else
9513 {
9514 struct omp_for_data outer_fd;
9515 omp_extract_for_data (gfor, &outer_fd, NULL);
9516 n2 = fold_convert (TREE_TYPE (n2), outer_fd.loop.n2);
9517 }
9518 }
9519 else if (gimple_omp_for_kind (gfor) == GF_OMP_FOR_KIND_TASKLOOP)
9520 taskreg_ctx = ctx->outer->outer;
9521 }
9522 else if (is_taskreg_ctx (ctx->outer))
9523 taskreg_ctx = ctx->outer;
9524 if (taskreg_ctx)
9525 {
9526 int i;
9527 tree taskreg_clauses
9528 = gimple_omp_taskreg_clauses (taskreg_ctx->stmt);
9529 tree innerc = omp_find_clause (taskreg_clauses,
9530 OMP_CLAUSE__LOOPTEMP_);
9531 gcc_assert (innerc);
9532 for (i = 0; i < fd->collapse; i++)
9533 {
9534 innerc = omp_find_clause (OMP_CLAUSE_CHAIN (innerc),
9535 OMP_CLAUSE__LOOPTEMP_);
9536 gcc_assert (innerc);
9537 }
9538 innerc = omp_find_clause (OMP_CLAUSE_CHAIN (innerc),
9539 OMP_CLAUSE__LOOPTEMP_);
9540 if (innerc)
9541 n2 = fold_convert (TREE_TYPE (n2),
9542 lookup_decl (OMP_CLAUSE_DECL (innerc),
9543 taskreg_ctx));
9544 }
9545 }
9546 cond = build2 (cond_code, boolean_type_node, fd->loop.v, n2);
9547
9548 clauses = gimple_omp_for_clauses (fd->for_stmt);
9549 stmts = NULL;
9550 lower_lastprivate_clauses (clauses, cond, body_p, &stmts, clist, ctx);
9551 if (!gimple_seq_empty_p (stmts))
9552 {
9553 gimple_seq_add_seq (&stmts, *dlist);
9554 *dlist = stmts;
9555
9556 /* Optimize: v = 0; is usually cheaper than v = some_other_constant. */
9557 vinit = fd->loop.n1;
9558 if (cond_code == EQ_EXPR
9559 && tree_fits_shwi_p (fd->loop.n2)
9560 && ! integer_zerop (fd->loop.n2))
9561 vinit = build_int_cst (TREE_TYPE (fd->loop.v), 0);
9562 else
9563 vinit = unshare_expr (vinit);
9564
9565 /* Initialize the iterator variable, so that threads that don't execute
9566 any iterations don't execute the lastprivate clauses by accident. */
9567 gimplify_assign (fd->loop.v, vinit, body_p);
9568 }
9569 }
9570
9571 /* Callback for walk_gimple_seq. Find #pragma omp scan statement. */
9572
9573 static tree
9574 omp_find_scan (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
9575 struct walk_stmt_info *wi)
9576 {
9577 gimple *stmt = gsi_stmt (*gsi_p);
9578
9579 *handled_ops_p = true;
9580 switch (gimple_code (stmt))
9581 {
9582 WALK_SUBSTMTS;
9583
9584 case GIMPLE_OMP_FOR:
9585 if (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_SIMD
9586 && gimple_omp_for_combined_into_p (stmt))
9587 *handled_ops_p = false;
9588 break;
9589
9590 case GIMPLE_OMP_SCAN:
9591 *(gimple_stmt_iterator *) (wi->info) = *gsi_p;
9592 return integer_zero_node;
9593 default:
9594 break;
9595 }
9596 return NULL;
9597 }
9598
9599 /* Helper function for lower_omp_for: add transformations for a worksharing
9600 loop with scan directives inside of it.
9601 For a worksharing loop not combined with simd, transform:
9602 #pragma omp for reduction(inscan,+:r) private(i)
9603 for (i = 0; i < n; i = i + 1)
9604 {
9605 {
9606 update (r);
9607 }
9608 #pragma omp scan inclusive(r)
9609 {
9610 use (r);
9611 }
9612 }
9613
9614 into two worksharing loops + code to merge results:
9615
9616 num_threads = omp_get_num_threads ();
9617 thread_num = omp_get_thread_num ();
9618 if (thread_num == 0) goto <D.2099>; else goto <D.2100>;
9619 <D.2099>:
9620 var2 = r;
9621 goto <D.2101>;
9622 <D.2100>:
9623 // For UDRs this is UDR init, or if ctors are needed, copy from
9624 // var3 that has been constructed to contain the neutral element.
9625 var2 = 0;
9626 <D.2101>:
9627 ivar = 0;
9628 // The _scantemp_ clauses will arrange for rpriva to be initialized to
9629 // a shared array with num_threads elements, and rprivb to a local array
9630 // with as many elements as the number of (contiguous) iterations the
9631 // current thread will perform. The controlb and controlp variables are
9632 // temporaries used to handle deallocation of rprivb at the end of the
9633 // second GOMP_FOR.
9634 #pragma omp for _scantemp_(rpriva) _scantemp_(rprivb) _scantemp_(controlb) \
9635 _scantemp_(controlp) reduction(inscan,+:r) private(i) nowait
9636 for (i = 0; i < n; i = i + 1)
9637 {
9638 {
9639 // For UDRs this is UDR init or copy from var3.
9640 r = 0;
9641 // This is the input phase from user code.
9642 update (r);
9643 }
9644 {
9645 // For UDRs this is UDR merge.
9646 var2 = var2 + r;
9647 // Rather than handing it over to the user, save it to the local
9648 // thread's array.
9649 rprivb[ivar] = var2;
9650 // For exclusive scan, the above two statements are swapped.
9651 ivar = ivar + 1;
9652 }
9653 }
9654 // And remember this thread's final value in the shared
9655 // rpriva array.
9656 rpriva[(sizetype) thread_num] = var2;
9657 // If more than one thread, compute the inclusive parallel scan of
9658 // the rpriva array using a work-efficient prefix sum.
9659 if (num_threads > 1) goto <D.2102>; else goto <D.2103>;
9660 <D.2102>:
9661 GOMP_barrier ();
9662 down = 0;
9663 k = 1;
9664 num_threadsu = (unsigned int) num_threads;
9665 thread_nump1 = (unsigned int) thread_num + 1;
9666 <D.2108>:
9667 twok = k << 1;
9668 if (twok > num_threadsu) goto <D.2110>; else goto <D.2111>;
9669 <D.2110>:
9670 down = 4294967295;
9671 k = k >> 1;
9672 if (k == num_threadsu) goto <D.2112>; else goto <D.2111>;
9673 <D.2112>:
9674 k = k >> 1;
9675 <D.2111>:
9676 twok = k << 1;
9677 cplx = .MUL_OVERFLOW (thread_nump1, twok);
9678 mul = REALPART_EXPR <cplx>;
9679 ovf = IMAGPART_EXPR <cplx>;
9680 if (ovf == 0) goto <D.2116>; else goto <D.2117>;
9681 <D.2116>:
9682 andv = k & down;
9683 andvm1 = andv + 4294967295;
9684 l = mul + andvm1;
9685 if (l < num_threadsu) goto <D.2120>; else goto <D.2117>;
9686 <D.2120>:
9687 // For UDRs this is UDR merge, performed using var2 variable as temporary,
9688 // i.e. var2 = rpriva[l - k]; UDR merge (var2, rpriva[l]); rpriva[l] = var2;
9689 rpriva[l] = rpriva[l - k] + rpriva[l];
9690 <D.2117>:
9691 if (down == 0) goto <D.2121>; else goto <D.2122>;
9692 <D.2121>:
9693 k = k << 1;
9694 goto <D.2123>;
9695 <D.2122>:
9696 k = k >> 1;
9697 <D.2123>:
9698 GOMP_barrier ();
9699 if (k != 0) goto <D.2108>; else goto <D.2103>;
9700 <D.2103>:
9701 if (thread_num == 0) goto <D.2124>; else goto <D.2125>;
9702 <D.2124>:
9703 // For UDRs this is UDR init or copy from var3.
9704 var2 = 0;
9705 goto <D.2126>;
9706 <D.2125>:
9707 var2 = rpriva[thread_num - 1];
9708 <D.2126>:
9709 ivar = 0;
9710 #pragma omp for _scantemp_(controlb) _scantemp_(controlp) \
9711 reduction(inscan,+:r) private(i)
9712 for (i = 0; i < n; i = i + 1)
9713 {
9714 {
9715 // For UDRs, this is r = var2; UDR merge (r, rprivb[ivar]);
9716 r = var2 + rprivb[ivar];
9717 }
9718 {
9719 // This is the scan phase from user code.
9720 use (r);
9721 // Plus a bump of the iterator.
9722 ivar = ivar + 1;
9723 }
9724 } */
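/* As a small worked example of the prefix-sum loop above (illustrative
   only): with num_threads == 4 and per-thread partial sums
   rpriva = { a, b, c, d }, the barrier-separated passes compute

     k == 1 (up):   { a, a+b, c,     c+d     }
     k == 2 (up):   { a, a+b, c,     a+b+c+d }
     k == 1 (down): { a, a+b, a+b+c, a+b+c+d }

   i.e. the inclusive scan of the per-thread partial sums.  */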
9725
9726 static void
9727 lower_omp_for_scan (gimple_seq *body_p, gimple_seq *dlist, gomp_for *stmt,
9728 struct omp_for_data *fd, omp_context *ctx)
9729 {
9730 bool is_for_simd = gimple_omp_for_combined_p (stmt);
9731 gcc_assert (ctx->scan_inclusive || ctx->scan_exclusive);
9732
9733 gimple_seq body = gimple_omp_body (stmt);
9734 gimple_stmt_iterator input1_gsi = gsi_none ();
9735 struct walk_stmt_info wi;
9736 memset (&wi, 0, sizeof (wi));
9737 wi.val_only = true;
9738 wi.info = (void *) &input1_gsi;
9739 walk_gimple_seq_mod (&body, omp_find_scan, NULL, &wi);
9740 gcc_assert (!gsi_end_p (input1_gsi));
9741
9742 gimple *input_stmt1 = gsi_stmt (input1_gsi);
9743 gimple_stmt_iterator gsi = input1_gsi;
9744 gsi_next (&gsi);
9745 gimple_stmt_iterator scan1_gsi = gsi;
9746 gimple *scan_stmt1 = gsi_stmt (gsi);
9747 gcc_assert (scan_stmt1 && gimple_code (scan_stmt1) == GIMPLE_OMP_SCAN);
9748
9749 gimple_seq input_body = gimple_omp_body (input_stmt1);
9750 gimple_seq scan_body = gimple_omp_body (scan_stmt1);
9751 gimple_omp_set_body (input_stmt1, NULL);
9752 gimple_omp_set_body (scan_stmt1, NULL);
9753 gimple_omp_set_body (stmt, NULL);
9754
9755 gomp_for *new_stmt = as_a <gomp_for *> (gimple_copy (stmt));
9756 gimple_seq new_body = copy_gimple_seq_and_replace_locals (body);
9757 gimple_omp_set_body (stmt, body);
9758 gimple_omp_set_body (input_stmt1, input_body);
9759
9760 gimple_stmt_iterator input2_gsi = gsi_none ();
9761 memset (&wi, 0, sizeof (wi));
9762 wi.val_only = true;
9763 wi.info = (void *) &input2_gsi;
9764 walk_gimple_seq_mod (&new_body, omp_find_scan, NULL, &wi);
9765 gcc_assert (!gsi_end_p (input2_gsi));
9766
9767 gimple *input_stmt2 = gsi_stmt (input2_gsi);
9768 gsi = input2_gsi;
9769 gsi_next (&gsi);
9770 gimple_stmt_iterator scan2_gsi = gsi;
9771 gimple *scan_stmt2 = gsi_stmt (gsi);
9772 gcc_assert (scan_stmt2 && gimple_code (scan_stmt2) == GIMPLE_OMP_SCAN);
9773 gimple_omp_set_body (scan_stmt2, scan_body);
9774
9775 gimple_stmt_iterator input3_gsi = gsi_none ();
9776 gimple_stmt_iterator scan3_gsi = gsi_none ();
9777 gimple_stmt_iterator input4_gsi = gsi_none ();
9778 gimple_stmt_iterator scan4_gsi = gsi_none ();
9779 gimple *input_stmt3 = NULL, *scan_stmt3 = NULL;
9780 gimple *input_stmt4 = NULL, *scan_stmt4 = NULL;
9781 omp_context *input_simd_ctx = NULL, *scan_simd_ctx = NULL;
9782 if (is_for_simd)
9783 {
9784 memset (&wi, 0, sizeof (wi));
9785 wi.val_only = true;
9786 wi.info = (void *) &input3_gsi;
9787 walk_gimple_seq_mod (&input_body, omp_find_scan, NULL, &wi);
9788 gcc_assert (!gsi_end_p (input3_gsi));
9789
9790 input_stmt3 = gsi_stmt (input3_gsi);
9791 gsi = input3_gsi;
9792 gsi_next (&gsi);
9793 scan3_gsi = gsi;
9794 scan_stmt3 = gsi_stmt (gsi);
9795 gcc_assert (scan_stmt3 && gimple_code (scan_stmt3) == GIMPLE_OMP_SCAN);
9796
9797 memset (&wi, 0, sizeof (wi));
9798 wi.val_only = true;
9799 wi.info = (void *) &input4_gsi;
9800 walk_gimple_seq_mod (&scan_body, omp_find_scan, NULL, &wi);
9801 gcc_assert (!gsi_end_p (input4_gsi));
9802
9803 input_stmt4 = gsi_stmt (input4_gsi);
9804 gsi = input4_gsi;
9805 gsi_next (&gsi);
9806 scan4_gsi = gsi;
9807 scan_stmt4 = gsi_stmt (gsi);
9808 gcc_assert (scan_stmt4 && gimple_code (scan_stmt4) == GIMPLE_OMP_SCAN);
9809
9810 input_simd_ctx = maybe_lookup_ctx (input_stmt3)->outer;
9811 scan_simd_ctx = maybe_lookup_ctx (input_stmt4)->outer;
9812 }
9813
9814 tree num_threads = create_tmp_var (integer_type_node);
9815 tree thread_num = create_tmp_var (integer_type_node);
9816 tree nthreads_decl = builtin_decl_explicit (BUILT_IN_OMP_GET_NUM_THREADS);
9817 tree threadnum_decl = builtin_decl_explicit (BUILT_IN_OMP_GET_THREAD_NUM);
9818 gimple *g = gimple_build_call (nthreads_decl, 0);
9819 gimple_call_set_lhs (g, num_threads);
9820 gimple_seq_add_stmt (body_p, g);
9821 g = gimple_build_call (threadnum_decl, 0);
9822 gimple_call_set_lhs (g, thread_num);
9823 gimple_seq_add_stmt (body_p, g);
9824
9825 tree ivar = create_tmp_var (sizetype);
9826 tree new_clauses1 = NULL_TREE, new_clauses2 = NULL_TREE;
9827 tree *cp1 = &new_clauses1, *cp2 = &new_clauses2;
9828 tree k = create_tmp_var (unsigned_type_node);
9829 tree l = create_tmp_var (unsigned_type_node);
9830
9831 gimple_seq clist = NULL, mdlist = NULL;
9832 gimple_seq thr01_list = NULL, thrn1_list = NULL;
9833 gimple_seq thr02_list = NULL, thrn2_list = NULL;
9834 gimple_seq scan1_list = NULL, input2_list = NULL;
9835 gimple_seq last_list = NULL, reduc_list = NULL;
9836 for (tree c = gimple_omp_for_clauses (stmt); c; c = OMP_CLAUSE_CHAIN (c))
9837 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
9838 && OMP_CLAUSE_REDUCTION_INSCAN (c))
9839 {
9840 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
9841 tree var = OMP_CLAUSE_DECL (c);
9842 tree new_var = lookup_decl (var, ctx);
9843 tree var3 = NULL_TREE;
9844 tree new_vard = new_var;
9845 if (omp_is_reference (var))
9846 new_var = build_simple_mem_ref_loc (clause_loc, new_var);
9847 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
9848 {
9849 var3 = maybe_lookup_decl (new_vard, ctx);
9850 if (var3 == new_vard)
9851 var3 = NULL_TREE;
9852 }
9853
9854 tree ptype = build_pointer_type (TREE_TYPE (new_var));
9855 tree rpriva = create_tmp_var (ptype);
9856 tree nc = build_omp_clause (clause_loc, OMP_CLAUSE__SCANTEMP_);
9857 OMP_CLAUSE_DECL (nc) = rpriva;
9858 *cp1 = nc;
9859 cp1 = &OMP_CLAUSE_CHAIN (nc);
9860
9861 tree rprivb = create_tmp_var (ptype);
9862 nc = build_omp_clause (clause_loc, OMP_CLAUSE__SCANTEMP_);
9863 OMP_CLAUSE_DECL (nc) = rprivb;
9864 OMP_CLAUSE__SCANTEMP__ALLOC (nc) = 1;
9865 *cp1 = nc;
9866 cp1 = &OMP_CLAUSE_CHAIN (nc);
9867
9868 tree var2 = create_tmp_var_raw (TREE_TYPE (new_var));
9869 if (new_vard != new_var)
9870 TREE_ADDRESSABLE (var2) = 1;
9871 gimple_add_tmp_var (var2);
9872
9873 tree x = fold_convert_loc (clause_loc, sizetype, thread_num);
9874 x = fold_build2_loc (clause_loc, MULT_EXPR, sizetype, x,
9875 TYPE_SIZE_UNIT (TREE_TYPE (ptype)));
9876 x = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (rpriva), rpriva, x);
9877 tree rpriva_ref = build_simple_mem_ref_loc (clause_loc, x);
9878
9879 x = fold_build2_loc (clause_loc, PLUS_EXPR, integer_type_node,
9880 thread_num, integer_minus_one_node);
9881 x = fold_convert_loc (clause_loc, sizetype, x);
9882 x = fold_build2_loc (clause_loc, MULT_EXPR, sizetype, x,
9883 TYPE_SIZE_UNIT (TREE_TYPE (ptype)));
9884 x = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (rpriva), rpriva, x);
9885 tree rprivam1_ref = build_simple_mem_ref_loc (clause_loc, x);
9886
9887 x = fold_convert_loc (clause_loc, sizetype, l);
9888 x = fold_build2_loc (clause_loc, MULT_EXPR, sizetype, x,
9889 TYPE_SIZE_UNIT (TREE_TYPE (ptype)));
9890 x = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (rpriva), rpriva, x);
9891 tree rprival_ref = build_simple_mem_ref_loc (clause_loc, x);
9892
9893 x = fold_build2_loc (clause_loc, MINUS_EXPR, unsigned_type_node, l, k);
9894 x = fold_convert_loc (clause_loc, sizetype, x);
9895 x = fold_build2_loc (clause_loc, MULT_EXPR, sizetype, x,
9896 TYPE_SIZE_UNIT (TREE_TYPE (ptype)));
9897 x = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (rpriva), rpriva, x);
9898 tree rprivalmk_ref = build_simple_mem_ref_loc (clause_loc, x);
9899
9900 x = fold_build2_loc (clause_loc, MULT_EXPR, sizetype, ivar,
9901 TYPE_SIZE_UNIT (TREE_TYPE (ptype)));
9902 x = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (rprivb), rprivb, x);
9903 tree rprivb_ref = build_simple_mem_ref_loc (clause_loc, x);
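	/* In array terms: rpriva_ref is rpriva[thread_num], rprivam1_ref
	   is rpriva[thread_num - 1], rprival_ref is rpriva[l],
	   rprivalmk_ref is rpriva[l - k] and rprivb_ref is rprivb[ivar].  */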
9904
9905 tree var4 = is_for_simd ? new_var : var2;
9906 tree var5 = NULL_TREE, var6 = NULL_TREE;
9907 if (is_for_simd)
9908 {
9909 var5 = lookup_decl (var, input_simd_ctx);
9910 var6 = lookup_decl (var, scan_simd_ctx);
9911 if (new_vard != new_var)
9912 {
9913 var5 = build_simple_mem_ref_loc (clause_loc, var5);
9914 var6 = build_simple_mem_ref_loc (clause_loc, var6);
9915 }
9916 }
9917 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
9918 {
9919 tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
9920 tree val = var2;
9921
9922 x = lang_hooks.decls.omp_clause_default_ctor
9923 (c, var2, build_outer_var_ref (var, ctx));
9924 if (x)
9925 gimplify_and_add (x, &clist);
9926
9927 x = build_outer_var_ref (var, ctx);
9928 x = lang_hooks.decls.omp_clause_assign_op (c, unshare_expr (var4),
9929 x);
9930 gimplify_and_add (x, &thr01_list);
9931
9932 tree y = (DECL_HAS_VALUE_EXPR_P (new_vard)
9933 ? DECL_VALUE_EXPR (new_vard) : NULL_TREE);
9934 if (var3)
9935 {
9936 x = unshare_expr (var4);
9937 x = lang_hooks.decls.omp_clause_assign_op (c, x, var3);
9938 gimplify_and_add (x, &thrn1_list);
9939 x = unshare_expr (var4);
9940 x = lang_hooks.decls.omp_clause_assign_op (c, x, var3);
9941 gimplify_and_add (x, &thr02_list);
9942 }
9943 else if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c))
9944 {
9945 /* Otherwise, assign to it the identity element. */
9946 gimple_seq tseq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c);
9947 tseq = copy_gimple_seq_and_replace_locals (tseq);
9948 if (!is_for_simd)
9949 {
9950 if (new_vard != new_var)
9951 val = build_fold_addr_expr_loc (clause_loc, val);
9952 SET_DECL_VALUE_EXPR (new_vard, val);
9953 DECL_HAS_VALUE_EXPR_P (new_vard) = 1;
9954 }
9955 SET_DECL_VALUE_EXPR (placeholder, error_mark_node);
9956 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
9957 lower_omp (&tseq, ctx);
9958 gimple_seq_add_seq (&thrn1_list, tseq);
9959 tseq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c);
9960 lower_omp (&tseq, ctx);
9961 gimple_seq_add_seq (&thr02_list, tseq);
9962 SET_DECL_VALUE_EXPR (placeholder, NULL_TREE);
9963 DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
9964 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL;
9965 if (y)
9966 SET_DECL_VALUE_EXPR (new_vard, y);
9967 else
9968 {
9969 DECL_HAS_VALUE_EXPR_P (new_vard) = 0;
9970 SET_DECL_VALUE_EXPR (new_vard, NULL_TREE);
9971 }
9972 }
9973
9974 x = unshare_expr (var4);
9975 x = lang_hooks.decls.omp_clause_assign_op (c, x, rprivam1_ref);
9976 gimplify_and_add (x, &thrn2_list);
9977
9978 if (is_for_simd)
9979 {
9980 x = unshare_expr (rprivb_ref);
9981 x = lang_hooks.decls.omp_clause_assign_op (c, x, var5);
9982 gimplify_and_add (x, &scan1_list);
9983 }
9984 else
9985 {
9986 if (ctx->scan_exclusive)
9987 {
9988 x = unshare_expr (rprivb_ref);
9989 x = lang_hooks.decls.omp_clause_assign_op (c, x, var2);
9990 gimplify_and_add (x, &scan1_list);
9991 }
9992
9993 gimple_seq tseq = OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c);
9994 tseq = copy_gimple_seq_and_replace_locals (tseq);
9995 SET_DECL_VALUE_EXPR (placeholder, var2);
9996 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
9997 lower_omp (&tseq, ctx);
9998 gimple_seq_add_seq (&scan1_list, tseq);
9999
10000 if (ctx->scan_inclusive)
10001 {
10002 x = unshare_expr (rprivb_ref);
10003 x = lang_hooks.decls.omp_clause_assign_op (c, x, var2);
10004 gimplify_and_add (x, &scan1_list);
10005 }
10006 }
10007
10008 x = unshare_expr (rpriva_ref);
10009 x = lang_hooks.decls.omp_clause_assign_op (c, x,
10010 unshare_expr (var4));
10011 gimplify_and_add (x, &mdlist);
10012
10013 x = unshare_expr (is_for_simd ? var6 : new_var);
10014 x = lang_hooks.decls.omp_clause_assign_op (c, x, var4);
10015 gimplify_and_add (x, &input2_list);
10016
10017 val = rprivb_ref;
10018 if (new_vard != new_var)
10019 val = build_fold_addr_expr_loc (clause_loc, val);
10020
10021 gimple_seq tseq = OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c);
10022 tseq = copy_gimple_seq_and_replace_locals (tseq);
10023 SET_DECL_VALUE_EXPR (new_vard, val);
10024 DECL_HAS_VALUE_EXPR_P (new_vard) = 1;
10025 if (is_for_simd)
10026 {
10027 SET_DECL_VALUE_EXPR (placeholder, var6);
10028 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
10029 }
10030 else
10031 DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
10032 lower_omp (&tseq, ctx);
10033 if (y)
10034 SET_DECL_VALUE_EXPR (new_vard, y);
10035 else
10036 {
10037 DECL_HAS_VALUE_EXPR_P (new_vard) = 0;
10038 SET_DECL_VALUE_EXPR (new_vard, NULL_TREE);
10039 }
10040 if (!is_for_simd)
10041 {
10042 SET_DECL_VALUE_EXPR (placeholder, new_var);
10043 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
10044 lower_omp (&tseq, ctx);
10045 }
10046 gimple_seq_add_seq (&input2_list, tseq);
10047
10048 x = build_outer_var_ref (var, ctx);
10049 x = lang_hooks.decls.omp_clause_assign_op (c, x, rpriva_ref);
10050 gimplify_and_add (x, &last_list);
10051
10052 x = lang_hooks.decls.omp_clause_assign_op (c, var2, rprivalmk_ref);
10053 gimplify_and_add (x, &reduc_list);
10054 tseq = OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c);
10055 tseq = copy_gimple_seq_and_replace_locals (tseq);
10056 val = rprival_ref;
10057 if (new_vard != new_var)
10058 val = build_fold_addr_expr_loc (clause_loc, val);
10059 SET_DECL_VALUE_EXPR (new_vard, val);
10060 DECL_HAS_VALUE_EXPR_P (new_vard) = 1;
10061 SET_DECL_VALUE_EXPR (placeholder, var2);
10062 lower_omp (&tseq, ctx);
10063 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
10064 SET_DECL_VALUE_EXPR (placeholder, NULL_TREE);
10065 DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
10066 if (y)
10067 SET_DECL_VALUE_EXPR (new_vard, y);
10068 else
10069 {
10070 DECL_HAS_VALUE_EXPR_P (new_vard) = 0;
10071 SET_DECL_VALUE_EXPR (new_vard, NULL_TREE);
10072 }
10073 gimple_seq_add_seq (&reduc_list, tseq);
10074 x = lang_hooks.decls.omp_clause_assign_op (c, rprival_ref, var2);
10075 gimplify_and_add (x, &reduc_list);
10076
10077 x = lang_hooks.decls.omp_clause_dtor (c, var2);
10078 if (x)
10079 gimplify_and_add (x, dlist);
10080 }
10081 else
10082 {
10083 x = build_outer_var_ref (var, ctx);
10084 gimplify_assign (unshare_expr (var4), x, &thr01_list);
10085
10086 x = omp_reduction_init (c, TREE_TYPE (new_var));
10087 gimplify_assign (unshare_expr (var4), unshare_expr (x),
10088 &thrn1_list);
10089 gimplify_assign (unshare_expr (var4), x, &thr02_list);
10090
10091 gimplify_assign (unshare_expr (var4), rprivam1_ref, &thrn2_list);
10092
10093 enum tree_code code = OMP_CLAUSE_REDUCTION_CODE (c);
10094 if (code == MINUS_EXPR)
10095 code = PLUS_EXPR;
10096
10097 if (is_for_simd)
10098 gimplify_assign (unshare_expr (rprivb_ref), var5, &scan1_list);
10099 else
10100 {
10101 if (ctx->scan_exclusive)
10102 gimplify_assign (unshare_expr (rprivb_ref), var2,
10103 &scan1_list);
10104 x = build2 (code, TREE_TYPE (new_var), var2, new_var);
10105 gimplify_assign (var2, x, &scan1_list);
10106 if (ctx->scan_inclusive)
10107 gimplify_assign (unshare_expr (rprivb_ref), var2,
10108 &scan1_list);
10109 }
10110
10111 gimplify_assign (unshare_expr (rpriva_ref), unshare_expr (var4),
10112 &mdlist);
10113
10114 x = build2 (code, TREE_TYPE (new_var), var4, rprivb_ref);
10115 gimplify_assign (is_for_simd ? var6 : new_var, x, &input2_list);
10116
10117 gimplify_assign (build_outer_var_ref (var, ctx), rpriva_ref,
10118 &last_list);
10119
10120 x = build2 (code, TREE_TYPE (new_var), rprivalmk_ref,
10121 unshare_expr (rprival_ref));
10122 gimplify_assign (rprival_ref, x, &reduc_list);
10123 }
10124 }
10125
10126 g = gimple_build_assign (ivar, PLUS_EXPR, ivar, size_one_node);
10127 gimple_seq_add_stmt (&scan1_list, g);
10128 g = gimple_build_assign (ivar, PLUS_EXPR, ivar, size_one_node);
10129 gimple_seq_add_stmt (gimple_omp_body_ptr (is_for_simd
10130 ? scan_stmt4 : scan_stmt2), g);
10131
10132 tree controlb = create_tmp_var (boolean_type_node);
10133 tree controlp = create_tmp_var (ptr_type_node);
10134 tree nc = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__SCANTEMP_);
10135 OMP_CLAUSE_DECL (nc) = controlb;
10136 OMP_CLAUSE__SCANTEMP__CONTROL (nc) = 1;
10137 *cp1 = nc;
10138 cp1 = &OMP_CLAUSE_CHAIN (nc);
10139 nc = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__SCANTEMP_);
10140 OMP_CLAUSE_DECL (nc) = controlp;
10141 OMP_CLAUSE__SCANTEMP__CONTROL (nc) = 1;
10142 *cp1 = nc;
10143 cp1 = &OMP_CLAUSE_CHAIN (nc);
10144 nc = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__SCANTEMP_);
10145 OMP_CLAUSE_DECL (nc) = controlb;
10146 OMP_CLAUSE__SCANTEMP__CONTROL (nc) = 1;
10147 *cp2 = nc;
10148 cp2 = &OMP_CLAUSE_CHAIN (nc);
10149 nc = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__SCANTEMP_);
10150 OMP_CLAUSE_DECL (nc) = controlp;
10151 OMP_CLAUSE__SCANTEMP__CONTROL (nc) = 1;
10152 *cp2 = nc;
10153 cp2 = &OMP_CLAUSE_CHAIN (nc);
10154
10155 *cp1 = gimple_omp_for_clauses (stmt);
10156 gimple_omp_for_set_clauses (stmt, new_clauses1);
10157 *cp2 = gimple_omp_for_clauses (new_stmt);
10158 gimple_omp_for_set_clauses (new_stmt, new_clauses2);
10159
10160 if (is_for_simd)
10161 {
10162 gimple_seq_add_seq (gimple_omp_body_ptr (scan_stmt3), scan1_list);
10163 gimple_seq_add_seq (gimple_omp_body_ptr (input_stmt4), input2_list);
10164
10165 gsi_insert_seq_after (&input3_gsi, gimple_omp_body (input_stmt3),
10166 GSI_SAME_STMT);
10167 gsi_remove (&input3_gsi, true);
10168 gsi_insert_seq_after (&scan3_gsi, gimple_omp_body (scan_stmt3),
10169 GSI_SAME_STMT);
10170 gsi_remove (&scan3_gsi, true);
10171 gsi_insert_seq_after (&input4_gsi, gimple_omp_body (input_stmt4),
10172 GSI_SAME_STMT);
10173 gsi_remove (&input4_gsi, true);
10174 gsi_insert_seq_after (&scan4_gsi, gimple_omp_body (scan_stmt4),
10175 GSI_SAME_STMT);
10176 gsi_remove (&scan4_gsi, true);
10177 }
10178 else
10179 {
10180 gimple_omp_set_body (scan_stmt1, scan1_list);
10181 gimple_omp_set_body (input_stmt2, input2_list);
10182 }
10183
10184 gsi_insert_seq_after (&input1_gsi, gimple_omp_body (input_stmt1),
10185 GSI_SAME_STMT);
10186 gsi_remove (&input1_gsi, true);
10187 gsi_insert_seq_after (&scan1_gsi, gimple_omp_body (scan_stmt1),
10188 GSI_SAME_STMT);
10189 gsi_remove (&scan1_gsi, true);
10190 gsi_insert_seq_after (&input2_gsi, gimple_omp_body (input_stmt2),
10191 GSI_SAME_STMT);
10192 gsi_remove (&input2_gsi, true);
10193 gsi_insert_seq_after (&scan2_gsi, gimple_omp_body (scan_stmt2),
10194 GSI_SAME_STMT);
10195 gsi_remove (&scan2_gsi, true);
10196
10197 gimple_seq_add_seq (body_p, clist);
10198
10199 tree lab1 = create_artificial_label (UNKNOWN_LOCATION);
10200 tree lab2 = create_artificial_label (UNKNOWN_LOCATION);
10201 tree lab3 = create_artificial_label (UNKNOWN_LOCATION);
10202 g = gimple_build_cond (EQ_EXPR, thread_num, integer_zero_node, lab1, lab2);
10203 gimple_seq_add_stmt (body_p, g);
10204 g = gimple_build_label (lab1);
10205 gimple_seq_add_stmt (body_p, g);
10206 gimple_seq_add_seq (body_p, thr01_list);
10207 g = gimple_build_goto (lab3);
10208 gimple_seq_add_stmt (body_p, g);
10209 g = gimple_build_label (lab2);
10210 gimple_seq_add_stmt (body_p, g);
10211 gimple_seq_add_seq (body_p, thrn1_list);
10212 g = gimple_build_label (lab3);
10213 gimple_seq_add_stmt (body_p, g);
10214
10215 g = gimple_build_assign (ivar, size_zero_node);
10216 gimple_seq_add_stmt (body_p, g);
10217
10218 gimple_seq_add_stmt (body_p, stmt);
10219 gimple_seq_add_seq (body_p, body);
10220 gimple_seq_add_stmt (body_p, gimple_build_omp_continue (fd->loop.v,
10221 fd->loop.v));
10222
10223 g = gimple_build_omp_return (true);
10224 gimple_seq_add_stmt (body_p, g);
10225 gimple_seq_add_seq (body_p, mdlist);
10226
10227 lab1 = create_artificial_label (UNKNOWN_LOCATION);
10228 lab2 = create_artificial_label (UNKNOWN_LOCATION);
10229 g = gimple_build_cond (GT_EXPR, num_threads, integer_one_node, lab1, lab2);
10230 gimple_seq_add_stmt (body_p, g);
10231 g = gimple_build_label (lab1);
10232 gimple_seq_add_stmt (body_p, g);
10233
10234 g = omp_build_barrier (NULL);
10235 gimple_seq_add_stmt (body_p, g);
10236
10237 tree down = create_tmp_var (unsigned_type_node);
10238 g = gimple_build_assign (down, build_zero_cst (unsigned_type_node));
10239 gimple_seq_add_stmt (body_p, g);
10240
10241 g = gimple_build_assign (k, build_one_cst (unsigned_type_node));
10242 gimple_seq_add_stmt (body_p, g);
10243
10244 tree num_threadsu = create_tmp_var (unsigned_type_node);
10245 g = gimple_build_assign (num_threadsu, NOP_EXPR, num_threads);
10246 gimple_seq_add_stmt (body_p, g);
10247
10248 tree thread_numu = create_tmp_var (unsigned_type_node);
10249 g = gimple_build_assign (thread_numu, NOP_EXPR, thread_num);
10250 gimple_seq_add_stmt (body_p, g);
10251
10252 tree thread_nump1 = create_tmp_var (unsigned_type_node);
10253 g = gimple_build_assign (thread_nump1, PLUS_EXPR, thread_numu,
10254 build_int_cst (unsigned_type_node, 1));
10255 gimple_seq_add_stmt (body_p, g);
10256
10257 lab3 = create_artificial_label (UNKNOWN_LOCATION);
10258 g = gimple_build_label (lab3);
10259 gimple_seq_add_stmt (body_p, g);
10260
10261 tree twok = create_tmp_var (unsigned_type_node);
10262 g = gimple_build_assign (twok, LSHIFT_EXPR, k, integer_one_node);
10263 gimple_seq_add_stmt (body_p, g);
10264
10265 tree lab4 = create_artificial_label (UNKNOWN_LOCATION);
10266 tree lab5 = create_artificial_label (UNKNOWN_LOCATION);
10267 tree lab6 = create_artificial_label (UNKNOWN_LOCATION);
10268 g = gimple_build_cond (GT_EXPR, twok, num_threadsu, lab4, lab5);
10269 gimple_seq_add_stmt (body_p, g);
10270 g = gimple_build_label (lab4);
10271 gimple_seq_add_stmt (body_p, g);
10272 g = gimple_build_assign (down, build_all_ones_cst (unsigned_type_node));
10273 gimple_seq_add_stmt (body_p, g);
10274 g = gimple_build_assign (k, RSHIFT_EXPR, k, integer_one_node);
10275 gimple_seq_add_stmt (body_p, g);
10276
10277 g = gimple_build_cond (EQ_EXPR, k, num_threadsu, lab6, lab5);
10278 gimple_seq_add_stmt (body_p, g);
10279 g = gimple_build_label (lab6);
10280 gimple_seq_add_stmt (body_p, g);
10281
10282 g = gimple_build_assign (k, RSHIFT_EXPR, k, integer_one_node);
10283 gimple_seq_add_stmt (body_p, g);
10284
10285 g = gimple_build_label (lab5);
10286 gimple_seq_add_stmt (body_p, g);
10287
10288 g = gimple_build_assign (twok, LSHIFT_EXPR, k, integer_one_node);
10289 gimple_seq_add_stmt (body_p, g);
10290
10291 tree cplx = create_tmp_var (build_complex_type (unsigned_type_node, false));
10292 g = gimple_build_call_internal (IFN_MUL_OVERFLOW, 2, thread_nump1, twok);
10293 gimple_call_set_lhs (g, cplx);
10294 gimple_seq_add_stmt (body_p, g);
10295 tree mul = create_tmp_var (unsigned_type_node);
10296 g = gimple_build_assign (mul, REALPART_EXPR,
10297 build1 (REALPART_EXPR, unsigned_type_node, cplx));
10298 gimple_seq_add_stmt (body_p, g);
10299 tree ovf = create_tmp_var (unsigned_type_node);
10300 g = gimple_build_assign (ovf, IMAGPART_EXPR,
10301 build1 (IMAGPART_EXPR, unsigned_type_node, cplx));
10302 gimple_seq_add_stmt (body_p, g);
10303
10304 tree lab7 = create_artificial_label (UNKNOWN_LOCATION);
10305 tree lab8 = create_artificial_label (UNKNOWN_LOCATION);
10306 g = gimple_build_cond (EQ_EXPR, ovf, build_zero_cst (unsigned_type_node),
10307 lab7, lab8);
10308 gimple_seq_add_stmt (body_p, g);
10309 g = gimple_build_label (lab7);
10310 gimple_seq_add_stmt (body_p, g);
10311
10312 tree andv = create_tmp_var (unsigned_type_node);
10313 g = gimple_build_assign (andv, BIT_AND_EXPR, k, down);
10314 gimple_seq_add_stmt (body_p, g);
10315 tree andvm1 = create_tmp_var (unsigned_type_node);
10316 g = gimple_build_assign (andvm1, PLUS_EXPR, andv,
10317 build_minus_one_cst (unsigned_type_node));
10318 gimple_seq_add_stmt (body_p, g);
10319
10320 g = gimple_build_assign (l, PLUS_EXPR, mul, andvm1);
10321 gimple_seq_add_stmt (body_p, g);
10322
10323 tree lab9 = create_artificial_label (UNKNOWN_LOCATION);
10324 g = gimple_build_cond (LT_EXPR, l, num_threadsu, lab9, lab8);
10325 gimple_seq_add_stmt (body_p, g);
10326 g = gimple_build_label (lab9);
10327 gimple_seq_add_stmt (body_p, g);
10328 gimple_seq_add_seq (body_p, reduc_list);
10329 g = gimple_build_label (lab8);
10330 gimple_seq_add_stmt (body_p, g);
10331
10332 tree lab10 = create_artificial_label (UNKNOWN_LOCATION);
10333 tree lab11 = create_artificial_label (UNKNOWN_LOCATION);
10334 tree lab12 = create_artificial_label (UNKNOWN_LOCATION);
10335 g = gimple_build_cond (EQ_EXPR, down, build_zero_cst (unsigned_type_node),
10336 lab10, lab11);
10337 gimple_seq_add_stmt (body_p, g);
10338 g = gimple_build_label (lab10);
10339 gimple_seq_add_stmt (body_p, g);
10340 g = gimple_build_assign (k, LSHIFT_EXPR, k, integer_one_node);
10341 gimple_seq_add_stmt (body_p, g);
10342 g = gimple_build_goto (lab12);
10343 gimple_seq_add_stmt (body_p, g);
10344 g = gimple_build_label (lab11);
10345 gimple_seq_add_stmt (body_p, g);
10346 g = gimple_build_assign (k, RSHIFT_EXPR, k, integer_one_node);
10347 gimple_seq_add_stmt (body_p, g);
10348 g = gimple_build_label (lab12);
10349 gimple_seq_add_stmt (body_p, g);
10350
10351 g = omp_build_barrier (NULL);
10352 gimple_seq_add_stmt (body_p, g);
10353
10354 g = gimple_build_cond (NE_EXPR, k, build_zero_cst (unsigned_type_node),
10355 lab3, lab2);
10356 gimple_seq_add_stmt (body_p, g);
10357
10358 g = gimple_build_label (lab2);
10359 gimple_seq_add_stmt (body_p, g);
10360
10361 lab1 = create_artificial_label (UNKNOWN_LOCATION);
10362 lab2 = create_artificial_label (UNKNOWN_LOCATION);
10363 lab3 = create_artificial_label (UNKNOWN_LOCATION);
10364 g = gimple_build_cond (EQ_EXPR, thread_num, integer_zero_node, lab1, lab2);
10365 gimple_seq_add_stmt (body_p, g);
10366 g = gimple_build_label (lab1);
10367 gimple_seq_add_stmt (body_p, g);
10368 gimple_seq_add_seq (body_p, thr02_list);
10369 g = gimple_build_goto (lab3);
10370 gimple_seq_add_stmt (body_p, g);
10371 g = gimple_build_label (lab2);
10372 gimple_seq_add_stmt (body_p, g);
10373 gimple_seq_add_seq (body_p, thrn2_list);
10374 g = gimple_build_label (lab3);
10375 gimple_seq_add_stmt (body_p, g);
10376
10377 g = gimple_build_assign (ivar, size_zero_node);
10378 gimple_seq_add_stmt (body_p, g);
10379 gimple_seq_add_stmt (body_p, new_stmt);
10380 gimple_seq_add_seq (body_p, new_body);
10381
10382 gimple_seq new_dlist = NULL;
10383 lab1 = create_artificial_label (UNKNOWN_LOCATION);
10384 lab2 = create_artificial_label (UNKNOWN_LOCATION);
10385 tree num_threadsm1 = create_tmp_var (integer_type_node);
10386 g = gimple_build_assign (num_threadsm1, PLUS_EXPR, num_threads,
10387 integer_minus_one_node);
10388 gimple_seq_add_stmt (&new_dlist, g);
10389 g = gimple_build_cond (EQ_EXPR, thread_num, num_threadsm1, lab1, lab2);
10390 gimple_seq_add_stmt (&new_dlist, g);
10391 g = gimple_build_label (lab1);
10392 gimple_seq_add_stmt (&new_dlist, g);
10393 gimple_seq_add_seq (&new_dlist, last_list);
10394 g = gimple_build_label (lab2);
10395 gimple_seq_add_stmt (&new_dlist, g);
10396 gimple_seq_add_seq (&new_dlist, *dlist);
10397 *dlist = new_dlist;
10398 }
10399
10400 /* Lower code for an OMP loop directive. */
10401
10402 static void
10403 lower_omp_for (gimple_stmt_iterator *gsi_p, omp_context *ctx)
10404 {
10405 tree *rhs_p, block;
10406 struct omp_for_data fd, *fdp = NULL;
10407 gomp_for *stmt = as_a <gomp_for *> (gsi_stmt (*gsi_p));
10408 gbind *new_stmt;
10409 gimple_seq omp_for_body, body, dlist, tred_ilist = NULL, tred_dlist = NULL;
10410 gimple_seq cnt_list = NULL, clist = NULL;
10411 gimple_seq oacc_head = NULL, oacc_tail = NULL;
10412 size_t i;
10413
10414 push_gimplify_context ();
10415
10416 lower_omp (gimple_omp_for_pre_body_ptr (stmt), ctx);
10417
10418 block = make_node (BLOCK);
10419 new_stmt = gimple_build_bind (NULL, NULL, block);
10420 /* Replace at gsi right away, so that 'stmt' is no longer a member
10421 of a sequence, as we're going to add it to a different
10422 one below. */
10423 gsi_replace (gsi_p, new_stmt, true);
10424
10425 /* Move declarations of temporaries out of the loop body before we
10426 make it go away. */
10427 omp_for_body = gimple_omp_body (stmt);
10428 if (!gimple_seq_empty_p (omp_for_body)
10429 && gimple_code (gimple_seq_first_stmt (omp_for_body)) == GIMPLE_BIND)
10430 {
10431 gbind *inner_bind
10432 = as_a <gbind *> (gimple_seq_first_stmt (omp_for_body));
10433 tree vars = gimple_bind_vars (inner_bind);
10434 gimple_bind_append_vars (new_stmt, vars);
10435 /* bind_vars/BLOCK_VARS are being moved to new_stmt/block, don't
10436 keep them on the inner_bind and its block. */
10437 gimple_bind_set_vars (inner_bind, NULL_TREE);
10438 if (gimple_bind_block (inner_bind))
10439 BLOCK_VARS (gimple_bind_block (inner_bind)) = NULL_TREE;
10440 }
10441
10442 if (gimple_omp_for_combined_into_p (stmt))
10443 {
10444 omp_extract_for_data (stmt, &fd, NULL);
10445 fdp = &fd;
10446
10447 /* We need two temporaries with fd.iter_type (istart/iend)
10448 and then (fd.collapse - 1) temporaries with the same type
10449 for the count2 ... countN-1 vars if the counts are not constant. */
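      /* For instance (illustrative): with collapse(3) and a non-constant
         total iteration count this gives 2 + (3 - 1) = 4 _looptemp_
         temporaries, holding istart, iend, count2 and count3.  */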
10450 size_t count = 2;
10451 tree type = fd.iter_type;
10452 if (fd.collapse > 1
10453 && TREE_CODE (fd.loop.n2) != INTEGER_CST)
10454 count += fd.collapse - 1;
10455 bool taskreg_for
10456 = (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_FOR
10457 || gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_TASKLOOP);
10458 tree outerc = NULL, *pc = gimple_omp_for_clauses_ptr (stmt);
10459 tree simtc = NULL;
10460 tree clauses = *pc;
10461 if (taskreg_for)
10462 outerc
10463 = omp_find_clause (gimple_omp_taskreg_clauses (ctx->outer->stmt),
10464 OMP_CLAUSE__LOOPTEMP_);
10465 if (ctx->simt_stmt)
10466 simtc = omp_find_clause (gimple_omp_for_clauses (ctx->simt_stmt),
10467 OMP_CLAUSE__LOOPTEMP_);
10468 for (i = 0; i < count; i++)
10469 {
10470 tree temp;
10471 if (taskreg_for)
10472 {
10473 gcc_assert (outerc);
10474 temp = lookup_decl (OMP_CLAUSE_DECL (outerc), ctx->outer);
10475 outerc = omp_find_clause (OMP_CLAUSE_CHAIN (outerc),
10476 OMP_CLAUSE__LOOPTEMP_);
10477 }
10478 else
10479 {
10480 /* If there are two adjacent SIMD stmts, one with a _simt_
10481 clause and one without, make sure they use the same
10482 decls in their _looptemp_ clauses, because the outer stmt
10483 they are combined into will look up just one inner_stmt. */
10484 if (ctx->simt_stmt)
10485 temp = OMP_CLAUSE_DECL (simtc);
10486 else
10487 temp = create_tmp_var (type);
10488 insert_decl_map (&ctx->outer->cb, temp, temp);
10489 }
10490 *pc = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__LOOPTEMP_);
10491 OMP_CLAUSE_DECL (*pc) = temp;
10492 pc = &OMP_CLAUSE_CHAIN (*pc);
10493 if (ctx->simt_stmt)
10494 simtc = omp_find_clause (OMP_CLAUSE_CHAIN (simtc),
10495 OMP_CLAUSE__LOOPTEMP_);
10496 }
10497 *pc = clauses;
10498 }
10499
10500 /* The pre-body and input clauses go before the lowered GIMPLE_OMP_FOR. */
10501 dlist = NULL;
10502 body = NULL;
10503 tree rclauses
10504 = omp_task_reductions_find_first (gimple_omp_for_clauses (stmt), OMP_FOR,
10505 OMP_CLAUSE_REDUCTION);
10506 tree rtmp = NULL_TREE;
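  /* Reductions with the task modifier are communicated through an extra
     _REDUCTEMP_ clause; lower_omp_task_reductions emits the runtime
     bookkeeping into TRED_ILIST/TRED_DLIST surrounding the loop.  */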
10507 if (rclauses)
10508 {
10509 tree type = build_pointer_type (pointer_sized_int_node);
10510 tree temp = create_tmp_var (type);
10511 tree c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__REDUCTEMP_);
10512 OMP_CLAUSE_DECL (c) = temp;
10513 OMP_CLAUSE_CHAIN (c) = gimple_omp_for_clauses (stmt);
10514 gimple_omp_for_set_clauses (stmt, c);
10515 lower_omp_task_reductions (ctx, OMP_FOR,
10516 gimple_omp_for_clauses (stmt),
10517 &tred_ilist, &tred_dlist);
10518 rclauses = c;
10519 rtmp = make_ssa_name (type);
10520 gimple_seq_add_stmt (&body, gimple_build_assign (rtmp, temp));
10521 }
10522
10523 lower_lastprivate_conditional_clauses (gimple_omp_for_clauses_ptr (stmt),
10524 ctx);
10525
10526 lower_rec_input_clauses (gimple_omp_for_clauses (stmt), &body, &dlist, ctx,
10527 fdp);
10528 gimple_seq_add_seq (rclauses ? &tred_ilist : &body,
10529 gimple_omp_for_pre_body (stmt));
10530
10531 lower_omp (gimple_omp_body_ptr (stmt), ctx);
10532
10533 /* Lower the header expressions. At this point, we can assume that
10534 the header is of the form:
10535
10536 #pragma omp for (V = VAL1; V {<|>|<=|>=} VAL2; V = V [+-] VAL3)
10537
10538 We just need to make sure that VAL1, VAL2 and VAL3 are lowered
10539 using the .omp_data_s mapping, if needed. */
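  /* For example (names illustrative), a non-invariant end bound N is
     replaced by a formal temporary:

       D.1234 = N;
       #pragma omp for (V = VAL1; V < D.1234; V = V + VAL3)

     with the assignment collected in CNT_LIST.  The TREE_VEC form used for
     non-rectangular loop bounds gets the same treatment on its operands
     1 and 2.  */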
10540 for (i = 0; i < gimple_omp_for_collapse (stmt); i++)
10541 {
10542 rhs_p = gimple_omp_for_initial_ptr (stmt, i);
10543 if (TREE_CODE (*rhs_p) == TREE_VEC)
10544 {
10545 if (!is_gimple_min_invariant (TREE_VEC_ELT (*rhs_p, 1)))
10546 TREE_VEC_ELT (*rhs_p, 1)
10547 = get_formal_tmp_var (TREE_VEC_ELT (*rhs_p, 1), &cnt_list);
10548 if (!is_gimple_min_invariant (TREE_VEC_ELT (*rhs_p, 2)))
10549 TREE_VEC_ELT (*rhs_p, 2)
10550 = get_formal_tmp_var (TREE_VEC_ELT (*rhs_p, 2), &cnt_list);
10551 }
10552 else if (!is_gimple_min_invariant (*rhs_p))
10553 *rhs_p = get_formal_tmp_var (*rhs_p, &cnt_list);
10554 else if (TREE_CODE (*rhs_p) == ADDR_EXPR)
10555 recompute_tree_invariant_for_addr_expr (*rhs_p);
10556
10557 rhs_p = gimple_omp_for_final_ptr (stmt, i);
10558 if (TREE_CODE (*rhs_p) == TREE_VEC)
10559 {
10560 if (!is_gimple_min_invariant (TREE_VEC_ELT (*rhs_p, 1)))
10561 TREE_VEC_ELT (*rhs_p, 1)
10562 = get_formal_tmp_var (TREE_VEC_ELT (*rhs_p, 1), &cnt_list);
10563 if (!is_gimple_min_invariant (TREE_VEC_ELT (*rhs_p, 2)))
10564 TREE_VEC_ELT (*rhs_p, 2)
10565 = get_formal_tmp_var (TREE_VEC_ELT (*rhs_p, 2), &cnt_list);
10566 }
10567 else if (!is_gimple_min_invariant (*rhs_p))
10568 *rhs_p = get_formal_tmp_var (*rhs_p, &cnt_list);
10569 else if (TREE_CODE (*rhs_p) == ADDR_EXPR)
10570 recompute_tree_invariant_for_addr_expr (*rhs_p);
10571
10572 rhs_p = &TREE_OPERAND (gimple_omp_for_incr (stmt, i), 1);
10573 if (!is_gimple_min_invariant (*rhs_p))
10574 *rhs_p = get_formal_tmp_var (*rhs_p, &cnt_list);
10575 }
10576 if (rclauses)
10577 gimple_seq_add_seq (&tred_ilist, cnt_list);
10578 else
10579 gimple_seq_add_seq (&body, cnt_list);
10580
10581 /* Once lowered, extract the bounds and clauses. */
10582 omp_extract_for_data (stmt, &fd, NULL);
10583
10584 if (is_gimple_omp_oacc (ctx->stmt)
10585 && !ctx_in_oacc_kernels_region (ctx))
10586 lower_oacc_head_tail (gimple_location (stmt),
10587 gimple_omp_for_clauses (stmt),
10588 &oacc_head, &oacc_tail, ctx);
10589
10590 /* Add OpenACC partitioning and reduction markers just before the loop. */
10591 if (oacc_head)
10592 gimple_seq_add_seq (&body, oacc_head);
10593
10594 lower_omp_for_lastprivate (&fd, &body, &dlist, &clist, ctx);
10595
10596 if (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_FOR)
10597 for (tree c = gimple_omp_for_clauses (stmt); c; c = OMP_CLAUSE_CHAIN (c))
10598 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
10599 && !OMP_CLAUSE_LINEAR_NO_COPYIN (c))
10600 {
10601 OMP_CLAUSE_DECL (c) = lookup_decl (OMP_CLAUSE_DECL (c), ctx);
10602 if (DECL_P (OMP_CLAUSE_LINEAR_STEP (c)))
10603 OMP_CLAUSE_LINEAR_STEP (c)
10604 = maybe_lookup_decl_in_outer_ctx (OMP_CLAUSE_LINEAR_STEP (c),
10605 ctx);
10606 }
10607
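  /* Worksharing loops with inclusive/exclusive scan reductions need special
     body placement (separate input and scan phases), which
     lower_omp_for_scan takes care of; otherwise the GIMPLE_OMP_FOR is
     simply followed by its lowered body.  */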
10608 if ((ctx->scan_inclusive || ctx->scan_exclusive)
10609 && gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_FOR)
10610 lower_omp_for_scan (&body, &dlist, stmt, &fd, ctx);
10611 else
10612 {
10613 gimple_seq_add_stmt (&body, stmt);
10614 gimple_seq_add_seq (&body, gimple_omp_body (stmt));
10615 }
10616
10617 gimple_seq_add_stmt (&body, gimple_build_omp_continue (fd.loop.v,
10618 fd.loop.v));
10619
10620 /* After the loop, add exit clauses. */
10621 lower_reduction_clauses (gimple_omp_for_clauses (stmt), &body, &clist, ctx);
10622
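  /* Reductions that must be finalized atomically were queued in CLIST;
     emit them between GOMP_atomic_start/GOMP_atomic_end calls.  */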
10623 if (clist)
10624 {
10625 tree fndecl = builtin_decl_explicit (BUILT_IN_GOMP_ATOMIC_START);
10626 gcall *g = gimple_build_call (fndecl, 0);
10627 gimple_seq_add_stmt (&body, g);
10628 gimple_seq_add_seq (&body, clist);
10629 fndecl = builtin_decl_explicit (BUILT_IN_GOMP_ATOMIC_END);
10630 g = gimple_build_call (fndecl, 0);
10631 gimple_seq_add_stmt (&body, g);
10632 }
10633
10634 if (ctx->cancellable)
10635 gimple_seq_add_stmt (&body, gimple_build_label (ctx->cancel_label));
10636
10637 gimple_seq_add_seq (&body, dlist);
10638
10639 if (rclauses)
10640 {
10641 gimple_seq_add_seq (&tred_ilist, body);
10642 body = tred_ilist;
10643 }
10644
10645 body = maybe_catch_exception (body);
10646
10647 /* Region exit marker goes at the end of the loop body. */
10648 gimple *g = gimple_build_omp_return (fd.have_nowait);
10649 gimple_seq_add_stmt (&body, g);
10650
10651 gimple_seq_add_seq (&body, tred_dlist);
10652
10653 maybe_add_implicit_barrier_cancel (ctx, g, &body);
10654
10655 if (rclauses)
10656 OMP_CLAUSE_DECL (rclauses) = rtmp;
10657
10658 /* Add OpenACC joining and reduction markers just after the loop. */
10659 if (oacc_tail)
10660 gimple_seq_add_seq (&body, oacc_tail);
10661
10662 pop_gimplify_context (new_stmt);
10663
10664 gimple_bind_append_vars (new_stmt, ctx->block_vars);
10665 maybe_remove_omp_member_access_dummy_vars (new_stmt);
10666 BLOCK_VARS (block) = gimple_bind_vars (new_stmt);
10667 if (BLOCK_VARS (block))
10668 TREE_USED (block) = 1;
10669
10670 gimple_bind_set_body (new_stmt, body);
10671 gimple_omp_set_body (stmt, NULL);
10672 gimple_omp_for_set_pre_body (stmt, NULL);
10673 }
10674
10675 /* Callback for walk_stmts. Check if the current statement only contains
10676 GIMPLE_OMP_FOR or GIMPLE_OMP_SECTIONS. */
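/* WI->INFO points at an int: it stays 0 while nothing relevant is seen,
   becomes 1 after exactly one workshare statement, and -1 as soon as a
   second one or any other non-debug statement appears.  The caller only
   marks the parallel combined when the final value is 1.  */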
10677
10678 static tree
10679 check_combined_parallel (gimple_stmt_iterator *gsi_p,
10680 bool *handled_ops_p,
10681 struct walk_stmt_info *wi)
10682 {
10683 int *info = (int *) wi->info;
10684 gimple *stmt = gsi_stmt (*gsi_p);
10685
10686 *handled_ops_p = true;
10687 switch (gimple_code (stmt))
10688 {
10689 WALK_SUBSTMTS;
10690
10691 case GIMPLE_DEBUG:
10692 break;
10693 case GIMPLE_OMP_FOR:
10694 case GIMPLE_OMP_SECTIONS:
10695 *info = *info == 0 ? 1 : -1;
10696 break;
10697 default:
10698 *info = -1;
10699 break;
10700 }
10701 return NULL;
10702 }
10703
10704 struct omp_taskcopy_context
10705 {
10706 /* This field must be at the beginning, as we do "inheritance": Some
10707 callback functions for tree-inline.c (e.g., omp_copy_decl)
10708 receive a copy_body_data pointer that is up-casted to an
10709 omp_context pointer. */
10710 copy_body_data cb;
10711 omp_context *ctx;
10712 };
10713
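/* copy_body_data callback for the task copyfn: variables present in the
   task's sender record (sfield_map) get a fresh temporary, everything
   else is returned unchanged.  */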
10714 static tree
10715 task_copyfn_copy_decl (tree var, copy_body_data *cb)
10716 {
10717 struct omp_taskcopy_context *tcctx = (struct omp_taskcopy_context *) cb;
10718
10719 if (splay_tree_lookup (tcctx->ctx->sfield_map, (splay_tree_key) var))
10720 return create_tmp_var (TREE_TYPE (var));
10721
10722 return var;
10723 }
10724
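/* Build a remapped variant of ORIG_TYPE for the task copyfn: each field's
   (possibly variably modified) type, size and offset is copied through
   TCCTX->cb, and the old-to-new field mapping is recorded in the decl
   map.  */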
10725 static tree
10726 task_copyfn_remap_type (struct omp_taskcopy_context *tcctx, tree orig_type)
10727 {
10728 tree name, new_fields = NULL, type, f;
10729
10730 type = lang_hooks.types.make_type (RECORD_TYPE);
10731 name = DECL_NAME (TYPE_NAME (orig_type));
10732 name = build_decl (gimple_location (tcctx->ctx->stmt),
10733 TYPE_DECL, name, type);
10734 TYPE_NAME (type) = name;
10735
10736 for (f = TYPE_FIELDS (orig_type); f ; f = TREE_CHAIN (f))
10737 {
10738 tree new_f = copy_node (f);
10739 DECL_CONTEXT (new_f) = type;
10740 TREE_TYPE (new_f) = remap_type (TREE_TYPE (f), &tcctx->cb);
10741 TREE_CHAIN (new_f) = new_fields;
10742 walk_tree (&DECL_SIZE (new_f), copy_tree_body_r, &tcctx->cb, NULL);
10743 walk_tree (&DECL_SIZE_UNIT (new_f), copy_tree_body_r, &tcctx->cb, NULL);
10744 walk_tree (&DECL_FIELD_OFFSET (new_f), copy_tree_body_r,
10745 &tcctx->cb, NULL);
10746 new_fields = new_f;
10747 tcctx->cb.decl_map->put (f, new_f);
10748 }
10749 TYPE_FIELDS (type) = nreverse (new_fields);
10750 layout_type (type);
10751 return type;
10752 }
10753
10754 /* Create the task copyfn, which copies shared variable pointers and
         copy-constructs firstprivate variables from the sender record
         into the task's own record.  */
10755
10756 static void
10757 create_task_copyfn (gomp_task *task_stmt, omp_context *ctx)
10758 {
10759 struct function *child_cfun;
10760 tree child_fn, t, c, src, dst, f, sf, arg, sarg, decl;
10761 tree record_type, srecord_type, bind, list;
10762 bool record_needs_remap = false, srecord_needs_remap = false;
10763 splay_tree_node n;
10764 struct omp_taskcopy_context tcctx;
10765 location_t loc = gimple_location (task_stmt);
10766 size_t looptempno = 0;
10767
10768 child_fn = gimple_omp_task_copy_fn (task_stmt);
10769 child_cfun = DECL_STRUCT_FUNCTION (child_fn);
10770 gcc_assert (child_cfun->cfg == NULL);
10771 DECL_SAVED_TREE (child_fn) = alloc_stmt_list ();
10772
10773 /* Reset DECL_CONTEXT on function arguments. */
10774 for (t = DECL_ARGUMENTS (child_fn); t; t = DECL_CHAIN (t))
10775 DECL_CONTEXT (t) = child_fn;
10776
10777 /* Populate the function. */
10778 push_gimplify_context ();
10779 push_cfun (child_cfun);
10780
10781 bind = build3 (BIND_EXPR, void_type_node, NULL, NULL, NULL);
10782 TREE_SIDE_EFFECTS (bind) = 1;
10783 list = NULL;
10784 DECL_SAVED_TREE (child_fn) = bind;
10785 DECL_SOURCE_LOCATION (child_fn) = gimple_location (task_stmt);
10786
10787 /* Remap src and dst argument types if needed. */
10788 record_type = ctx->record_type;
10789 srecord_type = ctx->srecord_type;
10790 for (f = TYPE_FIELDS (record_type); f ; f = DECL_CHAIN (f))
10791 if (variably_modified_type_p (TREE_TYPE (f), ctx->cb.src_fn))
10792 {
10793 record_needs_remap = true;
10794 break;
10795 }
10796 for (f = TYPE_FIELDS (srecord_type); f ; f = DECL_CHAIN (f))
10797 if (variably_modified_type_p (TREE_TYPE (f), ctx->cb.src_fn))
10798 {
10799 srecord_needs_remap = true;
10800 break;
10801 }
10802
10803 if (record_needs_remap || srecord_needs_remap)
10804 {
10805 memset (&tcctx, '\0', sizeof (tcctx));
10806 tcctx.cb.src_fn = ctx->cb.src_fn;
10807 tcctx.cb.dst_fn = child_fn;
10808 tcctx.cb.src_node = cgraph_node::get (tcctx.cb.src_fn);
10809 gcc_checking_assert (tcctx.cb.src_node);
10810 tcctx.cb.dst_node = tcctx.cb.src_node;
10811 tcctx.cb.src_cfun = ctx->cb.src_cfun;
10812 tcctx.cb.copy_decl = task_copyfn_copy_decl;
10813 tcctx.cb.eh_lp_nr = 0;
10814 tcctx.cb.transform_call_graph_edges = CB_CGE_MOVE;
10815 tcctx.cb.decl_map = new hash_map<tree, tree>;
10816 tcctx.ctx = ctx;
10817
10818 if (record_needs_remap)
10819 record_type = task_copyfn_remap_type (&tcctx, record_type);
10820 if (srecord_needs_remap)
10821 srecord_type = task_copyfn_remap_type (&tcctx, srecord_type);
10822 }
10823 else
10824 tcctx.cb.decl_map = NULL;
10825
10826 arg = DECL_ARGUMENTS (child_fn);
10827 TREE_TYPE (arg) = build_pointer_type (record_type);
10828 sarg = DECL_CHAIN (arg);
10829 TREE_TYPE (sarg) = build_pointer_type (srecord_type);
10830
10831 /* First pass: initialize temporaries used in record_type and srecord_type
10832 sizes and field offsets. */
10833 if (tcctx.cb.decl_map)
10834 for (c = gimple_omp_task_clauses (task_stmt); c; c = OMP_CLAUSE_CHAIN (c))
10835 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
10836 {
10837 tree *p;
10838
10839 decl = OMP_CLAUSE_DECL (c);
10840 p = tcctx.cb.decl_map->get (decl);
10841 if (p == NULL)
10842 continue;
10843 n = splay_tree_lookup (ctx->sfield_map, (splay_tree_key) decl);
10844 sf = (tree) n->value;
10845 sf = *tcctx.cb.decl_map->get (sf);
10846 src = build_simple_mem_ref_loc (loc, sarg);
10847 src = omp_build_component_ref (src, sf);
10848 t = build2 (MODIFY_EXPR, TREE_TYPE (*p), *p, src);
10849 append_to_statement_list (t, &list);
10850 }
10851
10852 /* Second pass: copy shared var pointers and copy-construct non-VLA
10853 firstprivate vars. */
10854 for (c = gimple_omp_task_clauses (task_stmt); c; c = OMP_CLAUSE_CHAIN (c))
10855 switch (OMP_CLAUSE_CODE (c))
10856 {
10857 splay_tree_key key;
10858 case OMP_CLAUSE_SHARED:
10859 decl = OMP_CLAUSE_DECL (c);
10860 key = (splay_tree_key) decl;
10861 if (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
10862 key = (splay_tree_key) &DECL_UID (decl);
10863 n = splay_tree_lookup (ctx->field_map, key);
10864 if (n == NULL)
10865 break;
10866 f = (tree) n->value;
10867 if (tcctx.cb.decl_map)
10868 f = *tcctx.cb.decl_map->get (f);
10869 n = splay_tree_lookup (ctx->sfield_map, key);
10870 sf = (tree) n->value;
10871 if (tcctx.cb.decl_map)
10872 sf = *tcctx.cb.decl_map->get (sf);
10873 src = build_simple_mem_ref_loc (loc, sarg);
10874 src = omp_build_component_ref (src, sf);
10875 dst = build_simple_mem_ref_loc (loc, arg);
10876 dst = omp_build_component_ref (dst, f);
10877 t = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
10878 append_to_statement_list (t, &list);
10879 break;
10880 case OMP_CLAUSE_REDUCTION:
10881 case OMP_CLAUSE_IN_REDUCTION:
10882 decl = OMP_CLAUSE_DECL (c);
10883 if (TREE_CODE (decl) == MEM_REF)
10884 {
10885 decl = TREE_OPERAND (decl, 0);
10886 if (TREE_CODE (decl) == POINTER_PLUS_EXPR)
10887 decl = TREE_OPERAND (decl, 0);
10888 if (TREE_CODE (decl) == INDIRECT_REF
10889 || TREE_CODE (decl) == ADDR_EXPR)
10890 decl = TREE_OPERAND (decl, 0);
10891 }
10892 key = (splay_tree_key) decl;
10893 n = splay_tree_lookup (ctx->field_map, key);
10894 if (n == NULL)
10895 break;
10896 f = (tree) n->value;
10897 if (tcctx.cb.decl_map)
10898 f = *tcctx.cb.decl_map->get (f);
10899 n = splay_tree_lookup (ctx->sfield_map, key);
10900 sf = (tree) n->value;
10901 if (tcctx.cb.decl_map)
10902 sf = *tcctx.cb.decl_map->get (sf);
10903 src = build_simple_mem_ref_loc (loc, sarg);
10904 src = omp_build_component_ref (src, sf);
10905 if (decl != OMP_CLAUSE_DECL (c)
10906 && TREE_CODE (TREE_TYPE (decl)) == REFERENCE_TYPE
10907 && TREE_CODE (TREE_TYPE (TREE_TYPE (decl))) == POINTER_TYPE)
10908 src = build_simple_mem_ref_loc (loc, src);
10909 dst = build_simple_mem_ref_loc (loc, arg);
10910 dst = omp_build_component_ref (dst, f);
10911 t = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
10912 append_to_statement_list (t, &list);
10913 break;
10914 case OMP_CLAUSE__LOOPTEMP_:
10915 /* Fields for first two _looptemp_ clauses are initialized by
10916 GOMP_taskloop*, the rest are handled like firstprivate. */
10917 if (looptempno < 2)
10918 {
10919 looptempno++;
10920 break;
10921 }
10922 /* FALLTHRU */
10923 case OMP_CLAUSE__REDUCTEMP_:
10924 case OMP_CLAUSE_FIRSTPRIVATE:
10925 decl = OMP_CLAUSE_DECL (c);
10926 if (is_variable_sized (decl))
10927 break;
10928 n = splay_tree_lookup (ctx->field_map, (splay_tree_key) decl);
10929 if (n == NULL)
10930 break;
10931 f = (tree) n->value;
10932 if (tcctx.cb.decl_map)
10933 f = *tcctx.cb.decl_map->get (f);
10934 n = splay_tree_lookup (ctx->sfield_map, (splay_tree_key) decl);
10935 if (n != NULL)
10936 {
10937 sf = (tree) n->value;
10938 if (tcctx.cb.decl_map)
10939 sf = *tcctx.cb.decl_map->get (sf);
10940 src = build_simple_mem_ref_loc (loc, sarg);
10941 src = omp_build_component_ref (src, sf);
10942 if (use_pointer_for_field (decl, NULL) || omp_is_reference (decl))
10943 src = build_simple_mem_ref_loc (loc, src);
10944 }
10945 else
10946 src = decl;
10947 dst = build_simple_mem_ref_loc (loc, arg);
10948 dst = omp_build_component_ref (dst, f);
10949 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_FIRSTPRIVATE)
10950 t = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
10951 else
10952 t = lang_hooks.decls.omp_clause_copy_ctor (c, dst, src);
10953 append_to_statement_list (t, &list);
10954 break;
10955 case OMP_CLAUSE_PRIVATE:
10956 if (! OMP_CLAUSE_PRIVATE_OUTER_REF (c))
10957 break;
10958 decl = OMP_CLAUSE_DECL (c);
10959 n = splay_tree_lookup (ctx->field_map, (splay_tree_key) decl);
10960 f = (tree) n->value;
10961 if (tcctx.cb.decl_map)
10962 f = *tcctx.cb.decl_map->get (f);
10963 n = splay_tree_lookup (ctx->sfield_map, (splay_tree_key) decl);
10964 if (n != NULL)
10965 {
10966 sf = (tree) n->value;
10967 if (tcctx.cb.decl_map)
10968 sf = *tcctx.cb.decl_map->get (sf);
10969 src = build_simple_mem_ref_loc (loc, sarg);
10970 src = omp_build_component_ref (src, sf);
10971 if (use_pointer_for_field (decl, NULL))
10972 src = build_simple_mem_ref_loc (loc, src);
10973 }
10974 else
10975 src = decl;
10976 dst = build_simple_mem_ref_loc (loc, arg);
10977 dst = omp_build_component_ref (dst, f);
10978 t = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
10979 append_to_statement_list (t, &list);
10980 break;
10981 default:
10982 break;
10983 }
10984
10985 /* Last pass: handle VLA firstprivates. */
10986 if (tcctx.cb.decl_map)
10987 for (c = gimple_omp_task_clauses (task_stmt); c; c = OMP_CLAUSE_CHAIN (c))
10988 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
10989 {
10990 tree ind, ptr, df;
10991
10992 decl = OMP_CLAUSE_DECL (c);
10993 if (!is_variable_sized (decl))
10994 continue;
10995 n = splay_tree_lookup (ctx->field_map, (splay_tree_key) decl);
10996 if (n == NULL)
10997 continue;
10998 f = (tree) n->value;
10999 f = *tcctx.cb.decl_map->get (f);
11000 gcc_assert (DECL_HAS_VALUE_EXPR_P (decl));
11001 ind = DECL_VALUE_EXPR (decl);
11002 gcc_assert (TREE_CODE (ind) == INDIRECT_REF);
11003 gcc_assert (DECL_P (TREE_OPERAND (ind, 0)));
11004 n = splay_tree_lookup (ctx->sfield_map,
11005 (splay_tree_key) TREE_OPERAND (ind, 0));
11006 sf = (tree) n->value;
11007 sf = *tcctx.cb.decl_map->get (sf);
11008 src = build_simple_mem_ref_loc (loc, sarg);
11009 src = omp_build_component_ref (src, sf);
11010 src = build_simple_mem_ref_loc (loc, src);
11011 dst = build_simple_mem_ref_loc (loc, arg);
11012 dst = omp_build_component_ref (dst, f);
11013 t = lang_hooks.decls.omp_clause_copy_ctor (c, dst, src);
11014 append_to_statement_list (t, &list);
11015 n = splay_tree_lookup (ctx->field_map,
11016 (splay_tree_key) TREE_OPERAND (ind, 0));
11017 df = (tree) n->value;
11018 df = *tcctx.cb.decl_map->get (df);
11019 ptr = build_simple_mem_ref_loc (loc, arg);
11020 ptr = omp_build_component_ref (ptr, df);
11021 t = build2 (MODIFY_EXPR, TREE_TYPE (ptr), ptr,
11022 build_fold_addr_expr_loc (loc, dst));
11023 append_to_statement_list (t, &list);
11024 }
11025
11026 t = build1 (RETURN_EXPR, void_type_node, NULL);
11027 append_to_statement_list (t, &list);
11028
11029 if (tcctx.cb.decl_map)
11030 delete tcctx.cb.decl_map;
11031 pop_gimplify_context (NULL);
11032 BIND_EXPR_BODY (bind) = list;
11033 pop_cfun ();
11034 }
11035
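/* Lower OMP_CLAUSE_DEPEND clauses in *PCLAUSES into a runtime dependence
   array, appending its initialization to ISEQ and a clobber of it to OSEQ,
   and prepend a DEPEND_LAST clause pointing at the array.  Without
   mutexinoutset/depobj dependences the layout is

     { total, n_out_inout, <out/inout addrs>, <in addrs> }

   so e.g. (illustrative) depend(out: a) depend(in: b) yields
   { 2, 1, &a, &b }; otherwise a five-slot header { 0, total, n_out_inout,
   n_mutexinoutset, n_in } precedes the addresses.  */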
11036 static void
11037 lower_depend_clauses (tree *pclauses, gimple_seq *iseq, gimple_seq *oseq)
11038 {
11039 tree c, clauses;
11040 gimple *g;
11041 size_t cnt[4] = { 0, 0, 0, 0 }, idx = 2, i;
11042
11043 clauses = omp_find_clause (*pclauses, OMP_CLAUSE_DEPEND);
11044 gcc_assert (clauses);
11045 for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
11046 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND)
11047 switch (OMP_CLAUSE_DEPEND_KIND (c))
11048 {
11049 case OMP_CLAUSE_DEPEND_LAST:
11050 /* Lowering already done at gimplification. */
11051 return;
11052 case OMP_CLAUSE_DEPEND_IN:
11053 cnt[2]++;
11054 break;
11055 case OMP_CLAUSE_DEPEND_OUT:
11056 case OMP_CLAUSE_DEPEND_INOUT:
11057 cnt[0]++;
11058 break;
11059 case OMP_CLAUSE_DEPEND_MUTEXINOUTSET:
11060 cnt[1]++;
11061 break;
11062 case OMP_CLAUSE_DEPEND_DEPOBJ:
11063 cnt[3]++;
11064 break;
11065 case OMP_CLAUSE_DEPEND_SOURCE:
11066 case OMP_CLAUSE_DEPEND_SINK:
11067 /* FALLTHRU */
11068 default:
11069 gcc_unreachable ();
11070 }
11071 if (cnt[1] || cnt[3])
11072 idx = 5;
11073 size_t total = cnt[0] + cnt[1] + cnt[2] + cnt[3];
11074 tree type = build_array_type_nelts (ptr_type_node, total + idx);
11075 tree array = create_tmp_var (type);
11076 TREE_ADDRESSABLE (array) = 1;
11077 tree r = build4 (ARRAY_REF, ptr_type_node, array, size_int (0), NULL_TREE,
11078 NULL_TREE);
11079 if (idx == 5)
11080 {
11081 g = gimple_build_assign (r, build_int_cst (ptr_type_node, 0));
11082 gimple_seq_add_stmt (iseq, g);
11083 r = build4 (ARRAY_REF, ptr_type_node, array, size_int (1), NULL_TREE,
11084 NULL_TREE);
11085 }
11086 g = gimple_build_assign (r, build_int_cst (ptr_type_node, total));
11087 gimple_seq_add_stmt (iseq, g);
11088 for (i = 0; i < (idx == 5 ? 3 : 1); i++)
11089 {
11090 r = build4 (ARRAY_REF, ptr_type_node, array,
11091 size_int (i + 1 + (idx == 5)), NULL_TREE, NULL_TREE);
11092 g = gimple_build_assign (r, build_int_cst (ptr_type_node, cnt[i]));
11093 gimple_seq_add_stmt (iseq, g);
11094 }
11095 for (i = 0; i < 4; i++)
11096 {
11097 if (cnt[i] == 0)
11098 continue;
11099 for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
11100 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_DEPEND)
11101 continue;
11102 else
11103 {
11104 switch (OMP_CLAUSE_DEPEND_KIND (c))
11105 {
11106 case OMP_CLAUSE_DEPEND_IN:
11107 if (i != 2)
11108 continue;
11109 break;
11110 case OMP_CLAUSE_DEPEND_OUT:
11111 case OMP_CLAUSE_DEPEND_INOUT:
11112 if (i != 0)
11113 continue;
11114 break;
11115 case OMP_CLAUSE_DEPEND_MUTEXINOUTSET:
11116 if (i != 1)
11117 continue;
11118 break;
11119 case OMP_CLAUSE_DEPEND_DEPOBJ:
11120 if (i != 3)
11121 continue;
11122 break;
11123 default:
11124 gcc_unreachable ();
11125 }
11126 tree t = OMP_CLAUSE_DECL (c);
11127 t = fold_convert (ptr_type_node, t);
11128 gimplify_expr (&t, iseq, NULL, is_gimple_val, fb_rvalue);
11129 r = build4 (ARRAY_REF, ptr_type_node, array, size_int (idx++),
11130 NULL_TREE, NULL_TREE);
11131 g = gimple_build_assign (r, t);
11132 gimple_seq_add_stmt (iseq, g);
11133 }
11134 }
11135 c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE_DEPEND);
11136 OMP_CLAUSE_DEPEND_KIND (c) = OMP_CLAUSE_DEPEND_LAST;
11137 OMP_CLAUSE_DECL (c) = build_fold_addr_expr (array);
11138 OMP_CLAUSE_CHAIN (c) = *pclauses;
11139 *pclauses = c;
11140 tree clobber = build_clobber (type);
11141 g = gimple_build_assign (array, clobber);
11142 gimple_seq_add_stmt (oseq, g);
11143 }
11144
11145 /* Lower the OpenMP parallel or task directive in the current statement
11146 in GSI_P. CTX holds context information for the directive. */
11147
11148 static void
11149 lower_omp_taskreg (gimple_stmt_iterator *gsi_p, omp_context *ctx)
11150 {
11151 tree clauses;
11152 tree child_fn, t;
11153 gimple *stmt = gsi_stmt (*gsi_p);
11154 gbind *par_bind, *bind, *dep_bind = NULL;
11155 gimple_seq par_body;
11156 location_t loc = gimple_location (stmt);
11157
11158 clauses = gimple_omp_taskreg_clauses (stmt);
11159 if (gimple_code (stmt) == GIMPLE_OMP_TASK
11160 && gimple_omp_task_taskwait_p (stmt))
11161 {
11162 par_bind = NULL;
11163 par_body = NULL;
11164 }
11165 else
11166 {
11167 par_bind
11168 = as_a <gbind *> (gimple_seq_first_stmt (gimple_omp_body (stmt)));
11169 par_body = gimple_bind_body (par_bind);
11170 }
11171 child_fn = ctx->cb.dst_fn;
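  /* A parallel whose body consists of exactly one workshare region can be
     marked combined here, so that later expansion may use the combined
     parallel + workshare code paths.  */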
11172 if (gimple_code (stmt) == GIMPLE_OMP_PARALLEL
11173 && !gimple_omp_parallel_combined_p (stmt))
11174 {
11175 struct walk_stmt_info wi;
11176 int ws_num = 0;
11177
11178 memset (&wi, 0, sizeof (wi));
11179 wi.info = &ws_num;
11180 wi.val_only = true;
11181 walk_gimple_seq (par_body, check_combined_parallel, NULL, &wi);
11182 if (ws_num == 1)
11183 gimple_omp_parallel_set_combined_p (stmt, true);
11184 }
11185 gimple_seq dep_ilist = NULL;
11186 gimple_seq dep_olist = NULL;
11187 if (gimple_code (stmt) == GIMPLE_OMP_TASK
11188 && omp_find_clause (clauses, OMP_CLAUSE_DEPEND))
11189 {
11190 push_gimplify_context ();
11191 dep_bind = gimple_build_bind (NULL, NULL, make_node (BLOCK));
11192 lower_depend_clauses (gimple_omp_task_clauses_ptr (stmt),
11193 &dep_ilist, &dep_olist);
11194 }
11195
11196 if (gimple_code (stmt) == GIMPLE_OMP_TASK
11197 && gimple_omp_task_taskwait_p (stmt))
11198 {
11199 if (dep_bind)
11200 {
11201 gsi_replace (gsi_p, dep_bind, true);
11202 gimple_bind_add_seq (dep_bind, dep_ilist);
11203 gimple_bind_add_stmt (dep_bind, stmt);
11204 gimple_bind_add_seq (dep_bind, dep_olist);
11205 pop_gimplify_context (dep_bind);
11206 }
11207 return;
11208 }
11209
11210 if (ctx->srecord_type)
11211 create_task_copyfn (as_a <gomp_task *> (stmt), ctx);
11212
11213 gimple_seq tskred_ilist = NULL;
11214 gimple_seq tskred_olist = NULL;
11215 if ((is_task_ctx (ctx)
11216 && gimple_omp_task_taskloop_p (ctx->stmt)
11217 && omp_find_clause (gimple_omp_task_clauses (ctx->stmt),
11218 OMP_CLAUSE_REDUCTION))
11219 || (is_parallel_ctx (ctx)
11220 && omp_find_clause (gimple_omp_parallel_clauses (stmt),
11221 OMP_CLAUSE__REDUCTEMP_)))
11222 {
11223 if (dep_bind == NULL)
11224 {
11225 push_gimplify_context ();
11226 dep_bind = gimple_build_bind (NULL, NULL, make_node (BLOCK));
11227 }
11228 lower_omp_task_reductions (ctx, is_task_ctx (ctx) ? OMP_TASKLOOP
11229 : OMP_PARALLEL,
11230 gimple_omp_taskreg_clauses (ctx->stmt),
11231 &tskred_ilist, &tskred_olist);
11232 }
11233
11234 push_gimplify_context ();
11235
11236 gimple_seq par_olist = NULL;
11237 gimple_seq par_ilist = NULL;
11238 gimple_seq par_rlist = NULL;
11239 lower_rec_input_clauses (clauses, &par_ilist, &par_olist, ctx, NULL);
11240 lower_omp (&par_body, ctx);
11241 if (gimple_code (stmt) != GIMPLE_OMP_TASK)
11242 lower_reduction_clauses (clauses, &par_rlist, NULL, ctx);
11243
11244 /* Declare all the variables created by mapping and the variables
11245 declared in the scope of the parallel body. */
11246 record_vars_into (ctx->block_vars, child_fn);
11247 maybe_remove_omp_member_access_dummy_vars (par_bind);
11248 record_vars_into (gimple_bind_vars (par_bind), child_fn);
11249
11250 if (ctx->record_type)
11251 {
11252 ctx->sender_decl
11253 = create_tmp_var (ctx->srecord_type ? ctx->srecord_type
11254 : ctx->record_type, ".omp_data_o");
11255 DECL_NAMELESS (ctx->sender_decl) = 1;
11256 TREE_ADDRESSABLE (ctx->sender_decl) = 1;
11257 gimple_omp_taskreg_set_data_arg (stmt, ctx->sender_decl);
11258 }
11259
11260 gimple_seq olist = NULL;
11261 gimple_seq ilist = NULL;
11262 lower_send_clauses (clauses, &ilist, &olist, ctx);
11263 lower_send_shared_vars (&ilist, &olist, ctx);
11264
11265 if (ctx->record_type)
11266 {
11267 tree clobber = build_clobber (TREE_TYPE (ctx->sender_decl));
11268 gimple_seq_add_stmt (&olist, gimple_build_assign (ctx->sender_decl,
11269 clobber));
11270 }
11271
11272 /* Once all the expansions are done, sequence all the different
11273 fragments inside gimple_omp_body. */
11274
11275 gimple_seq new_body = NULL;
11276
11277 if (ctx->record_type)
11278 {
11279 t = build_fold_addr_expr_loc (loc, ctx->sender_decl);
11280 /* fixup_child_record_type might have changed receiver_decl's type. */
11281 t = fold_convert_loc (loc, TREE_TYPE (ctx->receiver_decl), t);
11282 gimple_seq_add_stmt (&new_body,
11283 gimple_build_assign (ctx->receiver_decl, t));
11284 }
11285
11286 gimple_seq_add_seq (&new_body, par_ilist);
11287 gimple_seq_add_seq (&new_body, par_body);
11288 gimple_seq_add_seq (&new_body, par_rlist);
11289 if (ctx->cancellable)
11290 gimple_seq_add_stmt (&new_body, gimple_build_label (ctx->cancel_label));
11291 gimple_seq_add_seq (&new_body, par_olist);
11292 new_body = maybe_catch_exception (new_body);
11293 if (gimple_code (stmt) == GIMPLE_OMP_TASK)
11294 gimple_seq_add_stmt (&new_body,
11295 gimple_build_omp_continue (integer_zero_node,
11296 integer_zero_node));
11297 gimple_seq_add_stmt (&new_body, gimple_build_omp_return (false));
11298 gimple_omp_set_body (stmt, new_body);
11299
11300 if (dep_bind && gimple_bind_block (par_bind) == NULL_TREE)
11301 bind = gimple_build_bind (NULL, NULL, make_node (BLOCK));
11302 else
11303 bind = gimple_build_bind (NULL, NULL, gimple_bind_block (par_bind));
11304 gsi_replace (gsi_p, dep_bind ? dep_bind : bind, true);
11305 gimple_bind_add_seq (bind, ilist);
11306 gimple_bind_add_stmt (bind, stmt);
11307 gimple_bind_add_seq (bind, olist);
11308
11309 pop_gimplify_context (NULL);
11310
11311 if (dep_bind)
11312 {
11313 gimple_bind_add_seq (dep_bind, dep_ilist);
11314 gimple_bind_add_seq (dep_bind, tskred_ilist);
11315 gimple_bind_add_stmt (dep_bind, bind);
11316 gimple_bind_add_seq (dep_bind, tskred_olist);
11317 gimple_bind_add_seq (dep_bind, dep_olist);
11318 pop_gimplify_context (dep_bind);
11319 }
11320 }
11321
11322 /* Lower the GIMPLE_OMP_TARGET in the current statement
11323 in GSI_P. CTX holds context information for the directive. */
11324
11325 static void
11326 lower_omp_target (gimple_stmt_iterator *gsi_p, omp_context *ctx)
11327 {
11328 tree clauses;
11329 tree child_fn, t, c;
11330 gomp_target *stmt = as_a <gomp_target *> (gsi_stmt (*gsi_p));
11331 gbind *tgt_bind, *bind, *dep_bind = NULL;
11332 gimple_seq tgt_body, olist, ilist, fplist, new_body;
11333 location_t loc = gimple_location (stmt);
11334 bool offloaded, data_region;
11335 unsigned int map_cnt = 0;
11336
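  /* Regions that are offloaded have their body wrapped in a bind that will
     become the child function; data regions keep their body in place;
     update and enter/exit data constructs have no body at all.  */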
11337 offloaded = is_gimple_omp_offloaded (stmt);
11338 switch (gimple_omp_target_kind (stmt))
11339 {
11340 case GF_OMP_TARGET_KIND_REGION:
11341 case GF_OMP_TARGET_KIND_UPDATE:
11342 case GF_OMP_TARGET_KIND_ENTER_DATA:
11343 case GF_OMP_TARGET_KIND_EXIT_DATA:
11344 case GF_OMP_TARGET_KIND_OACC_PARALLEL:
11345 case GF_OMP_TARGET_KIND_OACC_KERNELS:
11346 case GF_OMP_TARGET_KIND_OACC_SERIAL:
11347 case GF_OMP_TARGET_KIND_OACC_UPDATE:
11348 case GF_OMP_TARGET_KIND_OACC_ENTER_EXIT_DATA:
11349 case GF_OMP_TARGET_KIND_OACC_DECLARE:
11350 data_region = false;
11351 break;
11352 case GF_OMP_TARGET_KIND_DATA:
11353 case GF_OMP_TARGET_KIND_OACC_DATA:
11354 case GF_OMP_TARGET_KIND_OACC_HOST_DATA:
11355 data_region = true;
11356 break;
11357 default:
11358 gcc_unreachable ();
11359 }
11360
11361 clauses = gimple_omp_target_clauses (stmt);
11362
11363 gimple_seq dep_ilist = NULL;
11364 gimple_seq dep_olist = NULL;
11365 if (omp_find_clause (clauses, OMP_CLAUSE_DEPEND))
11366 {
11367 push_gimplify_context ();
11368 dep_bind = gimple_build_bind (NULL, NULL, make_node (BLOCK));
11369 lower_depend_clauses (gimple_omp_target_clauses_ptr (stmt),
11370 &dep_ilist, &dep_olist);
11371 }
11372
11373 tgt_bind = NULL;
11374 tgt_body = NULL;
11375 if (offloaded)
11376 {
11377 tgt_bind = gimple_seq_first_stmt_as_a_bind (gimple_omp_body (stmt));
11378 tgt_body = gimple_bind_body (tgt_bind);
11379 }
11380 else if (data_region)
11381 tgt_body = gimple_omp_body (stmt);
11382 child_fn = ctx->cb.dst_fn;
11383
11384 push_gimplify_context ();
11385 fplist = NULL;
11386
11387 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
11388 switch (OMP_CLAUSE_CODE (c))
11389 {
11390 tree var, x;
11391
11392 default:
11393 break;
11394 case OMP_CLAUSE_MAP:
11395 #if CHECKING_P
11396 /* First check what we're prepared to handle in the following. */
11397 switch (OMP_CLAUSE_MAP_KIND (c))
11398 {
11399 case GOMP_MAP_ALLOC:
11400 case GOMP_MAP_TO:
11401 case GOMP_MAP_FROM:
11402 case GOMP_MAP_TOFROM:
11403 case GOMP_MAP_POINTER:
11404 case GOMP_MAP_TO_PSET:
11405 case GOMP_MAP_DELETE:
11406 case GOMP_MAP_RELEASE:
11407 case GOMP_MAP_ALWAYS_TO:
11408 case GOMP_MAP_ALWAYS_FROM:
11409 case GOMP_MAP_ALWAYS_TOFROM:
11410 case GOMP_MAP_FIRSTPRIVATE_POINTER:
11411 case GOMP_MAP_FIRSTPRIVATE_REFERENCE:
11412 case GOMP_MAP_STRUCT:
11413 case GOMP_MAP_ALWAYS_POINTER:
11414 break;
11415 case GOMP_MAP_IF_PRESENT:
11416 case GOMP_MAP_FORCE_ALLOC:
11417 case GOMP_MAP_FORCE_TO:
11418 case GOMP_MAP_FORCE_FROM:
11419 case GOMP_MAP_FORCE_TOFROM:
11420 case GOMP_MAP_FORCE_PRESENT:
11421 case GOMP_MAP_FORCE_DEVICEPTR:
11422 case GOMP_MAP_DEVICE_RESIDENT:
11423 case GOMP_MAP_LINK:
11424 case GOMP_MAP_ATTACH:
11425 case GOMP_MAP_DETACH:
11426 case GOMP_MAP_FORCE_DETACH:
11427 gcc_assert (is_gimple_omp_oacc (stmt));
11428 break;
11429 default:
11430 gcc_unreachable ();
11431 }
11432 #endif
11433 /* FALLTHRU */
11434 case OMP_CLAUSE_TO:
11435 case OMP_CLAUSE_FROM:
11436 oacc_firstprivate:
11437 var = OMP_CLAUSE_DECL (c);
11438 if (!DECL_P (var))
11439 {
11440 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_MAP
11441 || (!OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c)
11442 && (OMP_CLAUSE_MAP_KIND (c)
11443 != GOMP_MAP_FIRSTPRIVATE_POINTER)))
11444 map_cnt++;
11445 continue;
11446 }
11447
11448 if (DECL_SIZE (var)
11449 && TREE_CODE (DECL_SIZE (var)) != INTEGER_CST)
11450 {
11451 tree var2 = DECL_VALUE_EXPR (var);
11452 gcc_assert (TREE_CODE (var2) == INDIRECT_REF);
11453 var2 = TREE_OPERAND (var2, 0);
11454 gcc_assert (DECL_P (var2));
11455 var = var2;
11456 }
11457
11458 if (offloaded
11459 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
11460 && (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER
11461 || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_REFERENCE))
11462 {
11463 if (TREE_CODE (TREE_TYPE (var)) == ARRAY_TYPE)
11464 {
11465 if (is_global_var (maybe_lookup_decl_in_outer_ctx (var, ctx))
11466 && varpool_node::get_create (var)->offloadable)
11467 continue;
11468
11469 tree type = build_pointer_type (TREE_TYPE (var));
11470 tree new_var = lookup_decl (var, ctx);
11471 x = create_tmp_var_raw (type, get_name (new_var));
11472 gimple_add_tmp_var (x);
11473 x = build_simple_mem_ref (x);
11474 SET_DECL_VALUE_EXPR (new_var, x);
11475 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
11476 }
11477 continue;
11478 }
11479
11480 if (!maybe_lookup_field (var, ctx))
11481 continue;
11482
11483 /* Don't remap compute constructs' reduction variables, because the
11484 intermediate result must be local to each gang. */
11485 if (offloaded && !(OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
11486 && OMP_CLAUSE_MAP_IN_REDUCTION (c)))
11487 {
11488 x = build_receiver_ref (var, true, ctx);
11489 tree new_var = lookup_decl (var, ctx);
11490
11491 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
11492 && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_POINTER
11493 && !OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c)
11494 && TREE_CODE (TREE_TYPE (var)) == ARRAY_TYPE)
11495 x = build_simple_mem_ref (x);
11496 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
11497 {
11498 gcc_assert (is_gimple_omp_oacc (ctx->stmt));
11499 if (omp_is_reference (new_var)
11500 && (TREE_CODE (TREE_TYPE (new_var)) != POINTER_TYPE
11501 || DECL_BY_REFERENCE (var)))
11502 {
11503 /* Create a local object to hold the instance
11504 value. */
11505 tree type = TREE_TYPE (TREE_TYPE (new_var));
11506 const char *id = IDENTIFIER_POINTER (DECL_NAME (new_var));
11507 tree inst = create_tmp_var (type, id);
11508 gimplify_assign (inst, fold_indirect_ref (x), &fplist);
11509 x = build_fold_addr_expr (inst);
11510 }
11511 gimplify_assign (new_var, x, &fplist);
11512 }
11513 else if (DECL_P (new_var))
11514 {
11515 SET_DECL_VALUE_EXPR (new_var, x);
11516 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
11517 }
11518 else
11519 gcc_unreachable ();
11520 }
11521 map_cnt++;
11522 break;
11523
11524 case OMP_CLAUSE_FIRSTPRIVATE:
11525 if (is_oacc_parallel_or_serial (ctx))
11526 goto oacc_firstprivate;
11527 map_cnt++;
11528 var = OMP_CLAUSE_DECL (c);
11529 if (!omp_is_reference (var)
11530 && !is_gimple_reg_type (TREE_TYPE (var)))
11531 {
11532 tree new_var = lookup_decl (var, ctx);
11533 if (is_variable_sized (var))
11534 {
11535 tree pvar = DECL_VALUE_EXPR (var);
11536 gcc_assert (TREE_CODE (pvar) == INDIRECT_REF);
11537 pvar = TREE_OPERAND (pvar, 0);
11538 gcc_assert (DECL_P (pvar));
11539 tree new_pvar = lookup_decl (pvar, ctx);
11540 x = build_fold_indirect_ref (new_pvar);
11541 TREE_THIS_NOTRAP (x) = 1;
11542 }
11543 else
11544 x = build_receiver_ref (var, true, ctx);
11545 SET_DECL_VALUE_EXPR (new_var, x);
11546 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
11547 }
11548 break;
11549
11550 case OMP_CLAUSE_PRIVATE:
11551 if (is_gimple_omp_oacc (ctx->stmt))
11552 break;
11553 var = OMP_CLAUSE_DECL (c);
11554 if (is_variable_sized (var))
11555 {
11556 tree new_var = lookup_decl (var, ctx);
11557 tree pvar = DECL_VALUE_EXPR (var);
11558 gcc_assert (TREE_CODE (pvar) == INDIRECT_REF);
11559 pvar = TREE_OPERAND (pvar, 0);
11560 gcc_assert (DECL_P (pvar));
11561 tree new_pvar = lookup_decl (pvar, ctx);
11562 x = build_fold_indirect_ref (new_pvar);
11563 TREE_THIS_NOTRAP (x) = 1;
11564 SET_DECL_VALUE_EXPR (new_var, x);
11565 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
11566 }
11567 break;
11568
11569 case OMP_CLAUSE_USE_DEVICE_PTR:
11570 case OMP_CLAUSE_USE_DEVICE_ADDR:
11571 case OMP_CLAUSE_IS_DEVICE_PTR:
11572 var = OMP_CLAUSE_DECL (c);
11573 map_cnt++;
11574 if (is_variable_sized (var))
11575 {
11576 tree new_var = lookup_decl (var, ctx);
11577 tree pvar = DECL_VALUE_EXPR (var);
11578 gcc_assert (TREE_CODE (pvar) == INDIRECT_REF);
11579 pvar = TREE_OPERAND (pvar, 0);
11580 gcc_assert (DECL_P (pvar));
11581 tree new_pvar = lookup_decl (pvar, ctx);
11582 x = build_fold_indirect_ref (new_pvar);
11583 TREE_THIS_NOTRAP (x) = 1;
11584 SET_DECL_VALUE_EXPR (new_var, x);
11585 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
11586 }
11587 else if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_USE_DEVICE_ADDR
11588 && !omp_is_reference (var)
11589 && !omp_is_allocatable_or_ptr (var)
11590 && !lang_hooks.decls.omp_array_data (var, true))
11591 || TREE_CODE (TREE_TYPE (var)) == ARRAY_TYPE)
11592 {
11593 tree new_var = lookup_decl (var, ctx);
11594 tree type = build_pointer_type (TREE_TYPE (var));
11595 x = create_tmp_var_raw (type, get_name (new_var));
11596 gimple_add_tmp_var (x);
11597 x = build_simple_mem_ref (x);
11598 SET_DECL_VALUE_EXPR (new_var, x);
11599 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
11600 }
11601 else
11602 {
11603 tree new_var = lookup_decl (var, ctx);
11604 x = create_tmp_var_raw (TREE_TYPE (new_var), get_name (new_var));
11605 gimple_add_tmp_var (x);
11606 SET_DECL_VALUE_EXPR (new_var, x);
11607 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
11608 }
11609 break;
11610 }
11611
11612 if (offloaded)
11613 {
11614 target_nesting_level++;
11615 lower_omp (&tgt_body, ctx);
11616 target_nesting_level--;
11617 }
11618 else if (data_region)
11619 lower_omp (&tgt_body, ctx);
11620
11621 if (offloaded)
11622 {
11623 /* Declare all the variables created by mapping and the variables
11624 declared in the scope of the target body. */
11625 record_vars_into (ctx->block_vars, child_fn);
11626 maybe_remove_omp_member_access_dummy_vars (tgt_bind);
11627 record_vars_into (gimple_bind_vars (tgt_bind), child_fn);
11628 }
11629
11630 olist = NULL;
11631 ilist = NULL;
11632 if (ctx->record_type)
11633 {
11634 ctx->sender_decl
11635 = create_tmp_var (ctx->record_type, ".omp_data_arr");
11636 DECL_NAMELESS (ctx->sender_decl) = 1;
11637 TREE_ADDRESSABLE (ctx->sender_decl) = 1;
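      /* The target data argument is a 3-element TREE_VEC: the .omp_data_arr
         record holding the addresses, an .omp_data_sizes array with the
         byte size of each mapping, and an .omp_data_kinds array whose
         entries encode the map kind in the low TALIGN_SHIFT bits and
         ceil_log2 of the alignment above them.  */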
11638 t = make_tree_vec (3);
11639 TREE_VEC_ELT (t, 0) = ctx->sender_decl;
11640 TREE_VEC_ELT (t, 1)
11641 = create_tmp_var (build_array_type_nelts (size_type_node, map_cnt),
11642 ".omp_data_sizes");
11643 DECL_NAMELESS (TREE_VEC_ELT (t, 1)) = 1;
11644 TREE_ADDRESSABLE (TREE_VEC_ELT (t, 1)) = 1;
11645 TREE_STATIC (TREE_VEC_ELT (t, 1)) = 1;
11646 tree tkind_type = short_unsigned_type_node;
11647 int talign_shift = 8;
11648 TREE_VEC_ELT (t, 2)
11649 = create_tmp_var (build_array_type_nelts (tkind_type, map_cnt),
11650 ".omp_data_kinds");
11651 DECL_NAMELESS (TREE_VEC_ELT (t, 2)) = 1;
11652 TREE_ADDRESSABLE (TREE_VEC_ELT (t, 2)) = 1;
11653 TREE_STATIC (TREE_VEC_ELT (t, 2)) = 1;
11654 gimple_omp_target_set_data_arg (stmt, t);
11655
11656 vec<constructor_elt, va_gc> *vsize;
11657 vec<constructor_elt, va_gc> *vkind;
11658 vec_alloc (vsize, map_cnt);
11659 vec_alloc (vkind, map_cnt);
11660 unsigned int map_idx = 0;
11661
11662 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
11663 switch (OMP_CLAUSE_CODE (c))
11664 {
11665 tree ovar, nc, s, purpose, var, x, type;
11666 unsigned int talign;
11667
11668 default:
11669 break;
11670
11671 case OMP_CLAUSE_MAP:
11672 case OMP_CLAUSE_TO:
11673 case OMP_CLAUSE_FROM:
11674 oacc_firstprivate_map:
11675 nc = c;
11676 ovar = OMP_CLAUSE_DECL (c);
11677 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
11678 && (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER
11679 || (OMP_CLAUSE_MAP_KIND (c)
11680 == GOMP_MAP_FIRSTPRIVATE_REFERENCE)))
11681 break;
11682 if (!DECL_P (ovar))
11683 {
11684 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
11685 && OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c))
11686 {
11687 gcc_checking_assert (OMP_CLAUSE_DECL (OMP_CLAUSE_CHAIN (c))
11688 == get_base_address (ovar));
11689 nc = OMP_CLAUSE_CHAIN (c);
11690 ovar = OMP_CLAUSE_DECL (nc);
11691 }
11692 else
11693 {
11694 tree x = build_sender_ref (ovar, ctx);
11695 tree v
11696 = build_fold_addr_expr_with_type (ovar, ptr_type_node);
11697 gimplify_assign (x, v, &ilist);
11698 nc = NULL_TREE;
11699 }
11700 }
11701 else
11702 {
11703 if (DECL_SIZE (ovar)
11704 && TREE_CODE (DECL_SIZE (ovar)) != INTEGER_CST)
11705 {
11706 tree ovar2 = DECL_VALUE_EXPR (ovar);
11707 gcc_assert (TREE_CODE (ovar2) == INDIRECT_REF);
11708 ovar2 = TREE_OPERAND (ovar2, 0);
11709 gcc_assert (DECL_P (ovar2));
11710 ovar = ovar2;
11711 }
11712 if (!maybe_lookup_field (ovar, ctx))
11713 continue;
11714 }
11715
11716 talign = TYPE_ALIGN_UNIT (TREE_TYPE (ovar));
11717 if (DECL_P (ovar) && DECL_ALIGN_UNIT (ovar) > talign)
11718 talign = DECL_ALIGN_UNIT (ovar);
11719 if (nc)
11720 {
11721 var = lookup_decl_in_outer_ctx (ovar, ctx);
11722 x = build_sender_ref (ovar, ctx);
11723
11724 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
11725 && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_POINTER
11726 && !OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c)
11727 && TREE_CODE (TREE_TYPE (ovar)) == ARRAY_TYPE)
11728 {
11729 gcc_assert (offloaded);
11730 tree avar
11731 = create_tmp_var (TREE_TYPE (TREE_TYPE (x)));
11732 mark_addressable (avar);
11733 gimplify_assign (avar, build_fold_addr_expr (var), &ilist);
11734 talign = DECL_ALIGN_UNIT (avar);
11735 avar = build_fold_addr_expr (avar);
11736 gimplify_assign (x, avar, &ilist);
11737 }
11738 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
11739 {
11740 gcc_assert (is_gimple_omp_oacc (ctx->stmt));
11741 if (!omp_is_reference (var))
11742 {
11743 if (is_gimple_reg (var)
11744 && OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (c))
11745 TREE_NO_WARNING (var) = 1;
11746 var = build_fold_addr_expr (var);
11747 }
11748 else
11749 talign = TYPE_ALIGN_UNIT (TREE_TYPE (TREE_TYPE (ovar)));
11750 gimplify_assign (x, var, &ilist);
11751 }
11752 else if (is_gimple_reg (var))
11753 {
11754 gcc_assert (offloaded);
11755 tree avar = create_tmp_var (TREE_TYPE (var));
11756 mark_addressable (avar);
11757 enum gomp_map_kind map_kind = OMP_CLAUSE_MAP_KIND (c);
11758 if (GOMP_MAP_COPY_TO_P (map_kind)
11759 || map_kind == GOMP_MAP_POINTER
11760 || map_kind == GOMP_MAP_TO_PSET
11761 || map_kind == GOMP_MAP_FORCE_DEVICEPTR)
11762 {
11763 /* If we need to initialize a temporary
11764 with VAR because it is not addressable, and
11765 the variable hasn't been initialized yet, then
11766 we'll get a warning for the store to avar.
11767 Don't warn in that case; the mapping might
11768 be implicit. */
11769 TREE_NO_WARNING (var) = 1;
11770 gimplify_assign (avar, var, &ilist);
11771 }
11772 avar = build_fold_addr_expr (avar);
11773 gimplify_assign (x, avar, &ilist);
11774 if ((GOMP_MAP_COPY_FROM_P (map_kind)
11775 || map_kind == GOMP_MAP_FORCE_DEVICEPTR)
11776 && !TYPE_READONLY (TREE_TYPE (var)))
11777 {
11778 x = unshare_expr (x);
11779 x = build_simple_mem_ref (x);
11780 gimplify_assign (var, x, &olist);
11781 }
11782 }
11783 else
11784 {
11785 /* While MAP is handled explicitly by the FE,
11786 for 'target update', only the identifier is passed. */
11787 if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FROM
11788 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_TO)
11789 && (omp_is_allocatable_or_ptr (var)
11790 && omp_check_optional_argument (var, false)))
11791 var = build_fold_indirect_ref (var);
11792 else if ((OMP_CLAUSE_CODE (c) != OMP_CLAUSE_FROM
11793 && OMP_CLAUSE_CODE (c) != OMP_CLAUSE_TO)
11794 || (!omp_is_allocatable_or_ptr (var)
11795 && !omp_check_optional_argument (var, false)))
11796 var = build_fold_addr_expr (var);
11797 gimplify_assign (x, var, &ilist);
11798 }
11799 }
11800 s = NULL_TREE;
11801 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
11802 {
11803 gcc_checking_assert (is_gimple_omp_oacc (ctx->stmt));
11804 s = TREE_TYPE (ovar);
11805 if (TREE_CODE (s) == REFERENCE_TYPE
11806 || omp_check_optional_argument (ovar, false))
11807 s = TREE_TYPE (s);
11808 s = TYPE_SIZE_UNIT (s);
11809 }
11810 else
11811 s = OMP_CLAUSE_SIZE (c);
11812 if (s == NULL_TREE)
11813 s = TYPE_SIZE_UNIT (TREE_TYPE (ovar));
11814 s = fold_convert (size_type_node, s);
11815 purpose = size_int (map_idx++);
11816 CONSTRUCTOR_APPEND_ELT (vsize, purpose, s);
11817 if (TREE_CODE (s) != INTEGER_CST)
11818 TREE_STATIC (TREE_VEC_ELT (t, 1)) = 0;
11819
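	    /* A map of a maybe-zero-length array section must degrade to
	       the corresponding ZERO_LEN_ARRAY_SECTION kind when its
	       runtime length is zero; if the size is not constant, a
	       COND_EXPR choosing between the two encodings is built
	       below.  */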
11820 unsigned HOST_WIDE_INT tkind, tkind_zero;
11821 switch (OMP_CLAUSE_CODE (c))
11822 {
11823 case OMP_CLAUSE_MAP:
11824 tkind = OMP_CLAUSE_MAP_KIND (c);
11825 tkind_zero = tkind;
11826 if (OMP_CLAUSE_MAP_MAYBE_ZERO_LENGTH_ARRAY_SECTION (c))
11827 switch (tkind)
11828 {
11829 case GOMP_MAP_ALLOC:
11830 case GOMP_MAP_IF_PRESENT:
11831 case GOMP_MAP_TO:
11832 case GOMP_MAP_FROM:
11833 case GOMP_MAP_TOFROM:
11834 case GOMP_MAP_ALWAYS_TO:
11835 case GOMP_MAP_ALWAYS_FROM:
11836 case GOMP_MAP_ALWAYS_TOFROM:
11837 case GOMP_MAP_RELEASE:
11838 case GOMP_MAP_FORCE_TO:
11839 case GOMP_MAP_FORCE_FROM:
11840 case GOMP_MAP_FORCE_TOFROM:
11841 case GOMP_MAP_FORCE_PRESENT:
11842 tkind_zero = GOMP_MAP_ZERO_LEN_ARRAY_SECTION;
11843 break;
11844 case GOMP_MAP_DELETE:
11845 tkind_zero = GOMP_MAP_DELETE_ZERO_LEN_ARRAY_SECTION;
11846 default:
11847 break;
11848 }
11849 if (tkind_zero != tkind)
11850 {
11851 if (integer_zerop (s))
11852 tkind = tkind_zero;
11853 else if (integer_nonzerop (s))
11854 tkind_zero = tkind;
11855 }
11856 break;
11857 case OMP_CLAUSE_FIRSTPRIVATE:
11858 gcc_checking_assert (is_gimple_omp_oacc (ctx->stmt));
11859 tkind = GOMP_MAP_TO;
11860 tkind_zero = tkind;
11861 break;
11862 case OMP_CLAUSE_TO:
11863 tkind = GOMP_MAP_TO;
11864 tkind_zero = tkind;
11865 break;
11866 case OMP_CLAUSE_FROM:
11867 tkind = GOMP_MAP_FROM;
11868 tkind_zero = tkind;
11869 break;
11870 default:
11871 gcc_unreachable ();
11872 }
11873 gcc_checking_assert (tkind
11874 < (HOST_WIDE_INT_C (1U) << talign_shift));
11875 gcc_checking_assert (tkind_zero
11876 < (HOST_WIDE_INT_C (1U) << talign_shift));
11877 talign = ceil_log2 (talign);
11878 tkind |= talign << talign_shift;
11879 tkind_zero |= talign << talign_shift;
11880 gcc_checking_assert (tkind
11881 <= tree_to_uhwi (TYPE_MAX_VALUE (tkind_type)));
11882 gcc_checking_assert (tkind_zero
11883 <= tree_to_uhwi (TYPE_MAX_VALUE (tkind_type)));
11884 if (tkind == tkind_zero)
11885 x = build_int_cstu (tkind_type, tkind);
11886 else
11887 {
11888 TREE_STATIC (TREE_VEC_ELT (t, 2)) = 0;
11889 x = build3 (COND_EXPR, tkind_type,
11890 fold_build2 (EQ_EXPR, boolean_type_node,
11891 unshare_expr (s), size_zero_node),
11892 build_int_cstu (tkind_type, tkind_zero),
11893 build_int_cstu (tkind_type, tkind));
11894 }
11895 CONSTRUCTOR_APPEND_ELT (vkind, purpose, x);
11896 if (nc && nc != c)
11897 c = nc;
11898 break;
11899
11900 case OMP_CLAUSE_FIRSTPRIVATE:
11901 if (is_oacc_parallel_or_serial (ctx))
11902 goto oacc_firstprivate_map;
11903 ovar = OMP_CLAUSE_DECL (c);
11904 if (omp_is_reference (ovar))
11905 talign = TYPE_ALIGN_UNIT (TREE_TYPE (TREE_TYPE (ovar)));
11906 else
11907 talign = DECL_ALIGN_UNIT (ovar);
11908 var = lookup_decl_in_outer_ctx (ovar, ctx);
11909 x = build_sender_ref (ovar, ctx);
11910 tkind = GOMP_MAP_FIRSTPRIVATE;
11911 type = TREE_TYPE (ovar);
11912 if (omp_is_reference (ovar))
11913 type = TREE_TYPE (type);
11914 if ((INTEGRAL_TYPE_P (type)
11915 && TYPE_PRECISION (type) <= POINTER_SIZE)
11916 || TREE_CODE (type) == POINTER_TYPE)
11917 {
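	      /* Small integral and pointer firstprivates are passed by
		 value: the value itself, widened to pointer size if
		 needed, is stored in the sender slot instead of an
		 address.  */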
11918 tkind = GOMP_MAP_FIRSTPRIVATE_INT;
11919 tree t = var;
11920 if (omp_is_reference (var))
11921 t = build_simple_mem_ref (var);
11922 else if (OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (c))
11923 TREE_NO_WARNING (var) = 1;
11924 if (TREE_CODE (type) != POINTER_TYPE)
11925 t = fold_convert (pointer_sized_int_node, t);
11926 t = fold_convert (TREE_TYPE (x), t);
11927 gimplify_assign (x, t, &ilist);
11928 }
11929 else if (omp_is_reference (var))
11930 gimplify_assign (x, var, &ilist);
11931 else if (is_gimple_reg (var))
11932 {
11933 tree avar = create_tmp_var (TREE_TYPE (var));
11934 mark_addressable (avar);
11935 if (OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (c))
11936 TREE_NO_WARNING (var) = 1;
11937 gimplify_assign (avar, var, &ilist);
11938 avar = build_fold_addr_expr (avar);
11939 gimplify_assign (x, avar, &ilist);
11940 }
11941 else
11942 {
11943 var = build_fold_addr_expr (var);
11944 gimplify_assign (x, var, &ilist);
11945 }
11946 if (tkind == GOMP_MAP_FIRSTPRIVATE_INT)
11947 s = size_int (0);
11948 else if (omp_is_reference (ovar))
11949 s = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ovar)));
11950 else
11951 s = TYPE_SIZE_UNIT (TREE_TYPE (ovar));
11952 s = fold_convert (size_type_node, s);
11953 purpose = size_int (map_idx++);
11954 CONSTRUCTOR_APPEND_ELT (vsize, purpose, s);
11955 if (TREE_CODE (s) != INTEGER_CST)
11956 TREE_STATIC (TREE_VEC_ELT (t, 1)) = 0;
11957
11958 gcc_checking_assert (tkind
11959 < (HOST_WIDE_INT_C (1U) << talign_shift));
11960 talign = ceil_log2 (talign);
11961 tkind |= talign << talign_shift;
11962 gcc_checking_assert (tkind
11963 <= tree_to_uhwi (TYPE_MAX_VALUE (tkind_type)));
11964 CONSTRUCTOR_APPEND_ELT (vkind, purpose,
11965 build_int_cstu (tkind_type, tkind));
11966 break;
11967
11968 case OMP_CLAUSE_USE_DEVICE_PTR:
11969 case OMP_CLAUSE_USE_DEVICE_ADDR:
11970 case OMP_CLAUSE_IS_DEVICE_PTR:
11971 ovar = OMP_CLAUSE_DECL (c);
11972 var = lookup_decl_in_outer_ctx (ovar, ctx);
11973
11974 if (lang_hooks.decls.omp_array_data (ovar, true))
11975 {
11976 tkind = (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_IS_DEVICE_PTR
11977 ? GOMP_MAP_USE_DEVICE_PTR : GOMP_MAP_FIRSTPRIVATE_INT);
11978 x = build_sender_ref ((splay_tree_key) &DECL_NAME (ovar), ctx);
11979 }
11980 else if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_IS_DEVICE_PTR)
11981 {
11982 tkind = GOMP_MAP_USE_DEVICE_PTR;
11983 x = build_sender_ref ((splay_tree_key) &DECL_UID (ovar), ctx);
11984 }
11985 else
11986 {
11987 tkind = GOMP_MAP_FIRSTPRIVATE_INT;
11988 x = build_sender_ref (ovar, ctx);
11989 }
11990
11991 if (is_gimple_omp_oacc (ctx->stmt))
11992 {
11993 gcc_assert (tkind == GOMP_MAP_USE_DEVICE_PTR);
11994
11995 if (OMP_CLAUSE_USE_DEVICE_PTR_IF_PRESENT (c))
11996 tkind = GOMP_MAP_USE_DEVICE_PTR_IF_PRESENT;
11997 }
11998
11999 type = TREE_TYPE (ovar);
12000 if (lang_hooks.decls.omp_array_data (ovar, true))
12001 var = lang_hooks.decls.omp_array_data (ovar, false);
12002 else if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_USE_DEVICE_ADDR
12003 && !omp_is_reference (ovar)
12004 && !omp_is_allocatable_or_ptr (ovar))
12005 || TREE_CODE (type) == ARRAY_TYPE)
12006 var = build_fold_addr_expr (var);
12007 else
12008 {
12009 if (omp_is_reference (ovar)
12010 || omp_check_optional_argument (ovar, false)
12011 || omp_is_allocatable_or_ptr (ovar))
12012 {
12013 type = TREE_TYPE (type);
12014 if (TREE_CODE (type) != ARRAY_TYPE
12015 && ((OMP_CLAUSE_CODE (c) != OMP_CLAUSE_USE_DEVICE_ADDR
12016 && !omp_is_allocatable_or_ptr (ovar))
12017 || (omp_is_reference (ovar)
12018 && omp_is_allocatable_or_ptr (ovar))))
12019 var = build_simple_mem_ref (var);
12020 var = fold_convert (TREE_TYPE (x), var);
12021 }
12022 }
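	    /* For optional arguments (e.g. Fortran OPTIONAL), only store
	       the address when the argument is present; otherwise store a
	       null pointer.  */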
12023 tree present;
12024 present = omp_check_optional_argument (ovar, true);
12025 if (present)
12026 {
12027 tree null_label = create_artificial_label (UNKNOWN_LOCATION);
12028 tree notnull_label = create_artificial_label (UNKNOWN_LOCATION);
12029 tree opt_arg_label = create_artificial_label (UNKNOWN_LOCATION);
12030 tree new_x = unshare_expr (x);
12031 gimplify_expr (&present, &ilist, NULL, is_gimple_val,
12032 fb_rvalue);
12033 gcond *cond = gimple_build_cond_from_tree (present,
12034 notnull_label,
12035 null_label);
12036 gimple_seq_add_stmt (&ilist, cond);
12037 gimple_seq_add_stmt (&ilist, gimple_build_label (null_label));
12038 gimplify_assign (new_x, null_pointer_node, &ilist);
12039 gimple_seq_add_stmt (&ilist, gimple_build_goto (opt_arg_label));
12040 gimple_seq_add_stmt (&ilist,
12041 gimple_build_label (notnull_label));
12042 gimplify_assign (x, var, &ilist);
12043 gimple_seq_add_stmt (&ilist,
12044 gimple_build_label (opt_arg_label));
12045 }
12046 else
12047 gimplify_assign (x, var, &ilist);
12048 s = size_int (0);
12049 purpose = size_int (map_idx++);
12050 CONSTRUCTOR_APPEND_ELT (vsize, purpose, s);
12051 gcc_checking_assert (tkind
12052 < (HOST_WIDE_INT_C (1U) << talign_shift));
12053 gcc_checking_assert (tkind
12054 <= tree_to_uhwi (TYPE_MAX_VALUE (tkind_type)));
12055 CONSTRUCTOR_APPEND_ELT (vkind, purpose,
12056 build_int_cstu (tkind_type, tkind));
12057 break;
12058 }
12059
12060 gcc_assert (map_idx == map_cnt);
12061
12062 DECL_INITIAL (TREE_VEC_ELT (t, 1))
12063 = build_constructor (TREE_TYPE (TREE_VEC_ELT (t, 1)), vsize);
12064 DECL_INITIAL (TREE_VEC_ELT (t, 2))
12065 = build_constructor (TREE_TYPE (TREE_VEC_ELT (t, 2)), vkind);
12066 for (int i = 1; i <= 2; i++)
12067 if (!TREE_STATIC (TREE_VEC_ELT (t, i)))
12068 {
12069 gimple_seq initlist = NULL;
12070 force_gimple_operand (build1 (DECL_EXPR, void_type_node,
12071 TREE_VEC_ELT (t, i)),
12072 &initlist, true, NULL_TREE);
12073 gimple_seq_add_seq (&ilist, initlist);
12074
12075 tree clobber = build_clobber (TREE_TYPE (TREE_VEC_ELT (t, i)));
12076 gimple_seq_add_stmt (&olist,
12077 gimple_build_assign (TREE_VEC_ELT (t, i),
12078 clobber));
12079 }
12080
12081 tree clobber = build_clobber (ctx->record_type);
12082 gimple_seq_add_stmt (&olist, gimple_build_assign (ctx->sender_decl,
12083 clobber));
12084 }
12085
12086 /* Once all the expansions are done, sequence all the different
12087 fragments inside gimple_omp_body. */
12088
12089 new_body = NULL;
12090
12091 if (offloaded
12092 && ctx->record_type)
12093 {
12094 t = build_fold_addr_expr_loc (loc, ctx->sender_decl);
12095 /* fixup_child_record_type might have changed receiver_decl's type. */
12096 t = fold_convert_loc (loc, TREE_TYPE (ctx->receiver_decl), t);
12097 gimple_seq_add_stmt (&new_body,
12098 gimple_build_assign (ctx->receiver_decl, t));
12099 }
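  /* Schematically (a sketch; ".omp_data_o" / ".omp_data_i" are the names
     such decls typically get in GIMPLE dumps), the assignment just built is

       .omp_data_i = (struct .omp_data_s *) &.omp_data_o;

     initializing the child function's receiver pointer with the address
     of the sender structure filled in by the code above.  */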
12100 gimple_seq_add_seq (&new_body, fplist);
12101
12102 if (offloaded || data_region)
12103 {
12104 tree prev = NULL_TREE;
12105 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
12106 switch (OMP_CLAUSE_CODE (c))
12107 {
12108 tree var, x;
12109 default:
12110 break;
12111 case OMP_CLAUSE_FIRSTPRIVATE:
12112 if (is_gimple_omp_oacc (ctx->stmt))
12113 break;
12114 var = OMP_CLAUSE_DECL (c);
12115 if (omp_is_reference (var)
12116 || is_gimple_reg_type (TREE_TYPE (var)))
12117 {
12118 tree new_var = lookup_decl (var, ctx);
12119 tree type;
12120 type = TREE_TYPE (var);
12121 if (omp_is_reference (var))
12122 type = TREE_TYPE (type);
12123 if ((INTEGRAL_TYPE_P (type)
12124 && TYPE_PRECISION (type) <= POINTER_SIZE)
12125 || TREE_CODE (type) == POINTER_TYPE)
12126 {
12127 x = build_receiver_ref (var, false, ctx);
12128 if (TREE_CODE (type) != POINTER_TYPE)
12129 x = fold_convert (pointer_sized_int_node, x);
12130 x = fold_convert (type, x);
12131 gimplify_expr (&x, &new_body, NULL, is_gimple_val,
12132 fb_rvalue);
12133 if (omp_is_reference (var))
12134 {
12135 tree v = create_tmp_var_raw (type, get_name (var));
12136 gimple_add_tmp_var (v);
12137 TREE_ADDRESSABLE (v) = 1;
12138 gimple_seq_add_stmt (&new_body,
12139 gimple_build_assign (v, x));
12140 x = build_fold_addr_expr (v);
12141 }
12142 gimple_seq_add_stmt (&new_body,
12143 gimple_build_assign (new_var, x));
12144 }
12145 else
12146 {
12147 x = build_receiver_ref (var, !omp_is_reference (var), ctx);
12148 gimplify_expr (&x, &new_body, NULL, is_gimple_val,
12149 fb_rvalue);
12150 gimple_seq_add_stmt (&new_body,
12151 gimple_build_assign (new_var, x));
12152 }
12153 }
12154 else if (is_variable_sized (var))
12155 {
12156 tree pvar = DECL_VALUE_EXPR (var);
12157 gcc_assert (TREE_CODE (pvar) == INDIRECT_REF);
12158 pvar = TREE_OPERAND (pvar, 0);
12159 gcc_assert (DECL_P (pvar));
12160 tree new_var = lookup_decl (pvar, ctx);
12161 x = build_receiver_ref (var, false, ctx);
12162 gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
12163 gimple_seq_add_stmt (&new_body,
12164 gimple_build_assign (new_var, x));
12165 }
12166 break;
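	  /* For example (a sketch), given "#pragma omp target
	     firstprivate(n)" with an int n, the host passes n's value
	     directly in the pointer slot (GOMP_MAP_FIRSTPRIVATE_INT) and
	     the code above recovers it roughly as

	       n = (int) (uintptr_t) <receiver ref>;

	     whereas values that do not fit in a pointer are instead read
	     through the passed pointer.  */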
12167 case OMP_CLAUSE_PRIVATE:
12168 if (is_gimple_omp_oacc (ctx->stmt))
12169 break;
12170 var = OMP_CLAUSE_DECL (c);
12171 if (omp_is_reference (var))
12172 {
12173 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
12174 tree new_var = lookup_decl (var, ctx);
12175 x = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (new_var)));
12176 if (TREE_CONSTANT (x))
12177 {
12178 x = create_tmp_var_raw (TREE_TYPE (TREE_TYPE (new_var)),
12179 get_name (var));
12180 gimple_add_tmp_var (x);
12181 TREE_ADDRESSABLE (x) = 1;
12182 x = build_fold_addr_expr_loc (clause_loc, x);
12183 }
12184 else
12185 break;
12186
12187 x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x);
12188 gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
12189 gimple_seq_add_stmt (&new_body,
12190 gimple_build_assign (new_var, x));
12191 }
12192 break;
12193 case OMP_CLAUSE_USE_DEVICE_PTR:
12194 case OMP_CLAUSE_USE_DEVICE_ADDR:
12195 case OMP_CLAUSE_IS_DEVICE_PTR:
12196 tree new_var;
12197 gimple_seq assign_body;
12198 bool is_array_data;
12199 bool do_optional_check;
12200 assign_body = NULL;
12201 do_optional_check = false;
12202 var = OMP_CLAUSE_DECL (c);
12203 is_array_data = lang_hooks.decls.omp_array_data (var, true) != NULL;
12204
12205 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_IS_DEVICE_PTR)
12206 x = build_sender_ref (is_array_data
12207 ? (splay_tree_key) &DECL_NAME (var)
12208 : (splay_tree_key) &DECL_UID (var), ctx);
12209 else
12210 x = build_receiver_ref (var, false, ctx);
12211
12212 if (is_array_data)
12213 {
12214 bool is_ref = omp_is_reference (var);
12215 do_optional_check = true;
12216 /* First, we copy the descriptor data from the host; then
12217 we update its data to point to the target address. */
12218 new_var = lookup_decl (var, ctx);
12219 new_var = DECL_VALUE_EXPR (new_var);
12220 tree v = new_var;
12221
12222 if (is_ref)
12223 {
12224 var = build_fold_indirect_ref (var);
12225 gimplify_expr (&var, &assign_body, NULL, is_gimple_val,
12226 fb_rvalue);
12227 v = create_tmp_var_raw (TREE_TYPE (var), get_name (var));
12228 gimple_add_tmp_var (v);
12229 TREE_ADDRESSABLE (v) = 1;
12230 gimple_seq_add_stmt (&assign_body,
12231 gimple_build_assign (v, var));
12232 tree rhs = build_fold_addr_expr (v);
12233 gimple_seq_add_stmt (&assign_body,
12234 gimple_build_assign (new_var, rhs));
12235 }
12236 else
12237 gimple_seq_add_stmt (&assign_body,
12238 gimple_build_assign (new_var, var));
12239
12240 tree v2 = lang_hooks.decls.omp_array_data (unshare_expr (v), false);
12241 gcc_assert (v2);
12242 gimplify_expr (&x, &assign_body, NULL, is_gimple_val, fb_rvalue);
12243 gimple_seq_add_stmt (&assign_body,
12244 gimple_build_assign (v2, x));
12245 }
12246 else if (is_variable_sized (var))
12247 {
12248 tree pvar = DECL_VALUE_EXPR (var);
12249 gcc_assert (TREE_CODE (pvar) == INDIRECT_REF);
12250 pvar = TREE_OPERAND (pvar, 0);
12251 gcc_assert (DECL_P (pvar));
12252 new_var = lookup_decl (pvar, ctx);
12253 gimplify_expr (&x, &assign_body, NULL, is_gimple_val, fb_rvalue);
12254 gimple_seq_add_stmt (&assign_body,
12255 gimple_build_assign (new_var, x));
12256 }
12257 else if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_USE_DEVICE_ADDR
12258 && !omp_is_reference (var)
12259 && !omp_is_allocatable_or_ptr (var))
12260 || TREE_CODE (TREE_TYPE (var)) == ARRAY_TYPE)
12261 {
12262 new_var = lookup_decl (var, ctx);
12263 new_var = DECL_VALUE_EXPR (new_var);
12264 gcc_assert (TREE_CODE (new_var) == MEM_REF);
12265 new_var = TREE_OPERAND (new_var, 0);
12266 gcc_assert (DECL_P (new_var));
12267 gimplify_expr (&x, &assign_body, NULL, is_gimple_val, fb_rvalue);
12268 gimple_seq_add_stmt (&assign_body,
12269 gimple_build_assign (new_var, x));
12270 }
12271 else
12272 {
12273 tree type = TREE_TYPE (var);
12274 new_var = lookup_decl (var, ctx);
12275 if (omp_is_reference (var))
12276 {
12277 type = TREE_TYPE (type);
12278 if (TREE_CODE (type) != ARRAY_TYPE
12279 && (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_USE_DEVICE_ADDR
12280 || (omp_is_reference (var)
12281 && omp_is_allocatable_or_ptr (var))))
12282 {
12283 tree v = create_tmp_var_raw (type, get_name (var));
12284 gimple_add_tmp_var (v);
12285 TREE_ADDRESSABLE (v) = 1;
12286 x = fold_convert (type, x);
12287 gimplify_expr (&x, &assign_body, NULL, is_gimple_val,
12288 fb_rvalue);
12289 gimple_seq_add_stmt (&assign_body,
12290 gimple_build_assign (v, x));
12291 x = build_fold_addr_expr (v);
12292 do_optional_check = true;
12293 }
12294 }
12295 new_var = DECL_VALUE_EXPR (new_var);
12296 x = fold_convert (TREE_TYPE (new_var), x);
12297 gimplify_expr (&x, &assign_body, NULL, is_gimple_val, fb_rvalue);
12298 gimple_seq_add_stmt (&assign_body,
12299 gimple_build_assign (new_var, x));
12300 }
12301 tree present;
12302 present = (do_optional_check
12303 ? omp_check_optional_argument (OMP_CLAUSE_DECL (c), true)
12304 : NULL_TREE);
12305 if (present)
12306 {
12307 tree null_label = create_artificial_label (UNKNOWN_LOCATION);
12308 tree notnull_label = create_artificial_label (UNKNOWN_LOCATION);
12309 tree opt_arg_label = create_artificial_label (UNKNOWN_LOCATION);
12310 glabel *null_glabel = gimple_build_label (null_label);
12311 glabel *notnull_glabel = gimple_build_label (notnull_label);
12312 ggoto *opt_arg_ggoto = gimple_build_goto (opt_arg_label);
12313 gimplify_expr (&x, &new_body, NULL, is_gimple_val,
12314 fb_rvalue);
12315 gimplify_expr (&present, &new_body, NULL, is_gimple_val,
12316 fb_rvalue);
12317 gcond *cond = gimple_build_cond_from_tree (present,
12318 notnull_label,
12319 null_label);
12320 gimple_seq_add_stmt (&new_body, cond);
12321 gimple_seq_add_stmt (&new_body, null_glabel);
12322 gimplify_assign (new_var, null_pointer_node, &new_body);
12323 gimple_seq_add_stmt (&new_body, opt_arg_ggoto);
12324 gimple_seq_add_stmt (&new_body, notnull_glabel);
12325 gimple_seq_add_seq (&new_body, assign_body);
12326 gimple_seq_add_stmt (&new_body,
12327 gimple_build_label (opt_arg_label));
12328 }
12329 else
12330 gimple_seq_add_seq (&new_body, assign_body);
12331 break;
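	  /* For illustration (a sketch): given

	       #pragma omp target data map(to: v[:n]) use_device_ptr(v)

	     the code above makes v evaluate to the corresponding device
	     pointer inside the region; the optional-argument branch
	     additionally assigns NULL when a Fortran OPTIONAL argument
	     is absent.  */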
12332 }
12333     /* Handle GOMP_MAP_FIRSTPRIVATE_{POINTER,REFERENCE} in a second pass,
12334        so that the firstprivate vars holding OMP_CLAUSE_SIZE, where needed,
12335        are already handled.  Similarly, handle OMP_CLAUSE_PRIVATE for VLAs
12336        or references to VLAs here.  */
12337 for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
12338 switch (OMP_CLAUSE_CODE (c))
12339 {
12340 tree var;
12341 default:
12342 break;
12343 case OMP_CLAUSE_MAP:
12344 if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER
12345 || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_REFERENCE)
12346 {
12347 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
12348 poly_int64 offset = 0;
12349 gcc_assert (prev);
12350 var = OMP_CLAUSE_DECL (c);
12351 if (DECL_P (var)
12352 && TREE_CODE (TREE_TYPE (var)) == ARRAY_TYPE
12353 && is_global_var (maybe_lookup_decl_in_outer_ctx (var,
12354 ctx))
12355 && varpool_node::get_create (var)->offloadable)
12356 break;
12357 if (TREE_CODE (var) == INDIRECT_REF
12358 && TREE_CODE (TREE_OPERAND (var, 0)) == COMPONENT_REF)
12359 var = TREE_OPERAND (var, 0);
12360 if (TREE_CODE (var) == COMPONENT_REF)
12361 {
12362 var = get_addr_base_and_unit_offset (var, &offset);
12363 gcc_assert (var != NULL_TREE && DECL_P (var));
12364 }
12365 else if (DECL_SIZE (var)
12366 && TREE_CODE (DECL_SIZE (var)) != INTEGER_CST)
12367 {
12368 tree var2 = DECL_VALUE_EXPR (var);
12369 gcc_assert (TREE_CODE (var2) == INDIRECT_REF);
12370 var2 = TREE_OPERAND (var2, 0);
12371 gcc_assert (DECL_P (var2));
12372 var = var2;
12373 }
12374 tree new_var = lookup_decl (var, ctx), x;
12375 tree type = TREE_TYPE (new_var);
12376 bool is_ref;
12377 if (TREE_CODE (OMP_CLAUSE_DECL (c)) == INDIRECT_REF
12378 && (TREE_CODE (TREE_OPERAND (OMP_CLAUSE_DECL (c), 0))
12379 == COMPONENT_REF))
12380 {
12381 type = TREE_TYPE (TREE_OPERAND (OMP_CLAUSE_DECL (c), 0));
12382 is_ref = true;
12383 new_var = build2 (MEM_REF, type,
12384 build_fold_addr_expr (new_var),
12385 build_int_cst (build_pointer_type (type),
12386 offset));
12387 }
12388 else if (TREE_CODE (OMP_CLAUSE_DECL (c)) == COMPONENT_REF)
12389 {
12390 type = TREE_TYPE (OMP_CLAUSE_DECL (c));
12391 is_ref = TREE_CODE (type) == REFERENCE_TYPE;
12392 new_var = build2 (MEM_REF, type,
12393 build_fold_addr_expr (new_var),
12394 build_int_cst (build_pointer_type (type),
12395 offset));
12396 }
12397 else
12398 is_ref = omp_is_reference (var);
12399 if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_REFERENCE)
12400 is_ref = false;
12401 bool ref_to_array = false;
12402 if (is_ref)
12403 {
12404 type = TREE_TYPE (type);
12405 if (TREE_CODE (type) == ARRAY_TYPE)
12406 {
12407 type = build_pointer_type (type);
12408 ref_to_array = true;
12409 }
12410 }
12411 else if (TREE_CODE (type) == ARRAY_TYPE)
12412 {
12413 tree decl2 = DECL_VALUE_EXPR (new_var);
12414 gcc_assert (TREE_CODE (decl2) == MEM_REF);
12415 decl2 = TREE_OPERAND (decl2, 0);
12416 gcc_assert (DECL_P (decl2));
12417 new_var = decl2;
12418 type = TREE_TYPE (new_var);
12419 }
12420 x = build_receiver_ref (OMP_CLAUSE_DECL (prev), false, ctx);
12421 x = fold_convert_loc (clause_loc, type, x);
12422 if (!integer_zerop (OMP_CLAUSE_SIZE (c)))
12423 {
12424 tree bias = OMP_CLAUSE_SIZE (c);
12425 if (DECL_P (bias))
12426 bias = lookup_decl (bias, ctx);
12427 bias = fold_convert_loc (clause_loc, sizetype, bias);
12428 bias = fold_build1_loc (clause_loc, NEGATE_EXPR, sizetype,
12429 bias);
12430 x = fold_build2_loc (clause_loc, POINTER_PLUS_EXPR,
12431 TREE_TYPE (x), x, bias);
12432 }
12433 if (ref_to_array)
12434 x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x);
12435 gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
12436 if (is_ref && !ref_to_array)
12437 {
12438 tree t = create_tmp_var_raw (type, get_name (var));
12439 gimple_add_tmp_var (t);
12440 TREE_ADDRESSABLE (t) = 1;
12441 gimple_seq_add_stmt (&new_body,
12442 gimple_build_assign (t, x));
12443 x = build_fold_addr_expr_loc (clause_loc, t);
12444 }
12445 gimple_seq_add_stmt (&new_body,
12446 gimple_build_assign (new_var, x));
12447 prev = NULL_TREE;
12448 }
12449 else if (OMP_CLAUSE_CHAIN (c)
12450 && OMP_CLAUSE_CODE (OMP_CLAUSE_CHAIN (c))
12451 == OMP_CLAUSE_MAP
12452 && (OMP_CLAUSE_MAP_KIND (OMP_CLAUSE_CHAIN (c))
12453 == GOMP_MAP_FIRSTPRIVATE_POINTER
12454 || (OMP_CLAUSE_MAP_KIND (OMP_CLAUSE_CHAIN (c))
12455 == GOMP_MAP_FIRSTPRIVATE_REFERENCE)))
12456 prev = c;
12457 break;
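	  /* A sketch of the effect: for "#pragma omp target map(p[10:20])"
	     with a pointer p, the GOMP_MAP_FIRSTPRIVATE_POINTER entry
	     following the data mapping is lowered above to roughly

	       p = (T *) ((char *) <receiver ref> - bias);

	     where <receiver ref> is the target address of p[10] and bias
	     the byte offset of p[10] from p, so that the private copy of p
	     again points at element 0.  */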
12458 case OMP_CLAUSE_PRIVATE:
12459 var = OMP_CLAUSE_DECL (c);
12460 if (is_variable_sized (var))
12461 {
12462 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
12463 tree new_var = lookup_decl (var, ctx);
12464 tree pvar = DECL_VALUE_EXPR (var);
12465 gcc_assert (TREE_CODE (pvar) == INDIRECT_REF);
12466 pvar = TREE_OPERAND (pvar, 0);
12467 gcc_assert (DECL_P (pvar));
12468 tree new_pvar = lookup_decl (pvar, ctx);
12469 tree atmp = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
12470 tree al = size_int (DECL_ALIGN (var));
12471 tree x = TYPE_SIZE_UNIT (TREE_TYPE (new_var));
12472 x = build_call_expr_loc (clause_loc, atmp, 2, x, al);
12473 x = fold_convert_loc (clause_loc, TREE_TYPE (new_pvar), x);
12474 gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
12475 gimple_seq_add_stmt (&new_body,
12476 gimple_build_assign (new_pvar, x));
12477 }
12478 else if (omp_is_reference (var) && !is_gimple_omp_oacc (ctx->stmt))
12479 {
12480 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
12481 tree new_var = lookup_decl (var, ctx);
12482 tree x = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (new_var)));
12483 if (TREE_CONSTANT (x))
12484 break;
12485 else
12486 {
12487 tree atmp
12488 = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
12489 tree rtype = TREE_TYPE (TREE_TYPE (new_var));
12490 tree al = size_int (TYPE_ALIGN (rtype));
12491 x = build_call_expr_loc (clause_loc, atmp, 2, x, al);
12492 }
12493
12494 x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x);
12495 gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
12496 gimple_seq_add_stmt (&new_body,
12497 gimple_build_assign (new_var, x));
12498 }
12499 break;
12500 }
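      /* For the OMP_CLAUSE_PRIVATE cases just above, the effect is roughly
	 (a sketch): given "int b[n]" privatized in the region, the pointer
	 behind b's DECL_VALUE_EXPR is seeded with stack storage,

	   <b's pointer> = __builtin_alloca_with_align (<size of b>,
							DECL_ALIGN (b));

	 and a non-constant-size private reference likewise gets its
	 backing storage from alloca.  */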
12501
12502 gimple_seq fork_seq = NULL;
12503 gimple_seq join_seq = NULL;
12504
12505 if (is_oacc_parallel_or_serial (ctx))
12506 {
12507 /* If there are reductions on the offloaded region itself, treat
12508 them as a dummy GANG loop. */
12509 tree level = build_int_cst (integer_type_node, GOMP_DIM_GANG);
12510
12511 lower_oacc_reductions (gimple_location (ctx->stmt), clauses, level,
12512 false, NULL, NULL, &fork_seq, &join_seq, ctx);
12513 }
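      /* E.g. for "#pragma acc parallel reduction(+:sum)" there is no
	 enclosing loop construct to attach the reduction to, so it is
	 handled here as if it appeared on a gang-level loop.  */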
12514
12515 gimple_seq_add_seq (&new_body, fork_seq);
12516 gimple_seq_add_seq (&new_body, tgt_body);
12517 gimple_seq_add_seq (&new_body, join_seq);
12518
12519 if (offloaded)
12520 new_body = maybe_catch_exception (new_body);
12521
12522 gimple_seq_add_stmt (&new_body, gimple_build_omp_return (false));
12523 gimple_omp_set_body (stmt, new_body);
12524 }
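  /* At this point, the new body of an offloaded region has the shape
     (a sketch):

       .omp_data_i = &<sender struct>;
       <firstprivate / private / use_device pointer setup from above>;
       <fork>  <original target body>  <join>
       OMP_RETURN

     with the body wrapped in an exception cleanup when offloaded.  */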
12525
12526 bind = gimple_build_bind (NULL, NULL,
12527 tgt_bind ? gimple_bind_block (tgt_bind)
12528 : NULL_TREE);
12529 gsi_replace (gsi_p, dep_bind ? dep_bind : bind, true);
12530 gimple_bind_add_seq (bind, ilist);
12531 gimple_bind_add_stmt (bind, stmt);
12532 gimple_bind_add_seq (bind, olist);
12533
12534 pop_gimplify_context (NULL);
12535
12536 if (dep_bind)
12537 {
12538 gimple_bind_add_seq (dep_bind, dep_ilist);
12539 gimple_bind_add_stmt (dep_bind, bind);
12540 gimple_bind_add_seq (dep_bind, dep_olist);
12541 pop_gimplify_context (dep_bind);
12542 }
12543 }
12544
12545 /* Expand code for an OpenMP teams directive. */
12546
12547 static void
12548 lower_omp_teams (gimple_stmt_iterator *gsi_p, omp_context *ctx)
12549 {
12550 gomp_teams *teams_stmt = as_a <gomp_teams *> (gsi_stmt (*gsi_p));
12551 push_gimplify_context ();
12552
12553 tree block = make_node (BLOCK);
12554 gbind *bind = gimple_build_bind (NULL, NULL, block);
12555 gsi_replace (gsi_p, bind, true);
12556 gimple_seq bind_body = NULL;
12557 gimple_seq dlist = NULL;
12558 gimple_seq olist = NULL;
12559
12560 tree num_teams = omp_find_clause (gimple_omp_teams_clauses (teams_stmt),
12561 OMP_CLAUSE_NUM_TEAMS);
12562 if (num_teams == NULL_TREE)
12563 num_teams = build_int_cst (unsigned_type_node, 0);
12564 else
12565 {
12566 num_teams = OMP_CLAUSE_NUM_TEAMS_EXPR (num_teams);
12567 num_teams = fold_convert (unsigned_type_node, num_teams);
12568 gimplify_expr (&num_teams, &bind_body, NULL, is_gimple_val, fb_rvalue);
12569 }
12570 tree thread_limit = omp_find_clause (gimple_omp_teams_clauses (teams_stmt),
12571 OMP_CLAUSE_THREAD_LIMIT);
12572 if (thread_limit == NULL_TREE)
12573 thread_limit = build_int_cst (unsigned_type_node, 0);
12574 else
12575 {
12576 thread_limit = OMP_CLAUSE_THREAD_LIMIT_EXPR (thread_limit);
12577 thread_limit = fold_convert (unsigned_type_node, thread_limit);
12578 gimplify_expr (&thread_limit, &bind_body, NULL, is_gimple_val,
12579 fb_rvalue);
12580 }
12581
12582 lower_rec_input_clauses (gimple_omp_teams_clauses (teams_stmt),
12583 &bind_body, &dlist, ctx, NULL);
12584 lower_omp (gimple_omp_body_ptr (teams_stmt), ctx);
12585 lower_reduction_clauses (gimple_omp_teams_clauses (teams_stmt), &olist,
12586 NULL, ctx);
12587 gimple_seq_add_stmt (&bind_body, teams_stmt);
12588
12589 location_t loc = gimple_location (teams_stmt);
12590 tree decl = builtin_decl_explicit (BUILT_IN_GOMP_TEAMS);
12591 gimple *call = gimple_build_call (decl, 2, num_teams, thread_limit);
12592 gimple_set_location (call, loc);
12593 gimple_seq_add_stmt (&bind_body, call);
12594
12595 gimple_seq_add_seq (&bind_body, gimple_omp_body (teams_stmt));
12596 gimple_omp_set_body (teams_stmt, NULL);
12597 gimple_seq_add_seq (&bind_body, olist);
12598 gimple_seq_add_seq (&bind_body, dlist);
12599 gimple_seq_add_stmt (&bind_body, gimple_build_omp_return (true));
12600 gimple_bind_set_body (bind, bind_body);
12601
12602 pop_gimplify_context (bind);
12603
12604 gimple_bind_append_vars (bind, ctx->block_vars);
12605 BLOCK_VARS (block) = ctx->block_vars;
12606 if (BLOCK_VARS (block))
12607 TREE_USED (block) = 1;
12608 }
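/* Schematically, the lowering above turns

     #pragma omp teams num_teams(N) thread_limit(M)
     <body>

   into (a sketch)

     __builtin_GOMP_teams (N, M);
     <body>
     OMP_RETURN

   inside a new GIMPLE_BIND, passing 0 for either argument whose clause
   is absent.  */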
12609
12610 /* Callback for lower_omp_1.  Return non-NULL if *tp needs to be
12611    regimplified.  If DATA is non-NULL, lower_omp_1 is being invoked
12612    outside of an OMP context, but with task_shared_vars set.  */
12613
12614 static tree
12615 lower_omp_regimplify_p (tree *tp, int *walk_subtrees,
12616 void *data)
12617 {
12618 tree t = *tp;
12619
12620 /* Any variable with DECL_VALUE_EXPR needs to be regimplified. */
12621 if (VAR_P (t) && data == NULL && DECL_HAS_VALUE_EXPR_P (t))
12622 return t;
12623
12624 if (task_shared_vars
12625 && DECL_P (t)
12626 && bitmap_bit_p (task_shared_vars, DECL_UID (t)))
12627 return t;
12628
12629 /* If a global variable has been privatized, TREE_CONSTANT on
12630 ADDR_EXPR might be wrong. */
12631 if (data == NULL && TREE_CODE (t) == ADDR_EXPR)
12632 recompute_tree_invariant_for_addr_expr (t);
12633
12634 *walk_subtrees = !IS_TYPE_OR_DECL_P (t);
12635 return NULL_TREE;
12636 }
12637
12638 /* Data to be communicated between lower_omp_regimplify_operands and
12639 lower_omp_regimplify_operands_p. */
12640
12641 struct lower_omp_regimplify_operands_data
12642 {
12643 omp_context *ctx;
12644 vec<tree> *decls;
12645 };
12646
12647 /* Helper function for lower_omp_regimplify_operands.  Find
12648    omp_member_access_dummy_var vars and temporarily adjust their
12649    DECL_VALUE_EXPRs if needed.  */
12650
12651 static tree
12652 lower_omp_regimplify_operands_p (tree *tp, int *walk_subtrees,
12653 void *data)
12654 {
12655 tree t = omp_member_access_dummy_var (*tp);
12656 if (t)
12657 {
12658 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
12659 lower_omp_regimplify_operands_data *ldata
12660 = (lower_omp_regimplify_operands_data *) wi->info;
12661 tree o = maybe_lookup_decl (t, ldata->ctx);
12662 if (o != t)
12663 {
12664 ldata->decls->safe_push (DECL_VALUE_EXPR (*tp));
12665 ldata->decls->safe_push (*tp);
12666 tree v = unshare_and_remap (DECL_VALUE_EXPR (*tp), t, o);
12667 SET_DECL_VALUE_EXPR (*tp, v);
12668 }
12669 }
12670 *walk_subtrees = !IS_TYPE_OR_DECL_P (*tp);
12671 return NULL_TREE;
12672 }
12673
12674 /* Wrapper around gimple_regimplify_operands that adjusts DECL_VALUE_EXPRs
12675 of omp_member_access_dummy_var vars during regimplification. */
12676
12677 static void
12678 lower_omp_regimplify_operands (omp_context *ctx, gimple *stmt,
12679 gimple_stmt_iterator *gsi_p)
12680 {
12681 auto_vec<tree, 10> decls;
12682 if (ctx)
12683 {
12684 struct walk_stmt_info wi;
12685 memset (&wi, '\0', sizeof (wi));
12686 struct lower_omp_regimplify_operands_data data;
12687 data.ctx = ctx;
12688 data.decls = &decls;
12689 wi.info = &data;
12690 walk_gimple_op (stmt, lower_omp_regimplify_operands_p, &wi);
12691 }
12692 gimple_regimplify_operands (stmt, gsi_p);
12693 while (!decls.is_empty ())
12694 {
12695 tree t = decls.pop ();
12696 tree v = decls.pop ();
12697 SET_DECL_VALUE_EXPR (t, v);
12698 }
12699 }
12700
12701 static void
12702 lower_omp_1 (gimple_stmt_iterator *gsi_p, omp_context *ctx)
12703 {
12704 gimple *stmt = gsi_stmt (*gsi_p);
12705 struct walk_stmt_info wi;
12706 gcall *call_stmt;
12707
12708 if (gimple_has_location (stmt))
12709 input_location = gimple_location (stmt);
12710
12711 if (task_shared_vars)
12712 memset (&wi, '\0', sizeof (wi));
12713
12714 /* If we have issued syntax errors, avoid doing any heavy lifting.
12715 Just replace the OMP directives with a NOP to avoid
12716 confusing RTL expansion. */
12717 if (seen_error () && is_gimple_omp (stmt))
12718 {
12719 gsi_replace (gsi_p, gimple_build_nop (), true);
12720 return;
12721 }
12722
12723 switch (gimple_code (stmt))
12724 {
12725 case GIMPLE_COND:
12726 {
12727 gcond *cond_stmt = as_a <gcond *> (stmt);
12728 if ((ctx || task_shared_vars)
12729 && (walk_tree (gimple_cond_lhs_ptr (cond_stmt),
12730 lower_omp_regimplify_p,
12731 ctx ? NULL : &wi, NULL)
12732 || walk_tree (gimple_cond_rhs_ptr (cond_stmt),
12733 lower_omp_regimplify_p,
12734 ctx ? NULL : &wi, NULL)))
12735 lower_omp_regimplify_operands (ctx, cond_stmt, gsi_p);
12736 }
12737 break;
12738 case GIMPLE_CATCH:
12739 lower_omp (gimple_catch_handler_ptr (as_a <gcatch *> (stmt)), ctx);
12740 break;
12741 case GIMPLE_EH_FILTER:
12742 lower_omp (gimple_eh_filter_failure_ptr (stmt), ctx);
12743 break;
12744 case GIMPLE_TRY:
12745 lower_omp (gimple_try_eval_ptr (stmt), ctx);
12746 lower_omp (gimple_try_cleanup_ptr (stmt), ctx);
12747 break;
12748 case GIMPLE_TRANSACTION:
12749 lower_omp (gimple_transaction_body_ptr (as_a <gtransaction *> (stmt)),
12750 ctx);
12751 break;
12752 case GIMPLE_BIND:
12753 lower_omp (gimple_bind_body_ptr (as_a <gbind *> (stmt)), ctx);
12754 maybe_remove_omp_member_access_dummy_vars (as_a <gbind *> (stmt));
12755 break;
12756 case GIMPLE_OMP_PARALLEL:
12757 case GIMPLE_OMP_TASK:
12758 ctx = maybe_lookup_ctx (stmt);
12759 gcc_assert (ctx);
12760 if (ctx->cancellable)
12761 ctx->cancel_label = create_artificial_label (UNKNOWN_LOCATION);
12762 lower_omp_taskreg (gsi_p, ctx);
12763 break;
12764 case GIMPLE_OMP_FOR:
12765 ctx = maybe_lookup_ctx (stmt);
12766 gcc_assert (ctx);
12767 if (ctx->cancellable)
12768 ctx->cancel_label = create_artificial_label (UNKNOWN_LOCATION);
12769 lower_omp_for (gsi_p, ctx);
12770 break;
12771 case GIMPLE_OMP_SECTIONS:
12772 ctx = maybe_lookup_ctx (stmt);
12773 gcc_assert (ctx);
12774 if (ctx->cancellable)
12775 ctx->cancel_label = create_artificial_label (UNKNOWN_LOCATION);
12776 lower_omp_sections (gsi_p, ctx);
12777 break;
12778 case GIMPLE_OMP_SINGLE:
12779 ctx = maybe_lookup_ctx (stmt);
12780 gcc_assert (ctx);
12781 lower_omp_single (gsi_p, ctx);
12782 break;
12783 case GIMPLE_OMP_MASTER:
12784 ctx = maybe_lookup_ctx (stmt);
12785 gcc_assert (ctx);
12786 lower_omp_master (gsi_p, ctx);
12787 break;
12788 case GIMPLE_OMP_TASKGROUP:
12789 ctx = maybe_lookup_ctx (stmt);
12790 gcc_assert (ctx);
12791 lower_omp_taskgroup (gsi_p, ctx);
12792 break;
12793 case GIMPLE_OMP_ORDERED:
12794 ctx = maybe_lookup_ctx (stmt);
12795 gcc_assert (ctx);
12796 lower_omp_ordered (gsi_p, ctx);
12797 break;
12798 case GIMPLE_OMP_SCAN:
12799 ctx = maybe_lookup_ctx (stmt);
12800 gcc_assert (ctx);
12801 lower_omp_scan (gsi_p, ctx);
12802 break;
12803 case GIMPLE_OMP_CRITICAL:
12804 ctx = maybe_lookup_ctx (stmt);
12805 gcc_assert (ctx);
12806 lower_omp_critical (gsi_p, ctx);
12807 break;
12808 case GIMPLE_OMP_ATOMIC_LOAD:
12809 if ((ctx || task_shared_vars)
12810 && walk_tree (gimple_omp_atomic_load_rhs_ptr (
12811 as_a <gomp_atomic_load *> (stmt)),
12812 lower_omp_regimplify_p, ctx ? NULL : &wi, NULL))
12813 lower_omp_regimplify_operands (ctx, stmt, gsi_p);
12814 break;
12815 case GIMPLE_OMP_TARGET:
12816 ctx = maybe_lookup_ctx (stmt);
12817 gcc_assert (ctx);
12818 lower_omp_target (gsi_p, ctx);
12819 break;
12820 case GIMPLE_OMP_TEAMS:
12821 ctx = maybe_lookup_ctx (stmt);
12822 gcc_assert (ctx);
12823 if (gimple_omp_teams_host (as_a <gomp_teams *> (stmt)))
12824 lower_omp_taskreg (gsi_p, ctx);
12825 else
12826 lower_omp_teams (gsi_p, ctx);
12827 break;
12828 case GIMPLE_CALL:
12829 tree fndecl;
12830 call_stmt = as_a <gcall *> (stmt);
12831 fndecl = gimple_call_fndecl (call_stmt);
12832 if (fndecl
12833 && fndecl_built_in_p (fndecl, BUILT_IN_NORMAL))
12834 switch (DECL_FUNCTION_CODE (fndecl))
12835 {
12836 case BUILT_IN_GOMP_BARRIER:
12837 if (ctx == NULL)
12838 break;
12839 /* FALLTHRU */
12840 case BUILT_IN_GOMP_CANCEL:
12841 case BUILT_IN_GOMP_CANCELLATION_POINT:
12842 omp_context *cctx;
12843 cctx = ctx;
12844 if (gimple_code (cctx->stmt) == GIMPLE_OMP_SECTION)
12845 cctx = cctx->outer;
12846 gcc_assert (gimple_call_lhs (call_stmt) == NULL_TREE);
12847 if (!cctx->cancellable)
12848 {
12849 if (DECL_FUNCTION_CODE (fndecl)
12850 == BUILT_IN_GOMP_CANCELLATION_POINT)
12851 {
12852 stmt = gimple_build_nop ();
12853 gsi_replace (gsi_p, stmt, false);
12854 }
12855 break;
12856 }
12857 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_GOMP_BARRIER)
12858 {
12859 fndecl = builtin_decl_explicit (BUILT_IN_GOMP_BARRIER_CANCEL);
12860 gimple_call_set_fndecl (call_stmt, fndecl);
12861 gimple_call_set_fntype (call_stmt, TREE_TYPE (fndecl));
12862 }
12863 tree lhs;
12864 lhs = create_tmp_var (TREE_TYPE (TREE_TYPE (fndecl)));
12865 gimple_call_set_lhs (call_stmt, lhs);
12866 tree fallthru_label;
12867 fallthru_label = create_artificial_label (UNKNOWN_LOCATION);
12868 gimple *g;
12869 g = gimple_build_label (fallthru_label);
12870 gsi_insert_after (gsi_p, g, GSI_SAME_STMT);
12871 g = gimple_build_cond (NE_EXPR, lhs,
12872 fold_convert (TREE_TYPE (lhs),
12873 boolean_false_node),
12874 cctx->cancel_label, fallthru_label);
12875 gsi_insert_after (gsi_p, g, GSI_SAME_STMT);
12876 break;
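	  /* Schematically, the code above turns a barrier in a cancellable
	     region into (a sketch)

	       ret = __builtin_GOMP_barrier_cancel ();
	       if (ret != 0) goto <cancel_label>;
	       <fallthru_label>:;

	     and adds the same conditional branch after GOMP_cancel and
	     GOMP_cancellation_point calls.  */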
12877 default:
12878 break;
12879 }
12880 goto regimplify;
12881
12882 case GIMPLE_ASSIGN:
12883 for (omp_context *up = ctx; up; up = up->outer)
12884 {
12885 if (gimple_code (up->stmt) == GIMPLE_OMP_ORDERED
12886 || gimple_code (up->stmt) == GIMPLE_OMP_CRITICAL
12887 || gimple_code (up->stmt) == GIMPLE_OMP_TASKGROUP
12888 || gimple_code (up->stmt) == GIMPLE_OMP_SECTION
12889 || gimple_code (up->stmt) == GIMPLE_OMP_SCAN
12890 || (gimple_code (up->stmt) == GIMPLE_OMP_TARGET
12891 && (gimple_omp_target_kind (up->stmt)
12892 == GF_OMP_TARGET_KIND_DATA)))
12893 continue;
12894 else if (!up->lastprivate_conditional_map)
12895 break;
12896 tree lhs = get_base_address (gimple_assign_lhs (stmt));
12897 if (TREE_CODE (lhs) == MEM_REF
12898 && DECL_P (TREE_OPERAND (lhs, 0))
12899 && TREE_CODE (TREE_TYPE (TREE_OPERAND (lhs,
12900 0))) == REFERENCE_TYPE)
12901 lhs = TREE_OPERAND (lhs, 0);
12902 if (DECL_P (lhs))
12903 if (tree *v = up->lastprivate_conditional_map->get (lhs))
12904 {
12905 tree clauses;
12906 if (up->combined_into_simd_safelen1)
12907 {
12908 up = up->outer;
12909 if (gimple_code (up->stmt) == GIMPLE_OMP_SCAN)
12910 up = up->outer;
12911 }
12912 if (gimple_code (up->stmt) == GIMPLE_OMP_FOR)
12913 clauses = gimple_omp_for_clauses (up->stmt);
12914 else
12915 clauses = gimple_omp_sections_clauses (up->stmt);
12916 tree c = omp_find_clause (clauses, OMP_CLAUSE__CONDTEMP_);
12917 if (!OMP_CLAUSE__CONDTEMP__ITER (c))
12918 c = omp_find_clause (OMP_CLAUSE_CHAIN (c),
12919 OMP_CLAUSE__CONDTEMP_);
12920 gcc_assert (OMP_CLAUSE__CONDTEMP__ITER (c));
12921 gimple *g = gimple_build_assign (*v, OMP_CLAUSE_DECL (c));
12922 gsi_insert_after (gsi_p, g, GSI_SAME_STMT);
12923 }
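	    /* A sketch of the effect: for
	       "#pragma omp for lastprivate(conditional: x)" each store to x
	       in the construct is followed by

		 <condtemp> = <iteration counter from the _condtemp_ clause>;

	       so the lowering of the construct can later determine which
	       logical iteration performed the last assignment to x.  */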
12924 }
12925 /* FALLTHRU */
12926
12927 default:
12928 regimplify:
12929 if ((ctx || task_shared_vars)
12930 && walk_gimple_op (stmt, lower_omp_regimplify_p,
12931 ctx ? NULL : &wi))
12932 {
12933 	  /* Just remove clobbers; this should happen only if we have
12934 	     "privatized" local addressable variables in SIMD regions.
12935 	     The clobber isn't needed in that case, and gimplifying the address
12936 	     of the ARRAY_REF into a pointer and creating a MEM_REF based
12937 	     clobber would create worse code than we get with the clobber
12938 	     dropped.  */
12939 if (gimple_clobber_p (stmt))
12940 {
12941 gsi_replace (gsi_p, gimple_build_nop (), true);
12942 break;
12943 }
12944 lower_omp_regimplify_operands (ctx, stmt, gsi_p);
12945 }
12946 break;
12947 }
12948 }
12949
12950 static void
12951 lower_omp (gimple_seq *body, omp_context *ctx)
12952 {
12953 location_t saved_location = input_location;
12954 gimple_stmt_iterator gsi;
12955 for (gsi = gsi_start (*body); !gsi_end_p (gsi); gsi_next (&gsi))
12956 lower_omp_1 (&gsi, ctx);
12957   /* During gimplification, we haven't folded statements inside offloading
12958 or taskreg regions (gimplify.c:maybe_fold_stmt); do that now. */
12959 if (target_nesting_level || taskreg_nesting_level)
12960 for (gsi = gsi_start (*body); !gsi_end_p (gsi); gsi_next (&gsi))
12961 fold_stmt (&gsi);
12962 input_location = saved_location;
12963 }
12964
12965 /* Main entry point. */
12966
12967 static unsigned int
12968 execute_lower_omp (void)
12969 {
12970 gimple_seq body;
12971 int i;
12972 omp_context *ctx;
12973
12974 /* This pass always runs, to provide PROP_gimple_lomp.
12975 But often, there is nothing to do. */
12976 if (flag_openacc == 0 && flag_openmp == 0
12977 && flag_openmp_simd == 0)
12978 return 0;
12979
12980 all_contexts = splay_tree_new (splay_tree_compare_pointers, 0,
12981 delete_omp_context);
12982
12983 body = gimple_body (current_function_decl);
12984
12985 scan_omp (&body, NULL);
12986 gcc_assert (taskreg_nesting_level == 0);
12987 FOR_EACH_VEC_ELT (taskreg_contexts, i, ctx)
12988 finish_taskreg_scan (ctx);
12989 taskreg_contexts.release ();
12990
12991 if (all_contexts->root)
12992 {
12993 if (task_shared_vars)
12994 push_gimplify_context ();
12995 lower_omp (&body, NULL);
12996 if (task_shared_vars)
12997 pop_gimplify_context (NULL);
12998 }
12999
13000 if (all_contexts)
13001 {
13002 splay_tree_delete (all_contexts);
13003 all_contexts = NULL;
13004 }
13005 BITMAP_FREE (task_shared_vars);
13006 BITMAP_FREE (global_nonaddressable_vars);
13007
13008   /* If the current function is a method, remove the artificial dummy VAR_DECLs
13009      created for non-static data member privatization; they aren't needed for
13010      debug info or anything else, have already been replaced everywhere in the
13011      IL, and cause problems with LTO.  */
13012 if (DECL_ARGUMENTS (current_function_decl)
13013 && DECL_ARTIFICIAL (DECL_ARGUMENTS (current_function_decl))
13014 && (TREE_CODE (TREE_TYPE (DECL_ARGUMENTS (current_function_decl)))
13015 == POINTER_TYPE))
13016 remove_member_access_dummy_vars (DECL_INITIAL (current_function_decl));
13017 return 0;
13018 }
13019
13020 namespace {
13021
13022 const pass_data pass_data_lower_omp =
13023 {
13024 GIMPLE_PASS, /* type */
13025 "omplower", /* name */
13026 OPTGROUP_OMP, /* optinfo_flags */
13027 TV_NONE, /* tv_id */
13028 PROP_gimple_any, /* properties_required */
13029 PROP_gimple_lomp | PROP_gimple_lomp_dev, /* properties_provided */
13030 0, /* properties_destroyed */
13031 0, /* todo_flags_start */
13032 0, /* todo_flags_finish */
13033 };
13034
13035 class pass_lower_omp : public gimple_opt_pass
13036 {
13037 public:
13038 pass_lower_omp (gcc::context *ctxt)
13039 : gimple_opt_pass (pass_data_lower_omp, ctxt)
13040 {}
13041
13042 /* opt_pass methods: */
13043 virtual unsigned int execute (function *) { return execute_lower_omp (); }
13044
13045 }; // class pass_lower_omp
13046
13047 } // anon namespace
13048
13049 gimple_opt_pass *
13050 make_pass_lower_omp (gcc::context *ctxt)
13051 {
13052 return new pass_lower_omp (ctxt);
13053 }
13054 \f
13055 /* The following is a utility to diagnose structured block violations.
13056 It is not part of the "omplower" pass, as that's invoked too late. It
13057 should be invoked by the respective front ends after gimplification. */
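   For example, both of the following are diagnosed (a sketch):

     goto l;                  invalid entry
     #pragma omp parallel
     { l:; }

   and

     #pragma omp parallel
     { goto l2; }             invalid exit
     l2:;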
13058
13059 static splay_tree all_labels;
13060
13061 /* Check for mismatched contexts and generate an error if needed. Return
13062 true if an error is detected. */
13063
13064 static bool
13065 diagnose_sb_0 (gimple_stmt_iterator *gsi_p,
13066 gimple *branch_ctx, gimple *label_ctx)
13067 {
13068 gcc_checking_assert (!branch_ctx || is_gimple_omp (branch_ctx));
13069 gcc_checking_assert (!label_ctx || is_gimple_omp (label_ctx));
13070
13071 if (label_ctx == branch_ctx)
13072 return false;
13073
13074 const char* kind = NULL;
13075
13076 if (flag_openacc)
13077 {
13078 if ((branch_ctx && is_gimple_omp_oacc (branch_ctx))
13079 || (label_ctx && is_gimple_omp_oacc (label_ctx)))
13080 {
13081 gcc_checking_assert (kind == NULL);
13082 kind = "OpenACC";
13083 }
13084 }
13085 if (kind == NULL)
13086 {
13087 gcc_checking_assert (flag_openmp || flag_openmp_simd);
13088 kind = "OpenMP";
13089 }
13090
13091 /* Previously we kept track of the label's entire context in diagnose_sb_[12]
13092 so we could traverse it and issue a correct "exit" or "enter" error
13093 message upon a structured block violation.
13094
13095    We built the context by tree_cons'ing up a list, but there is
13096 no easy counterpart in gimple tuples. It seems like far too much work
13097 for issuing exit/enter error messages. If someone really misses the
13098 distinct error message... patches welcome. */
13099
13100 #if 0
13101   /* Try to avoid confusing the user by producing an error message
13102 with correct "exit" or "enter" verbiage. We prefer "exit"
13103 unless we can show that LABEL_CTX is nested within BRANCH_CTX. */
13104 if (branch_ctx == NULL)
13105 exit_p = false;
13106 else
13107 {
13108 while (label_ctx)
13109 {
13110 if (TREE_VALUE (label_ctx) == branch_ctx)
13111 {
13112 exit_p = false;
13113 break;
13114 }
13115 label_ctx = TREE_CHAIN (label_ctx);
13116 }
13117 }
13118
13119 if (exit_p)
13120 error ("invalid exit from %s structured block", kind);
13121 else
13122 error ("invalid entry to %s structured block", kind);
13123 #endif
13124
13125 /* If it's obvious we have an invalid entry, be specific about the error. */
13126 if (branch_ctx == NULL)
13127 error ("invalid entry to %s structured block", kind);
13128 else
13129 {
13130 /* Otherwise, be vague and lazy, but efficient. */
13131 error ("invalid branch to/from %s structured block", kind);
13132 }
13133
13134 gsi_replace (gsi_p, gimple_build_nop (), false);
13135 return true;
13136 }
13137
13138 /* Pass 1: Create a minimal tree of structured blocks, and record
13139 where each label is found. */
13140
13141 static tree
13142 diagnose_sb_1 (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
13143 struct walk_stmt_info *wi)
13144 {
13145 gimple *context = (gimple *) wi->info;
13146 gimple *inner_context;
13147 gimple *stmt = gsi_stmt (*gsi_p);
13148
13149 *handled_ops_p = true;
13150
13151 switch (gimple_code (stmt))
13152 {
13153 WALK_SUBSTMTS;
13154
13155 case GIMPLE_OMP_PARALLEL:
13156 case GIMPLE_OMP_TASK:
13157 case GIMPLE_OMP_SECTIONS:
13158 case GIMPLE_OMP_SINGLE:
13159 case GIMPLE_OMP_SECTION:
13160 case GIMPLE_OMP_MASTER:
13161 case GIMPLE_OMP_ORDERED:
13162 case GIMPLE_OMP_SCAN:
13163 case GIMPLE_OMP_CRITICAL:
13164 case GIMPLE_OMP_TARGET:
13165 case GIMPLE_OMP_TEAMS:
13166 case GIMPLE_OMP_TASKGROUP:
13167 /* The minimal context here is just the current OMP construct. */
13168 inner_context = stmt;
13169 wi->info = inner_context;
13170 walk_gimple_seq (gimple_omp_body (stmt), diagnose_sb_1, NULL, wi);
13171 wi->info = context;
13172 break;
13173
13174 case GIMPLE_OMP_FOR:
13175 inner_context = stmt;
13176 wi->info = inner_context;
13177 /* gimple_omp_for_{index,initial,final} are all DECLs; no need to
13178 walk them. */
13179 walk_gimple_seq (gimple_omp_for_pre_body (stmt),
13180 diagnose_sb_1, NULL, wi);
13181 walk_gimple_seq (gimple_omp_body (stmt), diagnose_sb_1, NULL, wi);
13182 wi->info = context;
13183 break;
13184
13185 case GIMPLE_LABEL:
13186 splay_tree_insert (all_labels,
13187 (splay_tree_key) gimple_label_label (
13188 as_a <glabel *> (stmt)),
13189 (splay_tree_value) context);
13190 break;
13191
13192 default:
13193 break;
13194 }
13195
13196 return NULL_TREE;
13197 }
13198
13199 /* Pass 2: Check each branch and see if its context differs from
13200    the destination label's context.  */
13201
13202 static tree
13203 diagnose_sb_2 (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
13204 struct walk_stmt_info *wi)
13205 {
13206 gimple *context = (gimple *) wi->info;
13207 splay_tree_node n;
13208 gimple *stmt = gsi_stmt (*gsi_p);
13209
13210 *handled_ops_p = true;
13211
13212 switch (gimple_code (stmt))
13213 {
13214 WALK_SUBSTMTS;
13215
13216 case GIMPLE_OMP_PARALLEL:
13217 case GIMPLE_OMP_TASK:
13218 case GIMPLE_OMP_SECTIONS:
13219 case GIMPLE_OMP_SINGLE:
13220 case GIMPLE_OMP_SECTION:
13221 case GIMPLE_OMP_MASTER:
13222 case GIMPLE_OMP_ORDERED:
13223 case GIMPLE_OMP_SCAN:
13224 case GIMPLE_OMP_CRITICAL:
13225 case GIMPLE_OMP_TARGET:
13226 case GIMPLE_OMP_TEAMS:
13227 case GIMPLE_OMP_TASKGROUP:
13228 wi->info = stmt;
13229 walk_gimple_seq_mod (gimple_omp_body_ptr (stmt), diagnose_sb_2, NULL, wi);
13230 wi->info = context;
13231 break;
13232
13233 case GIMPLE_OMP_FOR:
13234 wi->info = stmt;
13235 /* gimple_omp_for_{index,initial,final} are all DECLs; no need to
13236 walk them. */
13237 walk_gimple_seq_mod (gimple_omp_for_pre_body_ptr (stmt),
13238 diagnose_sb_2, NULL, wi);
13239 walk_gimple_seq_mod (gimple_omp_body_ptr (stmt), diagnose_sb_2, NULL, wi);
13240 wi->info = context;
13241 break;
13242
13243 case GIMPLE_COND:
13244 {
13245 gcond *cond_stmt = as_a <gcond *> (stmt);
13246 tree lab = gimple_cond_true_label (cond_stmt);
13247 if (lab)
13248 {
13249 n = splay_tree_lookup (all_labels,
13250 (splay_tree_key) lab);
13251 diagnose_sb_0 (gsi_p, context,
13252 n ? (gimple *) n->value : NULL);
13253 }
13254 lab = gimple_cond_false_label (cond_stmt);
13255 if (lab)
13256 {
13257 n = splay_tree_lookup (all_labels,
13258 (splay_tree_key) lab);
13259 diagnose_sb_0 (gsi_p, context,
13260 n ? (gimple *) n->value : NULL);
13261 }
13262 }
13263 break;
13264
13265 case GIMPLE_GOTO:
13266 {
13267 tree lab = gimple_goto_dest (stmt);
13268 if (TREE_CODE (lab) != LABEL_DECL)
13269 break;
13270
13271 n = splay_tree_lookup (all_labels, (splay_tree_key) lab);
13272 diagnose_sb_0 (gsi_p, context, n ? (gimple *) n->value : NULL);
13273 }
13274 break;
13275
13276 case GIMPLE_SWITCH:
13277 {
13278 gswitch *switch_stmt = as_a <gswitch *> (stmt);
13279 unsigned int i;
13280 for (i = 0; i < gimple_switch_num_labels (switch_stmt); ++i)
13281 {
13282 tree lab = CASE_LABEL (gimple_switch_label (switch_stmt, i));
13283 n = splay_tree_lookup (all_labels, (splay_tree_key) lab);
13284 if (n && diagnose_sb_0 (gsi_p, context, (gimple *) n->value))
13285 break;
13286 }
13287 }
13288 break;
13289
13290 case GIMPLE_RETURN:
13291 diagnose_sb_0 (gsi_p, context, NULL);
13292 break;
13293
13294 default:
13295 break;
13296 }
13297
13298 return NULL_TREE;
13299 }
13300
13301 static unsigned int
13302 diagnose_omp_structured_block_errors (void)
13303 {
13304 struct walk_stmt_info wi;
13305 gimple_seq body = gimple_body (current_function_decl);
13306
13307 all_labels = splay_tree_new (splay_tree_compare_pointers, 0, 0);
13308
13309 memset (&wi, 0, sizeof (wi));
13310 walk_gimple_seq (body, diagnose_sb_1, NULL, &wi);
13311
13312 memset (&wi, 0, sizeof (wi));
13313 wi.want_locations = true;
13314 walk_gimple_seq_mod (&body, diagnose_sb_2, NULL, &wi);
13315
13316 gimple_set_body (current_function_decl, body);
13317
13318 splay_tree_delete (all_labels);
13319 all_labels = NULL;
13320
13321 return 0;
13322 }
13323
13324 namespace {
13325
13326 const pass_data pass_data_diagnose_omp_blocks =
13327 {
13328 GIMPLE_PASS, /* type */
13329 "*diagnose_omp_blocks", /* name */
13330 OPTGROUP_OMP, /* optinfo_flags */
13331 TV_NONE, /* tv_id */
13332 PROP_gimple_any, /* properties_required */
13333 0, /* properties_provided */
13334 0, /* properties_destroyed */
13335 0, /* todo_flags_start */
13336 0, /* todo_flags_finish */
13337 };
13338
13339 class pass_diagnose_omp_blocks : public gimple_opt_pass
13340 {
13341 public:
13342 pass_diagnose_omp_blocks (gcc::context *ctxt)
13343 : gimple_opt_pass (pass_data_diagnose_omp_blocks, ctxt)
13344 {}
13345
13346 /* opt_pass methods: */
13347 virtual bool gate (function *)
13348 {
13349 return flag_openacc || flag_openmp || flag_openmp_simd;
13350 }
13351 virtual unsigned int execute (function *)
13352 {
13353 return diagnose_omp_structured_block_errors ();
13354 }
13355
13356 }; // class pass_diagnose_omp_blocks
13357
13358 } // anon namespace
13359
13360 gimple_opt_pass *
13361 make_pass_diagnose_omp_blocks (gcc::context *ctxt)
13362 {
13363 return new pass_diagnose_omp_blocks (ctxt);
13364 }
13365 \f
13366
13367 #include "gt-omp-low.h"