1 /* OpenMP directive translation -- generate GCC trees from gfc_code.
2 Copyright (C) 2005, 2006, 2007, 2008, 2009, 2010, 2011
3 Free Software Foundation, Inc.
4 Contributed by Jakub Jelinek <jakub@redhat.com>
5
6 This file is part of GCC.
7
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 3, or (at your option) any later
11 version.
12
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
16 for more details.
17
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
21
22
23 #include "config.h"
24 #include "system.h"
25 #include "coretypes.h"
26 #include "tree.h"
27 #include "gimple.h" /* For create_tmp_var_raw. */
28 #include "diagnostic-core.h" /* For internal_error. */
29 #include "gfortran.h"
30 #include "trans.h"
31 #include "trans-stmt.h"
32 #include "trans-types.h"
33 #include "trans-array.h"
34 #include "trans-const.h"
35 #include "arith.h"
36
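/* State flags for the translation of an OpenMP WORKSHARE construct
   (a bitmask of the OMPWS_* values). gfc_trans_omp_workshare sets and
   clears these; the scalarizer inspects them to decide whether the loops
   it generates may be divided among the threads of the team. */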
37 int ompws_flags;
38
39 /* True if OpenMP should privatize what this DECL points to rather
40 than the DECL itself. */
41
42 bool
43 gfc_omp_privatize_by_reference (const_tree decl)
44 {
45 tree type = TREE_TYPE (decl);
46
47 if (TREE_CODE (type) == REFERENCE_TYPE
48 && (!DECL_ARTIFICIAL (decl) || TREE_CODE (decl) == PARM_DECL))
49 return true;
50
51 if (TREE_CODE (type) == POINTER_TYPE)
52 {
53 /* Array POINTER/ALLOCATABLE variables have aggregate types; all user
54 variables that have POINTER_TYPE and don't have GFC_POINTER_TYPE_P
55 set are supposed to be privatized by reference. */
56 if (GFC_POINTER_TYPE_P (type))
57 return false;
58
59 if (!DECL_ARTIFICIAL (decl)
60 && TREE_CODE (TREE_TYPE (type)) != FUNCTION_TYPE)
61 return true;
62
63 /* Some arrays are expanded as DECL_ARTIFICIAL pointers
64 by the frontend. */
65 if (DECL_LANG_SPECIFIC (decl)
66 && GFC_DECL_SAVED_DESCRIPTOR (decl))
67 return true;
68 }
69
70 return false;
71 }
72
73 /* Return the predetermined OpenMP sharing kind of DECL, if any. */
74
75 enum omp_clause_default_kind
76 gfc_omp_predetermined_sharing (tree decl)
77 {
78 if (DECL_ARTIFICIAL (decl)
79 && ! GFC_DECL_RESULT (decl)
80 && ! (DECL_LANG_SPECIFIC (decl)
81 && GFC_DECL_SAVED_DESCRIPTOR (decl)))
82 return OMP_CLAUSE_DEFAULT_SHARED;
83
84 /* Cray pointees shouldn't be listed in any clauses and should be
85 gimplified to a dereference of the corresponding Cray pointer.
86 Make them all private, so that they are emitted in the debug
87 information. */
88 if (GFC_DECL_CRAY_POINTEE (decl))
89 return OMP_CLAUSE_DEFAULT_PRIVATE;
90
91 /* Assumed-size arrays are predetermined shared. */
92 if (TREE_CODE (decl) == PARM_DECL
93 && GFC_ARRAY_TYPE_P (TREE_TYPE (decl))
94 && GFC_TYPE_ARRAY_AKIND (TREE_TYPE (decl)) == GFC_ARRAY_UNKNOWN
95 && GFC_TYPE_ARRAY_UBOUND (TREE_TYPE (decl),
96 GFC_TYPE_ARRAY_RANK (TREE_TYPE (decl)) - 1)
97 == NULL)
98 return OMP_CLAUSE_DEFAULT_SHARED;
99
100 /* Dummy procedures aren't considered variables by OpenMP, thus are
101 disallowed in OpenMP clauses. They are represented as PARM_DECLs
102 in the middle-end, so return OMP_CLAUSE_DEFAULT_FIRSTPRIVATE here
103 to avoid complaining about their uses with default(none). */
104 if (TREE_CODE (decl) == PARM_DECL
105 && TREE_CODE (TREE_TYPE (decl)) == POINTER_TYPE
106 && TREE_CODE (TREE_TYPE (TREE_TYPE (decl))) == FUNCTION_TYPE)
107 return OMP_CLAUSE_DEFAULT_FIRSTPRIVATE;
108
109 /* COMMON and EQUIVALENCE decls are shared. They
110 are only referenced through DECL_VALUE_EXPR of the variables
111 contained in them. If those are privatized, they will not be
112 gimplified to the COMMON or EQUIVALENCE decls. */
113 if (GFC_DECL_COMMON_OR_EQUIV (decl) && ! DECL_HAS_VALUE_EXPR_P (decl))
114 return OMP_CLAUSE_DEFAULT_SHARED;
115
116 if (GFC_DECL_RESULT (decl) && ! DECL_HAS_VALUE_EXPR_P (decl))
117 return OMP_CLAUSE_DEFAULT_SHARED;
118
119 return OMP_CLAUSE_DEFAULT_UNSPECIFIED;
120 }
121
122 /* Return decl that should be used when reporting DEFAULT(NONE)
123 diagnostics. */
124
125 tree
126 gfc_omp_report_decl (tree decl)
127 {
128 if (DECL_ARTIFICIAL (decl)
129 && DECL_LANG_SPECIFIC (decl)
130 && GFC_DECL_SAVED_DESCRIPTOR (decl))
131 return GFC_DECL_SAVED_DESCRIPTOR (decl);
132
133 return decl;
134 }
135
136 /* Return true if DECL in a private clause needs
137 OMP_CLAUSE_PRIVATE_OUTER_REF set on that clause. */
138 bool
139 gfc_omp_private_outer_ref (tree decl)
140 {
141 tree type = TREE_TYPE (decl);
142
143 if (GFC_DESCRIPTOR_TYPE_P (type)
144 && GFC_TYPE_ARRAY_AKIND (type) == GFC_ARRAY_ALLOCATABLE)
145 return true;
146
147 return false;
148 }
149
150 /* Return code to initialize DECL with its default constructor, or
151 NULL if there's nothing to do. */
152
153 tree
154 gfc_omp_clause_default_ctor (tree clause, tree decl, tree outer)
155 {
156 tree type = TREE_TYPE (decl), rank, size, esize, ptr, cond, then_b, else_b;
157 stmtblock_t block, cond_block;
158
159 if (! GFC_DESCRIPTOR_TYPE_P (type)
160 || GFC_TYPE_ARRAY_AKIND (type) != GFC_ARRAY_ALLOCATABLE)
161 return NULL;
162
163 gcc_assert (outer != NULL);
164 gcc_assert (OMP_CLAUSE_CODE (clause) == OMP_CLAUSE_PRIVATE
165 || OMP_CLAUSE_CODE (clause) == OMP_CLAUSE_LASTPRIVATE);
166
167 /* Allocatable arrays in PRIVATE clauses need to be set to the
168 "not currently allocated" allocation status if the outer
169 array is "not currently allocated"; otherwise they should be allocated. */
170 gfc_start_block (&block);
171
172 gfc_init_block (&cond_block);
173
174 gfc_add_modify (&cond_block, decl, outer);
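/* Compute the allocation size in bytes from the copied descriptor:
   the extent of the last dimension, multiplied by its stride for
   multi-dimensional arrays, multiplied by the element size. */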
175 rank = gfc_rank_cst[GFC_TYPE_ARRAY_RANK (type) - 1];
176 size = gfc_conv_descriptor_ubound_get (decl, rank);
177 size = fold_build2_loc (input_location, MINUS_EXPR, gfc_array_index_type,
178 size, gfc_conv_descriptor_lbound_get (decl, rank));
179 size = fold_build2_loc (input_location, PLUS_EXPR, gfc_array_index_type,
180 size, gfc_index_one_node);
181 if (GFC_TYPE_ARRAY_RANK (type) > 1)
182 size = fold_build2_loc (input_location, MULT_EXPR, gfc_array_index_type,
183 size, gfc_conv_descriptor_stride_get (decl, rank));
184 esize = fold_convert (gfc_array_index_type,
185 TYPE_SIZE_UNIT (gfc_get_element_type (type)));
186 size = fold_build2_loc (input_location, MULT_EXPR, gfc_array_index_type,
187 size, esize);
188 size = gfc_evaluate_now (fold_convert (size_type_node, size), &cond_block);
189
190 ptr = gfc_create_var (pvoid_type_node, NULL);
191 gfc_allocate_using_malloc (&cond_block, ptr, size, NULL_TREE);
192 gfc_conv_descriptor_data_set (&cond_block, decl, ptr);
193
194 then_b = gfc_finish_block (&cond_block);
195
196 gfc_init_block (&cond_block);
197 gfc_conv_descriptor_data_set (&cond_block, decl, null_pointer_node);
198 else_b = gfc_finish_block (&cond_block);
199
200 cond = fold_build2_loc (input_location, NE_EXPR, boolean_type_node,
201 fold_convert (pvoid_type_node,
202 gfc_conv_descriptor_data_get (outer)),
203 null_pointer_node);
204 gfc_add_expr_to_block (&block, build3_loc (input_location, COND_EXPR,
205 void_type_node, cond, then_b, else_b));
206
207 return gfc_finish_block (&block);
208 }
209
210 /* Build and return code for a copy constructor from SRC to DEST. */
211
212 tree
213 gfc_omp_clause_copy_ctor (tree clause, tree dest, tree src)
214 {
215 tree type = TREE_TYPE (dest), ptr, size, esize, rank, call;
216 tree cond, then_b, else_b;
217 stmtblock_t block, cond_block;
218
219 if (! GFC_DESCRIPTOR_TYPE_P (type)
220 || GFC_TYPE_ARRAY_AKIND (type) != GFC_ARRAY_ALLOCATABLE)
221 return build2_v (MODIFY_EXPR, dest, src);
222
223 gcc_assert (OMP_CLAUSE_CODE (clause) == OMP_CLAUSE_FIRSTPRIVATE);
224
225 /* Allocatable arrays in FIRSTPRIVATE clauses need to be allocated
226 and copied from SRC. */
227 gfc_start_block (&block);
228
229 gfc_init_block (&cond_block);
230
231 gfc_add_modify (&cond_block, dest, src);
232 rank = gfc_rank_cst[GFC_TYPE_ARRAY_RANK (type) - 1];
233 size = gfc_conv_descriptor_ubound_get (dest, rank);
234 size = fold_build2_loc (input_location, MINUS_EXPR, gfc_array_index_type,
235 size, gfc_conv_descriptor_lbound_get (dest, rank));
236 size = fold_build2_loc (input_location, PLUS_EXPR, gfc_array_index_type,
237 size, gfc_index_one_node);
238 if (GFC_TYPE_ARRAY_RANK (type) > 1)
239 size = fold_build2_loc (input_location, MULT_EXPR, gfc_array_index_type,
240 size, gfc_conv_descriptor_stride_get (dest, rank));
241 esize = fold_convert (gfc_array_index_type,
242 TYPE_SIZE_UNIT (gfc_get_element_type (type)));
243 size = fold_build2_loc (input_location, MULT_EXPR, gfc_array_index_type,
244 size, esize);
245 size = gfc_evaluate_now (fold_convert (size_type_node, size), &cond_block);
246
247 ptr = gfc_create_var (pvoid_type_node, NULL);
248 gfc_allocate_using_malloc (&cond_block, ptr, size, NULL_TREE);
249 gfc_conv_descriptor_data_set (&cond_block, dest, ptr);
250
251 call = build_call_expr_loc (input_location,
252 built_in_decls[BUILT_IN_MEMCPY], 3, ptr,
253 fold_convert (pvoid_type_node,
254 gfc_conv_descriptor_data_get (src)),
255 size);
256 gfc_add_expr_to_block (&cond_block, fold_convert (void_type_node, call));
257 then_b = gfc_finish_block (&cond_block);
258
259 gfc_init_block (&cond_block);
260 gfc_conv_descriptor_data_set (&cond_block, dest, null_pointer_node);
261 else_b = gfc_finish_block (&cond_block);
262
263 cond = fold_build2_loc (input_location, NE_EXPR, boolean_type_node,
264 fold_convert (pvoid_type_node,
265 gfc_conv_descriptor_data_get (src)),
266 null_pointer_node);
267 gfc_add_expr_to_block (&block, build3_loc (input_location, COND_EXPR,
268 void_type_node, cond, then_b, else_b));
269
270 return gfc_finish_block (&block);
271 }
272
273 /* Similarly, except use an assignment operator instead. */
274
275 tree
276 gfc_omp_clause_assign_op (tree clause ATTRIBUTE_UNUSED, tree dest, tree src)
277 {
278 tree type = TREE_TYPE (dest), rank, size, esize, call;
279 stmtblock_t block;
280
281 if (! GFC_DESCRIPTOR_TYPE_P (type)
282 || GFC_TYPE_ARRAY_AKIND (type) != GFC_ARRAY_ALLOCATABLE)
283 return build2_v (MODIFY_EXPR, dest, src);
284
285 /* Handle copying allocatable arrays. */
286 gfc_start_block (&block);
287
288 rank = gfc_rank_cst[GFC_TYPE_ARRAY_RANK (type) - 1];
289 size = gfc_conv_descriptor_ubound_get (dest, rank);
290 size = fold_build2_loc (input_location, MINUS_EXPR, gfc_array_index_type,
291 size, gfc_conv_descriptor_lbound_get (dest, rank));
292 size = fold_build2_loc (input_location, PLUS_EXPR, gfc_array_index_type,
293 size, gfc_index_one_node);
294 if (GFC_TYPE_ARRAY_RANK (type) > 1)
295 size = fold_build2_loc (input_location, MULT_EXPR, gfc_array_index_type,
296 size, gfc_conv_descriptor_stride_get (dest, rank));
297 esize = fold_convert (gfc_array_index_type,
298 TYPE_SIZE_UNIT (gfc_get_element_type (type)));
299 size = fold_build2_loc (input_location, MULT_EXPR, gfc_array_index_type,
300 size, esize);
301 size = gfc_evaluate_now (fold_convert (size_type_node, size), &block);
302 call = build_call_expr_loc (input_location,
303 built_in_decls[BUILT_IN_MEMCPY], 3,
304 fold_convert (pvoid_type_node,
305 gfc_conv_descriptor_data_get (dest)),
306 fold_convert (pvoid_type_node,
307 gfc_conv_descriptor_data_get (src)),
308 size);
309 gfc_add_expr_to_block (&block, fold_convert (void_type_node, call));
310
311 return gfc_finish_block (&block);
312 }
313
314 /* Build and return code for destructing DECL. Return NULL if there is
315 nothing to be done. */
316
317 tree
318 gfc_omp_clause_dtor (tree clause ATTRIBUTE_UNUSED, tree decl)
319 {
320 tree type = TREE_TYPE (decl);
321
322 if (! GFC_DESCRIPTOR_TYPE_P (type)
323 || GFC_TYPE_ARRAY_AKIND (type) != GFC_ARRAY_ALLOCATABLE)
324 return NULL;
325
326 /* Allocatable arrays in FIRSTPRIVATE/LASTPRIVATE etc. clauses need
327 to be deallocated if they were allocated. */
328 return gfc_trans_dealloc_allocated (decl);
329 }
330
331
332 /* Return true if DECL's DECL_VALUE_EXPR (if any) should be
333 disregarded in an OpenMP construct, because it is going to be
334 remapped during OpenMP lowering. SHARED is true if DECL
335 is going to be shared, false if it is going to be privatized. */
336
337 bool
338 gfc_omp_disregard_value_expr (tree decl, bool shared)
339 {
340 if (GFC_DECL_COMMON_OR_EQUIV (decl)
341 && DECL_HAS_VALUE_EXPR_P (decl))
342 {
343 tree value = DECL_VALUE_EXPR (decl);
344
345 if (TREE_CODE (value) == COMPONENT_REF
346 && TREE_CODE (TREE_OPERAND (value, 0)) == VAR_DECL
347 && GFC_DECL_COMMON_OR_EQUIV (TREE_OPERAND (value, 0)))
348 {
349 /* If variable in COMMON or EQUIVALENCE is privatized, return
350 true, as just that variable is supposed to be privatized,
351 not the whole COMMON or whole EQUIVALENCE.
352 For shared variables in COMMON or EQUIVALENCE, let them be
353 gimplified to DECL_VALUE_EXPR, so that for multiple shared vars
354 from the same COMMON or EQUIVALENCE just one sharing of the
355 whole COMMON or EQUIVALENCE is enough. */
356 return ! shared;
357 }
358 }
359
360 if (GFC_DECL_RESULT (decl) && DECL_HAS_VALUE_EXPR_P (decl))
361 return ! shared;
362
363 return false;
364 }
365
366 /* Return true if DECL (which is shared iff SHARED is true) should
367 be put into OMP_CLAUSE_PRIVATE with the OMP_CLAUSE_PRIVATE_DEBUG
368 flag set. */
369
370 bool
371 gfc_omp_private_debug_clause (tree decl, bool shared)
372 {
373 if (GFC_DECL_CRAY_POINTEE (decl))
374 return true;
375
376 if (GFC_DECL_COMMON_OR_EQUIV (decl)
377 && DECL_HAS_VALUE_EXPR_P (decl))
378 {
379 tree value = DECL_VALUE_EXPR (decl);
380
381 if (TREE_CODE (value) == COMPONENT_REF
382 && TREE_CODE (TREE_OPERAND (value, 0)) == VAR_DECL
383 && GFC_DECL_COMMON_OR_EQUIV (TREE_OPERAND (value, 0)))
384 return shared;
385 }
386
387 return false;
388 }
389
390 /* Register language-specific type size variables as potentially OpenMP
391 firstprivate variables. */
392
393 void
394 gfc_omp_firstprivatize_type_sizes (struct gimplify_omp_ctx *ctx, tree type)
395 {
396 if (GFC_ARRAY_TYPE_P (type) || GFC_DESCRIPTOR_TYPE_P (type))
397 {
398 int r;
399
400 gcc_assert (TYPE_LANG_SPECIFIC (type) != NULL);
401 for (r = 0; r < GFC_TYPE_ARRAY_RANK (type); r++)
402 {
403 omp_firstprivatize_variable (ctx, GFC_TYPE_ARRAY_LBOUND (type, r));
404 omp_firstprivatize_variable (ctx, GFC_TYPE_ARRAY_UBOUND (type, r));
405 omp_firstprivatize_variable (ctx, GFC_TYPE_ARRAY_STRIDE (type, r));
406 }
407 omp_firstprivatize_variable (ctx, GFC_TYPE_ARRAY_SIZE (type));
408 omp_firstprivatize_variable (ctx, GFC_TYPE_ARRAY_OFFSET (type));
409 }
410 }
411
412
413 static inline tree
414 gfc_trans_add_clause (tree node, tree tail)
415 {
416 OMP_CLAUSE_CHAIN (node) = tail;
417 return node;
418 }
419
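/* Return the backend decl to use for SYM in OpenMP clauses. Function
   results and alternate ENTRY results of the current function (or of its
   parent, for contained procedures) are mapped to the fake result decl. */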
420 static tree
421 gfc_trans_omp_variable (gfc_symbol *sym)
422 {
423 tree t = gfc_get_symbol_decl (sym);
424 tree parent_decl;
425 int parent_flag;
426 bool return_value;
427 bool alternate_entry;
428 bool entry_master;
429
430 return_value = sym->attr.function && sym->result == sym;
431 alternate_entry = sym->attr.function && sym->attr.entry
432 && sym->result == sym;
433 entry_master = sym->attr.result
434 && sym->ns->proc_name->attr.entry_master
435 && !gfc_return_by_reference (sym->ns->proc_name);
436 parent_decl = DECL_CONTEXT (current_function_decl);
437
438 if ((t == parent_decl && return_value)
439 || (sym->ns && sym->ns->proc_name
440 && sym->ns->proc_name->backend_decl == parent_decl
441 && (alternate_entry || entry_master)))
442 parent_flag = 1;
443 else
444 parent_flag = 0;
445
446 /* Special case for assigning the return value of a function.
447 Self-recursive functions must have an explicit return value. */
448 if (return_value && (t == current_function_decl || parent_flag))
449 t = gfc_get_fake_result_decl (sym, parent_flag);
450
451 /* Similarly for alternate entry points. */
452 else if (alternate_entry
453 && (sym->ns->proc_name->backend_decl == current_function_decl
454 || parent_flag))
455 {
456 gfc_entry_list *el = NULL;
457
458 for (el = sym->ns->entries; el; el = el->next)
459 if (sym == el->sym)
460 {
461 t = gfc_get_fake_result_decl (sym, parent_flag);
462 break;
463 }
464 }
465
466 else if (entry_master
467 && (sym->ns->proc_name->backend_decl == current_function_decl
468 || parent_flag))
469 t = gfc_get_fake_result_decl (sym, parent_flag);
470
471 return t;
472 }
473
474 static tree
475 gfc_trans_omp_variable_list (enum omp_clause_code code, gfc_namelist *namelist,
476 tree list)
477 {
478 for (; namelist != NULL; namelist = namelist->next)
479 if (namelist->sym->attr.referenced)
480 {
481 tree t = gfc_trans_omp_variable (namelist->sym);
482 if (t != error_mark_node)
483 {
484 tree node = build_omp_clause (input_location, code);
485 OMP_CLAUSE_DECL (node) = t;
486 list = gfc_trans_add_clause (node, list);
487 }
488 }
489 return list;
490 }
491
492 static void
493 gfc_trans_omp_array_reduction (tree c, gfc_symbol *sym, locus where)
494 {
495 gfc_symtree *root1 = NULL, *root2 = NULL, *root3 = NULL, *root4 = NULL;
496 gfc_symtree *symtree1, *symtree2, *symtree3, *symtree4 = NULL;
497 gfc_symbol init_val_sym, outer_sym, intrinsic_sym;
498 gfc_expr *e1, *e2, *e3, *e4;
499 gfc_ref *ref;
500 tree decl, backend_decl, stmt, type, outer_decl;
501 locus old_loc = gfc_current_locus;
502 const char *iname;
503 gfc_try t;
504
505 decl = OMP_CLAUSE_DECL (c);
506 gfc_current_locus = where;
507 type = TREE_TYPE (decl);
508 outer_decl = create_tmp_var_raw (type, NULL);
509 if (TREE_CODE (decl) == PARM_DECL
510 && TREE_CODE (type) == REFERENCE_TYPE
511 && GFC_DESCRIPTOR_TYPE_P (TREE_TYPE (type))
512 && GFC_TYPE_ARRAY_AKIND (TREE_TYPE (type)) == GFC_ARRAY_ALLOCATABLE)
513 {
514 decl = build_fold_indirect_ref (decl);
515 type = TREE_TYPE (type);
516 }
517
518 /* Create a fake symbol for init value. */
519 memset (&init_val_sym, 0, sizeof (init_val_sym));
520 init_val_sym.ns = sym->ns;
521 init_val_sym.name = sym->name;
522 init_val_sym.ts = sym->ts;
523 init_val_sym.attr.referenced = 1;
524 init_val_sym.declared_at = where;
525 init_val_sym.attr.flavor = FL_VARIABLE;
526 backend_decl = omp_reduction_init (c, gfc_sym_type (&init_val_sym));
527 init_val_sym.backend_decl = backend_decl;
528
529 /* Create a fake symbol for the outer array reference. */
530 outer_sym = *sym;
531 outer_sym.as = gfc_copy_array_spec (sym->as);
532 outer_sym.attr.dummy = 0;
533 outer_sym.attr.result = 0;
534 outer_sym.attr.flavor = FL_VARIABLE;
535 outer_sym.backend_decl = outer_decl;
536 if (decl != OMP_CLAUSE_DECL (c))
537 outer_sym.backend_decl = build_fold_indirect_ref (outer_decl);
538
539 /* Create fake symtrees for it. */
540 symtree1 = gfc_new_symtree (&root1, sym->name);
541 symtree1->n.sym = sym;
542 gcc_assert (symtree1 == root1);
543
544 symtree2 = gfc_new_symtree (&root2, sym->name);
545 symtree2->n.sym = &init_val_sym;
546 gcc_assert (symtree2 == root2);
547
548 symtree3 = gfc_new_symtree (&root3, sym->name);
549 symtree3->n.sym = &outer_sym;
550 gcc_assert (symtree3 == root3);
551
552 /* Create expressions. */
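/* e1 refers to the reduction variable itself, e2 to its initialization
   value, e3 to the placeholder for the outer (original) variable, and e4
   is the expression that combines e3 with e1. The init statement becomes
   e1 = e2 and the merge statement e3 = e4. */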
553 e1 = gfc_get_expr ();
554 e1->expr_type = EXPR_VARIABLE;
555 e1->where = where;
556 e1->symtree = symtree1;
557 e1->ts = sym->ts;
558 e1->ref = ref = gfc_get_ref ();
559 ref->type = REF_ARRAY;
560 ref->u.ar.where = where;
561 ref->u.ar.as = sym->as;
562 ref->u.ar.type = AR_FULL;
563 ref->u.ar.dimen = 0;
564 t = gfc_resolve_expr (e1);
565 gcc_assert (t == SUCCESS);
566
567 e2 = gfc_get_expr ();
568 e2->expr_type = EXPR_VARIABLE;
569 e2->where = where;
570 e2->symtree = symtree2;
571 e2->ts = sym->ts;
572 t = gfc_resolve_expr (e2);
573 gcc_assert (t == SUCCESS);
574
575 e3 = gfc_copy_expr (e1);
576 e3->symtree = symtree3;
577 t = gfc_resolve_expr (e3);
578 gcc_assert (t == SUCCESS);
579
580 iname = NULL;
581 switch (OMP_CLAUSE_REDUCTION_CODE (c))
582 {
583 case PLUS_EXPR:
584 case MINUS_EXPR:
585 e4 = gfc_add (e3, e1);
586 break;
587 case MULT_EXPR:
588 e4 = gfc_multiply (e3, e1);
589 break;
590 case TRUTH_ANDIF_EXPR:
591 e4 = gfc_and (e3, e1);
592 break;
593 case TRUTH_ORIF_EXPR:
594 e4 = gfc_or (e3, e1);
595 break;
596 case EQ_EXPR:
597 e4 = gfc_eqv (e3, e1);
598 break;
599 case NE_EXPR:
600 e4 = gfc_neqv (e3, e1);
601 break;
602 case MIN_EXPR:
603 iname = "min";
604 break;
605 case MAX_EXPR:
606 iname = "max";
607 break;
608 case BIT_AND_EXPR:
609 iname = "iand";
610 break;
611 case BIT_IOR_EXPR:
612 iname = "ior";
613 break;
614 case BIT_XOR_EXPR:
615 iname = "ieor";
616 break;
617 default:
618 gcc_unreachable ();
619 }
620 if (iname != NULL)
621 {
622 memset (&intrinsic_sym, 0, sizeof (intrinsic_sym));
623 intrinsic_sym.ns = sym->ns;
624 intrinsic_sym.name = iname;
625 intrinsic_sym.ts = sym->ts;
626 intrinsic_sym.attr.referenced = 1;
627 intrinsic_sym.attr.intrinsic = 1;
628 intrinsic_sym.attr.function = 1;
629 intrinsic_sym.result = &intrinsic_sym;
630 intrinsic_sym.declared_at = where;
631
632 symtree4 = gfc_new_symtree (&root4, iname);
633 symtree4->n.sym = &intrinsic_sym;
634 gcc_assert (symtree4 == root4);
635
636 e4 = gfc_get_expr ();
637 e4->expr_type = EXPR_FUNCTION;
638 e4->where = where;
639 e4->symtree = symtree4;
640 e4->value.function.isym = gfc_find_function (iname);
641 e4->value.function.actual = gfc_get_actual_arglist ();
642 e4->value.function.actual->expr = e3;
643 e4->value.function.actual->next = gfc_get_actual_arglist ();
644 e4->value.function.actual->next->expr = e1;
645 }
646 /* e1 and e3 have been stored as arguments of e4, avoid sharing. */
647 e1 = gfc_copy_expr (e1);
648 e3 = gfc_copy_expr (e3);
649 t = gfc_resolve_expr (e4);
650 gcc_assert (t == SUCCESS);
651
652 /* Create the init statement list. */
653 pushlevel (0);
654 if (GFC_DESCRIPTOR_TYPE_P (type)
655 && GFC_TYPE_ARRAY_AKIND (type) == GFC_ARRAY_ALLOCATABLE)
656 {
657 /* If decl is an allocatable array, it needs to be allocated
658 with the same bounds as the outer var. */
659 tree rank, size, esize, ptr;
660 stmtblock_t block;
661
662 gfc_start_block (&block);
663
664 gfc_add_modify (&block, decl, outer_sym.backend_decl);
665 rank = gfc_rank_cst[GFC_TYPE_ARRAY_RANK (type) - 1];
666 size = gfc_conv_descriptor_ubound_get (decl, rank);
667 size = fold_build2_loc (input_location, MINUS_EXPR,
668 gfc_array_index_type, size,
669 gfc_conv_descriptor_lbound_get (decl, rank));
670 size = fold_build2_loc (input_location, PLUS_EXPR, gfc_array_index_type,
671 size, gfc_index_one_node);
672 if (GFC_TYPE_ARRAY_RANK (type) > 1)
673 size = fold_build2_loc (input_location, MULT_EXPR,
674 gfc_array_index_type, size,
675 gfc_conv_descriptor_stride_get (decl, rank));
676 esize = fold_convert (gfc_array_index_type,
677 TYPE_SIZE_UNIT (gfc_get_element_type (type)));
678 size = fold_build2_loc (input_location, MULT_EXPR, gfc_array_index_type,
679 size, esize);
680 size = gfc_evaluate_now (fold_convert (size_type_node, size), &block);
681
682 ptr = gfc_create_var (pvoid_type_node, NULL);
683 gfc_allocate_using_malloc (&block, ptr, size, NULL_TREE);
684 gfc_conv_descriptor_data_set (&block, decl, ptr);
685
686 gfc_add_expr_to_block (&block, gfc_trans_assignment (e1, e2, false,
687 false));
688 stmt = gfc_finish_block (&block);
689 }
690 else
691 stmt = gfc_trans_assignment (e1, e2, false, false);
692 if (TREE_CODE (stmt) != BIND_EXPR)
693 stmt = build3_v (BIND_EXPR, NULL, stmt, poplevel (1, 0, 0));
694 else
695 poplevel (0, 0, 0);
696 OMP_CLAUSE_REDUCTION_INIT (c) = stmt;
697
698 /* Create the merge statement list. */
699 pushlevel (0);
700 if (GFC_DESCRIPTOR_TYPE_P (type)
701 && GFC_TYPE_ARRAY_AKIND (type) == GFC_ARRAY_ALLOCATABLE)
702 {
703 /* If decl is an allocatable array, it needs to be deallocated
704 afterwards. */
705 stmtblock_t block;
706
707 gfc_start_block (&block);
708 gfc_add_expr_to_block (&block, gfc_trans_assignment (e3, e4, false,
709 true));
710 gfc_add_expr_to_block (&block, gfc_trans_dealloc_allocated (decl));
711 stmt = gfc_finish_block (&block);
712 }
713 else
714 stmt = gfc_trans_assignment (e3, e4, false, true);
715 if (TREE_CODE (stmt) != BIND_EXPR)
716 stmt = build3_v (BIND_EXPR, NULL, stmt, poplevel (1, 0, 0));
717 else
718 poplevel (0, 0, 0);
719 OMP_CLAUSE_REDUCTION_MERGE (c) = stmt;
720
721 /* And stick the placeholder VAR_DECL into the clause as well. */
722 OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) = outer_decl;
723
724 gfc_current_locus = old_loc;
725
726 gfc_free_expr (e1);
727 gfc_free_expr (e2);
728 gfc_free_expr (e3);
729 gfc_free_expr (e4);
730 free (symtree1);
731 free (symtree2);
732 free (symtree3);
733 free (symtree4);
734 gfc_free_array_spec (outer_sym.as);
735 }
736
737 static tree
738 gfc_trans_omp_reduction_list (gfc_namelist *namelist, tree list,
739 enum tree_code reduction_code, locus where)
740 {
741 for (; namelist != NULL; namelist = namelist->next)
742 if (namelist->sym->attr.referenced)
743 {
744 tree t = gfc_trans_omp_variable (namelist->sym);
745 if (t != error_mark_node)
746 {
747 tree node = build_omp_clause (where.lb->location,
748 OMP_CLAUSE_REDUCTION);
749 OMP_CLAUSE_DECL (node) = t;
750 OMP_CLAUSE_REDUCTION_CODE (node) = reduction_code;
751 if (namelist->sym->attr.dimension)
752 gfc_trans_omp_array_reduction (node, namelist->sym, where);
753 list = gfc_trans_add_clause (node, list);
754 }
755 }
756 return list;
757 }
758
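/* Convert the clause list CLAUSES of an OpenMP directive located at WHERE
   into a chain of OMP_CLAUSE_* trees. Any code needed to evaluate the
   clause expressions is appended to BLOCK. */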
759 static tree
760 gfc_trans_omp_clauses (stmtblock_t *block, gfc_omp_clauses *clauses,
761 locus where)
762 {
763 tree omp_clauses = NULL_TREE, chunk_size, c;
764 int list;
765 enum omp_clause_code clause_code;
766 gfc_se se;
767
768 if (clauses == NULL)
769 return NULL_TREE;
770
771 for (list = 0; list < OMP_LIST_NUM; list++)
772 {
773 gfc_namelist *n = clauses->lists[list];
774
775 if (n == NULL)
776 continue;
777 if (list >= OMP_LIST_REDUCTION_FIRST
778 && list <= OMP_LIST_REDUCTION_LAST)
779 {
780 enum tree_code reduction_code;
781 switch (list)
782 {
783 case OMP_LIST_PLUS:
784 reduction_code = PLUS_EXPR;
785 break;
786 case OMP_LIST_MULT:
787 reduction_code = MULT_EXPR;
788 break;
789 case OMP_LIST_SUB:
790 reduction_code = MINUS_EXPR;
791 break;
792 case OMP_LIST_AND:
793 reduction_code = TRUTH_ANDIF_EXPR;
794 break;
795 case OMP_LIST_OR:
796 reduction_code = TRUTH_ORIF_EXPR;
797 break;
798 case OMP_LIST_EQV:
799 reduction_code = EQ_EXPR;
800 break;
801 case OMP_LIST_NEQV:
802 reduction_code = NE_EXPR;
803 break;
804 case OMP_LIST_MAX:
805 reduction_code = MAX_EXPR;
806 break;
807 case OMP_LIST_MIN:
808 reduction_code = MIN_EXPR;
809 break;
810 case OMP_LIST_IAND:
811 reduction_code = BIT_AND_EXPR;
812 break;
813 case OMP_LIST_IOR:
814 reduction_code = BIT_IOR_EXPR;
815 break;
816 case OMP_LIST_IEOR:
817 reduction_code = BIT_XOR_EXPR;
818 break;
819 default:
820 gcc_unreachable ();
821 }
822 omp_clauses
823 = gfc_trans_omp_reduction_list (n, omp_clauses, reduction_code,
824 where);
825 continue;
826 }
827 switch (list)
828 {
829 case OMP_LIST_PRIVATE:
830 clause_code = OMP_CLAUSE_PRIVATE;
831 goto add_clause;
832 case OMP_LIST_SHARED:
833 clause_code = OMP_CLAUSE_SHARED;
834 goto add_clause;
835 case OMP_LIST_FIRSTPRIVATE:
836 clause_code = OMP_CLAUSE_FIRSTPRIVATE;
837 goto add_clause;
838 case OMP_LIST_LASTPRIVATE:
839 clause_code = OMP_CLAUSE_LASTPRIVATE;
840 goto add_clause;
841 case OMP_LIST_COPYIN:
842 clause_code = OMP_CLAUSE_COPYIN;
843 goto add_clause;
844 case OMP_LIST_COPYPRIVATE:
845 clause_code = OMP_CLAUSE_COPYPRIVATE;
846 /* FALLTHROUGH */
847 add_clause:
848 omp_clauses
849 = gfc_trans_omp_variable_list (clause_code, n, omp_clauses);
850 break;
851 default:
852 break;
853 }
854 }
855
856 if (clauses->if_expr)
857 {
858 tree if_var;
859
860 gfc_init_se (&se, NULL);
861 gfc_conv_expr (&se, clauses->if_expr);
862 gfc_add_block_to_block (block, &se.pre);
863 if_var = gfc_evaluate_now (se.expr, block);
864 gfc_add_block_to_block (block, &se.post);
865
866 c = build_omp_clause (where.lb->location, OMP_CLAUSE_IF);
867 OMP_CLAUSE_IF_EXPR (c) = if_var;
868 omp_clauses = gfc_trans_add_clause (c, omp_clauses);
869 }
870
871 if (clauses->final_expr)
872 {
873 tree final_var;
874
875 gfc_init_se (&se, NULL);
876 gfc_conv_expr (&se, clauses->final_expr);
877 gfc_add_block_to_block (block, &se.pre);
878 final_var = gfc_evaluate_now (se.expr, block);
879 gfc_add_block_to_block (block, &se.post);
880
881 c = build_omp_clause (where.lb->location, OMP_CLAUSE_FINAL);
882 OMP_CLAUSE_FINAL_EXPR (c) = final_var;
883 omp_clauses = gfc_trans_add_clause (c, omp_clauses);
884 }
885
886 if (clauses->num_threads)
887 {
888 tree num_threads;
889
890 gfc_init_se (&se, NULL);
891 gfc_conv_expr (&se, clauses->num_threads);
892 gfc_add_block_to_block (block, &se.pre);
893 num_threads = gfc_evaluate_now (se.expr, block);
894 gfc_add_block_to_block (block, &se.post);
895
896 c = build_omp_clause (where.lb->location, OMP_CLAUSE_NUM_THREADS);
897 OMP_CLAUSE_NUM_THREADS_EXPR (c) = num_threads;
898 omp_clauses = gfc_trans_add_clause (c, omp_clauses);
899 }
900
901 chunk_size = NULL_TREE;
902 if (clauses->chunk_size)
903 {
904 gfc_init_se (&se, NULL);
905 gfc_conv_expr (&se, clauses->chunk_size);
906 gfc_add_block_to_block (block, &se.pre);
907 chunk_size = gfc_evaluate_now (se.expr, block);
908 gfc_add_block_to_block (block, &se.post);
909 }
910
911 if (clauses->sched_kind != OMP_SCHED_NONE)
912 {
913 c = build_omp_clause (where.lb->location, OMP_CLAUSE_SCHEDULE);
914 OMP_CLAUSE_SCHEDULE_CHUNK_EXPR (c) = chunk_size;
915 switch (clauses->sched_kind)
916 {
917 case OMP_SCHED_STATIC:
918 OMP_CLAUSE_SCHEDULE_KIND (c) = OMP_CLAUSE_SCHEDULE_STATIC;
919 break;
920 case OMP_SCHED_DYNAMIC:
921 OMP_CLAUSE_SCHEDULE_KIND (c) = OMP_CLAUSE_SCHEDULE_DYNAMIC;
922 break;
923 case OMP_SCHED_GUIDED:
924 OMP_CLAUSE_SCHEDULE_KIND (c) = OMP_CLAUSE_SCHEDULE_GUIDED;
925 break;
926 case OMP_SCHED_RUNTIME:
927 OMP_CLAUSE_SCHEDULE_KIND (c) = OMP_CLAUSE_SCHEDULE_RUNTIME;
928 break;
929 case OMP_SCHED_AUTO:
930 OMP_CLAUSE_SCHEDULE_KIND (c) = OMP_CLAUSE_SCHEDULE_AUTO;
931 break;
932 default:
933 gcc_unreachable ();
934 }
935 omp_clauses = gfc_trans_add_clause (c, omp_clauses);
936 }
937
938 if (clauses->default_sharing != OMP_DEFAULT_UNKNOWN)
939 {
940 c = build_omp_clause (where.lb->location, OMP_CLAUSE_DEFAULT);
941 switch (clauses->default_sharing)
942 {
943 case OMP_DEFAULT_NONE:
944 OMP_CLAUSE_DEFAULT_KIND (c) = OMP_CLAUSE_DEFAULT_NONE;
945 break;
946 case OMP_DEFAULT_SHARED:
947 OMP_CLAUSE_DEFAULT_KIND (c) = OMP_CLAUSE_DEFAULT_SHARED;
948 break;
949 case OMP_DEFAULT_PRIVATE:
950 OMP_CLAUSE_DEFAULT_KIND (c) = OMP_CLAUSE_DEFAULT_PRIVATE;
951 break;
952 case OMP_DEFAULT_FIRSTPRIVATE:
953 OMP_CLAUSE_DEFAULT_KIND (c) = OMP_CLAUSE_DEFAULT_FIRSTPRIVATE;
954 break;
955 default:
956 gcc_unreachable ();
957 }
958 omp_clauses = gfc_trans_add_clause (c, omp_clauses);
959 }
960
961 if (clauses->nowait)
962 {
963 c = build_omp_clause (where.lb->location, OMP_CLAUSE_NOWAIT);
964 omp_clauses = gfc_trans_add_clause (c, omp_clauses);
965 }
966
967 if (clauses->ordered)
968 {
969 c = build_omp_clause (where.lb->location, OMP_CLAUSE_ORDERED);
970 omp_clauses = gfc_trans_add_clause (c, omp_clauses);
971 }
972
973 if (clauses->untied)
974 {
975 c = build_omp_clause (where.lb->location, OMP_CLAUSE_UNTIED);
976 omp_clauses = gfc_trans_add_clause (c, omp_clauses);
977 }
978
979 if (clauses->mergeable)
980 {
981 c = build_omp_clause (where.lb->location, OMP_CLAUSE_MERGEABLE);
982 omp_clauses = gfc_trans_add_clause (c, omp_clauses);
983 }
984
985 if (clauses->collapse)
986 {
987 c = build_omp_clause (where.lb->location, OMP_CLAUSE_COLLAPSE);
988 OMP_CLAUSE_COLLAPSE_EXPR (c)
989 = build_int_cst (integer_type_node, clauses->collapse);
990 omp_clauses = gfc_trans_add_clause (c, omp_clauses);
991 }
992
993 return omp_clauses;
994 }
995
996 /* Like gfc_trans_code, but force creation of a BIND_EXPR around it. */
997
998 static tree
999 gfc_trans_omp_code (gfc_code *code, bool force_empty)
1000 {
1001 tree stmt;
1002
1003 pushlevel (0);
1004 stmt = gfc_trans_code (code);
1005 if (TREE_CODE (stmt) != BIND_EXPR)
1006 {
1007 if (!IS_EMPTY_STMT (stmt) || force_empty)
1008 {
1009 tree block = poplevel (1, 0, 0);
1010 stmt = build3_v (BIND_EXPR, NULL, stmt, block);
1011 }
1012 else
1013 poplevel (0, 0, 0);
1014 }
1015 else
1016 poplevel (0, 0, 0);
1017 return stmt;
1018 }
1019
1020
1021 static tree gfc_trans_omp_sections (gfc_code *, gfc_omp_clauses *);
1022 static tree gfc_trans_omp_workshare (gfc_code *, gfc_omp_clauses *);
1023
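/* Translate !$OMP ATOMIC. Depending on the kind recorded in
   atomic_code->ext.omp_atomic, the contained assignment(s) are emitted as
   OMP_ATOMIC (update), OMP_ATOMIC_READ, or OMP_ATOMIC_CAPTURE_OLD/NEW
   nodes; the write form reuses OMP_ATOMIC with the plain right-hand side. */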
1024 static tree
1025 gfc_trans_omp_atomic (gfc_code *code)
1026 {
1027 gfc_code *atomic_code = code;
1028 gfc_se lse;
1029 gfc_se rse;
1030 gfc_se vse;
1031 gfc_expr *expr2, *e;
1032 gfc_symbol *var;
1033 stmtblock_t block;
1034 tree lhsaddr, type, rhs, x;
1035 enum tree_code op = ERROR_MARK;
1036 enum tree_code aop = OMP_ATOMIC;
1037 bool var_on_left = false;
1038
1039 code = code->block->next;
1040 gcc_assert (code->op == EXEC_ASSIGN);
1041 var = code->expr1->symtree->n.sym;
1042
1043 gfc_init_se (&lse, NULL);
1044 gfc_init_se (&rse, NULL);
1045 gfc_init_se (&vse, NULL);
1046 gfc_start_block (&block);
1047
1048 expr2 = code->expr2;
1049 if (expr2->expr_type == EXPR_FUNCTION
1050 && expr2->value.function.isym->id == GFC_ISYM_CONVERSION)
1051 expr2 = expr2->value.function.actual->expr;
1052
1053 switch (atomic_code->ext.omp_atomic)
1054 {
1055 case GFC_OMP_ATOMIC_READ:
1056 gfc_conv_expr (&vse, code->expr1);
1057 gfc_add_block_to_block (&block, &vse.pre);
1058
1059 gfc_conv_expr (&lse, expr2);
1060 gfc_add_block_to_block (&block, &lse.pre);
1061 type = TREE_TYPE (lse.expr);
1062 lhsaddr = gfc_build_addr_expr (NULL, lse.expr);
1063
1064 x = build1 (OMP_ATOMIC_READ, type, lhsaddr);
1065 x = convert (TREE_TYPE (vse.expr), x);
1066 gfc_add_modify (&block, vse.expr, x);
1067
1068 gfc_add_block_to_block (&block, &lse.pre);
1069 gfc_add_block_to_block (&block, &rse.pre);
1070
1071 return gfc_finish_block (&block);
1072 case GFC_OMP_ATOMIC_CAPTURE:
1073 aop = OMP_ATOMIC_CAPTURE_NEW;
1074 if (expr2->expr_type == EXPR_VARIABLE)
1075 {
1076 aop = OMP_ATOMIC_CAPTURE_OLD;
1077 gfc_conv_expr (&vse, code->expr1);
1078 gfc_add_block_to_block (&block, &vse.pre);
1079
1080 gfc_conv_expr (&lse, expr2);
1081 gfc_add_block_to_block (&block, &lse.pre);
1082 gfc_init_se (&lse, NULL);
1083 code = code->next;
1084 var = code->expr1->symtree->n.sym;
1085 expr2 = code->expr2;
1086 if (expr2->expr_type == EXPR_FUNCTION
1087 && expr2->value.function.isym->id == GFC_ISYM_CONVERSION)
1088 expr2 = expr2->value.function.actual->expr;
1089 }
1090 break;
1091 default:
1092 break;
1093 }
1094
1095 gfc_conv_expr (&lse, code->expr1);
1096 gfc_add_block_to_block (&block, &lse.pre);
1097 type = TREE_TYPE (lse.expr);
1098 lhsaddr = gfc_build_addr_expr (NULL, lse.expr);
1099
1100 if (atomic_code->ext.omp_atomic == GFC_OMP_ATOMIC_WRITE)
1101 {
1102 gfc_conv_expr (&rse, expr2);
1103 gfc_add_block_to_block (&block, &rse.pre);
1104 }
1105 else if (expr2->expr_type == EXPR_OP)
1106 {
1107 gfc_expr *e;
1108 switch (expr2->value.op.op)
1109 {
1110 case INTRINSIC_PLUS:
1111 op = PLUS_EXPR;
1112 break;
1113 case INTRINSIC_TIMES:
1114 op = MULT_EXPR;
1115 break;
1116 case INTRINSIC_MINUS:
1117 op = MINUS_EXPR;
1118 break;
1119 case INTRINSIC_DIVIDE:
1120 if (expr2->ts.type == BT_INTEGER)
1121 op = TRUNC_DIV_EXPR;
1122 else
1123 op = RDIV_EXPR;
1124 break;
1125 case INTRINSIC_AND:
1126 op = TRUTH_ANDIF_EXPR;
1127 break;
1128 case INTRINSIC_OR:
1129 op = TRUTH_ORIF_EXPR;
1130 break;
1131 case INTRINSIC_EQV:
1132 op = EQ_EXPR;
1133 break;
1134 case INTRINSIC_NEQV:
1135 op = NE_EXPR;
1136 break;
1137 default:
1138 gcc_unreachable ();
1139 }
1140 e = expr2->value.op.op1;
1141 if (e->expr_type == EXPR_FUNCTION
1142 && e->value.function.isym->id == GFC_ISYM_CONVERSION)
1143 e = e->value.function.actual->expr;
1144 if (e->expr_type == EXPR_VARIABLE
1145 && e->symtree != NULL
1146 && e->symtree->n.sym == var)
1147 {
1148 expr2 = expr2->value.op.op2;
1149 var_on_left = true;
1150 }
1151 else
1152 {
1153 e = expr2->value.op.op2;
1154 if (e->expr_type == EXPR_FUNCTION
1155 && e->value.function.isym->id == GFC_ISYM_CONVERSION)
1156 e = e->value.function.actual->expr;
1157 gcc_assert (e->expr_type == EXPR_VARIABLE
1158 && e->symtree != NULL
1159 && e->symtree->n.sym == var);
1160 expr2 = expr2->value.op.op1;
1161 var_on_left = false;
1162 }
1163 gfc_conv_expr (&rse, expr2);
1164 gfc_add_block_to_block (&block, &rse.pre);
1165 }
1166 else
1167 {
1168 gcc_assert (expr2->expr_type == EXPR_FUNCTION);
1169 switch (expr2->value.function.isym->id)
1170 {
1171 case GFC_ISYM_MIN:
1172 op = MIN_EXPR;
1173 break;
1174 case GFC_ISYM_MAX:
1175 op = MAX_EXPR;
1176 break;
1177 case GFC_ISYM_IAND:
1178 op = BIT_AND_EXPR;
1179 break;
1180 case GFC_ISYM_IOR:
1181 op = BIT_IOR_EXPR;
1182 break;
1183 case GFC_ISYM_IEOR:
1184 op = BIT_XOR_EXPR;
1185 break;
1186 default:
1187 gcc_unreachable ();
1188 }
1189 e = expr2->value.function.actual->expr;
1190 gcc_assert (e->expr_type == EXPR_VARIABLE
1191 && e->symtree != NULL
1192 && e->symtree->n.sym == var);
1193
1194 gfc_conv_expr (&rse, expr2->value.function.actual->next->expr);
1195 gfc_add_block_to_block (&block, &rse.pre);
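/* MIN and MAX may take more than two arguments; fold any extra
   arguments into a single accumulated value first. */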
1196 if (expr2->value.function.actual->next->next != NULL)
1197 {
1198 tree accum = gfc_create_var (TREE_TYPE (rse.expr), NULL);
1199 gfc_actual_arglist *arg;
1200
1201 gfc_add_modify (&block, accum, rse.expr);
1202 for (arg = expr2->value.function.actual->next->next; arg;
1203 arg = arg->next)
1204 {
1205 gfc_init_block (&rse.pre);
1206 gfc_conv_expr (&rse, arg->expr);
1207 gfc_add_block_to_block (&block, &rse.pre);
1208 x = fold_build2_loc (input_location, op, TREE_TYPE (accum),
1209 accum, rse.expr);
1210 gfc_add_modify (&block, accum, x);
1211 }
1212
1213 rse.expr = accum;
1214 }
1215
1216 expr2 = expr2->value.function.actual->next->expr;
1217 }
1218
1219 lhsaddr = save_expr (lhsaddr);
1220 rhs = gfc_evaluate_now (rse.expr, &block);
1221
1222 if (atomic_code->ext.omp_atomic == GFC_OMP_ATOMIC_WRITE)
1223 x = rhs;
1224 else
1225 {
1226 x = convert (TREE_TYPE (rhs),
1227 build_fold_indirect_ref_loc (input_location, lhsaddr));
1228 if (var_on_left)
1229 x = fold_build2_loc (input_location, op, TREE_TYPE (rhs), x, rhs);
1230 else
1231 x = fold_build2_loc (input_location, op, TREE_TYPE (rhs), rhs, x);
1232 }
1233
1234 if (TREE_CODE (TREE_TYPE (rhs)) == COMPLEX_TYPE
1235 && TREE_CODE (type) != COMPLEX_TYPE)
1236 x = fold_build1_loc (input_location, REALPART_EXPR,
1237 TREE_TYPE (TREE_TYPE (rhs)), x);
1238
1239 gfc_add_block_to_block (&block, &lse.pre);
1240 gfc_add_block_to_block (&block, &rse.pre);
1241
1242 if (aop == OMP_ATOMIC)
1243 {
1244 x = build2_v (OMP_ATOMIC, lhsaddr, convert (type, x));
1245 gfc_add_expr_to_block (&block, x);
1246 }
1247 else
1248 {
1249 if (aop == OMP_ATOMIC_CAPTURE_NEW)
1250 {
1251 code = code->next;
1252 expr2 = code->expr2;
1253 if (expr2->expr_type == EXPR_FUNCTION
1254 && expr2->value.function.isym->id == GFC_ISYM_CONVERSION)
1255 expr2 = expr2->value.function.actual->expr;
1256
1257 gcc_assert (expr2->expr_type == EXPR_VARIABLE);
1258 gfc_conv_expr (&vse, code->expr1);
1259 gfc_add_block_to_block (&block, &vse.pre);
1260
1261 gfc_init_se (&lse, NULL);
1262 gfc_conv_expr (&lse, expr2);
1263 gfc_add_block_to_block (&block, &lse.pre);
1264 }
1265 x = build2 (aop, type, lhsaddr, convert (type, x));
1266 x = convert (TREE_TYPE (vse.expr), x);
1267 gfc_add_modify (&block, vse.expr, x);
1268 }
1269
1270 return gfc_finish_block (&block);
1271 }
1272
1273 static tree
1274 gfc_trans_omp_barrier (void)
1275 {
1276 tree decl = built_in_decls [BUILT_IN_GOMP_BARRIER];
1277 return build_call_expr_loc (input_location, decl, 0);
1278 }
1279
1280 static tree
1281 gfc_trans_omp_critical (gfc_code *code)
1282 {
1283 tree name = NULL_TREE, stmt;
1284 if (code->ext.omp_name != NULL)
1285 name = get_identifier (code->ext.omp_name);
1286 stmt = gfc_trans_code (code->block->next);
1287 return build2_loc (input_location, OMP_CRITICAL, void_type_node, stmt, name);
1288 }
1289
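/* Pairs a DO variable with its computed initial value. The assignments
   are deferred and emitted at the top of the loop body, which is needed
   when the loop iterates over a separate count variable. */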
1290 typedef struct dovar_init_d {
1291 tree var;
1292 tree init;
1293 } dovar_init;
1294
1295 DEF_VEC_O(dovar_init);
1296 DEF_VEC_ALLOC_O(dovar_init,heap);
1297
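/* Translate !$OMP DO, also used for the loop of combined constructs.
   One element of the OMP_FOR init/cond/incr vectors is filled per
   collapsed loop; loops whose step is not 1 or -1 are rewritten to
   iterate over a new count variable, with the original DO variable
   assigned at the top of the body. */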
1298 static tree
1299 gfc_trans_omp_do (gfc_code *code, stmtblock_t *pblock,
1300 gfc_omp_clauses *do_clauses, tree par_clauses)
1301 {
1302 gfc_se se;
1303 tree dovar, stmt, from, to, step, type, init, cond, incr;
1304 tree count = NULL_TREE, cycle_label, tmp, omp_clauses;
1305 stmtblock_t block;
1306 stmtblock_t body;
1307 gfc_omp_clauses *clauses = code->ext.omp_clauses;
1308 int i, collapse = clauses->collapse;
1309 VEC(dovar_init,heap) *inits = NULL;
1310 dovar_init *di;
1311 unsigned ix;
1312
1313 if (collapse <= 0)
1314 collapse = 1;
1315
1316 code = code->block->next;
1317 gcc_assert (code->op == EXEC_DO);
1318
1319 init = make_tree_vec (collapse);
1320 cond = make_tree_vec (collapse);
1321 incr = make_tree_vec (collapse);
1322
1323 if (pblock == NULL)
1324 {
1325 gfc_start_block (&block);
1326 pblock = &block;
1327 }
1328
1329 omp_clauses = gfc_trans_omp_clauses (pblock, do_clauses, code->loc);
1330
1331 for (i = 0; i < collapse; i++)
1332 {
1333 int simple = 0;
1334 int dovar_found = 0;
1335 tree dovar_decl;
1336
1337 if (clauses)
1338 {
1339 gfc_namelist *n;
1340 for (n = clauses->lists[OMP_LIST_LASTPRIVATE]; n != NULL;
1341 n = n->next)
1342 if (code->ext.iterator->var->symtree->n.sym == n->sym)
1343 break;
1344 if (n != NULL)
1345 dovar_found = 1;
1346 else if (n == NULL)
1347 for (n = clauses->lists[OMP_LIST_PRIVATE]; n != NULL; n = n->next)
1348 if (code->ext.iterator->var->symtree->n.sym == n->sym)
1349 break;
1350 if (n != NULL)
1351 dovar_found++;
1352 }
1353
1354 /* Evaluate all the expressions in the iterator. */
1355 gfc_init_se (&se, NULL);
1356 gfc_conv_expr_lhs (&se, code->ext.iterator->var);
1357 gfc_add_block_to_block (pblock, &se.pre);
1358 dovar = se.expr;
1359 type = TREE_TYPE (dovar);
1360 gcc_assert (TREE_CODE (type) == INTEGER_TYPE);
1361
1362 gfc_init_se (&se, NULL);
1363 gfc_conv_expr_val (&se, code->ext.iterator->start);
1364 gfc_add_block_to_block (pblock, &se.pre);
1365 from = gfc_evaluate_now (se.expr, pblock);
1366
1367 gfc_init_se (&se, NULL);
1368 gfc_conv_expr_val (&se, code->ext.iterator->end);
1369 gfc_add_block_to_block (pblock, &se.pre);
1370 to = gfc_evaluate_now (se.expr, pblock);
1371
1372 gfc_init_se (&se, NULL);
1373 gfc_conv_expr_val (&se, code->ext.iterator->step);
1374 gfc_add_block_to_block (pblock, &se.pre);
1375 step = gfc_evaluate_now (se.expr, pblock);
1376 dovar_decl = dovar;
1377
1378 /* Special case simple loops. */
1379 if (TREE_CODE (dovar) == VAR_DECL)
1380 {
1381 if (integer_onep (step))
1382 simple = 1;
1383 else if (tree_int_cst_equal (step, integer_minus_one_node))
1384 simple = -1;
1385 }
1386 else
1387 dovar_decl
1388 = gfc_trans_omp_variable (code->ext.iterator->var->symtree->n.sym);
1389
1390 /* Loop body. */
1391 if (simple)
1392 {
1393 TREE_VEC_ELT (init, i) = build2_v (MODIFY_EXPR, dovar, from);
1394 /* The condition should not be folded. */
1395 TREE_VEC_ELT (cond, i) = build2_loc (input_location, simple > 0
1396 ? LE_EXPR : GE_EXPR,
1397 boolean_type_node, dovar, to);
1398 TREE_VEC_ELT (incr, i) = fold_build2_loc (input_location, PLUS_EXPR,
1399 type, dovar, step);
1400 TREE_VEC_ELT (incr, i) = fold_build2_loc (input_location,
1401 MODIFY_EXPR,
1402 type, dovar,
1403 TREE_VEC_ELT (incr, i));
1404 }
1405 else
1406 {
1407 /* STEP is not 1 or -1. Use:
1408 for (count = 0; count < (to + step - from) / step; count++)
1409 {
1410 dovar = from + count * step;
1411 body;
1412 cycle_label:;
1413 } */
1414 tmp = fold_build2_loc (input_location, MINUS_EXPR, type, step, from);
1415 tmp = fold_build2_loc (input_location, PLUS_EXPR, type, to, tmp);
1416 tmp = fold_build2_loc (input_location, TRUNC_DIV_EXPR, type, tmp,
1417 step);
1418 tmp = gfc_evaluate_now (tmp, pblock);
1419 count = gfc_create_var (type, "count");
1420 TREE_VEC_ELT (init, i) = build2_v (MODIFY_EXPR, count,
1421 build_int_cst (type, 0));
1422 /* The condition should not be folded. */
1423 TREE_VEC_ELT (cond, i) = build2_loc (input_location, LT_EXPR,
1424 boolean_type_node,
1425 count, tmp);
1426 TREE_VEC_ELT (incr, i) = fold_build2_loc (input_location, PLUS_EXPR,
1427 type, count,
1428 build_int_cst (type, 1));
1429 TREE_VEC_ELT (incr, i) = fold_build2_loc (input_location,
1430 MODIFY_EXPR, type, count,
1431 TREE_VEC_ELT (incr, i));
1432
1433 /* Initialize DOVAR. */
1434 tmp = fold_build2_loc (input_location, MULT_EXPR, type, count, step);
1435 tmp = fold_build2_loc (input_location, PLUS_EXPR, type, from, tmp);
1436 di = VEC_safe_push (dovar_init, heap, inits, NULL);
1437 di->var = dovar;
1438 di->init = tmp;
1439 }
1440
1441 if (!dovar_found)
1442 {
1443 tmp = build_omp_clause (input_location, OMP_CLAUSE_PRIVATE);
1444 OMP_CLAUSE_DECL (tmp) = dovar_decl;
1445 omp_clauses = gfc_trans_add_clause (tmp, omp_clauses);
1446 }
1447 else if (dovar_found == 2)
1448 {
1449 tree c = NULL;
1450
1451 tmp = NULL;
1452 if (!simple)
1453 {
1454 /* If dovar is lastprivate, but a different counter is used,
1455 dovar += step needs to be added to
1456 OMP_CLAUSE_LASTPRIVATE_STMT; otherwise the copied dovar
1457 will have the value on entry to the last iteration, rather
1458 than the value after the iterator increment. */
1459 tmp = gfc_evaluate_now (step, pblock);
1460 tmp = fold_build2_loc (input_location, PLUS_EXPR, type, dovar,
1461 tmp);
1462 tmp = fold_build2_loc (input_location, MODIFY_EXPR, type,
1463 dovar, tmp);
1464 for (c = omp_clauses; c ; c = OMP_CLAUSE_CHAIN (c))
1465 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
1466 && OMP_CLAUSE_DECL (c) == dovar_decl)
1467 {
1468 OMP_CLAUSE_LASTPRIVATE_STMT (c) = tmp;
1469 break;
1470 }
1471 }
1472 if (c == NULL && par_clauses != NULL)
1473 {
1474 for (c = par_clauses; c ; c = OMP_CLAUSE_CHAIN (c))
1475 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
1476 && OMP_CLAUSE_DECL (c) == dovar_decl)
1477 {
1478 tree l = build_omp_clause (input_location,
1479 OMP_CLAUSE_LASTPRIVATE);
1480 OMP_CLAUSE_DECL (l) = dovar_decl;
1481 OMP_CLAUSE_CHAIN (l) = omp_clauses;
1482 OMP_CLAUSE_LASTPRIVATE_STMT (l) = tmp;
1483 omp_clauses = l;
1484 OMP_CLAUSE_SET_CODE (c, OMP_CLAUSE_SHARED);
1485 break;
1486 }
1487 }
1488 gcc_assert (simple || c != NULL);
1489 }
1490 if (!simple)
1491 {
1492 tmp = build_omp_clause (input_location, OMP_CLAUSE_PRIVATE);
1493 OMP_CLAUSE_DECL (tmp) = count;
1494 omp_clauses = gfc_trans_add_clause (tmp, omp_clauses);
1495 }
1496
1497 if (i + 1 < collapse)
1498 code = code->block->next;
1499 }
1500
1501 if (pblock != &block)
1502 {
1503 pushlevel (0);
1504 gfc_start_block (&block);
1505 }
1506
1507 gfc_start_block (&body);
1508
1509 FOR_EACH_VEC_ELT (dovar_init, inits, ix, di)
1510 gfc_add_modify (&body, di->var, di->init);
1511 VEC_free (dovar_init, heap, inits);
1512
1513 /* The CYCLE statement is implemented with a goto. An EXIT statement must
1514 not be present for this loop. */
1515 cycle_label = gfc_build_label_decl (NULL_TREE);
1516
1517 /* Put these labels where they can be found later. */
1518
1519 code->cycle_label = cycle_label;
1520 code->exit_label = NULL_TREE;
1521
1522 /* Main loop body. */
1523 tmp = gfc_trans_omp_code (code->block->next, true);
1524 gfc_add_expr_to_block (&body, tmp);
1525
1526 /* Label for cycle statements (if needed). */
1527 if (TREE_USED (cycle_label))
1528 {
1529 tmp = build1_v (LABEL_EXPR, cycle_label);
1530 gfc_add_expr_to_block (&body, tmp);
1531 }
1532
1533 /* End of loop body. */
1534 stmt = make_node (OMP_FOR);
1535
1536 TREE_TYPE (stmt) = void_type_node;
1537 OMP_FOR_BODY (stmt) = gfc_finish_block (&body);
1538 OMP_FOR_CLAUSES (stmt) = omp_clauses;
1539 OMP_FOR_INIT (stmt) = init;
1540 OMP_FOR_COND (stmt) = cond;
1541 OMP_FOR_INCR (stmt) = incr;
1542 gfc_add_expr_to_block (&block, stmt);
1543
1544 return gfc_finish_block (&block);
1545 }
1546
1547 static tree
1548 gfc_trans_omp_flush (void)
1549 {
1550 tree decl = built_in_decls [BUILT_IN_SYNC_SYNCHRONIZE];
1551 return build_call_expr_loc (input_location, decl, 0);
1552 }
1553
1554 static tree
1555 gfc_trans_omp_master (gfc_code *code)
1556 {
1557 tree stmt = gfc_trans_code (code->block->next);
1558 if (IS_EMPTY_STMT (stmt))
1559 return stmt;
1560 return build1_v (OMP_MASTER, stmt);
1561 }
1562
1563 static tree
1564 gfc_trans_omp_ordered (gfc_code *code)
1565 {
1566 return build1_v (OMP_ORDERED, gfc_trans_code (code->block->next));
1567 }
1568
1569 static tree
1570 gfc_trans_omp_parallel (gfc_code *code)
1571 {
1572 stmtblock_t block;
1573 tree stmt, omp_clauses;
1574
1575 gfc_start_block (&block);
1576 omp_clauses = gfc_trans_omp_clauses (&block, code->ext.omp_clauses,
1577 code->loc);
1578 stmt = gfc_trans_omp_code (code->block->next, true);
1579 stmt = build2_loc (input_location, OMP_PARALLEL, void_type_node, stmt,
1580 omp_clauses);
1581 gfc_add_expr_to_block (&block, stmt);
1582 return gfc_finish_block (&block);
1583 }
1584
1585 static tree
1586 gfc_trans_omp_parallel_do (gfc_code *code)
1587 {
1588 stmtblock_t block, *pblock = NULL;
1589 gfc_omp_clauses parallel_clauses, do_clauses;
1590 tree stmt, omp_clauses = NULL_TREE;
1591
1592 gfc_start_block (&block);
1593
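/* Split the clauses between the two constructs: scheduling, chunk size,
   ORDERED and COLLAPSE go on the DO, everything else stays on the
   enclosing PARALLEL. */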
1594 memset (&do_clauses, 0, sizeof (do_clauses));
1595 if (code->ext.omp_clauses != NULL)
1596 {
1597 memcpy (&parallel_clauses, code->ext.omp_clauses,
1598 sizeof (parallel_clauses));
1599 do_clauses.sched_kind = parallel_clauses.sched_kind;
1600 do_clauses.chunk_size = parallel_clauses.chunk_size;
1601 do_clauses.ordered = parallel_clauses.ordered;
1602 do_clauses.collapse = parallel_clauses.collapse;
1603 parallel_clauses.sched_kind = OMP_SCHED_NONE;
1604 parallel_clauses.chunk_size = NULL;
1605 parallel_clauses.ordered = false;
1606 parallel_clauses.collapse = 0;
1607 omp_clauses = gfc_trans_omp_clauses (&block, &parallel_clauses,
1608 code->loc);
1609 }
1610 do_clauses.nowait = true;
1611 if (!do_clauses.ordered && do_clauses.sched_kind != OMP_SCHED_STATIC)
1612 pblock = &block;
1613 else
1614 pushlevel (0);
1615 stmt = gfc_trans_omp_do (code, pblock, &do_clauses, omp_clauses);
1616 if (TREE_CODE (stmt) != BIND_EXPR)
1617 stmt = build3_v (BIND_EXPR, NULL, stmt, poplevel (1, 0, 0));
1618 else
1619 poplevel (0, 0, 0);
1620 stmt = build2_loc (input_location, OMP_PARALLEL, void_type_node, stmt,
1621 omp_clauses);
1622 OMP_PARALLEL_COMBINED (stmt) = 1;
1623 gfc_add_expr_to_block (&block, stmt);
1624 return gfc_finish_block (&block);
1625 }
1626
1627 static tree
1628 gfc_trans_omp_parallel_sections (gfc_code *code)
1629 {
1630 stmtblock_t block;
1631 gfc_omp_clauses section_clauses;
1632 tree stmt, omp_clauses;
1633
1634 memset (&section_clauses, 0, sizeof (section_clauses));
1635 section_clauses.nowait = true;
1636
1637 gfc_start_block (&block);
1638 omp_clauses = gfc_trans_omp_clauses (&block, code->ext.omp_clauses,
1639 code->loc);
1640 pushlevel (0);
1641 stmt = gfc_trans_omp_sections (code, &section_clauses);
1642 if (TREE_CODE (stmt) != BIND_EXPR)
1643 stmt = build3_v (BIND_EXPR, NULL, stmt, poplevel (1, 0, 0));
1644 else
1645 poplevel (0, 0, 0);
1646 stmt = build2_loc (input_location, OMP_PARALLEL, void_type_node, stmt,
1647 omp_clauses);
1648 OMP_PARALLEL_COMBINED (stmt) = 1;
1649 gfc_add_expr_to_block (&block, stmt);
1650 return gfc_finish_block (&block);
1651 }
1652
1653 static tree
1654 gfc_trans_omp_parallel_workshare (gfc_code *code)
1655 {
1656 stmtblock_t block;
1657 gfc_omp_clauses workshare_clauses;
1658 tree stmt, omp_clauses;
1659
1660 memset (&workshare_clauses, 0, sizeof (workshare_clauses));
1661 workshare_clauses.nowait = true;
1662
1663 gfc_start_block (&block);
1664 omp_clauses = gfc_trans_omp_clauses (&block, code->ext.omp_clauses,
1665 code->loc);
1666 pushlevel (0);
1667 stmt = gfc_trans_omp_workshare (code, &workshare_clauses);
1668 if (TREE_CODE (stmt) != BIND_EXPR)
1669 stmt = build3_v (BIND_EXPR, NULL, stmt, poplevel (1, 0, 0));
1670 else
1671 poplevel (0, 0, 0);
1672 stmt = build2_loc (input_location, OMP_PARALLEL, void_type_node, stmt,
1673 omp_clauses);
1674 OMP_PARALLEL_COMBINED (stmt) = 1;
1675 gfc_add_expr_to_block (&block, stmt);
1676 return gfc_finish_block (&block);
1677 }
1678
1679 static tree
1680 gfc_trans_omp_sections (gfc_code *code, gfc_omp_clauses *clauses)
1681 {
1682 stmtblock_t block, body;
1683 tree omp_clauses, stmt;
1684 bool has_lastprivate = clauses->lists[OMP_LIST_LASTPRIVATE] != NULL;
1685
1686 gfc_start_block (&block);
1687
1688 omp_clauses = gfc_trans_omp_clauses (&block, clauses, code->loc);
1689
1690 gfc_init_block (&body);
1691 for (code = code->block; code; code = code->block)
1692 {
1693 /* Last section is special because of lastprivate, so even if it
1694 is empty, chain it in. */
1695 stmt = gfc_trans_omp_code (code->next,
1696 has_lastprivate && code->block == NULL);
1697 if (! IS_EMPTY_STMT (stmt))
1698 {
1699 stmt = build1_v (OMP_SECTION, stmt);
1700 gfc_add_expr_to_block (&body, stmt);
1701 }
1702 }
1703 stmt = gfc_finish_block (&body);
1704
1705 stmt = build2_loc (input_location, OMP_SECTIONS, void_type_node, stmt,
1706 omp_clauses);
1707 gfc_add_expr_to_block (&block, stmt);
1708
1709 return gfc_finish_block (&block);
1710 }
1711
1712 static tree
1713 gfc_trans_omp_single (gfc_code *code, gfc_omp_clauses *clauses)
1714 {
1715 tree omp_clauses = gfc_trans_omp_clauses (NULL, clauses, code->loc);
1716 tree stmt = gfc_trans_omp_code (code->block->next, true);
1717 stmt = build2_loc (input_location, OMP_SINGLE, void_type_node, stmt,
1718 omp_clauses);
1719 return stmt;
1720 }
1721
1722 static tree
1723 gfc_trans_omp_task (gfc_code *code)
1724 {
1725 stmtblock_t block;
1726 tree stmt, omp_clauses;
1727
1728 gfc_start_block (&block);
1729 omp_clauses = gfc_trans_omp_clauses (&block, code->ext.omp_clauses,
1730 code->loc);
1731 stmt = gfc_trans_omp_code (code->block->next, true);
1732 stmt = build2_loc (input_location, OMP_TASK, void_type_node, stmt,
1733 omp_clauses);
1734 gfc_add_expr_to_block (&block, stmt);
1735 return gfc_finish_block (&block);
1736 }
1737
1738 static tree
1739 gfc_trans_omp_taskwait (void)
1740 {
1741 tree decl = built_in_decls [BUILT_IN_GOMP_TASKWAIT];
1742 return build_call_expr_loc (input_location, decl, 0);
1743 }
1744
1745 static tree
1746 gfc_trans_omp_taskyield (void)
1747 {
1748 tree decl = built_in_decls [BUILT_IN_GOMP_TASKYIELD];
1749 return build_call_expr_loc (input_location, decl, 0);
1750 }
1751
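/* Translate !$OMP WORKSHARE. Statements the scalarizer may be able to
   divide among threads (assignments, WHERE, FORALL) are translated with
   OMPWS_WORKSHARE_FLAG set; any statement that remains a single unit of
   work is wrapped in an OMP_SINGLE construct. */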
1752 static tree
1753 gfc_trans_omp_workshare (gfc_code *code, gfc_omp_clauses *clauses)
1754 {
1755 tree res, tmp, stmt;
1756 stmtblock_t block, *pblock = NULL;
1757 stmtblock_t singleblock;
1758 int saved_ompws_flags;
1759 bool singleblock_in_progress = false;
1760 /* True if the previous gfc_code in the workshare construct was not workshared. */
1761 bool prev_singleunit;
1762
1763 code = code->block->next;
1764
1765 pushlevel (0);
1766
1767 gfc_start_block (&block);
1768 pblock = &block;
1769
1770 ompws_flags = OMPWS_WORKSHARE_FLAG;
1771 prev_singleunit = false;
1772
1773 /* Translate statements one by one to trees until we reach
1774 the end of the workshare construct. Adjacent gfc_codes that
1775 are a single unit of work are clustered and encapsulated in a
1776 single OMP_SINGLE construct. */
1777 for (; code; code = code->next)
1778 {
1779 if (code->here != 0)
1780 {
1781 res = gfc_trans_label_here (code);
1782 gfc_add_expr_to_block (pblock, res);
1783 }
1784
1785 /* No dependence analysis is done, so generated loops must wait; only the
1786 last gfc_code may use NOWAIT, and only if the construct itself has it. */
1787 if (code->next == NULL && clauses->nowait)
1788 ompws_flags |= OMPWS_NOWAIT;
1789
1790 /* By default, every gfc_code is a single unit of work. */
1791 ompws_flags |= OMPWS_CURR_SINGLEUNIT;
1792 ompws_flags &= ~OMPWS_SCALARIZER_WS;
1793
1794 switch (code->op)
1795 {
1796 case EXEC_NOP:
1797 res = NULL_TREE;
1798 break;
1799
1800 case EXEC_ASSIGN:
1801 res = gfc_trans_assign (code);
1802 break;
1803
1804 case EXEC_POINTER_ASSIGN:
1805 res = gfc_trans_pointer_assign (code);
1806 break;
1807
1808 case EXEC_INIT_ASSIGN:
1809 res = gfc_trans_init_assign (code);
1810 break;
1811
1812 case EXEC_FORALL:
1813 res = gfc_trans_forall (code);
1814 break;
1815
1816 case EXEC_WHERE:
1817 res = gfc_trans_where (code);
1818 break;
1819
1820 case EXEC_OMP_ATOMIC:
1821 res = gfc_trans_omp_directive (code);
1822 break;
1823
1824 case EXEC_OMP_PARALLEL:
1825 case EXEC_OMP_PARALLEL_DO:
1826 case EXEC_OMP_PARALLEL_SECTIONS:
1827 case EXEC_OMP_PARALLEL_WORKSHARE:
1828 case EXEC_OMP_CRITICAL:
1829 saved_ompws_flags = ompws_flags;
1830 ompws_flags = 0;
1831 res = gfc_trans_omp_directive (code);
1832 ompws_flags = saved_ompws_flags;
1833 break;
1834
1835 default:
1836 internal_error ("gfc_trans_omp_workshare(): Bad statement code");
1837 }
1838
1839 gfc_set_backend_locus (&code->loc);
1840
1841 if (res != NULL_TREE && ! IS_EMPTY_STMT (res))
1842 {
1843 if (prev_singleunit)
1844 {
1845 if (ompws_flags & OMPWS_CURR_SINGLEUNIT)
1846 /* Add current gfc_code to single block. */
1847 gfc_add_expr_to_block (&singleblock, res);
1848 else
1849 {
1850 /* Finish single block and add it to pblock. */
1851 tmp = gfc_finish_block (&singleblock);
1852 tmp = build2_loc (input_location, OMP_SINGLE,
1853 void_type_node, tmp, NULL_TREE);
1854 gfc_add_expr_to_block (pblock, tmp);
1855 /* Add current gfc_code to pblock. */
1856 gfc_add_expr_to_block (pblock, res);
1857 singleblock_in_progress = false;
1858 }
1859 }
1860 else
1861 {
1862 if (ompws_flags & OMPWS_CURR_SINGLEUNIT)
1863 {
1864 /* Start single block. */
1865 gfc_init_block (&singleblock);
1866 gfc_add_expr_to_block (&singleblock, res);
1867 singleblock_in_progress = true;
1868 }
1869 else
1870 /* Add the new statement to the block. */
1871 gfc_add_expr_to_block (pblock, res);
1872 }
1873 prev_singleunit = (ompws_flags & OMPWS_CURR_SINGLEUNIT) != 0;
1874 }
1875 }
1876
1877 /* Finish remaining SINGLE block, if we were in the middle of one. */
1878 if (singleblock_in_progress)
1879 {
1880 /* Finish single block and add it to pblock. */
1881 tmp = gfc_finish_block (&singleblock);
1882 tmp = build2_loc (input_location, OMP_SINGLE, void_type_node, tmp,
1883 clauses->nowait
1884 ? build_omp_clause (input_location, OMP_CLAUSE_NOWAIT)
1885 : NULL_TREE);
1886 gfc_add_expr_to_block (pblock, tmp);
1887 }
1888
1889 stmt = gfc_finish_block (pblock);
1890 if (TREE_CODE (stmt) != BIND_EXPR)
1891 {
1892 if (!IS_EMPTY_STMT (stmt))
1893 {
1894 tree bindblock = poplevel (1, 0, 0);
1895 stmt = build3_v (BIND_EXPR, NULL, stmt, bindblock);
1896 }
1897 else
1898 poplevel (0, 0, 0);
1899 }
1900 else
1901 poplevel (0, 0, 0);
1902
1903 if (IS_EMPTY_STMT (stmt) && !clauses->nowait)
1904 stmt = gfc_trans_omp_barrier ();
1905
1906 ompws_flags = 0;
1907 return stmt;
1908 }
1909
1910 tree
1911 gfc_trans_omp_directive (gfc_code *code)
1912 {
1913 switch (code->op)
1914 {
1915 case EXEC_OMP_ATOMIC:
1916 return gfc_trans_omp_atomic (code);
1917 case EXEC_OMP_BARRIER:
1918 return gfc_trans_omp_barrier ();
1919 case EXEC_OMP_CRITICAL:
1920 return gfc_trans_omp_critical (code);
1921 case EXEC_OMP_DO:
1922 return gfc_trans_omp_do (code, NULL, code->ext.omp_clauses, NULL);
1923 case EXEC_OMP_FLUSH:
1924 return gfc_trans_omp_flush ();
1925 case EXEC_OMP_MASTER:
1926 return gfc_trans_omp_master (code);
1927 case EXEC_OMP_ORDERED:
1928 return gfc_trans_omp_ordered (code);
1929 case EXEC_OMP_PARALLEL:
1930 return gfc_trans_omp_parallel (code);
1931 case EXEC_OMP_PARALLEL_DO:
1932 return gfc_trans_omp_parallel_do (code);
1933 case EXEC_OMP_PARALLEL_SECTIONS:
1934 return gfc_trans_omp_parallel_sections (code);
1935 case EXEC_OMP_PARALLEL_WORKSHARE:
1936 return gfc_trans_omp_parallel_workshare (code);
1937 case EXEC_OMP_SECTIONS:
1938 return gfc_trans_omp_sections (code, code->ext.omp_clauses);
1939 case EXEC_OMP_SINGLE:
1940 return gfc_trans_omp_single (code, code->ext.omp_clauses);
1941 case EXEC_OMP_TASK:
1942 return gfc_trans_omp_task (code);
1943 case EXEC_OMP_TASKWAIT:
1944 return gfc_trans_omp_taskwait ();
1945 case EXEC_OMP_TASKYIELD:
1946 return gfc_trans_omp_taskyield ();
1947 case EXEC_OMP_WORKSHARE:
1948 return gfc_trans_omp_workshare (code, code->ext.omp_clauses);
1949 default:
1950 gcc_unreachable ();
1951 }
1952 }