1 /* Statement translation -- generate GCC trees from gfc_code.
2 Copyright (C) 2002-2015 Free Software Foundation, Inc.
3 Contributed by Paul Brook <paul@nowt.org>
4 and Steven Bosscher <s.bosscher@student.tudelft.nl>
5
6 This file is part of GCC.
7
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 3, or (at your option) any later
11 version.
12
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
16 for more details.
17
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
21
22
23 #include "config.h"
24 #include "system.h"
25 #include "coretypes.h"
26 #include "hash-set.h"
27 #include "machmode.h"
28 #include "vec.h"
29 #include "double-int.h"
30 #include "input.h"
31 #include "alias.h"
32 #include "symtab.h"
33 #include "options.h"
34 #include "wide-int.h"
35 #include "inchash.h"
36 #include "tree.h"
37 #include "fold-const.h"
38 #include "stringpool.h"
39 #include "gfortran.h"
40 #include "flags.h"
41 #include "trans.h"
42 #include "trans-stmt.h"
43 #include "trans-types.h"
44 #include "trans-array.h"
45 #include "trans-const.h"
46 #include "arith.h"
47 #include "dependency.h"
48 #include "ggc.h"
49
50 typedef struct iter_info
51 {
52 tree var;
53 tree start;
54 tree end;
55 tree step;
56 struct iter_info *next;
57 }
58 iter_info;
59
60 typedef struct forall_info
61 {
62 iter_info *this_loop;
63 tree mask;
64 tree maskindex;
65 int nvar;
66 tree size;
67 struct forall_info *prev_nest;
68 bool do_concurrent;
69 }
70 forall_info;
71
72 static void gfc_trans_where_2 (gfc_code *, tree, bool,
73 forall_info *, stmtblock_t *);
74
75 /* Translate an F95 label number to a LABEL_EXPR. */
76
77 tree
78 gfc_trans_label_here (gfc_code * code)
79 {
80 return build1_v (LABEL_EXPR, gfc_get_label_decl (code->here));
81 }
82
83
84 /* Given a variable expression which has been ASSIGNed to, find the decl
85 containing the auxiliary variables. For variables in common blocks this
86 is a field_decl. */
87
88 void
89 gfc_conv_label_variable (gfc_se * se, gfc_expr * expr)
90 {
91 gcc_assert (expr->symtree->n.sym->attr.assign == 1);
92 gfc_conv_expr (se, expr);
93 /* Deals with variable in common block. Get the field declaration. */
94 if (TREE_CODE (se->expr) == COMPONENT_REF)
95 se->expr = TREE_OPERAND (se->expr, 1);
96 /* Deals with dummy argument. Get the parameter declaration. */
97 else if (TREE_CODE (se->expr) == INDIRECT_REF)
98 se->expr = TREE_OPERAND (se->expr, 0);
99 }
100
101 /* Translate a label assignment statement. */
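/* For illustration (a sketch; the names are made up):

       ASSIGN 100 TO k

   stores the address of label 100 in k's auxiliary address variable and
   sets the auxiliary length to -1 when 100 labels a branch target, or
   stores the FORMAT string and its length when 100 labels a FORMAT
   statement.  */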
102
103 tree
104 gfc_trans_label_assign (gfc_code * code)
105 {
106 tree label_tree;
107 gfc_se se;
108 tree len;
109 tree addr;
110 tree len_tree;
111 int label_len;
112
113 /* Start a new block. */
114 gfc_init_se (&se, NULL);
115 gfc_start_block (&se.pre);
116 gfc_conv_label_variable (&se, code->expr1);
117
118 len = GFC_DECL_STRING_LEN (se.expr);
119 addr = GFC_DECL_ASSIGN_ADDR (se.expr);
120
121 label_tree = gfc_get_label_decl (code->label1);
122
123 if (code->label1->defined == ST_LABEL_TARGET
124 || code->label1->defined == ST_LABEL_DO_TARGET)
125 {
126 label_tree = gfc_build_addr_expr (pvoid_type_node, label_tree);
127 len_tree = integer_minus_one_node;
128 }
129 else
130 {
131 gfc_expr *format = code->label1->format;
132
133 label_len = format->value.character.length;
134 len_tree = build_int_cst (gfc_charlen_type_node, label_len);
135 label_tree = gfc_build_wide_string_const (format->ts.kind, label_len + 1,
136 format->value.character.string);
137 label_tree = gfc_build_addr_expr (pvoid_type_node, label_tree);
138 }
139
140 gfc_add_modify (&se.pre, len, len_tree);
141 gfc_add_modify (&se.pre, addr, label_tree);
142
143 return gfc_finish_block (&se.pre);
144 }
145
146 /* Translate a GOTO statement. */
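/* For example (illustrative):

       ASSIGN 10 TO k
       GO TO k (10, 20)

   becomes a runtime check that k currently holds a target label (the
   auxiliary length is -1) followed by a computed goto to the stored
   address; the optional label list is ignored, as explained below.  */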
147
148 tree
149 gfc_trans_goto (gfc_code * code)
150 {
151 locus loc = code->loc;
152 tree assigned_goto;
153 tree target;
154 tree tmp;
155 gfc_se se;
156
157 if (code->label1 != NULL)
158 return build1_v (GOTO_EXPR, gfc_get_label_decl (code->label1));
159
160 /* ASSIGNED GOTO. */
161 gfc_init_se (&se, NULL);
162 gfc_start_block (&se.pre);
163 gfc_conv_label_variable (&se, code->expr1);
164 tmp = GFC_DECL_STRING_LEN (se.expr);
165 tmp = fold_build2_loc (input_location, NE_EXPR, boolean_type_node, tmp,
166 build_int_cst (TREE_TYPE (tmp), -1));
167 gfc_trans_runtime_check (true, false, tmp, &se.pre, &loc,
168 "Assigned label is not a target label");
169
170 assigned_goto = GFC_DECL_ASSIGN_ADDR (se.expr);
171
172 /* We're going to ignore a label list. It does not really change the
173 statement's semantics (because it is just a further restriction on
174 what's legal code); before, we were comparing label addresses here, but
175 that's a very fragile business and may break with optimization. So
176 just ignore it. */
177
178 target = fold_build1_loc (input_location, GOTO_EXPR, void_type_node,
179 assigned_goto);
180 gfc_add_expr_to_block (&se.pre, target);
181 return gfc_finish_block (&se.pre);
182 }
183
184
185 /* Translate an ENTRY statement. Just adds a label for this entry point. */
186 tree
187 gfc_trans_entry (gfc_code * code)
188 {
189 return build1_v (LABEL_EXPR, code->ext.entry->label);
190 }
191
192
193 /* Replace a gfc_ss structure by another both in the gfc_se struct
194 and the gfc_loopinfo struct. This is used in gfc_conv_elemental_dependencies
195 to replace a variable ss by the corresponding temporary. */
196
197 static void
198 replace_ss (gfc_se *se, gfc_ss *old_ss, gfc_ss *new_ss)
199 {
200 gfc_ss **sess, **loopss;
201
202 /* The old_ss is a ss for a single variable. */
203 gcc_assert (old_ss->info->type == GFC_SS_SECTION);
204
205 for (sess = &(se->ss); *sess != gfc_ss_terminator; sess = &((*sess)->next))
206 if (*sess == old_ss)
207 break;
208 gcc_assert (*sess != gfc_ss_terminator);
209
210 *sess = new_ss;
211 new_ss->next = old_ss->next;
212
213
214 for (loopss = &(se->loop->ss); *loopss != gfc_ss_terminator;
215 loopss = &((*loopss)->loop_chain))
216 if (*loopss == old_ss)
217 break;
218 gcc_assert (*loopss != gfc_ss_terminator);
219
220 *loopss = new_ss;
221 new_ss->loop_chain = old_ss->loop_chain;
222 new_ss->loop = old_ss->loop;
223
224 gfc_free_ss (old_ss);
225 }
226
227
228 /* Check for dependencies between INTENT(IN) and INTENT(OUT) arguments of
229 elemental subroutines. Make temporaries for output arguments if any such
230 dependencies are found. Output arguments are chosen because internal_unpack
231 can be used, as is, to copy the result back to the variable. */
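/* For example (illustrative):

       ELEMENTAL SUBROUTINE set (out, in)
         INTEGER, INTENT(OUT) :: out
         INTEGER, INTENT(IN)  :: in
       END SUBROUTINE

       CALL set (a(2:6), a(1:5))

   Here the INTENT(OUT) section overlaps the INTENT(IN) section, so the
   result is written to an array temporary first and copied back to
   a(2:6) with internal_unpack once the scalarization loop is done.  */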
232 static void
233 gfc_conv_elemental_dependencies (gfc_se * se, gfc_se * loopse,
234 gfc_symbol * sym, gfc_actual_arglist * arg,
235 gfc_dep_check check_variable)
236 {
237 gfc_actual_arglist *arg0;
238 gfc_expr *e;
239 gfc_formal_arglist *formal;
240 gfc_se parmse;
241 gfc_ss *ss;
242 gfc_symbol *fsym;
243 tree data;
244 tree size;
245 tree tmp;
246
247 if (loopse->ss == NULL)
248 return;
249
250 ss = loopse->ss;
251 arg0 = arg;
252 formal = gfc_sym_get_dummy_args (sym);
253
254 /* Loop over all the arguments testing for dependencies. */
255 for (; arg != NULL; arg = arg->next, formal = formal ? formal->next : NULL)
256 {
257 e = arg->expr;
258 if (e == NULL)
259 continue;
260
261 /* Obtain the info structure for the current argument. */
262 for (ss = loopse->ss; ss && ss != gfc_ss_terminator; ss = ss->next)
263 if (ss->info->expr == e)
264 break;
265
266 /* If there is a dependency, create a temporary and use it
267 instead of the variable. */
268 fsym = formal ? formal->sym : NULL;
269 if (e->expr_type == EXPR_VARIABLE
270 && e->rank && fsym
271 && fsym->attr.intent != INTENT_IN
272 && gfc_check_fncall_dependency (e, fsym->attr.intent,
273 sym, arg0, check_variable))
274 {
275 tree initial, temptype;
276 stmtblock_t temp_post;
277 gfc_ss *tmp_ss;
278
279 tmp_ss = gfc_get_array_ss (gfc_ss_terminator, NULL, ss->dimen,
280 GFC_SS_SECTION);
281 gfc_mark_ss_chain_used (tmp_ss, 1);
282 tmp_ss->info->expr = ss->info->expr;
283 replace_ss (loopse, ss, tmp_ss);
284
285 /* Obtain the argument descriptor for unpacking. */
286 gfc_init_se (&parmse, NULL);
287 parmse.want_pointer = 1;
288 gfc_conv_expr_descriptor (&parmse, e);
289 gfc_add_block_to_block (&se->pre, &parmse.pre);
290
291 /* If we've got INTENT(INOUT) or a derived type with INTENT(OUT),
292 initialize the array temporary with a copy of the values. */
293 if (fsym->attr.intent == INTENT_INOUT
294 || (fsym->ts.type == BT_DERIVED
295 && fsym->attr.intent == INTENT_OUT))
296 initial = parmse.expr;
297 /* For class expressions, we always initialize with the copy of
298 the values. */
299 else if (e->ts.type == BT_CLASS)
300 initial = parmse.expr;
301 else
302 initial = NULL_TREE;
303
304 if (e->ts.type != BT_CLASS)
305 {
306 /* Find the type of the temporary to create; we don't use the type
307 of e itself as this breaks for subcomponent-references in e
308 (where the type of e is that of the final reference, but
309 parmse.expr's type corresponds to the full derived-type). */
310 /* TODO: Fix this somehow so we don't need a temporary of the whole
311 array but instead only the components referenced. */
312 temptype = TREE_TYPE (parmse.expr); /* Pointer to descriptor. */
313 gcc_assert (TREE_CODE (temptype) == POINTER_TYPE);
314 temptype = TREE_TYPE (temptype);
315 temptype = gfc_get_element_type (temptype);
316 }
317
318 else
319 /* For class arrays signal that the size of the dynamic type has to
320 be obtained from the vtable, using the 'initial' expression. */
321 temptype = NULL_TREE;
322
323 /* Generate the temporary. Cleaning up the temporary should be the
324 very last thing done, so we add the code to a new block and add it
325 to se->post as last instructions. */
326 size = gfc_create_var (gfc_array_index_type, NULL);
327 data = gfc_create_var (pvoid_type_node, NULL);
328 gfc_init_block (&temp_post);
329 tmp = gfc_trans_create_temp_array (&se->pre, &temp_post, tmp_ss,
330 temptype, initial, false, true,
331 false, &arg->expr->where);
332 gfc_add_modify (&se->pre, size, tmp);
333 tmp = fold_convert (pvoid_type_node, tmp_ss->info->data.array.data);
334 gfc_add_modify (&se->pre, data, tmp);
335
336 /* Update other ss' delta. */
337 gfc_set_delta (loopse->loop);
338
339 /* Copy the result back using unpack..... */
340 if (e->ts.type != BT_CLASS)
341 tmp = build_call_expr_loc (input_location,
342 gfor_fndecl_in_unpack, 2, parmse.expr, data);
343 else
344 {
345 /* ... except for class results where the copy is
346 unconditional. */
347 tmp = build_fold_indirect_ref_loc (input_location, parmse.expr);
348 tmp = gfc_conv_descriptor_data_get (tmp);
349 tmp = build_call_expr_loc (input_location,
350 builtin_decl_explicit (BUILT_IN_MEMCPY),
351 3, tmp, data,
352 fold_convert (size_type_node, size));
353 }
354 gfc_add_expr_to_block (&se->post, tmp);
355
356 /* parmse.pre is already added above. */
357 gfc_add_block_to_block (&se->post, &parmse.post);
358 gfc_add_block_to_block (&se->post, &temp_post);
359 }
360 }
361 }
362
363
364 /* Get the interface symbol for the procedure corresponding to the given call.
365 We can't get the procedure symbol directly as we have to handle the case
366 of (deferred) type-bound procedures. */
367
368 static gfc_symbol *
369 get_proc_ifc_for_call (gfc_code *c)
370 {
371 gfc_symbol *sym;
372
373 gcc_assert (c->op == EXEC_ASSIGN_CALL || c->op == EXEC_CALL);
374
375 sym = gfc_get_proc_ifc_for_expr (c->expr1);
376
377 /* Fall back to the resolved symbol as a last resort. */
378 if (sym == NULL)
379 sym = c->resolved_sym;
380
381 return sym;
382 }
383
384
385 /* Translate the CALL statement. Builds a call to an F95 subroutine. */
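/* For example (illustrative): a plain

       CALL sub (x, y)

   becomes a single call expression (with alternate returns turned into a
   SELECT on the returned index), whereas an elemental subroutine invoked
   with array arguments, e.g. CALL incr (a(1:n)), is scalarized into a
   loop over the array section.  */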
386
387 tree
388 gfc_trans_call (gfc_code * code, bool dependency_check,
389 tree mask, tree count1, bool invert)
390 {
391 gfc_se se;
392 gfc_ss * ss;
393 int has_alternate_specifier;
394 gfc_dep_check check_variable;
395 tree index = NULL_TREE;
396 tree maskexpr = NULL_TREE;
397 tree tmp;
398
399 /* A CALL starts a new block because the actual arguments may have to
400 be evaluated first. */
401 gfc_init_se (&se, NULL);
402 gfc_start_block (&se.pre);
403
404 gcc_assert (code->resolved_sym);
405
406 ss = gfc_ss_terminator;
407 if (code->resolved_sym->attr.elemental)
408 ss = gfc_walk_elemental_function_args (ss, code->ext.actual,
409 get_proc_ifc_for_call (code),
410 GFC_SS_REFERENCE);
411
412 /* This is not an elemental subroutine call with array valued arguments. */
413 if (ss == gfc_ss_terminator)
414 {
415
416 /* Translate the call. */
417 has_alternate_specifier
418 = gfc_conv_procedure_call (&se, code->resolved_sym, code->ext.actual,
419 code->expr1, NULL);
420
421 /* A subroutine without side-effects, by definition, does nothing!  Even so,
 mark the call as having side effects so that it is not optimized away. */
422 TREE_SIDE_EFFECTS (se.expr) = 1;
423
424 /* Chain the pieces together and return the block. */
425 if (has_alternate_specifier)
426 {
427 gfc_code *select_code;
428 gfc_symbol *sym;
429 select_code = code->next;
430 gcc_assert(select_code->op == EXEC_SELECT);
431 sym = select_code->expr1->symtree->n.sym;
432 se.expr = convert (gfc_typenode_for_spec (&sym->ts), se.expr);
433 if (sym->backend_decl == NULL)
434 sym->backend_decl = gfc_get_symbol_decl (sym);
435 gfc_add_modify (&se.pre, sym->backend_decl, se.expr);
436 }
437 else
438 gfc_add_expr_to_block (&se.pre, se.expr);
439
440 gfc_add_block_to_block (&se.pre, &se.post);
441 }
442
443 else
444 {
445 /* An elemental subroutine call with array valued arguments has
446 to be scalarized. */
447 gfc_loopinfo loop;
448 stmtblock_t body;
449 stmtblock_t block;
450 gfc_se loopse;
451 gfc_se depse;
452
453 /* gfc_walk_elemental_function_args renders the ss chain in the
454 reverse order to the actual argument order. */
455 ss = gfc_reverse_ss (ss);
456
457 /* Initialize the loop. */
458 gfc_init_se (&loopse, NULL);
459 gfc_init_loopinfo (&loop);
460 gfc_add_ss_to_loop (&loop, ss);
461
462 gfc_conv_ss_startstride (&loop);
463 /* TODO: gfc_conv_loop_setup generates a temporary for vector
464 subscripts. This could be prevented in the elemental case
465 as temporaries are handled separately
466 (below in gfc_conv_elemental_dependencies). */
467 gfc_conv_loop_setup (&loop, &code->expr1->where);
468 gfc_mark_ss_chain_used (ss, 1);
469
470 /* Convert the arguments, checking for dependencies. */
471 gfc_copy_loopinfo_to_se (&loopse, &loop);
472 loopse.ss = ss;
473
474 /* For operator assignment, do dependency checking. */
475 if (dependency_check)
476 check_variable = ELEM_CHECK_VARIABLE;
477 else
478 check_variable = ELEM_DONT_CHECK_VARIABLE;
479
480 gfc_init_se (&depse, NULL);
481 gfc_conv_elemental_dependencies (&depse, &loopse, code->resolved_sym,
482 code->ext.actual, check_variable);
483
484 gfc_add_block_to_block (&loop.pre, &depse.pre);
485 gfc_add_block_to_block (&loop.post, &depse.post);
486
487 /* Generate the loop body. */
488 gfc_start_scalarized_body (&loop, &body);
489 gfc_init_block (&block);
490
491 if (mask && count1)
492 {
493 /* Form the mask expression according to the mask. */
494 index = count1;
495 maskexpr = gfc_build_array_ref (mask, index, NULL);
496 if (invert)
497 maskexpr = fold_build1_loc (input_location, TRUTH_NOT_EXPR,
498 TREE_TYPE (maskexpr), maskexpr);
499 }
500
501 /* Add the subroutine call to the block. */
502 gfc_conv_procedure_call (&loopse, code->resolved_sym,
503 code->ext.actual, code->expr1,
504 NULL);
505
506 if (mask && count1)
507 {
508 tmp = build3_v (COND_EXPR, maskexpr, loopse.expr,
509 build_empty_stmt (input_location));
510 gfc_add_expr_to_block (&loopse.pre, tmp);
511 tmp = fold_build2_loc (input_location, PLUS_EXPR,
512 gfc_array_index_type,
513 count1, gfc_index_one_node);
514 gfc_add_modify (&loopse.pre, count1, tmp);
515 }
516 else
517 gfc_add_expr_to_block (&loopse.pre, loopse.expr);
518
519 gfc_add_block_to_block (&block, &loopse.pre);
520 gfc_add_block_to_block (&block, &loopse.post);
521
522 /* Finish up the loop block and the loop. */
523 gfc_add_expr_to_block (&body, gfc_finish_block (&block));
524 gfc_trans_scalarizing_loops (&loop, &body);
525 gfc_add_block_to_block (&se.pre, &loop.pre);
526 gfc_add_block_to_block (&se.pre, &loop.post);
527 gfc_add_block_to_block (&se.pre, &se.post);
528 gfc_cleanup_loop (&loop);
529 }
530
531 return gfc_finish_block (&se.pre);
532 }
533
534
535 /* Translate the RETURN statement. */
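/* For example (a sketch): in

       SUBROUTINE sub (*, *)
         RETURN 2
       END SUBROUTINE

   the expression after RETURN selects the alternate-return label the
   caller branches to; it is stored in the fake result variable before
   the actual return is generated.  */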
536
537 tree
538 gfc_trans_return (gfc_code * code)
539 {
540 if (code->expr1)
541 {
542 gfc_se se;
543 tree tmp;
544 tree result;
545
546 /* If code->expr1 is not NULL, this return statement must appear
547 in a subroutine and current_fake_result_decl has already
548 been generated. */
549
550 result = gfc_get_fake_result_decl (NULL, 0);
551 if (!result)
552 {
553 gfc_warning (0,
554 "An alternate return at %L without a * dummy argument",
555 &code->expr1->where);
556 return gfc_generate_return ();
557 }
558
559 /* Start a new block for this statement. */
560 gfc_init_se (&se, NULL);
561 gfc_start_block (&se.pre);
562
563 gfc_conv_expr (&se, code->expr1);
564
565 /* Note that the actually returned expression is a simple value and
566 does not depend on any pointers or such; thus we can clean-up with
567 se.post before returning. */
568 tmp = fold_build2_loc (input_location, MODIFY_EXPR, TREE_TYPE (result),
569 result, fold_convert (TREE_TYPE (result),
570 se.expr));
571 gfc_add_expr_to_block (&se.pre, tmp);
572 gfc_add_block_to_block (&se.pre, &se.post);
573
574 tmp = gfc_generate_return ();
575 gfc_add_expr_to_block (&se.pre, tmp);
576 return gfc_finish_block (&se.pre);
577 }
578
579 return gfc_generate_return ();
580 }
581
582
583 /* Translate the PAUSE statement. We have to translate this statement
584 to a runtime library call. */
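/* For example (a sketch in terms of the fndecls used below):

       PAUSE            ->  gfor_fndecl_pause_string (NULL, 0)
       PAUSE 7          ->  gfor_fndecl_pause_numeric (7)
       PAUSE 'message'  ->  gfor_fndecl_pause_string ("message", 7)  */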
585
586 tree
587 gfc_trans_pause (gfc_code * code)
588 {
589 tree gfc_int4_type_node = gfc_get_int_type (4);
590 gfc_se se;
591 tree tmp;
592
593 /* Start a new block for this statement. */
594 gfc_init_se (&se, NULL);
595 gfc_start_block (&se.pre);
596
597
598 if (code->expr1 == NULL)
599 {
600 tmp = build_int_cst (gfc_int4_type_node, 0);
601 tmp = build_call_expr_loc (input_location,
602 gfor_fndecl_pause_string, 2,
603 build_int_cst (pchar_type_node, 0), tmp);
604 }
605 else if (code->expr1->ts.type == BT_INTEGER)
606 {
607 gfc_conv_expr (&se, code->expr1);
608 tmp = build_call_expr_loc (input_location,
609 gfor_fndecl_pause_numeric, 1,
610 fold_convert (gfc_int4_type_node, se.expr));
611 }
612 else
613 {
614 gfc_conv_expr_reference (&se, code->expr1);
615 tmp = build_call_expr_loc (input_location,
616 gfor_fndecl_pause_string, 2,
617 se.expr, se.string_length);
618 }
619
620 gfc_add_expr_to_block (&se.pre, tmp);
621
622 gfc_add_block_to_block (&se.pre, &se.post);
623
624 return gfc_finish_block (&se.pre);
625 }
626
627
628 /* Translate the STOP statement. We have to translate this statement
629 to a runtime library call. */
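/* For example (a sketch in terms of the fndecls used below):

       STOP             ->  gfor_fndecl_stop_string (NULL, 0)
       STOP 2           ->  gfor_fndecl_stop_numeric_f08 (2)
       ERROR STOP 'msg' ->  gfor_fndecl_error_stop_string ("msg", 3)

   With -fcoarray=lib the ERROR STOP calls are replaced by their _caf_
   counterparts, and a plain STOP additionally implies a SYNC MEMORY.  */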
630
631 tree
632 gfc_trans_stop (gfc_code *code, bool error_stop)
633 {
634 tree gfc_int4_type_node = gfc_get_int_type (4);
635 gfc_se se;
636 tree tmp;
637
638 /* Start a new block for this statement. */
639 gfc_init_se (&se, NULL);
640 gfc_start_block (&se.pre);
641
642 if (flag_coarray == GFC_FCOARRAY_LIB && !error_stop)
643 {
644 /* Per F2008, 8.5.1 STOP implies a SYNC MEMORY. */
645 tmp = builtin_decl_explicit (BUILT_IN_SYNC_SYNCHRONIZE);
646 tmp = build_call_expr_loc (input_location, tmp, 0);
647 gfc_add_expr_to_block (&se.pre, tmp);
648
649 tmp = build_call_expr_loc (input_location, gfor_fndecl_caf_finalize, 0);
650 gfc_add_expr_to_block (&se.pre, tmp);
651 }
652
653 if (code->expr1 == NULL)
654 {
655 tmp = build_int_cst (gfc_int4_type_node, 0);
656 tmp = build_call_expr_loc (input_location,
657 error_stop
658 ? (flag_coarray == GFC_FCOARRAY_LIB
659 ? gfor_fndecl_caf_error_stop_str
660 : gfor_fndecl_error_stop_string)
661 : gfor_fndecl_stop_string,
662 2, build_int_cst (pchar_type_node, 0), tmp);
663 }
664 else if (code->expr1->ts.type == BT_INTEGER)
665 {
666 gfc_conv_expr (&se, code->expr1);
667 tmp = build_call_expr_loc (input_location,
668 error_stop
669 ? (flag_coarray == GFC_FCOARRAY_LIB
670 ? gfor_fndecl_caf_error_stop
671 : gfor_fndecl_error_stop_numeric)
672 : gfor_fndecl_stop_numeric_f08, 1,
673 fold_convert (gfc_int4_type_node, se.expr));
674 }
675 else
676 {
677 gfc_conv_expr_reference (&se, code->expr1);
678 tmp = build_call_expr_loc (input_location,
679 error_stop
680 ? (flag_coarray == GFC_FCOARRAY_LIB
681 ? gfor_fndecl_caf_error_stop_str
682 : gfor_fndecl_error_stop_string)
683 : gfor_fndecl_stop_string,
684 2, se.expr, se.string_length);
685 }
686
687 gfc_add_expr_to_block (&se.pre, tmp);
688
689 gfc_add_block_to_block (&se.pre, &se.post);
690
691 return gfc_finish_block (&se.pre);
692 }
693
694
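/* Translate the LOCK and UNLOCK statements.  In this implementation they
   reduce to assignments to the optional specifiers; e.g. (illustrative)

       LOCK (lck, STAT=s, ACQUIRED_LOCK=ok)

   sets s = 0 and ok = .TRUE., while a LOCK or UNLOCK without STAT= or
   ACQUIRED_LOCK= generates no code at all for -fcoarray=single.  */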
695 tree
696 gfc_trans_lock_unlock (gfc_code *code, gfc_exec_op type ATTRIBUTE_UNUSED)
697 {
698 gfc_se se, argse;
699 tree stat = NULL_TREE, lock_acquired = NULL_TREE;
700
701 /* Short cut: For single images without STAT= or ACQUIRED_LOCK=,
702 return early. (ERRMSG= is always untouched for -fcoarray=single.) */
703 if (!code->expr2 && !code->expr4 && flag_coarray != GFC_FCOARRAY_LIB)
704 return NULL_TREE;
705
706 gfc_init_se (&se, NULL);
707 gfc_start_block (&se.pre);
708
709 if (code->expr2)
710 {
711 gcc_assert (code->expr2->expr_type == EXPR_VARIABLE);
712 gfc_init_se (&argse, NULL);
713 gfc_conv_expr_val (&argse, code->expr2);
714 stat = argse.expr;
715 }
716
717 if (code->expr4)
718 {
719 gcc_assert (code->expr4->expr_type == EXPR_VARIABLE);
720 gfc_init_se (&argse, NULL);
721 gfc_conv_expr_val (&argse, code->expr4);
722 lock_acquired = argse.expr;
723 }
724
725 if (stat != NULL_TREE)
726 gfc_add_modify (&se.pre, stat, build_int_cst (TREE_TYPE (stat), 0));
727
728 if (lock_acquired != NULL_TREE)
729 gfc_add_modify (&se.pre, lock_acquired,
730 fold_convert (TREE_TYPE (lock_acquired),
731 boolean_true_node));
732
733 return gfc_finish_block (&se.pre);
734 }
735
736
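/* Translate the SYNC ALL, SYNC IMAGES and SYNC MEMORY statements.  For
   example (illustrative)

       SYNC IMAGES (img, STAT=s, ERRMSG=msg)

   checks with -fcheck=bounds that img is a valid image number, emits a
   __sync_synchronize () for -fcoarray=lib followed by a call to the
   routine behind gfor_fndecl_caf_sync_images, and for -fcoarray=single
   simply sets s = 0.  */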
737 tree
738 gfc_trans_sync (gfc_code *code, gfc_exec_op type)
739 {
740 gfc_se se, argse;
741 tree tmp;
742 tree images = NULL_TREE, stat = NULL_TREE,
743 errmsg = NULL_TREE, errmsglen = NULL_TREE;
744
745 /* Short cut: For single images without bound checking or without STAT=,
746 return early. (ERRMSG= is always untouched for -fcoarray=single.) */
747 if (!code->expr2 && !(gfc_option.rtcheck & GFC_RTCHECK_BOUNDS)
748 && flag_coarray != GFC_FCOARRAY_LIB)
749 return NULL_TREE;
750
751 gfc_init_se (&se, NULL);
752 gfc_start_block (&se.pre);
753
754 if (code->expr1 && code->expr1->rank == 0)
755 {
756 gfc_init_se (&argse, NULL);
757 gfc_conv_expr_val (&argse, code->expr1);
758 images = argse.expr;
759 }
760
761 if (code->expr2)
762 {
763 gcc_assert (code->expr2->expr_type == EXPR_VARIABLE);
764 gfc_init_se (&argse, NULL);
765 gfc_conv_expr_val (&argse, code->expr2);
766 stat = argse.expr;
767 }
768 else
769 stat = null_pointer_node;
770
771 if (code->expr3 && flag_coarray == GFC_FCOARRAY_LIB
772 && type != EXEC_SYNC_MEMORY)
773 {
774 gcc_assert (code->expr3->expr_type == EXPR_VARIABLE);
775 gfc_init_se (&argse, NULL);
776 gfc_conv_expr (&argse, code->expr3);
777 gfc_conv_string_parameter (&argse);
778 errmsg = gfc_build_addr_expr (NULL, argse.expr);
779 errmsglen = argse.string_length;
780 }
781 else if (flag_coarray == GFC_FCOARRAY_LIB && type != EXEC_SYNC_MEMORY)
782 {
783 errmsg = null_pointer_node;
784 errmsglen = build_int_cst (integer_type_node, 0);
785 }
786
787 /* Check SYNC IMAGES(imageset) for valid image index.
788 FIXME: Add a check for image-set arrays. */
789 if (code->expr1 && (gfc_option.rtcheck & GFC_RTCHECK_BOUNDS)
790 && code->expr1->rank == 0)
791 {
792 tree cond;
793 if (flag_coarray != GFC_FCOARRAY_LIB)
794 cond = fold_build2_loc (input_location, NE_EXPR, boolean_type_node,
795 images, build_int_cst (TREE_TYPE (images), 1));
796 else
797 {
798 tree cond2;
799 tmp = build_call_expr_loc (input_location, gfor_fndecl_caf_num_images,
800 2, integer_zero_node,
801 build_int_cst (integer_type_node, -1));
802 cond = fold_build2_loc (input_location, GT_EXPR, boolean_type_node,
803 images, tmp);
804 cond2 = fold_build2_loc (input_location, LT_EXPR, boolean_type_node,
805 images,
806 build_int_cst (TREE_TYPE (images), 1));
807 cond = fold_build2_loc (input_location, TRUTH_OR_EXPR,
808 boolean_type_node, cond, cond2);
809 }
810 gfc_trans_runtime_check (true, false, cond, &se.pre,
811 &code->expr1->where, "Invalid image number "
812 "%d in SYNC IMAGES",
813 fold_convert (integer_type_node, images));
814 }
815
816 /* Per F2008, 8.5.1, a SYNC MEMORY is implied by calling the
817 image control statements SYNC IMAGES and SYNC ALL. */
818 if (flag_coarray == GFC_FCOARRAY_LIB)
819 {
820 tmp = builtin_decl_explicit (BUILT_IN_SYNC_SYNCHRONIZE);
821 tmp = build_call_expr_loc (input_location, tmp, 0);
822 gfc_add_expr_to_block (&se.pre, tmp);
823 }
824
825 if (flag_coarray != GFC_FCOARRAY_LIB || type == EXEC_SYNC_MEMORY)
826 {
827 /* Set STAT to zero. */
828 if (code->expr2)
829 gfc_add_modify (&se.pre, stat, build_int_cst (TREE_TYPE (stat), 0));
830 }
831 else if (type == EXEC_SYNC_ALL)
832 {
833 /* SYNC ALL => stat == null_pointer_node
834 SYNC ALL(stat=s) => stat has an integer type
835
836 If "stat" has the wrong integer type, use a temp variable of
837 the right type and later cast the result back into "stat". */
838 if (stat == null_pointer_node || TREE_TYPE (stat) == integer_type_node)
839 {
840 if (TREE_TYPE (stat) == integer_type_node)
841 stat = gfc_build_addr_expr (NULL, stat);
842
843 tmp = build_call_expr_loc (input_location, gfor_fndecl_caf_sync_all,
844 3, stat, errmsg, errmsglen);
845 gfc_add_expr_to_block (&se.pre, tmp);
846 }
847 else
848 {
849 tree tmp_stat = gfc_create_var (integer_type_node, "stat");
850
851 tmp = build_call_expr_loc (input_location, gfor_fndecl_caf_sync_all,
852 3, gfc_build_addr_expr (NULL, tmp_stat),
853 errmsg, errmsglen);
854 gfc_add_expr_to_block (&se.pre, tmp);
855
856 gfc_add_modify (&se.pre, stat,
857 fold_convert (TREE_TYPE (stat), tmp_stat));
858 }
859 }
860 else
861 {
862 tree len;
863
864 gcc_assert (type == EXEC_SYNC_IMAGES);
865
866 if (!code->expr1)
867 {
868 len = build_int_cst (integer_type_node, -1);
869 images = null_pointer_node;
870 }
871 else if (code->expr1->rank == 0)
872 {
873 len = build_int_cst (integer_type_node, 1);
874 images = gfc_build_addr_expr (NULL_TREE, images);
875 }
876 else
877 {
878 /* FIXME. */
879 if (code->expr1->ts.kind != gfc_c_int_kind)
880 gfc_fatal_error ("Sorry, only support for integer kind %d "
881 "implemented for image-set at %L",
882 gfc_c_int_kind, &code->expr1->where);
883
884 gfc_conv_array_parameter (&se, code->expr1, true, NULL, NULL, &len);
885 images = se.expr;
886
887 tmp = gfc_typenode_for_spec (&code->expr1->ts);
888 if (GFC_ARRAY_TYPE_P (tmp) || GFC_DESCRIPTOR_TYPE_P (tmp))
889 tmp = gfc_get_element_type (tmp);
890
891 len = fold_build2_loc (input_location, TRUNC_DIV_EXPR,
892 TREE_TYPE (len), len,
893 fold_convert (TREE_TYPE (len),
894 TYPE_SIZE_UNIT (tmp)));
895 len = fold_convert (integer_type_node, len);
896 }
897
898 /* SYNC IMAGES(imgs) => stat == null_pointer_node
899 SYNC IMAGES(imgs,stat=s) => stat has an integer type
900
901 If "stat" has the wrong integer type, use a temp variable of
902 the right type and later cast the result back into "stat". */
903 if (stat == null_pointer_node || TREE_TYPE (stat) == integer_type_node)
904 {
905 if (TREE_TYPE (stat) == integer_type_node)
906 stat = gfc_build_addr_expr (NULL, stat);
907
908 tmp = build_call_expr_loc (input_location, gfor_fndecl_caf_sync_images,
909 5, fold_convert (integer_type_node, len),
910 images, stat, errmsg, errmsglen);
911 gfc_add_expr_to_block (&se.pre, tmp);
912 }
913 else
914 {
915 tree tmp_stat = gfc_create_var (integer_type_node, "stat");
916
917 tmp = build_call_expr_loc (input_location, gfor_fndecl_caf_sync_images,
918 5, fold_convert (integer_type_node, len),
919 images, gfc_build_addr_expr (NULL, tmp_stat),
920 errmsg, errmsglen);
921 gfc_add_expr_to_block (&se.pre, tmp);
922
923 gfc_add_modify (&se.pre, stat,
924 fold_convert (TREE_TYPE (stat), tmp_stat));
925 }
926 }
927
928 return gfc_finish_block (&se.pre);
929 }
930
931
932 /* Generate GENERIC for the IF construct. This function also deals with
933 the simple IF statement, because the front end translates the IF
934 statement into an IF construct.
935
936 We translate:
937
938 IF (cond) THEN
939 then_clause
940 ELSEIF (cond2)
941 elseif_clause
942 ELSE
943 else_clause
944 ENDIF
945
946 into:
947
948 pre_cond_s;
949 if (cond_s)
950 {
951 then_clause;
952 }
953 else
954 {
955 pre_cond_s
956 if (cond_s)
957 {
958 elseif_clause
959 }
960 else
961 {
962 else_clause;
963 }
964 }
965
966 where COND_S is the simplified version of the predicate. PRE_COND_S
967 are the pre side-effects produced by the translation of the
968 conditional.
969 We need to build the chain recursively otherwise we run into
970 problems with folding incomplete statements. */
971
972 static tree
973 gfc_trans_if_1 (gfc_code * code)
974 {
975 gfc_se if_se;
976 tree stmt, elsestmt;
977 locus saved_loc;
978 location_t loc;
979
980 /* Check for an unconditional ELSE clause. */
981 if (!code->expr1)
982 return gfc_trans_code (code->next);
983
984 /* Initialize a statement builder for each block. Puts in NULL_TREEs. */
985 gfc_init_se (&if_se, NULL);
986 gfc_start_block (&if_se.pre);
987
988 /* Calculate the IF condition expression. */
989 if (code->expr1->where.lb)
990 {
991 gfc_save_backend_locus (&saved_loc);
992 gfc_set_backend_locus (&code->expr1->where);
993 }
994
995 gfc_conv_expr_val (&if_se, code->expr1);
996
997 if (code->expr1->where.lb)
998 gfc_restore_backend_locus (&saved_loc);
999
1000 /* Translate the THEN clause. */
1001 stmt = gfc_trans_code (code->next);
1002
1003 /* Translate the ELSE clause. */
1004 if (code->block)
1005 elsestmt = gfc_trans_if_1 (code->block);
1006 else
1007 elsestmt = build_empty_stmt (input_location);
1008
1009 /* Build the condition expression and add it to the condition block. */
1010 loc = code->expr1->where.lb ? code->expr1->where.lb->location : input_location;
1011 stmt = fold_build3_loc (loc, COND_EXPR, void_type_node, if_se.expr, stmt,
1012 elsestmt);
1013
1014 gfc_add_expr_to_block (&if_se.pre, stmt);
1015
1016 /* Finish off this statement. */
1017 return gfc_finish_block (&if_se.pre);
1018 }
1019
1020 tree
1021 gfc_trans_if (gfc_code * code)
1022 {
1023 stmtblock_t body;
1024 tree exit_label;
1025
1026 /* Create exit label so it is available for trans'ing the body code. */
1027 exit_label = gfc_build_label_decl (NULL_TREE);
1028 code->exit_label = exit_label;
1029
1030 /* Translate the actual code in code->block. */
1031 gfc_init_block (&body);
1032 gfc_add_expr_to_block (&body, gfc_trans_if_1 (code->block));
1033
1034 /* Add exit label. */
1035 gfc_add_expr_to_block (&body, build1_v (LABEL_EXPR, exit_label));
1036
1037 return gfc_finish_block (&body);
1038 }
1039
1040
1041 /* Translate an arithmetic IF expression.
1042
1043 IF (cond) label1, label2, label3 translates to
1044
1045 if (cond <= 0)
1046 {
1047 if (cond < 0)
1048 goto label1;
1049 else // cond == 0
1050 goto label2;
1051 }
1052 else // cond > 0
1053 goto label3;
1054
1055 An optimized version can be generated in case of equal labels.
1056 E.g., if label1 is equal to label2, we can translate it to
1057
1058 if (cond <= 0)
1059 goto label1;
1060 else
1061 goto label3;
1062 */
1063
1064 tree
1065 gfc_trans_arithmetic_if (gfc_code * code)
1066 {
1067 gfc_se se;
1068 tree tmp;
1069 tree branch1;
1070 tree branch2;
1071 tree zero;
1072
1073 /* Start a new block. */
1074 gfc_init_se (&se, NULL);
1075 gfc_start_block (&se.pre);
1076
1077 /* Pre-evaluate COND. */
1078 gfc_conv_expr_val (&se, code->expr1);
1079 se.expr = gfc_evaluate_now (se.expr, &se.pre);
1080
1081 /* Build something to compare with. */
1082 zero = gfc_build_const (TREE_TYPE (se.expr), integer_zero_node);
1083
1084 if (code->label1->value != code->label2->value)
1085 {
1086 /* If (cond < 0) take branch1 else take branch2.
1087 First build jumps to the COND .LT. 0 and the COND .EQ. 0 cases. */
1088 branch1 = build1_v (GOTO_EXPR, gfc_get_label_decl (code->label1));
1089 branch2 = build1_v (GOTO_EXPR, gfc_get_label_decl (code->label2));
1090
1091 if (code->label1->value != code->label3->value)
1092 tmp = fold_build2_loc (input_location, LT_EXPR, boolean_type_node,
1093 se.expr, zero);
1094 else
1095 tmp = fold_build2_loc (input_location, NE_EXPR, boolean_type_node,
1096 se.expr, zero);
1097
1098 branch1 = fold_build3_loc (input_location, COND_EXPR, void_type_node,
1099 tmp, branch1, branch2);
1100 }
1101 else
1102 branch1 = build1_v (GOTO_EXPR, gfc_get_label_decl (code->label1));
1103
1104 if (code->label1->value != code->label3->value
1105 && code->label2->value != code->label3->value)
1106 {
1107 /* if (cond <= 0) take branch1 else take branch2. */
1108 branch2 = build1_v (GOTO_EXPR, gfc_get_label_decl (code->label3));
1109 tmp = fold_build2_loc (input_location, LE_EXPR, boolean_type_node,
1110 se.expr, zero);
1111 branch1 = fold_build3_loc (input_location, COND_EXPR, void_type_node,
1112 tmp, branch1, branch2);
1113 }
1114
1115 /* Append the COND_EXPR to the evaluation of COND, and return. */
1116 gfc_add_expr_to_block (&se.pre, branch1);
1117 return gfc_finish_block (&se.pre);
1118 }
1119
1120
1121 /* Translate a CRITICAL block. */
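/* For example (illustrative):

       CRITICAL
         cnt[1] = cnt[1] + 1
       END CRITICAL

   translates to just the body for -fcoarray=single; for -fcoarray=lib the
   body is additionally wrapped in caf lock/unlock calls on the token that
   was created for this construct.  */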
1122 tree
1123 gfc_trans_critical (gfc_code *code)
1124 {
1125 stmtblock_t block;
1126 tree tmp, token = NULL_TREE;
1127
1128 gfc_start_block (&block);
1129
1130 if (flag_coarray == GFC_FCOARRAY_LIB)
1131 {
1132 token = gfc_get_symbol_decl (code->resolved_sym);
1133 token = GFC_TYPE_ARRAY_CAF_TOKEN (TREE_TYPE (token));
1134 tmp = build_call_expr_loc (input_location, gfor_fndecl_caf_lock, 7,
1135 token, integer_zero_node, integer_one_node,
1136 null_pointer_node, null_pointer_node,
1137 null_pointer_node, integer_zero_node);
1138 gfc_add_expr_to_block (&block, tmp);
1139 }
1140
1141 tmp = gfc_trans_code (code->block->next);
1142 gfc_add_expr_to_block (&block, tmp);
1143
1144 if (flag_coarray == GFC_FCOARRAY_LIB)
1145 {
1146 tmp = build_call_expr_loc (input_location, gfor_fndecl_caf_unlock, 6,
1147 token, integer_zero_node, integer_one_node,
1148 null_pointer_node, null_pointer_node,
1149 integer_zero_node);
1150 gfc_add_expr_to_block (&block, tmp);
1151 }
1152
1153
1154 return gfc_finish_block (&block);
1155 }
1156
1157
1158 /* Return true when the class has a _len component. */
1159
1160 static bool
1161 class_has_len_component (gfc_symbol *sym)
1162 {
1163 gfc_component *comp = sym->ts.u.derived->components;
1164 while (comp)
1165 {
1166 if (strcmp (comp->name, "_len") == 0)
1167 return true;
1168 comp = comp->next;
1169 }
1170 return false;
1171 }
1172
1173
1174 /* Do proper initialization for ASSOCIATE names. */
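/* For example (a sketch):

       ASSOCIATE (x => a(i, :), n => SIZE (b))
         ...
       END ASSOCIATE

   gives x a descriptor pointing at the section of a (essentially a
   pointer assignment with rebased bounds), while n is set up by a plain
   scalar assignment; class and character selectors additionally need
   their _len component or string length copied, as handled below.  */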
1175
1176 static void
1177 trans_associate_var (gfc_symbol *sym, gfc_wrapped_block *block)
1178 {
1179 gfc_expr *e;
1180 tree tmp;
1181 bool class_target;
1182 bool unlimited;
1183 tree desc;
1184 tree offset;
1185 tree dim;
1186 int n;
1187 tree charlen;
1188 bool need_len_assign;
1189
1190 gcc_assert (sym->assoc);
1191 e = sym->assoc->target;
1192
1193 class_target = (e->expr_type == EXPR_VARIABLE)
1194 && (gfc_is_class_scalar_expr (e)
1195 || gfc_is_class_array_ref (e, NULL));
1196
1197 unlimited = UNLIMITED_POLY (e);
1198
1199 /* Assignments to the string length need to be generated, when
1200 ( sym is a char array or
1201 sym has a _len component)
1202 and the associated expression is unlimited polymorphic, which is
1203 not (yet) correctly reflected in 'unlimited', because for an already associated
1204 BT_DERIVED the u-poly flag is not set, i.e.,
1205 __tmp_CHARACTER_0_1 => w => arg
1206 ^ generated temp ^ from code, the w does not have the u-poly
1207 flag set, where UNLIMITED_POLY(e) expects it. */
1208 need_len_assign = ((unlimited || (e->ts.type == BT_DERIVED
1209 && e->ts.u.derived->attr.unlimited_polymorphic))
1210 && (sym->ts.type == BT_CHARACTER
1211 || ((sym->ts.type == BT_CLASS || sym->ts.type == BT_DERIVED)
1212 && class_has_len_component (sym))));
1213 /* Do a `pointer assignment' with updated descriptor (or assign descriptor
1214 to array temporary) for arrays with either unknown shape or if associating
1215 to a variable. */
1216 if (sym->attr.dimension && !class_target
1217 && (sym->as->type == AS_DEFERRED || sym->assoc->variable))
1218 {
1219 gfc_se se;
1220 tree desc;
1221 bool cst_array_ctor;
1222
1223 desc = sym->backend_decl;
1224 cst_array_ctor = e->expr_type == EXPR_ARRAY
1225 && gfc_constant_array_constructor_p (e->value.constructor);
1226
1227 /* If association is to an expression, evaluate it and create temporary.
1228 Otherwise, get descriptor of target for pointer assignment. */
1229 gfc_init_se (&se, NULL);
1230 if (sym->assoc->variable || cst_array_ctor)
1231 {
1232 se.direct_byref = 1;
1233 se.use_offset = 1;
1234 se.expr = desc;
1235 }
1236
1237 gfc_conv_expr_descriptor (&se, e);
1238
1239 /* If we didn't already do the pointer assignment, set associate-name
1240 descriptor to the one generated for the temporary. */
1241 if (!sym->assoc->variable && !cst_array_ctor)
1242 {
1243 int dim;
1244
1245 gfc_add_modify (&se.pre, desc, se.expr);
1246
1247 /* The generated descriptor has lower bound zero (as array
1248 temporary), shift bounds so we get lower bounds of 1. */
1249 for (dim = 0; dim < e->rank; ++dim)
1250 gfc_conv_shift_descriptor_lbound (&se.pre, desc,
1251 dim, gfc_index_one_node);
1252 }
1253
1254 /* If this is a subreference array pointer associate name use the
1255 associate variable element size for the value of 'span'. */
1256 if (sym->attr.subref_array_pointer)
1257 {
1258 gcc_assert (e->expr_type == EXPR_VARIABLE);
1259 tmp = e->symtree->n.sym->backend_decl;
1260 tmp = gfc_get_element_type (TREE_TYPE (tmp));
1261 tmp = fold_convert (gfc_array_index_type, size_in_bytes (tmp));
1262 gfc_add_modify (&se.pre, GFC_DECL_SPAN(desc), tmp);
1263 }
1264
1265 /* Done, register stuff as init / cleanup code. */
1266 gfc_add_init_cleanup (block, gfc_finish_block (&se.pre),
1267 gfc_finish_block (&se.post));
1268 }
1269
1270 /* Temporaries, arising from TYPE IS, just need the descriptor of class
1271 arrays to be assigned directly. */
1272 else if (class_target && sym->attr.dimension
1273 && (sym->ts.type == BT_DERIVED || unlimited))
1274 {
1275 gfc_se se;
1276
1277 gfc_init_se (&se, NULL);
1278 se.descriptor_only = 1;
1279 gfc_conv_expr (&se, e);
1280
1281 gcc_assert (GFC_DESCRIPTOR_TYPE_P (TREE_TYPE (se.expr)));
1282 gcc_assert (GFC_DESCRIPTOR_TYPE_P (TREE_TYPE (sym->backend_decl)));
1283
1284 gfc_add_modify (&se.pre, sym->backend_decl, se.expr);
1285
1286 if (unlimited)
1287 {
1288 /* Recover the dtype, which has been overwritten by the
1289 assignment from an unlimited polymorphic object. */
1290 tmp = gfc_conv_descriptor_dtype (sym->backend_decl);
1291 gfc_add_modify (&se.pre, tmp,
1292 gfc_get_dtype (TREE_TYPE (sym->backend_decl)));
1293 }
1294
1295 gfc_add_init_cleanup (block, gfc_finish_block (&se.pre),
1296 gfc_finish_block (&se.post));
1297 }
1298
1299 /* Do a scalar pointer assignment; this is for scalar variable targets. */
1300 else if (gfc_is_associate_pointer (sym))
1301 {
1302 gfc_se se;
1303
1304 gcc_assert (!sym->attr.dimension);
1305
1306 gfc_init_se (&se, NULL);
1307
1308 /* Class associate-names come this way because they are
1309 unconditionally associate pointers and the symbol is scalar. */
1310 if (sym->ts.type == BT_CLASS && CLASS_DATA (sym)->attr.dimension)
1311 {
1312 tree target_expr;
1313 /* For a class array we need a descriptor for the selector. */
1314 gfc_conv_expr_descriptor (&se, e);
1315 /* Needed to get/set the _len component below. */
1316 target_expr = se.expr;
1317
1318 /* Obtain a temporary class container for the result. */
1319 gfc_conv_class_to_class (&se, e, sym->ts, false, true, false, false);
1320 se.expr = build_fold_indirect_ref_loc (input_location, se.expr);
1321
1322 /* Set the offset. */
1323 desc = gfc_class_data_get (se.expr);
1324 offset = gfc_index_zero_node;
1325 for (n = 0; n < e->rank; n++)
1326 {
1327 dim = gfc_rank_cst[n];
1328 tmp = fold_build2_loc (input_location, MULT_EXPR,
1329 gfc_array_index_type,
1330 gfc_conv_descriptor_stride_get (desc, dim),
1331 gfc_conv_descriptor_lbound_get (desc, dim));
1332 offset = fold_build2_loc (input_location, MINUS_EXPR,
1333 gfc_array_index_type,
1334 offset, tmp);
1335 }
1336 if (need_len_assign)
1337 {
1338 /* Get the _len comp from the target expr by stripping _data
1339 from it and adding component-ref to _len. */
1340 tmp = gfc_class_len_get (TREE_OPERAND (target_expr, 0));
1341 /* Get the component-ref for the temp structure's _len comp. */
1342 charlen = gfc_class_len_get (se.expr);
1343 /* Add the assignment to the beginning of the block... */
1344 gfc_add_modify (&se.pre, charlen,
1345 fold_convert (TREE_TYPE (charlen), tmp));
1346 /* and the opposite way at the end of the block, to hand changes
1347 on the string length back. */
1348 gfc_add_modify (&se.post, tmp,
1349 fold_convert (TREE_TYPE (tmp), charlen));
1350 /* Length assignment done, prevent adding it again below. */
1351 need_len_assign = false;
1352 }
1353 gfc_conv_descriptor_offset_set (&se.pre, desc, offset);
1354 }
1355 else if (sym->ts.type == BT_CLASS && e->ts.type == BT_CLASS
1356 && CLASS_DATA (e)->attr.dimension)
1357 {
1358 /* This is bound to be a class array element. */
1359 gfc_conv_expr_reference (&se, e);
1360 /* Get the _vptr component of the class object. */
1361 tmp = gfc_get_vptr_from_expr (se.expr);
1362 /* Obtain a temporary class container for the result. */
1363 gfc_conv_derived_to_class (&se, e, sym->ts, tmp, false, false);
1364 se.expr = build_fold_indirect_ref_loc (input_location, se.expr);
1365 }
1366 else
1367 {
1368 /* For BT_CLASS and BT_DERIVED, this boils down to a pointer assign,
1369 which has the string length included. For CHARACTERS it is still
1370 needed and will be done at the end of this routine. */
1371 gfc_conv_expr (&se, e);
1372 need_len_assign = need_len_assign && sym->ts.type == BT_CHARACTER;
1373 }
1374
1375 tmp = TREE_TYPE (sym->backend_decl);
1376 tmp = gfc_build_addr_expr (tmp, se.expr);
1377 gfc_add_modify (&se.pre, sym->backend_decl, tmp);
1378
1379 gfc_add_init_cleanup (block, gfc_finish_block (&se.pre),
1380 gfc_finish_block (&se.post));
1381 }
1382
1383 /* Do a simple assignment. This is for scalar expressions, where we
1384 can simply use expression assignment. */
1385 else
1386 {
1387 gfc_expr *lhs;
1388
1389 lhs = gfc_lval_expr_from_sym (sym);
1390 tmp = gfc_trans_assignment (lhs, e, false, true);
1391 gfc_add_init_cleanup (block, tmp, NULL_TREE);
1392 }
1393
1394 /* Set the stringlength, when needed. */
1395 if (need_len_assign)
1396 {
1397 gfc_se se;
1398 gfc_init_se (&se, NULL);
1399 if (e->symtree->n.sym->ts.type == BT_CHARACTER)
1400 {
1401 /* What about deferred strings? */
1402 gcc_assert (!e->symtree->n.sym->ts.deferred);
1403 tmp = e->symtree->n.sym->ts.u.cl->backend_decl;
1404 }
1405 else
1406 tmp = gfc_class_len_get (gfc_get_symbol_decl (e->symtree->n.sym));
1407 gfc_get_symbol_decl (sym);
1408 charlen = sym->ts.type == BT_CHARACTER ? sym->ts.u.cl->backend_decl
1409 : gfc_class_len_get (sym->backend_decl);
1410 /* Prevent adding a no-op assignment len = len. */
1411 if (tmp != charlen)
1412 {
1413 gfc_add_modify (&se.pre, charlen,
1414 fold_convert (TREE_TYPE (charlen), tmp));
1415 gfc_add_init_cleanup (block, gfc_finish_block (&se.pre),
1416 gfc_finish_block (&se.post));
1417 }
1418 }
1419 }
1420
1421
1422 /* Translate a BLOCK construct. This is basically what we would do for a
1423 procedure body. */
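/* For example (illustrative):

       BLOCK
         INTEGER :: tmp
         tmp = i
         i = j
         j = tmp
       END BLOCK

   The construct carries its own namespace, so tmp is handled like a
   procedure-local variable, and leaving the construct branches to the
   exit label generated below.  */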
1424
1425 tree
1426 gfc_trans_block_construct (gfc_code* code)
1427 {
1428 gfc_namespace* ns;
1429 gfc_symbol* sym;
1430 gfc_wrapped_block block;
1431 tree exit_label;
1432 stmtblock_t body;
1433 gfc_association_list *ass;
1434
1435 ns = code->ext.block.ns;
1436 gcc_assert (ns);
1437 sym = ns->proc_name;
1438 gcc_assert (sym);
1439
1440 /* Process local variables. */
1441 gcc_assert (!sym->tlink);
1442 sym->tlink = sym;
1443 gfc_process_block_locals (ns);
1444
1445 /* Generate code including exit-label. */
1446 gfc_init_block (&body);
1447 exit_label = gfc_build_label_decl (NULL_TREE);
1448 code->exit_label = exit_label;
1449
1450 /* Generate !$ACC DECLARE directive. */
1451 if (ns->oacc_declare_clauses)
1452 {
1453 tree tmp = gfc_trans_oacc_declare (&body, ns);
1454 gfc_add_expr_to_block (&body, tmp);
1455 }
1456
1457 gfc_add_expr_to_block (&body, gfc_trans_code (ns->code));
1458 gfc_add_expr_to_block (&body, build1_v (LABEL_EXPR, exit_label));
1459
1460 /* Finish everything. */
1461 gfc_start_wrapped_block (&block, gfc_finish_block (&body));
1462 gfc_trans_deferred_vars (sym, &block);
1463 for (ass = code->ext.block.assoc; ass; ass = ass->next)
1464 trans_associate_var (ass->st->n.sym, &block);
1465
1466 return gfc_finish_wrapped_block (&block);
1467 }
1468
1469
1470 /* Translate the simple DO construct. This is where the loop variable has
1471 integer type and step +-1. We can't use this in the general case
1472 because integer overflow and floating point errors could give incorrect
1473 results.
1474 We translate a do loop from:
1475
1476 DO dovar = from, to, step
1477 body
1478 END DO
1479
1480 to:
1481
1482 [Evaluate loop bounds and step]
1483 dovar = from;
1484 if ((step > 0) ? (dovar <= to) : (dovar >= to))
1485 {
1486 for (;;)
1487 {
1488 body;
1489 cycle_label:
1490 cond = (dovar == to);
1491 dovar += step;
1492 if (cond) goto end_label;
1493 }
1494 }
1495 end_label:
1496
1497 This helps the optimizers by avoiding the extra induction variable
1498 used in the general case. */
1499
1500 static tree
1501 gfc_trans_simple_do (gfc_code * code, stmtblock_t *pblock, tree dovar,
1502 tree from, tree to, tree step, tree exit_cond)
1503 {
1504 stmtblock_t body;
1505 tree type;
1506 tree cond;
1507 tree tmp;
1508 tree saved_dovar = NULL;
1509 tree cycle_label;
1510 tree exit_label;
1511 location_t loc;
1512
1513 type = TREE_TYPE (dovar);
1514
1515 loc = code->ext.iterator->start->where.lb->location;
1516
1517 /* Initialize the DO variable: dovar = from. */
1518 gfc_add_modify_loc (loc, pblock, dovar,
1519 fold_convert (TREE_TYPE(dovar), from));
1520
1521 /* Save value for do-tinkering checking. */
1522 if (gfc_option.rtcheck & GFC_RTCHECK_DO)
1523 {
1524 saved_dovar = gfc_create_var (type, ".saved_dovar");
1525 gfc_add_modify_loc (loc, pblock, saved_dovar, dovar);
1526 }
1527
1528 /* Cycle and exit statements are implemented with gotos. */
1529 cycle_label = gfc_build_label_decl (NULL_TREE);
1530 exit_label = gfc_build_label_decl (NULL_TREE);
1531
1532 /* Put the labels where they can be found later. See gfc_trans_do(). */
1533 code->cycle_label = cycle_label;
1534 code->exit_label = exit_label;
1535
1536 /* Loop body. */
1537 gfc_start_block (&body);
1538
1539 /* Main loop body. */
1540 tmp = gfc_trans_code_cond (code->block->next, exit_cond);
1541 gfc_add_expr_to_block (&body, tmp);
1542
1543 /* Label for cycle statements (if needed). */
1544 if (TREE_USED (cycle_label))
1545 {
1546 tmp = build1_v (LABEL_EXPR, cycle_label);
1547 gfc_add_expr_to_block (&body, tmp);
1548 }
1549
1550 /* Check whether someone has modified the loop variable. */
1551 if (gfc_option.rtcheck & GFC_RTCHECK_DO)
1552 {
1553 tmp = fold_build2_loc (loc, NE_EXPR, boolean_type_node,
1554 dovar, saved_dovar);
1555 gfc_trans_runtime_check (true, false, tmp, &body, &code->loc,
1556 "Loop variable has been modified");
1557 }
1558
1559 /* Exit the loop if there is an I/O result condition or error. */
1560 if (exit_cond)
1561 {
1562 tmp = build1_v (GOTO_EXPR, exit_label);
1563 tmp = fold_build3_loc (loc, COND_EXPR, void_type_node,
1564 exit_cond, tmp,
1565 build_empty_stmt (loc));
1566 gfc_add_expr_to_block (&body, tmp);
1567 }
1568
1569 /* Evaluate the loop condition. */
1570 cond = fold_build2_loc (loc, EQ_EXPR, boolean_type_node, dovar,
1571 to);
1572 cond = gfc_evaluate_now_loc (loc, cond, &body);
1573
1574 /* Increment the loop variable. */
1575 tmp = fold_build2_loc (loc, PLUS_EXPR, type, dovar, step);
1576 gfc_add_modify_loc (loc, &body, dovar, tmp);
1577
1578 if (gfc_option.rtcheck & GFC_RTCHECK_DO)
1579 gfc_add_modify_loc (loc, &body, saved_dovar, dovar);
1580
1581 /* The loop exit. */
1582 tmp = fold_build1_loc (loc, GOTO_EXPR, void_type_node, exit_label);
1583 TREE_USED (exit_label) = 1;
1584 tmp = fold_build3_loc (loc, COND_EXPR, void_type_node,
1585 cond, tmp, build_empty_stmt (loc));
1586 gfc_add_expr_to_block (&body, tmp);
1587
1588 /* Finish the loop body. */
1589 tmp = gfc_finish_block (&body);
1590 tmp = fold_build1_loc (loc, LOOP_EXPR, void_type_node, tmp);
1591
1592 /* Only execute the loop if the number of iterations is positive. */
1593 if (tree_int_cst_sgn (step) > 0)
1594 cond = fold_build2_loc (loc, LE_EXPR, boolean_type_node, dovar,
1595 to);
1596 else
1597 cond = fold_build2_loc (loc, GE_EXPR, boolean_type_node, dovar,
1598 to);
1599 tmp = fold_build3_loc (loc, COND_EXPR, void_type_node, cond, tmp,
1600 build_empty_stmt (loc));
1601 gfc_add_expr_to_block (pblock, tmp);
1602
1603 /* Add the exit label. */
1604 tmp = build1_v (LABEL_EXPR, exit_label);
1605 gfc_add_expr_to_block (pblock, tmp);
1606
1607 return gfc_finish_block (pblock);
1608 }
1609
1610 /* Translate the DO construct. This obviously is one of the most
1611 important ones to get right with any compiler, but especially
1612 so for Fortran.
1613
1614 We special case some loop forms as described in gfc_trans_simple_do.
1615 For other cases we implement them with a separate loop count,
1616 as described in the standard.
1617
1618 We translate a do loop from:
1619
1620 DO dovar = from, to, step
1621 body
1622 END DO
1623
1624 to:
1625
1626 [evaluate loop bounds and step]
1627 empty = (step > 0 ? to < from : to > from);
1628 countm1 = (to - from) / step;
1629 dovar = from;
1630 if (empty) goto exit_label;
1631 for (;;)
1632 {
1633 body;
1634 cycle_label:
1635 dovar += step
1636 countm1t = countm1;
1637 countm1--;
1638 if (countm1t == 0) goto exit_label;
1639 }
1640 exit_label:
1641
1642 countm1 is an unsigned integer. It is equal to the loop count minus one,
1643 because the loop count itself can overflow. */
1644
1645 tree
1646 gfc_trans_do (gfc_code * code, tree exit_cond)
1647 {
1648 gfc_se se;
1649 tree dovar;
1650 tree saved_dovar = NULL;
1651 tree from;
1652 tree to;
1653 tree step;
1654 tree countm1;
1655 tree type;
1656 tree utype;
1657 tree cond;
1658 tree cycle_label;
1659 tree exit_label;
1660 tree tmp;
1661 stmtblock_t block;
1662 stmtblock_t body;
1663 location_t loc;
1664
1665 gfc_start_block (&block);
1666
1667 loc = code->ext.iterator->start->where.lb->location;
1668
1669 /* Evaluate all the expressions in the iterator. */
1670 gfc_init_se (&se, NULL);
1671 gfc_conv_expr_lhs (&se, code->ext.iterator->var);
1672 gfc_add_block_to_block (&block, &se.pre);
1673 dovar = se.expr;
1674 type = TREE_TYPE (dovar);
1675
1676 gfc_init_se (&se, NULL);
1677 gfc_conv_expr_val (&se, code->ext.iterator->start);
1678 gfc_add_block_to_block (&block, &se.pre);
1679 from = gfc_evaluate_now (se.expr, &block);
1680
1681 gfc_init_se (&se, NULL);
1682 gfc_conv_expr_val (&se, code->ext.iterator->end);
1683 gfc_add_block_to_block (&block, &se.pre);
1684 to = gfc_evaluate_now (se.expr, &block);
1685
1686 gfc_init_se (&se, NULL);
1687 gfc_conv_expr_val (&se, code->ext.iterator->step);
1688 gfc_add_block_to_block (&block, &se.pre);
1689 step = gfc_evaluate_now (se.expr, &block);
1690
1691 if (gfc_option.rtcheck & GFC_RTCHECK_DO)
1692 {
1693 tmp = fold_build2_loc (input_location, EQ_EXPR, boolean_type_node, step,
1694 build_zero_cst (type));
1695 gfc_trans_runtime_check (true, false, tmp, &block, &code->loc,
1696 "DO step value is zero");
1697 }
1698
1699 /* Special case simple loops. */
1700 if (TREE_CODE (type) == INTEGER_TYPE
1701 && (integer_onep (step)
1702 || tree_int_cst_equal (step, integer_minus_one_node)))
1703 return gfc_trans_simple_do (code, &block, dovar, from, to, step, exit_cond);
1704
1705
1706 if (TREE_CODE (type) == INTEGER_TYPE)
1707 utype = unsigned_type_for (type);
1708 else
1709 utype = unsigned_type_for (gfc_array_index_type);
1710 countm1 = gfc_create_var (utype, "countm1");
1711
1712 /* Cycle and exit statements are implemented with gotos. */
1713 cycle_label = gfc_build_label_decl (NULL_TREE);
1714 exit_label = gfc_build_label_decl (NULL_TREE);
1715 TREE_USED (exit_label) = 1;
1716
1717 /* Put these labels where they can be found later. */
1718 code->cycle_label = cycle_label;
1719 code->exit_label = exit_label;
1720
1721 /* Initialize the DO variable: dovar = from. */
1722 gfc_add_modify (&block, dovar, from);
1723
1724 /* Save value for do-tinkering checking. */
1725 if (gfc_option.rtcheck & GFC_RTCHECK_DO)
1726 {
1727 saved_dovar = gfc_create_var (type, ".saved_dovar");
1728 gfc_add_modify_loc (loc, &block, saved_dovar, dovar);
1729 }
1730
1731 /* Initialize loop count and jump to exit label if the loop is empty.
1732 This code is executed before we enter the loop body. We generate:
1733 if (step > 0)
1734 {
1735 countm1 = (to - from) / step;
1736 if (to < from)
1737 goto exit_label;
1738 }
1739 else
1740 {
1741 countm1 = (from - to) / -step;
1742 if (to > from)
1743 goto exit_label;
1744 }
1745 */
1746
1747 if (TREE_CODE (type) == INTEGER_TYPE)
1748 {
1749 tree pos, neg, tou, fromu, stepu, tmp2;
1750
1751 /* The distance from FROM to TO cannot always be represented in a signed
1752 type, thus use unsigned arithmetic, also to avoid any undefined
1753 overflow issues. */
1754 tou = fold_convert (utype, to);
1755 fromu = fold_convert (utype, from);
1756 stepu = fold_convert (utype, step);
1757
1758 /* For a positive step, when to < from, exit, otherwise compute
1759 countm1 = ((unsigned)to - (unsigned)from) / (unsigned)step */
1760 tmp = fold_build2_loc (loc, LT_EXPR, boolean_type_node, to, from);
1761 tmp2 = fold_build2_loc (loc, TRUNC_DIV_EXPR, utype,
1762 fold_build2_loc (loc, MINUS_EXPR, utype,
1763 tou, fromu),
1764 stepu);
1765 pos = build2 (COMPOUND_EXPR, void_type_node,
1766 fold_build2 (MODIFY_EXPR, void_type_node,
1767 countm1, tmp2),
1768 build3_loc (loc, COND_EXPR, void_type_node, tmp,
1769 build1_loc (loc, GOTO_EXPR, void_type_node,
1770 exit_label), NULL_TREE));
1771
1772 /* For a negative step, when to > from, exit, otherwise compute
1773 countm1 = ((unsigned)from - (unsigned)to) / -(unsigned)step */
1774 tmp = fold_build2_loc (loc, GT_EXPR, boolean_type_node, to, from);
1775 tmp2 = fold_build2_loc (loc, TRUNC_DIV_EXPR, utype,
1776 fold_build2_loc (loc, MINUS_EXPR, utype,
1777 fromu, tou),
1778 fold_build1_loc (loc, NEGATE_EXPR, utype, stepu));
1779 neg = build2 (COMPOUND_EXPR, void_type_node,
1780 fold_build2 (MODIFY_EXPR, void_type_node,
1781 countm1, tmp2),
1782 build3_loc (loc, COND_EXPR, void_type_node, tmp,
1783 build1_loc (loc, GOTO_EXPR, void_type_node,
1784 exit_label), NULL_TREE));
1785
1786 tmp = fold_build2_loc (loc, LT_EXPR, boolean_type_node, step,
1787 build_int_cst (TREE_TYPE (step), 0));
1788 tmp = fold_build3_loc (loc, COND_EXPR, void_type_node, tmp, neg, pos);
1789
1790 gfc_add_expr_to_block (&block, tmp);
1791 }
1792 else
1793 {
1794 tree pos_step;
1795
1796 /* TODO: We could use the same width as the real type.
1797 This would probably cause more problems than it solves
1798 when we implement "long double" types. */
1799
1800 tmp = fold_build2_loc (loc, MINUS_EXPR, type, to, from);
1801 tmp = fold_build2_loc (loc, RDIV_EXPR, type, tmp, step);
1802 tmp = fold_build1_loc (loc, FIX_TRUNC_EXPR, utype, tmp);
1803 gfc_add_modify (&block, countm1, tmp);
1804
1805 /* We need a special check for empty loops:
1806 empty = (step > 0 ? to < from : to > from); */
1807 pos_step = fold_build2_loc (loc, GT_EXPR, boolean_type_node, step,
1808 build_zero_cst (type));
1809 tmp = fold_build3_loc (loc, COND_EXPR, boolean_type_node, pos_step,
1810 fold_build2_loc (loc, LT_EXPR,
1811 boolean_type_node, to, from),
1812 fold_build2_loc (loc, GT_EXPR,
1813 boolean_type_node, to, from));
1814 /* If the loop is empty, go directly to the exit label. */
1815 tmp = fold_build3_loc (loc, COND_EXPR, void_type_node, tmp,
1816 build1_v (GOTO_EXPR, exit_label),
1817 build_empty_stmt (input_location));
1818 gfc_add_expr_to_block (&block, tmp);
1819 }
1820
1821 /* Loop body. */
1822 gfc_start_block (&body);
1823
1824 /* Main loop body. */
1825 tmp = gfc_trans_code_cond (code->block->next, exit_cond);
1826 gfc_add_expr_to_block (&body, tmp);
1827
1828 /* Label for cycle statements (if needed). */
1829 if (TREE_USED (cycle_label))
1830 {
1831 tmp = build1_v (LABEL_EXPR, cycle_label);
1832 gfc_add_expr_to_block (&body, tmp);
1833 }
1834
1835 /* Check whether someone has modified the loop variable. */
1836 if (gfc_option.rtcheck & GFC_RTCHECK_DO)
1837 {
1838 tmp = fold_build2_loc (loc, NE_EXPR, boolean_type_node, dovar,
1839 saved_dovar);
1840 gfc_trans_runtime_check (true, false, tmp, &body, &code->loc,
1841 "Loop variable has been modified");
1842 }
1843
1844 /* Exit the loop if there is an I/O result condition or error. */
1845 if (exit_cond)
1846 {
1847 tmp = build1_v (GOTO_EXPR, exit_label);
1848 tmp = fold_build3_loc (loc, COND_EXPR, void_type_node,
1849 exit_cond, tmp,
1850 build_empty_stmt (input_location));
1851 gfc_add_expr_to_block (&body, tmp);
1852 }
1853
1854 /* Increment the loop variable. */
1855 tmp = fold_build2_loc (loc, PLUS_EXPR, type, dovar, step);
1856 gfc_add_modify_loc (loc, &body, dovar, tmp);
1857
1858 if (gfc_option.rtcheck & GFC_RTCHECK_DO)
1859 gfc_add_modify_loc (loc, &body, saved_dovar, dovar);
1860
1861 /* Save the pre-decrement loop count in countm1t for the exit test below. */
1862 tree countm1t = gfc_create_var (utype, "countm1t");
1863 gfc_add_modify_loc (loc, &body, countm1t, countm1);
1864
1865 /* Decrement the loop count. */
1866 tmp = fold_build2_loc (loc, MINUS_EXPR, utype, countm1,
1867 build_int_cst (utype, 1));
1868 gfc_add_modify_loc (loc, &body, countm1, tmp);
1869
1870 /* End with the loop condition. Loop until countm1t == 0. */
1871 cond = fold_build2_loc (loc, EQ_EXPR, boolean_type_node, countm1t,
1872 build_int_cst (utype, 0));
1873 tmp = fold_build1_loc (loc, GOTO_EXPR, void_type_node, exit_label);
1874 tmp = fold_build3_loc (loc, COND_EXPR, void_type_node,
1875 cond, tmp, build_empty_stmt (loc));
1876 gfc_add_expr_to_block (&body, tmp);
1877
1878 /* End of loop body. */
1879 tmp = gfc_finish_block (&body);
1880
1881 /* The for loop itself. */
1882 tmp = fold_build1_loc (loc, LOOP_EXPR, void_type_node, tmp);
1883 gfc_add_expr_to_block (&block, tmp);
1884
1885 /* Add the exit label. */
1886 tmp = build1_v (LABEL_EXPR, exit_label);
1887 gfc_add_expr_to_block (&block, tmp);
1888
1889 return gfc_finish_block (&block);
1890 }
1891
1892
1893 /* Translate the DO WHILE construct.
1894
1895 We translate
1896
1897 DO WHILE (cond)
1898 body
1899 END DO
1900
1901 to:
1902
1903 for ( ; ; )
1904 {
1905 pre_cond;
1906 if (! cond) goto exit_label;
1907 body;
1908 cycle_label:
1909 }
1910 exit_label:
1911
1912 Because the evaluation of the exit condition `cond' may have side
1913 effects, we can't do much for empty loop bodies. The backend optimizers
1914 should be smart enough to eliminate any dead loops. */
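/* A hypothetical example: for DO WHILE (f(x) < 10), f is called again on
   every pass, because `pre_cond' (the code that evaluates the condition)
   is emitted inside the loop body.  (f and x are illustrative names only.)  */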
1915
1916 tree
1917 gfc_trans_do_while (gfc_code * code)
1918 {
1919 gfc_se cond;
1920 tree tmp;
1921 tree cycle_label;
1922 tree exit_label;
1923 stmtblock_t block;
1924
1925 /* Everything we build here is part of the loop body. */
1926 gfc_start_block (&block);
1927
1928 /* Cycle and exit statements are implemented with gotos. */
1929 cycle_label = gfc_build_label_decl (NULL_TREE);
1930 exit_label = gfc_build_label_decl (NULL_TREE);
1931
1932 /* Put the labels where they can be found later. See gfc_trans_do(). */
1933 code->cycle_label = cycle_label;
1934 code->exit_label = exit_label;
1935
1936 /* Create a GIMPLE version of the exit condition. */
1937 gfc_init_se (&cond, NULL);
1938 gfc_conv_expr_val (&cond, code->expr1);
1939 gfc_add_block_to_block (&block, &cond.pre);
1940 cond.expr = fold_build1_loc (code->expr1->where.lb->location,
1941 TRUTH_NOT_EXPR, TREE_TYPE (cond.expr), cond.expr);
1942
1943 /* Build "IF (! cond) GOTO exit_label". */
1944 tmp = build1_v (GOTO_EXPR, exit_label);
1945 TREE_USED (exit_label) = 1;
1946 tmp = fold_build3_loc (code->expr1->where.lb->location, COND_EXPR,
1947 void_type_node, cond.expr, tmp,
1948 build_empty_stmt (code->expr1->where.lb->location));
1949 gfc_add_expr_to_block (&block, tmp);
1950
1951 /* The main body of the loop. */
1952 tmp = gfc_trans_code (code->block->next);
1953 gfc_add_expr_to_block (&block, tmp);
1954
1955 /* Label for cycle statements (if needed). */
1956 if (TREE_USED (cycle_label))
1957 {
1958 tmp = build1_v (LABEL_EXPR, cycle_label);
1959 gfc_add_expr_to_block (&block, tmp);
1960 }
1961
1962 /* End of loop body. */
1963 tmp = gfc_finish_block (&block);
1964
1965 gfc_init_block (&block);
1966 /* Build the loop. */
1967 tmp = fold_build1_loc (code->expr1->where.lb->location, LOOP_EXPR,
1968 void_type_node, tmp);
1969 gfc_add_expr_to_block (&block, tmp);
1970
1971 /* Add the exit label. */
1972 tmp = build1_v (LABEL_EXPR, exit_label);
1973 gfc_add_expr_to_block (&block, tmp);
1974
1975 return gfc_finish_block (&block);
1976 }
1977
1978
1979 /* Translate the SELECT CASE construct for INTEGER case expressions,
1980 without killing all potential optimizations. The problem is that
1981 Fortran allows unbounded cases, but the back-end does not, so we
1982 need to intercept those before we enter the equivalent SWITCH_EXPR
1983 we can build.
1984
1985 For example, we translate this,
1986
1987 SELECT CASE (expr)
1988 CASE (:100,101,105:115)
1989 block_1
1990 CASE (190:199,200:)
1991 block_2
1992 CASE (300)
1993 block_3
1994 CASE DEFAULT
1995 block_4
1996 END SELECT
1997
1998 to the GENERIC equivalent,
1999
2000 switch (expr)
2001 {
2002 case (minimum value for typeof(expr)) ... 100:
2003 case 101:
2004 case 105 ... 115:
2005 block_1;
2006 goto end_label;
2007
2008 case 200 ... (maximum value for typeof(expr)):
2009 case 190 ... 199:
2010 block_2;
2011 goto end_label;
2012
2013 case 300:
2014 block_3;
2015 goto end_label;
2016
2017 default:
2018 block_4;
2019 goto end_label;
2020 }
2021
2022 end_label: */
2023
2024 static tree
2025 gfc_trans_integer_select (gfc_code * code)
2026 {
2027 gfc_code *c;
2028 gfc_case *cp;
2029 tree end_label;
2030 tree tmp;
2031 gfc_se se;
2032 stmtblock_t block;
2033 stmtblock_t body;
2034
2035 gfc_start_block (&block);
2036
2037 /* Calculate the switch expression. */
2038 gfc_init_se (&se, NULL);
2039 gfc_conv_expr_val (&se, code->expr1);
2040 gfc_add_block_to_block (&block, &se.pre);
2041
2042 end_label = gfc_build_label_decl (NULL_TREE);
2043
2044 gfc_init_block (&body);
2045
2046 for (c = code->block; c; c = c->block)
2047 {
2048 for (cp = c->ext.block.case_list; cp; cp = cp->next)
2049 {
2050 tree low, high;
2051 tree label;
2052
2053 /* Assume it's the default case. */
2054 low = high = NULL_TREE;
2055
2056 if (cp->low)
2057 {
2058 low = gfc_conv_mpz_to_tree (cp->low->value.integer,
2059 cp->low->ts.kind);
2060
2061 /* If there's only a lower bound, set the high bound to the
2062 maximum value of the case expression. */
2063 if (!cp->high)
2064 high = TYPE_MAX_VALUE (TREE_TYPE (se.expr));
2065 }
2066
2067 if (cp->high)
2068 {
2069 /* Three cases are possible here:
2070
2071 1) There is no lower bound, e.g. CASE (:N).
2072 2) There is a lower bound .NE. high bound, that is
2073 a case range, e.g. CASE (N:M) where M>N (we make
2074 sure that M>N during type resolution).
2075 3) There is a lower bound, and it has the same value
2076 as the high bound, e.g. CASE (N:N). This is our
2077 internal representation of CASE(N).
2078
2079 In the first and second case, we need to set a value for
2080 high. In the third case, we don't because the GCC middle
2081 end represents a single case value by just letting high be
2082 a NULL_TREE. We can't do that because we need to be able
2083 to represent unbounded cases. */
2084
2085 if (!cp->low
2086 || (cp->low
2087 && mpz_cmp (cp->low->value.integer,
2088 cp->high->value.integer) != 0))
2089 high = gfc_conv_mpz_to_tree (cp->high->value.integer,
2090 cp->high->ts.kind);
2091
2092 /* Unbounded case. */
2093 if (!cp->low)
2094 low = TYPE_MIN_VALUE (TREE_TYPE (se.expr));
2095 }
2096
2097 /* Build a label. */
2098 label = gfc_build_label_decl (NULL_TREE);
2099
2100 /* Add this case label.  Pass the 'label' decl explicitly; the GCC
2101 middle end requires one for every case. */
2102 tmp = build_case_label (low, high, label);
2103 gfc_add_expr_to_block (&body, tmp);
2104 }
2105
2106 /* Add the statements for this case. */
2107 tmp = gfc_trans_code (c->next);
2108 gfc_add_expr_to_block (&body, tmp);
2109
2110 /* Break to the end of the construct. */
2111 tmp = build1_v (GOTO_EXPR, end_label);
2112 gfc_add_expr_to_block (&body, tmp);
2113 }
2114
2115 tmp = gfc_finish_block (&body);
2116 tmp = fold_build3_loc (input_location, SWITCH_EXPR, NULL_TREE,
2117 se.expr, tmp, NULL_TREE);
2118 gfc_add_expr_to_block (&block, tmp);
2119
2120 tmp = build1_v (LABEL_EXPR, end_label);
2121 gfc_add_expr_to_block (&block, tmp);
2122
2123 return gfc_finish_block (&block);
2124 }
2125
2126
2127 /* Translate the SELECT CASE construct for LOGICAL case expressions.
2128
2129 There are only two cases possible here, even though the standard
2130 does allow three cases in a LOGICAL SELECT CASE construct: .TRUE.,
2131 .FALSE., and DEFAULT.
2132
2133 We never generate more than two blocks here. Instead, we always
2134 try to eliminate the DEFAULT case. This way, we can translate this
2135 kind of SELECT construct to a simple
2136
2137 if {} else {};
2138
2139 expression in GENERIC. */
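/* For example, a SELECT CASE with only a .TRUE. block and a DEFAULT block is
   translated as "if (expr) { true-block } else { default-block }": the code
   below folds the DEFAULT case into the missing .FALSE. branch.  */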
2140
2141 static tree
2142 gfc_trans_logical_select (gfc_code * code)
2143 {
2144 gfc_code *c;
2145 gfc_code *t, *f, *d;
2146 gfc_case *cp;
2147 gfc_se se;
2148 stmtblock_t block;
2149
2150 /* Assume we don't have any cases at all. */
2151 t = f = d = NULL;
2152
2153 /* Now see which ones we actually do have. We can have at most two
2154 cases in a single case list: one for .TRUE. and one for .FALSE.
2155 The default case is always separate. If the cases for .TRUE. and
2156 .FALSE. are in the same case list, the block for that case list
2157 is always executed, and we don't generate a COND_EXPR. */
2158 for (c = code->block; c; c = c->block)
2159 {
2160 for (cp = c->ext.block.case_list; cp; cp = cp->next)
2161 {
2162 if (cp->low)
2163 {
2164 if (cp->low->value.logical == 0) /* .FALSE. */
2165 f = c;
2166 else /* if (cp->low->value.logical != 0), thus .TRUE. */
2167 t = c;
2168 }
2169 else
2170 d = c;
2171 }
2172 }
2173
2174 /* Start a new block. */
2175 gfc_start_block (&block);
2176
2177 /* Calculate the switch expression. We always need to do this
2178 because it may have side effects. */
2179 gfc_init_se (&se, NULL);
2180 gfc_conv_expr_val (&se, code->expr1);
2181 gfc_add_block_to_block (&block, &se.pre);
2182
2183 if (t == f && t != NULL)
2184 {
2185 /* Cases for .TRUE. and .FALSE. are in the same block. Just
2186 translate the code for these cases, append it to the current
2187 block. */
2188 gfc_add_expr_to_block (&block, gfc_trans_code (t->next));
2189 }
2190 else
2191 {
2192 tree true_tree, false_tree, stmt;
2193
2194 true_tree = build_empty_stmt (input_location);
2195 false_tree = build_empty_stmt (input_location);
2196
2197 /* If we have a case for .TRUE. and for .FALSE., discard the default case.
2198 Otherwise, if .TRUE. or .FALSE. is missing and there is a default case,
2199 make the missing case the default case. */
2200 if (t != NULL && f != NULL)
2201 d = NULL;
2202 else if (d != NULL)
2203 {
2204 if (t == NULL)
2205 t = d;
2206 else
2207 f = d;
2208 }
2209
2210 /* Translate the code for each of these blocks, and append it to
2211 the current block. */
2212 if (t != NULL)
2213 true_tree = gfc_trans_code (t->next);
2214
2215 if (f != NULL)
2216 false_tree = gfc_trans_code (f->next);
2217
2218 stmt = fold_build3_loc (input_location, COND_EXPR, void_type_node,
2219 se.expr, true_tree, false_tree);
2220 gfc_add_expr_to_block (&block, stmt);
2221 }
2222
2223 return gfc_finish_block (&block);
2224 }
2225
2226
2227 /* The jump table types are stored in static variables to avoid
2228 constructing them from scratch every single time. */
2229 static GTY(()) tree select_struct[2];
2230
2231 /* Translate the SELECT CASE construct for CHARACTER case expressions.
2232 Instead of generating compares and jumps, it is far simpler to
2233 generate a data structure describing the cases in order and call a
2234 library subroutine that locates the right case.
2235 This is particularly true because this is the only case where we
2236 might have to dispose of a temporary.
2237 The library subroutine returns the case number of the branch to take;
2238 the result is used as the selector of a SWITCH_EXPR. */
2239
2240 static tree
2241 gfc_trans_character_select (gfc_code *code)
2242 {
2243 tree init, end_label, tmp, type, case_num, label, fndecl;
2244 stmtblock_t block, body;
2245 gfc_case *cp, *d;
2246 gfc_code *c;
2247 gfc_se se, expr1se;
2248 int n, k;
2249 vec<constructor_elt, va_gc> *inits = NULL;
2250
2251 tree pchartype = gfc_get_pchar_type (code->expr1->ts.kind);
2252
2253 /* The jump table types are stored in static variables to avoid
2254 constructing them from scratch every single time. */
2255 static tree ss_string1[2], ss_string1_len[2];
2256 static tree ss_string2[2], ss_string2_len[2];
2257 static tree ss_target[2];
2258
2259 cp = code->block->ext.block.case_list;
2260 while (cp->left != NULL)
2261 cp = cp->left;
2262
2263 /* Generate the body */
2264 gfc_start_block (&block);
2265 gfc_init_se (&expr1se, NULL);
2266 gfc_conv_expr_reference (&expr1se, code->expr1);
2267
2268 gfc_add_block_to_block (&block, &expr1se.pre);
2269
2270 end_label = gfc_build_label_decl (NULL_TREE);
2271
2272 gfc_init_block (&body);
2273
2274 /* Attempt to optimize length 1 selects. */
2275 if (integer_onep (expr1se.string_length))
2276 {
2277 for (d = cp; d; d = d->right)
2278 {
2279 int i;
2280 if (d->low)
2281 {
2282 gcc_assert (d->low->expr_type == EXPR_CONSTANT
2283 && d->low->ts.type == BT_CHARACTER);
2284 if (d->low->value.character.length > 1)
2285 {
2286 for (i = 1; i < d->low->value.character.length; i++)
2287 if (d->low->value.character.string[i] != ' ')
2288 break;
2289 if (i != d->low->value.character.length)
2290 {
2291 if (optimize && d->high && i == 1)
2292 {
2293 gcc_assert (d->high->expr_type == EXPR_CONSTANT
2294 && d->high->ts.type == BT_CHARACTER);
2295 if (d->high->value.character.length > 1
2296 && (d->low->value.character.string[0]
2297 == d->high->value.character.string[0])
2298 && d->high->value.character.string[1] != ' '
2299 && ((d->low->value.character.string[1] < ' ')
2300 == (d->high->value.character.string[1]
2301 < ' ')))
2302 continue;
2303 }
2304 break;
2305 }
2306 }
2307 }
2308 if (d->high)
2309 {
2310 gcc_assert (d->high->expr_type == EXPR_CONSTANT
2311 && d->high->ts.type == BT_CHARACTER);
2312 if (d->high->value.character.length > 1)
2313 {
2314 for (i = 1; i < d->high->value.character.length; i++)
2315 if (d->high->value.character.string[i] != ' ')
2316 break;
2317 if (i != d->high->value.character.length)
2318 break;
2319 }
2320 }
2321 }
2322 if (d == NULL)
2323 {
2324 tree ctype = gfc_get_char_type (code->expr1->ts.kind);
2325
2326 for (c = code->block; c; c = c->block)
2327 {
2328 for (cp = c->ext.block.case_list; cp; cp = cp->next)
2329 {
2330 tree low, high;
2331 tree label;
2332 gfc_char_t r;
2333
2334 /* Assume it's the default case. */
2335 low = high = NULL_TREE;
2336
2337 if (cp->low)
2338 {
2339 /* CASE ('ab') or CASE ('ab':'az') will never match
2340 any length 1 character. */
2341 if (cp->low->value.character.length > 1
2342 && cp->low->value.character.string[1] != ' ')
2343 continue;
2344
2345 if (cp->low->value.character.length > 0)
2346 r = cp->low->value.character.string[0];
2347 else
2348 r = ' ';
2349 low = build_int_cst (ctype, r);
2350
2351 /* If there's only a lower bound, set the high bound
2352 to the maximum value of the case expression. */
2353 if (!cp->high)
2354 high = TYPE_MAX_VALUE (ctype);
2355 }
2356
2357 if (cp->high)
2358 {
2359 if (!cp->low
2360 || (cp->low->value.character.string[0]
2361 != cp->high->value.character.string[0]))
2362 {
2363 if (cp->high->value.character.length > 0)
2364 r = cp->high->value.character.string[0];
2365 else
2366 r = ' ';
2367 high = build_int_cst (ctype, r);
2368 }
2369
2370 /* Unbounded case. */
2371 if (!cp->low)
2372 low = TYPE_MIN_VALUE (ctype);
2373 }
2374
2375 /* Build a label. */
2376 label = gfc_build_label_decl (NULL_TREE);
2377
2378 /* Add this case label.  Pass the 'label' decl explicitly; the GCC
2379 middle end requires one for every case. */
2380 tmp = build_case_label (low, high, label);
2381 gfc_add_expr_to_block (&body, tmp);
2382 }
2383
2384 /* Add the statements for this case. */
2385 tmp = gfc_trans_code (c->next);
2386 gfc_add_expr_to_block (&body, tmp);
2387
2388 /* Break to the end of the construct. */
2389 tmp = build1_v (GOTO_EXPR, end_label);
2390 gfc_add_expr_to_block (&body, tmp);
2391 }
2392
2393 tmp = gfc_string_to_single_character (expr1se.string_length,
2394 expr1se.expr,
2395 code->expr1->ts.kind);
2396 case_num = gfc_create_var (ctype, "case_num");
2397 gfc_add_modify (&block, case_num, tmp);
2398
2399 gfc_add_block_to_block (&block, &expr1se.post);
2400
2401 tmp = gfc_finish_block (&body);
2402 tmp = fold_build3_loc (input_location, SWITCH_EXPR, NULL_TREE,
2403 case_num, tmp, NULL_TREE);
2404 gfc_add_expr_to_block (&block, tmp);
2405
2406 tmp = build1_v (LABEL_EXPR, end_label);
2407 gfc_add_expr_to_block (&block, tmp);
2408
2409 return gfc_finish_block (&block);
2410 }
2411 }
2412
2413 if (code->expr1->ts.kind == 1)
2414 k = 0;
2415 else if (code->expr1->ts.kind == 4)
2416 k = 1;
2417 else
2418 gcc_unreachable ();
2419
2420 if (select_struct[k] == NULL)
2421 {
2422 tree *chain = NULL;
2423 select_struct[k] = make_node (RECORD_TYPE);
2424
2425 if (code->expr1->ts.kind == 1)
2426 TYPE_NAME (select_struct[k]) = get_identifier ("_jump_struct_char1");
2427 else if (code->expr1->ts.kind == 4)
2428 TYPE_NAME (select_struct[k]) = get_identifier ("_jump_struct_char4");
2429 else
2430 gcc_unreachable ();
2431
2432 #undef ADD_FIELD
2433 #define ADD_FIELD(NAME, TYPE) \
2434 ss_##NAME[k] = gfc_add_field_to_struct (select_struct[k], \
2435 get_identifier (stringize(NAME)), \
2436 TYPE, \
2437 &chain)
2438
2439 ADD_FIELD (string1, pchartype);
2440 ADD_FIELD (string1_len, gfc_charlen_type_node);
2441
2442 ADD_FIELD (string2, pchartype);
2443 ADD_FIELD (string2_len, gfc_charlen_type_node);
2444
2445 ADD_FIELD (target, integer_type_node);
2446 #undef ADD_FIELD
2447
2448 gfc_finish_type (select_struct[k]);
2449 }
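/* A sketch of the record type just laid out, for kind=1 (the exact field
   types come from the ADD_FIELD calls above):

       struct _jump_struct_char1
       {
         char *string1;                low bound string, NULL if unbounded
         gfc_charlen_type string1_len;
         char *string2;                high bound string, NULL if unbounded
         gfc_charlen_type string2_len;
         int target;                   case number to branch to
       };  */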
2450
2451 n = 0;
2452 for (d = cp; d; d = d->right)
2453 d->n = n++;
2454
2455 for (c = code->block; c; c = c->block)
2456 {
2457 for (d = c->ext.block.case_list; d; d = d->next)
2458 {
2459 label = gfc_build_label_decl (NULL_TREE);
2460 tmp = build_case_label ((d->low == NULL && d->high == NULL)
2461 ? NULL
2462 : build_int_cst (integer_type_node, d->n),
2463 NULL, label);
2464 gfc_add_expr_to_block (&body, tmp);
2465 }
2466
2467 tmp = gfc_trans_code (c->next);
2468 gfc_add_expr_to_block (&body, tmp);
2469
2470 tmp = build1_v (GOTO_EXPR, end_label);
2471 gfc_add_expr_to_block (&body, tmp);
2472 }
2473
2474 /* Generate the structure describing the branches */
2475 for (d = cp; d; d = d->right)
2476 {
2477 vec<constructor_elt, va_gc> *node = NULL;
2478
2479 gfc_init_se (&se, NULL);
2480
2481 if (d->low == NULL)
2482 {
2483 CONSTRUCTOR_APPEND_ELT (node, ss_string1[k], null_pointer_node);
2484 CONSTRUCTOR_APPEND_ELT (node, ss_string1_len[k], integer_zero_node);
2485 }
2486 else
2487 {
2488 gfc_conv_expr_reference (&se, d->low);
2489
2490 CONSTRUCTOR_APPEND_ELT (node, ss_string1[k], se.expr);
2491 CONSTRUCTOR_APPEND_ELT (node, ss_string1_len[k], se.string_length);
2492 }
2493
2494 if (d->high == NULL)
2495 {
2496 CONSTRUCTOR_APPEND_ELT (node, ss_string2[k], null_pointer_node);
2497 CONSTRUCTOR_APPEND_ELT (node, ss_string2_len[k], integer_zero_node);
2498 }
2499 else
2500 {
2501 gfc_init_se (&se, NULL);
2502 gfc_conv_expr_reference (&se, d->high);
2503
2504 CONSTRUCTOR_APPEND_ELT (node, ss_string2[k], se.expr);
2505 CONSTRUCTOR_APPEND_ELT (node, ss_string2_len[k], se.string_length);
2506 }
2507
2508 CONSTRUCTOR_APPEND_ELT (node, ss_target[k],
2509 build_int_cst (integer_type_node, d->n));
2510
2511 tmp = build_constructor (select_struct[k], node);
2512 CONSTRUCTOR_APPEND_ELT (inits, NULL_TREE, tmp);
2513 }
2514
2515 type = build_array_type (select_struct[k],
2516 build_index_type (size_int (n-1)));
2517
2518 init = build_constructor (type, inits);
2519 TREE_CONSTANT (init) = 1;
2520 TREE_STATIC (init) = 1;
2521 /* Create a static variable to hold the jump table. */
2522 tmp = gfc_create_var (type, "jumptable");
2523 TREE_CONSTANT (tmp) = 1;
2524 TREE_STATIC (tmp) = 1;
2525 TREE_READONLY (tmp) = 1;
2526 DECL_INITIAL (tmp) = init;
2527 init = tmp;
2528
2529 /* Build the library call */
2530 init = gfc_build_addr_expr (pvoid_type_node, init);
2531
2532 if (code->expr1->ts.kind == 1)
2533 fndecl = gfor_fndecl_select_string;
2534 else if (code->expr1->ts.kind == 4)
2535 fndecl = gfor_fndecl_select_string_char4;
2536 else
2537 gcc_unreachable ();
2538
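/* The runtime search routine takes the jump table, the number of entries,
   and the selector string with its length; the returned case number feeds
   the SWITCH_EXPR below.  */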
2539 tmp = build_call_expr_loc (input_location,
2540 fndecl, 4, init,
2541 build_int_cst (gfc_charlen_type_node, n),
2542 expr1se.expr, expr1se.string_length);
2543 case_num = gfc_create_var (integer_type_node, "case_num");
2544 gfc_add_modify (&block, case_num, tmp);
2545
2546 gfc_add_block_to_block (&block, &expr1se.post);
2547
2548 tmp = gfc_finish_block (&body);
2549 tmp = fold_build3_loc (input_location, SWITCH_EXPR, NULL_TREE,
2550 case_num, tmp, NULL_TREE);
2551 gfc_add_expr_to_block (&block, tmp);
2552
2553 tmp = build1_v (LABEL_EXPR, end_label);
2554 gfc_add_expr_to_block (&block, tmp);
2555
2556 return gfc_finish_block (&block);
2557 }
2558
2559
2560 /* Translate the three variants of the SELECT CASE construct.
2561
2562 SELECT CASEs with INTEGER case expressions can be translated to an
2563 equivalent GENERIC switch statement, and for LOGICAL case
2564 expressions we build one or two if-else compares.
2565
2566 SELECT CASEs with CHARACTER case expressions are a whole different
2567 story, because they don't exist in GENERIC. So we sort them and
2568 do a binary search at runtime.
2569
2570 Fortran has no BREAK statement, and it does not allow jumps from
2571 one case block to another. That makes things a lot easier for
2572 the optimizers. */
2573
2574 tree
2575 gfc_trans_select (gfc_code * code)
2576 {
2577 stmtblock_t block;
2578 tree body;
2579 tree exit_label;
2580
2581 gcc_assert (code && code->expr1);
2582 gfc_init_block (&block);
2583
2584 /* Build the exit label and hang it in. */
2585 exit_label = gfc_build_label_decl (NULL_TREE);
2586 code->exit_label = exit_label;
2587
2588 /* Empty SELECT constructs are legal. */
2589 if (code->block == NULL)
2590 body = build_empty_stmt (input_location);
2591
2592 /* Select the correct translation function. */
2593 else
2594 switch (code->expr1->ts.type)
2595 {
2596 case BT_LOGICAL:
2597 body = gfc_trans_logical_select (code);
2598 break;
2599
2600 case BT_INTEGER:
2601 body = gfc_trans_integer_select (code);
2602 break;
2603
2604 case BT_CHARACTER:
2605 body = gfc_trans_character_select (code);
2606 break;
2607
2608 default:
2609 gfc_internal_error ("gfc_trans_select(): Bad type for case expr.");
2610 /* Not reached */
2611 }
2612
2613 /* Build everything together. */
2614 gfc_add_expr_to_block (&block, body);
2615 gfc_add_expr_to_block (&block, build1_v (LABEL_EXPR, exit_label));
2616
2617 return gfc_finish_block (&block);
2618 }
2619
2620
2621 /* Traversal function to substitute a replacement symtree if the symbol
2622 in the expression is the same as that passed. f == 2 signals that
2623 the variable itself is not to be checked - only the references.
2624 This group of functions is used when the variable expression in a
2625 FORALL assignment has internal references. For example:
2626 FORALL (i = 1:4) p(p(i)) = i
2627 The only recourse here is to store a copy of 'p' for the index
2628 expression. */
2629
2630 static gfc_symtree *new_symtree;
2631 static gfc_symtree *old_symtree;
2632
2633 static bool
2634 forall_replace (gfc_expr *expr, gfc_symbol *sym, int *f)
2635 {
2636 if (expr->expr_type != EXPR_VARIABLE)
2637 return false;
2638
2639 if (*f == 2)
2640 *f = 1;
2641 else if (expr->symtree->n.sym == sym)
2642 expr->symtree = new_symtree;
2643
2644 return false;
2645 }
2646
2647 static void
2648 forall_replace_symtree (gfc_expr *e, gfc_symbol *sym, int f)
2649 {
2650 gfc_traverse_expr (e, sym, forall_replace, f);
2651 }
2652
2653 static bool
2654 forall_restore (gfc_expr *expr,
2655 gfc_symbol *sym ATTRIBUTE_UNUSED,
2656 int *f ATTRIBUTE_UNUSED)
2657 {
2658 if (expr->expr_type != EXPR_VARIABLE)
2659 return false;
2660
2661 if (expr->symtree == new_symtree)
2662 expr->symtree = old_symtree;
2663
2664 return false;
2665 }
2666
2667 static void
2668 forall_restore_symtree (gfc_expr *e)
2669 {
2670 gfc_traverse_expr (e, NULL, forall_restore, 0);
2671 }
2672
2673 static void
2674 forall_make_variable_temp (gfc_code *c, stmtblock_t *pre, stmtblock_t *post)
2675 {
2676 gfc_se tse;
2677 gfc_se rse;
2678 gfc_expr *e;
2679 gfc_symbol *new_sym;
2680 gfc_symbol *old_sym;
2681 gfc_symtree *root;
2682 tree tmp;
2683
2684 /* Build a copy of the lvalue. */
2685 old_symtree = c->expr1->symtree;
2686 old_sym = old_symtree->n.sym;
2687 e = gfc_lval_expr_from_sym (old_sym);
2688 if (old_sym->attr.dimension)
2689 {
2690 gfc_init_se (&tse, NULL);
2691 gfc_conv_subref_array_arg (&tse, e, 0, INTENT_IN, false);
2692 gfc_add_block_to_block (pre, &tse.pre);
2693 gfc_add_block_to_block (post, &tse.post);
2694 tse.expr = build_fold_indirect_ref_loc (input_location, tse.expr);
2695
2696 if (e->ts.type != BT_CHARACTER)
2697 {
2698 /* Use the variable offset for the temporary. */
2699 tmp = gfc_conv_array_offset (old_sym->backend_decl);
2700 gfc_conv_descriptor_offset_set (pre, tse.expr, tmp);
2701 }
2702 }
2703 else
2704 {
2705 gfc_init_se (&tse, NULL);
2706 gfc_init_se (&rse, NULL);
2707 gfc_conv_expr (&rse, e);
2708 if (e->ts.type == BT_CHARACTER)
2709 {
2710 tse.string_length = rse.string_length;
2711 tmp = gfc_get_character_type_len (gfc_default_character_kind,
2712 tse.string_length);
2713 tse.expr = gfc_conv_string_tmp (&tse, build_pointer_type (tmp),
2714 rse.string_length);
2715 gfc_add_block_to_block (pre, &tse.pre);
2716 gfc_add_block_to_block (post, &tse.post);
2717 }
2718 else
2719 {
2720 tmp = gfc_typenode_for_spec (&e->ts);
2721 tse.expr = gfc_create_var (tmp, "temp");
2722 }
2723
2724 tmp = gfc_trans_scalar_assign (&tse, &rse, e->ts, true,
2725 e->expr_type == EXPR_VARIABLE, true);
2726 gfc_add_expr_to_block (pre, tmp);
2727 }
2728 gfc_free_expr (e);
2729
2730 /* Create a new symbol to represent the lvalue. */
2731 new_sym = gfc_new_symbol (old_sym->name, NULL);
2732 new_sym->ts = old_sym->ts;
2733 new_sym->attr.referenced = 1;
2734 new_sym->attr.temporary = 1;
2735 new_sym->attr.dimension = old_sym->attr.dimension;
2736 new_sym->attr.flavor = old_sym->attr.flavor;
2737
2738 /* Use the temporary as the backend_decl. */
2739 new_sym->backend_decl = tse.expr;
2740
2741 /* Create a fake symtree for it. */
2742 root = NULL;
2743 new_symtree = gfc_new_symtree (&root, old_sym->name);
2744 new_symtree->n.sym = new_sym;
2745 gcc_assert (new_symtree == root);
2746
2747 /* Go through the expression reference replacing the old_symtree
2748 with the new. */
2749 forall_replace_symtree (c->expr1, old_sym, 2);
2750
2751 /* Now we have made this temporary, we might as well use it for
2752 the right hand side. */
2753 forall_replace_symtree (c->expr2, old_sym, 1);
2754 }
2755
2756
2757 /* Handles dependencies in forall assignments. */
2758 static int
2759 check_forall_dependencies (gfc_code *c, stmtblock_t *pre, stmtblock_t *post)
2760 {
2761 gfc_ref *lref;
2762 gfc_ref *rref;
2763 int need_temp;
2764 gfc_symbol *lsym;
2765
2766 lsym = c->expr1->symtree->n.sym;
2767 need_temp = gfc_check_dependency (c->expr1, c->expr2, 0);
2768
2769 /* Now check for dependencies within the 'variable'
2770 expression itself. These are treated by making a complete
2771 copy of the variable and making all the references to it
2772 point to the copy instead. Note that the shallow copy of
2773 the variable will not suffice for derived types with
2774 pointer components. We therefore leave these to their
2775 own devices. */
2776 if (lsym->ts.type == BT_DERIVED
2777 && lsym->ts.u.derived->attr.pointer_comp)
2778 return need_temp;
2779
2780 new_symtree = NULL;
2781 if (find_forall_index (c->expr1, lsym, 2))
2782 {
2783 forall_make_variable_temp (c, pre, post);
2784 need_temp = 0;
2785 }
2786
2787 /* Substrings with dependencies are treated in the same
2788 way. */
2789 if (c->expr1->ts.type == BT_CHARACTER
2790 && c->expr1->ref
2791 && c->expr2->expr_type == EXPR_VARIABLE
2792 && lsym == c->expr2->symtree->n.sym)
2793 {
2794 for (lref = c->expr1->ref; lref; lref = lref->next)
2795 if (lref->type == REF_SUBSTRING)
2796 break;
2797 for (rref = c->expr2->ref; rref; rref = rref->next)
2798 if (rref->type == REF_SUBSTRING)
2799 break;
2800
2801 if (rref && lref
2802 && gfc_dep_compare_expr (rref->u.ss.start, lref->u.ss.start) < 0)
2803 {
2804 forall_make_variable_temp (c, pre, post);
2805 need_temp = 0;
2806 }
2807 }
2808 return need_temp;
2809 }
2810
2811
2812 static void
2813 cleanup_forall_symtrees (gfc_code *c)
2814 {
2815 forall_restore_symtree (c->expr1);
2816 forall_restore_symtree (c->expr2);
2817 free (new_symtree->n.sym);
2818 free (new_symtree);
2819 }
2820
2821
2822 /* Generate the loops for a FORALL block, specified by FORALL_TMP. BODY
2823 is the contents of the FORALL block/stmt to be iterated. MASK_FLAG
2824 indicates whether we should generate code to test the FORALL's mask
2825 array. OUTER is the loop header to be used for initializing mask
2826 indices.
2827
2828 The generated loop format is:
2829 count = (end - start + step) / step
2830 loopvar = start
2831 while (1)
2832 {
2833 if (count <=0 )
2834 goto end_of_loop
2835 <body>
2836 loopvar += step
2837 count --
2838 }
2839 end_of_loop: */
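/* With start = 1, end = 10 and step = 3, for example, the formula above gives
   count = (10 - 1 + 3) / 3 = 4, so the body runs for loopvar = 1, 4, 7, 10.  */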
2840
2841 static tree
2842 gfc_trans_forall_loop (forall_info *forall_tmp, tree body,
2843 int mask_flag, stmtblock_t *outer)
2844 {
2845 int n, nvar;
2846 tree tmp;
2847 tree cond;
2848 stmtblock_t block;
2849 tree exit_label;
2850 tree count;
2851 tree var, start, end, step;
2852 iter_info *iter;
2853
2854 /* Initialize the mask index outside the FORALL nest. */
2855 if (mask_flag && forall_tmp->mask)
2856 gfc_add_modify (outer, forall_tmp->maskindex, gfc_index_zero_node);
2857
2858 iter = forall_tmp->this_loop;
2859 nvar = forall_tmp->nvar;
2860 for (n = 0; n < nvar; n++)
2861 {
2862 var = iter->var;
2863 start = iter->start;
2864 end = iter->end;
2865 step = iter->step;
2866
2867 exit_label = gfc_build_label_decl (NULL_TREE);
2868 TREE_USED (exit_label) = 1;
2869
2870 /* The loop counter. */
2871 count = gfc_create_var (TREE_TYPE (var), "count");
2872
2873 /* The body of the loop. */
2874 gfc_init_block (&block);
2875
2876 /* The exit condition. */
2877 cond = fold_build2_loc (input_location, LE_EXPR, boolean_type_node,
2878 count, build_int_cst (TREE_TYPE (count), 0));
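/* For DO CONCURRENT the iterations are independent, so annotate the loop
   condition with ivdep to tell the middle end it may ignore assumed
   loop-carried dependencies.  */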
2879 if (forall_tmp->do_concurrent)
2880 cond = build2 (ANNOTATE_EXPR, TREE_TYPE (cond), cond,
2881 build_int_cst (integer_type_node,
2882 annot_expr_ivdep_kind));
2883
2884 tmp = build1_v (GOTO_EXPR, exit_label);
2885 tmp = fold_build3_loc (input_location, COND_EXPR, void_type_node,
2886 cond, tmp, build_empty_stmt (input_location));
2887 gfc_add_expr_to_block (&block, tmp);
2888
2889 /* The main loop body. */
2890 gfc_add_expr_to_block (&block, body);
2891
2892 /* Increment the loop variable. */
2893 tmp = fold_build2_loc (input_location, PLUS_EXPR, TREE_TYPE (var), var,
2894 step);
2895 gfc_add_modify (&block, var, tmp);
2896
2897 /* Advance to the next mask element. Only do this for the
2898 innermost loop. */
2899 if (n == 0 && mask_flag && forall_tmp->mask)
2900 {
2901 tree maskindex = forall_tmp->maskindex;
2902 tmp = fold_build2_loc (input_location, PLUS_EXPR, gfc_array_index_type,
2903 maskindex, gfc_index_one_node);
2904 gfc_add_modify (&block, maskindex, tmp);
2905 }
2906
2907 /* Decrement the loop counter. */
2908 tmp = fold_build2_loc (input_location, MINUS_EXPR, TREE_TYPE (var), count,
2909 build_int_cst (TREE_TYPE (var), 1));
2910 gfc_add_modify (&block, count, tmp);
2911
2912 body = gfc_finish_block (&block);
2913
2914 /* Loop var initialization. */
2915 gfc_init_block (&block);
2916 gfc_add_modify (&block, var, start);
2917
2918
2919 /* Initialize the loop counter. */
2920 tmp = fold_build2_loc (input_location, MINUS_EXPR, TREE_TYPE (var), step,
2921 start);
2922 tmp = fold_build2_loc (input_location, PLUS_EXPR, TREE_TYPE (var), end,
2923 tmp);
2924 tmp = fold_build2_loc (input_location, TRUNC_DIV_EXPR, TREE_TYPE (var),
2925 tmp, step);
2926 gfc_add_modify (&block, count, tmp);
2927
2928 /* The loop expression. */
2929 tmp = build1_v (LOOP_EXPR, body);
2930 gfc_add_expr_to_block (&block, tmp);
2931
2932 /* The exit label. */
2933 tmp = build1_v (LABEL_EXPR, exit_label);
2934 gfc_add_expr_to_block (&block, tmp);
2935
2936 body = gfc_finish_block (&block);
2937 iter = iter->next;
2938 }
2939 return body;
2940 }
2941
2942
2943 /* Generate the body and loops according to MASK_FLAG. If MASK_FLAG
2944 is nonzero, the body is controlled by all masks in the forall nest.
2945 Otherwise, the innermost loop is not controlled by its mask. This
2946 is used for initializing that mask. */
2947
2948 static tree
2949 gfc_trans_nested_forall_loop (forall_info * nested_forall_info, tree body,
2950 int mask_flag)
2951 {
2952 tree tmp;
2953 stmtblock_t header;
2954 forall_info *forall_tmp;
2955 tree mask, maskindex;
2956
2957 gfc_start_block (&header);
2958
2959 forall_tmp = nested_forall_info;
2960 while (forall_tmp != NULL)
2961 {
2962 /* Generate body with masks' control. */
2963 if (mask_flag)
2964 {
2965 mask = forall_tmp->mask;
2966 maskindex = forall_tmp->maskindex;
2967
2968 /* If a mask was specified make the assignment conditional. */
2969 if (mask)
2970 {
2971 tmp = gfc_build_array_ref (mask, maskindex, NULL);
2972 body = build3_v (COND_EXPR, tmp, body,
2973 build_empty_stmt (input_location));
2974 }
2975 }
2976 body = gfc_trans_forall_loop (forall_tmp, body, mask_flag, &header);
2977 forall_tmp = forall_tmp->prev_nest;
2978 mask_flag = 1;
2979 }
2980
2981 gfc_add_expr_to_block (&header, body);
2982 return gfc_finish_block (&header);
2983 }
2984
2985
2986 /* Allocate data for holding a temporary array. Returns either a local
2987 temporary array or a pointer variable. */
2988
2989 static tree
2990 gfc_do_allocate (tree bytesize, tree size, tree * pdata, stmtblock_t * pblock,
2991 tree elem_type)
2992 {
2993 tree tmpvar;
2994 tree type;
2995 tree tmp;
2996
2997 if (INTEGER_CST_P (size))
2998 tmp = fold_build2_loc (input_location, MINUS_EXPR, gfc_array_index_type,
2999 size, gfc_index_one_node);
3000 else
3001 tmp = NULL_TREE;
3002
3003 type = build_range_type (gfc_array_index_type, gfc_index_zero_node, tmp);
3004 type = build_array_type (elem_type, type);
3005 if (gfc_can_put_var_on_stack (bytesize))
3006 {
3007 gcc_assert (INTEGER_CST_P (size));
3008 tmpvar = gfc_create_var (type, "temp");
3009 *pdata = NULL_TREE;
3010 }
3011 else
3012 {
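/* Allocate the temporary on the heap instead; *PDATA is set so that the
   caller can free the memory when it is no longer needed.  */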
3013 tmpvar = gfc_create_var (build_pointer_type (type), "temp");
3014 *pdata = convert (pvoid_type_node, tmpvar);
3015
3016 tmp = gfc_call_malloc (pblock, TREE_TYPE (tmpvar), bytesize);
3017 gfc_add_modify (pblock, tmpvar, tmp);
3018 }
3019 return tmpvar;
3020 }
3021
3022
3023 /* Generate code to copy the temporary to the actual lhs. */
3024
3025 static tree
3026 generate_loop_for_temp_to_lhs (gfc_expr *expr, tree tmp1, tree count3,
3027 tree count1, tree wheremask, bool invert)
3028 {
3029 gfc_ss *lss;
3030 gfc_se lse, rse;
3031 stmtblock_t block, body;
3032 gfc_loopinfo loop1;
3033 tree tmp;
3034 tree wheremaskexpr;
3035
3036 /* Walk the lhs. */
3037 lss = gfc_walk_expr (expr);
3038
3039 if (lss == gfc_ss_terminator)
3040 {
3041 gfc_start_block (&block);
3042
3043 gfc_init_se (&lse, NULL);
3044
3045 /* Translate the expression. */
3046 gfc_conv_expr (&lse, expr);
3047
3048 /* Form the expression for the temporary. */
3049 tmp = gfc_build_array_ref (tmp1, count1, NULL);
3050
3051 /* Use the scalar assignment as is. */
3052 gfc_add_block_to_block (&block, &lse.pre);
3053 gfc_add_modify (&block, lse.expr, tmp);
3054 gfc_add_block_to_block (&block, &lse.post);
3055
3056 /* Increment the count1. */
3057 tmp = fold_build2_loc (input_location, PLUS_EXPR, TREE_TYPE (count1),
3058 count1, gfc_index_one_node);
3059 gfc_add_modify (&block, count1, tmp);
3060
3061 tmp = gfc_finish_block (&block);
3062 }
3063 else
3064 {
3065 gfc_start_block (&block);
3066
3067 gfc_init_loopinfo (&loop1);
3068 gfc_init_se (&rse, NULL);
3069 gfc_init_se (&lse, NULL);
3070
3071 /* Associate the lss with the loop. */
3072 gfc_add_ss_to_loop (&loop1, lss);
3073
3074 /* Calculate the bounds of the scalarization. */
3075 gfc_conv_ss_startstride (&loop1);
3076 /* Setup the scalarizing loops. */
3077 gfc_conv_loop_setup (&loop1, &expr->where);
3078
3079 gfc_mark_ss_chain_used (lss, 1);
3080
3081 /* Start the scalarized loop body. */
3082 gfc_start_scalarized_body (&loop1, &body);
3083
3084 /* Setup the gfc_se structures. */
3085 gfc_copy_loopinfo_to_se (&lse, &loop1);
3086 lse.ss = lss;
3087
3088 /* Form the expression of the temporary. */
3089 if (lss != gfc_ss_terminator)
3090 rse.expr = gfc_build_array_ref (tmp1, count1, NULL);
3091 /* Translate expr. */
3092 gfc_conv_expr (&lse, expr);
3093
3094 /* Use the scalar assignment. */
3095 rse.string_length = lse.string_length;
3096 tmp = gfc_trans_scalar_assign (&lse, &rse, expr->ts, false, true, true);
3097
3098 /* Form the mask expression according to the mask tree list. */
3099 if (wheremask)
3100 {
3101 wheremaskexpr = gfc_build_array_ref (wheremask, count3, NULL);
3102 if (invert)
3103 wheremaskexpr = fold_build1_loc (input_location, TRUTH_NOT_EXPR,
3104 TREE_TYPE (wheremaskexpr),
3105 wheremaskexpr);
3106 tmp = fold_build3_loc (input_location, COND_EXPR, void_type_node,
3107 wheremaskexpr, tmp,
3108 build_empty_stmt (input_location));
3109 }
3110
3111 gfc_add_expr_to_block (&body, tmp);
3112
3113 /* Increment count1. */
3114 tmp = fold_build2_loc (input_location, PLUS_EXPR, gfc_array_index_type,
3115 count1, gfc_index_one_node);
3116 gfc_add_modify (&body, count1, tmp);
3117
3118 /* Increment count3. */
3119 if (count3)
3120 {
3121 tmp = fold_build2_loc (input_location, PLUS_EXPR,
3122 gfc_array_index_type, count3,
3123 gfc_index_one_node);
3124 gfc_add_modify (&body, count3, tmp);
3125 }
3126
3127 /* Generate the copying loops. */
3128 gfc_trans_scalarizing_loops (&loop1, &body);
3129 gfc_add_block_to_block (&block, &loop1.pre);
3130 gfc_add_block_to_block (&block, &loop1.post);
3131 gfc_cleanup_loop (&loop1);
3132
3133 tmp = gfc_finish_block (&block);
3134 }
3135 return tmp;
3136 }
3137
3138
3139 /* Generate code to copy the rhs to the temporary. TMP1 is the address of
3140 the temporary; LSS and RSS are formed in compute_inner_temp_size ()
3141 and should not be freed. WHEREMASK is the conditional execution mask
3142 whose sense may be inverted by INVERT. */
3143
3144 static tree
3145 generate_loop_for_rhs_to_temp (gfc_expr *expr2, tree tmp1, tree count3,
3146 tree count1, gfc_ss *lss, gfc_ss *rss,
3147 tree wheremask, bool invert)
3148 {
3149 stmtblock_t block, body1;
3150 gfc_loopinfo loop;
3151 gfc_se lse;
3152 gfc_se rse;
3153 tree tmp;
3154 tree wheremaskexpr;
3155
3156 gfc_start_block (&block);
3157
3158 gfc_init_se (&rse, NULL);
3159 gfc_init_se (&lse, NULL);
3160
3161 if (lss == gfc_ss_terminator)
3162 {
3163 gfc_init_block (&body1);
3164 gfc_conv_expr (&rse, expr2);
3165 lse.expr = gfc_build_array_ref (tmp1, count1, NULL);
3166 }
3167 else
3168 {
3169 /* Initialize the loop. */
3170 gfc_init_loopinfo (&loop);
3171
3172 /* We may need LSS to determine the shape of the expression. */
3173 gfc_add_ss_to_loop (&loop, lss);
3174 gfc_add_ss_to_loop (&loop, rss);
3175
3176 gfc_conv_ss_startstride (&loop);
3177 gfc_conv_loop_setup (&loop, &expr2->where);
3178
3179 gfc_mark_ss_chain_used (rss, 1);
3180 /* Start the loop body. */
3181 gfc_start_scalarized_body (&loop, &body1);
3182
3183 /* Translate the expression. */
3184 gfc_copy_loopinfo_to_se (&rse, &loop);
3185 rse.ss = rss;
3186 gfc_conv_expr (&rse, expr2);
3187
3188 /* Form the expression of the temporary. */
3189 lse.expr = gfc_build_array_ref (tmp1, count1, NULL);
3190 }
3191
3192 /* Use the scalar assignment. */
3193 lse.string_length = rse.string_length;
3194 tmp = gfc_trans_scalar_assign (&lse, &rse, expr2->ts, true,
3195 expr2->expr_type == EXPR_VARIABLE, true);
3196
3197 /* Form the mask expression according to the mask tree list. */
3198 if (wheremask)
3199 {
3200 wheremaskexpr = gfc_build_array_ref (wheremask, count3, NULL);
3201 if (invert)
3202 wheremaskexpr = fold_build1_loc (input_location, TRUTH_NOT_EXPR,
3203 TREE_TYPE (wheremaskexpr),
3204 wheremaskexpr);
3205 tmp = fold_build3_loc (input_location, COND_EXPR, void_type_node,
3206 wheremaskexpr, tmp,
3207 build_empty_stmt (input_location));
3208 }
3209
3210 gfc_add_expr_to_block (&body1, tmp);
3211
3212 if (lss == gfc_ss_terminator)
3213 {
3214 gfc_add_block_to_block (&block, &body1);
3215
3216 /* Increment count1. */
3217 tmp = fold_build2_loc (input_location, PLUS_EXPR, TREE_TYPE (count1),
3218 count1, gfc_index_one_node);
3219 gfc_add_modify (&block, count1, tmp);
3220 }
3221 else
3222 {
3223 /* Increment count1. */
3224 tmp = fold_build2_loc (input_location, PLUS_EXPR, gfc_array_index_type,
3225 count1, gfc_index_one_node);
3226 gfc_add_modify (&body1, count1, tmp);
3227
3228 /* Increment count3. */
3229 if (count3)
3230 {
3231 tmp = fold_build2_loc (input_location, PLUS_EXPR,
3232 gfc_array_index_type,
3233 count3, gfc_index_one_node);
3234 gfc_add_modify (&body1, count3, tmp);
3235 }
3236
3237 /* Generate the copying loops. */
3238 gfc_trans_scalarizing_loops (&loop, &body1);
3239
3240 gfc_add_block_to_block (&block, &loop.pre);
3241 gfc_add_block_to_block (&block, &loop.post);
3242
3243 gfc_cleanup_loop (&loop);
3244 /* TODO: Reuse lss and rss when copying temp->lhs. Need to be careful
3245 as tree nodes in SS may not be valid in a different scope. */
3246 }
3247
3248 tmp = gfc_finish_block (&block);
3249 return tmp;
3250 }
3251
3252
3253 /* Calculate the size of the temporary needed in the assignment inside forall.
3254 LSS and RSS are filled in this function. */
3255
3256 static tree
3257 compute_inner_temp_size (gfc_expr *expr1, gfc_expr *expr2,
3258 stmtblock_t * pblock,
3259 gfc_ss **lss, gfc_ss **rss)
3260 {
3261 gfc_loopinfo loop;
3262 tree size;
3263 int i;
3264 int save_flag;
3265 tree tmp;
3266
3267 *lss = gfc_walk_expr (expr1);
3268 *rss = NULL;
3269
3270 size = gfc_index_one_node;
3271 if (*lss != gfc_ss_terminator)
3272 {
3273 gfc_init_loopinfo (&loop);
3274
3275 /* Walk the RHS of the expression. */
3276 *rss = gfc_walk_expr (expr2);
3277 if (*rss == gfc_ss_terminator)
3278 /* The rhs is scalar. Add a ss for the expression. */
3279 *rss = gfc_get_scalar_ss (gfc_ss_terminator, expr2);
3280
3281 /* Associate the SS with the loop. */
3282 gfc_add_ss_to_loop (&loop, *lss);
3283 /* We don't actually need to add the rhs at this point, but it might
3284 make guessing the loop bounds a bit easier. */
3285 gfc_add_ss_to_loop (&loop, *rss);
3286
3287 /* We only want the shape of the expression, not the rest of the junk
3288 generated by the scalarizer. */
3289 loop.array_parameter = 1;
3290
3291 /* Calculate the bounds of the scalarization. */
3292 save_flag = gfc_option.rtcheck;
3293 gfc_option.rtcheck &= ~GFC_RTCHECK_BOUNDS;
3294 gfc_conv_ss_startstride (&loop);
3295 gfc_option.rtcheck = save_flag;
3296 gfc_conv_loop_setup (&loop, &expr2->where);
3297
3298 /* Figure out how many elements we need. */
3299 for (i = 0; i < loop.dimen; i++)
3300 {
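/* Each dimension contributes (to[i] - from[i] + 1) elements to the total.  */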
3301 tmp = fold_build2_loc (input_location, MINUS_EXPR,
3302 gfc_array_index_type,
3303 gfc_index_one_node, loop.from[i]);
3304 tmp = fold_build2_loc (input_location, PLUS_EXPR,
3305 gfc_array_index_type, tmp, loop.to[i]);
3306 size = fold_build2_loc (input_location, MULT_EXPR,
3307 gfc_array_index_type, size, tmp);
3308 }
3309 gfc_add_block_to_block (pblock, &loop.pre);
3310 size = gfc_evaluate_now (size, pblock);
3311 gfc_add_block_to_block (pblock, &loop.post);
3312
3313 /* TODO: write a function that cleans up a loopinfo without freeing
3314 the SS chains. Currently a NOP. */
3315 }
3316
3317 return size;
3318 }
3319
3320
3321 /* Calculate the overall iterator number of the nested forall construct.
3322 This routine actually calculates the number of times the body of the
3323 nested forall specified by NESTED_FORALL_INFO is executed and multiplies
3324 that by the expression INNER_SIZE. The BLOCK argument specifies the
3325 block in which to calculate the result, and the optional INNER_SIZE_BODY
3326 argument contains any statements that need to be executed (inside the loop)
3327 to initialize or calculate INNER_SIZE. */
3328
3329 static tree
3330 compute_overall_iter_number (forall_info *nested_forall_info, tree inner_size,
3331 stmtblock_t *inner_size_body, stmtblock_t *block)
3332 {
3333 forall_info *forall_tmp = nested_forall_info;
3334 tree tmp, number;
3335 stmtblock_t body;
3336
3337 /* We can eliminate the innermost unconditional loops with constant
3338 array bounds. */
3339 if (INTEGER_CST_P (inner_size))
3340 {
3341 while (forall_tmp
3342 && !forall_tmp->mask
3343 && INTEGER_CST_P (forall_tmp->size))
3344 {
3345 inner_size = fold_build2_loc (input_location, MULT_EXPR,
3346 gfc_array_index_type,
3347 inner_size, forall_tmp->size);
3348 forall_tmp = forall_tmp->prev_nest;
3349 }
3350
3351 /* If there are no loops left, we have our constant result. */
3352 if (!forall_tmp)
3353 return inner_size;
3354 }
3355
3356 /* Otherwise, create a temporary variable to compute the result. */
3357 number = gfc_create_var (gfc_array_index_type, "num");
3358 gfc_add_modify (block, number, gfc_index_zero_node);
3359
3360 gfc_start_block (&body);
3361 if (inner_size_body)
3362 gfc_add_block_to_block (&body, inner_size_body);
3363 if (forall_tmp)
3364 tmp = fold_build2_loc (input_location, PLUS_EXPR,
3365 gfc_array_index_type, number, inner_size);
3366 else
3367 tmp = inner_size;
3368 gfc_add_modify (&body, number, tmp);
3369 tmp = gfc_finish_block (&body);
3370
3371 /* Generate loops. */
3372 if (forall_tmp != NULL)
3373 tmp = gfc_trans_nested_forall_loop (forall_tmp, tmp, 1);
3374
3375 gfc_add_expr_to_block (block, tmp);
3376
3377 return number;
3378 }
3379
3380
3381 /* Allocate a temporary for the forall construct. SIZE is the size of the
3382 temporary needed. PTEMP1 is returned so the space can be freed later. */
3383
3384 static tree
3385 allocate_temp_for_forall_nest_1 (tree type, tree size, stmtblock_t * block,
3386 tree * ptemp1)
3387 {
3388 tree bytesize;
3389 tree unit;
3390 tree tmp;
3391
3392 unit = fold_convert (gfc_array_index_type, TYPE_SIZE_UNIT (type));
3393 if (!integer_onep (unit))
3394 bytesize = fold_build2_loc (input_location, MULT_EXPR,
3395 gfc_array_index_type, size, unit);
3396 else
3397 bytesize = size;
3398
3399 *ptemp1 = NULL;
3400 tmp = gfc_do_allocate (bytesize, size, ptemp1, block, type);
3401
3402 if (*ptemp1)
3403 tmp = build_fold_indirect_ref_loc (input_location, tmp);
3404 return tmp;
3405 }
3406
3407
3408 /* Allocate a temporary for the forall construct according to the information
3409 in nested_forall_info. INNER_SIZE is the size of the temporary needed in the
3410 assignment inside the forall. PTEMP1 is returned so the space can be freed later. */
3411
3412 static tree
3413 allocate_temp_for_forall_nest (forall_info * nested_forall_info, tree type,
3414 tree inner_size, stmtblock_t * inner_size_body,
3415 stmtblock_t * block, tree * ptemp1)
3416 {
3417 tree size;
3418
3419 /* Calculate the total size of temporary needed in forall construct. */
3420 size = compute_overall_iter_number (nested_forall_info, inner_size,
3421 inner_size_body, block);
3422
3423 return allocate_temp_for_forall_nest_1 (type, size, block, ptemp1);
3424 }
3425
3426
3427 /* Handle assignments inside a forall which need a temporary.
3428
3429 forall (i=start:end:stride; maskexpr)
3430 e<i> = f<i>
3431 end forall
3432 (where e,f<i> are arbitrary expressions possibly involving i
3433 and there is a dependency between e<i> and f<i>)
3434 Translates to:
3435 masktmp(:) = maskexpr(:)
3436
3437 maskindex = 0;
3438 count1 = 0;
3439 num = 0;
3440 for (i = start; i <= end; i += stride)
3441 num += SIZE (f<i>)
3442 count1 = 0;
3443 ALLOCATE (tmp(num))
3444 for (i = start; i <= end; i += stride)
3445 {
3446 if (masktmp[maskindex++])
3447 tmp[count1++] = f<i>
3448 }
3449 maskindex = 0;
3450 count1 = 0;
3451 for (i = start; i <= end; i += stride)
3452 {
3453 if (masktmp[maskindex++])
3454 e<i> = tmp[count1++]
3455 }
3456 DEALLOCATE (tmp)
3457 */
3458 static void
3459 gfc_trans_assign_need_temp (gfc_expr * expr1, gfc_expr * expr2,
3460 tree wheremask, bool invert,
3461 forall_info * nested_forall_info,
3462 stmtblock_t * block)
3463 {
3464 tree type;
3465 tree inner_size;
3466 gfc_ss *lss, *rss;
3467 tree count, count1;
3468 tree tmp, tmp1;
3469 tree ptemp1;
3470 stmtblock_t inner_size_body;
3471
3472 /* Create vars. count1 is the current iterator number of the nested
3473 forall. */
3474 count1 = gfc_create_var (gfc_array_index_type, "count1");
3475
3476 /* Count is the wheremask index. */
3477 if (wheremask)
3478 {
3479 count = gfc_create_var (gfc_array_index_type, "count");
3480 gfc_add_modify (block, count, gfc_index_zero_node);
3481 }
3482 else
3483 count = NULL;
3484
3485 /* Initialize count1. */
3486 gfc_add_modify (block, count1, gfc_index_zero_node);
3487
3488 /* Calculate the size of the temporary needed in the assignment. This also
3489 fills in lss and rss, which are used in generate_loop_for_rhs_to_temp (). */
3490 gfc_init_block (&inner_size_body);
3491 inner_size = compute_inner_temp_size (expr1, expr2, &inner_size_body,
3492 &lss, &rss);
3493
3494 /* The type of LHS. Used in function allocate_temp_for_forall_nest */
3495 if (expr1->ts.type == BT_CHARACTER && expr1->ts.u.cl->length)
3496 {
3497 if (!expr1->ts.u.cl->backend_decl)
3498 {
3499 gfc_se tse;
3500 gfc_init_se (&tse, NULL);
3501 gfc_conv_expr (&tse, expr1->ts.u.cl->length);
3502 expr1->ts.u.cl->backend_decl = tse.expr;
3503 }
3504 type = gfc_get_character_type_len (gfc_default_character_kind,
3505 expr1->ts.u.cl->backend_decl);
3506 }
3507 else
3508 type = gfc_typenode_for_spec (&expr1->ts);
3509
3510 /* Allocate temporary for nested forall construct according to the
3511 information in nested_forall_info and inner_size. */
3512 tmp1 = allocate_temp_for_forall_nest (nested_forall_info, type, inner_size,
3513 &inner_size_body, block, &ptemp1);
3514
3515 /* Generate code to copy the rhs to the temporary. */
3516 tmp = generate_loop_for_rhs_to_temp (expr2, tmp1, count, count1, lss, rss,
3517 wheremask, invert);
3518
3519 /* Generate body and loops according to the information in
3520 nested_forall_info. */
3521 tmp = gfc_trans_nested_forall_loop (nested_forall_info, tmp, 1);
3522 gfc_add_expr_to_block (block, tmp);
3523
3524 /* Reset count1. */
3525 gfc_add_modify (block, count1, gfc_index_zero_node);
3526
3527 /* Reset count. */
3528 if (wheremask)
3529 gfc_add_modify (block, count, gfc_index_zero_node);
3530
3531 /* Generate code to copy the temporary to the lhs. */
3532 tmp = generate_loop_for_temp_to_lhs (expr1, tmp1, count, count1,
3533 wheremask, invert);
3534
3535 /* Generate body and loops according to the information in
3536 nested_forall_info. */
3537 tmp = gfc_trans_nested_forall_loop (nested_forall_info, tmp, 1);
3538 gfc_add_expr_to_block (block, tmp);
3539
3540 if (ptemp1)
3541 {
3542 /* Free the temporary. */
3543 tmp = gfc_call_free (ptemp1);
3544 gfc_add_expr_to_block (block, tmp);
3545 }
3546 }
3547
3548
3549 /* Translate a pointer assignment inside FORALL which needs a temporary. */
3550
3551 static void
3552 gfc_trans_pointer_assign_need_temp (gfc_expr * expr1, gfc_expr * expr2,
3553 forall_info * nested_forall_info,
3554 stmtblock_t * block)
3555 {
3556 tree type;
3557 tree inner_size;
3558 gfc_ss *lss, *rss;
3559 gfc_se lse;
3560 gfc_se rse;
3561 gfc_array_info *info;
3562 gfc_loopinfo loop;
3563 tree desc;
3564 tree parm;
3565 tree parmtype;
3566 stmtblock_t body;
3567 tree count;
3568 tree tmp, tmp1, ptemp1;
3569
3570 count = gfc_create_var (gfc_array_index_type, "count");
3571 gfc_add_modify (block, count, gfc_index_zero_node);
3572
3573 inner_size = gfc_index_one_node;
3574 lss = gfc_walk_expr (expr1);
3575 rss = gfc_walk_expr (expr2);
3576 if (lss == gfc_ss_terminator)
3577 {
3578 type = gfc_typenode_for_spec (&expr1->ts);
3579 type = build_pointer_type (type);
3580
3581 /* Allocate temporary for nested forall construct according to the
3582 information in nested_forall_info and inner_size. */
3583 tmp1 = allocate_temp_for_forall_nest (nested_forall_info, type,
3584 inner_size, NULL, block, &ptemp1);
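/* First pass: evaluate the rhs pointers and store them in the temporary
   array, one entry per forall iteration.  */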
3585 gfc_start_block (&body);
3586 gfc_init_se (&lse, NULL);
3587 lse.expr = gfc_build_array_ref (tmp1, count, NULL);
3588 gfc_init_se (&rse, NULL);
3589 rse.want_pointer = 1;
3590 gfc_conv_expr (&rse, expr2);
3591 gfc_add_block_to_block (&body, &rse.pre);
3592 gfc_add_modify (&body, lse.expr,
3593 fold_convert (TREE_TYPE (lse.expr), rse.expr));
3594 gfc_add_block_to_block (&body, &rse.post);
3595
3596 /* Increment count. */
3597 tmp = fold_build2_loc (input_location, PLUS_EXPR, gfc_array_index_type,
3598 count, gfc_index_one_node);
3599 gfc_add_modify (&body, count, tmp);
3600
3601 tmp = gfc_finish_block (&body);
3602
3603 /* Generate body and loops according to the information in
3604 nested_forall_info. */
3605 tmp = gfc_trans_nested_forall_loop (nested_forall_info, tmp, 1);
3606 gfc_add_expr_to_block (block, tmp);
3607
3608 /* Reset count. */
3609 gfc_add_modify (block, count, gfc_index_zero_node);
3610
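/* Second pass: walk the iterations again and assign the stored pointers
   to the lhs.  */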
3611 gfc_start_block (&body);
3612 gfc_init_se (&lse, NULL);
3613 gfc_init_se (&rse, NULL);
3614 rse.expr = gfc_build_array_ref (tmp1, count, NULL);
3615 lse.want_pointer = 1;
3616 gfc_conv_expr (&lse, expr1);
3617 gfc_add_block_to_block (&body, &lse.pre);
3618 gfc_add_modify (&body, lse.expr, rse.expr);
3619 gfc_add_block_to_block (&body, &lse.post);
3620 /* Increment count. */
3621 tmp = fold_build2_loc (input_location, PLUS_EXPR, gfc_array_index_type,
3622 count, gfc_index_one_node);
3623 gfc_add_modify (&body, count, tmp);
3624 tmp = gfc_finish_block (&body);
3625
3626 /* Generate body and loops according to the information in
3627 nested_forall_info. */
3628 tmp = gfc_trans_nested_forall_loop (nested_forall_info, tmp, 1);
3629 gfc_add_expr_to_block (block, tmp);
3630 }
3631 else
3632 {
3633 gfc_init_loopinfo (&loop);
3634
3635 /* Associate the SS with the loop. */
3636 gfc_add_ss_to_loop (&loop, rss);
3637
3638 /* Setup the scalarizing loops and bounds. */
3639 gfc_conv_ss_startstride (&loop);
3640
3641 gfc_conv_loop_setup (&loop, &expr2->where);
3642
3643 info = &rss->info->data.array;
3644 desc = info->descriptor;
3645
3646 /* Make a new descriptor. */
3647 parmtype = gfc_get_element_type (TREE_TYPE (desc));
3648 parmtype = gfc_get_array_type_bounds (parmtype, loop.dimen, 0,
3649 loop.from, loop.to, 1,
3650 GFC_ARRAY_UNKNOWN, true);
3651
3652 /* Allocate temporary for nested forall construct. */
3653 tmp1 = allocate_temp_for_forall_nest (nested_forall_info, parmtype,
3654 inner_size, NULL, block, &ptemp1);
3655 gfc_start_block (&body);
3656 gfc_init_se (&lse, NULL);
3657 lse.expr = gfc_build_array_ref (tmp1, count, NULL);
3658 lse.direct_byref = 1;
3659 gfc_conv_expr_descriptor (&lse, expr2);
3660
3661 gfc_add_block_to_block (&body, &lse.pre);
3662 gfc_add_block_to_block (&body, &lse.post);
3663
3664 /* Increment count. */
3665 tmp = fold_build2_loc (input_location, PLUS_EXPR, gfc_array_index_type,
3666 count, gfc_index_one_node);
3667 gfc_add_modify (&body, count, tmp);
3668
3669 tmp = gfc_finish_block (&body);
3670
3671 /* Generate body and loops according to the information in
3672 nested_forall_info. */
3673 tmp = gfc_trans_nested_forall_loop (nested_forall_info, tmp, 1);
3674 gfc_add_expr_to_block (block, tmp);
3675
3676 /* Reset count. */
3677 gfc_add_modify (block, count, gfc_index_zero_node);
3678
3679 parm = gfc_build_array_ref (tmp1, count, NULL);
3680 gfc_init_se (&lse, NULL);
3681 gfc_conv_expr_descriptor (&lse, expr1);
3682 gfc_add_modify (&lse.pre, lse.expr, parm);
3683 gfc_start_block (&body);
3684 gfc_add_block_to_block (&body, &lse.pre);
3685 gfc_add_block_to_block (&body, &lse.post);
3686
3687 /* Increment count. */
3688 tmp = fold_build2_loc (input_location, PLUS_EXPR, gfc_array_index_type,
3689 count, gfc_index_one_node);
3690 gfc_add_modify (&body, count, tmp);
3691
3692 tmp = gfc_finish_block (&body);
3693
3694 tmp = gfc_trans_nested_forall_loop (nested_forall_info, tmp, 1);
3695 gfc_add_expr_to_block (block, tmp);
3696 }
3697 /* Free the temporary. */
3698 if (ptemp1)
3699 {
3700 tmp = gfc_call_free (ptemp1);
3701 gfc_add_expr_to_block (block, tmp);
3702 }
3703 }
3704
3705
3706 /* FORALL and WHERE statements are really nasty, especially when you nest
3707 them. All the rhs values of a forall assignment must be evaluated before the
3708 actual assignments are performed. Presumably this also applies to all the
3709 assignments in an inner where statement. */
3710
3711 /* Generate code for a FORALL statement. Any temporaries are allocated as a
3712 linear array, relying on the fact that we process in the same order in all
3713 loops.
3714
3715 forall (i=start:end:stride; maskexpr)
3716 e<i> = f<i>
3717 g<i> = h<i>
3718 end forall
3719 (where e,f,g,h<i> are arbitrary expressions possibly involving i)
3720 Translates to:
3721 count = ((end + stride - start) / stride)
3722 masktmp(:) = maskexpr(:)
3723
3724 maskindex = 0;
3725 for (i = start; i <= end; i += stride)
3726 {
3727 if (masktmp[maskindex++])
3728 e<i> = f<i>
3729 }
3730 maskindex = 0;
3731 for (i = start; i <= end; i += stride)
3732 {
3733 if (masktmp[maskindex++])
3734 g<i> = h<i>
3735 }
3736
3737 Note that this code only works when there are no dependencies.
3738 Forall loops with array assignments and data dependencies are a real pain,
3739 because the size of the temporary cannot always be determined before the
3740 loop is executed. This problem is compounded by the presence of nested
3741 FORALL constructs.
3742 */
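/* For example (purely illustrative), in a dependent assignment such as

     forall (i=2:n)
       a(i) = a(i-1)
     end forall

   every rhs element must be fetched before any element of A is stored,
   so the rhs is evaluated into a temporary (sized by the iteration
   count, multiplied by the counts of any enclosing FORALL levels) and
   copied back to A in a second loop nest. */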
3743
3744 static tree
3745 gfc_trans_forall_1 (gfc_code * code, forall_info * nested_forall_info)
3746 {
3747 stmtblock_t pre;
3748 stmtblock_t post;
3749 stmtblock_t block;
3750 stmtblock_t body;
3751 tree *var;
3752 tree *start;
3753 tree *end;
3754 tree *step;
3755 gfc_expr **varexpr;
3756 tree tmp;
3757 tree assign;
3758 tree size;
3759 tree maskindex;
3760 tree mask;
3761 tree pmask;
3762 tree cycle_label = NULL_TREE;
3763 int n;
3764 int nvar;
3765 int need_temp;
3766 gfc_forall_iterator *fa;
3767 gfc_se se;
3768 gfc_code *c;
3769 gfc_saved_var *saved_vars;
3770 iter_info *this_forall;
3771 forall_info *info;
3772 bool need_mask;
3773
3774 /* Do nothing if the mask is false. */
3775 if (code->expr1
3776 && code->expr1->expr_type == EXPR_CONSTANT
3777 && !code->expr1->value.logical)
3778 return build_empty_stmt (input_location);
3779
3780 n = 0;
3781 /* Count the FORALL index number. */
3782 for (fa = code->ext.forall_iterator; fa; fa = fa->next)
3783 n++;
3784 nvar = n;
3785
3786 /* Allocate the space for var, start, end, step, varexpr. */
3787 var = XCNEWVEC (tree, nvar);
3788 start = XCNEWVEC (tree, nvar);
3789 end = XCNEWVEC (tree, nvar);
3790 step = XCNEWVEC (tree, nvar);
3791 varexpr = XCNEWVEC (gfc_expr *, nvar);
3792 saved_vars = XCNEWVEC (gfc_saved_var, nvar);
3793
3794 /* Allocate the space for info. */
3795 info = XCNEW (forall_info);
3796
3797 gfc_start_block (&pre);
3798 gfc_init_block (&post);
3799 gfc_init_block (&block);
3800
3801 n = 0;
3802 for (fa = code->ext.forall_iterator; fa; fa = fa->next)
3803 {
3804 gfc_symbol *sym = fa->var->symtree->n.sym;
3805
3806 /* Allocate space for this_forall. */
3807 this_forall = XCNEW (iter_info);
3808
3809 /* Create a temporary variable for the FORALL index. */
3810 tmp = gfc_typenode_for_spec (&sym->ts);
3811 var[n] = gfc_create_var (tmp, sym->name);
3812 gfc_shadow_sym (sym, var[n], &saved_vars[n]);
3813
3814 /* Record it in this_forall. */
3815 this_forall->var = var[n];
3816
3817 /* Replace the index symbol's backend_decl with the temporary decl. */
3818 sym->backend_decl = var[n];
3819
3820 /* Work out the start, end and stride for the loop. */
3821 gfc_init_se (&se, NULL);
3822 gfc_conv_expr_val (&se, fa->start);
3823 /* Record it in this_forall. */
3824 this_forall->start = se.expr;
3825 gfc_add_block_to_block (&block, &se.pre);
3826 start[n] = se.expr;
3827
3828 gfc_init_se (&se, NULL);
3829 gfc_conv_expr_val (&se, fa->end);
3830 /* Record it in this_forall. */
3831 this_forall->end = se.expr;
3832 gfc_make_safe_expr (&se);
3833 gfc_add_block_to_block (&block, &se.pre);
3834 end[n] = se.expr;
3835
3836 gfc_init_se (&se, NULL);
3837 gfc_conv_expr_val (&se, fa->stride);
3838 /* Record it in this_forall. */
3839 this_forall->step = se.expr;
3840 gfc_make_safe_expr (&se);
3841 gfc_add_block_to_block (&block, &se.pre);
3842 step[n] = se.expr;
3843
3844 /* Set the NEXT field of this_forall to NULL. */
3845 this_forall->next = NULL;
3846 /* Link this_forall to the info construct. */
3847 if (info->this_loop)
3848 {
3849 iter_info *iter_tmp = info->this_loop;
3850 while (iter_tmp->next != NULL)
3851 iter_tmp = iter_tmp->next;
3852 iter_tmp->next = this_forall;
3853 }
3854 else
3855 info->this_loop = this_forall;
3856
3857 n++;
3858 }
3859 nvar = n;
3860
3861 /* Calculate the size needed for the current forall level. */
3862 size = gfc_index_one_node;
3863 for (n = 0; n < nvar; n++)
3864 {
3865 /* size = (end + step - start) / step. */
3866 tmp = fold_build2_loc (input_location, MINUS_EXPR, TREE_TYPE (start[n]),
3867 step[n], start[n]);
3868 tmp = fold_build2_loc (input_location, PLUS_EXPR, TREE_TYPE (end[n]),
3869 end[n], tmp);
3870 tmp = fold_build2_loc (input_location, FLOOR_DIV_EXPR, TREE_TYPE (tmp),
3871 tmp, step[n]);
3872 tmp = convert (gfc_array_index_type, tmp);
3873
3874 size = fold_build2_loc (input_location, MULT_EXPR, gfc_array_index_type,
3875 size, tmp);
3876 }
3877
3878 /* Record the nvar and size of current forall level. */
3879 info->nvar = nvar;
3880 info->size = size;
3881
3882 if (code->expr1)
3883 {
3884 /* If the mask is .true., consider the FORALL unconditional. */
3885 if (code->expr1->expr_type == EXPR_CONSTANT
3886 && code->expr1->value.logical)
3887 need_mask = false;
3888 else
3889 need_mask = true;
3890 }
3891 else
3892 need_mask = false;
3893
3894 /* First we need to allocate the mask. */
3895 if (need_mask)
3896 {
3897 /* As the mask array can be very big, prefer compact boolean types. */
3898 tree mask_type = gfc_get_logical_type (gfc_logical_kinds[0].kind);
3899 mask = allocate_temp_for_forall_nest (nested_forall_info, mask_type,
3900 size, NULL, &block, &pmask);
3901 maskindex = gfc_create_var_np (gfc_array_index_type, "mi");
3902
3903 /* Record them in the info structure. */
3904 info->maskindex = maskindex;
3905 info->mask = mask;
3906 }
3907 else
3908 {
3909 /* No mask was specified. */
3910 maskindex = NULL_TREE;
3911 mask = pmask = NULL_TREE;
3912 }
3913
3914 /* Link the current forall level to nested_forall_info. */
3915 info->prev_nest = nested_forall_info;
3916 nested_forall_info = info;
3917
3918 /* Copy the mask into a temporary variable if required.
3919 For now we assume a mask temporary is needed. */
3920 if (need_mask)
3921 {
3922 /* As the mask array can be very big, prefer compact boolean types. */
3923 tree mask_type = gfc_get_logical_type (gfc_logical_kinds[0].kind);
3924
3925 gfc_add_modify (&block, maskindex, gfc_index_zero_node);
3926
3927 /* Start of mask assignment loop body. */
3928 gfc_start_block (&body);
3929
3930 /* Evaluate the mask expression. */
3931 gfc_init_se (&se, NULL);
3932 gfc_conv_expr_val (&se, code->expr1);
3933 gfc_add_block_to_block (&body, &se.pre);
3934
3935 /* Store the mask. */
3936 se.expr = convert (mask_type, se.expr);
3937
3938 tmp = gfc_build_array_ref (mask, maskindex, NULL);
3939 gfc_add_modify (&body, tmp, se.expr);
3940
3941 /* Advance to the next mask element. */
3942 tmp = fold_build2_loc (input_location, PLUS_EXPR, gfc_array_index_type,
3943 maskindex, gfc_index_one_node);
3944 gfc_add_modify (&body, maskindex, tmp);
3945
3946 /* Generate the loops. */
3947 tmp = gfc_finish_block (&body);
3948 tmp = gfc_trans_nested_forall_loop (info, tmp, 0);
3949 gfc_add_expr_to_block (&block, tmp);
3950 }
3951
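/* For DO CONCURRENT, translate the body once, provide a CYCLE label,
   and wrap the result in the loop nest. */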
3952 if (code->op == EXEC_DO_CONCURRENT)
3953 {
3954 gfc_init_block (&body);
3955 cycle_label = gfc_build_label_decl (NULL_TREE);
3956 code->cycle_label = cycle_label;
3957 tmp = gfc_trans_code (code->block->next);
3958 gfc_add_expr_to_block (&body, tmp);
3959
3960 if (TREE_USED (cycle_label))
3961 {
3962 tmp = build1_v (LABEL_EXPR, cycle_label);
3963 gfc_add_expr_to_block (&body, tmp);
3964 }
3965
3966 tmp = gfc_finish_block (&body);
3967 nested_forall_info->do_concurrent = true;
3968 tmp = gfc_trans_nested_forall_loop (nested_forall_info, tmp, 1);
3969 gfc_add_expr_to_block (&block, tmp);
3970 goto done;
3971 }
3972
3973 c = code->block->next;
3974
3975 /* TODO: loop merging in FORALL statements. */
3976 /* Now that we've got a copy of the mask, generate the assignment loops. */
3977 while (c)
3978 {
3979 switch (c->op)
3980 {
3981 case EXEC_ASSIGN:
3982 /* A scalar or array assignment. Do the simple check for
3983 lhs to rhs dependencies. These make a temporary for the
3984 rhs and form a second forall block to copy it to the variable. */
3985 need_temp = check_forall_dependencies(c, &pre, &post);
3986
3987 /* Temporaries due to array assignment data dependencies introduce
3988 no end of problems. */
3989 if (need_temp)
3990 gfc_trans_assign_need_temp (c->expr1, c->expr2, NULL, false,
3991 nested_forall_info, &block);
3992 else
3993 {
3994 /* Use the normal assignment copying routines. */
3995 assign = gfc_trans_assignment (c->expr1, c->expr2, false, true);
3996
3997 /* Generate body and loops. */
3998 tmp = gfc_trans_nested_forall_loop (nested_forall_info,
3999 assign, 1);
4000 gfc_add_expr_to_block (&block, tmp);
4001 }
4002
4003 /* Cleanup any temporary symtrees that have been made to deal
4004 with dependencies. */
4005 if (new_symtree)
4006 cleanup_forall_symtrees (c);
4007
4008 break;
4009
4010 case EXEC_WHERE:
4011 /* Translate WHERE or WHERE construct nested in FORALL. */
4012 gfc_trans_where_2 (c, NULL, false, nested_forall_info, &block);
4013 break;
4014
4015 /* Pointer assignment inside FORALL. */
4016 case EXEC_POINTER_ASSIGN:
4017 need_temp = gfc_check_dependency (c->expr1, c->expr2, 0);
4018 if (need_temp)
4019 gfc_trans_pointer_assign_need_temp (c->expr1, c->expr2,
4020 nested_forall_info, &block);
4021 else
4022 {
4023 /* Use the normal assignment copying routines. */
4024 assign = gfc_trans_pointer_assignment (c->expr1, c->expr2);
4025
4026 /* Generate body and loops. */
4027 tmp = gfc_trans_nested_forall_loop (nested_forall_info,
4028 assign, 1);
4029 gfc_add_expr_to_block (&block, tmp);
4030 }
4031 break;
4032
4033 case EXEC_FORALL:
4034 tmp = gfc_trans_forall_1 (c, nested_forall_info);
4035 gfc_add_expr_to_block (&block, tmp);
4036 break;
4037
4038 /* Explicit subroutine calls are prevented by the frontend, but interface
4039 assignments can legitimately produce them. */
4040 case EXEC_ASSIGN_CALL:
4041 assign = gfc_trans_call (c, true, NULL_TREE, NULL_TREE, false);
4042 tmp = gfc_trans_nested_forall_loop (nested_forall_info, assign, 1);
4043 gfc_add_expr_to_block (&block, tmp);
4044 break;
4045
4046 default:
4047 gcc_unreachable ();
4048 }
4049
4050 c = c->next;
4051 }
4052
4053 done:
4054 /* Restore the original index variables. */
4055 for (fa = code->ext.forall_iterator, n = 0; fa; fa = fa->next, n++)
4056 gfc_restore_sym (fa->var->symtree->n.sym, &saved_vars[n]);
4057
4058 /* Free the space for var, start, end, step, varexpr. */
4059 free (var);
4060 free (start);
4061 free (end);
4062 free (step);
4063 free (varexpr);
4064 free (saved_vars);
4065
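/* Free the list of iterators of this forall level. */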
4066 for (this_forall = info->this_loop; this_forall;)
4067 {
4068 iter_info *next = this_forall->next;
4069 free (this_forall);
4070 this_forall = next;
4071 }
4072
4073 /* Free the space for this forall_info. */
4074 free (info);
4075
4076 if (pmask)
4077 {
4078 /* Free the temporary for the mask. */
4079 tmp = gfc_call_free (pmask);
4080 gfc_add_expr_to_block (&block, tmp);
4081 }
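/* maskindex was created with gfc_create_var_np, so it still has to be
   added to the current binding level. */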
4082 if (maskindex)
4083 pushdecl (maskindex);
4084
4085 gfc_add_block_to_block (&pre, &block);
4086 gfc_add_block_to_block (&pre, &post);
4087
4088 return gfc_finish_block (&pre);
4089 }
4090
4091
4092 /* Translate the FORALL statement or construct. */
4093
4094 tree gfc_trans_forall (gfc_code * code)
4095 {
4096 return gfc_trans_forall_1 (code, NULL);
4097 }
4098
4099
4100 /* Translate the DO CONCURRENT construct. */
4101
4102 tree gfc_trans_do_concurrent (gfc_code * code)
4103 {
4104 return gfc_trans_forall_1 (code, NULL);
4105 }
4106
4107
4108 /* Evaluate the WHERE mask expression and copy its value to a temporary.
4109 If the WHERE construct is nested in FORALL, the overall temporary size is
4110 the size needed by the WHERE mask expression multiplied by the iteration
4111 count of the nested forall.
4112 ME is the WHERE mask expression.
4113 MASK is the current execution mask upon input, whose sense may or may
4114 not be inverted as specified by the INVERT argument.
4115 CMASK is the updated execution mask on output, or NULL if not required.
4116 PMASK is the pending execution mask on output, or NULL if not required.
4117 BLOCK is the block in which to place the condition evaluation loops. */
4118
4119 static void
4120 gfc_evaluate_where_mask (gfc_expr * me, forall_info * nested_forall_info,
4121 tree mask, bool invert, tree cmask, tree pmask,
4122 tree mask_type, stmtblock_t * block)
4123 {
4124 tree tmp, tmp1;
4125 gfc_ss *lss, *rss;
4126 gfc_loopinfo loop;
4127 stmtblock_t body, body1;
4128 tree count, cond, mtmp;
4129 gfc_se lse, rse;
4130
4131 gfc_init_loopinfo (&loop);
4132
4133 lss = gfc_walk_expr (me);
4134 rss = gfc_walk_expr (me);
4135
4136 /* Variable to index the temporary. */
4137 count = gfc_create_var (gfc_array_index_type, "count");
4138 /* Initialize count. */
4139 gfc_add_modify (block, count, gfc_index_zero_node);
4140
4141 gfc_start_block (&body);
4142
4143 gfc_init_se (&rse, NULL);
4144 gfc_init_se (&lse, NULL);
4145
4146 if (lss == gfc_ss_terminator)
4147 {
4148 gfc_init_block (&body1);
4149 }
4150 else
4151 {
4152 /* Initialize the loop. */
4153 gfc_init_loopinfo (&loop);
4154
4155 /* We may need LSS to determine the shape of the expression. */
4156 gfc_add_ss_to_loop (&loop, lss);
4157 gfc_add_ss_to_loop (&loop, rss);
4158
4159 gfc_conv_ss_startstride (&loop);
4160 gfc_conv_loop_setup (&loop, &me->where);
4161
4162 gfc_mark_ss_chain_used (rss, 1);
4163 /* Start the loop body. */
4164 gfc_start_scalarized_body (&loop, &body1);
4165
4166 /* Translate the expression. */
4167 gfc_copy_loopinfo_to_se (&rse, &loop);
4168 rse.ss = rss;
4169 gfc_conv_expr (&rse, me);
4170 }
4171
4172 /* Variable to evaluate mask condition. */
4173 cond = gfc_create_var (mask_type, "cond");
4174 if (mask && (cmask || pmask))
4175 mtmp = gfc_create_var (mask_type, "mask");
4176 else
    mtmp = NULL_TREE;
4177
4178 gfc_add_block_to_block (&body1, &lse.pre);
4179 gfc_add_block_to_block (&body1, &rse.pre);
4180
4181 gfc_add_modify (&body1, cond, fold_convert (mask_type, rse.expr));
4182
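/* Load the current execution mask element, inverting it if requested. */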
4183 if (mask && (cmask || pmask))
4184 {
4185 tmp = gfc_build_array_ref (mask, count, NULL);
4186 if (invert)
4187 tmp = fold_build1_loc (input_location, TRUTH_NOT_EXPR, mask_type, tmp);
4188 gfc_add_modify (&body1, mtmp, tmp);
4189 }
4190
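/* The updated control mask is the (possibly inverted) incoming mask
   ANDed with the new condition. */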
4191 if (cmask)
4192 {
4193 tmp1 = gfc_build_array_ref (cmask, count, NULL);
4194 tmp = cond;
4195 if (mask)
4196 tmp = fold_build2_loc (input_location, TRUTH_AND_EXPR, mask_type,
4197 mtmp, tmp);
4198 gfc_add_modify (&body1, tmp1, tmp);
4199 }
4200
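/* The pending control mask selects the elements deferred to the
   remaining ELSEWHERE clauses: the incoming mask ANDed with the
   negated condition. */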
4201 if (pmask)
4202 {
4203 tmp1 = gfc_build_array_ref (pmask, count, NULL);
4204 tmp = fold_build1_loc (input_location, TRUTH_NOT_EXPR, mask_type, cond);
4205 if (mask)
4206 tmp = fold_build2_loc (input_location, TRUTH_AND_EXPR, mask_type, mtmp,
4207 tmp);
4208 gfc_add_modify (&body1, tmp1, tmp);
4209 }
4210
4211 gfc_add_block_to_block (&body1, &lse.post);
4212 gfc_add_block_to_block (&body1, &rse.post);
4213
4214 if (lss == gfc_ss_terminator)
4215 {
4216 gfc_add_block_to_block (&body, &body1);
4217 }
4218 else
4219 {
4220 /* Increment count. */
4221 tmp1 = fold_build2_loc (input_location, PLUS_EXPR, gfc_array_index_type,
4222 count, gfc_index_one_node);
4223 gfc_add_modify (&body1, count, tmp1);
4224
4225 /* Generate the copying loops. */
4226 gfc_trans_scalarizing_loops (&loop, &body1);
4227
4228 gfc_add_block_to_block (&body, &loop.pre);
4229 gfc_add_block_to_block (&body, &loop.post);
4230
4231 gfc_cleanup_loop (&loop);
4232 /* TODO: Reuse lss and rss when copying temp->lhs. Need to be careful
4233 as tree nodes in SS may not be valid in a different scope. */
4234 }
4235
4236 tmp1 = gfc_finish_block (&body);
4237 /* If the WHERE construct is inside FORALL, fill the full temporary. */
4238 if (nested_forall_info != NULL)
4239 tmp1 = gfc_trans_nested_forall_loop (nested_forall_info, tmp1, 1);
4240
4241 gfc_add_expr_to_block (block, tmp1);
4242 }
4243
4244
4245 /* Translate an assignment statement in a WHERE statement or WHERE
4246 construct. The MASK expression is used to control which elements
4247 of EXPR1 shall be assigned. The sense of MASK is specified by
4248 INVERT. */
4249
4250 static tree
4251 gfc_trans_where_assign (gfc_expr *expr1, gfc_expr *expr2,
4252 tree mask, bool invert,
4253 tree count1, tree count2,
4254 gfc_code *cnext)
4255 {
4256 gfc_se lse;
4257 gfc_se rse;
4258 gfc_ss *lss;
4259 gfc_ss *lss_section;
4260 gfc_ss *rss;
4261
4262 gfc_loopinfo loop;
4263 tree tmp;
4264 stmtblock_t block;
4265 stmtblock_t body;
4266 tree index, maskexpr;
4267
4268 /* A defined assignment. */
4269 if (cnext && cnext->resolved_sym)
4270 return gfc_trans_call (cnext, true, mask, count1, invert);
4271
4272 #if 0
4273 /* TODO: handle this special case.
4274 Special case a single function returning an array. */
4275 if (expr2->expr_type == EXPR_FUNCTION && expr2->rank > 0)
4276 {
4277 tmp = gfc_trans_arrayfunc_assign (expr1, expr2);
4278 if (tmp)
4279 return tmp;
4280 }
4281 #endif
4282
4283 /* Assignment of the form lhs = rhs. */
4284 gfc_start_block (&block);
4285
4286 gfc_init_se (&lse, NULL);
4287 gfc_init_se (&rse, NULL);
4288
4289 /* Walk the lhs. */
4290 lss = gfc_walk_expr (expr1);
4291 rss = NULL;
4292
4293 /* In each where-assign-stmt, the mask-expr and the variable being
4294 defined shall be arrays of the same shape. */
4295 gcc_assert (lss != gfc_ss_terminator);
4296
4297 /* The assignment needs scalarization. */
4298 lss_section = lss;
4299
4300 /* Find a non-scalar SS from the lhs. */
4301 while (lss_section != gfc_ss_terminator
4302 && lss_section->info->type != GFC_SS_SECTION)
4303 lss_section = lss_section->next;
4304
4305 gcc_assert (lss_section != gfc_ss_terminator);
4306
4307 /* Initialize the scalarizer. */
4308 gfc_init_loopinfo (&loop);
4309
4310 /* Walk the rhs. */
4311 rss = gfc_walk_expr (expr2);
4312 if (rss == gfc_ss_terminator)
4313 {
4314 /* The rhs is scalar. Add a ss for the expression. */
4315 rss = gfc_get_scalar_ss (gfc_ss_terminator, expr2);
4316 rss->info->where = 1;
4317 }
4318
4319 /* Associate the SS with the loop. */
4320 gfc_add_ss_to_loop (&loop, lss);
4321 gfc_add_ss_to_loop (&loop, rss);
4322
4323 /* Calculate the bounds of the scalarization. */
4324 gfc_conv_ss_startstride (&loop);
4325
4326 /* Resolve any data dependencies in the statement. */
4327 gfc_conv_resolve_dependencies (&loop, lss_section, rss);
4328
4329 /* Setup the scalarizing loops. */
4330 gfc_conv_loop_setup (&loop, &expr2->where);
4331
4332 /* Setup the gfc_se structures. */
4333 gfc_copy_loopinfo_to_se (&lse, &loop);
4334 gfc_copy_loopinfo_to_se (&rse, &loop);
4335
4336 rse.ss = rss;
4337 gfc_mark_ss_chain_used (rss, 1);
4338 if (loop.temp_ss == NULL)
4339 {
4340 lse.ss = lss;
4341 gfc_mark_ss_chain_used (lss, 1);
4342 }
4343 else
4344 {
4345 lse.ss = loop.temp_ss;
4346 gfc_mark_ss_chain_used (lss, 3);
4347 gfc_mark_ss_chain_used (loop.temp_ss, 3);
4348 }
4349
4350 /* Start the scalarized loop body. */
4351 gfc_start_scalarized_body (&loop, &body);
4352
4353 /* Translate the expression. */
4354 gfc_conv_expr (&rse, expr2);
4355 if (lss != gfc_ss_terminator && loop.temp_ss != NULL)
4356 gfc_conv_tmp_array_ref (&lse);
4357 else
4358 gfc_conv_expr (&lse, expr1);
4359
4360 /* Form the mask expression according to the mask. */
4361 index = count1;
4362 maskexpr = gfc_build_array_ref (mask, index, NULL);
4363 if (invert)
4364 maskexpr = fold_build1_loc (input_location, TRUTH_NOT_EXPR,
4365 TREE_TYPE (maskexpr), maskexpr);
4366
4367 /* Use the scalar assignment as is. */
4368 tmp = gfc_trans_scalar_assign (&lse, &rse, expr1->ts,
4369 loop.temp_ss != NULL, false, true);
4370
4371 tmp = build3_v (COND_EXPR, maskexpr, tmp, build_empty_stmt (input_location));
4372
4373 gfc_add_expr_to_block (&body, tmp);
4374
4375 if (lss == gfc_ss_terminator)
4376 {
4377 /* Increment count1. */
4378 tmp = fold_build2_loc (input_location, PLUS_EXPR, gfc_array_index_type,
4379 count1, gfc_index_one_node);
4380 gfc_add_modify (&body, count1, tmp);
4381
4382 /* Use the scalar assignment as is. */
4383 gfc_add_block_to_block (&block, &body);
4384 }
4385 else
4386 {
4387 gcc_assert (lse.ss == gfc_ss_terminator
4388 && rse.ss == gfc_ss_terminator);
4389
4390 if (loop.temp_ss != NULL)
4391 {
4392 /* Increment count1 before finishing the main body of the scalarized
4393 expression. */
4394 tmp = fold_build2_loc (input_location, PLUS_EXPR,
4395 gfc_array_index_type, count1, gfc_index_one_node);
4396 gfc_add_modify (&body, count1, tmp);
4397 gfc_trans_scalarized_loop_boundary (&loop, &body);
4398
4399 /* We need to copy the temporary to the actual lhs. */
4400 gfc_init_se (&lse, NULL);
4401 gfc_init_se (&rse, NULL);
4402 gfc_copy_loopinfo_to_se (&lse, &loop);
4403 gfc_copy_loopinfo_to_se (&rse, &loop);
4404
4405 rse.ss = loop.temp_ss;
4406 lse.ss = lss;
4407
4408 gfc_conv_tmp_array_ref (&rse);
4409 gfc_conv_expr (&lse, expr1);
4410
4411 gcc_assert (lse.ss == gfc_ss_terminator
4412 && rse.ss == gfc_ss_terminator);
4413
4414 /* Form the mask expression according to the mask tree list. */
4415 index = count2;
4416 maskexpr = gfc_build_array_ref (mask, index, NULL);
4417 if (invert)
4418 maskexpr = fold_build1_loc (input_location, TRUTH_NOT_EXPR,
4419 TREE_TYPE (maskexpr), maskexpr);
4420
4421 /* Use the scalar assignment as is. */
4422 tmp = gfc_trans_scalar_assign (&lse, &rse, expr1->ts, false, false,
4423 true);
4424 tmp = build3_v (COND_EXPR, maskexpr, tmp,
4425 build_empty_stmt (input_location));
4426 gfc_add_expr_to_block (&body, tmp);
4427
4428 /* Increment count2. */
4429 tmp = fold_build2_loc (input_location, PLUS_EXPR,
4430 gfc_array_index_type, count2,
4431 gfc_index_one_node);
4432 gfc_add_modify (&body, count2, tmp);
4433 }
4434 else
4435 {
4436 /* Increment count1. */
4437 tmp = fold_build2_loc (input_location, PLUS_EXPR,
4438 gfc_array_index_type, count1,
4439 gfc_index_one_node);
4440 gfc_add_modify (&body, count1, tmp);
4441 }
4442
4443 /* Generate the copying loops. */
4444 gfc_trans_scalarizing_loops (&loop, &body);
4445
4446 /* Wrap the whole thing up. */
4447 gfc_add_block_to_block (&block, &loop.pre);
4448 gfc_add_block_to_block (&block, &loop.post);
4449 gfc_cleanup_loop (&loop);
4450 }
4451
4452 return gfc_finish_block (&block);
4453 }
4454
4455
4456 /* Translate the WHERE construct or statement.
4457 This function can be called recursively to translate nested WHERE
4458 constructs or statements.
4459 MASK is the control mask. */
4460
4461 static void
4462 gfc_trans_where_2 (gfc_code * code, tree mask, bool invert,
4463 forall_info * nested_forall_info, stmtblock_t * block)
4464 {
4465 stmtblock_t inner_size_body;
4466 tree inner_size, size;
4467 gfc_ss *lss, *rss;
4468 tree mask_type;
4469 gfc_expr *expr1;
4470 gfc_expr *expr2;
4471 gfc_code *cblock;
4472 gfc_code *cnext;
4473 tree tmp;
4474 tree cond;
4475 tree count1, count2;
4476 bool need_cmask;
4477 bool need_pmask;
4478 int need_temp;
4479 tree pcmask = NULL_TREE;
4480 tree ppmask = NULL_TREE;
4481 tree cmask = NULL_TREE;
4482 tree pmask = NULL_TREE;
4483 gfc_actual_arglist *arg;
4484
4485 /* The WHERE statement or the WHERE construct statement. */
4486 cblock = code->block;
4487
4488 /* As the mask array can be very big, prefer compact boolean types. */
4489 mask_type = gfc_get_logical_type (gfc_logical_kinds[0].kind);
4490
4491 /* Determine which temporary masks are needed. */
4492 if (!cblock->block)
4493 {
4494 /* One clause: No ELSEWHEREs. */
4495 need_cmask = (cblock->next != 0);
4496 need_pmask = false;
4497 }
4498 else if (cblock->block->block)
4499 {
4500 /* Three or more clauses: Conditional ELSEWHEREs. */
4501 need_cmask = true;
4502 need_pmask = true;
4503 }
4504 else if (cblock->next)
4505 {
4506 /* Two clauses, the first non-empty. */
4507 need_cmask = true;
4508 need_pmask = (mask != NULL_TREE
4509 && cblock->block->next != 0);
4510 }
4511 else if (!cblock->block->next)
4512 {
4513 /* Two clauses, both empty. */
4514 need_cmask = false;
4515 need_pmask = false;
4516 }
4517 /* Two clauses, the first empty, the second non-empty. */
4518 else if (mask)
4519 {
4520 need_cmask = (cblock->block->expr1 != 0);
4521 need_pmask = true;
4522 }
4523 else
4524 {
4525 need_cmask = true;
4526 need_pmask = false;
4527 }
4528
4529 if (need_cmask || need_pmask)
4530 {
4531 /* Calculate the size of temporary needed by the mask-expr. */
4532 gfc_init_block (&inner_size_body);
4533 inner_size = compute_inner_temp_size (cblock->expr1, cblock->expr1,
4534 &inner_size_body, &lss, &rss);
4535
4536 gfc_free_ss_chain (lss);
4537 gfc_free_ss_chain (rss);
4538
4539 /* Calculate the total size of temporary needed. */
4540 size = compute_overall_iter_number (nested_forall_info, inner_size,
4541 &inner_size_body, block);
4542
4543 /* If the computed size is non-positive, use zero instead. */
4544 cond = fold_build2_loc (input_location, LE_EXPR, boolean_type_node, size,
4545 gfc_index_zero_node);
4546 size = fold_build3_loc (input_location, COND_EXPR, gfc_array_index_type,
4547 cond, gfc_index_zero_node, size);
4548 size = gfc_evaluate_now (size, block);
4549
4550 /* Allocate temporary for WHERE mask if needed. */
4551 if (need_cmask)
4552 cmask = allocate_temp_for_forall_nest_1 (mask_type, size, block,
4553 &pcmask);
4554
4555 /* Allocate temporary for !mask if needed. */
4556 if (need_pmask)
4557 pmask = allocate_temp_for_forall_nest_1 (mask_type, size, block,
4558 &ppmask);
4559 }
4560
4561 while (cblock)
4562 {
4563 /* Each time around this loop, the where clause is conditional
4564 on the value of mask and invert, which are updated at the
4565 bottom of the loop. */
4566
4567 /* Has mask-expr. */
4568 if (cblock->expr1)
4569 {
4570 /* Ensure that the WHERE mask will be evaluated exactly once.
4571 If there are no statements in this WHERE/ELSEWHERE clause,
4572 then we don't need to update the control mask (cmask).
4573 If this is the last clause of the WHERE construct, then
4574 we don't need to update the pending control mask (pmask). */
4575 if (mask)
4576 gfc_evaluate_where_mask (cblock->expr1, nested_forall_info,
4577 mask, invert,
4578 cblock->next ? cmask : NULL_TREE,
4579 cblock->block ? pmask : NULL_TREE,
4580 mask_type, block);
4581 else
4582 gfc_evaluate_where_mask (cblock->expr1, nested_forall_info,
4583 NULL_TREE, false,
4584 (cblock->next || cblock->block)
4585 ? cmask : NULL_TREE,
4586 NULL_TREE, mask_type, block);
4587
4588 invert = false;
4589 }
4590 /* It's a final elsewhere-stmt. No mask-expr is present. */
4591 else
4592 cmask = mask;
4593
4594 /* The body of this where clause is controlled by cmask, with the
4595 sense specified by invert. */
4596
4597 /* Get the assignment statement of a WHERE statement, or the first
4598 statement in where-body-construct of a WHERE construct. */
4599 cnext = cblock->next;
4600 while (cnext)
4601 {
4602 switch (cnext->op)
4603 {
4604 /* WHERE assignment statement. */
4605 case EXEC_ASSIGN_CALL:
4606
4607 arg = cnext->ext.actual;
4608 expr1 = expr2 = NULL;
4609 for (; arg; arg = arg->next)
4610 {
4611 if (!arg->expr)
4612 continue;
4613 if (expr1 == NULL)
4614 expr1 = arg->expr;
4615 else
4616 expr2 = arg->expr;
4617 }
4618 goto evaluate;
4619
4620 case EXEC_ASSIGN:
4621 expr1 = cnext->expr1;
4622 expr2 = cnext->expr2;
4623 evaluate:
4624 if (nested_forall_info != NULL)
4625 {
4626 need_temp = gfc_check_dependency (expr1, expr2, 0);
4627 if (need_temp && cnext->op != EXEC_ASSIGN_CALL)
4628 gfc_trans_assign_need_temp (expr1, expr2,
4629 cmask, invert,
4630 nested_forall_info, block);
4631 else
4632 {
4633 /* Variables to control maskexpr. */
4634 count1 = gfc_create_var (gfc_array_index_type, "count1");
4635 count2 = gfc_create_var (gfc_array_index_type, "count2");
4636 gfc_add_modify (block, count1, gfc_index_zero_node);
4637 gfc_add_modify (block, count2, gfc_index_zero_node);
4638
4639 tmp = gfc_trans_where_assign (expr1, expr2,
4640 cmask, invert,
4641 count1, count2,
4642 cnext);
4643
4644 tmp = gfc_trans_nested_forall_loop (nested_forall_info,
4645 tmp, 1);
4646 gfc_add_expr_to_block (block, tmp);
4647 }
4648 }
4649 else
4650 {
4651 /* Variables to control maskexpr. */
4652 count1 = gfc_create_var (gfc_array_index_type, "count1");
4653 count2 = gfc_create_var (gfc_array_index_type, "count2");
4654 gfc_add_modify (block, count1, gfc_index_zero_node);
4655 gfc_add_modify (block, count2, gfc_index_zero_node);
4656
4657 tmp = gfc_trans_where_assign (expr1, expr2,
4658 cmask, invert,
4659 count1, count2,
4660 cnext);
4661 gfc_add_expr_to_block (block, tmp);
4662
4663 }
4664 break;
4665
4666 /* WHERE or WHERE construct is part of a where-body-construct. */
4667 case EXEC_WHERE:
4668 gfc_trans_where_2 (cnext, cmask, invert,
4669 nested_forall_info, block);
4670 break;
4671
4672 default:
4673 gcc_unreachable ();
4674 }
4675
4676 /* The next statement within the same where-body-construct. */
4677 cnext = cnext->next;
4678 }
4679 /* The next masked-elsewhere-stmt, elsewhere-stmt, or end-where-stmt. */
4680 cblock = cblock->block;
4681 if (mask == NULL_TREE)
4682 {
4683 /* If we're the initial WHERE, we can simply invert the sense
4684 of the current mask to obtain the "mask" for the remaining
4685 ELSEWHEREs. */
4686 invert = true;
4687 mask = cmask;
4688 }
4689 else
4690 {
4691 /* Otherwise, for nested WHERE's we need to use the pending mask. */
4692 invert = false;
4693 mask = pmask;
4694 }
4695 }
4696
4697 /* If we allocated a pending mask array, deallocate it now. */
4698 if (ppmask)
4699 {
4700 tmp = gfc_call_free (ppmask);
4701 gfc_add_expr_to_block (block, tmp);
4702 }
4703
4704 /* If we allocated a current mask array, deallocate it now. */
4705 if (pcmask)
4706 {
4707 tmp = gfc_call_free (pcmask);
4708 gfc_add_expr_to_block (block, tmp);
4709 }
4710 }
4711
4712 /* Translate a simple WHERE construct or statement without dependencies.
4713 CBLOCK is the "then" clause of the WHERE statement, where CBLOCK->EXPR1
4714 is the mask condition, and EBLOCK if non-NULL is the "else" clause.
4715 Currently both CBLOCK and EBLOCK are restricted to single assignments. */
4716
4717 static tree
4718 gfc_trans_where_3 (gfc_code * cblock, gfc_code * eblock)
4719 {
4720 stmtblock_t block, body;
4721 gfc_expr *cond, *tdst, *tsrc, *edst, *esrc;
4722 tree tmp, cexpr, tstmt, estmt;
4723 gfc_ss *css, *tdss, *tsss;
4724 gfc_se cse, tdse, tsse, edse, esse;
4725 gfc_loopinfo loop;
4726 gfc_ss *edss = 0;
4727 gfc_ss *esss = 0;
4728
4729 /* Allow the scalarizer to workshare simple where loops. */
4730 if (ompws_flags & OMPWS_WORKSHARE_FLAG)
4731 ompws_flags |= OMPWS_SCALARIZER_WS;
4732
4733 cond = cblock->expr1;
4734 tdst = cblock->next->expr1;
4735 tsrc = cblock->next->expr2;
4736 edst = eblock ? eblock->next->expr1 : NULL;
4737 esrc = eblock ? eblock->next->expr2 : NULL;
4738
4739 gfc_start_block (&block);
4740 gfc_init_loopinfo (&loop);
4741
4742 /* Handle the condition. */
4743 gfc_init_se (&cse, NULL);
4744 css = gfc_walk_expr (cond);
4745 gfc_add_ss_to_loop (&loop, css);
4746
4747 /* Handle the then-clause. */
4748 gfc_init_se (&tdse, NULL);
4749 gfc_init_se (&tsse, NULL);
4750 tdss = gfc_walk_expr (tdst);
4751 tsss = gfc_walk_expr (tsrc);
4752 if (tsss == gfc_ss_terminator)
4753 {
4754 tsss = gfc_get_scalar_ss (gfc_ss_terminator, tsrc);
4755 tsss->info->where = 1;
4756 }
4757 gfc_add_ss_to_loop (&loop, tdss);
4758 gfc_add_ss_to_loop (&loop, tsss);
4759
4760 if (eblock)
4761 {
4762 /* Handle the else clause. */
4763 gfc_init_se (&edse, NULL);
4764 gfc_init_se (&esse, NULL);
4765 edss = gfc_walk_expr (edst);
4766 esss = gfc_walk_expr (esrc);
4767 if (esss == gfc_ss_terminator)
4768 {
4769 esss = gfc_get_scalar_ss (gfc_ss_terminator, esrc);
4770 esss->info->where = 1;
4771 }
4772 gfc_add_ss_to_loop (&loop, edss);
4773 gfc_add_ss_to_loop (&loop, esss);
4774 }
4775
4776 gfc_conv_ss_startstride (&loop);
4777 gfc_conv_loop_setup (&loop, &tdst->where);
4778
4779 gfc_mark_ss_chain_used (css, 1);
4780 gfc_mark_ss_chain_used (tdss, 1);
4781 gfc_mark_ss_chain_used (tsss, 1);
4782 if (eblock)
4783 {
4784 gfc_mark_ss_chain_used (edss, 1);
4785 gfc_mark_ss_chain_used (esss, 1);
4786 }
4787
4788 gfc_start_scalarized_body (&loop, &body);
4789
4790 gfc_copy_loopinfo_to_se (&cse, &loop);
4791 gfc_copy_loopinfo_to_se (&tdse, &loop);
4792 gfc_copy_loopinfo_to_se (&tsse, &loop);
4793 cse.ss = css;
4794 tdse.ss = tdss;
4795 tsse.ss = tsss;
4796 if (eblock)
4797 {
4798 gfc_copy_loopinfo_to_se (&edse, &loop);
4799 gfc_copy_loopinfo_to_se (&esse, &loop);
4800 edse.ss = edss;
4801 esse.ss = esss;
4802 }
4803
4804 gfc_conv_expr (&cse, cond);
4805 gfc_add_block_to_block (&body, &cse.pre);
4806 cexpr = cse.expr;
4807
4808 gfc_conv_expr (&tsse, tsrc);
4809 if (tdss != gfc_ss_terminator && loop.temp_ss != NULL)
4810 gfc_conv_tmp_array_ref (&tdse);
4811 else
4812 gfc_conv_expr (&tdse, tdst);
4813
4814 if (eblock)
4815 {
4816 gfc_conv_expr (&esse, esrc);
4817 if (edss != gfc_ss_terminator && loop.temp_ss != NULL)
4818 gfc_conv_tmp_array_ref (&edse);
4819 else
4820 gfc_conv_expr (&edse, edst);
4821 }
4822
4823 tstmt = gfc_trans_scalar_assign (&tdse, &tsse, tdst->ts, false, false, true);
4824 estmt = eblock ? gfc_trans_scalar_assign (&edse, &esse, edst->ts, false,
4825 false, true)
4826 : build_empty_stmt (input_location);
4827 tmp = build3_v (COND_EXPR, cexpr, tstmt, estmt);
4828 gfc_add_expr_to_block (&body, tmp);
4829 gfc_add_block_to_block (&body, &cse.post);
4830
4831 gfc_trans_scalarizing_loops (&loop, &body);
4832 gfc_add_block_to_block (&block, &loop.pre);
4833 gfc_add_block_to_block (&block, &loop.post);
4834 gfc_cleanup_loop (&loop);
4835
4836 return gfc_finish_block (&block);
4837 }
4838
4839 /* As the WHERE statement or WHERE construct can be nested, we call
4840 gfc_trans_where_2 to do the translation, and pass the initial
4841 NULL values for both the control mask and the pending control mask. */
4842
4843 tree
4844 gfc_trans_where (gfc_code * code)
4845 {
4846 stmtblock_t block;
4847 gfc_code *cblock;
4848 gfc_code *eblock;
4849
4850 cblock = code->block;
4851 if (cblock->next
4852 && cblock->next->op == EXEC_ASSIGN
4853 && !cblock->next->next)
4854 {
4855 eblock = cblock->block;
4856 if (!eblock)
4857 {
4858 /* A simple "WHERE (cond) x = y" statement or block is
4859 dependence free if cond is not dependent upon writing x,
4860 and the source y is unaffected by the destination x. */
4861 if (!gfc_check_dependency (cblock->next->expr1,
4862 cblock->expr1, 0)
4863 && !gfc_check_dependency (cblock->next->expr1,
4864 cblock->next->expr2, 0))
4865 return gfc_trans_where_3 (cblock, NULL);
4866 }
4867 else if (!eblock->expr1
4868 && !eblock->block
4869 && eblock->next
4870 && eblock->next->op == EXEC_ASSIGN
4871 && !eblock->next->next)
4872 {
4873 /* A simple "WHERE (cond) x1 = y1 ELSEWHERE x2 = y2 ENDWHERE"
4874 block is dependence free if cond is not dependent on writes
4875 to x1 and x2, y1 is not dependent on writes to x2, and y2
4876 is not dependent on writes to x1, and both y's are not
4877 dependent upon their own x's. In addition to this, the
4878 final two dependency checks below exclude all but the same
4879 array reference if the where and elsewhere destinations
4880 are the same. In short, this is VERY conservative and this
4881 is needed because the two loops, required by the standard,
4882 are coalesced in gfc_trans_where_3. */
4883 if (!gfc_check_dependency (cblock->next->expr1,
4884 cblock->expr1, 0)
4885 && !gfc_check_dependency (eblock->next->expr1,
4886 cblock->expr1, 0)
4887 && !gfc_check_dependency (cblock->next->expr1,
4888 eblock->next->expr2, 1)
4889 && !gfc_check_dependency (eblock->next->expr1,
4890 cblock->next->expr2, 1)
4891 && !gfc_check_dependency (cblock->next->expr1,
4892 cblock->next->expr2, 1)
4893 && !gfc_check_dependency (eblock->next->expr1,
4894 eblock->next->expr2, 1)
4895 && !gfc_check_dependency (cblock->next->expr1,
4896 eblock->next->expr1, 0)
4897 && !gfc_check_dependency (eblock->next->expr1,
4898 cblock->next->expr1, 0))
4899 return gfc_trans_where_3 (cblock, eblock);
4900 }
4901 }
4902
4903 gfc_start_block (&block);
4904
4905 gfc_trans_where_2 (code, NULL, false, NULL, &block);
4906
4907 return gfc_finish_block (&block);
4908 }
4909
4910
4911 /* CYCLE a DO loop. The label decl has already been created by
4912 gfc_trans_do(); it's in the cycle_label field of the gfc_code node
4913 at the head of the loop. We must mark the label as used. */
4914
4915 tree
4916 gfc_trans_cycle (gfc_code * code)
4917 {
4918 tree cycle_label;
4919
4920 cycle_label = code->ext.which_construct->cycle_label;
4921 gcc_assert (cycle_label);
4922
4923 TREE_USED (cycle_label) = 1;
4924 return build1_v (GOTO_EXPR, cycle_label);
4925 }
4926
4927
4928 /* EXIT a DO loop. Similar to CYCLE, but now the label is in the
4929 exit_label field of the gfc_code node at the head of the
4930 loop. */
4931
4932 tree
4933 gfc_trans_exit (gfc_code * code)
4934 {
4935 tree exit_label;
4936
4937 exit_label = code->ext.which_construct->exit_label;
4938 gcc_assert (exit_label);
4939
4940 TREE_USED (exit_label) = 1;
4941 return build1_v (GOTO_EXPR, exit_label);
4942 }
4943
4944
4945 /* Translate the ALLOCATE statement. */
4946
4947 tree
4948 gfc_trans_allocate (gfc_code * code)
4949 {
4950 gfc_alloc *al;
4951 gfc_expr *e;
4952 gfc_expr *expr;
4953 gfc_se se;
4954 tree tmp;
4955 tree parm;
4956 tree stat;
4957 tree errmsg;
4958 tree errlen;
4959 tree label_errmsg;
4960 tree label_finish;
4961 tree memsz;
4962 tree expr3;
4963 tree slen3;
4964 stmtblock_t block;
4965 stmtblock_t post;
4966 gfc_expr *sz;
4967 gfc_se se_sz;
4968 tree class_expr;
4969 tree nelems;
4970 tree memsize = NULL_TREE;
4971 tree classexpr = NULL_TREE;
4972
4973 if (!code->ext.alloc.list)
4974 return NULL_TREE;
4975
4976 stat = tmp = memsz = NULL_TREE;
4977 label_errmsg = label_finish = errmsg = errlen = NULL_TREE;
4978
4979 gfc_init_block (&block);
4980 gfc_init_block (&post);
4981
4982 /* STAT= (and maybe ERRMSG=) is present. */
4983 if (code->expr1)
4984 {
4985 /* STAT=. */
4986 tree gfc_int4_type_node = gfc_get_int_type (4);
4987 stat = gfc_create_var (gfc_int4_type_node, "stat");
4988
4989 /* ERRMSG= only makes sense with STAT=. */
4990 if (code->expr2)
4991 {
4992 gfc_init_se (&se, NULL);
4993 se.want_pointer = 1;
4994 gfc_conv_expr_lhs (&se, code->expr2);
4995 errmsg = se.expr;
4996 errlen = se.string_length;
4997 }
4998 else
4999 {
5000 errmsg = null_pointer_node;
5001 errlen = build_int_cst (gfc_charlen_type_node, 0);
5002 }
5003
5004 /* GOTO destinations. */
5005 label_errmsg = gfc_build_label_decl (NULL_TREE);
5006 label_finish = gfc_build_label_decl (NULL_TREE);
5007 TREE_USED (label_finish) = 0;
5008 }
5009
5010 expr3 = NULL_TREE;
5011 slen3 = NULL_TREE;
5012
5013 for (al = code->ext.alloc.list; al != NULL; al = al->next)
5014 {
5015 expr = gfc_copy_expr (al->expr);
5016
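/* For CLASS objects the allocation is done on the _data component. */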
5017 if (expr->ts.type == BT_CLASS)
5018 gfc_add_data_component (expr);
5019
5020 gfc_init_se (&se, NULL);
5021
5022 se.want_pointer = 1;
5023 se.descriptor_only = 1;
5024 gfc_conv_expr (&se, expr);
5025
5026 /* Evaluate expr3 just once if not a variable. */
5027 if (al == code->ext.alloc.list
5028 && al->expr->ts.type == BT_CLASS
5029 && code->expr3
5030 && code->expr3->ts.type == BT_CLASS
5031 && code->expr3->expr_type != EXPR_VARIABLE)
5032 {
5033 gfc_init_se (&se_sz, NULL);
5034 gfc_conv_expr_reference (&se_sz, code->expr3);
5035 gfc_conv_class_to_class (&se_sz, code->expr3,
5036 code->expr3->ts, false, true, false, false);
5037 gfc_add_block_to_block (&se.pre, &se_sz.pre);
5038 gfc_add_block_to_block (&se.post, &se_sz.post);
5039 classexpr = build_fold_indirect_ref_loc (input_location,
5040 se_sz.expr);
5041 classexpr = gfc_evaluate_now (classexpr, &se.pre);
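/* The allocation size of the polymorphic SOURCE is taken from the
   _size field of its vtable. */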
5042 memsize = gfc_vtable_size_get (classexpr);
5043 memsize = fold_convert (sizetype, memsize);
5044 }
5045
5046 memsz = memsize;
5047 class_expr = classexpr;
5048
5049 nelems = NULL_TREE;
5050 if (!gfc_array_allocate (&se, expr, stat, errmsg, errlen, label_finish,
5051 memsz, &nelems, code->expr3, &code->ext.alloc.ts))
5052 {
5053 bool unlimited_char;
5054
5055 unlimited_char = UNLIMITED_POLY (al->expr)
5056 && ((code->expr3 && code->expr3->ts.type == BT_CHARACTER)
5057 || (code->ext.alloc.ts.type == BT_CHARACTER
5058 && code->ext.alloc.ts.u.cl
5059 && code->ext.alloc.ts.u.cl->length));
5060
5061 /* A scalar or derived type. */
5062
5063 /* Determine allocate size. */
5064 if (al->expr->ts.type == BT_CLASS
5065 && !unlimited_char
5066 && code->expr3
5067 && memsz == NULL_TREE)
5068 {
5069 if (code->expr3->ts.type == BT_CLASS)
5070 {
5071 sz = gfc_copy_expr (code->expr3);
5072 gfc_add_vptr_component (sz);
5073 gfc_add_size_component (sz);
5074 gfc_init_se (&se_sz, NULL);
5075 gfc_conv_expr (&se_sz, sz);
5076 gfc_free_expr (sz);
5077 memsz = se_sz.expr;
5078 }
5079 else
5080 memsz = TYPE_SIZE_UNIT (gfc_typenode_for_spec (&code->expr3->ts));
5081 }
5082 else if (((al->expr->ts.type == BT_CHARACTER && al->expr->ts.deferred)
5083 || unlimited_char) && code->expr3)
5084 {
5085 if (!code->expr3->ts.u.cl->backend_decl)
5086 {
5087 /* Convert and use the length expression. */
5088 gfc_init_se (&se_sz, NULL);
5089 if (code->expr3->expr_type == EXPR_VARIABLE
5090 || code->expr3->expr_type == EXPR_CONSTANT)
5091 {
5092 gfc_conv_expr (&se_sz, code->expr3);
5093 gfc_add_block_to_block (&se.pre, &se_sz.pre);
5094 se_sz.string_length
5095 = gfc_evaluate_now (se_sz.string_length, &se.pre);
5096 gfc_add_block_to_block (&se.pre, &se_sz.post);
5097 memsz = se_sz.string_length;
5098 }
5099 else if (code->expr3->mold
5100 && code->expr3->ts.u.cl
5101 && code->expr3->ts.u.cl->length)
5102 {
5103 gfc_conv_expr (&se_sz, code->expr3->ts.u.cl->length);
5104 gfc_add_block_to_block (&se.pre, &se_sz.pre);
5105 se_sz.expr = gfc_evaluate_now (se_sz.expr, &se.pre);
5106 gfc_add_block_to_block (&se.pre, &se_sz.post);
5107 memsz = se_sz.expr;
5108 }
5109 else
5110 {
5111 /* This would be inefficient and could possibly
5112 generate wrong code if the result were not stored
5113 in expr3/slen3. */
5114 if (slen3 == NULL_TREE)
5115 {
5116 gfc_conv_expr (&se_sz, code->expr3);
5117 gfc_add_block_to_block (&se.pre, &se_sz.pre);
5118 expr3 = gfc_evaluate_now (se_sz.expr, &se.pre);
5119 gfc_add_block_to_block (&post, &se_sz.post);
5120 slen3 = gfc_evaluate_now (se_sz.string_length,
5121 &se.pre);
5122 }
5123 memsz = slen3;
5124 }
5125 }
5126 else
5127 /* Otherwise use the stored string length. */
5128 memsz = code->expr3->ts.u.cl->backend_decl;
5129 tmp = al->expr->ts.u.cl->backend_decl;
5130
5131 /* Store the string length. */
5132 if (tmp && TREE_CODE (tmp) == VAR_DECL)
5133 gfc_add_modify (&se.pre, tmp, fold_convert (TREE_TYPE (tmp),
5134 memsz));
5135 else if (al->expr->ts.type == BT_CHARACTER
5136 && al->expr->ts.deferred && se.string_length)
5137 gfc_add_modify (&se.pre, se.string_length,
5138 fold_convert (TREE_TYPE (se.string_length),
5139 memsz));
5140 else if ((al->expr->ts.type == BT_DERIVED
5141 || al->expr->ts.type == BT_CLASS)
5142 && expr->ts.u.derived->attr.unlimited_polymorphic)
5143 {
5144 tmp = gfc_class_len_get (al->expr->symtree->n.sym->backend_decl);
5145 gfc_add_modify (&se.pre, tmp,
5146 fold_convert (TREE_TYPE (tmp),
5147 memsz));
5148 }
5149
5150 /* Convert to size in bytes, using the character KIND. */
5151 if (unlimited_char)
5152 tmp = TREE_TYPE (gfc_typenode_for_spec (&code->expr3->ts));
5153 else
5154 tmp = TREE_TYPE (gfc_typenode_for_spec (&al->expr->ts));
5155 tmp = TYPE_SIZE_UNIT (tmp);
5156 memsz = fold_build2_loc (input_location, MULT_EXPR,
5157 TREE_TYPE (tmp), tmp,
5158 fold_convert (TREE_TYPE (tmp), memsz));
5159 }
5160 else if ((al->expr->ts.type == BT_CHARACTER && al->expr->ts.deferred)
5161 || unlimited_char)
5162 {
5163 gcc_assert (code->ext.alloc.ts.u.cl && code->ext.alloc.ts.u.cl->length);
5164 gfc_init_se (&se_sz, NULL);
5165 gfc_conv_expr (&se_sz, code->ext.alloc.ts.u.cl->length);
5166 gfc_add_block_to_block (&se.pre, &se_sz.pre);
5167 se_sz.expr = gfc_evaluate_now (se_sz.expr, &se.pre);
5168 gfc_add_block_to_block (&se.pre, &se_sz.post);
5169 /* Store the string length. */
5170 tmp = al->expr->ts.u.cl->backend_decl;
5171 gfc_add_modify (&se.pre, tmp, fold_convert (TREE_TYPE (tmp),
5172 se_sz.expr));
5173 tmp = TREE_TYPE (gfc_typenode_for_spec (&code->ext.alloc.ts));
5174 tmp = TYPE_SIZE_UNIT (tmp);
5175 memsz = fold_build2_loc (input_location, MULT_EXPR,
5176 TREE_TYPE (tmp), tmp,
5177 fold_convert (TREE_TYPE (se_sz.expr),
5178 se_sz.expr));
5179 }
5180 else if (code->ext.alloc.ts.type != BT_UNKNOWN)
5181 memsz = TYPE_SIZE_UNIT (gfc_typenode_for_spec (&code->ext.alloc.ts));
5182 else if (memsz == NULL_TREE)
5183 memsz = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (se.expr)));
5184
5185 if (expr->ts.type == BT_CHARACTER && memsz == NULL_TREE)
5186 {
5187 memsz = se.string_length;
5188
5189 /* Convert to size in bytes, using the character KIND. */
5190 tmp = TREE_TYPE (gfc_typenode_for_spec (&code->ext.alloc.ts));
5191 tmp = TYPE_SIZE_UNIT (tmp);
5192 memsz = fold_build2_loc (input_location, MULT_EXPR,
5193 TREE_TYPE (tmp), tmp,
5194 fold_convert (TREE_TYPE (tmp), memsz));
5195 }
5196
5197 /* Allocate - for non-pointers with re-alloc checking. */
5198 if (gfc_expr_attr (expr).allocatable)
5199 gfc_allocate_allocatable (&se.pre, se.expr, memsz, NULL_TREE,
5200 stat, errmsg, errlen, label_finish, expr);
5201 else
5202 gfc_allocate_using_malloc (&se.pre, se.expr, memsz, stat);
5203
5204 if (al->expr->ts.type == BT_DERIVED
5205 && expr->ts.u.derived->attr.alloc_comp)
5206 {
5207 tmp = build_fold_indirect_ref_loc (input_location, se.expr);
5208 tmp = gfc_nullify_alloc_comp (expr->ts.u.derived, tmp, 0);
5209 gfc_add_expr_to_block (&se.pre, tmp);
5210 }
5211 }
5212
5213 gfc_add_block_to_block (&block, &se.pre);
5214
5215 /* Error checking -- Note: ERRMSG only makes sense with STAT. */
5216 if (code->expr1)
5217 {
5218 tmp = build1_v (GOTO_EXPR, label_errmsg);
5219 parm = fold_build2_loc (input_location, NE_EXPR,
5220 boolean_type_node, stat,
5221 build_int_cst (TREE_TYPE (stat), 0));
5222 tmp = fold_build3_loc (input_location, COND_EXPR, void_type_node,
5223 gfc_unlikely (parm, PRED_FORTRAN_FAIL_ALLOC),
5224 tmp, build_empty_stmt (input_location));
5225 gfc_add_expr_to_block (&block, tmp);
5226 }
5227
5228 /* We need the vptr of CLASS objects to be initialized. */
5229 e = gfc_copy_expr (al->expr);
5230 if (e->ts.type == BT_CLASS)
5231 {
5232 gfc_expr *lhs, *rhs;
5233 gfc_se lse;
5234 gfc_ref *ref, *class_ref, *tail;
5235
5236 /* Find the last class reference. */
5237 class_ref = NULL;
5238 for (ref = e->ref; ref; ref = ref->next)
5239 {
5240 if (ref->type == REF_COMPONENT
5241 && ref->u.c.component->ts.type == BT_CLASS)
5242 class_ref = ref;
5243
5244 if (ref->next == NULL)
5245 break;
5246 }
5247
5248 /* Remove and store all subsequent references after the
5249 CLASS reference. */
5250 if (class_ref)
5251 {
5252 tail = class_ref->next;
5253 class_ref->next = NULL;
5254 }
5255 else
5256 {
5257 tail = e->ref;
5258 e->ref = NULL;
5259 }
5260
5261 lhs = gfc_expr_to_initialize (e);
5262 gfc_add_vptr_component (lhs);
5263
5264 /* Remove the _vptr component and restore the original tail
5265 references. */
5266 if (class_ref)
5267 {
5268 gfc_free_ref_list (class_ref->next);
5269 class_ref->next = tail;
5270 }
5271 else
5272 {
5273 gfc_free_ref_list (e->ref);
5274 e->ref = tail;
5275 }
5276
5277 if (class_expr != NULL_TREE)
5278 {
5279 /* Polymorphic SOURCE: VPTR must be determined at run time. */
5280 gfc_init_se (&lse, NULL);
5281 lse.want_pointer = 1;
5282 gfc_conv_expr (&lse, lhs);
5283 tmp = gfc_class_vptr_get (class_expr);
5284 gfc_add_modify (&block, lse.expr,
5285 fold_convert (TREE_TYPE (lse.expr), tmp));
5286 }
5287 else if (code->expr3 && code->expr3->ts.type == BT_CLASS)
5288 {
5289 /* Polymorphic SOURCE: VPTR must be determined at run time. */
5290 rhs = gfc_copy_expr (code->expr3);
5291 gfc_add_vptr_component (rhs);
5292 tmp = gfc_trans_pointer_assignment (lhs, rhs);
5293 gfc_add_expr_to_block (&block, tmp);
5294 gfc_free_expr (rhs);
5295 rhs = gfc_expr_to_initialize (e);
5296 }
5297 else
5298 {
5299 /* VPTR is fixed at compile time. */
5300 gfc_symbol *vtab;
5301 gfc_typespec *ts;
5302 if (code->expr3)
5303 ts = &code->expr3->ts;
5304 else if (e->ts.type == BT_DERIVED)
5305 ts = &e->ts;
5306 else if (code->ext.alloc.ts.type == BT_DERIVED || UNLIMITED_POLY (al->expr))
5307 ts = &code->ext.alloc.ts;
5308 else if (e->ts.type == BT_CLASS)
5309 ts = &CLASS_DATA (e)->ts;
5310 else
5311 ts = &e->ts;
5312
5313 if (ts->type == BT_DERIVED || UNLIMITED_POLY (e))
5314 {
5315 vtab = gfc_find_vtab (ts);
5316 gcc_assert (vtab);
5317 gfc_init_se (&lse, NULL);
5318 lse.want_pointer = 1;
5319 gfc_conv_expr (&lse, lhs);
5320 tmp = gfc_build_addr_expr (NULL_TREE,
5321 gfc_get_symbol_decl (vtab));
5322 gfc_add_modify (&block, lse.expr,
5323 fold_convert (TREE_TYPE (lse.expr), tmp));
5324 }
5325 }
5326 gfc_free_expr (lhs);
5327 }
5328
5329 gfc_free_expr (e);
5330
5331 if (code->expr3 && !code->expr3->mold)
5332 {
5333 /* Initialization via SOURCE block
5334 (or static default initializer). */
5335 gfc_expr *rhs = gfc_copy_expr (code->expr3);
5336 if (class_expr != NULL_TREE)
5337 {
5338 tree to;
5339 to = TREE_OPERAND (se.expr, 0);
5340
5341 tmp = gfc_copy_class_to_class (class_expr, to, nelems);
5342 }
5343 else if (al->expr->ts.type == BT_CLASS)
5344 {
5345 gfc_actual_arglist *actual;
5346 gfc_expr *ppc;
5347 gfc_code *ppc_code;
5348 gfc_ref *ref, *dataref;
5349
5350 /* Do a polymorphic deep copy. */
5351 actual = gfc_get_actual_arglist ();
5352 actual->expr = gfc_copy_expr (rhs);
5353 if (rhs->ts.type == BT_CLASS)
5354 gfc_add_data_component (actual->expr);
5355 actual->next = gfc_get_actual_arglist ();
5356 actual->next->expr = gfc_copy_expr (al->expr);
5357 actual->next->expr->ts.type = BT_CLASS;
5358 gfc_add_data_component (actual->next->expr);
5359
5360 dataref = NULL;
5361 /* Make sure we go up through the reference chain to
5362 the _data reference, where the arrayspec is found. */
5363 for (ref = actual->next->expr->ref; ref; ref = ref->next)
5364 if (ref->type == REF_COMPONENT
5365 && strcmp (ref->u.c.component->name, "_data") == 0)
5366 dataref = ref;
5367
5368 if (dataref && dataref->u.c.component->as)
5369 {
5370 int dim;
5371 gfc_expr *temp;
5372 gfc_ref *ref = dataref->next;
5373 ref->u.ar.type = AR_SECTION;
5374 /* We have to set up the array reference to give ranges
5375 in all dimensions and ensure that the end and stride
5376 are set so that the copy can be scalarized. */
5377 dim = 0;
5378 for (; dim < dataref->u.c.component->as->rank; dim++)
5379 {
5380 ref->u.ar.dimen_type[dim] = DIMEN_RANGE;
5381 if (ref->u.ar.end[dim] == NULL)
5382 {
5383 ref->u.ar.end[dim] = ref->u.ar.start[dim];
5384 temp = gfc_get_int_expr (gfc_default_integer_kind,
5385 &al->expr->where, 1);
5386 ref->u.ar.start[dim] = temp;
5387 }
5388 temp = gfc_subtract (gfc_copy_expr (ref->u.ar.end[dim]),
5389 gfc_copy_expr (ref->u.ar.start[dim]));
5390 temp = gfc_add (gfc_get_int_expr (gfc_default_integer_kind,
5391 &al->expr->where, 1),
5392 temp);
5393 }
5394 }
5395 if (rhs->ts.type == BT_CLASS)
5396 {
5397 ppc = gfc_copy_expr (rhs);
5398 gfc_add_vptr_component (ppc);
5399 }
5400 else
5401 ppc = gfc_lval_expr_from_sym (gfc_find_vtab (&rhs->ts));
5402 gfc_add_component_ref (ppc, "_copy");
5403
5404 ppc_code = gfc_get_code (EXEC_CALL);
5405 ppc_code->resolved_sym = ppc->symtree->n.sym;
5406 /* Although '_copy' is set to be elemental in class.c, it is
5407 not staying that way. Find out why, sometime.... */
5408 ppc_code->resolved_sym->attr.elemental = 1;
5409 ppc_code->ext.actual = actual;
5410 ppc_code->expr1 = ppc;
5411 /* Since '_copy' is elemental, the scalarizer will take care
5412 of arrays in gfc_trans_call. */
5413 tmp = gfc_trans_call (ppc_code, true, NULL, NULL, false);
5414 gfc_free_statements (ppc_code);
5415 }
5416 else if (expr3 != NULL_TREE)
5417 {
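/* Deferred character length: copy the previously evaluated SOURCE
   string into the newly allocated memory. */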
5418 tmp = build_fold_indirect_ref_loc (input_location, se.expr);
5419 gfc_trans_string_copy (&block, slen3, tmp, code->expr3->ts.kind,
5420 slen3, expr3, code->expr3->ts.kind);
5421 tmp = NULL_TREE;
5422 }
5423 else
5424 {
5425 /* Switch off automatic reallocation since we have just done
5426 the ALLOCATE. */
5427 int realloc_lhs = flag_realloc_lhs;
5428 flag_realloc_lhs = 0;
5429 tmp = gfc_trans_assignment (gfc_expr_to_initialize (expr),
5430 rhs, false, false);
5431 flag_realloc_lhs = realloc_lhs;
5432 }
5433 gfc_free_expr (rhs);
5434 gfc_add_expr_to_block (&block, tmp);
5435 }
5436 else if (code->expr3 && code->expr3->mold
5437 && code->expr3->ts.type == BT_CLASS)
5438 {
5439 /* Since the _vptr has already been assigned to the allocate
5440 object, we can use gfc_copy_class_to_class in its
5441 initialization mode. */
5442 tmp = TREE_OPERAND (se.expr, 0);
5443 tmp = gfc_copy_class_to_class (NULL_TREE, tmp, nelems);
5444 gfc_add_expr_to_block (&block, tmp);
5445 }
5446
5447 gfc_free_expr (expr);
5448 }
5449
5450 /* STAT. */
5451 if (code->expr1)
5452 {
5453 tmp = build1_v (LABEL_EXPR, label_errmsg);
5454 gfc_add_expr_to_block (&block, tmp);
5455 }
5456
5457 /* ERRMSG - only useful if STAT is present. */
5458 if (code->expr1 && code->expr2)
5459 {
5460 const char *msg = "Attempt to allocate an allocated object";
5461 tree slen, dlen, errmsg_str;
5462 stmtblock_t errmsg_block;
5463
5464 gfc_init_block (&errmsg_block);
5465
5466 errmsg_str = gfc_create_var (pchar_type_node, "ERRMSG");
5467 gfc_add_modify (&errmsg_block, errmsg_str,
5468 gfc_build_addr_expr (pchar_type_node,
5469 gfc_build_localized_cstring_const (msg)));
5470
5471 slen = build_int_cst (gfc_charlen_type_node, ((int) strlen (msg)));
5472 dlen = gfc_get_expr_charlen (code->expr2);
5473 slen = fold_build2_loc (input_location, MIN_EXPR, TREE_TYPE (slen), dlen,
5474 slen);
5475
5476 gfc_trans_string_copy (&errmsg_block, dlen, errmsg, code->expr2->ts.kind,
5477 slen, errmsg_str, gfc_default_character_kind);
5478 dlen = gfc_finish_block (&errmsg_block);
5479
5480 tmp = fold_build2_loc (input_location, NE_EXPR, boolean_type_node, stat,
5481 build_int_cst (TREE_TYPE (stat), 0));
5482
5483 tmp = build3_v (COND_EXPR, tmp, dlen, build_empty_stmt (input_location));
5484
5485 gfc_add_expr_to_block (&block, tmp);
5486 }
5487
5488 /* STAT block. */
5489 if (code->expr1)
5490 {
5491 if (TREE_USED (label_finish))
5492 {
5493 tmp = build1_v (LABEL_EXPR, label_finish);
5494 gfc_add_expr_to_block (&block, tmp);
5495 }
5496
5497 gfc_init_se (&se, NULL);
5498 gfc_conv_expr_lhs (&se, code->expr1);
5499 tmp = convert (TREE_TYPE (se.expr), stat);
5500 gfc_add_modify (&block, se.expr, tmp);
5501 }
5502
5503 gfc_add_block_to_block (&block, &se.post);
5504 gfc_add_block_to_block (&block, &post);
5505
5506 return gfc_finish_block (&block);
5507 }
5508
5509
5510 /* Translate a DEALLOCATE statement. */
5511
5512 tree
5513 gfc_trans_deallocate (gfc_code *code)
5514 {
5515 gfc_se se;
5516 gfc_alloc *al;
5517 tree apstat, pstat, stat, errmsg, errlen, tmp;
5518 tree label_finish, label_errmsg;
5519 stmtblock_t block;
5520
5521 pstat = apstat = stat = errmsg = errlen = tmp = NULL_TREE;
5522 label_finish = label_errmsg = NULL_TREE;
5523
5524 gfc_start_block (&block);
5525
5526 /* Count the number of failed deallocations. If deallocate() was
5527 called with STAT=, then set STAT to the count. If deallocate
5528 was called with ERRMSG=, then set ERRMSG to a string. */
5529 if (code->expr1)
5530 {
5531 tree gfc_int4_type_node = gfc_get_int_type (4);
5532
5533 stat = gfc_create_var (gfc_int4_type_node, "stat");
5534 pstat = gfc_build_addr_expr (NULL_TREE, stat);
5535
5536 /* GOTO destinations. */
5537 label_errmsg = gfc_build_label_decl (NULL_TREE);
5538 label_finish = gfc_build_label_decl (NULL_TREE);
5539 TREE_USED (label_finish) = 0;
5540 }
5541
5542 /* Set ERRMSG - only needed if STAT is available. */
5543 if (code->expr1 && code->expr2)
5544 {
5545 gfc_init_se (&se, NULL);
5546 se.want_pointer = 1;
5547 gfc_conv_expr_lhs (&se, code->expr2);
5548 errmsg = se.expr;
5549 errlen = se.string_length;
5550 }
5551
5552 for (al = code->ext.alloc.list; al != NULL; al = al->next)
5553 {
5554 gfc_expr *expr = gfc_copy_expr (al->expr);
5555 gcc_assert (expr->expr_type == EXPR_VARIABLE);
5556
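/* For CLASS objects the deallocation is done on the _data component. */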
5557 if (expr->ts.type == BT_CLASS)
5558 gfc_add_data_component (expr);
5559
5560 gfc_init_se (&se, NULL);
5561 gfc_start_block (&se.pre);
5562
5563 se.want_pointer = 1;
5564 se.descriptor_only = 1;
5565 gfc_conv_expr (&se, expr);
5566
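/* Deallocate an array or coarray. */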
5567 if (expr->rank || gfc_is_coarray (expr))
5568 {
5569 if (expr->ts.type == BT_DERIVED && expr->ts.u.derived->attr.alloc_comp
5570 && !gfc_is_finalizable (expr->ts.u.derived, NULL))
5571 {
5572 gfc_ref *ref;
5573 gfc_ref *last = NULL;
5574 for (ref = expr->ref; ref; ref = ref->next)
5575 if (ref->type == REF_COMPONENT)
5576 last = ref;
5577
5578 /* Do not deallocate the components of a derived type
5579 ultimate pointer component. */
5580 if (!(last && last->u.c.component->attr.pointer)
5581 && !(!last && expr->symtree->n.sym->attr.pointer))
5582 {
5583 tmp = gfc_deallocate_alloc_comp (expr->ts.u.derived, se.expr,
5584 expr->rank);
5585 gfc_add_expr_to_block (&se.pre, tmp);
5586 }
5587 }
5588 tmp = gfc_array_deallocate (se.expr, pstat, errmsg, errlen,
5589 label_finish, expr);
5590 gfc_add_expr_to_block (&se.pre, tmp);
5591 if (al->expr->ts.type == BT_CLASS)
5592 gfc_reset_vptr (&se.pre, al->expr);
5593 }
5594 else
5595 {
5596 tmp = gfc_deallocate_scalar_with_status (se.expr, pstat, false,
5597 al->expr, al->expr->ts);
5598 gfc_add_expr_to_block (&se.pre, tmp);
5599
5600 /* Set to zero after deallocation. */
5601 tmp = fold_build2_loc (input_location, MODIFY_EXPR, void_type_node,
5602 se.expr,
5603 build_int_cst (TREE_TYPE (se.expr), 0));
5604 gfc_add_expr_to_block (&se.pre, tmp);
5605
5606 if (al->expr->ts.type == BT_CLASS)
5607 gfc_reset_vptr (&se.pre, al->expr);
5608 }
5609
5610 if (code->expr1)
5611 {
5612 tree cond;
5613
5614 cond = fold_build2_loc (input_location, NE_EXPR, boolean_type_node, stat,
5615 build_int_cst (TREE_TYPE (stat), 0));
5616 tmp = fold_build3_loc (input_location, COND_EXPR, void_type_node,
5617 gfc_unlikely (cond, PRED_FORTRAN_FAIL_ALLOC),
5618 build1_v (GOTO_EXPR, label_errmsg),
5619 build_empty_stmt (input_location));
5620 gfc_add_expr_to_block (&se.pre, tmp);
5621 }
5622
5623 tmp = gfc_finish_block (&se.pre);
5624 gfc_add_expr_to_block (&block, tmp);
5625 gfc_free_expr (expr);
5626 }
5627
5628 if (code->expr1)
5629 {
5630 tmp = build1_v (LABEL_EXPR, label_errmsg);
5631 gfc_add_expr_to_block (&block, tmp);
5632 }
5633
5634 /* Set ERRMSG - only needed if STAT is available. */
5635 if (code->expr1 && code->expr2)
5636 {
5637 const char *msg = "Attempt to deallocate an unallocated object";
5638 stmtblock_t errmsg_block;
5639 tree errmsg_str, slen, dlen, cond;
5640
5641 gfc_init_block (&errmsg_block);
5642
5643 errmsg_str = gfc_create_var (pchar_type_node, "ERRMSG");
5644 gfc_add_modify (&errmsg_block, errmsg_str,
5645 gfc_build_addr_expr (pchar_type_node,
5646 gfc_build_localized_cstring_const (msg)));
5647 slen = build_int_cst (gfc_charlen_type_node, ((int) strlen (msg)));
5648 dlen = gfc_get_expr_charlen (code->expr2);
5649
5650 gfc_trans_string_copy (&errmsg_block, dlen, errmsg, code->expr2->ts.kind,
5651 slen, errmsg_str, gfc_default_character_kind);
5652 tmp = gfc_finish_block (&errmsg_block);
5653
5654 cond = fold_build2_loc (input_location, NE_EXPR, boolean_type_node, stat,
5655 build_int_cst (TREE_TYPE (stat), 0));
5656 tmp = fold_build3_loc (input_location, COND_EXPR, void_type_node,
5657 gfc_unlikely (cond, PRED_FORTRAN_FAIL_ALLOC), tmp,
5658 build_empty_stmt (input_location));
5659
5660 gfc_add_expr_to_block (&block, tmp);
5661 }
5662
5663 if (code->expr1 && TREE_USED (label_finish))
5664 {
5665 tmp = build1_v (LABEL_EXPR, label_finish);
5666 gfc_add_expr_to_block (&block, tmp);
5667 }
5668
5669 /* Set STAT. */
5670 if (code->expr1)
5671 {
5672 gfc_init_se (&se, NULL);
5673 gfc_conv_expr_lhs (&se, code->expr1);
5674 tmp = convert (TREE_TYPE (se.expr), stat);
5675 gfc_add_modify (&block, se.expr, tmp);
5676 }
5677
5678 return gfc_finish_block (&block);
5679 }
5680
5681 #include "gt-fortran-trans-stmt.h"