[gcc.git] / gcc / fortran / trans-stmt.c
1 /* Statement translation -- generate GCC trees from gfc_code.
2 Copyright (C) 2002-2015 Free Software Foundation, Inc.
3 Contributed by Paul Brook <paul@nowt.org>
4 and Steven Bosscher <s.bosscher@student.tudelft.nl>
5
6 This file is part of GCC.
7
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 3, or (at your option) any later
11 version.
12
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
16 for more details.
17
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
21
22
23 #include "config.h"
24 #include "system.h"
25 #include "coretypes.h"
26 #include "hash-set.h"
27 #include "machmode.h"
28 #include "vec.h"
29 #include "double-int.h"
30 #include "input.h"
31 #include "alias.h"
32 #include "symtab.h"
33 #include "options.h"
34 #include "wide-int.h"
35 #include "inchash.h"
36 #include "tree.h"
37 #include "fold-const.h"
38 #include "stringpool.h"
39 #include "gfortran.h"
40 #include "flags.h"
41 #include "trans.h"
42 #include "trans-stmt.h"
43 #include "trans-types.h"
44 #include "trans-array.h"
45 #include "trans-const.h"
46 #include "arith.h"
47 #include "dependency.h"
48 #include "ggc.h"
49
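/* Holds the translated loop variable and its start, end and step values for
   a single FORALL / DO CONCURRENT iterator; the iterators of one header are
   chained through NEXT.  */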
50 typedef struct iter_info
51 {
52 tree var;
53 tree start;
54 tree end;
55 tree step;
56 struct iter_info *next;
57 }
58 iter_info;
59
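/* Collects the information for one level of a FORALL nest: the iterator
   chain of this level, the mask and its index (if any), the number of
   iteration variables, the size of the iteration space, and a link to the
   enclosing FORALL.  DO_CONCURRENT is set when a DO CONCURRENT loop is
   handled through the FORALL machinery.  */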
60 typedef struct forall_info
61 {
62 iter_info *this_loop;
63 tree mask;
64 tree maskindex;
65 int nvar;
66 tree size;
67 struct forall_info *prev_nest;
68 bool do_concurrent;
69 }
70 forall_info;
71
72 static void gfc_trans_where_2 (gfc_code *, tree, bool,
73 forall_info *, stmtblock_t *);
74
75 /* Translate an F95 label number to a LABEL_EXPR. */
76
77 tree
78 gfc_trans_label_here (gfc_code * code)
79 {
80 return build1_v (LABEL_EXPR, gfc_get_label_decl (code->here));
81 }
82
83
84 /* Given a variable expression which has been ASSIGNed to, find the decl
85 containing the auxiliary variables. For variables in common blocks this
86 is a field_decl. */
87
88 void
89 gfc_conv_label_variable (gfc_se * se, gfc_expr * expr)
90 {
91 gcc_assert (expr->symtree->n.sym->attr.assign == 1);
92 gfc_conv_expr (se, expr);
93 /* Deals with a variable in a common block. Get the field declaration. */
94 if (TREE_CODE (se->expr) == COMPONENT_REF)
95 se->expr = TREE_OPERAND (se->expr, 1);
96 /* Deals with a dummy argument. Get the parameter declaration. */
97 else if (TREE_CODE (se->expr) == INDIRECT_REF)
98 se->expr = TREE_OPERAND (se->expr, 0);
99 }
100
101 /* Translate a label assignment statement. */
102
103 tree
104 gfc_trans_label_assign (gfc_code * code)
105 {
106 tree label_tree;
107 gfc_se se;
108 tree len;
109 tree addr;
110 tree len_tree;
111 int label_len;
112
113 /* Start a new block. */
114 gfc_init_se (&se, NULL);
115 gfc_start_block (&se.pre);
116 gfc_conv_label_variable (&se, code->expr1);
117
118 len = GFC_DECL_STRING_LEN (se.expr);
119 addr = GFC_DECL_ASSIGN_ADDR (se.expr);
120
121 label_tree = gfc_get_label_decl (code->label1);
122
123 if (code->label1->defined == ST_LABEL_TARGET
124 || code->label1->defined == ST_LABEL_DO_TARGET)
125 {
126 label_tree = gfc_build_addr_expr (pvoid_type_node, label_tree);
127 len_tree = integer_minus_one_node;
128 }
129 else
130 {
131 gfc_expr *format = code->label1->format;
132
133 label_len = format->value.character.length;
134 len_tree = build_int_cst (gfc_charlen_type_node, label_len);
135 label_tree = gfc_build_wide_string_const (format->ts.kind, label_len + 1,
136 format->value.character.string);
137 label_tree = gfc_build_addr_expr (pvoid_type_node, label_tree);
138 }
139
140 gfc_add_modify (&se.pre, len, len_tree);
141 gfc_add_modify (&se.pre, addr, label_tree);
142
143 return gfc_finish_block (&se.pre);
144 }
145
146 /* Translate a GOTO statement. */
147
148 tree
149 gfc_trans_goto (gfc_code * code)
150 {
151 locus loc = code->loc;
152 tree assigned_goto;
153 tree target;
154 tree tmp;
155 gfc_se se;
156
157 if (code->label1 != NULL)
158 return build1_v (GOTO_EXPR, gfc_get_label_decl (code->label1));
159
160 /* ASSIGNED GOTO. */
161 gfc_init_se (&se, NULL);
162 gfc_start_block (&se.pre);
163 gfc_conv_label_variable (&se, code->expr1);
164 tmp = GFC_DECL_STRING_LEN (se.expr);
165 tmp = fold_build2_loc (input_location, NE_EXPR, boolean_type_node, tmp,
166 build_int_cst (TREE_TYPE (tmp), -1));
167 gfc_trans_runtime_check (true, false, tmp, &se.pre, &loc,
168 "Assigned label is not a target label");
169
170 assigned_goto = GFC_DECL_ASSIGN_ADDR (se.expr);
171
172 /* We're going to ignore a label list. It does not really change the
173 statement's semantics (because it is just a further restriction on
174 what's legal code); before, we were comparing label addresses here, but
175 that's a very fragile business and may break with optimization. So
176 just ignore it. */
177
178 target = fold_build1_loc (input_location, GOTO_EXPR, void_type_node,
179 assigned_goto);
180 gfc_add_expr_to_block (&se.pre, target);
181 return gfc_finish_block (&se.pre);
182 }
183
184
185 /* Translate an ENTRY statement. Just adds a label for this entry point. */
186 tree
187 gfc_trans_entry (gfc_code * code)
188 {
189 return build1_v (LABEL_EXPR, code->ext.entry->label);
190 }
191
192
193 /* Replace a gfc_ss structure by another both in the gfc_se struct
194 and the gfc_loopinfo struct. This is used in gfc_conv_elemental_dependencies
195 to replace a variable ss by the corresponding temporary. */
196
197 static void
198 replace_ss (gfc_se *se, gfc_ss *old_ss, gfc_ss *new_ss)
199 {
200 gfc_ss **sess, **loopss;
201
202 /* The old_ss is a ss for a single variable. */
203 gcc_assert (old_ss->info->type == GFC_SS_SECTION);
204
205 for (sess = &(se->ss); *sess != gfc_ss_terminator; sess = &((*sess)->next))
206 if (*sess == old_ss)
207 break;
208 gcc_assert (*sess != gfc_ss_terminator);
209
210 *sess = new_ss;
211 new_ss->next = old_ss->next;
212
213
214 for (loopss = &(se->loop->ss); *loopss != gfc_ss_terminator;
215 loopss = &((*loopss)->loop_chain))
216 if (*loopss == old_ss)
217 break;
218 gcc_assert (*loopss != gfc_ss_terminator);
219
220 *loopss = new_ss;
221 new_ss->loop_chain = old_ss->loop_chain;
222 new_ss->loop = old_ss->loop;
223
224 gfc_free_ss (old_ss);
225 }
226
227
228 /* Check for dependencies between INTENT(IN) and INTENT(OUT) arguments of
229 elemental subroutines. Make temporaries for output arguments if any such
230 dependencies are found. Output arguments are chosen because internal_unpack
231 can be used, as is, to copy the result back to the variable. */
232 static void
233 gfc_conv_elemental_dependencies (gfc_se * se, gfc_se * loopse,
234 gfc_symbol * sym, gfc_actual_arglist * arg,
235 gfc_dep_check check_variable)
236 {
237 gfc_actual_arglist *arg0;
238 gfc_expr *e;
239 gfc_formal_arglist *formal;
240 gfc_se parmse;
241 gfc_ss *ss;
242 gfc_symbol *fsym;
243 tree data;
244 tree size;
245 tree tmp;
246
247 if (loopse->ss == NULL)
248 return;
249
250 ss = loopse->ss;
251 arg0 = arg;
252 formal = gfc_sym_get_dummy_args (sym);
253
254 /* Loop over all the arguments testing for dependencies. */
255 for (; arg != NULL; arg = arg->next, formal = formal ? formal->next : NULL)
256 {
257 e = arg->expr;
258 if (e == NULL)
259 continue;
260
261 /* Obtain the info structure for the current argument. */
262 for (ss = loopse->ss; ss && ss != gfc_ss_terminator; ss = ss->next)
263 if (ss->info->expr == e)
264 break;
265
266 /* If there is a dependency, create a temporary and use it
267 instead of the variable. */
268 fsym = formal ? formal->sym : NULL;
269 if (e->expr_type == EXPR_VARIABLE
270 && e->rank && fsym
271 && fsym->attr.intent != INTENT_IN
272 && gfc_check_fncall_dependency (e, fsym->attr.intent,
273 sym, arg0, check_variable))
274 {
275 tree initial, temptype;
276 stmtblock_t temp_post;
277 gfc_ss *tmp_ss;
278
279 tmp_ss = gfc_get_array_ss (gfc_ss_terminator, NULL, ss->dimen,
280 GFC_SS_SECTION);
281 gfc_mark_ss_chain_used (tmp_ss, 1);
282 tmp_ss->info->expr = ss->info->expr;
283 replace_ss (loopse, ss, tmp_ss);
284
285 /* Obtain the argument descriptor for unpacking. */
286 gfc_init_se (&parmse, NULL);
287 parmse.want_pointer = 1;
288 gfc_conv_expr_descriptor (&parmse, e);
289 gfc_add_block_to_block (&se->pre, &parmse.pre);
290
291 /* If we've got INTENT(INOUT) or a derived type with INTENT(OUT),
292 initialize the array temporary with a copy of the values. */
293 if (fsym->attr.intent == INTENT_INOUT
294 || (fsym->ts.type == BT_DERIVED
295 && fsym->attr.intent == INTENT_OUT))
296 initial = parmse.expr;
297 /* For class expressions, we always initialize with the copy of
298 the values. */
299 else if (e->ts.type == BT_CLASS)
300 initial = parmse.expr;
301 else
302 initial = NULL_TREE;
303
304 if (e->ts.type != BT_CLASS)
305 {
306 /* Find the type of the temporary to create; we don't use the type
307 of e itself as this breaks for subcomponent-references in e
308 (where the type of e is that of the final reference, but
309 parmse.expr's type corresponds to the full derived-type). */
310 /* TODO: Fix this somehow so we don't need a temporary of the whole
311 array but instead only the components referenced. */
312 temptype = TREE_TYPE (parmse.expr); /* Pointer to descriptor. */
313 gcc_assert (TREE_CODE (temptype) == POINTER_TYPE);
314 temptype = TREE_TYPE (temptype);
315 temptype = gfc_get_element_type (temptype);
316 }
317
318 else
319 /* For class arrays signal that the size of the dynamic type has to
320 be obtained from the vtable, using the 'initial' expression. */
321 temptype = NULL_TREE;
322
323 /* Generate the temporary. Cleaning up the temporary should be the
324 very last thing done, so we add the code to a new block and add it
325 to se->post as last instructions. */
326 size = gfc_create_var (gfc_array_index_type, NULL);
327 data = gfc_create_var (pvoid_type_node, NULL);
328 gfc_init_block (&temp_post);
329 tmp = gfc_trans_create_temp_array (&se->pre, &temp_post, tmp_ss,
330 temptype, initial, false, true,
331 false, &arg->expr->where);
332 gfc_add_modify (&se->pre, size, tmp);
333 tmp = fold_convert (pvoid_type_node, tmp_ss->info->data.array.data);
334 gfc_add_modify (&se->pre, data, tmp);
335
336 /* Update other ss' delta. */
337 gfc_set_delta (loopse->loop);
338
339 /* Copy the result back using unpack... */
340 if (e->ts.type != BT_CLASS)
341 tmp = build_call_expr_loc (input_location,
342 gfor_fndecl_in_unpack, 2, parmse.expr, data);
343 else
344 {
345 /* ... except for class results where the copy is
346 unconditional. */
347 tmp = build_fold_indirect_ref_loc (input_location, parmse.expr);
348 tmp = gfc_conv_descriptor_data_get (tmp);
349 tmp = build_call_expr_loc (input_location,
350 builtin_decl_explicit (BUILT_IN_MEMCPY),
351 3, tmp, data,
352 fold_convert (size_type_node, size));
353 }
354 gfc_add_expr_to_block (&se->post, tmp);
355
356 /* parmse.pre is already added above. */
357 gfc_add_block_to_block (&se->post, &parmse.post);
358 gfc_add_block_to_block (&se->post, &temp_post);
359 }
360 }
361 }
362
363
364 /* Get the interface symbol for the procedure corresponding to the given call.
365 We can't get the procedure symbol directly as we have to handle the case
366 of (deferred) type-bound procedures. */
367
368 static gfc_symbol *
369 get_proc_ifc_for_call (gfc_code *c)
370 {
371 gfc_symbol *sym;
372
373 gcc_assert (c->op == EXEC_ASSIGN_CALL || c->op == EXEC_CALL);
374
375 sym = gfc_get_proc_ifc_for_expr (c->expr1);
376
377 /* Fall back to the resolved symbol as a last resort. */
378 if (sym == NULL)
379 sym = c->resolved_sym;
380
381 return sym;
382 }
383
384
385 /* Translate the CALL statement. Builds a call to an F95 subroutine. */
386
387 tree
388 gfc_trans_call (gfc_code * code, bool dependency_check,
389 tree mask, tree count1, bool invert)
390 {
391 gfc_se se;
392 gfc_ss * ss;
393 int has_alternate_specifier;
394 gfc_dep_check check_variable;
395 tree index = NULL_TREE;
396 tree maskexpr = NULL_TREE;
397 tree tmp;
398
399 /* A CALL starts a new block because the actual arguments may have to
400 be evaluated first. */
401 gfc_init_se (&se, NULL);
402 gfc_start_block (&se.pre);
403
404 gcc_assert (code->resolved_sym);
405
406 ss = gfc_ss_terminator;
407 if (code->resolved_sym->attr.elemental)
408 ss = gfc_walk_elemental_function_args (ss, code->ext.actual,
409 get_proc_ifc_for_call (code),
410 GFC_SS_REFERENCE);
411
412 /* This is not an elemental subroutine call with array valued arguments. */
413 if (ss == gfc_ss_terminator)
414 {
415
416 /* Translate the call. */
417 has_alternate_specifier
418 = gfc_conv_procedure_call (&se, code->resolved_sym, code->ext.actual,
419 code->expr1, NULL);
420
421 /* A subroutine without side-effects, by definition, does nothing! */
422 TREE_SIDE_EFFECTS (se.expr) = 1;
423
424 /* Chain the pieces together and return the block. */
425 if (has_alternate_specifier)
426 {
427 gfc_code *select_code;
428 gfc_symbol *sym;
429 select_code = code->next;
430 gcc_assert (select_code->op == EXEC_SELECT);
431 sym = select_code->expr1->symtree->n.sym;
432 se.expr = convert (gfc_typenode_for_spec (&sym->ts), se.expr);
433 if (sym->backend_decl == NULL)
434 sym->backend_decl = gfc_get_symbol_decl (sym);
435 gfc_add_modify (&se.pre, sym->backend_decl, se.expr);
436 }
437 else
438 gfc_add_expr_to_block (&se.pre, se.expr);
439
440 gfc_add_block_to_block (&se.pre, &se.post);
441 }
442
443 else
444 {
445 /* An elemental subroutine call with array valued arguments has
446 to be scalarized. */
447 gfc_loopinfo loop;
448 stmtblock_t body;
449 stmtblock_t block;
450 gfc_se loopse;
451 gfc_se depse;
452
453 /* gfc_walk_elemental_function_args renders the ss chain in the
454 reverse order to the actual argument order. */
455 ss = gfc_reverse_ss (ss);
456
457 /* Initialize the loop. */
458 gfc_init_se (&loopse, NULL);
459 gfc_init_loopinfo (&loop);
460 gfc_add_ss_to_loop (&loop, ss);
461
462 gfc_conv_ss_startstride (&loop);
463 /* TODO: gfc_conv_loop_setup generates a temporary for vector
464 subscripts. This could be prevented in the elemental case
465 as temporaries are handled separately
466 (below in gfc_conv_elemental_dependencies). */
467 gfc_conv_loop_setup (&loop, &code->expr1->where);
468 gfc_mark_ss_chain_used (ss, 1);
469
470 /* Convert the arguments, checking for dependencies. */
471 gfc_copy_loopinfo_to_se (&loopse, &loop);
472 loopse.ss = ss;
473
474 /* For operator assignment, do dependency checking. */
475 if (dependency_check)
476 check_variable = ELEM_CHECK_VARIABLE;
477 else
478 check_variable = ELEM_DONT_CHECK_VARIABLE;
479
480 gfc_init_se (&depse, NULL);
481 gfc_conv_elemental_dependencies (&depse, &loopse, code->resolved_sym,
482 code->ext.actual, check_variable);
483
484 gfc_add_block_to_block (&loop.pre, &depse.pre);
485 gfc_add_block_to_block (&loop.post, &depse.post);
486
487 /* Generate the loop body. */
488 gfc_start_scalarized_body (&loop, &body);
489 gfc_init_block (&block);
490
491 if (mask && count1)
492 {
493 /* Form the mask expression according to the mask. */
494 index = count1;
495 maskexpr = gfc_build_array_ref (mask, index, NULL);
496 if (invert)
497 maskexpr = fold_build1_loc (input_location, TRUTH_NOT_EXPR,
498 TREE_TYPE (maskexpr), maskexpr);
499 }
500
501 /* Add the subroutine call to the block. */
502 gfc_conv_procedure_call (&loopse, code->resolved_sym,
503 code->ext.actual, code->expr1,
504 NULL);
505
506 if (mask && count1)
507 {
508 tmp = build3_v (COND_EXPR, maskexpr, loopse.expr,
509 build_empty_stmt (input_location));
510 gfc_add_expr_to_block (&loopse.pre, tmp);
511 tmp = fold_build2_loc (input_location, PLUS_EXPR,
512 gfc_array_index_type,
513 count1, gfc_index_one_node);
514 gfc_add_modify (&loopse.pre, count1, tmp);
515 }
516 else
517 gfc_add_expr_to_block (&loopse.pre, loopse.expr);
518
519 gfc_add_block_to_block (&block, &loopse.pre);
520 gfc_add_block_to_block (&block, &loopse.post);
521
522 /* Finish up the loop block and the loop. */
523 gfc_add_expr_to_block (&body, gfc_finish_block (&block));
524 gfc_trans_scalarizing_loops (&loop, &body);
525 gfc_add_block_to_block (&se.pre, &loop.pre);
526 gfc_add_block_to_block (&se.pre, &loop.post);
527 gfc_add_block_to_block (&se.pre, &se.post);
528 gfc_cleanup_loop (&loop);
529 }
530
531 return gfc_finish_block (&se.pre);
532 }
533
534
535 /* Translate the RETURN statement. */
536
537 tree
538 gfc_trans_return (gfc_code * code)
539 {
540 if (code->expr1)
541 {
542 gfc_se se;
543 tree tmp;
544 tree result;
545
546 /* If code->expr1 is not NULL, this return statement must appear
547 in a subroutine and current_fake_result_decl has already
548 been generated. */
549
550 result = gfc_get_fake_result_decl (NULL, 0);
551 if (!result)
552 {
553 gfc_warning ("An alternate return at %L without a * dummy argument",
554 &code->expr1->where);
555 return gfc_generate_return ();
556 }
557
558 /* Start a new block for this statement. */
559 gfc_init_se (&se, NULL);
560 gfc_start_block (&se.pre);
561
562 gfc_conv_expr (&se, code->expr1);
563
564 /* Note that the actually returned expression is a simple value and
565 does not depend on any pointers or such; thus we can clean-up with
566 se.post before returning. */
567 tmp = fold_build2_loc (input_location, MODIFY_EXPR, TREE_TYPE (result),
568 result, fold_convert (TREE_TYPE (result),
569 se.expr));
570 gfc_add_expr_to_block (&se.pre, tmp);
571 gfc_add_block_to_block (&se.pre, &se.post);
572
573 tmp = gfc_generate_return ();
574 gfc_add_expr_to_block (&se.pre, tmp);
575 return gfc_finish_block (&se.pre);
576 }
577
578 return gfc_generate_return ();
579 }
580
581
582 /* Translate the PAUSE statement. We have to translate this statement
583 to a runtime library call. */
584
585 tree
586 gfc_trans_pause (gfc_code * code)
587 {
588 tree gfc_int4_type_node = gfc_get_int_type (4);
589 gfc_se se;
590 tree tmp;
591
592 /* Start a new block for this statement. */
593 gfc_init_se (&se, NULL);
594 gfc_start_block (&se.pre);
595
596
597 if (code->expr1 == NULL)
598 {
599 tmp = build_int_cst (gfc_int4_type_node, 0);
600 tmp = build_call_expr_loc (input_location,
601 gfor_fndecl_pause_string, 2,
602 build_int_cst (pchar_type_node, 0), tmp);
603 }
604 else if (code->expr1->ts.type == BT_INTEGER)
605 {
606 gfc_conv_expr (&se, code->expr1);
607 tmp = build_call_expr_loc (input_location,
608 gfor_fndecl_pause_numeric, 1,
609 fold_convert (gfc_int4_type_node, se.expr));
610 }
611 else
612 {
613 gfc_conv_expr_reference (&se, code->expr1);
614 tmp = build_call_expr_loc (input_location,
615 gfor_fndecl_pause_string, 2,
616 se.expr, se.string_length);
617 }
618
619 gfc_add_expr_to_block (&se.pre, tmp);
620
621 gfc_add_block_to_block (&se.pre, &se.post);
622
623 return gfc_finish_block (&se.pre);
624 }
625
626
627 /* Translate the STOP statement. We have to translate this statement
628 to a runtime library call. */
629
630 tree
631 gfc_trans_stop (gfc_code *code, bool error_stop)
632 {
633 tree gfc_int4_type_node = gfc_get_int_type (4);
634 gfc_se se;
635 tree tmp;
636
637 /* Start a new block for this statement. */
638 gfc_init_se (&se, NULL);
639 gfc_start_block (&se.pre);
640
641 if (flag_coarray == GFC_FCOARRAY_LIB && !error_stop)
642 {
643 /* Per F2008, 8.5.1 STOP implies a SYNC MEMORY. */
644 tmp = builtin_decl_explicit (BUILT_IN_SYNC_SYNCHRONIZE);
645 tmp = build_call_expr_loc (input_location, tmp, 0);
646 gfc_add_expr_to_block (&se.pre, tmp);
647
648 tmp = build_call_expr_loc (input_location, gfor_fndecl_caf_finalize, 0);
649 gfc_add_expr_to_block (&se.pre, tmp);
650 }
651
652 if (code->expr1 == NULL)
653 {
654 tmp = build_int_cst (gfc_int4_type_node, 0);
655 tmp = build_call_expr_loc (input_location,
656 error_stop
657 ? (flag_coarray == GFC_FCOARRAY_LIB
658 ? gfor_fndecl_caf_error_stop_str
659 : gfor_fndecl_error_stop_string)
660 : gfor_fndecl_stop_string,
661 2, build_int_cst (pchar_type_node, 0), tmp);
662 }
663 else if (code->expr1->ts.type == BT_INTEGER)
664 {
665 gfc_conv_expr (&se, code->expr1);
666 tmp = build_call_expr_loc (input_location,
667 error_stop
668 ? (flag_coarray == GFC_FCOARRAY_LIB
669 ? gfor_fndecl_caf_error_stop
670 : gfor_fndecl_error_stop_numeric)
671 : gfor_fndecl_stop_numeric_f08, 1,
672 fold_convert (gfc_int4_type_node, se.expr));
673 }
674 else
675 {
676 gfc_conv_expr_reference (&se, code->expr1);
677 tmp = build_call_expr_loc (input_location,
678 error_stop
679 ? (flag_coarray == GFC_FCOARRAY_LIB
680 ? gfor_fndecl_caf_error_stop_str
681 : gfor_fndecl_error_stop_string)
682 : gfor_fndecl_stop_string,
683 2, se.expr, se.string_length);
684 }
685
686 gfc_add_expr_to_block (&se.pre, tmp);
687
688 gfc_add_block_to_block (&se.pre, &se.post);
689
690 return gfc_finish_block (&se.pre);
691 }
692
693
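/* Translate the LOCK and UNLOCK statements.  No actual locking is done
   here; only the STAT= and ACQUIRED_LOCK= variables, if present, are
   updated.  */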
694 tree
695 gfc_trans_lock_unlock (gfc_code *code, gfc_exec_op type ATTRIBUTE_UNUSED)
696 {
697 gfc_se se, argse;
698 tree stat = NULL_TREE, lock_acquired = NULL_TREE;
699
700 /* Short cut: For single images without STAT= or LOCK_ACQUIRED
701 return early. (ERRMSG= is always untouched for -fcoarray=single.) */
702 if (!code->expr2 && !code->expr4 && flag_coarray != GFC_FCOARRAY_LIB)
703 return NULL_TREE;
704
705 gfc_init_se (&se, NULL);
706 gfc_start_block (&se.pre);
707
708 if (code->expr2)
709 {
710 gcc_assert (code->expr2->expr_type == EXPR_VARIABLE);
711 gfc_init_se (&argse, NULL);
712 gfc_conv_expr_val (&argse, code->expr2);
713 stat = argse.expr;
714 }
715
716 if (code->expr4)
717 {
718 gcc_assert (code->expr4->expr_type == EXPR_VARIABLE);
719 gfc_init_se (&argse, NULL);
720 gfc_conv_expr_val (&argse, code->expr4);
721 lock_acquired = argse.expr;
722 }
723
724 if (stat != NULL_TREE)
725 gfc_add_modify (&se.pre, stat, build_int_cst (TREE_TYPE (stat), 0));
726
727 if (lock_acquired != NULL_TREE)
728 gfc_add_modify (&se.pre, lock_acquired,
729 fold_convert (TREE_TYPE (lock_acquired),
730 boolean_true_node));
731
732 return gfc_finish_block (&se.pre);
733 }
734
735
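/* Translate the SYNC ALL, SYNC IMAGES and SYNC MEMORY statements.  With
   -fcoarray=lib this expands to calls into the coarray library; for single
   images only the STAT= variable is cleared and, with bounds checking
   enabled, the image number of SYNC IMAGES is verified.  */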
736 tree
737 gfc_trans_sync (gfc_code *code, gfc_exec_op type)
738 {
739 gfc_se se, argse;
740 tree tmp;
741 tree images = NULL_TREE, stat = NULL_TREE,
742 errmsg = NULL_TREE, errmsglen = NULL_TREE;
743
744 /* Short cut: For single images without bound checking or without STAT=,
745 return early. (ERRMSG= is always untouched for -fcoarray=single.) */
746 if (!code->expr2 && !(gfc_option.rtcheck & GFC_RTCHECK_BOUNDS)
747 && flag_coarray != GFC_FCOARRAY_LIB)
748 return NULL_TREE;
749
750 gfc_init_se (&se, NULL);
751 gfc_start_block (&se.pre);
752
753 if (code->expr1 && code->expr1->rank == 0)
754 {
755 gfc_init_se (&argse, NULL);
756 gfc_conv_expr_val (&argse, code->expr1);
757 images = argse.expr;
758 }
759
760 if (code->expr2)
761 {
762 gcc_assert (code->expr2->expr_type == EXPR_VARIABLE);
763 gfc_init_se (&argse, NULL);
764 gfc_conv_expr_val (&argse, code->expr2);
765 stat = argse.expr;
766 }
767 else
768 stat = null_pointer_node;
769
770 if (code->expr3 && flag_coarray == GFC_FCOARRAY_LIB
771 && type != EXEC_SYNC_MEMORY)
772 {
773 gcc_assert (code->expr3->expr_type == EXPR_VARIABLE);
774 gfc_init_se (&argse, NULL);
775 gfc_conv_expr (&argse, code->expr3);
776 gfc_conv_string_parameter (&argse);
777 errmsg = gfc_build_addr_expr (NULL, argse.expr);
778 errmsglen = argse.string_length;
779 }
780 else if (flag_coarray == GFC_FCOARRAY_LIB && type != EXEC_SYNC_MEMORY)
781 {
782 errmsg = null_pointer_node;
783 errmsglen = build_int_cst (integer_type_node, 0);
784 }
785
786 /* Check SYNC IMAGES(imageset) for valid image index.
787 FIXME: Add a check for image-set arrays. */
788 if (code->expr1 && (gfc_option.rtcheck & GFC_RTCHECK_BOUNDS)
789 && code->expr1->rank == 0)
790 {
791 tree cond;
792 if (flag_coarray != GFC_FCOARRAY_LIB)
793 cond = fold_build2_loc (input_location, NE_EXPR, boolean_type_node,
794 images, build_int_cst (TREE_TYPE (images), 1));
795 else
796 {
797 tree cond2;
798 tmp = build_call_expr_loc (input_location, gfor_fndecl_caf_num_images,
799 2, integer_zero_node,
800 build_int_cst (integer_type_node, -1));
801 cond = fold_build2_loc (input_location, GT_EXPR, boolean_type_node,
802 images, tmp);
803 cond2 = fold_build2_loc (input_location, LT_EXPR, boolean_type_node,
804 images,
805 build_int_cst (TREE_TYPE (images), 1));
806 cond = fold_build2_loc (input_location, TRUTH_OR_EXPR,
807 boolean_type_node, cond, cond2);
808 }
809 gfc_trans_runtime_check (true, false, cond, &se.pre,
810 &code->expr1->where, "Invalid image number "
811 "%d in SYNC IMAGES",
812 fold_convert (integer_type_node, images));
813 }
814
815 /* Per F2008, 8.5.1, a SYNC MEMORY is implied by calling the
816 image control statements SYNC IMAGES and SYNC ALL. */
817 if (flag_coarray == GFC_FCOARRAY_LIB)
818 {
819 tmp = builtin_decl_explicit (BUILT_IN_SYNC_SYNCHRONIZE);
820 tmp = build_call_expr_loc (input_location, tmp, 0);
821 gfc_add_expr_to_block (&se.pre, tmp);
822 }
823
824 if (flag_coarray != GFC_FCOARRAY_LIB || type == EXEC_SYNC_MEMORY)
825 {
826 /* Set STAT to zero. */
827 if (code->expr2)
828 gfc_add_modify (&se.pre, stat, build_int_cst (TREE_TYPE (stat), 0));
829 }
830 else if (type == EXEC_SYNC_ALL)
831 {
832 /* SYNC ALL => stat == null_pointer_node
833 SYNC ALL(stat=s) => stat has an integer type
834
835 If "stat" has the wrong integer type, use a temp variable of
836 the right type and later cast the result back into "stat". */
837 if (stat == null_pointer_node || TREE_TYPE (stat) == integer_type_node)
838 {
839 if (TREE_TYPE (stat) == integer_type_node)
840 stat = gfc_build_addr_expr (NULL, stat);
841
842 tmp = build_call_expr_loc (input_location, gfor_fndecl_caf_sync_all,
843 3, stat, errmsg, errmsglen);
844 gfc_add_expr_to_block (&se.pre, tmp);
845 }
846 else
847 {
848 tree tmp_stat = gfc_create_var (integer_type_node, "stat");
849
850 tmp = build_call_expr_loc (input_location, gfor_fndecl_caf_sync_all,
851 3, gfc_build_addr_expr (NULL, tmp_stat),
852 errmsg, errmsglen);
853 gfc_add_expr_to_block (&se.pre, tmp);
854
855 gfc_add_modify (&se.pre, stat,
856 fold_convert (TREE_TYPE (stat), tmp_stat));
857 }
858 }
859 else
860 {
861 tree len;
862
863 gcc_assert (type == EXEC_SYNC_IMAGES);
864
865 if (!code->expr1)
866 {
867 len = build_int_cst (integer_type_node, -1);
868 images = null_pointer_node;
869 }
870 else if (code->expr1->rank == 0)
871 {
872 len = build_int_cst (integer_type_node, 1);
873 images = gfc_build_addr_expr (NULL_TREE, images);
874 }
875 else
876 {
877 /* FIXME. */
878 if (code->expr1->ts.kind != gfc_c_int_kind)
879 gfc_fatal_error ("Sorry, only support for integer kind %d "
880 "implemented for image-set at %L",
881 gfc_c_int_kind, &code->expr1->where);
882
883 gfc_conv_array_parameter (&se, code->expr1, true, NULL, NULL, &len);
884 images = se.expr;
885
886 tmp = gfc_typenode_for_spec (&code->expr1->ts);
887 if (GFC_ARRAY_TYPE_P (tmp) || GFC_DESCRIPTOR_TYPE_P (tmp))
888 tmp = gfc_get_element_type (tmp);
889
890 len = fold_build2_loc (input_location, TRUNC_DIV_EXPR,
891 TREE_TYPE (len), len,
892 fold_convert (TREE_TYPE (len),
893 TYPE_SIZE_UNIT (tmp)));
894 len = fold_convert (integer_type_node, len);
895 }
896
897 /* SYNC IMAGES(imgs) => stat == null_pointer_node
898 SYNC IMAGES(imgs,stat=s) => stat has an integer type
899
900 If "stat" has the wrong integer type, use a temp variable of
901 the right type and later cast the result back into "stat". */
902 if (stat == null_pointer_node || TREE_TYPE (stat) == integer_type_node)
903 {
904 if (TREE_TYPE (stat) == integer_type_node)
905 stat = gfc_build_addr_expr (NULL, stat);
906
907 tmp = build_call_expr_loc (input_location, gfor_fndecl_caf_sync_images,
908 5, fold_convert (integer_type_node, len),
909 images, stat, errmsg, errmsglen);
910 gfc_add_expr_to_block (&se.pre, tmp);
911 }
912 else
913 {
914 tree tmp_stat = gfc_create_var (integer_type_node, "stat");
915
916 tmp = build_call_expr_loc (input_location, gfor_fndecl_caf_sync_images,
917 5, fold_convert (integer_type_node, len),
918 images, gfc_build_addr_expr (NULL, tmp_stat),
919 errmsg, errmsglen);
920 gfc_add_expr_to_block (&se.pre, tmp);
921
922 gfc_add_modify (&se.pre, stat,
923 fold_convert (TREE_TYPE (stat), tmp_stat));
924 }
925 }
926
927 return gfc_finish_block (&se.pre);
928 }
929
930
931 /* Generate GENERIC for the IF construct. This function also deals with
932 the simple IF statement, because the front end translates the IF
933 statement into an IF construct.
934
935 We translate:
936
937 IF (cond) THEN
938 then_clause
939 ELSEIF (cond2)
940 elseif_clause
941 ELSE
942 else_clause
943 ENDIF
944
945 into:
946
947 pre_cond_s;
948 if (cond_s)
949 {
950 then_clause;
951 }
952 else
953 {
954 pre_cond_s
955 if (cond_s)
956 {
957 elseif_clause
958 }
959 else
960 {
961 else_clause;
962 }
963 }
964
965 where COND_S is the simplified version of the predicate. PRE_COND_S
966 are the pre side-effects produced by the translation of the
967 conditional.
968 We need to build the chain recursively otherwise we run into
969 problems with folding incomplete statements. */
970
971 static tree
972 gfc_trans_if_1 (gfc_code * code)
973 {
974 gfc_se if_se;
975 tree stmt, elsestmt;
976 locus saved_loc;
977 location_t loc;
978
979 /* Check for an unconditional ELSE clause. */
980 if (!code->expr1)
981 return gfc_trans_code (code->next);
982
983 /* Initialize a statement builder for each block. Puts in NULL_TREEs. */
984 gfc_init_se (&if_se, NULL);
985 gfc_start_block (&if_se.pre);
986
987 /* Calculate the IF condition expression. */
988 if (code->expr1->where.lb)
989 {
990 gfc_save_backend_locus (&saved_loc);
991 gfc_set_backend_locus (&code->expr1->where);
992 }
993
994 gfc_conv_expr_val (&if_se, code->expr1);
995
996 if (code->expr1->where.lb)
997 gfc_restore_backend_locus (&saved_loc);
998
999 /* Translate the THEN clause. */
1000 stmt = gfc_trans_code (code->next);
1001
1002 /* Translate the ELSE clause. */
1003 if (code->block)
1004 elsestmt = gfc_trans_if_1 (code->block);
1005 else
1006 elsestmt = build_empty_stmt (input_location);
1007
1008 /* Build the condition expression and add it to the condition block. */
1009 loc = code->expr1->where.lb ? code->expr1->where.lb->location : input_location;
1010 stmt = fold_build3_loc (loc, COND_EXPR, void_type_node, if_se.expr, stmt,
1011 elsestmt);
1012
1013 gfc_add_expr_to_block (&if_se.pre, stmt);
1014
1015 /* Finish off this statement. */
1016 return gfc_finish_block (&if_se.pre);
1017 }
1018
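/* Translate an IF construct.  The real work is done by gfc_trans_if_1; here
   we only provide the exit label used by EXIT statements that leave the
   construct.  */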
1019 tree
1020 gfc_trans_if (gfc_code * code)
1021 {
1022 stmtblock_t body;
1023 tree exit_label;
1024
1025 /* Create exit label so it is available for trans'ing the body code. */
1026 exit_label = gfc_build_label_decl (NULL_TREE);
1027 code->exit_label = exit_label;
1028
1029 /* Translate the actual code in code->block. */
1030 gfc_init_block (&body);
1031 gfc_add_expr_to_block (&body, gfc_trans_if_1 (code->block));
1032
1033 /* Add exit label. */
1034 gfc_add_expr_to_block (&body, build1_v (LABEL_EXPR, exit_label));
1035
1036 return gfc_finish_block (&body);
1037 }
1038
1039
1040 /* Translate an arithmetic IF statement.
1041
1042 IF (cond) label1, label2, label3 translates to
1043
1044 if (cond <= 0)
1045 {
1046 if (cond < 0)
1047 goto label1;
1048 else // cond == 0
1049 goto label2;
1050 }
1051 else // cond > 0
1052 goto label3;
1053
1054 An optimized version can be generated in case of equal labels.
1055 E.g., if label1 is equal to label2, we can translate it to
1056
1057 if (cond <= 0)
1058 goto label1;
1059 else
1060 goto label3;
1061 */
1062
1063 tree
1064 gfc_trans_arithmetic_if (gfc_code * code)
1065 {
1066 gfc_se se;
1067 tree tmp;
1068 tree branch1;
1069 tree branch2;
1070 tree zero;
1071
1072 /* Start a new block. */
1073 gfc_init_se (&se, NULL);
1074 gfc_start_block (&se.pre);
1075
1076 /* Pre-evaluate COND. */
1077 gfc_conv_expr_val (&se, code->expr1);
1078 se.expr = gfc_evaluate_now (se.expr, &se.pre);
1079
1080 /* Build something to compare with. */
1081 zero = gfc_build_const (TREE_TYPE (se.expr), integer_zero_node);
1082
1083 if (code->label1->value != code->label2->value)
1084 {
1085 /* If (cond < 0) take branch1 else take branch2.
1086 First build jumps to the COND .LT. 0 and the COND .EQ. 0 cases. */
1087 branch1 = build1_v (GOTO_EXPR, gfc_get_label_decl (code->label1));
1088 branch2 = build1_v (GOTO_EXPR, gfc_get_label_decl (code->label2));
1089
1090 if (code->label1->value != code->label3->value)
1091 tmp = fold_build2_loc (input_location, LT_EXPR, boolean_type_node,
1092 se.expr, zero);
1093 else
1094 tmp = fold_build2_loc (input_location, NE_EXPR, boolean_type_node,
1095 se.expr, zero);
1096
1097 branch1 = fold_build3_loc (input_location, COND_EXPR, void_type_node,
1098 tmp, branch1, branch2);
1099 }
1100 else
1101 branch1 = build1_v (GOTO_EXPR, gfc_get_label_decl (code->label1));
1102
1103 if (code->label1->value != code->label3->value
1104 && code->label2->value != code->label3->value)
1105 {
1106 /* if (cond <= 0) take branch1 else take branch2. */
1107 branch2 = build1_v (GOTO_EXPR, gfc_get_label_decl (code->label3));
1108 tmp = fold_build2_loc (input_location, LE_EXPR, boolean_type_node,
1109 se.expr, zero);
1110 branch1 = fold_build3_loc (input_location, COND_EXPR, void_type_node,
1111 tmp, branch1, branch2);
1112 }
1113
1114 /* Append the COND_EXPR to the evaluation of COND, and return. */
1115 gfc_add_expr_to_block (&se.pre, branch1);
1116 return gfc_finish_block (&se.pre);
1117 }
1118
1119
1120 /* Translate a CRITICAL block. */
1121 tree
1122 gfc_trans_critical (gfc_code *code)
1123 {
1124 stmtblock_t block;
1125 tree tmp, token = NULL_TREE;
1126
1127 gfc_start_block (&block);
1128
1129 if (flag_coarray == GFC_FCOARRAY_LIB)
1130 {
1131 token = gfc_get_symbol_decl (code->resolved_sym);
1132 token = GFC_TYPE_ARRAY_CAF_TOKEN (TREE_TYPE (token));
1133 tmp = build_call_expr_loc (input_location, gfor_fndecl_caf_lock, 7,
1134 token, integer_zero_node, integer_one_node,
1135 null_pointer_node, null_pointer_node,
1136 null_pointer_node, integer_zero_node);
1137 gfc_add_expr_to_block (&block, tmp);
1138 }
1139
1140 tmp = gfc_trans_code (code->block->next);
1141 gfc_add_expr_to_block (&block, tmp);
1142
1143 if (flag_coarray == GFC_FCOARRAY_LIB)
1144 {
1145 tmp = build_call_expr_loc (input_location, gfor_fndecl_caf_unlock, 6,
1146 token, integer_zero_node, integer_one_node,
1147 null_pointer_node, null_pointer_node,
1148 integer_zero_node);
1149 gfc_add_expr_to_block (&block, tmp);
1150 }
1151
1152
1153 return gfc_finish_block (&block);
1154 }
1155
1156
1157 /* Do proper initialization for ASSOCIATE names. */
1158
1159 static void
1160 trans_associate_var (gfc_symbol *sym, gfc_wrapped_block *block)
1161 {
1162 gfc_expr *e;
1163 tree tmp;
1164 bool class_target;
1165 bool unlimited;
1166 tree desc;
1167 tree offset;
1168 tree dim;
1169 int n;
1170
1171 gcc_assert (sym->assoc);
1172 e = sym->assoc->target;
1173
1174 class_target = (e->expr_type == EXPR_VARIABLE)
1175 && (gfc_is_class_scalar_expr (e)
1176 || gfc_is_class_array_ref (e, NULL));
1177
1178 unlimited = UNLIMITED_POLY (e);
1179
1180 /* Do a `pointer assignment' with updated descriptor (or assign descriptor
1181 to an array temporary) for arrays that either have unknown shape or are
1182 associated to a variable. */
1183 if (sym->attr.dimension && !class_target
1184 && (sym->as->type == AS_DEFERRED || sym->assoc->variable))
1185 {
1186 gfc_se se;
1187 tree desc;
1188 bool cst_array_ctor;
1189
1190 desc = sym->backend_decl;
1191 cst_array_ctor = e->expr_type == EXPR_ARRAY
1192 && gfc_constant_array_constructor_p (e->value.constructor);
1193
1194 /* If association is to an expression, evaluate it and create temporary.
1195 Otherwise, get descriptor of target for pointer assignment. */
1196 gfc_init_se (&se, NULL);
1197 if (sym->assoc->variable || cst_array_ctor)
1198 {
1199 se.direct_byref = 1;
1200 se.use_offset = 1;
1201 se.expr = desc;
1202 }
1203
1204 gfc_conv_expr_descriptor (&se, e);
1205
1206 /* If we didn't already do the pointer assignment, set associate-name
1207 descriptor to the one generated for the temporary. */
1208 if (!sym->assoc->variable && !cst_array_ctor)
1209 {
1210 int dim;
1211
1212 gfc_add_modify (&se.pre, desc, se.expr);
1213
1214 /* The generated descriptor has lower bound zero (as array
1215 temporary), shift bounds so we get lower bounds of 1. */
1216 for (dim = 0; dim < e->rank; ++dim)
1217 gfc_conv_shift_descriptor_lbound (&se.pre, desc,
1218 dim, gfc_index_one_node);
1219 }
1220
1221 /* If this is a subreference array pointer associate name use the
1222 associate variable element size for the value of 'span'. */
1223 if (sym->attr.subref_array_pointer)
1224 {
1225 gcc_assert (e->expr_type == EXPR_VARIABLE);
1226 tmp = e->symtree->n.sym->backend_decl;
1227 tmp = gfc_get_element_type (TREE_TYPE (tmp));
1228 tmp = fold_convert (gfc_array_index_type, size_in_bytes (tmp));
1229 gfc_add_modify (&se.pre, GFC_DECL_SPAN (desc), tmp);
1230 }
1231
1232 /* Done, register stuff as init / cleanup code. */
1233 gfc_add_init_cleanup (block, gfc_finish_block (&se.pre),
1234 gfc_finish_block (&se.post));
1235 }
1236
1237 /* Temporaries, arising from TYPE IS, just need the descriptor of class
1238 arrays to be assigned directly. */
1239 else if (class_target && sym->attr.dimension
1240 && (sym->ts.type == BT_DERIVED || unlimited))
1241 {
1242 gfc_se se;
1243
1244 gfc_init_se (&se, NULL);
1245 se.descriptor_only = 1;
1246 gfc_conv_expr (&se, e);
1247
1248 gcc_assert (GFC_DESCRIPTOR_TYPE_P (TREE_TYPE (se.expr)));
1249 gcc_assert (GFC_DESCRIPTOR_TYPE_P (TREE_TYPE (sym->backend_decl)));
1250
1251 gfc_add_modify (&se.pre, sym->backend_decl, se.expr);
1252
1253 if (unlimited)
1254 {
1255 /* Recover the dtype, which has been overwritten by the
1256 assignment from an unlimited polymorphic object. */
1257 tmp = gfc_conv_descriptor_dtype (sym->backend_decl);
1258 gfc_add_modify (&se.pre, tmp,
1259 gfc_get_dtype (TREE_TYPE (sym->backend_decl)));
1260 }
1261
1262 gfc_add_init_cleanup (block, gfc_finish_block (&se.pre),
1263 gfc_finish_block (&se.post));
1264 }
1265
1266 /* Do a scalar pointer assignment; this is for scalar variable targets. */
1267 else if (gfc_is_associate_pointer (sym))
1268 {
1269 gfc_se se;
1270
1271 gcc_assert (!sym->attr.dimension);
1272
1273 gfc_init_se (&se, NULL);
1274
1275 /* Class associate-names come this way because they are
1276 unconditionally associate pointers and the symbol is scalar. */
1277 if (sym->ts.type == BT_CLASS && CLASS_DATA (sym)->attr.dimension)
1278 {
1279 /* For a class array we need a descriptor for the selector. */
1280 gfc_conv_expr_descriptor (&se, e);
1281
1282 /* Obtain a temporary class container for the result. */
1283 gfc_conv_class_to_class (&se, e, sym->ts, false, true, false, false);
1284 se.expr = build_fold_indirect_ref_loc (input_location, se.expr);
1285
1286 /* Set the offset. */
1287 desc = gfc_class_data_get (se.expr);
1288 offset = gfc_index_zero_node;
1289 for (n = 0; n < e->rank; n++)
1290 {
1291 dim = gfc_rank_cst[n];
1292 tmp = fold_build2_loc (input_location, MULT_EXPR,
1293 gfc_array_index_type,
1294 gfc_conv_descriptor_stride_get (desc, dim),
1295 gfc_conv_descriptor_lbound_get (desc, dim));
1296 offset = fold_build2_loc (input_location, MINUS_EXPR,
1297 gfc_array_index_type,
1298 offset, tmp);
1299 }
1300 gfc_conv_descriptor_offset_set (&se.pre, desc, offset);
1301 }
1302 else if (sym->ts.type == BT_CLASS && e->ts.type == BT_CLASS
1303 && CLASS_DATA (e)->attr.dimension)
1304 {
1305 /* This is bound to be a class array element. */
1306 gfc_conv_expr_reference (&se, e);
1307 /* Get the _vptr component of the class object. */
1308 tmp = gfc_get_vptr_from_expr (se.expr);
1309 /* Obtain a temporary class container for the result. */
1310 gfc_conv_derived_to_class (&se, e, sym->ts, tmp, false, false);
1311 se.expr = build_fold_indirect_ref_loc (input_location, se.expr);
1312 }
1313 else
1314 gfc_conv_expr (&se, e);
1315
1316 tmp = TREE_TYPE (sym->backend_decl);
1317 tmp = gfc_build_addr_expr (tmp, se.expr);
1318 gfc_add_modify (&se.pre, sym->backend_decl, tmp);
1319
1320 gfc_add_init_cleanup (block, gfc_finish_block (&se.pre),
1321 gfc_finish_block (&se.post));
1322 }
1323
1324 /* Do a simple assignment. This is for scalar expressions, where we
1325 can simply use expression assignment. */
1326 else
1327 {
1328 gfc_expr *lhs;
1329
1330 lhs = gfc_lval_expr_from_sym (sym);
1331 tmp = gfc_trans_assignment (lhs, e, false, true);
1332 gfc_add_init_cleanup (block, tmp, NULL_TREE);
1333 }
1334
1335 /* Set the stringlength from the vtable size. */
1336 if (sym->ts.type == BT_CHARACTER && sym->attr.select_type_temporary)
1337 {
1338 tree charlen;
1339 gfc_se se;
1340 gfc_init_se (&se, NULL);
1341 gcc_assert (UNLIMITED_POLY (e->symtree->n.sym));
1342 tmp = gfc_get_symbol_decl (e->symtree->n.sym);
1343 tmp = gfc_vtable_size_get (tmp);
1344 gfc_get_symbol_decl (sym);
1345 charlen = sym->ts.u.cl->backend_decl;
1346 gfc_add_modify (&se.pre, charlen,
1347 fold_convert (TREE_TYPE (charlen), tmp));
1348 gfc_add_init_cleanup (block, gfc_finish_block (&se.pre),
1349 gfc_finish_block (&se.post));
1350 }
1351 }
1352
1353
1354 /* Translate a BLOCK construct. This is basically what we would do for a
1355 procedure body. */
1356
1357 tree
1358 gfc_trans_block_construct (gfc_code* code)
1359 {
1360 gfc_namespace* ns;
1361 gfc_symbol* sym;
1362 gfc_wrapped_block block;
1363 tree exit_label;
1364 stmtblock_t body;
1365 gfc_association_list *ass;
1366
1367 ns = code->ext.block.ns;
1368 gcc_assert (ns);
1369 sym = ns->proc_name;
1370 gcc_assert (sym);
1371
1372 /* Process local variables. */
1373 gcc_assert (!sym->tlink);
1374 sym->tlink = sym;
1375 gfc_process_block_locals (ns);
1376
1377 /* Generate code including exit-label. */
1378 gfc_init_block (&body);
1379 exit_label = gfc_build_label_decl (NULL_TREE);
1380 code->exit_label = exit_label;
1381 gfc_add_expr_to_block (&body, gfc_trans_code (ns->code));
1382 gfc_add_expr_to_block (&body, build1_v (LABEL_EXPR, exit_label));
1383
1384 /* Finish everything. */
1385 gfc_start_wrapped_block (&block, gfc_finish_block (&body));
1386 gfc_trans_deferred_vars (sym, &block);
1387 for (ass = code->ext.block.assoc; ass; ass = ass->next)
1388 trans_associate_var (ass->st->n.sym, &block);
1389
1390 return gfc_finish_wrapped_block (&block);
1391 }
1392
1393
1394 /* Translate the simple DO construct. This is where the loop variable has
1395 integer type and step +-1. We can't use this in the general case
1396 because integer overflow and floating point errors could give incorrect
1397 results.
1398 We translate a do loop from:
1399
1400 DO dovar = from, to, step
1401 body
1402 END DO
1403
1404 to:
1405
1406 [Evaluate loop bounds and step]
1407 dovar = from;
1408 if ((step > 0) ? (dovar <= to) : (dovar >= to))
1409 {
1410 for (;;)
1411 {
1412 body;
1413 cycle_label:
1414 cond = (dovar == to);
1415 dovar += step;
1416 if (cond) goto end_label;
1417 }
1418 }
1419 end_label:
1420
1421 This helps the optimizers by avoiding the extra induction variable
1422 used in the general case. */
1423
1424 static tree
1425 gfc_trans_simple_do (gfc_code * code, stmtblock_t *pblock, tree dovar,
1426 tree from, tree to, tree step, tree exit_cond)
1427 {
1428 stmtblock_t body;
1429 tree type;
1430 tree cond;
1431 tree tmp;
1432 tree saved_dovar = NULL;
1433 tree cycle_label;
1434 tree exit_label;
1435 location_t loc;
1436
1437 type = TREE_TYPE (dovar);
1438
1439 loc = code->ext.iterator->start->where.lb->location;
1440
1441 /* Initialize the DO variable: dovar = from. */
1442 gfc_add_modify_loc (loc, pblock, dovar,
1443 fold_convert (TREE_TYPE (dovar), from));
1444
1445 /* Save value for do-tinkering checking. */
1446 if (gfc_option.rtcheck & GFC_RTCHECK_DO)
1447 {
1448 saved_dovar = gfc_create_var (type, ".saved_dovar");
1449 gfc_add_modify_loc (loc, pblock, saved_dovar, dovar);
1450 }
1451
1452 /* Cycle and exit statements are implemented with gotos. */
1453 cycle_label = gfc_build_label_decl (NULL_TREE);
1454 exit_label = gfc_build_label_decl (NULL_TREE);
1455
1456 /* Put the labels where they can be found later. See gfc_trans_do(). */
1457 code->cycle_label = cycle_label;
1458 code->exit_label = exit_label;
1459
1460 /* Loop body. */
1461 gfc_start_block (&body);
1462
1463 /* Main loop body. */
1464 tmp = gfc_trans_code_cond (code->block->next, exit_cond);
1465 gfc_add_expr_to_block (&body, tmp);
1466
1467 /* Label for cycle statements (if needed). */
1468 if (TREE_USED (cycle_label))
1469 {
1470 tmp = build1_v (LABEL_EXPR, cycle_label);
1471 gfc_add_expr_to_block (&body, tmp);
1472 }
1473
1474 /* Check whether someone has modified the loop variable. */
1475 if (gfc_option.rtcheck & GFC_RTCHECK_DO)
1476 {
1477 tmp = fold_build2_loc (loc, NE_EXPR, boolean_type_node,
1478 dovar, saved_dovar);
1479 gfc_trans_runtime_check (true, false, tmp, &body, &code->loc,
1480 "Loop variable has been modified");
1481 }
1482
1483 /* Exit the loop if there is an I/O result condition or error. */
1484 if (exit_cond)
1485 {
1486 tmp = build1_v (GOTO_EXPR, exit_label);
1487 tmp = fold_build3_loc (loc, COND_EXPR, void_type_node,
1488 exit_cond, tmp,
1489 build_empty_stmt (loc));
1490 gfc_add_expr_to_block (&body, tmp);
1491 }
1492
1493 /* Evaluate the loop condition. */
1494 cond = fold_build2_loc (loc, EQ_EXPR, boolean_type_node, dovar,
1495 to);
1496 cond = gfc_evaluate_now_loc (loc, cond, &body);
1497
1498 /* Increment the loop variable. */
1499 tmp = fold_build2_loc (loc, PLUS_EXPR, type, dovar, step);
1500 gfc_add_modify_loc (loc, &body, dovar, tmp);
1501
1502 if (gfc_option.rtcheck & GFC_RTCHECK_DO)
1503 gfc_add_modify_loc (loc, &body, saved_dovar, dovar);
1504
1505 /* The loop exit. */
1506 tmp = fold_build1_loc (loc, GOTO_EXPR, void_type_node, exit_label);
1507 TREE_USED (exit_label) = 1;
1508 tmp = fold_build3_loc (loc, COND_EXPR, void_type_node,
1509 cond, tmp, build_empty_stmt (loc));
1510 gfc_add_expr_to_block (&body, tmp);
1511
1512 /* Finish the loop body. */
1513 tmp = gfc_finish_block (&body);
1514 tmp = fold_build1_loc (loc, LOOP_EXPR, void_type_node, tmp);
1515
1516 /* Only execute the loop if the number of iterations is positive. */
1517 if (tree_int_cst_sgn (step) > 0)
1518 cond = fold_build2_loc (loc, LE_EXPR, boolean_type_node, dovar,
1519 to);
1520 else
1521 cond = fold_build2_loc (loc, GE_EXPR, boolean_type_node, dovar,
1522 to);
1523 tmp = fold_build3_loc (loc, COND_EXPR, void_type_node, cond, tmp,
1524 build_empty_stmt (loc));
1525 gfc_add_expr_to_block (pblock, tmp);
1526
1527 /* Add the exit label. */
1528 tmp = build1_v (LABEL_EXPR, exit_label);
1529 gfc_add_expr_to_block (pblock, tmp);
1530
1531 return gfc_finish_block (pblock);
1532 }
1533
1534 /* Translate the DO construct. This obviously is one of the most
1535 important ones to get right with any compiler, but especially
1536 so for Fortran.
1537
1538 We special case some loop forms as described in gfc_trans_simple_do.
1539 For other cases we implement them with a separate loop count,
1540 as described in the standard.
1541
1542 We translate a do loop from:
1543
1544 DO dovar = from, to, step
1545 body
1546 END DO
1547
1548 to:
1549
1550 [evaluate loop bounds and step]
1551 empty = (step > 0 ? to < from : to > from);
1552 countm1 = (to - from) / step;
1553 dovar = from;
1554 if (empty) goto exit_label;
1555 for (;;)
1556 {
1557 body;
1558 cycle_label:
1559 dovar += step
1560 countm1t = countm1;
1561 countm1--;
1562 if (countm1t == 0) goto exit_label;
1563 }
1564 exit_label:
1565
1566 countm1 is an unsigned integer. It is equal to the loop count minus one,
1567 because the loop count itself can overflow. */
1568
1569 tree
1570 gfc_trans_do (gfc_code * code, tree exit_cond)
1571 {
1572 gfc_se se;
1573 tree dovar;
1574 tree saved_dovar = NULL;
1575 tree from;
1576 tree to;
1577 tree step;
1578 tree countm1;
1579 tree type;
1580 tree utype;
1581 tree cond;
1582 tree cycle_label;
1583 tree exit_label;
1584 tree tmp;
1585 stmtblock_t block;
1586 stmtblock_t body;
1587 location_t loc;
1588
1589 gfc_start_block (&block);
1590
1591 loc = code->ext.iterator->start->where.lb->location;
1592
1593 /* Evaluate all the expressions in the iterator. */
1594 gfc_init_se (&se, NULL);
1595 gfc_conv_expr_lhs (&se, code->ext.iterator->var);
1596 gfc_add_block_to_block (&block, &se.pre);
1597 dovar = se.expr;
1598 type = TREE_TYPE (dovar);
1599
1600 gfc_init_se (&se, NULL);
1601 gfc_conv_expr_val (&se, code->ext.iterator->start);
1602 gfc_add_block_to_block (&block, &se.pre);
1603 from = gfc_evaluate_now (se.expr, &block);
1604
1605 gfc_init_se (&se, NULL);
1606 gfc_conv_expr_val (&se, code->ext.iterator->end);
1607 gfc_add_block_to_block (&block, &se.pre);
1608 to = gfc_evaluate_now (se.expr, &block);
1609
1610 gfc_init_se (&se, NULL);
1611 gfc_conv_expr_val (&se, code->ext.iterator->step);
1612 gfc_add_block_to_block (&block, &se.pre);
1613 step = gfc_evaluate_now (se.expr, &block);
1614
1615 if (gfc_option.rtcheck & GFC_RTCHECK_DO)
1616 {
1617 tmp = fold_build2_loc (input_location, EQ_EXPR, boolean_type_node, step,
1618 build_zero_cst (type));
1619 gfc_trans_runtime_check (true, false, tmp, &block, &code->loc,
1620 "DO step value is zero");
1621 }
1622
1623 /* Special case simple loops. */
1624 if (TREE_CODE (type) == INTEGER_TYPE
1625 && (integer_onep (step)
1626 || tree_int_cst_equal (step, integer_minus_one_node)))
1627 return gfc_trans_simple_do (code, &block, dovar, from, to, step, exit_cond);
1628
1629
1630 if (TREE_CODE (type) == INTEGER_TYPE)
1631 utype = unsigned_type_for (type);
1632 else
1633 utype = unsigned_type_for (gfc_array_index_type);
1634 countm1 = gfc_create_var (utype, "countm1");
1635
1636 /* Cycle and exit statements are implemented with gotos. */
1637 cycle_label = gfc_build_label_decl (NULL_TREE);
1638 exit_label = gfc_build_label_decl (NULL_TREE);
1639 TREE_USED (exit_label) = 1;
1640
1641 /* Put these labels where they can be found later. */
1642 code->cycle_label = cycle_label;
1643 code->exit_label = exit_label;
1644
1645 /* Initialize the DO variable: dovar = from. */
1646 gfc_add_modify (&block, dovar, from);
1647
1648 /* Save value for do-tinkering checking. */
1649 if (gfc_option.rtcheck & GFC_RTCHECK_DO)
1650 {
1651 saved_dovar = gfc_create_var (type, ".saved_dovar");
1652 gfc_add_modify_loc (loc, &block, saved_dovar, dovar);
1653 }
1654
1655 /* Initialize loop count and jump to exit label if the loop is empty.
1656 This code is executed before we enter the loop body. We generate:
1657 if (step > 0)
1658 {
1659 countm1 = (to - from) / step;
1660 if (to < from)
1661 goto exit_label;
1662 }
1663 else
1664 {
1665 countm1 = (from - to) / -step;
1666 if (to > from)
1667 goto exit_label;
1668 }
1669 */
1670
1671 if (TREE_CODE (type) == INTEGER_TYPE)
1672 {
1673 tree pos, neg, tou, fromu, stepu, tmp2;
1674
1675 /* The distance from FROM to TO cannot always be represented in a signed
1676 type, thus use unsigned arithmetic, also to avoid any undefined
1677 overflow issues. */
1678 tou = fold_convert (utype, to);
1679 fromu = fold_convert (utype, from);
1680 stepu = fold_convert (utype, step);
1681
1682 /* For a positive step, when to < from, exit, otherwise compute
1683 countm1 = ((unsigned)to - (unsigned)from) / (unsigned)step */
1684 tmp = fold_build2_loc (loc, LT_EXPR, boolean_type_node, to, from);
1685 tmp2 = fold_build2_loc (loc, TRUNC_DIV_EXPR, utype,
1686 fold_build2_loc (loc, MINUS_EXPR, utype,
1687 tou, fromu),
1688 stepu);
1689 pos = build2 (COMPOUND_EXPR, void_type_node,
1690 fold_build2 (MODIFY_EXPR, void_type_node,
1691 countm1, tmp2),
1692 build3_loc (loc, COND_EXPR, void_type_node, tmp,
1693 build1_loc (loc, GOTO_EXPR, void_type_node,
1694 exit_label), NULL_TREE));
1695
1696 /* For a negative step, when to > from, exit, otherwise compute
1697 countm1 = ((unsigned)from - (unsigned)to) / -(unsigned)step */
1698 tmp = fold_build2_loc (loc, GT_EXPR, boolean_type_node, to, from);
1699 tmp2 = fold_build2_loc (loc, TRUNC_DIV_EXPR, utype,
1700 fold_build2_loc (loc, MINUS_EXPR, utype,
1701 fromu, tou),
1702 fold_build1_loc (loc, NEGATE_EXPR, utype, stepu));
1703 neg = build2 (COMPOUND_EXPR, void_type_node,
1704 fold_build2 (MODIFY_EXPR, void_type_node,
1705 countm1, tmp2),
1706 build3_loc (loc, COND_EXPR, void_type_node, tmp,
1707 build1_loc (loc, GOTO_EXPR, void_type_node,
1708 exit_label), NULL_TREE));
1709
1710 tmp = fold_build2_loc (loc, LT_EXPR, boolean_type_node, step,
1711 build_int_cst (TREE_TYPE (step), 0));
1712 tmp = fold_build3_loc (loc, COND_EXPR, void_type_node, tmp, neg, pos);
1713
1714 gfc_add_expr_to_block (&block, tmp);
1715 }
1716 else
1717 {
1718 tree pos_step;
1719
1720 /* TODO: We could use the same width as the real type.
1721 This would probably cause more problems than it solves
1722 when we implement "long double" types. */
1723
1724 tmp = fold_build2_loc (loc, MINUS_EXPR, type, to, from);
1725 tmp = fold_build2_loc (loc, RDIV_EXPR, type, tmp, step);
1726 tmp = fold_build1_loc (loc, FIX_TRUNC_EXPR, utype, tmp);
1727 gfc_add_modify (&block, countm1, tmp);
1728
1729 /* We need a special check for empty loops:
1730 empty = (step > 0 ? to < from : to > from); */
1731 pos_step = fold_build2_loc (loc, GT_EXPR, boolean_type_node, step,
1732 build_zero_cst (type));
1733 tmp = fold_build3_loc (loc, COND_EXPR, boolean_type_node, pos_step,
1734 fold_build2_loc (loc, LT_EXPR,
1735 boolean_type_node, to, from),
1736 fold_build2_loc (loc, GT_EXPR,
1737 boolean_type_node, to, from));
1738 /* If the loop is empty, go directly to the exit label. */
1739 tmp = fold_build3_loc (loc, COND_EXPR, void_type_node, tmp,
1740 build1_v (GOTO_EXPR, exit_label),
1741 build_empty_stmt (input_location));
1742 gfc_add_expr_to_block (&block, tmp);
1743 }
1744
1745 /* Loop body. */
1746 gfc_start_block (&body);
1747
1748 /* Main loop body. */
1749 tmp = gfc_trans_code_cond (code->block->next, exit_cond);
1750 gfc_add_expr_to_block (&body, tmp);
1751
1752 /* Label for cycle statements (if needed). */
1753 if (TREE_USED (cycle_label))
1754 {
1755 tmp = build1_v (LABEL_EXPR, cycle_label);
1756 gfc_add_expr_to_block (&body, tmp);
1757 }
1758
1759 /* Check whether someone has modified the loop variable. */
1760 if (gfc_option.rtcheck & GFC_RTCHECK_DO)
1761 {
1762 tmp = fold_build2_loc (loc, NE_EXPR, boolean_type_node, dovar,
1763 saved_dovar);
1764 gfc_trans_runtime_check (true, false, tmp, &body, &code->loc,
1765 "Loop variable has been modified");
1766 }
1767
1768 /* Exit the loop if there is an I/O result condition or error. */
1769 if (exit_cond)
1770 {
1771 tmp = build1_v (GOTO_EXPR, exit_label);
1772 tmp = fold_build3_loc (loc, COND_EXPR, void_type_node,
1773 exit_cond, tmp,
1774 build_empty_stmt (input_location));
1775 gfc_add_expr_to_block (&body, tmp);
1776 }
1777
1778 /* Increment the loop variable. */
1779 tmp = fold_build2_loc (loc, PLUS_EXPR, type, dovar, step);
1780 gfc_add_modify_loc (loc, &body, dovar, tmp);
1781
1782 if (gfc_option.rtcheck & GFC_RTCHECK_DO)
1783 gfc_add_modify_loc (loc, &body, saved_dovar, dovar);
1784
1785 /* Initialize countm1t. */
1786 tree countm1t = gfc_create_var (utype, "countm1t");
1787 gfc_add_modify_loc (loc, &body, countm1t, countm1);
1788
1789 /* Decrement the loop count. */
1790 tmp = fold_build2_loc (loc, MINUS_EXPR, utype, countm1,
1791 build_int_cst (utype, 1));
1792 gfc_add_modify_loc (loc, &body, countm1, tmp);
1793
1794 /* End with the loop condition. Loop until countm1t == 0. */
1795 cond = fold_build2_loc (loc, EQ_EXPR, boolean_type_node, countm1t,
1796 build_int_cst (utype, 0));
1797 tmp = fold_build1_loc (loc, GOTO_EXPR, void_type_node, exit_label);
1798 tmp = fold_build3_loc (loc, COND_EXPR, void_type_node,
1799 cond, tmp, build_empty_stmt (loc));
1800 gfc_add_expr_to_block (&body, tmp);
1801
1802 /* End of loop body. */
1803 tmp = gfc_finish_block (&body);
1804
1805 /* The for loop itself. */
1806 tmp = fold_build1_loc (loc, LOOP_EXPR, void_type_node, tmp);
1807 gfc_add_expr_to_block (&block, tmp);
1808
1809 /* Add the exit label. */
1810 tmp = build1_v (LABEL_EXPR, exit_label);
1811 gfc_add_expr_to_block (&block, tmp);
1812
1813 return gfc_finish_block (&block);
1814 }
1815
1816
1817 /* Translate the DO WHILE construct.
1818
1819 We translate
1820
1821 DO WHILE (cond)
1822 body
1823 END DO
1824
1825 to:
1826
1827 for ( ; ; )
1828 {
1829 pre_cond;
1830 if (! cond) goto exit_label;
1831 body;
1832 cycle_label:
1833 }
1834 exit_label:
1835
1836 Because the evaluation of the exit condition `cond' may have side
1837 effects, we can't do much for empty loop bodies. The backend optimizers
1838 should be smart enough to eliminate any dead loops. */
1839
1840 tree
1841 gfc_trans_do_while (gfc_code * code)
1842 {
1843 gfc_se cond;
1844 tree tmp;
1845 tree cycle_label;
1846 tree exit_label;
1847 stmtblock_t block;
1848
1849 /* Everything we build here is part of the loop body. */
1850 gfc_start_block (&block);
1851
1852 /* Cycle and exit statements are implemented with gotos. */
1853 cycle_label = gfc_build_label_decl (NULL_TREE);
1854 exit_label = gfc_build_label_decl (NULL_TREE);
1855
1856 /* Put the labels where they can be found later. See gfc_trans_do(). */
1857 code->cycle_label = cycle_label;
1858 code->exit_label = exit_label;
1859
1860 /* Create a GIMPLE version of the exit condition. */
1861 gfc_init_se (&cond, NULL);
1862 gfc_conv_expr_val (&cond, code->expr1);
1863 gfc_add_block_to_block (&block, &cond.pre);
1864 cond.expr = fold_build1_loc (code->expr1->where.lb->location,
1865 TRUTH_NOT_EXPR, TREE_TYPE (cond.expr), cond.expr);
1866
1867 /* Build "IF (! cond) GOTO exit_label". */
1868 tmp = build1_v (GOTO_EXPR, exit_label);
1869 TREE_USED (exit_label) = 1;
1870 tmp = fold_build3_loc (code->expr1->where.lb->location, COND_EXPR,
1871 void_type_node, cond.expr, tmp,
1872 build_empty_stmt (code->expr1->where.lb->location));
1873 gfc_add_expr_to_block (&block, tmp);
1874
1875 /* The main body of the loop. */
1876 tmp = gfc_trans_code (code->block->next);
1877 gfc_add_expr_to_block (&block, tmp);
1878
1879 /* Label for cycle statements (if needed). */
1880 if (TREE_USED (cycle_label))
1881 {
1882 tmp = build1_v (LABEL_EXPR, cycle_label);
1883 gfc_add_expr_to_block (&block, tmp);
1884 }
1885
1886 /* End of loop body. */
1887 tmp = gfc_finish_block (&block);
1888
1889 gfc_init_block (&block);
1890 /* Build the loop. */
1891 tmp = fold_build1_loc (code->expr1->where.lb->location, LOOP_EXPR,
1892 void_type_node, tmp);
1893 gfc_add_expr_to_block (&block, tmp);
1894
1895 /* Add the exit label. */
1896 tmp = build1_v (LABEL_EXPR, exit_label);
1897 gfc_add_expr_to_block (&block, tmp);
1898
1899 return gfc_finish_block (&block);
1900 }
1901
1902
1903 /* Translate the SELECT CASE construct for INTEGER case expressions,
1904 without killing all potential optimizations. The problem is that
1905 Fortran allows unbounded cases, but the back-end does not, so we
1906 need to intercept those before we enter the equivalent SWITCH_EXPR
1907 we can build.
1908
1909 For example, we translate this,
1910
1911 SELECT CASE (expr)
1912 CASE (:100,101,105:115)
1913 block_1
1914 CASE (190:199,200:)
1915 block_2
1916 CASE (300)
1917 block_3
1918 CASE DEFAULT
1919 block_4
1920 END SELECT
1921
1922 to the GENERIC equivalent,
1923
1924 switch (expr)
1925 {
1926 case (minimum value for typeof(expr)) ... 100:
1927 case 101:
1928 case 105 ... 115:
1929 block_1;
1930 goto end_label;
1931
1932 case 200 ... (maximum value for typeof(expr)):
1933 case 190 ... 199:
1934 block_2;
1935 goto end_label;
1936
1937 case 300:
1938 block_3;
1939 goto end_label;
1940
1941 default:
1942 block_4;
1943 goto end_label;
1944 }
1945
1946 end_label: */
1947
1948 static tree
1949 gfc_trans_integer_select (gfc_code * code)
1950 {
1951 gfc_code *c;
1952 gfc_case *cp;
1953 tree end_label;
1954 tree tmp;
1955 gfc_se se;
1956 stmtblock_t block;
1957 stmtblock_t body;
1958
1959 gfc_start_block (&block);
1960
1961 /* Calculate the switch expression. */
1962 gfc_init_se (&se, NULL);
1963 gfc_conv_expr_val (&se, code->expr1);
1964 gfc_add_block_to_block (&block, &se.pre);
1965
1966 end_label = gfc_build_label_decl (NULL_TREE);
1967
1968 gfc_init_block (&body);
1969
1970 for (c = code->block; c; c = c->block)
1971 {
1972 for (cp = c->ext.block.case_list; cp; cp = cp->next)
1973 {
1974 tree low, high;
1975 tree label;
1976
1977 /* Assume it's the default case. */
1978 low = high = NULL_TREE;
1979
1980 if (cp->low)
1981 {
1982 low = gfc_conv_mpz_to_tree (cp->low->value.integer,
1983 cp->low->ts.kind);
1984
1985 /* If there's only a lower bound, set the high bound to the
1986 maximum value of the case expression. */
1987 if (!cp->high)
1988 high = TYPE_MAX_VALUE (TREE_TYPE (se.expr));
1989 }
1990
1991 if (cp->high)
1992 {
1993 /* Three cases are possible here:
1994
1995 1) There is no lower bound, e.g. CASE (:N).
1996 2) There is a lower bound .NE. high bound, that is
1997 a case range, e.g. CASE (N:M) where M>N (we make
1998 sure that M>N during type resolution).
1999 3) There is a lower bound, and it has the same value
2000 as the high bound, e.g. CASE (N:N). This is our
2001 internal representation of CASE(N).
2002
2003 In the first and second case, we need to set a value for
2004 high. In the third case, we don't, because the GCC middle
2005 end represents a single case value by just letting high be
2006 a NULL_TREE. We can't do that here, because high must carry
2007 the (possibly unbounded) upper end of the range. */
2008
2009 if (!cp->low
2010 || (cp->low
2011 && mpz_cmp (cp->low->value.integer,
2012 cp->high->value.integer) != 0))
2013 high = gfc_conv_mpz_to_tree (cp->high->value.integer,
2014 cp->high->ts.kind);
2015
2016 /* Unbounded case. */
2017 if (!cp->low)
2018 low = TYPE_MIN_VALUE (TREE_TYPE (se.expr));
2019 }
2020
2021 /* Build a label. */
2022 label = gfc_build_label_decl (NULL_TREE);
2023
2024 /* Add this case label.
2025 The 'label' operand is required by the GCC back end's build_case_label. */
2026 tmp = build_case_label (low, high, label);
2027 gfc_add_expr_to_block (&body, tmp);
2028 }
2029
2030 /* Add the statements for this case. */
2031 tmp = gfc_trans_code (c->next);
2032 gfc_add_expr_to_block (&body, tmp);
2033
2034 /* Break to the end of the construct. */
2035 tmp = build1_v (GOTO_EXPR, end_label);
2036 gfc_add_expr_to_block (&body, tmp);
2037 }
2038
2039 tmp = gfc_finish_block (&body);
2040 tmp = fold_build3_loc (input_location, SWITCH_EXPR, NULL_TREE,
2041 se.expr, tmp, NULL_TREE);
2042 gfc_add_expr_to_block (&block, tmp);
2043
2044 tmp = build1_v (LABEL_EXPR, end_label);
2045 gfc_add_expr_to_block (&block, tmp);
2046
2047 return gfc_finish_block (&block);
2048 }
2049
2050
2051 /* Translate the SELECT CASE construct for LOGICAL case expressions.
2052
2053 There are only two cases possible here, even though the standard
2054 does allow three cases in a LOGICAL SELECT CASE construct: .TRUE.,
2055 .FALSE., and DEFAULT.
2056
2057 We never generate more than two blocks here. Instead, we always
2058 try to eliminate the DEFAULT case. This way, we can translate this
2059 kind of SELECT construct to a simple
2060
2061 if {} else {};
2062
2063 expression in GENERIC. */
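
/* For example (illustrative only), the construct

     SELECT CASE (flag)
     CASE (.TRUE.)
       block_t
     CASE DEFAULT
       block_d
     END SELECT

   is translated as if it had been written
   IF (flag) THEN; block_t; ELSE; block_d; END IF,
   i.e. a single COND_EXPR whose condition is the evaluated selector.  */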
2064
2065 static tree
2066 gfc_trans_logical_select (gfc_code * code)
2067 {
2068 gfc_code *c;
2069 gfc_code *t, *f, *d;
2070 gfc_case *cp;
2071 gfc_se se;
2072 stmtblock_t block;
2073
2074 /* Assume we don't have any cases at all. */
2075 t = f = d = NULL;
2076
2077 /* Now see which ones we actually do have. We can have at most two
2078 cases in a single case list: one for .TRUE. and one for .FALSE.
2079 The default case is always separate. If the cases for .TRUE. and
2080 .FALSE. are in the same case list, the block for that case list
2081 is always executed, and we don't generate a COND_EXPR. */
2082 for (c = code->block; c; c = c->block)
2083 {
2084 for (cp = c->ext.block.case_list; cp; cp = cp->next)
2085 {
2086 if (cp->low)
2087 {
2088 if (cp->low->value.logical == 0) /* .FALSE. */
2089 f = c;
2090 else /* if (cp->low->value.logical != 0), thus .TRUE. */
2091 t = c;
2092 }
2093 else
2094 d = c;
2095 }
2096 }
2097
2098 /* Start a new block. */
2099 gfc_start_block (&block);
2100
2101 /* Calculate the switch expression. We always need to do this
2102 because it may have side effects. */
2103 gfc_init_se (&se, NULL);
2104 gfc_conv_expr_val (&se, code->expr1);
2105 gfc_add_block_to_block (&block, &se.pre);
2106
2107 if (t == f && t != NULL)
2108 {
2109 /* Cases for .TRUE. and .FALSE. are in the same block. Just
2110 translate the code for these cases, append it to the current
2111 block. */
2112 gfc_add_expr_to_block (&block, gfc_trans_code (t->next));
2113 }
2114 else
2115 {
2116 tree true_tree, false_tree, stmt;
2117
2118 true_tree = build_empty_stmt (input_location);
2119 false_tree = build_empty_stmt (input_location);
2120
2121 /* If we have a case for .TRUE. and for .FALSE., discard the default case.
2122 Otherwise, if .TRUE. or .FALSE. is missing and there is a default case,
2123 make the missing case the default case. */
2124 if (t != NULL && f != NULL)
2125 d = NULL;
2126 else if (d != NULL)
2127 {
2128 if (t == NULL)
2129 t = d;
2130 else
2131 f = d;
2132 }
2133
2134 /* Translate the code for each of these blocks, and append it to
2135 the current block. */
2136 if (t != NULL)
2137 true_tree = gfc_trans_code (t->next);
2138
2139 if (f != NULL)
2140 false_tree = gfc_trans_code (f->next);
2141
2142 stmt = fold_build3_loc (input_location, COND_EXPR, void_type_node,
2143 se.expr, true_tree, false_tree);
2144 gfc_add_expr_to_block (&block, stmt);
2145 }
2146
2147 return gfc_finish_block (&block);
2148 }
2149
2150
2151 /* The jump table types are stored in static variables to avoid
2152 constructing them from scratch every single time. */
2153 static GTY(()) tree select_struct[2];
2154
2155 /* Translate the SELECT CASE construct for CHARACTER case expressions.
2156 Instead of generating compares and jumps, it is far simpler to
2157 generate a data structure describing the cases in order and call a
2158 library subroutine that locates the right case.
2159 This is particularly true because this is the only case where we
2160 might have to dispose of a temporary.
2161 The library subroutine returns the number of the selected case entry,
2162 which is then used as the controlling value of a SWITCH_EXPR. */
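
/* Each jump table entry describes one case: the low and high bound strings
   with their lengths (a null string pointer with zero length marks an
   unbounded side; both sides null mark the default case) and the integer
   case number to dispatch on when that entry is selected.  */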
2163
2164 static tree
2165 gfc_trans_character_select (gfc_code *code)
2166 {
2167 tree init, end_label, tmp, type, case_num, label, fndecl;
2168 stmtblock_t block, body;
2169 gfc_case *cp, *d;
2170 gfc_code *c;
2171 gfc_se se, expr1se;
2172 int n, k;
2173 vec<constructor_elt, va_gc> *inits = NULL;
2174
2175 tree pchartype = gfc_get_pchar_type (code->expr1->ts.kind);
2176
2177 /* The jump table types are stored in static variables to avoid
2178 constructing them from scratch every single time. */
2179 static tree ss_string1[2], ss_string1_len[2];
2180 static tree ss_string2[2], ss_string2_len[2];
2181 static tree ss_target[2];
2182
2183 cp = code->block->ext.block.case_list;
2184 while (cp->left != NULL)
2185 cp = cp->left;
2186
2187 /* Generate the body. */
2188 gfc_start_block (&block);
2189 gfc_init_se (&expr1se, NULL);
2190 gfc_conv_expr_reference (&expr1se, code->expr1);
2191
2192 gfc_add_block_to_block (&block, &expr1se.pre);
2193
2194 end_label = gfc_build_label_decl (NULL_TREE);
2195
2196 gfc_init_block (&body);
2197
2198 /* Attempt to optimize length 1 selects. */
2199 if (integer_onep (expr1se.string_length))
2200 {
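      /* A case bound longer than one character can generally only match the
	 length-1 selector if everything past its first character compares
	 as a blank (Fortran pads the shorter operand with blanks).  Scan
	 the bounds and fall back to the library call below whenever they
	 cannot safely be reduced to single-character comparisons.  */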
2201 for (d = cp; d; d = d->right)
2202 {
2203 int i;
2204 if (d->low)
2205 {
2206 gcc_assert (d->low->expr_type == EXPR_CONSTANT
2207 && d->low->ts.type == BT_CHARACTER);
2208 if (d->low->value.character.length > 1)
2209 {
2210 for (i = 1; i < d->low->value.character.length; i++)
2211 if (d->low->value.character.string[i] != ' ')
2212 break;
2213 if (i != d->low->value.character.length)
2214 {
2215 if (optimize && d->high && i == 1)
2216 {
2217 gcc_assert (d->high->expr_type == EXPR_CONSTANT
2218 && d->high->ts.type == BT_CHARACTER);
2219 if (d->high->value.character.length > 1
2220 && (d->low->value.character.string[0]
2221 == d->high->value.character.string[0])
2222 && d->high->value.character.string[1] != ' '
2223 && ((d->low->value.character.string[1] < ' ')
2224 == (d->high->value.character.string[1]
2225 < ' ')))
2226 continue;
2227 }
2228 break;
2229 }
2230 }
2231 }
2232 if (d->high)
2233 {
2234 gcc_assert (d->high->expr_type == EXPR_CONSTANT
2235 && d->high->ts.type == BT_CHARACTER);
2236 if (d->high->value.character.length > 1)
2237 {
2238 for (i = 1; i < d->high->value.character.length; i++)
2239 if (d->high->value.character.string[i] != ' ')
2240 break;
2241 if (i != d->high->value.character.length)
2242 break;
2243 }
2244 }
2245 }
2246 if (d == NULL)
2247 {
2248 tree ctype = gfc_get_char_type (code->expr1->ts.kind);
2249
2250 for (c = code->block; c; c = c->block)
2251 {
2252 for (cp = c->ext.block.case_list; cp; cp = cp->next)
2253 {
2254 tree low, high;
2255 tree label;
2256 gfc_char_t r;
2257
2258 /* Assume it's the default case. */
2259 low = high = NULL_TREE;
2260
2261 if (cp->low)
2262 {
2263 /* CASE ('ab') or CASE ('ab':'az') will never match
2264 any length 1 character. */
2265 if (cp->low->value.character.length > 1
2266 && cp->low->value.character.string[1] != ' ')
2267 continue;
2268
2269 if (cp->low->value.character.length > 0)
2270 r = cp->low->value.character.string[0];
2271 else
2272 r = ' ';
2273 low = build_int_cst (ctype, r);
2274
2275 /* If there's only a lower bound, set the high bound
2276 to the maximum value of the case expression. */
2277 if (!cp->high)
2278 high = TYPE_MAX_VALUE (ctype);
2279 }
2280
2281 if (cp->high)
2282 {
2283 if (!cp->low
2284 || (cp->low->value.character.string[0]
2285 != cp->high->value.character.string[0]))
2286 {
2287 if (cp->high->value.character.length > 0)
2288 r = cp->high->value.character.string[0];
2289 else
2290 r = ' ';
2291 high = build_int_cst (ctype, r);
2292 }
2293
2294 /* Unbounded case. */
2295 if (!cp->low)
2296 low = TYPE_MIN_VALUE (ctype);
2297 }
2298
2299 /* Build a label. */
2300 label = gfc_build_label_decl (NULL_TREE);
2301
2302 /* Add this case label.
2303 The 'label' operand is required by the GCC back end's build_case_label. */
2304 tmp = build_case_label (low, high, label);
2305 gfc_add_expr_to_block (&body, tmp);
2306 }
2307
2308 /* Add the statements for this case. */
2309 tmp = gfc_trans_code (c->next);
2310 gfc_add_expr_to_block (&body, tmp);
2311
2312 /* Break to the end of the construct. */
2313 tmp = build1_v (GOTO_EXPR, end_label);
2314 gfc_add_expr_to_block (&body, tmp);
2315 }
2316
2317 tmp = gfc_string_to_single_character (expr1se.string_length,
2318 expr1se.expr,
2319 code->expr1->ts.kind);
2320 case_num = gfc_create_var (ctype, "case_num");
2321 gfc_add_modify (&block, case_num, tmp);
2322
2323 gfc_add_block_to_block (&block, &expr1se.post);
2324
2325 tmp = gfc_finish_block (&body);
2326 tmp = fold_build3_loc (input_location, SWITCH_EXPR, NULL_TREE,
2327 case_num, tmp, NULL_TREE);
2328 gfc_add_expr_to_block (&block, tmp);
2329
2330 tmp = build1_v (LABEL_EXPR, end_label);
2331 gfc_add_expr_to_block (&block, tmp);
2332
2333 return gfc_finish_block (&block);
2334 }
2335 }
2336
2337 if (code->expr1->ts.kind == 1)
2338 k = 0;
2339 else if (code->expr1->ts.kind == 4)
2340 k = 1;
2341 else
2342 gcc_unreachable ();
2343
2344 if (select_struct[k] == NULL)
2345 {
2346 tree *chain = NULL;
2347 select_struct[k] = make_node (RECORD_TYPE);
2348
2349 if (code->expr1->ts.kind == 1)
2350 TYPE_NAME (select_struct[k]) = get_identifier ("_jump_struct_char1");
2351 else if (code->expr1->ts.kind == 4)
2352 TYPE_NAME (select_struct[k]) = get_identifier ("_jump_struct_char4");
2353 else
2354 gcc_unreachable ();
2355
2356 #undef ADD_FIELD
2357 #define ADD_FIELD(NAME, TYPE) \
2358 ss_##NAME[k] = gfc_add_field_to_struct (select_struct[k], \
2359 get_identifier (stringize(NAME)), \
2360 TYPE, \
2361 &chain)
2362
2363 ADD_FIELD (string1, pchartype);
2364 ADD_FIELD (string1_len, gfc_charlen_type_node);
2365
2366 ADD_FIELD (string2, pchartype);
2367 ADD_FIELD (string2_len, gfc_charlen_type_node);
2368
2369 ADD_FIELD (target, integer_type_node);
2370 #undef ADD_FIELD
2371
2372 gfc_finish_type (select_struct[k]);
2373 }
2374
2375 n = 0;
2376 for (d = cp; d; d = d->right)
2377 d->n = n++;
2378
2379 for (c = code->block; c; c = c->block)
2380 {
2381 for (d = c->ext.block.case_list; d; d = d->next)
2382 {
2383 label = gfc_build_label_decl (NULL_TREE);
2384 tmp = build_case_label ((d->low == NULL && d->high == NULL)
2385 ? NULL
2386 : build_int_cst (integer_type_node, d->n),
2387 NULL, label);
2388 gfc_add_expr_to_block (&body, tmp);
2389 }
2390
2391 tmp = gfc_trans_code (c->next);
2392 gfc_add_expr_to_block (&body, tmp);
2393
2394 tmp = build1_v (GOTO_EXPR, end_label);
2395 gfc_add_expr_to_block (&body, tmp);
2396 }
2397
2398 /* Generate the structure describing the branches. */
2399 for (d = cp; d; d = d->right)
2400 {
2401 vec<constructor_elt, va_gc> *node = NULL;
2402
2403 gfc_init_se (&se, NULL);
2404
2405 if (d->low == NULL)
2406 {
2407 CONSTRUCTOR_APPEND_ELT (node, ss_string1[k], null_pointer_node);
2408 CONSTRUCTOR_APPEND_ELT (node, ss_string1_len[k], integer_zero_node);
2409 }
2410 else
2411 {
2412 gfc_conv_expr_reference (&se, d->low);
2413
2414 CONSTRUCTOR_APPEND_ELT (node, ss_string1[k], se.expr);
2415 CONSTRUCTOR_APPEND_ELT (node, ss_string1_len[k], se.string_length);
2416 }
2417
2418 if (d->high == NULL)
2419 {
2420 CONSTRUCTOR_APPEND_ELT (node, ss_string2[k], null_pointer_node);
2421 CONSTRUCTOR_APPEND_ELT (node, ss_string2_len[k], integer_zero_node);
2422 }
2423 else
2424 {
2425 gfc_init_se (&se, NULL);
2426 gfc_conv_expr_reference (&se, d->high);
2427
2428 CONSTRUCTOR_APPEND_ELT (node, ss_string2[k], se.expr);
2429 CONSTRUCTOR_APPEND_ELT (node, ss_string2_len[k], se.string_length);
2430 }
2431
2432 CONSTRUCTOR_APPEND_ELT (node, ss_target[k],
2433 build_int_cst (integer_type_node, d->n));
2434
2435 tmp = build_constructor (select_struct[k], node);
2436 CONSTRUCTOR_APPEND_ELT (inits, NULL_TREE, tmp);
2437 }
2438
2439 type = build_array_type (select_struct[k],
2440 build_index_type (size_int (n-1)));
2441
2442 init = build_constructor (type, inits);
2443 TREE_CONSTANT (init) = 1;
2444 TREE_STATIC (init) = 1;
2445 /* Create a static variable to hold the jump table. */
2446 tmp = gfc_create_var (type, "jumptable");
2447 TREE_CONSTANT (tmp) = 1;
2448 TREE_STATIC (tmp) = 1;
2449 TREE_READONLY (tmp) = 1;
2450 DECL_INITIAL (tmp) = init;
2451 init = tmp;
2452
2453 /* Build the library call. */
2454 init = gfc_build_addr_expr (pvoid_type_node, init);
2455
2456 if (code->expr1->ts.kind == 1)
2457 fndecl = gfor_fndecl_select_string;
2458 else if (code->expr1->ts.kind == 4)
2459 fndecl = gfor_fndecl_select_string_char4;
2460 else
2461 gcc_unreachable ();
2462
2463 tmp = build_call_expr_loc (input_location,
2464 fndecl, 4, init,
2465 build_int_cst (gfc_charlen_type_node, n),
2466 expr1se.expr, expr1se.string_length);
2467 case_num = gfc_create_var (integer_type_node, "case_num");
2468 gfc_add_modify (&block, case_num, tmp);
2469
2470 gfc_add_block_to_block (&block, &expr1se.post);
2471
2472 tmp = gfc_finish_block (&body);
2473 tmp = fold_build3_loc (input_location, SWITCH_EXPR, NULL_TREE,
2474 case_num, tmp, NULL_TREE);
2475 gfc_add_expr_to_block (&block, tmp);
2476
2477 tmp = build1_v (LABEL_EXPR, end_label);
2478 gfc_add_expr_to_block (&block, tmp);
2479
2480 return gfc_finish_block (&block);
2481 }
2482
2483
2484 /* Translate the three variants of the SELECT CASE construct.
2485
2486 SELECT CASEs with INTEGER case expressions can be translated to an
2487 equivalent GENERIC switch statement, and for LOGICAL case
2488 expressions we build one or two if-else compares.
2489
2490 SELECT CASEs with CHARACTER case expressions are a whole different
2491 story, because they don't exist in GENERIC. So we sort them and
2492 do a binary search at runtime.
2493
2494 Fortran has no BREAK statement, and it does not allow jumps from
2495 one case block to another. That makes things a lot easier for
2496 the optimizers. */
2497
2498 tree
2499 gfc_trans_select (gfc_code * code)
2500 {
2501 stmtblock_t block;
2502 tree body;
2503 tree exit_label;
2504
2505 gcc_assert (code && code->expr1);
2506 gfc_init_block (&block);
2507
2508 /* Build the exit label and hang it in. */
2509 exit_label = gfc_build_label_decl (NULL_TREE);
2510 code->exit_label = exit_label;
2511
2512 /* Empty SELECT constructs are legal. */
2513 if (code->block == NULL)
2514 body = build_empty_stmt (input_location);
2515
2516 /* Select the correct translation function. */
2517 else
2518 switch (code->expr1->ts.type)
2519 {
2520 case BT_LOGICAL:
2521 body = gfc_trans_logical_select (code);
2522 break;
2523
2524 case BT_INTEGER:
2525 body = gfc_trans_integer_select (code);
2526 break;
2527
2528 case BT_CHARACTER:
2529 body = gfc_trans_character_select (code);
2530 break;
2531
2532 default:
2533 gfc_internal_error ("gfc_trans_select(): Bad type for case expr.");
2534 /* Not reached */
2535 }
2536
2537 /* Build everything together. */
2538 gfc_add_expr_to_block (&block, body);
2539 gfc_add_expr_to_block (&block, build1_v (LABEL_EXPR, exit_label));
2540
2541 return gfc_finish_block (&block);
2542 }
2543
2544
2545 /* Traversal function to substitute a replacement symtree if the symbol
2546 in the expression is the same as that passed. f == 2 signals that
2547 the variable itself is not to be checked - only the references.
2548 This group of functions is used when the variable expression in a
2549 FORALL assignment has internal references. For example:
2550 FORALL (i = 1:4) p(p(i)) = i
2551 The only recourse here is to store a copy of 'p' for the index
2552 expression. */
2553
2554 static gfc_symtree *new_symtree;
2555 static gfc_symtree *old_symtree;
2556
2557 static bool
2558 forall_replace (gfc_expr *expr, gfc_symbol *sym, int *f)
2559 {
2560 if (expr->expr_type != EXPR_VARIABLE)
2561 return false;
2562
2563 if (*f == 2)
2564 *f = 1;
2565 else if (expr->symtree->n.sym == sym)
2566 expr->symtree = new_symtree;
2567
2568 return false;
2569 }
2570
2571 static void
2572 forall_replace_symtree (gfc_expr *e, gfc_symbol *sym, int f)
2573 {
2574 gfc_traverse_expr (e, sym, forall_replace, f);
2575 }
2576
2577 static bool
2578 forall_restore (gfc_expr *expr,
2579 gfc_symbol *sym ATTRIBUTE_UNUSED,
2580 int *f ATTRIBUTE_UNUSED)
2581 {
2582 if (expr->expr_type != EXPR_VARIABLE)
2583 return false;
2584
2585 if (expr->symtree == new_symtree)
2586 expr->symtree = old_symtree;
2587
2588 return false;
2589 }
2590
2591 static void
2592 forall_restore_symtree (gfc_expr *e)
2593 {
2594 gfc_traverse_expr (e, NULL, forall_restore, 0);
2595 }
2596
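/* Make a temporary copy of the lvalue variable of a FORALL assignment and
   create a fake symtree for it; references inside the lhs subscripts and
   all occurrences on the rhs are then redirected to this copy (see the
   example above).  */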
2597 static void
2598 forall_make_variable_temp (gfc_code *c, stmtblock_t *pre, stmtblock_t *post)
2599 {
2600 gfc_se tse;
2601 gfc_se rse;
2602 gfc_expr *e;
2603 gfc_symbol *new_sym;
2604 gfc_symbol *old_sym;
2605 gfc_symtree *root;
2606 tree tmp;
2607
2608 /* Build a copy of the lvalue. */
2609 old_symtree = c->expr1->symtree;
2610 old_sym = old_symtree->n.sym;
2611 e = gfc_lval_expr_from_sym (old_sym);
2612 if (old_sym->attr.dimension)
2613 {
2614 gfc_init_se (&tse, NULL);
2615 gfc_conv_subref_array_arg (&tse, e, 0, INTENT_IN, false);
2616 gfc_add_block_to_block (pre, &tse.pre);
2617 gfc_add_block_to_block (post, &tse.post);
2618 tse.expr = build_fold_indirect_ref_loc (input_location, tse.expr);
2619
2620 if (e->ts.type != BT_CHARACTER)
2621 {
2622 /* Use the variable offset for the temporary. */
2623 tmp = gfc_conv_array_offset (old_sym->backend_decl);
2624 gfc_conv_descriptor_offset_set (pre, tse.expr, tmp);
2625 }
2626 }
2627 else
2628 {
2629 gfc_init_se (&tse, NULL);
2630 gfc_init_se (&rse, NULL);
2631 gfc_conv_expr (&rse, e);
2632 if (e->ts.type == BT_CHARACTER)
2633 {
2634 tse.string_length = rse.string_length;
2635 tmp = gfc_get_character_type_len (gfc_default_character_kind,
2636 tse.string_length);
2637 tse.expr = gfc_conv_string_tmp (&tse, build_pointer_type (tmp),
2638 rse.string_length);
2639 gfc_add_block_to_block (pre, &tse.pre);
2640 gfc_add_block_to_block (post, &tse.post);
2641 }
2642 else
2643 {
2644 tmp = gfc_typenode_for_spec (&e->ts);
2645 tse.expr = gfc_create_var (tmp, "temp");
2646 }
2647
2648 tmp = gfc_trans_scalar_assign (&tse, &rse, e->ts, true,
2649 e->expr_type == EXPR_VARIABLE, true);
2650 gfc_add_expr_to_block (pre, tmp);
2651 }
2652 gfc_free_expr (e);
2653
2654 /* Create a new symbol to represent the lvalue. */
2655 new_sym = gfc_new_symbol (old_sym->name, NULL);
2656 new_sym->ts = old_sym->ts;
2657 new_sym->attr.referenced = 1;
2658 new_sym->attr.temporary = 1;
2659 new_sym->attr.dimension = old_sym->attr.dimension;
2660 new_sym->attr.flavor = old_sym->attr.flavor;
2661
2662 /* Use the temporary as the backend_decl. */
2663 new_sym->backend_decl = tse.expr;
2664
2665 /* Create a fake symtree for it. */
2666 root = NULL;
2667 new_symtree = gfc_new_symtree (&root, old_sym->name);
2668 new_symtree->n.sym = new_sym;
2669 gcc_assert (new_symtree == root);
2670
2671 /* Go through the expression reference replacing the old_symtree
2672 with the new. */
2673 forall_replace_symtree (c->expr1, old_sym, 2);
2674
2675 /* Now we have made this temporary, we might as well use it for
2676 the right hand side. */
2677 forall_replace_symtree (c->expr2, old_sym, 1);
2678 }
2679
2680
2681 /* Handle dependencies in FORALL assignments; return nonzero if a temporary is needed. */
2682 static int
2683 check_forall_dependencies (gfc_code *c, stmtblock_t *pre, stmtblock_t *post)
2684 {
2685 gfc_ref *lref;
2686 gfc_ref *rref;
2687 int need_temp;
2688 gfc_symbol *lsym;
2689
2690 lsym = c->expr1->symtree->n.sym;
2691 need_temp = gfc_check_dependency (c->expr1, c->expr2, 0);
2692
2693 /* Now check for dependencies within the 'variable'
2694 expression itself. These are treated by making a complete
2695 copy of the variable and changing all the references to it
2696 to point to the copy instead. Note that the shallow copy of
2697 the variable will not suffice for derived types with
2698 pointer components. We therefore leave these to their
2699 own devices. */
2700 if (lsym->ts.type == BT_DERIVED
2701 && lsym->ts.u.derived->attr.pointer_comp)
2702 return need_temp;
2703
2704 new_symtree = NULL;
2705 if (find_forall_index (c->expr1, lsym, 2))
2706 {
2707 forall_make_variable_temp (c, pre, post);
2708 need_temp = 0;
2709 }
2710
2711 /* Substrings with dependencies are treated in the same
2712 way. */
2713 if (c->expr1->ts.type == BT_CHARACTER
2714 && c->expr1->ref
2715 && c->expr2->expr_type == EXPR_VARIABLE
2716 && lsym == c->expr2->symtree->n.sym)
2717 {
2718 for (lref = c->expr1->ref; lref; lref = lref->next)
2719 if (lref->type == REF_SUBSTRING)
2720 break;
2721 for (rref = c->expr2->ref; rref; rref = rref->next)
2722 if (rref->type == REF_SUBSTRING)
2723 break;
2724
2725 if (rref && lref
2726 && gfc_dep_compare_expr (rref->u.ss.start, lref->u.ss.start) < 0)
2727 {
2728 forall_make_variable_temp (c, pre, post);
2729 need_temp = 0;
2730 }
2731 }
2732 return need_temp;
2733 }
2734
2735
2736 static void
2737 cleanup_forall_symtrees (gfc_code *c)
2738 {
2739 forall_restore_symtree (c->expr1);
2740 forall_restore_symtree (c->expr2);
2741 free (new_symtree->n.sym);
2742 free (new_symtree);
2743 }
2744
2745
2746 /* Generate the loops for a FORALL block, specified by FORALL_TMP. BODY
2747 is the contents of the FORALL block/stmt to be iterated. MASK_FLAG
2748 indicates whether we should generate code to test the FORALLs mask
2749 array. OUTER is the loop header to be used for initializing mask
2750 indices.
2751
2752 The generated loop format is:
2753 count = (end - start + step) / step
2754 loopvar = start
2755 while (1)
2756 {
2757 if (count <= 0)
2758 goto end_of_loop
2759 <body>
2760 loopvar += step
2761 count --
2762 }
2763 end_of_loop: */
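
/* For example, a triplet of 1:10:3 gives count = (10 - 1 + 3) / 3 = 4,
   so the loop body runs for loopvar = 1, 4, 7 and 10.  */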
2764
2765 static tree
2766 gfc_trans_forall_loop (forall_info *forall_tmp, tree body,
2767 int mask_flag, stmtblock_t *outer)
2768 {
2769 int n, nvar;
2770 tree tmp;
2771 tree cond;
2772 stmtblock_t block;
2773 tree exit_label;
2774 tree count;
2775 tree var, start, end, step;
2776 iter_info *iter;
2777
2778 /* Initialize the mask index outside the FORALL nest. */
2779 if (mask_flag && forall_tmp->mask)
2780 gfc_add_modify (outer, forall_tmp->maskindex, gfc_index_zero_node);
2781
2782 iter = forall_tmp->this_loop;
2783 nvar = forall_tmp->nvar;
2784 for (n = 0; n < nvar; n++)
2785 {
2786 var = iter->var;
2787 start = iter->start;
2788 end = iter->end;
2789 step = iter->step;
2790
2791 exit_label = gfc_build_label_decl (NULL_TREE);
2792 TREE_USED (exit_label) = 1;
2793
2794 /* The loop counter. */
2795 count = gfc_create_var (TREE_TYPE (var), "count");
2796
2797 /* The body of the loop. */
2798 gfc_init_block (&block);
2799
2800 /* The exit condition. */
2801 cond = fold_build2_loc (input_location, LE_EXPR, boolean_type_node,
2802 count, build_int_cst (TREE_TYPE (count), 0));
2803 if (forall_tmp->do_concurrent)
2804 cond = build2 (ANNOTATE_EXPR, TREE_TYPE (cond), cond,
2805 build_int_cst (integer_type_node,
2806 annot_expr_ivdep_kind));
2807
2808 tmp = build1_v (GOTO_EXPR, exit_label);
2809 tmp = fold_build3_loc (input_location, COND_EXPR, void_type_node,
2810 cond, tmp, build_empty_stmt (input_location));
2811 gfc_add_expr_to_block (&block, tmp);
2812
2813 /* The main loop body. */
2814 gfc_add_expr_to_block (&block, body);
2815
2816 /* Increment the loop variable. */
2817 tmp = fold_build2_loc (input_location, PLUS_EXPR, TREE_TYPE (var), var,
2818 step);
2819 gfc_add_modify (&block, var, tmp);
2820
2821 /* Advance to the next mask element. Only do this for the
2822 innermost loop. */
2823 if (n == 0 && mask_flag && forall_tmp->mask)
2824 {
2825 tree maskindex = forall_tmp->maskindex;
2826 tmp = fold_build2_loc (input_location, PLUS_EXPR, gfc_array_index_type,
2827 maskindex, gfc_index_one_node);
2828 gfc_add_modify (&block, maskindex, tmp);
2829 }
2830
2831 /* Decrement the loop counter. */
2832 tmp = fold_build2_loc (input_location, MINUS_EXPR, TREE_TYPE (var), count,
2833 build_int_cst (TREE_TYPE (var), 1));
2834 gfc_add_modify (&block, count, tmp);
2835
2836 body = gfc_finish_block (&block);
2837
2838 /* Loop var initialization. */
2839 gfc_init_block (&block);
2840 gfc_add_modify (&block, var, start);
2841
2842
2843 /* Initialize the loop counter. */
2844 tmp = fold_build2_loc (input_location, MINUS_EXPR, TREE_TYPE (var), step,
2845 start);
2846 tmp = fold_build2_loc (input_location, PLUS_EXPR, TREE_TYPE (var), end,
2847 tmp);
2848 tmp = fold_build2_loc (input_location, TRUNC_DIV_EXPR, TREE_TYPE (var),
2849 tmp, step);
2850 gfc_add_modify (&block, count, tmp);
2851
2852 /* The loop expression. */
2853 tmp = build1_v (LOOP_EXPR, body);
2854 gfc_add_expr_to_block (&block, tmp);
2855
2856 /* The exit label. */
2857 tmp = build1_v (LABEL_EXPR, exit_label);
2858 gfc_add_expr_to_block (&block, tmp);
2859
2860 body = gfc_finish_block (&block);
2861 iter = iter->next;
2862 }
2863 return body;
2864 }
2865
2866
2867 /* Generate the body and loops according to MASK_FLAG. If MASK_FLAG
2868 is nonzero, the body is controlled by all masks in the forall nest.
2869 Otherwise, the innermost loop is not controlled by its mask. This
2870 is used for initializing that mask. */
2871
2872 static tree
2873 gfc_trans_nested_forall_loop (forall_info * nested_forall_info, tree body,
2874 int mask_flag)
2875 {
2876 tree tmp;
2877 stmtblock_t header;
2878 forall_info *forall_tmp;
2879 tree mask, maskindex;
2880
2881 gfc_start_block (&header);
2882
2883 forall_tmp = nested_forall_info;
2884 while (forall_tmp != NULL)
2885 {
2886 /* Generate body with masks' control. */
2887 if (mask_flag)
2888 {
2889 mask = forall_tmp->mask;
2890 maskindex = forall_tmp->maskindex;
2891
2892 /* If a mask was specified make the assignment conditional. */
2893 if (mask)
2894 {
2895 tmp = gfc_build_array_ref (mask, maskindex, NULL);
2896 body = build3_v (COND_EXPR, tmp, body,
2897 build_empty_stmt (input_location));
2898 }
2899 }
2900 body = gfc_trans_forall_loop (forall_tmp, body, mask_flag, &header);
2901 forall_tmp = forall_tmp->prev_nest;
2902 mask_flag = 1;
2903 }
2904
2905 gfc_add_expr_to_block (&header, body);
2906 return gfc_finish_block (&header);
2907 }
2908
2909
2910 /* Allocate data for holding a temporary array. Returns either a local
2911 temporary array or a pointer variable. */
2912
2913 static tree
2914 gfc_do_allocate (tree bytesize, tree size, tree * pdata, stmtblock_t * pblock,
2915 tree elem_type)
2916 {
2917 tree tmpvar;
2918 tree type;
2919 tree tmp;
2920
2921 if (INTEGER_CST_P (size))
2922 tmp = fold_build2_loc (input_location, MINUS_EXPR, gfc_array_index_type,
2923 size, gfc_index_one_node);
2924 else
2925 tmp = NULL_TREE;
2926
2927 type = build_range_type (gfc_array_index_type, gfc_index_zero_node, tmp);
2928 type = build_array_type (elem_type, type);
2929 if (gfc_can_put_var_on_stack (bytesize))
2930 {
2931 gcc_assert (INTEGER_CST_P (size));
2932 tmpvar = gfc_create_var (type, "temp");
2933 *pdata = NULL_TREE;
2934 }
2935 else
2936 {
2937 tmpvar = gfc_create_var (build_pointer_type (type), "temp");
2938 *pdata = convert (pvoid_type_node, tmpvar);
2939
2940 tmp = gfc_call_malloc (pblock, TREE_TYPE (tmpvar), bytesize);
2941 gfc_add_modify (pblock, tmpvar, tmp);
2942 }
2943 return tmpvar;
2944 }
2945
2946
2947 /* Generate code to copy the temporary to the actual lhs. */
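/* TMP1 is the linear temporary holding the evaluated rhs values, COUNT1 the
   running index into it, and COUNT3 the index into WHEREMASK, whose sense
   may be inverted by INVERT (cf. generate_loop_for_rhs_to_temp below).  */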
2948
2949 static tree
2950 generate_loop_for_temp_to_lhs (gfc_expr *expr, tree tmp1, tree count3,
2951 tree count1, tree wheremask, bool invert)
2952 {
2953 gfc_ss *lss;
2954 gfc_se lse, rse;
2955 stmtblock_t block, body;
2956 gfc_loopinfo loop1;
2957 tree tmp;
2958 tree wheremaskexpr;
2959
2960 /* Walk the lhs. */
2961 lss = gfc_walk_expr (expr);
2962
2963 if (lss == gfc_ss_terminator)
2964 {
2965 gfc_start_block (&block);
2966
2967 gfc_init_se (&lse, NULL);
2968
2969 /* Translate the expression. */
2970 gfc_conv_expr (&lse, expr);
2971
2972 /* Form the expression for the temporary. */
2973 tmp = gfc_build_array_ref (tmp1, count1, NULL);
2974
2975 /* Use the scalar assignment as is. */
2976 gfc_add_block_to_block (&block, &lse.pre);
2977 gfc_add_modify (&block, lse.expr, tmp);
2978 gfc_add_block_to_block (&block, &lse.post);
2979
2980 /* Increment the count1. */
2981 tmp = fold_build2_loc (input_location, PLUS_EXPR, TREE_TYPE (count1),
2982 count1, gfc_index_one_node);
2983 gfc_add_modify (&block, count1, tmp);
2984
2985 tmp = gfc_finish_block (&block);
2986 }
2987 else
2988 {
2989 gfc_start_block (&block);
2990
2991 gfc_init_loopinfo (&loop1);
2992 gfc_init_se (&rse, NULL);
2993 gfc_init_se (&lse, NULL);
2994
2995 /* Associate the lss with the loop. */
2996 gfc_add_ss_to_loop (&loop1, lss);
2997
2998 /* Calculate the bounds of the scalarization. */
2999 gfc_conv_ss_startstride (&loop1);
3000 /* Setup the scalarizing loops. */
3001 gfc_conv_loop_setup (&loop1, &expr->where);
3002
3003 gfc_mark_ss_chain_used (lss, 1);
3004
3005 /* Start the scalarized loop body. */
3006 gfc_start_scalarized_body (&loop1, &body);
3007
3008 /* Setup the gfc_se structures. */
3009 gfc_copy_loopinfo_to_se (&lse, &loop1);
3010 lse.ss = lss;
3011
3012 /* Form the expression of the temporary. */
3013 if (lss != gfc_ss_terminator)
3014 rse.expr = gfc_build_array_ref (tmp1, count1, NULL);
3015 /* Translate expr. */
3016 gfc_conv_expr (&lse, expr);
3017
3018 /* Use the scalar assignment. */
3019 rse.string_length = lse.string_length;
3020 tmp = gfc_trans_scalar_assign (&lse, &rse, expr->ts, false, true, true);
3021
3022 /* Form the mask expression according to the mask tree list. */
3023 if (wheremask)
3024 {
3025 wheremaskexpr = gfc_build_array_ref (wheremask, count3, NULL);
3026 if (invert)
3027 wheremaskexpr = fold_build1_loc (input_location, TRUTH_NOT_EXPR,
3028 TREE_TYPE (wheremaskexpr),
3029 wheremaskexpr);
3030 tmp = fold_build3_loc (input_location, COND_EXPR, void_type_node,
3031 wheremaskexpr, tmp,
3032 build_empty_stmt (input_location));
3033 }
3034
3035 gfc_add_expr_to_block (&body, tmp);
3036
3037 /* Increment count1. */
3038 tmp = fold_build2_loc (input_location, PLUS_EXPR, gfc_array_index_type,
3039 count1, gfc_index_one_node);
3040 gfc_add_modify (&body, count1, tmp);
3041
3042 /* Increment count3. */
3043 if (count3)
3044 {
3045 tmp = fold_build2_loc (input_location, PLUS_EXPR,
3046 gfc_array_index_type, count3,
3047 gfc_index_one_node);
3048 gfc_add_modify (&body, count3, tmp);
3049 }
3050
3051 /* Generate the copying loops. */
3052 gfc_trans_scalarizing_loops (&loop1, &body);
3053 gfc_add_block_to_block (&block, &loop1.pre);
3054 gfc_add_block_to_block (&block, &loop1.post);
3055 gfc_cleanup_loop (&loop1);
3056
3057 tmp = gfc_finish_block (&block);
3058 }
3059 return tmp;
3060 }
3061
3062
3063 /* Generate code to copy the rhs to the temporary. TMP1 is the address of
3064 the temporary; LSS and RSS are formed in function compute_inner_temp_size(),
3065 and should not be freed. WHEREMASK is the conditional execution mask
3066 whose sense may be inverted by INVERT. */
3067
3068 static tree
3069 generate_loop_for_rhs_to_temp (gfc_expr *expr2, tree tmp1, tree count3,
3070 tree count1, gfc_ss *lss, gfc_ss *rss,
3071 tree wheremask, bool invert)
3072 {
3073 stmtblock_t block, body1;
3074 gfc_loopinfo loop;
3075 gfc_se lse;
3076 gfc_se rse;
3077 tree tmp;
3078 tree wheremaskexpr;
3079
3080 gfc_start_block (&block);
3081
3082 gfc_init_se (&rse, NULL);
3083 gfc_init_se (&lse, NULL);
3084
3085 if (lss == gfc_ss_terminator)
3086 {
3087 gfc_init_block (&body1);
3088 gfc_conv_expr (&rse, expr2);
3089 lse.expr = gfc_build_array_ref (tmp1, count1, NULL);
3090 }
3091 else
3092 {
3093 /* Initialize the loop. */
3094 gfc_init_loopinfo (&loop);
3095
3096 /* We may need LSS to determine the shape of the expression. */
3097 gfc_add_ss_to_loop (&loop, lss);
3098 gfc_add_ss_to_loop (&loop, rss);
3099
3100 gfc_conv_ss_startstride (&loop);
3101 gfc_conv_loop_setup (&loop, &expr2->where);
3102
3103 gfc_mark_ss_chain_used (rss, 1);
3104 /* Start the loop body. */
3105 gfc_start_scalarized_body (&loop, &body1);
3106
3107 /* Translate the expression. */
3108 gfc_copy_loopinfo_to_se (&rse, &loop);
3109 rse.ss = rss;
3110 gfc_conv_expr (&rse, expr2);
3111
3112 /* Form the expression of the temporary. */
3113 lse.expr = gfc_build_array_ref (tmp1, count1, NULL);
3114 }
3115
3116 /* Use the scalar assignment. */
3117 lse.string_length = rse.string_length;
3118 tmp = gfc_trans_scalar_assign (&lse, &rse, expr2->ts, true,
3119 expr2->expr_type == EXPR_VARIABLE, true);
3120
3121 /* Form the mask expression according to the mask tree list. */
3122 if (wheremask)
3123 {
3124 wheremaskexpr = gfc_build_array_ref (wheremask, count3, NULL);
3125 if (invert)
3126 wheremaskexpr = fold_build1_loc (input_location, TRUTH_NOT_EXPR,
3127 TREE_TYPE (wheremaskexpr),
3128 wheremaskexpr);
3129 tmp = fold_build3_loc (input_location, COND_EXPR, void_type_node,
3130 wheremaskexpr, tmp,
3131 build_empty_stmt (input_location));
3132 }
3133
3134 gfc_add_expr_to_block (&body1, tmp);
3135
3136 if (lss == gfc_ss_terminator)
3137 {
3138 gfc_add_block_to_block (&block, &body1);
3139
3140 /* Increment count1. */
3141 tmp = fold_build2_loc (input_location, PLUS_EXPR, TREE_TYPE (count1),
3142 count1, gfc_index_one_node);
3143 gfc_add_modify (&block, count1, tmp);
3144 }
3145 else
3146 {
3147 /* Increment count1. */
3148 tmp = fold_build2_loc (input_location, PLUS_EXPR, gfc_array_index_type,
3149 count1, gfc_index_one_node);
3150 gfc_add_modify (&body1, count1, tmp);
3151
3152 /* Increment count3. */
3153 if (count3)
3154 {
3155 tmp = fold_build2_loc (input_location, PLUS_EXPR,
3156 gfc_array_index_type,
3157 count3, gfc_index_one_node);
3158 gfc_add_modify (&body1, count3, tmp);
3159 }
3160
3161 /* Generate the copying loops. */
3162 gfc_trans_scalarizing_loops (&loop, &body1);
3163
3164 gfc_add_block_to_block (&block, &loop.pre);
3165 gfc_add_block_to_block (&block, &loop.post);
3166
3167 gfc_cleanup_loop (&loop);
3168 /* TODO: Reuse lss and rss when copying temp->lhs. Need to be careful
3169 as tree nodes in SS may not be valid in a different scope. */
3170 }
3171
3172 tmp = gfc_finish_block (&block);
3173 return tmp;
3174 }
3175
3176
3177 /* Calculate the size of the temporary needed in the assignment inside forall.
3178 LSS and RSS are filled in this function. */
3179
3180 static tree
3181 compute_inner_temp_size (gfc_expr *expr1, gfc_expr *expr2,
3182 stmtblock_t * pblock,
3183 gfc_ss **lss, gfc_ss **rss)
3184 {
3185 gfc_loopinfo loop;
3186 tree size;
3187 int i;
3188 int save_flag;
3189 tree tmp;
3190
3191 *lss = gfc_walk_expr (expr1);
3192 *rss = NULL;
3193
3194 size = gfc_index_one_node;
3195 if (*lss != gfc_ss_terminator)
3196 {
3197 gfc_init_loopinfo (&loop);
3198
3199 /* Walk the RHS of the expression. */
3200 *rss = gfc_walk_expr (expr2);
3201 if (*rss == gfc_ss_terminator)
3202 /* The rhs is scalar. Add a ss for the expression. */
3203 *rss = gfc_get_scalar_ss (gfc_ss_terminator, expr2);
3204
3205 /* Associate the SS with the loop. */
3206 gfc_add_ss_to_loop (&loop, *lss);
3207 /* We don't actually need to add the rhs at this point, but it might
3208 make guessing the loop bounds a bit easier. */
3209 gfc_add_ss_to_loop (&loop, *rss);
3210
3211 /* We only want the shape of the expression, not the rest of the junk
3212 generated by the scalarizer. */
3213 loop.array_parameter = 1;
3214
3215 /* Calculate the bounds of the scalarization. */
3216 save_flag = gfc_option.rtcheck;
3217 gfc_option.rtcheck &= ~GFC_RTCHECK_BOUNDS;
3218 gfc_conv_ss_startstride (&loop);
3219 gfc_option.rtcheck = save_flag;
3220 gfc_conv_loop_setup (&loop, &expr2->where);
3221
3222 /* Figure out how many elements we need. */
3223 for (i = 0; i < loop.dimen; i++)
3224 {
3225 tmp = fold_build2_loc (input_location, MINUS_EXPR,
3226 gfc_array_index_type,
3227 gfc_index_one_node, loop.from[i]);
3228 tmp = fold_build2_loc (input_location, PLUS_EXPR,
3229 gfc_array_index_type, tmp, loop.to[i]);
3230 size = fold_build2_loc (input_location, MULT_EXPR,
3231 gfc_array_index_type, size, tmp);
3232 }
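      /* SIZE now holds the product of (to - from + 1) over all dimensions,
	 i.e. the number of elements assigned by a single iteration of the
	 forall nest.  */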
3233 gfc_add_block_to_block (pblock, &loop.pre);
3234 size = gfc_evaluate_now (size, pblock);
3235 gfc_add_block_to_block (pblock, &loop.post);
3236
3237 /* TODO: write a function that cleans up a loopinfo without freeing
3238 the SS chains. Currently a NOP. */
3239 }
3240
3241 return size;
3242 }
3243
3244
3245 /* Calculate the overall iterator number of the nested forall construct.
3246 This routine actually calculates the number of times the body of the
3247 nested forall specified by NESTED_FORALL_INFO is executed and multiplies
3248 that by the expression INNER_SIZE. The BLOCK argument specifies the
3249 block in which to calculate the result, and the optional INNER_SIZE_BODY
3250 argument contains any statements that need to be executed (inside the loop)
3251 to initialize or calculate INNER_SIZE. */
3252
3253 static tree
3254 compute_overall_iter_number (forall_info *nested_forall_info, tree inner_size,
3255 stmtblock_t *inner_size_body, stmtblock_t *block)
3256 {
3257 forall_info *forall_tmp = nested_forall_info;
3258 tree tmp, number;
3259 stmtblock_t body;
3260
3261 /* We can eliminate the innermost unconditional loops with constant
3262 array bounds. */
3263 if (INTEGER_CST_P (inner_size))
3264 {
3265 while (forall_tmp
3266 && !forall_tmp->mask
3267 && INTEGER_CST_P (forall_tmp->size))
3268 {
3269 inner_size = fold_build2_loc (input_location, MULT_EXPR,
3270 gfc_array_index_type,
3271 inner_size, forall_tmp->size);
3272 forall_tmp = forall_tmp->prev_nest;
3273 }
3274
3275 /* If there are no loops left, we have our constant result. */
3276 if (!forall_tmp)
3277 return inner_size;
3278 }
3279
3280 /* Otherwise, create a temporary variable to compute the result. */
3281 number = gfc_create_var (gfc_array_index_type, "num");
3282 gfc_add_modify (block, number, gfc_index_zero_node);
3283
3284 gfc_start_block (&body);
3285 if (inner_size_body)
3286 gfc_add_block_to_block (&body, inner_size_body);
3287 if (forall_tmp)
3288 tmp = fold_build2_loc (input_location, PLUS_EXPR,
3289 gfc_array_index_type, number, inner_size);
3290 else
3291 tmp = inner_size;
3292 gfc_add_modify (&body, number, tmp);
3293 tmp = gfc_finish_block (&body);
3294
3295 /* Generate loops. */
3296 if (forall_tmp != NULL)
3297 tmp = gfc_trans_nested_forall_loop (forall_tmp, tmp, 1);
3298
3299 gfc_add_expr_to_block (block, tmp);
3300
3301 return number;
3302 }
3303
3304
3305 /* Allocate a temporary for the forall construct. SIZE is the size of the
3306 temporary needed. PTEMP1 is returned so the space can be freed later. */
3307
3308 static tree
3309 allocate_temp_for_forall_nest_1 (tree type, tree size, stmtblock_t * block,
3310 tree * ptemp1)
3311 {
3312 tree bytesize;
3313 tree unit;
3314 tree tmp;
3315
3316 unit = fold_convert (gfc_array_index_type, TYPE_SIZE_UNIT (type));
3317 if (!integer_onep (unit))
3318 bytesize = fold_build2_loc (input_location, MULT_EXPR,
3319 gfc_array_index_type, size, unit);
3320 else
3321 bytesize = size;
3322
3323 *ptemp1 = NULL;
3324 tmp = gfc_do_allocate (bytesize, size, ptemp1, block, type);
3325
3326 if (*ptemp1)
3327 tmp = build_fold_indirect_ref_loc (input_location, tmp);
3328 return tmp;
3329 }
3330
3331
3332 /* Allocate temporary for forall construct according to the information in
3333 nested_forall_info. INNER_SIZE is the size of the temporary needed in the
3334 assignment inside forall. PTEMP1 is returned so the space can be freed later. */
3335
3336 static tree
3337 allocate_temp_for_forall_nest (forall_info * nested_forall_info, tree type,
3338 tree inner_size, stmtblock_t * inner_size_body,
3339 stmtblock_t * block, tree * ptemp1)
3340 {
3341 tree size;
3342
3343 /* Calculate the total size of temporary needed in forall construct. */
3344 size = compute_overall_iter_number (nested_forall_info, inner_size,
3345 inner_size_body, block);
3346
3347 return allocate_temp_for_forall_nest_1 (type, size, block, ptemp1);
3348 }
3349
3350
3351 /* Handle assignments inside forall which need a temporary.
3352
3353 forall (i=start:end:stride; maskexpr)
3354 e<i> = f<i>
3355 end forall
3356 (where e,f<i> are arbitrary expressions possibly involving i
3357 and there is a dependency between e<i> and f<i>)
3358 Translates to:
3359 masktmp(:) = maskexpr(:)
3360
3361 maskindex = 0;
3362 count1 = 0;
3363 num = 0;
3364 for (i = start; i <= end; i += stride)
3365 num += SIZE (f<i>)
3366 count1 = 0;
3367 ALLOCATE (tmp(num))
3368 for (i = start; i <= end; i += stride)
3369 {
3370 if (masktmp[maskindex++])
3371 tmp[count1++] = f<i>
3372 }
3373 maskindex = 0;
3374 count1 = 0;
3375 for (i = start; i <= end; i += stride)
3376 {
3377 if (masktmp[maskindex++])
3378 e<i> = tmp[count1++]
3379 }
3380 DEALLOCATE (tmp)
3381 */
3382 static void
3383 gfc_trans_assign_need_temp (gfc_expr * expr1, gfc_expr * expr2,
3384 tree wheremask, bool invert,
3385 forall_info * nested_forall_info,
3386 stmtblock_t * block)
3387 {
3388 tree type;
3389 tree inner_size;
3390 gfc_ss *lss, *rss;
3391 tree count, count1;
3392 tree tmp, tmp1;
3393 tree ptemp1;
3394 stmtblock_t inner_size_body;
3395
3396 /* Create vars. count1 is the current iterator number of the nested
3397 forall. */
3398 count1 = gfc_create_var (gfc_array_index_type, "count1");
3399
3400 /* Count is the wheremask index. */
3401 if (wheremask)
3402 {
3403 count = gfc_create_var (gfc_array_index_type, "count");
3404 gfc_add_modify (block, count, gfc_index_zero_node);
3405 }
3406 else
3407 count = NULL;
3408
3409 /* Initialize count1. */
3410 gfc_add_modify (block, count1, gfc_index_zero_node);
3411
3412 /* Calculate the size of the temporary needed in the assignment. Return lss
3413 and rss, which are used in function generate_loop_for_rhs_to_temp(). */
3414 gfc_init_block (&inner_size_body);
3415 inner_size = compute_inner_temp_size (expr1, expr2, &inner_size_body,
3416 &lss, &rss);
3417
3418 /* The type of the LHS. Used in function allocate_temp_for_forall_nest. */
3419 if (expr1->ts.type == BT_CHARACTER && expr1->ts.u.cl->length)
3420 {
3421 if (!expr1->ts.u.cl->backend_decl)
3422 {
3423 gfc_se tse;
3424 gfc_init_se (&tse, NULL);
3425 gfc_conv_expr (&tse, expr1->ts.u.cl->length);
3426 expr1->ts.u.cl->backend_decl = tse.expr;
3427 }
3428 type = gfc_get_character_type_len (gfc_default_character_kind,
3429 expr1->ts.u.cl->backend_decl);
3430 }
3431 else
3432 type = gfc_typenode_for_spec (&expr1->ts);
3433
3434 /* Allocate temporary for nested forall construct according to the
3435 information in nested_forall_info and inner_size. */
3436 tmp1 = allocate_temp_for_forall_nest (nested_forall_info, type, inner_size,
3437 &inner_size_body, block, &ptemp1);
3438
3439 /* Generate code to copy the rhs to the temporary. */
3440 tmp = generate_loop_for_rhs_to_temp (expr2, tmp1, count, count1, lss, rss,
3441 wheremask, invert);
3442
3443 /* Generate body and loops according to the information in
3444 nested_forall_info. */
3445 tmp = gfc_trans_nested_forall_loop (nested_forall_info, tmp, 1);
3446 gfc_add_expr_to_block (block, tmp);
3447
3448 /* Reset count1. */
3449 gfc_add_modify (block, count1, gfc_index_zero_node);
3450
3451 /* Reset count. */
3452 if (wheremask)
3453 gfc_add_modify (block, count, gfc_index_zero_node);
3454
3455 /* Generate code to copy the temporary to the lhs. */
3456 tmp = generate_loop_for_temp_to_lhs (expr1, tmp1, count, count1,
3457 wheremask, invert);
3458
3459 /* Generate body and loops according to the information in
3460 nested_forall_info. */
3461 tmp = gfc_trans_nested_forall_loop (nested_forall_info, tmp, 1);
3462 gfc_add_expr_to_block (block, tmp);
3463
3464 if (ptemp1)
3465 {
3466 /* Free the temporary. */
3467 tmp = gfc_call_free (ptemp1);
3468 gfc_add_expr_to_block (block, tmp);
3469 }
3470 }
3471
3472
3473 /* Translate a pointer assignment inside FORALL which needs a temporary. */
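
/* The translation makes two passes over the FORALL nest: the first pass
   evaluates each right-hand side target (or, for array sections, its
   descriptor) into a linear temporary indexed by COUNT, and the second
   pass assigns the saved entries to the left-hand side pointer.  */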
3474
3475 static void
3476 gfc_trans_pointer_assign_need_temp (gfc_expr * expr1, gfc_expr * expr2,
3477 forall_info * nested_forall_info,
3478 stmtblock_t * block)
3479 {
3480 tree type;
3481 tree inner_size;
3482 gfc_ss *lss, *rss;
3483 gfc_se lse;
3484 gfc_se rse;
3485 gfc_array_info *info;
3486 gfc_loopinfo loop;
3487 tree desc;
3488 tree parm;
3489 tree parmtype;
3490 stmtblock_t body;
3491 tree count;
3492 tree tmp, tmp1, ptemp1;
3493
3494 count = gfc_create_var (gfc_array_index_type, "count");
3495 gfc_add_modify (block, count, gfc_index_zero_node);
3496
3497 inner_size = gfc_index_one_node;
3498 lss = gfc_walk_expr (expr1);
3499 rss = gfc_walk_expr (expr2);
3500 if (lss == gfc_ss_terminator)
3501 {
3502 type = gfc_typenode_for_spec (&expr1->ts);
3503 type = build_pointer_type (type);
3504
3505 /* Allocate temporary for nested forall construct according to the
3506 information in nested_forall_info and inner_size. */
3507 tmp1 = allocate_temp_for_forall_nest (nested_forall_info, type,
3508 inner_size, NULL, block, &ptemp1);
3509 gfc_start_block (&body);
3510 gfc_init_se (&lse, NULL);
3511 lse.expr = gfc_build_array_ref (tmp1, count, NULL);
3512 gfc_init_se (&rse, NULL);
3513 rse.want_pointer = 1;
3514 gfc_conv_expr (&rse, expr2);
3515 gfc_add_block_to_block (&body, &rse.pre);
3516 gfc_add_modify (&body, lse.expr,
3517 fold_convert (TREE_TYPE (lse.expr), rse.expr));
3518 gfc_add_block_to_block (&body, &rse.post);
3519
3520 /* Increment count. */
3521 tmp = fold_build2_loc (input_location, PLUS_EXPR, gfc_array_index_type,
3522 count, gfc_index_one_node);
3523 gfc_add_modify (&body, count, tmp);
3524
3525 tmp = gfc_finish_block (&body);
3526
3527 /* Generate body and loops according to the information in
3528 nested_forall_info. */
3529 tmp = gfc_trans_nested_forall_loop (nested_forall_info, tmp, 1);
3530 gfc_add_expr_to_block (block, tmp);
3531
3532 /* Reset count. */
3533 gfc_add_modify (block, count, gfc_index_zero_node);
3534
3535 gfc_start_block (&body);
3536 gfc_init_se (&lse, NULL);
3537 gfc_init_se (&rse, NULL);
3538 rse.expr = gfc_build_array_ref (tmp1, count, NULL);
3539 lse.want_pointer = 1;
3540 gfc_conv_expr (&lse, expr1);
3541 gfc_add_block_to_block (&body, &lse.pre);
3542 gfc_add_modify (&body, lse.expr, rse.expr);
3543 gfc_add_block_to_block (&body, &lse.post);
3544 /* Increment count. */
3545 tmp = fold_build2_loc (input_location, PLUS_EXPR, gfc_array_index_type,
3546 count, gfc_index_one_node);
3547 gfc_add_modify (&body, count, tmp);
3548 tmp = gfc_finish_block (&body);
3549
3550 /* Generate body and loops according to the information in
3551 nested_forall_info. */
3552 tmp = gfc_trans_nested_forall_loop (nested_forall_info, tmp, 1);
3553 gfc_add_expr_to_block (block, tmp);
3554 }
3555 else
3556 {
3557 gfc_init_loopinfo (&loop);
3558
3559 /* Associate the SS with the loop. */
3560 gfc_add_ss_to_loop (&loop, rss);
3561
3562 /* Setup the scalarizing loops and bounds. */
3563 gfc_conv_ss_startstride (&loop);
3564
3565 gfc_conv_loop_setup (&loop, &expr2->where);
3566
3567 info = &rss->info->data.array;
3568 desc = info->descriptor;
3569
3570 /* Make a new descriptor. */
3571 parmtype = gfc_get_element_type (TREE_TYPE (desc));
3572 parmtype = gfc_get_array_type_bounds (parmtype, loop.dimen, 0,
3573 loop.from, loop.to, 1,
3574 GFC_ARRAY_UNKNOWN, true);
3575
3576 /* Allocate temporary for nested forall construct. */
3577 tmp1 = allocate_temp_for_forall_nest (nested_forall_info, parmtype,
3578 inner_size, NULL, block, &ptemp1);
3579 gfc_start_block (&body);
3580 gfc_init_se (&lse, NULL);
3581 lse.expr = gfc_build_array_ref (tmp1, count, NULL);
3582 lse.direct_byref = 1;
3583 gfc_conv_expr_descriptor (&lse, expr2);
3584
3585 gfc_add_block_to_block (&body, &lse.pre);
3586 gfc_add_block_to_block (&body, &lse.post);
3587
3588 /* Increment count. */
3589 tmp = fold_build2_loc (input_location, PLUS_EXPR, gfc_array_index_type,
3590 count, gfc_index_one_node);
3591 gfc_add_modify (&body, count, tmp);
3592
3593 tmp = gfc_finish_block (&body);
3594
3595 /* Generate body and loops according to the information in
3596 nested_forall_info. */
3597 tmp = gfc_trans_nested_forall_loop (nested_forall_info, tmp, 1);
3598 gfc_add_expr_to_block (block, tmp);
3599
3600 /* Reset count. */
3601 gfc_add_modify (block, count, gfc_index_zero_node);
3602
3603 parm = gfc_build_array_ref (tmp1, count, NULL);
3604 gfc_init_se (&lse, NULL);
3605 gfc_conv_expr_descriptor (&lse, expr1);
3606 gfc_add_modify (&lse.pre, lse.expr, parm);
3607 gfc_start_block (&body);
3608 gfc_add_block_to_block (&body, &lse.pre);
3609 gfc_add_block_to_block (&body, &lse.post);
3610
3611 /* Increment count. */
3612 tmp = fold_build2_loc (input_location, PLUS_EXPR, gfc_array_index_type,
3613 count, gfc_index_one_node);
3614 gfc_add_modify (&body, count, tmp);
3615
3616 tmp = gfc_finish_block (&body);
3617
3618 tmp = gfc_trans_nested_forall_loop (nested_forall_info, tmp, 1);
3619 gfc_add_expr_to_block (block, tmp);
3620 }
3621 /* Free the temporary. */
3622 if (ptemp1)
3623 {
3624 tmp = gfc_call_free (ptemp1);
3625 gfc_add_expr_to_block (block, tmp);
3626 }
3627 }
3628
3629
3630 /* FORALL and WHERE statements are really nasty, especially when you nest
3631 them. The rhs of every forall assignment must be evaluated before the
3632 actual assignments are performed. Presumably this also applies to all the
3633 assignments in an inner where statement. */
3634
3635 /* Generate code for a FORALL statement. Any temporaries are allocated as a
3636 linear array, relying on the fact that we process in the same order in all
3637 loops.
3638
3639 forall (i=start:end:stride; maskexpr)
3640 e<i> = f<i>
3641 g<i> = h<i>
3642 end forall
3643 (where e,f,g,h<i> are arbitrary expressions possibly involving i)
3644 Translates to:
3645 count = ((end + 1 - start) / stride)
3646 masktmp(:) = maskexpr(:)
3647
3648 maskindex = 0;
3649 for (i = start; i <= end; i += stride)
3650 {
3651 if (masktmp[maskindex++])
3652 e<i> = f<i>
3653 }
3654 maskindex = 0;
3655 for (i = start; i <= end; i += stride)
3656 {
3657 if (masktmp[maskindex++])
3658 g<i> = h<i>
3659 }
3660
3661 Note that this code only works when there are no dependencies.
3662 Forall loops with array assignments and data dependencies are a real pain,
3663 because the size of the temporary cannot always be determined before the
3664 loop is executed. This problem is compounded by the presence of nested
3665 FORALL constructs.
3666 */
3667
3668 static tree
3669 gfc_trans_forall_1 (gfc_code * code, forall_info * nested_forall_info)
3670 {
3671 stmtblock_t pre;
3672 stmtblock_t post;
3673 stmtblock_t block;
3674 stmtblock_t body;
3675 tree *var;
3676 tree *start;
3677 tree *end;
3678 tree *step;
3679 gfc_expr **varexpr;
3680 tree tmp;
3681 tree assign;
3682 tree size;
3683 tree maskindex;
3684 tree mask;
3685 tree pmask;
3686 tree cycle_label = NULL_TREE;
3687 int n;
3688 int nvar;
3689 int need_temp;
3690 gfc_forall_iterator *fa;
3691 gfc_se se;
3692 gfc_code *c;
3693 gfc_saved_var *saved_vars;
3694 iter_info *this_forall;
3695 forall_info *info;
3696 bool need_mask;
3697
3698 /* Do nothing if the mask is false. */
3699 if (code->expr1
3700 && code->expr1->expr_type == EXPR_CONSTANT
3701 && !code->expr1->value.logical)
3702 return build_empty_stmt (input_location);
3703
3704 n = 0;
3705 /* Count the FORALL index number. */
3706 for (fa = code->ext.forall_iterator; fa; fa = fa->next)
3707 n++;
3708 nvar = n;
3709
3710 /* Allocate the space for var, start, end, step, varexpr. */
3711 var = XCNEWVEC (tree, nvar);
3712 start = XCNEWVEC (tree, nvar);
3713 end = XCNEWVEC (tree, nvar);
3714 step = XCNEWVEC (tree, nvar);
3715 varexpr = XCNEWVEC (gfc_expr *, nvar);
3716 saved_vars = XCNEWVEC (gfc_saved_var, nvar);
3717
3718 /* Allocate the space for info. */
3719 info = XCNEW (forall_info);
3720
3721 gfc_start_block (&pre);
3722 gfc_init_block (&post);
3723 gfc_init_block (&block);
3724
3725 n = 0;
3726 for (fa = code->ext.forall_iterator; fa; fa = fa->next)
3727 {
3728 gfc_symbol *sym = fa->var->symtree->n.sym;
3729
3730 /* Allocate space for this_forall. */
3731 this_forall = XCNEW (iter_info);
3732
3733 /* Create a temporary variable for the FORALL index. */
3734 tmp = gfc_typenode_for_spec (&sym->ts);
3735 var[n] = gfc_create_var (tmp, sym->name);
3736 gfc_shadow_sym (sym, var[n], &saved_vars[n]);
3737
3738 /* Record it in this_forall. */
3739 this_forall->var = var[n];
3740
3741 /* Replace the index symbol's backend_decl with the temporary decl. */
3742 sym->backend_decl = var[n];
3743
3744 /* Work out the start, end and stride for the loop. */
3745 gfc_init_se (&se, NULL);
3746 gfc_conv_expr_val (&se, fa->start);
3747 /* Record it in this_forall. */
3748 this_forall->start = se.expr;
3749 gfc_add_block_to_block (&block, &se.pre);
3750 start[n] = se.expr;
3751
3752 gfc_init_se (&se, NULL);
3753 gfc_conv_expr_val (&se, fa->end);
3754 /* Record it in this_forall. */
3755 this_forall->end = se.expr;
3756 gfc_make_safe_expr (&se);
3757 gfc_add_block_to_block (&block, &se.pre);
3758 end[n] = se.expr;
3759
3760 gfc_init_se (&se, NULL);
3761 gfc_conv_expr_val (&se, fa->stride);
3762 /* Record it in this_forall. */
3763 this_forall->step = se.expr;
3764 gfc_make_safe_expr (&se);
3765 gfc_add_block_to_block (&block, &se.pre);
3766 step[n] = se.expr;
3767
3768 /* Set the NEXT field of this_forall to NULL. */
3769 this_forall->next = NULL;
3770 /* Link this_forall to the info construct. */
3771 if (info->this_loop)
3772 {
3773 iter_info *iter_tmp = info->this_loop;
3774 while (iter_tmp->next != NULL)
3775 iter_tmp = iter_tmp->next;
3776 iter_tmp->next = this_forall;
3777 }
3778 else
3779 info->this_loop = this_forall;
3780
3781 n++;
3782 }
3783 nvar = n;
3784
3785 /* Calculate the size needed for the current forall level. */
3786 size = gfc_index_one_node;
3787 for (n = 0; n < nvar; n++)
3788 {
3789 /* size = (end + step - start) / step. */
3790 tmp = fold_build2_loc (input_location, MINUS_EXPR, TREE_TYPE (start[n]),
3791 step[n], start[n]);
3792 tmp = fold_build2_loc (input_location, PLUS_EXPR, TREE_TYPE (end[n]),
3793 end[n], tmp);
3794 tmp = fold_build2_loc (input_location, FLOOR_DIV_EXPR, TREE_TYPE (tmp),
3795 tmp, step[n]);
3796 tmp = convert (gfc_array_index_type, tmp);
3797
3798 size = fold_build2_loc (input_location, MULT_EXPR, gfc_array_index_type,
3799 size, tmp);
3800 }
3801
3802 /* Record the nvar and size of current forall level. */
3803 info->nvar = nvar;
3804 info->size = size;
3805
3806 if (code->expr1)
3807 {
3808 /* If the mask is .true., consider the FORALL unconditional. */
3809 if (code->expr1->expr_type == EXPR_CONSTANT
3810 && code->expr1->value.logical)
3811 need_mask = false;
3812 else
3813 need_mask = true;
3814 }
3815 else
3816 need_mask = false;
3817
3818 /* First we need to allocate the mask. */
3819 if (need_mask)
3820 {
3821 /* As the mask array can be very big, prefer compact boolean types. */
3822 tree mask_type = gfc_get_logical_type (gfc_logical_kinds[0].kind);
3823 mask = allocate_temp_for_forall_nest (nested_forall_info, mask_type,
3824 size, NULL, &block, &pmask);
3825 maskindex = gfc_create_var_np (gfc_array_index_type, "mi");
3826
3827 /* Record them in the info structure. */
3828 info->maskindex = maskindex;
3829 info->mask = mask;
3830 }
3831 else
3832 {
3833 /* No mask was specified. */
3834 maskindex = NULL_TREE;
3835 mask = pmask = NULL_TREE;
3836 }
3837
3838 /* Link the current forall level to nested_forall_info. */
3839 info->prev_nest = nested_forall_info;
3840 nested_forall_info = info;
3841
3842 /* Copy the mask into a temporary variable if required.
3843 For now we assume a mask temporary is needed. */
3844 if (need_mask)
3845 {
3846 /* As the mask array can be very big, prefer compact boolean types. */
3847 tree mask_type = gfc_get_logical_type (gfc_logical_kinds[0].kind);
3848
3849 gfc_add_modify (&block, maskindex, gfc_index_zero_node);
3850
3851 /* Start of mask assignment loop body. */
3852 gfc_start_block (&body);
3853
3854 /* Evaluate the mask expression. */
3855 gfc_init_se (&se, NULL);
3856 gfc_conv_expr_val (&se, code->expr1);
3857 gfc_add_block_to_block (&body, &se.pre);
3858
3859 /* Store the mask. */
3860 se.expr = convert (mask_type, se.expr);
3861
3862 tmp = gfc_build_array_ref (mask, maskindex, NULL);
3863 gfc_add_modify (&body, tmp, se.expr);
3864
3865 /* Advance to the next mask element. */
3866 tmp = fold_build2_loc (input_location, PLUS_EXPR, gfc_array_index_type,
3867 maskindex, gfc_index_one_node);
3868 gfc_add_modify (&body, maskindex, tmp);
3869
3870 /* Generate the loops. */
3871 tmp = gfc_finish_block (&body);
3872 tmp = gfc_trans_nested_forall_loop (info, tmp, 0);
3873 gfc_add_expr_to_block (&block, tmp);
3874 }
3875
3876 if (code->op == EXEC_DO_CONCURRENT)
3877 {
3878 gfc_init_block (&body);
3879 cycle_label = gfc_build_label_decl (NULL_TREE);
3880 code->cycle_label = cycle_label;
3881 tmp = gfc_trans_code (code->block->next);
3882 gfc_add_expr_to_block (&body, tmp);
3883
3884 if (TREE_USED (cycle_label))
3885 {
3886 tmp = build1_v (LABEL_EXPR, cycle_label);
3887 gfc_add_expr_to_block (&body, tmp);
3888 }
3889
3890 tmp = gfc_finish_block (&body);
3891 nested_forall_info->do_concurrent = true;
3892 tmp = gfc_trans_nested_forall_loop (nested_forall_info, tmp, 1);
3893 gfc_add_expr_to_block (&block, tmp);
3894 goto done;
3895 }
3896
3897 c = code->block->next;
3898
3899 /* TODO: loop merging in FORALL statements. */
3900 /* Now that we've got a copy of the mask, generate the assignment loops. */
3901 while (c)
3902 {
3903 switch (c->op)
3904 {
3905 case EXEC_ASSIGN:
3906 /* A scalar or array assignment. Do the simple check for
3907 lhs-to-rhs dependencies. These make a temporary for the
3908 rhs and form a second forall block to copy to the variable. */
3909 need_temp = check_forall_dependencies (c, &pre, &post);
3910
3911 /* Temporaries due to array assignment data dependencies introduce
3912 no end of problems. */
3913 if (need_temp)
3914 gfc_trans_assign_need_temp (c->expr1, c->expr2, NULL, false,
3915 nested_forall_info, &block);
3916 else
3917 {
3918 /* Use the normal assignment copying routines. */
3919 assign = gfc_trans_assignment (c->expr1, c->expr2, false, true);
3920
3921 /* Generate body and loops. */
3922 tmp = gfc_trans_nested_forall_loop (nested_forall_info,
3923 assign, 1);
3924 gfc_add_expr_to_block (&block, tmp);
3925 }
3926
3927 /* Clean up any temporary symtrees that have been made to deal
3928 with dependencies. */
3929 if (new_symtree)
3930 cleanup_forall_symtrees (c);
3931
3932 break;
3933
3934 case EXEC_WHERE:
3935 /* Translate WHERE or WHERE construct nested in FORALL. */
3936 gfc_trans_where_2 (c, NULL, false, nested_forall_info, &block);
3937 break;
3938
3939 /* Pointer assignment inside FORALL. */
3940 case EXEC_POINTER_ASSIGN:
3941 need_temp = gfc_check_dependency (c->expr1, c->expr2, 0);
3942 if (need_temp)
3943 gfc_trans_pointer_assign_need_temp (c->expr1, c->expr2,
3944 nested_forall_info, &block);
3945 else
3946 {
3947 /* Use the normal assignment copying routines. */
3948 assign = gfc_trans_pointer_assignment (c->expr1, c->expr2);
3949
3950 /* Generate body and loops. */
3951 tmp = gfc_trans_nested_forall_loop (nested_forall_info,
3952 assign, 1);
3953 gfc_add_expr_to_block (&block, tmp);
3954 }
3955 break;
3956
3957 case EXEC_FORALL:
3958 tmp = gfc_trans_forall_1 (c, nested_forall_info);
3959 gfc_add_expr_to_block (&block, tmp);
3960 break;
3961
3962 /* Explicit subroutine calls are prevented by the frontend but interface
3963 assignments can legitimately produce them. */
3964 case EXEC_ASSIGN_CALL:
3965 assign = gfc_trans_call (c, true, NULL_TREE, NULL_TREE, false);
3966 tmp = gfc_trans_nested_forall_loop (nested_forall_info, assign, 1);
3967 gfc_add_expr_to_block (&block, tmp);
3968 break;
3969
3970 default:
3971 gcc_unreachable ();
3972 }
3973
3974 c = c->next;
3975 }
3976
3977 done:
3978 /* Restore the original index variables. */
3979 for (fa = code->ext.forall_iterator, n = 0; fa; fa = fa->next, n++)
3980 gfc_restore_sym (fa->var->symtree->n.sym, &saved_vars[n]);
3981
3982 /* Free the space for var, start, end, step, varexpr. */
3983 free (var);
3984 free (start);
3985 free (end);
3986 free (step);
3987 free (varexpr);
3988 free (saved_vars);
3989
3990 for (this_forall = info->this_loop; this_forall;)
3991 {
3992 iter_info *next = this_forall->next;
3993 free (this_forall);
3994 this_forall = next;
3995 }
3996
3997 /* Free the space for this forall_info. */
3998 free (info);
3999
4000 if (pmask)
4001 {
4002 /* Free the temporary for the mask. */
4003 tmp = gfc_call_free (pmask);
4004 gfc_add_expr_to_block (&block, tmp);
4005 }
4006 if (maskindex)
4007 pushdecl (maskindex);
4008
4009 gfc_add_block_to_block (&pre, &block);
4010 gfc_add_block_to_block (&pre, &post);
4011
4012 return gfc_finish_block (&pre);
4013 }
4014
4015
4016 /* Translate the FORALL statement or construct. */
4017
4018 tree
gfc_trans_forall (gfc_code * code)
4019 {
4020 return gfc_trans_forall_1 (code, NULL);
4021 }
4022
4023
4024 /* Translate the DO CONCURRENT construct. */
4025
4026 tree
gfc_trans_do_concurrent (gfc_code * code)
4027 {
4028 return gfc_trans_forall_1 (code, NULL);
4029 }
4030
4031
4032 /* Evaluate the WHERE mask expression and copy its value to a temporary.
4033 If the WHERE construct is nested in FORALL, the overall temporary is
4034 sized as the WHERE mask expression size multiplied by the iteration
4035 count of the nested forall.
4036 ME is the WHERE mask expression.
4037 MASK is the current execution mask upon input, whose sense may or may
4038 not be inverted as specified by the INVERT argument.
4039 CMASK is the updated execution mask on output, or NULL if not required.
4040 PMASK is the pending execution mask on output, or NULL if not required.
4041 BLOCK is the block in which to place the condition evaluation loops. */
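
/* Schematically, for each element the loop body built below computes

     cmask[count] = mask[count] .and. me
     pmask[count] = mask[count] .and. .not. me

   where mask[count] is negated first if INVERT is set, and the mask
   factor is dropped when MASK is NULL.  */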
4042
4043 static void
4044 gfc_evaluate_where_mask (gfc_expr * me, forall_info * nested_forall_info,
4045 tree mask, bool invert, tree cmask, tree pmask,
4046 tree mask_type, stmtblock_t * block)
4047 {
4048 tree tmp, tmp1;
4049 gfc_ss *lss, *rss;
4050 gfc_loopinfo loop;
4051 stmtblock_t body, body1;
4052 tree count, cond, mtmp;
4053 gfc_se lse, rse;
4054
4055 gfc_init_loopinfo (&loop);
4056
4057 lss = gfc_walk_expr (me);
4058 rss = gfc_walk_expr (me);
4059
4060 /* Variable to index the temporary. */
4061 count = gfc_create_var (gfc_array_index_type, "count");
4062 /* Initialize count. */
4063 gfc_add_modify (block, count, gfc_index_zero_node);
4064
4065 gfc_start_block (&body);
4066
4067 gfc_init_se (&rse, NULL);
4068 gfc_init_se (&lse, NULL);
4069
4070 if (lss == gfc_ss_terminator)
4071 {
4072 gfc_init_block (&body1);
4073 }
4074 else
4075 {
4076 /* Initialize the loop. */
4077 gfc_init_loopinfo (&loop);
4078
4079 /* We may need LSS to determine the shape of the expression. */
4080 gfc_add_ss_to_loop (&loop, lss);
4081 gfc_add_ss_to_loop (&loop, rss);
4082
4083 gfc_conv_ss_startstride (&loop);
4084 gfc_conv_loop_setup (&loop, &me->where);
4085
4086 gfc_mark_ss_chain_used (rss, 1);
4087 /* Start the loop body. */
4088 gfc_start_scalarized_body (&loop, &body1);
4089
4090 /* Translate the expression. */
4091 gfc_copy_loopinfo_to_se (&rse, &loop);
4092 rse.ss = rss;
4093 gfc_conv_expr (&rse, me);
4094 }
4095
4096 /* Variable to evaluate mask condition. */
4097 cond = gfc_create_var (mask_type, "cond");
4098 if (mask && (cmask || pmask))
4099 mtmp = gfc_create_var (mask_type, "mask");
4100 else mtmp = NULL_TREE;
4101
4102 gfc_add_block_to_block (&body1, &lse.pre);
4103 gfc_add_block_to_block (&body1, &rse.pre);
4104
4105 gfc_add_modify (&body1, cond, fold_convert (mask_type, rse.expr));
4106
4107 if (mask && (cmask || pmask))
4108 {
4109 tmp = gfc_build_array_ref (mask, count, NULL);
4110 if (invert)
4111 tmp = fold_build1_loc (input_location, TRUTH_NOT_EXPR, mask_type, tmp);
4112 gfc_add_modify (&body1, mtmp, tmp);
4113 }
4114
4115 if (cmask)
4116 {
4117 tmp1 = gfc_build_array_ref (cmask, count, NULL);
4118 tmp = cond;
4119 if (mask)
4120 tmp = fold_build2_loc (input_location, TRUTH_AND_EXPR, mask_type,
4121 mtmp, tmp);
4122 gfc_add_modify (&body1, tmp1, tmp);
4123 }
4124
4125 if (pmask)
4126 {
4127 tmp1 = gfc_build_array_ref (pmask, count, NULL);
4128 tmp = fold_build1_loc (input_location, TRUTH_NOT_EXPR, mask_type, cond);
4129 if (mask)
4130 tmp = fold_build2_loc (input_location, TRUTH_AND_EXPR, mask_type, mtmp,
4131 tmp);
4132 gfc_add_modify (&body1, tmp1, tmp);
4133 }
4134
4135 gfc_add_block_to_block (&body1, &lse.post);
4136 gfc_add_block_to_block (&body1, &rse.post);
4137
4138 if (lss == gfc_ss_terminator)
4139 {
4140 gfc_add_block_to_block (&body, &body1);
4141 }
4142 else
4143 {
4144 /* Increment count. */
4145 tmp1 = fold_build2_loc (input_location, PLUS_EXPR, gfc_array_index_type,
4146 count, gfc_index_one_node);
4147 gfc_add_modify (&body1, count, tmp1);
4148
4149 /* Generate the copying loops. */
4150 gfc_trans_scalarizing_loops (&loop, &body1);
4151
4152 gfc_add_block_to_block (&body, &loop.pre);
4153 gfc_add_block_to_block (&body, &loop.post);
4154
4155 gfc_cleanup_loop (&loop);
4156 /* TODO: Reuse lss and rss when copying temp->lhs. Need to be careful
4157 as tree nodes in SS may not be valid in a different scope. */
4158 }
4159
4160 tmp1 = gfc_finish_block (&body);
4161 /* If the WHERE construct is inside FORALL, fill the full temporary. */
4162 if (nested_forall_info != NULL)
4163 tmp1 = gfc_trans_nested_forall_loop (nested_forall_info, tmp1, 1);
4164
4165 gfc_add_expr_to_block (block, tmp1);
4166 }
4167
4168
4169 /* Translate an assignment statement in a WHERE statement or WHERE
4170 construct. The MASK expression is used to control which elements
4171 of EXPR1 shall be assigned. The sense of MASK is specified by
4172 INVERT. */
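
/* Roughly, the scalarized loop body generated here is

     if (mask[count1]) lhs = rhs;
     count1 = count1 + 1;

   with an additional masked copy from the scalarizer's temporary,
   indexed by count2, when loop.temp_ss is needed.  */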
4173
4174 static tree
4175 gfc_trans_where_assign (gfc_expr *expr1, gfc_expr *expr2,
4176 tree mask, bool invert,
4177 tree count1, tree count2,
4178 gfc_code *cnext)
4179 {
4180 gfc_se lse;
4181 gfc_se rse;
4182 gfc_ss *lss;
4183 gfc_ss *lss_section;
4184 gfc_ss *rss;
4185
4186 gfc_loopinfo loop;
4187 tree tmp;
4188 stmtblock_t block;
4189 stmtblock_t body;
4190 tree index, maskexpr;
4191
4192 /* A defined assignment. */
4193 if (cnext && cnext->resolved_sym)
4194 return gfc_trans_call (cnext, true, mask, count1, invert);
4195
4196 #if 0
4197 /* TODO: handle this special case.
4198 Special case a single function returning an array. */
4199 if (expr2->expr_type == EXPR_FUNCTION && expr2->rank > 0)
4200 {
4201 tmp = gfc_trans_arrayfunc_assign (expr1, expr2);
4202 if (tmp)
4203 return tmp;
4204 }
4205 #endif
4206
4207 /* Assignment of the form lhs = rhs. */
4208 gfc_start_block (&block);
4209
4210 gfc_init_se (&lse, NULL);
4211 gfc_init_se (&rse, NULL);
4212
4213 /* Walk the lhs. */
4214 lss = gfc_walk_expr (expr1);
4215 rss = NULL;
4216
4217 /* In each where-assign-stmt, the mask-expr and the variable being
4218 defined shall be arrays of the same shape. */
4219 gcc_assert (lss != gfc_ss_terminator);
4220
4221 /* The assignment needs scalarization. */
4222 lss_section = lss;
4223
4224 /* Find a non-scalar SS from the lhs. */
4225 while (lss_section != gfc_ss_terminator
4226 && lss_section->info->type != GFC_SS_SECTION)
4227 lss_section = lss_section->next;
4228
4229 gcc_assert (lss_section != gfc_ss_terminator);
4230
4231 /* Initialize the scalarizer. */
4232 gfc_init_loopinfo (&loop);
4233
4234 /* Walk the rhs. */
4235 rss = gfc_walk_expr (expr2);
4236 if (rss == gfc_ss_terminator)
4237 {
4238 /* The rhs is scalar. Add a ss for the expression. */
4239 rss = gfc_get_scalar_ss (gfc_ss_terminator, expr2);
4240 rss->info->where = 1;
4241 }
4242
4243 /* Associate the SS with the loop. */
4244 gfc_add_ss_to_loop (&loop, lss);
4245 gfc_add_ss_to_loop (&loop, rss);
4246
4247 /* Calculate the bounds of the scalarization. */
4248 gfc_conv_ss_startstride (&loop);
4249
4250 /* Resolve any data dependencies in the statement. */
4251 gfc_conv_resolve_dependencies (&loop, lss_section, rss);
4252
4253 /* Setup the scalarizing loops. */
4254 gfc_conv_loop_setup (&loop, &expr2->where);
4255
4256 /* Setup the gfc_se structures. */
4257 gfc_copy_loopinfo_to_se (&lse, &loop);
4258 gfc_copy_loopinfo_to_se (&rse, &loop);
4259
4260 rse.ss = rss;
4261 gfc_mark_ss_chain_used (rss, 1);
4262 if (loop.temp_ss == NULL)
4263 {
4264 lse.ss = lss;
4265 gfc_mark_ss_chain_used (lss, 1);
4266 }
4267 else
4268 {
4269 lse.ss = loop.temp_ss;
4270 gfc_mark_ss_chain_used (lss, 3);
4271 gfc_mark_ss_chain_used (loop.temp_ss, 3);
4272 }
4273
4274 /* Start the scalarized loop body. */
4275 gfc_start_scalarized_body (&loop, &body);
4276
4277 /* Translate the expression. */
4278 gfc_conv_expr (&rse, expr2);
4279 if (lss != gfc_ss_terminator && loop.temp_ss != NULL)
4280 gfc_conv_tmp_array_ref (&lse);
4281 else
4282 gfc_conv_expr (&lse, expr1);
4283
4284 /* Form the mask expression according to the mask. */
4285 index = count1;
4286 maskexpr = gfc_build_array_ref (mask, index, NULL);
4287 if (invert)
4288 maskexpr = fold_build1_loc (input_location, TRUTH_NOT_EXPR,
4289 TREE_TYPE (maskexpr), maskexpr);
4290
4291 /* Use the scalar assignment as is. */
4292 tmp = gfc_trans_scalar_assign (&lse, &rse, expr1->ts,
4293 loop.temp_ss != NULL, false, true);
4294
4295 tmp = build3_v (COND_EXPR, maskexpr, tmp, build_empty_stmt (input_location));
4296
4297 gfc_add_expr_to_block (&body, tmp);
4298
4299 if (lss == gfc_ss_terminator)
4300 {
4301 /* Increment count1. */
4302 tmp = fold_build2_loc (input_location, PLUS_EXPR, gfc_array_index_type,
4303 count1, gfc_index_one_node);
4304 gfc_add_modify (&body, count1, tmp);
4305
4306 /* Use the scalar assignment as is. */
4307 gfc_add_block_to_block (&block, &body);
4308 }
4309 else
4310 {
4311 gcc_assert (lse.ss == gfc_ss_terminator
4312 && rse.ss == gfc_ss_terminator);
4313
4314 if (loop.temp_ss != NULL)
4315 {
4316 /* Increment count1 before finishing the main body of a scalarized
4317 expression. */
4318 tmp = fold_build2_loc (input_location, PLUS_EXPR,
4319 gfc_array_index_type, count1, gfc_index_one_node);
4320 gfc_add_modify (&body, count1, tmp);
4321 gfc_trans_scalarized_loop_boundary (&loop, &body);
4322
4323 /* We need to copy the temporary to the actual lhs. */
4324 gfc_init_se (&lse, NULL);
4325 gfc_init_se (&rse, NULL);
4326 gfc_copy_loopinfo_to_se (&lse, &loop);
4327 gfc_copy_loopinfo_to_se (&rse, &loop);
4328
4329 rse.ss = loop.temp_ss;
4330 lse.ss = lss;
4331
4332 gfc_conv_tmp_array_ref (&rse);
4333 gfc_conv_expr (&lse, expr1);
4334
4335 gcc_assert (lse.ss == gfc_ss_terminator
4336 && rse.ss == gfc_ss_terminator);
4337
4338 /* Form the mask expression according to the mask tree list. */
4339 index = count2;
4340 maskexpr = gfc_build_array_ref (mask, index, NULL);
4341 if (invert)
4342 maskexpr = fold_build1_loc (input_location, TRUTH_NOT_EXPR,
4343 TREE_TYPE (maskexpr), maskexpr);
4344
4345 /* Use the scalar assignment as is. */
4346 tmp = gfc_trans_scalar_assign (&lse, &rse, expr1->ts, false, false,
4347 true);
4348 tmp = build3_v (COND_EXPR, maskexpr, tmp,
4349 build_empty_stmt (input_location));
4350 gfc_add_expr_to_block (&body, tmp);
4351
4352 /* Increment count2. */
4353 tmp = fold_build2_loc (input_location, PLUS_EXPR,
4354 gfc_array_index_type, count2,
4355 gfc_index_one_node);
4356 gfc_add_modify (&body, count2, tmp);
4357 }
4358 else
4359 {
4360 /* Increment count1. */
4361 tmp = fold_build2_loc (input_location, PLUS_EXPR,
4362 gfc_array_index_type, count1,
4363 gfc_index_one_node);
4364 gfc_add_modify (&body, count1, tmp);
4365 }
4366
4367 /* Generate the copying loops. */
4368 gfc_trans_scalarizing_loops (&loop, &body);
4369
4370 /* Wrap the whole thing up. */
4371 gfc_add_block_to_block (&block, &loop.pre);
4372 gfc_add_block_to_block (&block, &loop.post);
4373 gfc_cleanup_loop (&loop);
4374 }
4375
4376 return gfc_finish_block (&block);
4377 }
4378
4379
4380 /* Translate the WHERE construct or statement.
4381 This function can be called iteratively to translate the nested WHERE
4382 construct or statement.
4383 MASK is the control mask. */
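
/* As a rough sketch, a construct such as

     where (a > 0)
       b = c
     elsewhere
       b = d
     end where

   evaluates the mask expression once into a control mask; the first
   assignment then runs in a scalarized loop where that mask is true,
   and the ELSEWHERE assignment runs in a second loop with the sense of
   the same mask inverted.  */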
4384
4385 static void
4386 gfc_trans_where_2 (gfc_code * code, tree mask, bool invert,
4387 forall_info * nested_forall_info, stmtblock_t * block)
4388 {
4389 stmtblock_t inner_size_body;
4390 tree inner_size, size;
4391 gfc_ss *lss, *rss;
4392 tree mask_type;
4393 gfc_expr *expr1;
4394 gfc_expr *expr2;
4395 gfc_code *cblock;
4396 gfc_code *cnext;
4397 tree tmp;
4398 tree cond;
4399 tree count1, count2;
4400 bool need_cmask;
4401 bool need_pmask;
4402 int need_temp;
4403 tree pcmask = NULL_TREE;
4404 tree ppmask = NULL_TREE;
4405 tree cmask = NULL_TREE;
4406 tree pmask = NULL_TREE;
4407 gfc_actual_arglist *arg;
4408
4409 /* The WHERE statement or the first clause of the WHERE construct. */
4410 cblock = code->block;
4411
4412 /* As the mask array can be very big, prefer compact boolean types. */
4413 mask_type = gfc_get_logical_type (gfc_logical_kinds[0].kind);
4414
4415 /* Determine which temporary masks are needed. */
4416 if (!cblock->block)
4417 {
4418 /* One clause: No ELSEWHEREs. */
4419 need_cmask = (cblock->next != 0);
4420 need_pmask = false;
4421 }
4422 else if (cblock->block->block)
4423 {
4424 /* Three or more clauses: Conditional ELSEWHEREs. */
4425 need_cmask = true;
4426 need_pmask = true;
4427 }
4428 else if (cblock->next)
4429 {
4430 /* Two clauses, the first non-empty. */
4431 need_cmask = true;
4432 need_pmask = (mask != NULL_TREE
4433 && cblock->block->next != 0);
4434 }
4435 else if (!cblock->block->next)
4436 {
4437 /* Two clauses, both empty. */
4438 need_cmask = false;
4439 need_pmask = false;
4440 }
4441 /* Two clauses, the first empty, the second non-empty. */
4442 else if (mask)
4443 {
4444 need_cmask = (cblock->block->expr1 != 0);
4445 need_pmask = true;
4446 }
4447 else
4448 {
4449 need_cmask = true;
4450 need_pmask = false;
4451 }
4452
4453 if (need_cmask || need_pmask)
4454 {
4455 /* Calculate the size of temporary needed by the mask-expr. */
4456 gfc_init_block (&inner_size_body);
4457 inner_size = compute_inner_temp_size (cblock->expr1, cblock->expr1,
4458 &inner_size_body, &lss, &rss);
4459
4460 gfc_free_ss_chain (lss);
4461 gfc_free_ss_chain (rss);
4462
4463 /* Calculate the total size of temporary needed. */
4464 size = compute_overall_iter_number (nested_forall_info, inner_size,
4465 &inner_size_body, block);
4466
4467 /* Check whether the size is negative. */
4468 cond = fold_build2_loc (input_location, LE_EXPR, boolean_type_node, size,
4469 gfc_index_zero_node);
4470 size = fold_build3_loc (input_location, COND_EXPR, gfc_array_index_type,
4471 cond, gfc_index_zero_node, size);
4472 size = gfc_evaluate_now (size, block);
4473
4474 /* Allocate temporary for WHERE mask if needed. */
4475 if (need_cmask)
4476 cmask = allocate_temp_for_forall_nest_1 (mask_type, size, block,
4477 &pcmask);
4478
4479 /* Allocate temporary for !mask if needed. */
4480 if (need_pmask)
4481 pmask = allocate_temp_for_forall_nest_1 (mask_type, size, block,
4482 &ppmask);
4483 }
4484
4485 while (cblock)
4486 {
4487 /* Each time around this loop, the where clause is conditional
4488 on the value of mask and invert, which are updated at the
4489 bottom of the loop. */
4490
4491 /* Has mask-expr. */
4492 if (cblock->expr1)
4493 {
4494 /* Ensure that the WHERE mask will be evaluated exactly once.
4495 If there are no statements in this WHERE/ELSEWHERE clause,
4496 then we don't need to update the control mask (cmask).
4497 If this is the last clause of the WHERE construct, then
4498 we don't need to update the pending control mask (pmask). */
4499 if (mask)
4500 gfc_evaluate_where_mask (cblock->expr1, nested_forall_info,
4501 mask, invert,
4502 cblock->next ? cmask : NULL_TREE,
4503 cblock->block ? pmask : NULL_TREE,
4504 mask_type, block);
4505 else
4506 gfc_evaluate_where_mask (cblock->expr1, nested_forall_info,
4507 NULL_TREE, false,
4508 (cblock->next || cblock->block)
4509 ? cmask : NULL_TREE,
4510 NULL_TREE, mask_type, block);
4511
4512 invert = false;
4513 }
4514 /* It's a final elsewhere-stmt. No mask-expr is present. */
4515 else
4516 cmask = mask;
4517
4518 /* The body of this where clause is controlled by cmask with
4519 sense specified by invert. */
4520
4521 /* Get the assignment statement of a WHERE statement, or the first
4522 statement in the where-body-construct of a WHERE construct. */
4523 cnext = cblock->next;
4524 while (cnext)
4525 {
4526 switch (cnext->op)
4527 {
4528 /* WHERE assignment statement. */
4529 case EXEC_ASSIGN_CALL:
4530
4531 arg = cnext->ext.actual;
4532 expr1 = expr2 = NULL;
4533 for (; arg; arg = arg->next)
4534 {
4535 if (!arg->expr)
4536 continue;
4537 if (expr1 == NULL)
4538 expr1 = arg->expr;
4539 else
4540 expr2 = arg->expr;
4541 }
4542 goto evaluate;
4543
4544 case EXEC_ASSIGN:
4545 expr1 = cnext->expr1;
4546 expr2 = cnext->expr2;
4547 evaluate:
4548 if (nested_forall_info != NULL)
4549 {
4550 need_temp = gfc_check_dependency (expr1, expr2, 0);
4551 if (need_temp && cnext->op != EXEC_ASSIGN_CALL)
4552 gfc_trans_assign_need_temp (expr1, expr2,
4553 cmask, invert,
4554 nested_forall_info, block);
4555 else
4556 {
4557 /* Variables to control maskexpr. */
4558 count1 = gfc_create_var (gfc_array_index_type, "count1");
4559 count2 = gfc_create_var (gfc_array_index_type, "count2");
4560 gfc_add_modify (block, count1, gfc_index_zero_node);
4561 gfc_add_modify (block, count2, gfc_index_zero_node);
4562
4563 tmp = gfc_trans_where_assign (expr1, expr2,
4564 cmask, invert,
4565 count1, count2,
4566 cnext);
4567
4568 tmp = gfc_trans_nested_forall_loop (nested_forall_info,
4569 tmp, 1);
4570 gfc_add_expr_to_block (block, tmp);
4571 }
4572 }
4573 else
4574 {
4575 /* Variables to control maskexpr. */
4576 count1 = gfc_create_var (gfc_array_index_type, "count1");
4577 count2 = gfc_create_var (gfc_array_index_type, "count2");
4578 gfc_add_modify (block, count1, gfc_index_zero_node);
4579 gfc_add_modify (block, count2, gfc_index_zero_node);
4580
4581 tmp = gfc_trans_where_assign (expr1, expr2,
4582 cmask, invert,
4583 count1, count2,
4584 cnext);
4585 gfc_add_expr_to_block (block, tmp);
4586
4587 }
4588 break;
4589
4590 /* WHERE or WHERE construct is part of a where-body-construct. */
4591 case EXEC_WHERE:
4592 gfc_trans_where_2 (cnext, cmask, invert,
4593 nested_forall_info, block);
4594 break;
4595
4596 default:
4597 gcc_unreachable ();
4598 }
4599
4600 /* The next statement within the same where-body-construct. */
4601 cnext = cnext->next;
4602 }
4603 /* The next masked-elsewhere-stmt, elsewhere-stmt, or end-where-stmt. */
4604 cblock = cblock->block;
4605 if (mask == NULL_TREE)
4606 {
4607 /* If we're the initial WHERE, we can simply invert the sense
4608 of the current mask to obtain the "mask" for the remaining
4609 ELSEWHEREs. */
4610 invert = true;
4611 mask = cmask;
4612 }
4613 else
4614 {
4615 /* Otherwise, for nested WHEREs we need to use the pending mask. */
4616 invert = false;
4617 mask = pmask;
4618 }
4619 }
4620
4621 /* If we allocated a pending mask array, deallocate it now. */
4622 if (ppmask)
4623 {
4624 tmp = gfc_call_free (ppmask);
4625 gfc_add_expr_to_block (block, tmp);
4626 }
4627
4628 /* If we allocated a current mask array, deallocate it now. */
4629 if (pcmask)
4630 {
4631 tmp = gfc_call_free (pcmask);
4632 gfc_add_expr_to_block (block, tmp);
4633 }
4634 }
4635
4636 /* Translate a simple WHERE construct or statement without dependencies.
4637 CBLOCK is the "then" clause of the WHERE statement, where CBLOCK->EXPR1
4638 is the mask condition, and EBLOCK if non-NULL is the "else" clause.
4639 Currently both CBLOCK and EBLOCK are restricted to single assignments. */
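
/* The two clauses are coalesced into a single scalarized loop whose body
   is roughly

     if (cond) tdst = tsrc; else edst = esrc;

   which is only safe because the caller has already verified that there
   are no dependencies between the clauses.  */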
4640
4641 static tree
4642 gfc_trans_where_3 (gfc_code * cblock, gfc_code * eblock)
4643 {
4644 stmtblock_t block, body;
4645 gfc_expr *cond, *tdst, *tsrc, *edst, *esrc;
4646 tree tmp, cexpr, tstmt, estmt;
4647 gfc_ss *css, *tdss, *tsss;
4648 gfc_se cse, tdse, tsse, edse, esse;
4649 gfc_loopinfo loop;
4650 gfc_ss *edss = 0;
4651 gfc_ss *esss = 0;
4652
4653 /* Allow the scalarizer to workshare simple where loops. */
4654 if (ompws_flags & OMPWS_WORKSHARE_FLAG)
4655 ompws_flags |= OMPWS_SCALARIZER_WS;
4656
4657 cond = cblock->expr1;
4658 tdst = cblock->next->expr1;
4659 tsrc = cblock->next->expr2;
4660 edst = eblock ? eblock->next->expr1 : NULL;
4661 esrc = eblock ? eblock->next->expr2 : NULL;
4662
4663 gfc_start_block (&block);
4664 gfc_init_loopinfo (&loop);
4665
4666 /* Handle the condition. */
4667 gfc_init_se (&cse, NULL);
4668 css = gfc_walk_expr (cond);
4669 gfc_add_ss_to_loop (&loop, css);
4670
4671 /* Handle the then-clause. */
4672 gfc_init_se (&tdse, NULL);
4673 gfc_init_se (&tsse, NULL);
4674 tdss = gfc_walk_expr (tdst);
4675 tsss = gfc_walk_expr (tsrc);
4676 if (tsss == gfc_ss_terminator)
4677 {
4678 tsss = gfc_get_scalar_ss (gfc_ss_terminator, tsrc);
4679 tsss->info->where = 1;
4680 }
4681 gfc_add_ss_to_loop (&loop, tdss);
4682 gfc_add_ss_to_loop (&loop, tsss);
4683
4684 if (eblock)
4685 {
4686 /* Handle the else clause. */
4687 gfc_init_se (&edse, NULL);
4688 gfc_init_se (&esse, NULL);
4689 edss = gfc_walk_expr (edst);
4690 esss = gfc_walk_expr (esrc);
4691 if (esss == gfc_ss_terminator)
4692 {
4693 esss = gfc_get_scalar_ss (gfc_ss_terminator, esrc);
4694 esss->info->where = 1;
4695 }
4696 gfc_add_ss_to_loop (&loop, edss);
4697 gfc_add_ss_to_loop (&loop, esss);
4698 }
4699
4700 gfc_conv_ss_startstride (&loop);
4701 gfc_conv_loop_setup (&loop, &tdst->where);
4702
4703 gfc_mark_ss_chain_used (css, 1);
4704 gfc_mark_ss_chain_used (tdss, 1);
4705 gfc_mark_ss_chain_used (tsss, 1);
4706 if (eblock)
4707 {
4708 gfc_mark_ss_chain_used (edss, 1);
4709 gfc_mark_ss_chain_used (esss, 1);
4710 }
4711
4712 gfc_start_scalarized_body (&loop, &body);
4713
4714 gfc_copy_loopinfo_to_se (&cse, &loop);
4715 gfc_copy_loopinfo_to_se (&tdse, &loop);
4716 gfc_copy_loopinfo_to_se (&tsse, &loop);
4717 cse.ss = css;
4718 tdse.ss = tdss;
4719 tsse.ss = tsss;
4720 if (eblock)
4721 {
4722 gfc_copy_loopinfo_to_se (&edse, &loop);
4723 gfc_copy_loopinfo_to_se (&esse, &loop);
4724 edse.ss = edss;
4725 esse.ss = esss;
4726 }
4727
4728 gfc_conv_expr (&cse, cond);
4729 gfc_add_block_to_block (&body, &cse.pre);
4730 cexpr = cse.expr;
4731
4732 gfc_conv_expr (&tsse, tsrc);
4733 if (tdss != gfc_ss_terminator && loop.temp_ss != NULL)
4734 gfc_conv_tmp_array_ref (&tdse);
4735 else
4736 gfc_conv_expr (&tdse, tdst);
4737
4738 if (eblock)
4739 {
4740 gfc_conv_expr (&esse, esrc);
4741 if (edss != gfc_ss_terminator && loop.temp_ss != NULL)
4742 gfc_conv_tmp_array_ref (&edse);
4743 else
4744 gfc_conv_expr (&edse, edst);
4745 }
4746
4747 tstmt = gfc_trans_scalar_assign (&tdse, &tsse, tdst->ts, false, false, true);
4748 estmt = eblock ? gfc_trans_scalar_assign (&edse, &esse, edst->ts, false,
4749 false, true)
4750 : build_empty_stmt (input_location);
4751 tmp = build3_v (COND_EXPR, cexpr, tstmt, estmt);
4752 gfc_add_expr_to_block (&body, tmp);
4753 gfc_add_block_to_block (&body, &cse.post);
4754
4755 gfc_trans_scalarizing_loops (&loop, &body);
4756 gfc_add_block_to_block (&block, &loop.pre);
4757 gfc_add_block_to_block (&block, &loop.post);
4758 gfc_cleanup_loop (&loop);
4759
4760 return gfc_finish_block (&block);
4761 }
4762
4763 /* As the WHERE statement or WHERE construct can be nested, we call
4764 gfc_trans_where_2 to do the translation, and pass the initial
4765 NULL values for both the control mask and the pending control mask. */
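
/* Dependence-free single-assignment forms, for example

     WHERE (a > 0) b = a

   (with a and b non-overlapping), are detected below and handed to
   gfc_trans_where_3; everything else goes through the general
   gfc_trans_where_2 machinery.  */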
4766
4767 tree
4768 gfc_trans_where (gfc_code * code)
4769 {
4770 stmtblock_t block;
4771 gfc_code *cblock;
4772 gfc_code *eblock;
4773
4774 cblock = code->block;
4775 if (cblock->next
4776 && cblock->next->op == EXEC_ASSIGN
4777 && !cblock->next->next)
4778 {
4779 eblock = cblock->block;
4780 if (!eblock)
4781 {
4782 /* A simple "WHERE (cond) x = y" statement or block is
4783 dependence free if cond is not dependent upon writing x,
4784 and the source y is unaffected by the destination x. */
4785 if (!gfc_check_dependency (cblock->next->expr1,
4786 cblock->expr1, 0)
4787 && !gfc_check_dependency (cblock->next->expr1,
4788 cblock->next->expr2, 0))
4789 return gfc_trans_where_3 (cblock, NULL);
4790 }
4791 else if (!eblock->expr1
4792 && !eblock->block
4793 && eblock->next
4794 && eblock->next->op == EXEC_ASSIGN
4795 && !eblock->next->next)
4796 {
4797 /* A simple "WHERE (cond) x1 = y1 ELSEWHERE x2 = y2 ENDWHERE"
4798 block is dependence free if cond is not dependent on writes
4799 to x1 and x2, y1 is not dependent on writes to x2, and y2
4800 is not dependent on writes to x1, and both y's are not
4801 dependent upon their own x's. In addition to this, the
4802 final two dependency checks below exclude all but the same
4803 array reference if the where and elsewhere destinations
4804 are the same. In short, this is VERY conservative and this
4805 is needed because the two loops required by the standard
4806 are coalesced in gfc_trans_where_3.
4807 if (!gfc_check_dependency (cblock->next->expr1,
4808 cblock->expr1, 0)
4809 && !gfc_check_dependency (eblock->next->expr1,
4810 cblock->expr1, 0)
4811 && !gfc_check_dependency (cblock->next->expr1,
4812 eblock->next->expr2, 1)
4813 && !gfc_check_dependency (eblock->next->expr1,
4814 cblock->next->expr2, 1)
4815 && !gfc_check_dependency (cblock->next->expr1,
4816 cblock->next->expr2, 1)
4817 && !gfc_check_dependency (eblock->next->expr1,
4818 eblock->next->expr2, 1)
4819 && !gfc_check_dependency (cblock->next->expr1,
4820 eblock->next->expr1, 0)
4821 && !gfc_check_dependency (eblock->next->expr1,
4822 cblock->next->expr1, 0))
4823 return gfc_trans_where_3 (cblock, eblock);
4824 }
4825 }
4826
4827 gfc_start_block (&block);
4828
4829 gfc_trans_where_2 (code, NULL, false, NULL, &block);
4830
4831 return gfc_finish_block (&block);
4832 }
4833
4834
4835 /* CYCLE a DO loop. The label decl has already been created by
4836 gfc_trans_do (); it is stored in the cycle_label field of the
4837 enclosing construct's gfc_code node. We must mark the label as used. */
4838
4839 tree
4840 gfc_trans_cycle (gfc_code * code)
4841 {
4842 tree cycle_label;
4843
4844 cycle_label = code->ext.which_construct->cycle_label;
4845 gcc_assert (cycle_label);
4846
4847 TREE_USED (cycle_label) = 1;
4848 return build1_v (GOTO_EXPR, cycle_label);
4849 }
4850
4851
4852 /* EXIT a DO loop. Similar to CYCLE, but now the label is stored in
4853 the exit_label field of the gfc_code node of the enclosing
4854 construct. */
4855
4856 tree
4857 gfc_trans_exit (gfc_code * code)
4858 {
4859 tree exit_label;
4860
4861 exit_label = code->ext.which_construct->exit_label;
4862 gcc_assert (exit_label);
4863
4864 TREE_USED (exit_label) = 1;
4865 return build1_v (GOTO_EXPR, exit_label);
4866 }
4867
4868
4869 /* Translate the ALLOCATE statement. */
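
/* In outline, for each object on the allocation list: work out the
   allocation size in bytes (from the type spec, the SOURCE=/MOLD=
   expression, or a deferred character length), emit the allocation
   itself, set the _vptr of CLASS objects, and copy in any SOURCE=
   value; STAT= and ERRMSG= are serviced through the stat variable and
   the error labels.  */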
4870
4871 tree
4872 gfc_trans_allocate (gfc_code * code)
4873 {
4874 gfc_alloc *al;
4875 gfc_expr *e;
4876 gfc_expr *expr;
4877 gfc_se se;
4878 tree tmp;
4879 tree parm;
4880 tree stat;
4881 tree errmsg;
4882 tree errlen;
4883 tree label_errmsg;
4884 tree label_finish;
4885 tree memsz;
4886 tree expr3;
4887 tree slen3;
4888 stmtblock_t block;
4889 stmtblock_t post;
4890 gfc_expr *sz;
4891 gfc_se se_sz;
4892 tree class_expr;
4893 tree nelems;
4894 tree memsize = NULL_TREE;
4895 tree classexpr = NULL_TREE;
4896
4897 if (!code->ext.alloc.list)
4898 return NULL_TREE;
4899
4900 stat = tmp = memsz = NULL_TREE;
4901 label_errmsg = label_finish = errmsg = errlen = NULL_TREE;
4902
4903 gfc_init_block (&block);
4904 gfc_init_block (&post);
4905
4906 /* STAT= (and maybe ERRMSG=) is present. */
4907 if (code->expr1)
4908 {
4909 /* STAT=. */
4910 tree gfc_int4_type_node = gfc_get_int_type (4);
4911 stat = gfc_create_var (gfc_int4_type_node, "stat");
4912
4913 /* ERRMSG= only makes sense with STAT=. */
4914 if (code->expr2)
4915 {
4916 gfc_init_se (&se, NULL);
4917 se.want_pointer = 1;
4918 gfc_conv_expr_lhs (&se, code->expr2);
4919 errmsg = se.expr;
4920 errlen = se.string_length;
4921 }
4922 else
4923 {
4924 errmsg = null_pointer_node;
4925 errlen = build_int_cst (gfc_charlen_type_node, 0);
4926 }
4927
4928 /* GOTO destinations. */
4929 label_errmsg = gfc_build_label_decl (NULL_TREE);
4930 label_finish = gfc_build_label_decl (NULL_TREE);
4931 TREE_USED (label_finish) = 0;
4932 }
4933
4934 expr3 = NULL_TREE;
4935 slen3 = NULL_TREE;
4936
4937 for (al = code->ext.alloc.list; al != NULL; al = al->next)
4938 {
4939 expr = gfc_copy_expr (al->expr);
4940
4941 if (expr->ts.type == BT_CLASS)
4942 gfc_add_data_component (expr);
4943
4944 gfc_init_se (&se, NULL);
4945
4946 se.want_pointer = 1;
4947 se.descriptor_only = 1;
4948 gfc_conv_expr (&se, expr);
4949
4950 /* Evaluate expr3 just once if not a variable. */
4951 if (al == code->ext.alloc.list
4952 && al->expr->ts.type == BT_CLASS
4953 && code->expr3
4954 && code->expr3->ts.type == BT_CLASS
4955 && code->expr3->expr_type != EXPR_VARIABLE)
4956 {
4957 gfc_init_se (&se_sz, NULL);
4958 gfc_conv_expr_reference (&se_sz, code->expr3);
4959 gfc_conv_class_to_class (&se_sz, code->expr3,
4960 code->expr3->ts, false, true, false, false);
4961 gfc_add_block_to_block (&se.pre, &se_sz.pre);
4962 gfc_add_block_to_block (&se.post, &se_sz.post);
4963 classexpr = build_fold_indirect_ref_loc (input_location,
4964 se_sz.expr);
4965 classexpr = gfc_evaluate_now (classexpr, &se.pre);
4966 memsize = gfc_vtable_size_get (classexpr);
4967 memsize = fold_convert (sizetype, memsize);
4968 }
4969
4970 memsz = memsize;
4971 class_expr = classexpr;
4972
4973 nelems = NULL_TREE;
4974 if (!gfc_array_allocate (&se, expr, stat, errmsg, errlen, label_finish,
4975 memsz, &nelems, code->expr3, &code->ext.alloc.ts))
4976 {
4977 bool unlimited_char;
4978
4979 unlimited_char = UNLIMITED_POLY (al->expr)
4980 && ((code->expr3 && code->expr3->ts.type == BT_CHARACTER)
4981 || (code->ext.alloc.ts.type == BT_CHARACTER
4982 && code->ext.alloc.ts.u.cl
4983 && code->ext.alloc.ts.u.cl->length));
4984
4985 /* A scalar or derived type. */
4986
4987 /* Determine allocate size. */
4988 if (al->expr->ts.type == BT_CLASS
4989 && !unlimited_char
4990 && code->expr3
4991 && memsz == NULL_TREE)
4992 {
4993 if (code->expr3->ts.type == BT_CLASS)
4994 {
4995 sz = gfc_copy_expr (code->expr3);
4996 gfc_add_vptr_component (sz);
4997 gfc_add_size_component (sz);
4998 gfc_init_se (&se_sz, NULL);
4999 gfc_conv_expr (&se_sz, sz);
5000 gfc_free_expr (sz);
5001 memsz = se_sz.expr;
5002 }
5003 else
5004 memsz = TYPE_SIZE_UNIT (gfc_typenode_for_spec (&code->expr3->ts));
5005 }
5006 else if (((al->expr->ts.type == BT_CHARACTER && al->expr->ts.deferred)
5007 || unlimited_char) && code->expr3)
5008 {
5009 if (!code->expr3->ts.u.cl->backend_decl)
5010 {
5011 /* Convert and use the length expression. */
5012 gfc_init_se (&se_sz, NULL);
5013 if (code->expr3->expr_type == EXPR_VARIABLE
5014 || code->expr3->expr_type == EXPR_CONSTANT)
5015 {
5016 gfc_conv_expr (&se_sz, code->expr3);
5017 gfc_add_block_to_block (&se.pre, &se_sz.pre);
5018 se_sz.string_length
5019 = gfc_evaluate_now (se_sz.string_length, &se.pre);
5020 gfc_add_block_to_block (&se.pre, &se_sz.post);
5021 memsz = se_sz.string_length;
5022 }
5023 else if (code->expr3->mold
5024 && code->expr3->ts.u.cl
5025 && code->expr3->ts.u.cl->length)
5026 {
5027 gfc_conv_expr (&se_sz, code->expr3->ts.u.cl->length);
5028 gfc_add_block_to_block (&se.pre, &se_sz.pre);
5029 se_sz.expr = gfc_evaluate_now (se_sz.expr, &se.pre);
5030 gfc_add_block_to_block (&se.pre, &se_sz.post);
5031 memsz = se_sz.expr;
5032 }
5033 else
5034 {
5035 /* This would be inefficient and possibly could
5036 generate wrong code if the result were not stored
5037 in expr3/slen3. */
5038 if (slen3 == NULL_TREE)
5039 {
5040 gfc_conv_expr (&se_sz, code->expr3);
5041 gfc_add_block_to_block (&se.pre, &se_sz.pre);
5042 expr3 = gfc_evaluate_now (se_sz.expr, &se.pre);
5043 gfc_add_block_to_block (&post, &se_sz.post);
5044 slen3 = gfc_evaluate_now (se_sz.string_length,
5045 &se.pre);
5046 }
5047 memsz = slen3;
5048 }
5049 }
5050 else
5051 /* Otherwise use the stored string length. */
5052 memsz = code->expr3->ts.u.cl->backend_decl;
5053 tmp = al->expr->ts.u.cl->backend_decl;
5054
5055 /* Store the string length. */
5056 if (tmp && TREE_CODE (tmp) == VAR_DECL)
5057 gfc_add_modify (&se.pre, tmp, fold_convert (TREE_TYPE (tmp),
5058 memsz));
5059 else if (al->expr->ts.type == BT_CHARACTER
5060 && al->expr->ts.deferred && se.string_length)
5061 gfc_add_modify (&se.pre, se.string_length,
5062 fold_convert (TREE_TYPE (se.string_length),
5063 memsz));
5064
5065 /* Convert to size in bytes, using the character KIND. */
5066 if (unlimited_char)
5067 tmp = TREE_TYPE (gfc_typenode_for_spec (&code->expr3->ts));
5068 else
5069 tmp = TREE_TYPE (gfc_typenode_for_spec (&al->expr->ts));
5070 tmp = TYPE_SIZE_UNIT (tmp);
5071 memsz = fold_build2_loc (input_location, MULT_EXPR,
5072 TREE_TYPE (tmp), tmp,
5073 fold_convert (TREE_TYPE (tmp), memsz));
5074 }
5075 else if ((al->expr->ts.type == BT_CHARACTER && al->expr->ts.deferred)
5076 || unlimited_char)
5077 {
5078 gcc_assert (code->ext.alloc.ts.u.cl && code->ext.alloc.ts.u.cl->length);
5079 gfc_init_se (&se_sz, NULL);
5080 gfc_conv_expr (&se_sz, code->ext.alloc.ts.u.cl->length);
5081 gfc_add_block_to_block (&se.pre, &se_sz.pre);
5082 se_sz.expr = gfc_evaluate_now (se_sz.expr, &se.pre);
5083 gfc_add_block_to_block (&se.pre, &se_sz.post);
5084 /* Store the string length. */
5085 tmp = al->expr->ts.u.cl->backend_decl;
5086 gfc_add_modify (&se.pre, tmp, fold_convert (TREE_TYPE (tmp),
5087 se_sz.expr));
5088 tmp = TREE_TYPE (gfc_typenode_for_spec (&code->ext.alloc.ts));
5089 tmp = TYPE_SIZE_UNIT (tmp);
5090 memsz = fold_build2_loc (input_location, MULT_EXPR,
5091 TREE_TYPE (tmp), tmp,
5092 fold_convert (TREE_TYPE (se_sz.expr),
5093 se_sz.expr));
5094 }
5095 else if (code->ext.alloc.ts.type != BT_UNKNOWN)
5096 memsz = TYPE_SIZE_UNIT (gfc_typenode_for_spec (&code->ext.alloc.ts));
5097 else if (memsz == NULL_TREE)
5098 memsz = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (se.expr)));
5099
5100 if (expr->ts.type == BT_CHARACTER && memsz == NULL_TREE)
5101 {
5102 memsz = se.string_length;
5103
5104 /* Convert to size in bytes, using the character KIND. */
5105 tmp = TREE_TYPE (gfc_typenode_for_spec (&code->ext.alloc.ts));
5106 tmp = TYPE_SIZE_UNIT (tmp);
5107 memsz = fold_build2_loc (input_location, MULT_EXPR,
5108 TREE_TYPE (tmp), tmp,
5109 fold_convert (TREE_TYPE (tmp), memsz));
5110 }
5111
5112 /* Allocate - for non-pointers with re-alloc checking. */
5113 if (gfc_expr_attr (expr).allocatable)
5114 gfc_allocate_allocatable (&se.pre, se.expr, memsz, NULL_TREE,
5115 stat, errmsg, errlen, label_finish, expr);
5116 else
5117 gfc_allocate_using_malloc (&se.pre, se.expr, memsz, stat);
5118
5119 if (al->expr->ts.type == BT_DERIVED
5120 && expr->ts.u.derived->attr.alloc_comp)
5121 {
5122 tmp = build_fold_indirect_ref_loc (input_location, se.expr);
5123 tmp = gfc_nullify_alloc_comp (expr->ts.u.derived, tmp, 0);
5124 gfc_add_expr_to_block (&se.pre, tmp);
5125 }
5126 }
5127
5128 gfc_add_block_to_block (&block, &se.pre);
5129
5130 /* Error checking -- Note: ERRMSG only makes sense with STAT. */
5131 if (code->expr1)
5132 {
5133 tmp = build1_v (GOTO_EXPR, label_errmsg);
5134 parm = fold_build2_loc (input_location, NE_EXPR,
5135 boolean_type_node, stat,
5136 build_int_cst (TREE_TYPE (stat), 0));
5137 tmp = fold_build3_loc (input_location, COND_EXPR, void_type_node,
5138 gfc_unlikely (parm, PRED_FORTRAN_FAIL_ALLOC),
5139 tmp, build_empty_stmt (input_location));
5140 gfc_add_expr_to_block (&block, tmp);
5141 }
5142
5143 /* We need the vptr of CLASS objects to be initialized. */
5144 e = gfc_copy_expr (al->expr);
5145 if (e->ts.type == BT_CLASS)
5146 {
5147 gfc_expr *lhs, *rhs;
5148 gfc_se lse;
5149 gfc_ref *ref, *class_ref, *tail;
5150
5151 /* Find the last class reference. */
5152 class_ref = NULL;
5153 for (ref = e->ref; ref; ref = ref->next)
5154 {
5155 if (ref->type == REF_COMPONENT
5156 && ref->u.c.component->ts.type == BT_CLASS)
5157 class_ref = ref;
5158
5159 if (ref->next == NULL)
5160 break;
5161 }
5162
5163 /* Remove and store all subsequent references after the
5164 CLASS reference. */
5165 if (class_ref)
5166 {
5167 tail = class_ref->next;
5168 class_ref->next = NULL;
5169 }
5170 else
5171 {
5172 tail = e->ref;
5173 e->ref = NULL;
5174 }
5175
5176 lhs = gfc_expr_to_initialize (e);
5177 gfc_add_vptr_component (lhs);
5178
5179 /* Remove the _vptr component and restore the original tail
5180 references. */
5181 if (class_ref)
5182 {
5183 gfc_free_ref_list (class_ref->next);
5184 class_ref->next = tail;
5185 }
5186 else
5187 {
5188 gfc_free_ref_list (e->ref);
5189 e->ref = tail;
5190 }
5191
5192 if (class_expr != NULL_TREE)
5193 {
5194 /* Polymorphic SOURCE: VPTR must be determined at run time. */
5195 gfc_init_se (&lse, NULL);
5196 lse.want_pointer = 1;
5197 gfc_conv_expr (&lse, lhs);
5198 tmp = gfc_class_vptr_get (class_expr);
5199 gfc_add_modify (&block, lse.expr,
5200 fold_convert (TREE_TYPE (lse.expr), tmp));
5201 }
5202 else if (code->expr3 && code->expr3->ts.type == BT_CLASS)
5203 {
5204 /* Polymorphic SOURCE: VPTR must be determined at run time. */
5205 rhs = gfc_copy_expr (code->expr3);
5206 gfc_add_vptr_component (rhs);
5207 tmp = gfc_trans_pointer_assignment (lhs, rhs);
5208 gfc_add_expr_to_block (&block, tmp);
5209 gfc_free_expr (rhs);
5210 rhs = gfc_expr_to_initialize (e);
5211 }
5212 else
5213 {
5214 /* VPTR is fixed at compile time. */
5215 gfc_symbol *vtab;
5216 gfc_typespec *ts;
5217 if (code->expr3)
5218 ts = &code->expr3->ts;
5219 else if (e->ts.type == BT_DERIVED)
5220 ts = &e->ts;
5221 else if (code->ext.alloc.ts.type == BT_DERIVED || UNLIMITED_POLY (al->expr))
5222 ts = &code->ext.alloc.ts;
5223 else if (e->ts.type == BT_CLASS)
5224 ts = &CLASS_DATA (e)->ts;
5225 else
5226 ts = &e->ts;
5227
5228 if (ts->type == BT_DERIVED || UNLIMITED_POLY (e))
5229 {
5230 vtab = gfc_find_vtab (ts);
5231 gcc_assert (vtab);
5232 gfc_init_se (&lse, NULL);
5233 lse.want_pointer = 1;
5234 gfc_conv_expr (&lse, lhs);
5235 tmp = gfc_build_addr_expr (NULL_TREE,
5236 gfc_get_symbol_decl (vtab));
5237 gfc_add_modify (&block, lse.expr,
5238 fold_convert (TREE_TYPE (lse.expr), tmp));
5239 }
5240 }
5241 gfc_free_expr (lhs);
5242 }
5243
5244 gfc_free_expr (e);
5245
5246 if (code->expr3 && !code->expr3->mold)
5247 {
5248 /* Initialization via SOURCE block
5249 (or static default initializer). */
5250 gfc_expr *rhs = gfc_copy_expr (code->expr3);
5251 if (class_expr != NULL_TREE)
5252 {
5253 tree to;
5254 to = TREE_OPERAND (se.expr, 0);
5255
5256 tmp = gfc_copy_class_to_class (class_expr, to, nelems);
5257 }
5258 else if (al->expr->ts.type == BT_CLASS)
5259 {
5260 gfc_actual_arglist *actual;
5261 gfc_expr *ppc;
5262 gfc_code *ppc_code;
5263 gfc_ref *ref, *dataref;
5264
5265 /* Do a polymorphic deep copy. */
5266 actual = gfc_get_actual_arglist ();
5267 actual->expr = gfc_copy_expr (rhs);
5268 if (rhs->ts.type == BT_CLASS)
5269 gfc_add_data_component (actual->expr);
5270 actual->next = gfc_get_actual_arglist ();
5271 actual->next->expr = gfc_copy_expr (al->expr);
5272 actual->next->expr->ts.type = BT_CLASS;
5273 gfc_add_data_component (actual->next->expr);
5274
5275 dataref = NULL;
5276 /* Make sure we go up through the reference chain to
5277 the _data reference, where the arrayspec is found. */
5278 for (ref = actual->next->expr->ref; ref; ref = ref->next)
5279 if (ref->type == REF_COMPONENT
5280 && strcmp (ref->u.c.component->name, "_data") == 0)
5281 dataref = ref;
5282
5283 if (dataref && dataref->u.c.component->as)
5284 {
5285 int dim;
5286 gfc_expr *temp;
5287 gfc_ref *ref = dataref->next;
5288 ref->u.ar.type = AR_SECTION;
5289 /* We have to set up the array reference to give ranges
5290 in all dimensions and ensure that the end and stride
5291 are set so that the copy can be scalarized. */
5292 dim = 0;
5293 for (; dim < dataref->u.c.component->as->rank; dim++)
5294 {
5295 ref->u.ar.dimen_type[dim] = DIMEN_RANGE;
5296 if (ref->u.ar.end[dim] == NULL)
5297 {
5298 ref->u.ar.end[dim] = ref->u.ar.start[dim];
5299 temp = gfc_get_int_expr (gfc_default_integer_kind,
5300 &al->expr->where, 1);
5301 ref->u.ar.start[dim] = temp;
5302 }
5303 temp = gfc_subtract (gfc_copy_expr (ref->u.ar.end[dim]),
5304 gfc_copy_expr (ref->u.ar.start[dim]));
5305 temp = gfc_add (gfc_get_int_expr (gfc_default_integer_kind,
5306 &al->expr->where, 1),
5307 temp);
5308 }
5309 }
5310 if (rhs->ts.type == BT_CLASS)
5311 {
5312 ppc = gfc_copy_expr (rhs);
5313 gfc_add_vptr_component (ppc);
5314 }
5315 else
5316 ppc = gfc_lval_expr_from_sym (gfc_find_vtab (&rhs->ts));
5317 gfc_add_component_ref (ppc, "_copy");
5318
5319 ppc_code = gfc_get_code (EXEC_CALL);
5320 ppc_code->resolved_sym = ppc->symtree->n.sym;
5321 /* Although '_copy' is set to be elemental in class.c, it is
5322 not staying that way. Find out why, sometime.... */
5323 ppc_code->resolved_sym->attr.elemental = 1;
5324 ppc_code->ext.actual = actual;
5325 ppc_code->expr1 = ppc;
5326 /* Since '_copy' is elemental, the scalarizer will take care
5327 of arrays in gfc_trans_call. */
5328 tmp = gfc_trans_call (ppc_code, true, NULL, NULL, false);
5329 gfc_free_statements (ppc_code);
5330 }
5331 else if (expr3 != NULL_TREE)
5332 {
5333 tmp = build_fold_indirect_ref_loc (input_location, se.expr);
5334 gfc_trans_string_copy (&block, slen3, tmp, code->expr3->ts.kind,
5335 slen3, expr3, code->expr3->ts.kind);
5336 tmp = NULL_TREE;
5337 }
5338 else
5339 {
5340 /* Switch off automatic reallocation since we have just done
5341 the ALLOCATE. */
5342 int realloc_lhs = flag_realloc_lhs;
5343 flag_realloc_lhs = 0;
5344 tmp = gfc_trans_assignment (gfc_expr_to_initialize (expr),
5345 rhs, false, false);
5346 flag_realloc_lhs = realloc_lhs;
5347 }
5348 gfc_free_expr (rhs);
5349 gfc_add_expr_to_block (&block, tmp);
5350 }
5351 else if (code->expr3 && code->expr3->mold
5352 && code->expr3->ts.type == BT_CLASS)
5353 {
5354 /* Since the _vptr has already been assigned to the allocate
5355 object, we can use gfc_copy_class_to_class in its
5356 initialization mode. */
5357 tmp = TREE_OPERAND (se.expr, 0);
5358 tmp = gfc_copy_class_to_class (NULL_TREE, tmp, nelems);
5359 gfc_add_expr_to_block (&block, tmp);
5360 }
5361
5362 gfc_free_expr (expr);
5363 }
5364
5365 /* STAT. */
5366 if (code->expr1)
5367 {
5368 tmp = build1_v (LABEL_EXPR, label_errmsg);
5369 gfc_add_expr_to_block (&block, tmp);
5370 }
5371
5372 /* ERRMSG - only useful if STAT is present. */
5373 if (code->expr1 && code->expr2)
5374 {
5375 const char *msg = "Attempt to allocate an allocated object";
5376 tree slen, dlen, errmsg_str;
5377 stmtblock_t errmsg_block;
5378
5379 gfc_init_block (&errmsg_block);
5380
5381 errmsg_str = gfc_create_var (pchar_type_node, "ERRMSG");
5382 gfc_add_modify (&errmsg_block, errmsg_str,
5383 gfc_build_addr_expr (pchar_type_node,
5384 gfc_build_localized_cstring_const (msg)));
5385
5386 slen = build_int_cst (gfc_charlen_type_node, ((int) strlen (msg)));
5387 dlen = gfc_get_expr_charlen (code->expr2);
5388 slen = fold_build2_loc (input_location, MIN_EXPR, TREE_TYPE (slen), dlen,
5389 slen);
5390
5391 gfc_trans_string_copy (&errmsg_block, dlen, errmsg, code->expr2->ts.kind,
5392 slen, errmsg_str, gfc_default_character_kind);
5393 dlen = gfc_finish_block (&errmsg_block);
5394
5395 tmp = fold_build2_loc (input_location, NE_EXPR, boolean_type_node, stat,
5396 build_int_cst (TREE_TYPE (stat), 0));
5397
5398 tmp = build3_v (COND_EXPR, tmp, dlen, build_empty_stmt (input_location));
5399
5400 gfc_add_expr_to_block (&block, tmp);
5401 }
5402
5403 /* STAT block. */
5404 if (code->expr1)
5405 {
5406 if (TREE_USED (label_finish))
5407 {
5408 tmp = build1_v (LABEL_EXPR, label_finish);
5409 gfc_add_expr_to_block (&block, tmp);
5410 }
5411
5412 gfc_init_se (&se, NULL);
5413 gfc_conv_expr_lhs (&se, code->expr1);
5414 tmp = convert (TREE_TYPE (se.expr), stat);
5415 gfc_add_modify (&block, se.expr, tmp);
5416 }
5417
5418 gfc_add_block_to_block (&block, &se.post);
5419 gfc_add_block_to_block (&block, &post);
5420
5421 return gfc_finish_block (&block);
5422 }
5423
5424
5425 /* Translate a DEALLOCATE statement. */
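
/* In outline, each object on the list is deallocated - arrays and
   coarrays via gfc_array_deallocate, scalars via
   gfc_deallocate_scalar_with_status - after freeing allocatable
   components where required; the _vptr of CLASS objects is reset and
   STAT=/ERRMSG= are serviced through the stat variable and the error
   labels.  */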
5426
5427 tree
5428 gfc_trans_deallocate (gfc_code *code)
5429 {
5430 gfc_se se;
5431 gfc_alloc *al;
5432 tree apstat, pstat, stat, errmsg, errlen, tmp;
5433 tree label_finish, label_errmsg;
5434 stmtblock_t block;
5435
5436 pstat = apstat = stat = errmsg = errlen = tmp = NULL_TREE;
5437 label_finish = label_errmsg = NULL_TREE;
5438
5439 gfc_start_block (&block);
5440
5441 /* Count the number of failed deallocations. If deallocate() was
5442 called with STAT=, then set STAT to the count. If deallocate
5443 was called with ERRMSG=, then set ERRMSG to a string. */
5444 if (code->expr1)
5445 {
5446 tree gfc_int4_type_node = gfc_get_int_type (4);
5447
5448 stat = gfc_create_var (gfc_int4_type_node, "stat");
5449 pstat = gfc_build_addr_expr (NULL_TREE, stat);
5450
5451 /* GOTO destinations. */
5452 label_errmsg = gfc_build_label_decl (NULL_TREE);
5453 label_finish = gfc_build_label_decl (NULL_TREE);
5454 TREE_USED (label_finish) = 0;
5455 }
5456
5457 /* Set ERRMSG - only needed if STAT is available. */
5458 if (code->expr1 && code->expr2)
5459 {
5460 gfc_init_se (&se, NULL);
5461 se.want_pointer = 1;
5462 gfc_conv_expr_lhs (&se, code->expr2);
5463 errmsg = se.expr;
5464 errlen = se.string_length;
5465 }
5466
5467 for (al = code->ext.alloc.list; al != NULL; al = al->next)
5468 {
5469 gfc_expr *expr = gfc_copy_expr (al->expr);
5470 gcc_assert (expr->expr_type == EXPR_VARIABLE);
5471
5472 if (expr->ts.type == BT_CLASS)
5473 gfc_add_data_component (expr);
5474
5475 gfc_init_se (&se, NULL);
5476 gfc_start_block (&se.pre);
5477
5478 se.want_pointer = 1;
5479 se.descriptor_only = 1;
5480 gfc_conv_expr (&se, expr);
5481
5482 if (expr->rank || gfc_is_coarray (expr))
5483 {
5484 if (expr->ts.type == BT_DERIVED && expr->ts.u.derived->attr.alloc_comp
5485 && !gfc_is_finalizable (expr->ts.u.derived, NULL))
5486 {
5487 gfc_ref *ref;
5488 gfc_ref *last = NULL;
5489 for (ref = expr->ref; ref; ref = ref->next)
5490 if (ref->type == REF_COMPONENT)
5491 last = ref;
5492
5493 /* Do not deallocate the components of a derived type
5494 ultimate pointer component. */
5495 if (!(last && last->u.c.component->attr.pointer)
5496 && !(!last && expr->symtree->n.sym->attr.pointer))
5497 {
5498 tmp = gfc_deallocate_alloc_comp (expr->ts.u.derived, se.expr,
5499 expr->rank);
5500 gfc_add_expr_to_block (&se.pre, tmp);
5501 }
5502 }
5503 tmp = gfc_array_deallocate (se.expr, pstat, errmsg, errlen,
5504 label_finish, expr);
5505 gfc_add_expr_to_block (&se.pre, tmp);
5506 if (al->expr->ts.type == BT_CLASS)
5507 gfc_reset_vptr (&se.pre, al->expr);
5508 }
5509 else
5510 {
5511 tmp = gfc_deallocate_scalar_with_status (se.expr, pstat, false,
5512 al->expr, al->expr->ts);
5513 gfc_add_expr_to_block (&se.pre, tmp);
5514
5515 /* Set to zero after deallocation. */
5516 tmp = fold_build2_loc (input_location, MODIFY_EXPR, void_type_node,
5517 se.expr,
5518 build_int_cst (TREE_TYPE (se.expr), 0));
5519 gfc_add_expr_to_block (&se.pre, tmp);
5520
5521 if (al->expr->ts.type == BT_CLASS)
5522 gfc_reset_vptr (&se.pre, al->expr);
5523 }
5524
5525 if (code->expr1)
5526 {
5527 tree cond;
5528
5529 cond = fold_build2_loc (input_location, NE_EXPR, boolean_type_node, stat,
5530 build_int_cst (TREE_TYPE (stat), 0));
5531 tmp = fold_build3_loc (input_location, COND_EXPR, void_type_node,
5532 gfc_unlikely (cond, PRED_FORTRAN_FAIL_ALLOC),
5533 build1_v (GOTO_EXPR, label_errmsg),
5534 build_empty_stmt (input_location));
5535 gfc_add_expr_to_block (&se.pre, tmp);
5536 }
5537
5538 tmp = gfc_finish_block (&se.pre);
5539 gfc_add_expr_to_block (&block, tmp);
5540 gfc_free_expr (expr);
5541 }
5542
5543 if (code->expr1)
5544 {
5545 tmp = build1_v (LABEL_EXPR, label_errmsg);
5546 gfc_add_expr_to_block (&block, tmp);
5547 }
5548
5549 /* Set ERRMSG - only needed if STAT is available. */
5550 if (code->expr1 && code->expr2)
5551 {
5552 const char *msg = "Attempt to deallocate an unallocated object";
5553 stmtblock_t errmsg_block;
5554 tree errmsg_str, slen, dlen, cond;
5555
5556 gfc_init_block (&errmsg_block);
5557
5558 errmsg_str = gfc_create_var (pchar_type_node, "ERRMSG");
5559 gfc_add_modify (&errmsg_block, errmsg_str,
5560 gfc_build_addr_expr (pchar_type_node,
5561 gfc_build_localized_cstring_const (msg)));
5562 slen = build_int_cst (gfc_charlen_type_node, ((int) strlen (msg)));
5563 dlen = gfc_get_expr_charlen (code->expr2);
5564
5565 gfc_trans_string_copy (&errmsg_block, dlen, errmsg, code->expr2->ts.kind,
5566 slen, errmsg_str, gfc_default_character_kind);
5567 tmp = gfc_finish_block (&errmsg_block);
5568
5569 cond = fold_build2_loc (input_location, NE_EXPR, boolean_type_node, stat,
5570 build_int_cst (TREE_TYPE (stat), 0));
5571 tmp = fold_build3_loc (input_location, COND_EXPR, void_type_node,
5572 gfc_unlikely (cond, PRED_FORTRAN_FAIL_ALLOC), tmp,
5573 build_empty_stmt (input_location));
5574
5575 gfc_add_expr_to_block (&block, tmp);
5576 }
5577
5578 if (code->expr1 && TREE_USED (label_finish))
5579 {
5580 tmp = build1_v (LABEL_EXPR, label_finish);
5581 gfc_add_expr_to_block (&block, tmp);
5582 }
5583
5584 /* Set STAT. */
5585 if (code->expr1)
5586 {
5587 gfc_init_se (&se, NULL);
5588 gfc_conv_expr_lhs (&se, code->expr1);
5589 tmp = convert (TREE_TYPE (se.expr), stat);
5590 gfc_add_modify (&block, se.expr, tmp);
5591 }
5592
5593 return gfc_finish_block (&block);
5594 }
5595
5596 #include "gt-fortran-trans-stmt.h"