1 /* Statement translation -- generate GCC trees from gfc_code.
2 Copyright (C) 2002, 2003, 2004, 2005, 2006, 2007
3 Free Software Foundation, Inc.
4 Contributed by Paul Brook <paul@nowt.org>
5 and Steven Bosscher <s.bosscher@student.tudelft.nl>
6
7 This file is part of GCC.
8
9 GCC is free software; you can redistribute it and/or modify it under
10 the terms of the GNU General Public License as published by the Free
11 Software Foundation; either version 3, or (at your option) any later
12 version.
13
14 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
15 WARRANTY; without even the implied warranty of MERCHANTABILITY or
16 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 for more details.
18
19 You should have received a copy of the GNU General Public License
20 along with GCC; see the file COPYING3. If not see
21 <http://www.gnu.org/licenses/>. */
22
23
24 #include "config.h"
25 #include "system.h"
26 #include "coretypes.h"
27 #include "tree.h"
28 #include "tree-gimple.h"
29 #include "ggc.h"
30 #include "toplev.h"
31 #include "real.h"
32 #include "gfortran.h"
33 #include "flags.h"
34 #include "trans.h"
35 #include "trans-stmt.h"
36 #include "trans-types.h"
37 #include "trans-array.h"
38 #include "trans-const.h"
39 #include "arith.h"
40 #include "dependency.h"
41
42 typedef struct iter_info
43 {
44 tree var;
45 tree start;
46 tree end;
47 tree step;
48 struct iter_info *next;
49 }
50 iter_info;
51
52 typedef struct forall_info
53 {
54 iter_info *this_loop;
55 tree mask;
56 tree maskindex;
57 int nvar;
58 tree size;
59 struct forall_info *prev_nest;
60 }
61 forall_info;
62
63 static void gfc_trans_where_2 (gfc_code *, tree, bool,
64 forall_info *, stmtblock_t *);
65
66 /* Translate an F95 label number to a LABEL_EXPR. */
67
68 tree
69 gfc_trans_label_here (gfc_code * code)
70 {
71 return build1_v (LABEL_EXPR, gfc_get_label_decl (code->here));
72 }
73
74
75 /* Given a variable expression which has been ASSIGNed to, find the decl
76 containing the auxiliary variables. For variables in common blocks this
77 is a field_decl. */
78
79 void
80 gfc_conv_label_variable (gfc_se * se, gfc_expr * expr)
81 {
82 gcc_assert (expr->symtree->n.sym->attr.assign == 1);
83 gfc_conv_expr (se, expr);
84 /* Deals with variable in common block. Get the field declaration. */
85 if (TREE_CODE (se->expr) == COMPONENT_REF)
86 se->expr = TREE_OPERAND (se->expr, 1);
87 /* Deals with dummy argument. Get the parameter declaration. */
88 else if (TREE_CODE (se->expr) == INDIRECT_REF)
89 se->expr = TREE_OPERAND (se->expr, 0);
90 }
91
92 /* Translate a label assignment statement. */
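/* For illustration (hypothetical example, not taken from the sources):
   the obsolescent statement

       ASSIGN 100 TO L

   stores either the address of label 100 (when 100 is a branch target)
   or the text and length of a FORMAT label in the auxiliary variables
   attached to L, so that a later assigned GOTO or formatted I/O
   statement can use it.  */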
93
94 tree
95 gfc_trans_label_assign (gfc_code * code)
96 {
97 tree label_tree;
98 gfc_se se;
99 tree len;
100 tree addr;
101 tree len_tree;
102 char *label_str;
103 int label_len;
104
105 /* Start a new block. */
106 gfc_init_se (&se, NULL);
107 gfc_start_block (&se.pre);
108 gfc_conv_label_variable (&se, code->expr);
109
110 len = GFC_DECL_STRING_LEN (se.expr);
111 addr = GFC_DECL_ASSIGN_ADDR (se.expr);
112
113 label_tree = gfc_get_label_decl (code->label);
114
115 if (code->label->defined == ST_LABEL_TARGET)
116 {
117 label_tree = gfc_build_addr_expr (pvoid_type_node, label_tree);
118 len_tree = integer_minus_one_node;
119 }
120 else
121 {
122 label_str = code->label->format->value.character.string;
123 label_len = code->label->format->value.character.length;
124 len_tree = build_int_cst (NULL_TREE, label_len);
125 label_tree = gfc_build_string_const (label_len + 1, label_str);
126 label_tree = gfc_build_addr_expr (pvoid_type_node, label_tree);
127 }
128
129 gfc_add_modify_expr (&se.pre, len, len_tree);
130 gfc_add_modify_expr (&se.pre, addr, label_tree);
131
132 return gfc_finish_block (&se.pre);
133 }
134
135 /* Translate a GOTO statement. */
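/* For illustration (hypothetical example): an assigned GOTO such as

       GO TO L, (10, 20, 30)

   branches to whichever label was last ASSIGNed to L; the optional
   label list is what produces the chain of compares built below.  */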
136
137 tree
138 gfc_trans_goto (gfc_code * code)
139 {
140 locus loc = code->loc;
141 tree assigned_goto;
142 tree target;
143 tree tmp;
144 gfc_se se;
145
146 if (code->label != NULL)
147 return build1_v (GOTO_EXPR, gfc_get_label_decl (code->label));
148
149 /* ASSIGNED GOTO. */
150 gfc_init_se (&se, NULL);
151 gfc_start_block (&se.pre);
152 gfc_conv_label_variable (&se, code->expr);
153 tmp = GFC_DECL_STRING_LEN (se.expr);
154 tmp = fold_build2 (NE_EXPR, boolean_type_node, tmp,
155 build_int_cst (TREE_TYPE (tmp), -1));
156 gfc_trans_runtime_check (tmp, &se.pre, &loc,
157 "Assigned label is not a target label");
158
159 assigned_goto = GFC_DECL_ASSIGN_ADDR (se.expr);
160
161 code = code->block;
162 if (code == NULL)
163 {
164 target = build1 (GOTO_EXPR, void_type_node, assigned_goto);
165 gfc_add_expr_to_block (&se.pre, target);
166 return gfc_finish_block (&se.pre);
167 }
168
169 /* Check the label list. */
170 do
171 {
172 target = gfc_get_label_decl (code->label);
173 tmp = gfc_build_addr_expr (pvoid_type_node, target);
174 tmp = build2 (EQ_EXPR, boolean_type_node, tmp, assigned_goto);
175 tmp = build3_v (COND_EXPR, tmp,
176 build1 (GOTO_EXPR, void_type_node, target),
177 build_empty_stmt ());
178 gfc_add_expr_to_block (&se.pre, tmp);
179 code = code->block;
180 }
181 while (code != NULL);
182 gfc_trans_runtime_check (boolean_true_node, &se.pre, &loc,
183 "Assigned label is not in the list");
184
185 return gfc_finish_block (&se.pre);
186 }
187
188
189 /* Translate an ENTRY statement. Just adds a label for this entry point. */
190 tree
191 gfc_trans_entry (gfc_code * code)
192 {
193 return build1_v (LABEL_EXPR, code->ext.entry->label);
194 }
195
196
197 /* Check for dependencies between INTENT(IN) and INTENT(OUT) arguments of
198 elemental subroutines. Make temporaries for output arguments if any such
199 dependencies are found. Output arguments are chosen because internal_unpack
200 can be used, as is, to copy the result back to the variable. */
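/* A sketch of the situation this guards against (hypothetical example):

       ELEMENTAL SUBROUTINE es (a, b)
         INTEGER, INTENT(IN)  :: a
         INTEGER, INTENT(OUT) :: b
       END SUBROUTINE

       CALL es (x(1:10), x(10:1:-1))

   The INTENT(OUT) section of X overlaps the INTENT(IN) section, so the
   result is built in a temporary and copied back with internal_unpack.  */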
201 static void
202 gfc_conv_elemental_dependencies (gfc_se * se, gfc_se * loopse,
203 gfc_symbol * sym, gfc_actual_arglist * arg)
204 {
205 gfc_actual_arglist *arg0;
206 gfc_expr *e;
207 gfc_formal_arglist *formal;
208 gfc_loopinfo tmp_loop;
209 gfc_se parmse;
210 gfc_ss *ss;
211 gfc_ss_info *info;
212 gfc_symbol *fsym;
213 int n;
214 stmtblock_t block;
215 tree data;
216 tree offset;
217 tree size;
218 tree tmp;
219
220 if (loopse->ss == NULL)
221 return;
222
223 ss = loopse->ss;
224 arg0 = arg;
225 formal = sym->formal;
226
227 /* Loop over all the arguments testing for dependencies. */
228 for (; arg != NULL; arg = arg->next, formal = formal ? formal->next : NULL)
229 {
230 e = arg->expr;
231 if (e == NULL)
232 continue;
233
234 /* Obtain the info structure for the current argument. */
235 info = NULL;
236 for (ss = loopse->ss; ss && ss != gfc_ss_terminator; ss = ss->next)
237 {
238 if (ss->expr != e)
239 continue;
240 info = &ss->data.info;
241 break;
242 }
243
244 /* If there is a dependency, create a temporary and use it
245 instead of the variable. */
246 fsym = formal ? formal->sym : NULL;
247 if (e->expr_type == EXPR_VARIABLE
248 && e->rank && fsym
249 && fsym->attr.intent != INTENT_IN
250 && gfc_check_fncall_dependency (e, fsym->attr.intent,
251 sym, arg0))
252 {
253 /* Make a local loopinfo for the temporary creation, so that
254 none of the other ss->info's have to be renormalized. */
255 gfc_init_loopinfo (&tmp_loop);
256 for (n = 0; n < info->dimen; n++)
257 {
258 tmp_loop.to[n] = loopse->loop->to[n];
259 tmp_loop.from[n] = loopse->loop->from[n];
260 tmp_loop.order[n] = loopse->loop->order[n];
261 }
262
263 /* Generate the temporary. Merge the block so that the
264 declarations are put at the right binding level. */
265 size = gfc_create_var (gfc_array_index_type, NULL);
266 data = gfc_create_var (pvoid_type_node, NULL);
267 gfc_start_block (&block);
268 tmp = gfc_typenode_for_spec (&e->ts);
269 tmp = gfc_trans_create_temp_array (&se->pre, &se->post,
270 &tmp_loop, info, tmp,
271 false, true, false);
272 gfc_add_modify_expr (&se->pre, size, tmp);
273 tmp = fold_convert (pvoid_type_node, info->data);
274 gfc_add_modify_expr (&se->pre, data, tmp);
275 gfc_merge_block_scope (&block);
276
277 /* Obtain the argument descriptor for unpacking. */
278 gfc_init_se (&parmse, NULL);
279 parmse.want_pointer = 1;
280 gfc_conv_expr_descriptor (&parmse, e, gfc_walk_expr (e));
281 gfc_add_block_to_block (&se->pre, &parmse.pre);
282
283 /* Calculate the offset for the temporary. */
284 offset = gfc_index_zero_node;
285 for (n = 0; n < info->dimen; n++)
286 {
287 tmp = gfc_conv_descriptor_stride (info->descriptor,
288 gfc_rank_cst[n]);
289 tmp = fold_build2 (MULT_EXPR, gfc_array_index_type,
290 loopse->loop->from[n], tmp);
291 offset = fold_build2 (MINUS_EXPR, gfc_array_index_type,
292 offset, tmp);
293 }
294 info->offset = gfc_create_var (gfc_array_index_type, NULL);
295 gfc_add_modify_expr (&se->pre, info->offset, offset);
296
297 /* Copy the result back using unpack. */
298 tmp = build_call_expr (gfor_fndecl_in_unpack, 2, parmse.expr, data);
299 gfc_add_expr_to_block (&se->post, tmp);
300
301 gfc_add_block_to_block (&se->post, &parmse.post);
302 }
303 }
304 }
305
306
307 /* Translate the CALL statement. Builds a call to an F95 subroutine. */
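/* A call with alternate return specifiers, e.g. (hypothetical example)

       CALL sub (x, *10, *20)

   yields an integer selector; the front end wraps the call in a SELECT
   on a temporary, and the assignment to that temporary is generated in
   the alternate specifier branch below.  */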
308
309 tree
310 gfc_trans_call (gfc_code * code, bool dependency_check)
311 {
312 gfc_se se;
313 gfc_ss * ss;
314 int has_alternate_specifier;
315
316 /* A CALL starts a new block because the actual arguments may have to
317 be evaluated first. */
318 gfc_init_se (&se, NULL);
319 gfc_start_block (&se.pre);
320
321 gcc_assert (code->resolved_sym);
322
323 ss = gfc_ss_terminator;
324 if (code->resolved_sym->attr.elemental)
325 ss = gfc_walk_elemental_function_args (ss, code->ext.actual, GFC_SS_REFERENCE);
326
327 /* This is not an elemental subroutine call with array valued arguments. */
328 if (ss == gfc_ss_terminator)
329 {
330
331 /* Translate the call. */
332 has_alternate_specifier
333 = gfc_conv_function_call (&se, code->resolved_sym, code->ext.actual,
334 NULL_TREE);
335
336 /* A subroutine without side effects, by definition, does nothing! */
337 TREE_SIDE_EFFECTS (se.expr) = 1;
338
339 /* Chain the pieces together and return the block. */
340 if (has_alternate_specifier)
341 {
342 gfc_code *select_code;
343 gfc_symbol *sym;
344 select_code = code->next;
345 gcc_assert(select_code->op == EXEC_SELECT);
346 sym = select_code->expr->symtree->n.sym;
347 se.expr = convert (gfc_typenode_for_spec (&sym->ts), se.expr);
348 if (sym->backend_decl == NULL)
349 sym->backend_decl = gfc_get_symbol_decl (sym);
350 gfc_add_modify_expr (&se.pre, sym->backend_decl, se.expr);
351 }
352 else
353 gfc_add_expr_to_block (&se.pre, se.expr);
354
355 gfc_add_block_to_block (&se.pre, &se.post);
356 }
357
358 else
359 {
360 /* An elemental subroutine call with array valued arguments has
361 to be scalarized. */
362 gfc_loopinfo loop;
363 stmtblock_t body;
364 stmtblock_t block;
365 gfc_se loopse;
366
367 /* gfc_walk_elemental_function_args renders the ss chain in the
368 reverse order to the actual argument order. */
369 ss = gfc_reverse_ss (ss);
370
371 /* Initialize the loop. */
372 gfc_init_se (&loopse, NULL);
373 gfc_init_loopinfo (&loop);
374 gfc_add_ss_to_loop (&loop, ss);
375
376 gfc_conv_ss_startstride (&loop);
377 gfc_conv_loop_setup (&loop);
378 gfc_mark_ss_chain_used (ss, 1);
379
380 /* Convert the arguments, checking for dependencies. */
381 gfc_copy_loopinfo_to_se (&loopse, &loop);
382 loopse.ss = ss;
383
384 /* For operator assignment, do dependency checking. */
385 if (dependency_check)
386 {
387 gfc_symbol *sym;
388 sym = code->resolved_sym;
389 gfc_conv_elemental_dependencies (&se, &loopse, sym,
390 code->ext.actual);
391 }
392
393 /* Generate the loop body. */
394 gfc_start_scalarized_body (&loop, &body);
395 gfc_init_block (&block);
396
397 /* Add the subroutine call to the block. */
398 gfc_conv_function_call (&loopse, code->resolved_sym, code->ext.actual,
399 NULL_TREE);
400 gfc_add_expr_to_block (&loopse.pre, loopse.expr);
401
402 gfc_add_block_to_block (&block, &loopse.pre);
403 gfc_add_block_to_block (&block, &loopse.post);
404
405 /* Finish up the loop block and the loop. */
406 gfc_add_expr_to_block (&body, gfc_finish_block (&block));
407 gfc_trans_scalarizing_loops (&loop, &body);
408 gfc_add_block_to_block (&se.pre, &loop.pre);
409 gfc_add_block_to_block (&se.pre, &loop.post);
410 gfc_add_block_to_block (&se.pre, &se.post);
411 gfc_cleanup_loop (&loop);
412 }
413
414 return gfc_finish_block (&se.pre);
415 }
416
417
418 /* Translate the RETURN statement. */
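/* When code->expr is set, this is an alternate return, e.g.
   (hypothetical example)

       SUBROUTINE sub (a, *, *)
         ...
         RETURN 2

   which stores 2 in the fake result variable before jumping to the
   common return label.  */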
419
420 tree
421 gfc_trans_return (gfc_code * code ATTRIBUTE_UNUSED)
422 {
423 if (code->expr)
424 {
425 gfc_se se;
426 tree tmp;
427 tree result;
428
429 /* If code->expr is not NULL, this return statement must appear
430 in a subroutine and current_fake_result_decl has already
431 been generated. */
432
433 result = gfc_get_fake_result_decl (NULL, 0);
434 if (!result)
435 {
436 gfc_warning ("An alternate return at %L without a * dummy argument",
437 &code->expr->where);
438 return build1_v (GOTO_EXPR, gfc_get_return_label ());
439 }
440
441 /* Start a new block for this statement. */
442 gfc_init_se (&se, NULL);
443 gfc_start_block (&se.pre);
444
445 gfc_conv_expr (&se, code->expr);
446
447 tmp = build2 (MODIFY_EXPR, TREE_TYPE (result), result,
448 fold_convert (TREE_TYPE (result), se.expr));
449 gfc_add_expr_to_block (&se.pre, tmp);
450
451 tmp = build1_v (GOTO_EXPR, gfc_get_return_label ());
452 gfc_add_expr_to_block (&se.pre, tmp);
453 gfc_add_block_to_block (&se.pre, &se.post);
454 return gfc_finish_block (&se.pre);
455 }
456 else
457 return build1_v (GOTO_EXPR, gfc_get_return_label ());
458 }
459
460
461 /* Translate the PAUSE statement. We have to translate this statement
462 to a runtime library call. */
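/* For example (illustrative only), PAUSE 'ready?' is lowered to a call
   of the string variant of the pause routine with the character
   expression and its length as arguments, while PAUSE or PAUSE 7 uses
   the numeric variant with the stop code.  */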
463
464 tree
465 gfc_trans_pause (gfc_code * code)
466 {
467 tree gfc_int4_type_node = gfc_get_int_type (4);
468 gfc_se se;
469 tree tmp;
470
471 /* Start a new block for this statement. */
472 gfc_init_se (&se, NULL);
473 gfc_start_block (&se.pre);
474
475
476 if (code->expr == NULL)
477 {
478 tmp = build_int_cst (gfc_int4_type_node, code->ext.stop_code);
479 tmp = build_call_expr (gfor_fndecl_pause_numeric, 1, tmp);
480 }
481 else
482 {
483 gfc_conv_expr_reference (&se, code->expr);
484 tmp = build_call_expr (gfor_fndecl_pause_string, 2,
485 se.expr, se.string_length);
486 }
487
488 gfc_add_expr_to_block (&se.pre, tmp);
489
490 gfc_add_block_to_block (&se.pre, &se.post);
491
492 return gfc_finish_block (&se.pre);
493 }
494
495
496 /* Translate the STOP statement. We have to translate this statement
497 to a runtime library call. */
498
499 tree
500 gfc_trans_stop (gfc_code * code)
501 {
502 tree gfc_int4_type_node = gfc_get_int_type (4);
503 gfc_se se;
504 tree tmp;
505
506 /* Start a new block for this statement. */
507 gfc_init_se (&se, NULL);
508 gfc_start_block (&se.pre);
509
510
511 if (code->expr == NULL)
512 {
513 tmp = build_int_cst (gfc_int4_type_node, code->ext.stop_code);
514 tmp = build_call_expr (gfor_fndecl_stop_numeric, 1, tmp);
515 }
516 else
517 {
518 gfc_conv_expr_reference (&se, code->expr);
519 tmp = build_call_expr (gfor_fndecl_stop_string, 2,
520 se.expr, se.string_length);
521 }
522
523 gfc_add_expr_to_block (&se.pre, tmp);
524
525 gfc_add_block_to_block (&se.pre, &se.post);
526
527 return gfc_finish_block (&se.pre);
528 }
529
530
531 /* Generate GENERIC for the IF construct. This function also deals with
532 the simple IF statement, because the front end translates the IF
533 statement into an IF construct.
534
535 We translate:
536
537 IF (cond) THEN
538 then_clause
539 ELSEIF (cond2)
540 elseif_clause
541 ELSE
542 else_clause
543 ENDIF
544
545 into:
546
547 pre_cond_s;
548 if (cond_s)
549 {
550 then_clause;
551 }
552 else
553 {
554 pre_cond_s
555 if (cond_s)
556 {
557 elseif_clause
558 }
559 else
560 {
561 else_clause;
562 }
563 }
564
565 where COND_S is the simplified version of the predicate. PRE_COND_S
566 are the pre side-effects produced by the translation of the
567 conditional.
568 We need to build the chain recursively otherwise we run into
569 problems with folding incomplete statements. */
570
571 static tree
572 gfc_trans_if_1 (gfc_code * code)
573 {
574 gfc_se if_se;
575 tree stmt, elsestmt;
576
577 /* Check for an unconditional ELSE clause. */
578 if (!code->expr)
579 return gfc_trans_code (code->next);
580
581 /* Initialize a statement builder for each block. Puts in NULL_TREEs. */
582 gfc_init_se (&if_se, NULL);
583 gfc_start_block (&if_se.pre);
584
585 /* Calculate the IF condition expression. */
586 gfc_conv_expr_val (&if_se, code->expr);
587
588 /* Translate the THEN clause. */
589 stmt = gfc_trans_code (code->next);
590
591 /* Translate the ELSE clause. */
592 if (code->block)
593 elsestmt = gfc_trans_if_1 (code->block);
594 else
595 elsestmt = build_empty_stmt ();
596
597 /* Build the condition expression and add it to the condition block. */
598 stmt = fold_build3 (COND_EXPR, void_type_node, if_se.expr, stmt, elsestmt);
599
600 gfc_add_expr_to_block (&if_se.pre, stmt);
601
602 /* Finish off this statement. */
603 return gfc_finish_block (&if_se.pre);
604 }
605
606 tree
607 gfc_trans_if (gfc_code * code)
608 {
609 /* Ignore the top EXEC_IF, it only announces an IF construct. The
610 actual code we must translate is in code->block. */
611
612 return gfc_trans_if_1 (code->block);
613 }
614
615
616 /* Translate an arithmetic IF expression.
617
618 IF (cond) label1, label2, label3 translates to
619
620 if (cond <= 0)
621 {
622 if (cond < 0)
623 goto label1;
624 else // cond == 0
625 goto label2;
626 }
627 else // cond > 0
628 goto label3;
629
630 An optimized version can be generated in case of equal labels.
631 E.g., if label1 is equal to label2, we can translate it to
632
633 if (cond <= 0)
634 goto label1;
635 else
636 goto label3;
637 */
638
639 tree
640 gfc_trans_arithmetic_if (gfc_code * code)
641 {
642 gfc_se se;
643 tree tmp;
644 tree branch1;
645 tree branch2;
646 tree zero;
647
648 /* Start a new block. */
649 gfc_init_se (&se, NULL);
650 gfc_start_block (&se.pre);
651
652 /* Pre-evaluate COND. */
653 gfc_conv_expr_val (&se, code->expr);
654 se.expr = gfc_evaluate_now (se.expr, &se.pre);
655
656 /* Build something to compare with. */
657 zero = gfc_build_const (TREE_TYPE (se.expr), integer_zero_node);
658
659 if (code->label->value != code->label2->value)
660 {
661 /* If (cond < 0) take branch1 else take branch2.
662 First build jumps to the COND .LT. 0 and the COND .EQ. 0 cases. */
663 branch1 = build1_v (GOTO_EXPR, gfc_get_label_decl (code->label));
664 branch2 = build1_v (GOTO_EXPR, gfc_get_label_decl (code->label2));
665
666 if (code->label->value != code->label3->value)
667 tmp = fold_build2 (LT_EXPR, boolean_type_node, se.expr, zero);
668 else
669 tmp = fold_build2 (NE_EXPR, boolean_type_node, se.expr, zero);
670
671 branch1 = fold_build3 (COND_EXPR, void_type_node, tmp, branch1, branch2);
672 }
673 else
674 branch1 = build1_v (GOTO_EXPR, gfc_get_label_decl (code->label));
675
676 if (code->label->value != code->label3->value
677 && code->label2->value != code->label3->value)
678 {
679 /* if (cond <= 0) take branch1 else take branch2. */
680 branch2 = build1_v (GOTO_EXPR, gfc_get_label_decl (code->label3));
681 tmp = fold_build2 (LE_EXPR, boolean_type_node, se.expr, zero);
682 branch1 = fold_build3 (COND_EXPR, void_type_node, tmp, branch1, branch2);
683 }
684
685 /* Append the COND_EXPR to the evaluation of COND, and return. */
686 gfc_add_expr_to_block (&se.pre, branch1);
687 return gfc_finish_block (&se.pre);
688 }
689
690
691 /* Translate the simple DO construct. This is where the loop variable has
692 integer type and step +-1. We can't use this in the general case
693 because integer overflow and floating point errors could give incorrect
694 results.
695 We translate a do loop from:
696
697 DO dovar = from, to, step
698 body
699 END DO
700
701 to:
702
703 [Evaluate loop bounds and step]
704 dovar = from;
705 if ((step > 0) ? (dovar <= to) : (dovar >= to))
706 {
707 for (;;)
708 {
709 body;
710 cycle_label:
711 cond = (dovar == to);
712 dovar += step;
713 if (cond) goto end_label;
714 }
715 }
716 end_label:
717
718 This helps the optimizers by avoiding the extra induction variable
719 used in the general case. */
720
721 static tree
722 gfc_trans_simple_do (gfc_code * code, stmtblock_t *pblock, tree dovar,
723 tree from, tree to, tree step)
724 {
725 stmtblock_t body;
726 tree type;
727 tree cond;
728 tree tmp;
729 tree cycle_label;
730 tree exit_label;
731
732 type = TREE_TYPE (dovar);
733
734 /* Initialize the DO variable: dovar = from. */
735 gfc_add_modify_expr (pblock, dovar, from);
736
737 /* Cycle and exit statements are implemented with gotos. */
738 cycle_label = gfc_build_label_decl (NULL_TREE);
739 exit_label = gfc_build_label_decl (NULL_TREE);
740
741 /* Put the labels where they can be found later. See gfc_trans_do(). */
742 code->block->backend_decl = tree_cons (cycle_label, exit_label, NULL);
743
744 /* Loop body. */
745 gfc_start_block (&body);
746
747 /* Main loop body. */
748 tmp = gfc_trans_code (code->block->next);
749 gfc_add_expr_to_block (&body, tmp);
750
751 /* Label for cycle statements (if needed). */
752 if (TREE_USED (cycle_label))
753 {
754 tmp = build1_v (LABEL_EXPR, cycle_label);
755 gfc_add_expr_to_block (&body, tmp);
756 }
757
758 /* Evaluate the loop condition. */
759 cond = fold_build2 (EQ_EXPR, boolean_type_node, dovar, to);
760 cond = gfc_evaluate_now (cond, &body);
761
762 /* Increment the loop variable. */
763 tmp = fold_build2 (PLUS_EXPR, type, dovar, step);
764 gfc_add_modify_expr (&body, dovar, tmp);
765
766 /* The loop exit. */
767 tmp = build1_v (GOTO_EXPR, exit_label);
768 TREE_USED (exit_label) = 1;
769 tmp = fold_build3 (COND_EXPR, void_type_node,
770 cond, tmp, build_empty_stmt ());
771 gfc_add_expr_to_block (&body, tmp);
772
773 /* Finish the loop body. */
774 tmp = gfc_finish_block (&body);
775 tmp = build1_v (LOOP_EXPR, tmp);
776
777 /* Only execute the loop if the number of iterations is positive. */
778 if (tree_int_cst_sgn (step) > 0)
779 cond = fold_build2 (LE_EXPR, boolean_type_node, dovar, to);
780 else
781 cond = fold_build2 (GE_EXPR, boolean_type_node, dovar, to);
782 tmp = fold_build3 (COND_EXPR, void_type_node,
783 cond, tmp, build_empty_stmt ());
784 gfc_add_expr_to_block (pblock, tmp);
785
786 /* Add the exit label. */
787 tmp = build1_v (LABEL_EXPR, exit_label);
788 gfc_add_expr_to_block (pblock, tmp);
789
790 return gfc_finish_block (pblock);
791 }
792
793 /* Translate the DO construct. This obviously is one of the most
794 important ones to get right with any compiler, but especially
795 so for Fortran.
796
797 We special case some loop forms as described in gfc_trans_simple_do.
798 For other cases we implement them with a separate loop count,
799 as described in the standard.
800
801 We translate a do loop from:
802
803 DO dovar = from, to, step
804 body
805 END DO
806
807 to:
808
809 [evaluate loop bounds and step]
810 empty = (step > 0 ? to < from : to > from);
811 countm1 = (to - from) / step;
812 dovar = from;
813 if (empty) goto exit_label;
814 for (;;)
815 {
816 body;
817 cycle_label:
818 dovar += step;
819 if (countm1 == 0) goto exit_label;
820 countm1--;
821 }
822 exit_label:
823
824 countm1 is an unsigned integer. It is equal to the loop count minus one,
825 because the loop count itself can overflow. */
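/* A worked example (illustrative only): for DO I = 1, 10, 3 we compute
   countm1 = (10 - 1) / 3 = 3 in unsigned arithmetic, giving the four
   iterations I = 1, 4, 7, 10.  Counting down from countm1 rather than
   comparing I against TO avoids overflow when TO is close to HUGE(I).  */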
826
827 tree
828 gfc_trans_do (gfc_code * code)
829 {
830 gfc_se se;
831 tree dovar;
832 tree from;
833 tree to;
834 tree step;
835 tree empty;
836 tree countm1;
837 tree type;
838 tree utype;
839 tree cond;
840 tree cycle_label;
841 tree exit_label;
842 tree tmp;
843 tree pos_step;
844 stmtblock_t block;
845 stmtblock_t body;
846
847 gfc_start_block (&block);
848
849 /* Evaluate all the expressions in the iterator. */
850 gfc_init_se (&se, NULL);
851 gfc_conv_expr_lhs (&se, code->ext.iterator->var);
852 gfc_add_block_to_block (&block, &se.pre);
853 dovar = se.expr;
854 type = TREE_TYPE (dovar);
855
856 gfc_init_se (&se, NULL);
857 gfc_conv_expr_val (&se, code->ext.iterator->start);
858 gfc_add_block_to_block (&block, &se.pre);
859 from = gfc_evaluate_now (se.expr, &block);
860
861 gfc_init_se (&se, NULL);
862 gfc_conv_expr_val (&se, code->ext.iterator->end);
863 gfc_add_block_to_block (&block, &se.pre);
864 to = gfc_evaluate_now (se.expr, &block);
865
866 gfc_init_se (&se, NULL);
867 gfc_conv_expr_val (&se, code->ext.iterator->step);
868 gfc_add_block_to_block (&block, &se.pre);
869 step = gfc_evaluate_now (se.expr, &block);
870
871 /* Special case simple loops. */
872 if (TREE_CODE (type) == INTEGER_TYPE
873 && (integer_onep (step)
874 || tree_int_cst_equal (step, integer_minus_one_node)))
875 return gfc_trans_simple_do (code, &block, dovar, from, to, step);
876
877 /* We need a special check for empty loops:
878 empty = (step > 0 ? to < from : to > from); */
879 pos_step = fold_build2 (GT_EXPR, boolean_type_node, step,
880 fold_convert (type, integer_zero_node));
881 empty = fold_build3 (COND_EXPR, boolean_type_node, pos_step,
882 fold_build2 (LT_EXPR, boolean_type_node, to, from),
883 fold_build2 (GT_EXPR, boolean_type_node, to, from));
884
885 /* Initialize loop count. This code is executed before we enter the
886 loop body. We generate: countm1 = abs(to - from) / abs(step). */
887 if (TREE_CODE (type) == INTEGER_TYPE)
888 {
889 tree ustep;
890
891 utype = unsigned_type_for (type);
892
893 /* tmp = abs(to - from) / abs(step) */
894 ustep = fold_convert (utype, fold_build1 (ABS_EXPR, type, step));
895 tmp = fold_build3 (COND_EXPR, type, pos_step,
896 fold_build2 (MINUS_EXPR, type, to, from),
897 fold_build2 (MINUS_EXPR, type, from, to));
898 tmp = fold_build2 (TRUNC_DIV_EXPR, utype, fold_convert (utype, tmp),
899 ustep);
900 }
901 else
902 {
903 /* TODO: We could use the same width as the real type.
904 This would probably cause more problems than it solves
905 when we implement "long double" types. */
906 utype = unsigned_type_for (gfc_array_index_type);
907 tmp = fold_build2 (MINUS_EXPR, type, to, from);
908 tmp = fold_build2 (RDIV_EXPR, type, tmp, step);
909 tmp = fold_build1 (FIX_TRUNC_EXPR, utype, tmp);
910 }
911 countm1 = gfc_create_var (utype, "countm1");
912 gfc_add_modify_expr (&block, countm1, tmp);
913
914 /* Cycle and exit statements are implemented with gotos. */
915 cycle_label = gfc_build_label_decl (NULL_TREE);
916 exit_label = gfc_build_label_decl (NULL_TREE);
917 TREE_USED (exit_label) = 1;
918
919 /* Initialize the DO variable: dovar = from. */
920 gfc_add_modify_expr (&block, dovar, from);
921
922 /* If the loop is empty, go directly to the exit label. */
923 tmp = fold_build3 (COND_EXPR, void_type_node, empty,
924 build1_v (GOTO_EXPR, exit_label), build_empty_stmt ());
925 gfc_add_expr_to_block (&block, tmp);
926
927 /* Loop body. */
928 gfc_start_block (&body);
929
930 /* Put these labels where they can be found later. We put the
931 labels in a TREE_LIST node (because TREE_CHAIN is already
932 used). cycle_label goes in TREE_PURPOSE (backend_decl), exit
933 label in TREE_VALUE (backend_decl). */
934
935 code->block->backend_decl = tree_cons (cycle_label, exit_label, NULL);
936
937 /* Main loop body. */
938 tmp = gfc_trans_code (code->block->next);
939 gfc_add_expr_to_block (&body, tmp);
940
941 /* Label for cycle statements (if needed). */
942 if (TREE_USED (cycle_label))
943 {
944 tmp = build1_v (LABEL_EXPR, cycle_label);
945 gfc_add_expr_to_block (&body, tmp);
946 }
947
948 /* Increment the loop variable. */
949 tmp = build2 (PLUS_EXPR, type, dovar, step);
950 gfc_add_modify_expr (&body, dovar, tmp);
951
952 /* End with the loop condition. Loop until countm1 == 0. */
953 cond = fold_build2 (EQ_EXPR, boolean_type_node, countm1,
954 build_int_cst (utype, 0));
955 tmp = build1_v (GOTO_EXPR, exit_label);
956 tmp = fold_build3 (COND_EXPR, void_type_node,
957 cond, tmp, build_empty_stmt ());
958 gfc_add_expr_to_block (&body, tmp);
959
960 /* Decrement the loop count. */
961 tmp = build2 (MINUS_EXPR, utype, countm1, build_int_cst (utype, 1));
962 gfc_add_modify_expr (&body, countm1, tmp);
963
964 /* End of loop body. */
965 tmp = gfc_finish_block (&body);
966
967 /* The for loop itself. */
968 tmp = build1_v (LOOP_EXPR, tmp);
969 gfc_add_expr_to_block (&block, tmp);
970
971 /* Add the exit label. */
972 tmp = build1_v (LABEL_EXPR, exit_label);
973 gfc_add_expr_to_block (&block, tmp);
974
975 return gfc_finish_block (&block);
976 }
977
978
979 /* Translate the DO WHILE construct.
980
981 We translate
982
983 DO WHILE (cond)
984 body
985 END DO
986
987 to:
988
989 for ( ; ; )
990 {
991 pre_cond;
992 if (! cond) goto exit_label;
993 body;
994 cycle_label:
995 }
996 exit_label:
997
998 Because the evaluation of the exit condition `cond' may have side
999 effects, we can't do much for empty loop bodies. The backend optimizers
1000 should be smart enough to eliminate any dead loops. */
1001
1002 tree
1003 gfc_trans_do_while (gfc_code * code)
1004 {
1005 gfc_se cond;
1006 tree tmp;
1007 tree cycle_label;
1008 tree exit_label;
1009 stmtblock_t block;
1010
1011 /* Everything we build here is part of the loop body. */
1012 gfc_start_block (&block);
1013
1014 /* Cycle and exit statements are implemented with gotos. */
1015 cycle_label = gfc_build_label_decl (NULL_TREE);
1016 exit_label = gfc_build_label_decl (NULL_TREE);
1017
1018 /* Put the labels where they can be found later. See gfc_trans_do(). */
1019 code->block->backend_decl = tree_cons (cycle_label, exit_label, NULL);
1020
1021 /* Create a GIMPLE version of the exit condition. */
1022 gfc_init_se (&cond, NULL);
1023 gfc_conv_expr_val (&cond, code->expr);
1024 gfc_add_block_to_block (&block, &cond.pre);
1025 cond.expr = fold_build1 (TRUTH_NOT_EXPR, boolean_type_node, cond.expr);
1026
1027 /* Build "IF (! cond) GOTO exit_label". */
1028 tmp = build1_v (GOTO_EXPR, exit_label);
1029 TREE_USED (exit_label) = 1;
1030 tmp = fold_build3 (COND_EXPR, void_type_node,
1031 cond.expr, tmp, build_empty_stmt ());
1032 gfc_add_expr_to_block (&block, tmp);
1033
1034 /* The main body of the loop. */
1035 tmp = gfc_trans_code (code->block->next);
1036 gfc_add_expr_to_block (&block, tmp);
1037
1038 /* Label for cycle statements (if needed). */
1039 if (TREE_USED (cycle_label))
1040 {
1041 tmp = build1_v (LABEL_EXPR, cycle_label);
1042 gfc_add_expr_to_block (&block, tmp);
1043 }
1044
1045 /* End of loop body. */
1046 tmp = gfc_finish_block (&block);
1047
1048 gfc_init_block (&block);
1049 /* Build the loop. */
1050 tmp = build1_v (LOOP_EXPR, tmp);
1051 gfc_add_expr_to_block (&block, tmp);
1052
1053 /* Add the exit label. */
1054 tmp = build1_v (LABEL_EXPR, exit_label);
1055 gfc_add_expr_to_block (&block, tmp);
1056
1057 return gfc_finish_block (&block);
1058 }
1059
1060
1061 /* Translate the SELECT CASE construct for INTEGER case expressions,
1062 without killing all potential optimizations. The problem is that
1063 Fortran allows unbounded cases, but the back-end does not, so we
1064 need to intercept those and map them onto the minimum and maximum
1065 values of the case expression's type before building the SWITCH_EXPR.
1066
1067 For example, we translate this,
1068
1069 SELECT CASE (expr)
1070 CASE (:100,101,105:115)
1071 block_1
1072 CASE (190:199,200:)
1073 block_2
1074 CASE (300)
1075 block_3
1076 CASE DEFAULT
1077 block_4
1078 END SELECT
1079
1080 to the GENERIC equivalent,
1081
1082 switch (expr)
1083 {
1084 case (minimum value for typeof(expr)) ... 100:
1085 case 101:
1086 case 105 ... 115:
1087 block_1;
1088 goto end_label;
1089
1090 case 200 ... (maximum value for typeof(expr)):
1091 case 190 ... 199:
1092 block_2;
1093 goto end_label;
1094
1095 case 300:
1096 block_3;
1097 goto end_label;
1098
1099 default:
1100 block_4;
1101 goto end_label;
1102 }
1103
1104 end_label: */
1105
1106 static tree
1107 gfc_trans_integer_select (gfc_code * code)
1108 {
1109 gfc_code *c;
1110 gfc_case *cp;
1111 tree end_label;
1112 tree tmp;
1113 gfc_se se;
1114 stmtblock_t block;
1115 stmtblock_t body;
1116
1117 gfc_start_block (&block);
1118
1119 /* Calculate the switch expression. */
1120 gfc_init_se (&se, NULL);
1121 gfc_conv_expr_val (&se, code->expr);
1122 gfc_add_block_to_block (&block, &se.pre);
1123
1124 end_label = gfc_build_label_decl (NULL_TREE);
1125
1126 gfc_init_block (&body);
1127
1128 for (c = code->block; c; c = c->block)
1129 {
1130 for (cp = c->ext.case_list; cp; cp = cp->next)
1131 {
1132 tree low, high;
1133 tree label;
1134
1135 /* Assume it's the default case. */
1136 low = high = NULL_TREE;
1137
1138 if (cp->low)
1139 {
1140 low = gfc_conv_mpz_to_tree (cp->low->value.integer,
1141 cp->low->ts.kind);
1142
1143 /* If there's only a lower bound, set the high bound to the
1144 maximum value of the case expression's type. */
1145 if (!cp->high)
1146 high = TYPE_MAX_VALUE (TREE_TYPE (se.expr));
1147 }
1148
1149 if (cp->high)
1150 {
1151 /* Three cases are possible here:
1152
1153 1) There is no lower bound, e.g. CASE (:N).
1154 2) There is a lower bound .NE. high bound, that is
1155 a case range, e.g. CASE (N:M) where M>N (we make
1156 sure that M>N during type resolution).
1157 3) There is a lower bound, and it has the same value
1158 as the high bound, e.g. CASE (N:N). This is our
1159 internal representation of CASE(N).
1160
1161 In the first and second case, we need to set a value for
1162 high. In the third case, we don't, because the GCC middle
1163 end represents a single case value by just letting high be
1164 a NULL_TREE. We can't use a NULL bound to represent an
1165 unbounded case, so the type's extreme values are used instead. */
1166
1167 if (!cp->low
1168 || (cp->low
1169 && mpz_cmp (cp->low->value.integer,
1170 cp->high->value.integer) != 0))
1171 high = gfc_conv_mpz_to_tree (cp->high->value.integer,
1172 cp->high->ts.kind);
1173
1174 /* Unbounded case. */
1175 if (!cp->low)
1176 low = TYPE_MIN_VALUE (TREE_TYPE (se.expr));
1177 }
1178
1179 /* Build a label. */
1180 label = gfc_build_label_decl (NULL_TREE);
1181
1182 /* Add this case label. The extra 'label' operand is the label decl
1183 that the GCC back end expects in a CASE_LABEL_EXPR. */
1184 tmp = build3 (CASE_LABEL_EXPR, void_type_node, low, high, label);
1185 gfc_add_expr_to_block (&body, tmp);
1186 }
1187
1188 /* Add the statements for this case. */
1189 tmp = gfc_trans_code (c->next);
1190 gfc_add_expr_to_block (&body, tmp);
1191
1192 /* Break to the end of the construct. */
1193 tmp = build1_v (GOTO_EXPR, end_label);
1194 gfc_add_expr_to_block (&body, tmp);
1195 }
1196
1197 tmp = gfc_finish_block (&body);
1198 tmp = build3_v (SWITCH_EXPR, se.expr, tmp, NULL_TREE);
1199 gfc_add_expr_to_block (&block, tmp);
1200
1201 tmp = build1_v (LABEL_EXPR, end_label);
1202 gfc_add_expr_to_block (&block, tmp);
1203
1204 return gfc_finish_block (&block);
1205 }
1206
1207
1208 /* Translate the SELECT CASE construct for LOGICAL case expressions.
1209
1210 There are only two cases possible here, even though the standard
1211 does allow three cases in a LOGICAL SELECT CASE construct: .TRUE.,
1212 .FALSE., and DEFAULT.
1213
1214 We never generate more than two blocks here. Instead, we always
1215 try to eliminate the DEFAULT case. This way, we can translate this
1216 kind of SELECT construct to a simple
1217
1218 if {} else {};
1219
1220 expression in GENERIC. */
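/* For instance (hypothetical example),

       SELECT CASE (flag)
       CASE (.TRUE.)
          block_t
       CASE DEFAULT
          block_f
       END SELECT

   is emitted as if it were IF (flag) THEN block_t ELSE block_f END IF,
   with the DEFAULT case standing in for the missing .FALSE. case.  */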
1221
1222 static tree
1223 gfc_trans_logical_select (gfc_code * code)
1224 {
1225 gfc_code *c;
1226 gfc_code *t, *f, *d;
1227 gfc_case *cp;
1228 gfc_se se;
1229 stmtblock_t block;
1230
1231 /* Assume we don't have any cases at all. */
1232 t = f = d = NULL;
1233
1234 /* Now see which ones we actually do have. We can have at most two
1235 cases in a single case list: one for .TRUE. and one for .FALSE.
1236 The default case is always separate. If the cases for .TRUE. and
1237 .FALSE. are in the same case list, the block for that case list is
1238 always executed, and we don't generate a COND_EXPR. */
1239 for (c = code->block; c; c = c->block)
1240 {
1241 for (cp = c->ext.case_list; cp; cp = cp->next)
1242 {
1243 if (cp->low)
1244 {
1245 if (cp->low->value.logical == 0) /* .FALSE. */
1246 f = c;
1247 else /* if (cp->value.logical != 0), thus .TRUE. */
1248 t = c;
1249 }
1250 else
1251 d = c;
1252 }
1253 }
1254
1255 /* Start a new block. */
1256 gfc_start_block (&block);
1257
1258 /* Calculate the switch expression. We always need to do this
1259 because it may have side effects. */
1260 gfc_init_se (&se, NULL);
1261 gfc_conv_expr_val (&se, code->expr);
1262 gfc_add_block_to_block (&block, &se.pre);
1263
1264 if (t == f && t != NULL)
1265 {
1266 /* Cases for .TRUE. and .FALSE. are in the same block. Just
1267 translate the code for these cases, append it to the current
1268 block. */
1269 gfc_add_expr_to_block (&block, gfc_trans_code (t->next));
1270 }
1271 else
1272 {
1273 tree true_tree, false_tree, stmt;
1274
1275 true_tree = build_empty_stmt ();
1276 false_tree = build_empty_stmt ();
1277
1278 /* If we have a case for .TRUE. and for .FALSE., discard the default case.
1279 Otherwise, if .TRUE. or .FALSE. is missing and there is a default case,
1280 make the missing case the default case. */
1281 if (t != NULL && f != NULL)
1282 d = NULL;
1283 else if (d != NULL)
1284 {
1285 if (t == NULL)
1286 t = d;
1287 else
1288 f = d;
1289 }
1290
1291 /* Translate the code for each of these blocks, and append it to
1292 the current block. */
1293 if (t != NULL)
1294 true_tree = gfc_trans_code (t->next);
1295
1296 if (f != NULL)
1297 false_tree = gfc_trans_code (f->next);
1298
1299 stmt = fold_build3 (COND_EXPR, void_type_node, se.expr,
1300 true_tree, false_tree);
1301 gfc_add_expr_to_block (&block, stmt);
1302 }
1303
1304 return gfc_finish_block (&block);
1305 }
1306
1307
1308 /* Translate the SELECT CASE construct for CHARACTER case expressions.
1309 Instead of generating compares and jumps, it is far simpler to
1310 generate a data structure describing the cases in order and call a
1311 library subroutine that locates the right case.
1312 This is particularly true because this is the only case where we
1313 might have to dispose of a temporary.
1314 The library subroutine returns the number of the case to execute, and
1315 the generated code dispatches on that number with a SWITCH_EXPR. */
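/* Schematically (hypothetical example), for

       SELECT CASE (s)
       CASE ('a':'f')
          block_1
       CASE ('x')
          block_2
       END SELECT

   we emit a static array of two _jump_struct entries, one per case
   range, pass it to the library routine together with S and its length,
   and switch on the case number it returns.  */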
1316
1317 static tree
1318 gfc_trans_character_select (gfc_code *code)
1319 {
1320 tree init, node, end_label, tmp, type, case_num, label;
1321 stmtblock_t block, body;
1322 gfc_case *cp, *d;
1323 gfc_code *c;
1324 gfc_se se;
1325 int n;
1326
1327 static tree select_struct;
1328 static tree ss_string1, ss_string1_len;
1329 static tree ss_string2, ss_string2_len;
1330 static tree ss_target;
1331
1332 if (select_struct == NULL)
1333 {
1334 tree gfc_int4_type_node = gfc_get_int_type (4);
1335
1336 select_struct = make_node (RECORD_TYPE);
1337 TYPE_NAME (select_struct) = get_identifier ("_jump_struct");
1338
1339 #undef ADD_FIELD
1340 #define ADD_FIELD(NAME, TYPE) \
1341 ss_##NAME = gfc_add_field_to_struct \
1342 (&(TYPE_FIELDS (select_struct)), select_struct, \
1343 get_identifier (stringize(NAME)), TYPE)
1344
1345 ADD_FIELD (string1, pchar_type_node);
1346 ADD_FIELD (string1_len, gfc_int4_type_node);
1347
1348 ADD_FIELD (string2, pchar_type_node);
1349 ADD_FIELD (string2_len, gfc_int4_type_node);
1350
1351 ADD_FIELD (target, integer_type_node);
1352 #undef ADD_FIELD
1353
1354 gfc_finish_type (select_struct);
1355 }
1356
1357 cp = code->block->ext.case_list;
1358 while (cp->left != NULL)
1359 cp = cp->left;
1360
1361 n = 0;
1362 for (d = cp; d; d = d->right)
1363 d->n = n++;
1364
1365 end_label = gfc_build_label_decl (NULL_TREE);
1366
1367 /* Generate the body */
1368 gfc_start_block (&block);
1369 gfc_init_block (&body);
1370
1371 for (c = code->block; c; c = c->block)
1372 {
1373 for (d = c->ext.case_list; d; d = d->next)
1374 {
1375 label = gfc_build_label_decl (NULL_TREE);
1376 tmp = build3 (CASE_LABEL_EXPR, void_type_node,
1377 build_int_cst (NULL_TREE, d->n),
1378 build_int_cst (NULL_TREE, d->n), label);
1379 gfc_add_expr_to_block (&body, tmp);
1380 }
1381
1382 tmp = gfc_trans_code (c->next);
1383 gfc_add_expr_to_block (&body, tmp);
1384
1385 tmp = build1_v (GOTO_EXPR, end_label);
1386 gfc_add_expr_to_block (&body, tmp);
1387 }
1388
1389 /* Generate the structure describing the branches */
1390 init = NULL_TREE;
1391
1392 for(d = cp; d; d = d->right)
1393 {
1394 node = NULL_TREE;
1395
1396 gfc_init_se (&se, NULL);
1397
1398 if (d->low == NULL)
1399 {
1400 node = tree_cons (ss_string1, null_pointer_node, node);
1401 node = tree_cons (ss_string1_len, integer_zero_node, node);
1402 }
1403 else
1404 {
1405 gfc_conv_expr_reference (&se, d->low);
1406
1407 node = tree_cons (ss_string1, se.expr, node);
1408 node = tree_cons (ss_string1_len, se.string_length, node);
1409 }
1410
1411 if (d->high == NULL)
1412 {
1413 node = tree_cons (ss_string2, null_pointer_node, node);
1414 node = tree_cons (ss_string2_len, integer_zero_node, node);
1415 }
1416 else
1417 {
1418 gfc_init_se (&se, NULL);
1419 gfc_conv_expr_reference (&se, d->high);
1420
1421 node = tree_cons (ss_string2, se.expr, node);
1422 node = tree_cons (ss_string2_len, se.string_length, node);
1423 }
1424
1425 node = tree_cons (ss_target, build_int_cst (integer_type_node, d->n),
1426 node);
1427
1428 tmp = build_constructor_from_list (select_struct, nreverse (node));
1429 init = tree_cons (NULL_TREE, tmp, init);
1430 }
1431
1432 type = build_array_type (select_struct, build_index_type
1433 (build_int_cst (NULL_TREE, n - 1)));
1434
1435 init = build_constructor_from_list (type, nreverse(init));
1436 TREE_CONSTANT (init) = 1;
1437 TREE_INVARIANT (init) = 1;
1438 TREE_STATIC (init) = 1;
1439 /* Create a static variable to hold the jump table. */
1440 tmp = gfc_create_var (type, "jumptable");
1441 TREE_CONSTANT (tmp) = 1;
1442 TREE_INVARIANT (tmp) = 1;
1443 TREE_STATIC (tmp) = 1;
1444 TREE_READONLY (tmp) = 1;
1445 DECL_INITIAL (tmp) = init;
1446 init = tmp;
1447
1448 /* Build the library call */
1449 init = gfc_build_addr_expr (pvoid_type_node, init);
1450
1451 gfc_init_se (&se, NULL);
1452 gfc_conv_expr_reference (&se, code->expr);
1453
1454 gfc_add_block_to_block (&block, &se.pre);
1455
1456 tmp = build_call_expr (gfor_fndecl_select_string, 4, init,
1457 build_int_cst (NULL_TREE, n), se.expr,
1458 se.string_length);
1459 case_num = gfc_create_var (integer_type_node, "case_num");
1460 gfc_add_modify_expr (&block, case_num, tmp);
1461
1462 gfc_add_block_to_block (&block, &se.post);
1463
1464 tmp = gfc_finish_block (&body);
1465 tmp = build3_v (SWITCH_EXPR, case_num, tmp, NULL_TREE);
1466 gfc_add_expr_to_block (&block, tmp);
1467
1468 tmp = build1_v (LABEL_EXPR, end_label);
1469 gfc_add_expr_to_block (&block, tmp);
1470
1471 return gfc_finish_block (&block);
1472 }
1473
1474
1475 /* Translate the three variants of the SELECT CASE construct.
1476
1477 SELECT CASEs with INTEGER case expressions can be translated to an
1478 equivalent GENERIC switch statement, and for LOGICAL case
1479 expressions we build one or two if-else compares.
1480
1481 SELECT CASEs with CHARACTER case expressions are a whole different
1482 story, because they don't exist in GENERIC. So we sort them and
1483 do a binary search at runtime.
1484
1485 Fortran has no BREAK statement, and it does not allow jumps from
1486 one case block to another. That makes things a lot easier for
1487 the optimizers. */
1488
1489 tree
1490 gfc_trans_select (gfc_code * code)
1491 {
1492 gcc_assert (code && code->expr);
1493
1494 /* Empty SELECT constructs are legal. */
1495 if (code->block == NULL)
1496 return build_empty_stmt ();
1497
1498 /* Select the correct translation function. */
1499 switch (code->expr->ts.type)
1500 {
1501 case BT_LOGICAL: return gfc_trans_logical_select (code);
1502 case BT_INTEGER: return gfc_trans_integer_select (code);
1503 case BT_CHARACTER: return gfc_trans_character_select (code);
1504 default:
1505 gfc_internal_error ("gfc_trans_select(): Bad type for case expr.");
1506 /* Not reached */
1507 }
1508 }
1509
1510
1511 /* Traversal function to substitute a replacement symtree if the symbol
1512 in the expression is the same as that passed. f == 2 signals that
1513 the variable itself is not to be checked - only its references.
1514 This group of functions is used when the variable expression in a
1515 FORALL assignment has internal references. For example:
1516 FORALL (i = 1:4) p(p(i)) = i
1517 The only recourse here is to store a copy of 'p' for the index
1518 expression. */
1519
1520 static gfc_symtree *new_symtree;
1521 static gfc_symtree *old_symtree;
1522
1523 static bool
1524 forall_replace (gfc_expr *expr, gfc_symbol *sym, int *f)
1525 {
1526 if (expr->expr_type != EXPR_VARIABLE)
1527 return false;
1528
1529 if (*f == 2)
1530 *f = 1;
1531 else if (expr->symtree->n.sym == sym)
1532 expr->symtree = new_symtree;
1533
1534 return false;
1535 }
1536
1537 static void
1538 forall_replace_symtree (gfc_expr *e, gfc_symbol *sym, int f)
1539 {
1540 gfc_traverse_expr (e, sym, forall_replace, f);
1541 }
1542
1543 static bool
1544 forall_restore (gfc_expr *expr,
1545 gfc_symbol *sym ATTRIBUTE_UNUSED,
1546 int *f ATTRIBUTE_UNUSED)
1547 {
1548 if (expr->expr_type != EXPR_VARIABLE)
1549 return false;
1550
1551 if (expr->symtree == new_symtree)
1552 expr->symtree = old_symtree;
1553
1554 return false;
1555 }
1556
1557 static void
1558 forall_restore_symtree (gfc_expr *e)
1559 {
1560 gfc_traverse_expr (e, NULL, forall_restore, 0);
1561 }
1562
1563 static void
1564 forall_make_variable_temp (gfc_code *c, stmtblock_t *pre, stmtblock_t *post)
1565 {
1566 gfc_se tse;
1567 gfc_se rse;
1568 gfc_expr *e;
1569 gfc_symbol *new_sym;
1570 gfc_symbol *old_sym;
1571 gfc_symtree *root;
1572 tree tmp;
1573
1574 /* Build a copy of the lvalue. */
1575 old_symtree = c->expr->symtree;
1576 old_sym = old_symtree->n.sym;
1577 e = gfc_lval_expr_from_sym (old_sym);
1578 if (old_sym->attr.dimension)
1579 {
1580 gfc_init_se (&tse, NULL);
1581 gfc_conv_subref_array_arg (&tse, e, 0, INTENT_IN);
1582 gfc_add_block_to_block (pre, &tse.pre);
1583 gfc_add_block_to_block (post, &tse.post);
1584 tse.expr = build_fold_indirect_ref (tse.expr);
1585
1586 if (e->ts.type != BT_CHARACTER)
1587 {
1588 /* Use the variable offset for the temporary. */
1589 tmp = gfc_conv_descriptor_offset (tse.expr);
1590 gfc_add_modify_expr (pre, tmp,
1591 gfc_conv_array_offset (old_sym->backend_decl));
1592 }
1593 }
1594 else
1595 {
1596 gfc_init_se (&tse, NULL);
1597 gfc_init_se (&rse, NULL);
1598 gfc_conv_expr (&rse, e);
1599 if (e->ts.type == BT_CHARACTER)
1600 {
1601 tse.string_length = rse.string_length;
1602 tmp = gfc_get_character_type_len (gfc_default_character_kind,
1603 tse.string_length);
1604 tse.expr = gfc_conv_string_tmp (&tse, build_pointer_type (tmp),
1605 rse.string_length);
1606 gfc_add_block_to_block (pre, &tse.pre);
1607 gfc_add_block_to_block (post, &tse.post);
1608 }
1609 else
1610 {
1611 tmp = gfc_typenode_for_spec (&e->ts);
1612 tse.expr = gfc_create_var (tmp, "temp");
1613 }
1614
1615 tmp = gfc_trans_scalar_assign (&tse, &rse, e->ts, true,
1616 e->expr_type == EXPR_VARIABLE);
1617 gfc_add_expr_to_block (pre, tmp);
1618 }
1619 gfc_free_expr (e);
1620
1621 /* Create a new symbol to represent the lvalue. */
1622 new_sym = gfc_new_symbol (old_sym->name, NULL);
1623 new_sym->ts = old_sym->ts;
1624 new_sym->attr.referenced = 1;
1625 new_sym->attr.dimension = old_sym->attr.dimension;
1626 new_sym->attr.flavor = old_sym->attr.flavor;
1627
1628 /* Use the temporary as the backend_decl. */
1629 new_sym->backend_decl = tse.expr;
1630
1631 /* Create a fake symtree for it. */
1632 root = NULL;
1633 new_symtree = gfc_new_symtree (&root, old_sym->name);
1634 new_symtree->n.sym = new_sym;
1635 gcc_assert (new_symtree == root);
1636
1637 /* Go through the expression reference replacing the old_symtree
1638 with the new. */
1639 forall_replace_symtree (c->expr, old_sym, 2);
1640
1641 /* Now we have made this temporary, we might as well use it for
1642 the right hand side. */
1643 forall_replace_symtree (c->expr2, old_sym, 1);
1644 }
1645
1646
1647 /* Handles dependencies in forall assignments. */
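/* A classic case that needs a temporary (hypothetical example) is

       FORALL (i = 1:n) a(i) = a(n + 1 - i)

   where the right hand side reads elements that other iterations of the
   FORALL assign to, so the rhs must be evaluated in full before any
   element of A is modified.  */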
1648 static int
1649 check_forall_dependencies (gfc_code *c, stmtblock_t *pre, stmtblock_t *post)
1650 {
1651 gfc_ref *lref;
1652 gfc_ref *rref;
1653 int need_temp;
1654 gfc_symbol *lsym;
1655
1656 lsym = c->expr->symtree->n.sym;
1657 need_temp = gfc_check_dependency (c->expr, c->expr2, 0);
1658
1659 /* Now check for dependencies within the 'variable'
1660 expression itself. These are treated by making a complete
1661 copy of the variable and changing all the references to it so
1662 that they point to the copy instead. Note that the shallow copy of
1663 the variable will not suffice for derived types with
1664 pointer components. We therefore leave these to their
1665 own devices. */
1666 if (lsym->ts.type == BT_DERIVED
1667 && lsym->ts.derived->attr.pointer_comp)
1668 return need_temp;
1669
1670 new_symtree = NULL;
1671 if (find_forall_index (c->expr, lsym, 2) == SUCCESS)
1672 {
1673 forall_make_variable_temp (c, pre, post);
1674 need_temp = 0;
1675 }
1676
1677 /* Substrings with dependencies are treated in the same
1678 way. */
1679 if (c->expr->ts.type == BT_CHARACTER
1680 && c->expr->ref
1681 && c->expr2->expr_type == EXPR_VARIABLE
1682 && lsym == c->expr2->symtree->n.sym)
1683 {
1684 for (lref = c->expr->ref; lref; lref = lref->next)
1685 if (lref->type == REF_SUBSTRING)
1686 break;
1687 for (rref = c->expr2->ref; rref; rref = rref->next)
1688 if (rref->type == REF_SUBSTRING)
1689 break;
1690
1691 if (rref && lref
1692 && gfc_dep_compare_expr (rref->u.ss.start, lref->u.ss.start) < 0)
1693 {
1694 forall_make_variable_temp (c, pre, post);
1695 need_temp = 0;
1696 }
1697 }
1698 return need_temp;
1699 }
1700
1701
1702 static void
1703 cleanup_forall_symtrees (gfc_code *c)
1704 {
1705 forall_restore_symtree (c->expr);
1706 forall_restore_symtree (c->expr2);
1707 gfc_free (new_symtree->n.sym);
1708 gfc_free (new_symtree);
1709 }
1710
1711
1712 /* Generate the loops for a FORALL block, specified by FORALL_TMP. BODY
1713 is the contents of the FORALL block/stmt to be iterated. MASK_FLAG
1714 indicates whether we should generate code to test the FORALL's mask
1715 array. OUTER is the loop header to be used for initializing mask
1716 indices.
1717
1718 The generated loop format is:
1719 count = (end - start + step) / step
1720 loopvar = start
1721 while (1)
1722 {
1723 if (count <= 0)
1724 goto end_of_loop
1725 <body>
1726 loopvar += step
1727 count--
1728 }
1729 end_of_loop: */
1730
1731 static tree
1732 gfc_trans_forall_loop (forall_info *forall_tmp, tree body,
1733 int mask_flag, stmtblock_t *outer)
1734 {
1735 int n, nvar;
1736 tree tmp;
1737 tree cond;
1738 stmtblock_t block;
1739 tree exit_label;
1740 tree count;
1741 tree var, start, end, step;
1742 iter_info *iter;
1743
1744 /* Initialize the mask index outside the FORALL nest. */
1745 if (mask_flag && forall_tmp->mask)
1746 gfc_add_modify_expr (outer, forall_tmp->maskindex, gfc_index_zero_node);
1747
1748 iter = forall_tmp->this_loop;
1749 nvar = forall_tmp->nvar;
1750 for (n = 0; n < nvar; n++)
1751 {
1752 var = iter->var;
1753 start = iter->start;
1754 end = iter->end;
1755 step = iter->step;
1756
1757 exit_label = gfc_build_label_decl (NULL_TREE);
1758 TREE_USED (exit_label) = 1;
1759
1760 /* The loop counter. */
1761 count = gfc_create_var (TREE_TYPE (var), "count");
1762
1763 /* The body of the loop. */
1764 gfc_init_block (&block);
1765
1766 /* The exit condition. */
1767 cond = fold_build2 (LE_EXPR, boolean_type_node,
1768 count, build_int_cst (TREE_TYPE (count), 0));
1769 tmp = build1_v (GOTO_EXPR, exit_label);
1770 tmp = fold_build3 (COND_EXPR, void_type_node,
1771 cond, tmp, build_empty_stmt ());
1772 gfc_add_expr_to_block (&block, tmp);
1773
1774 /* The main loop body. */
1775 gfc_add_expr_to_block (&block, body);
1776
1777 /* Increment the loop variable. */
1778 tmp = build2 (PLUS_EXPR, TREE_TYPE (var), var, step);
1779 gfc_add_modify_expr (&block, var, tmp);
1780
1781 /* Advance to the next mask element. Only do this for the
1782 innermost loop. */
1783 if (n == 0 && mask_flag && forall_tmp->mask)
1784 {
1785 tree maskindex = forall_tmp->maskindex;
1786 tmp = build2 (PLUS_EXPR, gfc_array_index_type,
1787 maskindex, gfc_index_one_node);
1788 gfc_add_modify_expr (&block, maskindex, tmp);
1789 }
1790
1791 /* Decrement the loop counter. */
1792 tmp = build2 (MINUS_EXPR, TREE_TYPE (var), count,
1793 build_int_cst (TREE_TYPE (var), 1));
1794 gfc_add_modify_expr (&block, count, tmp);
1795
1796 body = gfc_finish_block (&block);
1797
1798 /* Loop var initialization. */
1799 gfc_init_block (&block);
1800 gfc_add_modify_expr (&block, var, start);
1801
1802
1803 /* Initialize the loop counter. */
1804 tmp = fold_build2 (MINUS_EXPR, TREE_TYPE (var), step, start);
1805 tmp = fold_build2 (PLUS_EXPR, TREE_TYPE (var), end, tmp);
1806 tmp = fold_build2 (TRUNC_DIV_EXPR, TREE_TYPE (var), tmp, step);
1807 gfc_add_modify_expr (&block, count, tmp);
1808
1809 /* The loop expression. */
1810 tmp = build1_v (LOOP_EXPR, body);
1811 gfc_add_expr_to_block (&block, tmp);
1812
1813 /* The exit label. */
1814 tmp = build1_v (LABEL_EXPR, exit_label);
1815 gfc_add_expr_to_block (&block, tmp);
1816
1817 body = gfc_finish_block (&block);
1818 iter = iter->next;
1819 }
1820 return body;
1821 }
1822
1823
1824 /* Generate the body and loops according to MASK_FLAG. If MASK_FLAG
1825 is nonzero, the body is controlled by all masks in the forall nest.
1826 Otherwise, the innermost loop is not controlled by its mask. This
1827 is used for initializing that mask. */
1828
1829 static tree
1830 gfc_trans_nested_forall_loop (forall_info * nested_forall_info, tree body,
1831 int mask_flag)
1832 {
1833 tree tmp;
1834 stmtblock_t header;
1835 forall_info *forall_tmp;
1836 tree mask, maskindex;
1837
1838 gfc_start_block (&header);
1839
1840 forall_tmp = nested_forall_info;
1841 while (forall_tmp != NULL)
1842 {
1843 /* Generate body with masks' control. */
1844 if (mask_flag)
1845 {
1846 mask = forall_tmp->mask;
1847 maskindex = forall_tmp->maskindex;
1848
1849 /* If a mask was specified make the assignment conditional. */
1850 if (mask)
1851 {
1852 tmp = gfc_build_array_ref (mask, maskindex, NULL);
1853 body = build3_v (COND_EXPR, tmp, body, build_empty_stmt ());
1854 }
1855 }
1856 body = gfc_trans_forall_loop (forall_tmp, body, mask_flag, &header);
1857 forall_tmp = forall_tmp->prev_nest;
1858 mask_flag = 1;
1859 }
1860
1861 gfc_add_expr_to_block (&header, body);
1862 return gfc_finish_block (&header);
1863 }
1864
1865
1866 /* Allocate data for holding a temporary array. Returns either a local
1867 temporary array or a pointer variable. */
1868
1869 static tree
1870 gfc_do_allocate (tree bytesize, tree size, tree * pdata, stmtblock_t * pblock,
1871 tree elem_type)
1872 {
1873 tree tmpvar;
1874 tree type;
1875 tree tmp;
1876
1877 if (INTEGER_CST_P (size))
1878 {
1879 tmp = fold_build2 (MINUS_EXPR, gfc_array_index_type, size,
1880 gfc_index_one_node);
1881 }
1882 else
1883 tmp = NULL_TREE;
1884
1885 type = build_range_type (gfc_array_index_type, gfc_index_zero_node, tmp);
1886 type = build_array_type (elem_type, type);
1887 if (gfc_can_put_var_on_stack (bytesize))
1888 {
1889 gcc_assert (INTEGER_CST_P (size));
1890 tmpvar = gfc_create_var (type, "temp");
1891 *pdata = NULL_TREE;
1892 }
1893 else
1894 {
1895 tmpvar = gfc_create_var (build_pointer_type (type), "temp");
1896 *pdata = convert (pvoid_type_node, tmpvar);
1897
1898 tmp = gfc_call_malloc (pblock, TREE_TYPE (tmpvar), bytesize);
1899 gfc_add_modify_expr (pblock, tmpvar, tmp);
1900 }
1901 return tmpvar;
1902 }
1903
1904
1905 /* Generate code to copy the temporary to the actual lhs. */
1906
1907 static tree
1908 generate_loop_for_temp_to_lhs (gfc_expr *expr, tree tmp1, tree count3,
1909 tree count1, tree wheremask, bool invert)
1910 {
1911 gfc_ss *lss;
1912 gfc_se lse, rse;
1913 stmtblock_t block, body;
1914 gfc_loopinfo loop1;
1915 tree tmp;
1916 tree wheremaskexpr;
1917
1918 /* Walk the lhs. */
1919 lss = gfc_walk_expr (expr);
1920
1921 if (lss == gfc_ss_terminator)
1922 {
1923 gfc_start_block (&block);
1924
1925 gfc_init_se (&lse, NULL);
1926
1927 /* Translate the expression. */
1928 gfc_conv_expr (&lse, expr);
1929
1930 /* Form the expression for the temporary. */
1931 tmp = gfc_build_array_ref (tmp1, count1, NULL);
1932
1933 /* Use the scalar assignment as is. */
1934 gfc_add_block_to_block (&block, &lse.pre);
1935 gfc_add_modify_expr (&block, lse.expr, tmp);
1936 gfc_add_block_to_block (&block, &lse.post);
1937
1938 /* Increment the count1. */
1939 tmp = fold_build2 (PLUS_EXPR, TREE_TYPE (count1), count1,
1940 gfc_index_one_node);
1941 gfc_add_modify_expr (&block, count1, tmp);
1942
1943 tmp = gfc_finish_block (&block);
1944 }
1945 else
1946 {
1947 gfc_start_block (&block);
1948
1949 gfc_init_loopinfo (&loop1);
1950 gfc_init_se (&rse, NULL);
1951 gfc_init_se (&lse, NULL);
1952
1953 /* Associate the lss with the loop. */
1954 gfc_add_ss_to_loop (&loop1, lss);
1955
1956 /* Calculate the bounds of the scalarization. */
1957 gfc_conv_ss_startstride (&loop1);
1958 /* Setup the scalarizing loops. */
1959 gfc_conv_loop_setup (&loop1);
1960
1961 gfc_mark_ss_chain_used (lss, 1);
1962
1963 /* Start the scalarized loop body. */
1964 gfc_start_scalarized_body (&loop1, &body);
1965
1966 /* Setup the gfc_se structures. */
1967 gfc_copy_loopinfo_to_se (&lse, &loop1);
1968 lse.ss = lss;
1969
1970 /* Form the expression of the temporary. */
1971 if (lss != gfc_ss_terminator)
1972 rse.expr = gfc_build_array_ref (tmp1, count1, NULL);
1973 /* Translate expr. */
1974 gfc_conv_expr (&lse, expr);
1975
1976 /* Use the scalar assignment. */
1977 rse.string_length = lse.string_length;
1978 tmp = gfc_trans_scalar_assign (&lse, &rse, expr->ts, false, false);
1979
1980 /* Form the mask expression according to the mask tree list. */
1981 if (wheremask)
1982 {
1983 wheremaskexpr = gfc_build_array_ref (wheremask, count3, NULL);
1984 if (invert)
1985 wheremaskexpr = fold_build1 (TRUTH_NOT_EXPR,
1986 TREE_TYPE (wheremaskexpr),
1987 wheremaskexpr);
1988 tmp = fold_build3 (COND_EXPR, void_type_node,
1989 wheremaskexpr, tmp, build_empty_stmt ());
1990 }
1991
1992 gfc_add_expr_to_block (&body, tmp);
1993
1994 /* Increment count1. */
1995 tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
1996 count1, gfc_index_one_node);
1997 gfc_add_modify_expr (&body, count1, tmp);
1998
1999 /* Increment count3. */
2000 if (count3)
2001 {
2002 tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
2003 count3, gfc_index_one_node);
2004 gfc_add_modify_expr (&body, count3, tmp);
2005 }
2006
2007 /* Generate the copying loops. */
2008 gfc_trans_scalarizing_loops (&loop1, &body);
2009 gfc_add_block_to_block (&block, &loop1.pre);
2010 gfc_add_block_to_block (&block, &loop1.post);
2011 gfc_cleanup_loop (&loop1);
2012
2013 tmp = gfc_finish_block (&block);
2014 }
2015 return tmp;
2016 }
2017
2018
2019 /* Generate code to copy the rhs to the temporary. TMP1 is the address of
2020 the temporary; LSS and RSS are formed in compute_inner_temp_size (),
2021 and should not be freed. WHEREMASK is the conditional execution mask
2022 whose sense may be inverted by INVERT. */
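/* When the rhs needs scalarization, the generated copying loop is roughly
   (a sketch; the wheremask test is omitted when no mask is present and is
   negated when INVERT is set):

     for each element of EXPR2
       if (wheremask[count3])
         tmp1[count1] = rhs-element;
       count1++;
       count3++;  */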
2023
2024 static tree
2025 generate_loop_for_rhs_to_temp (gfc_expr *expr2, tree tmp1, tree count3,
2026 tree count1, gfc_ss *lss, gfc_ss *rss,
2027 tree wheremask, bool invert)
2028 {
2029 stmtblock_t block, body1;
2030 gfc_loopinfo loop;
2031 gfc_se lse;
2032 gfc_se rse;
2033 tree tmp;
2034 tree wheremaskexpr;
2035
2036 gfc_start_block (&block);
2037
2038 gfc_init_se (&rse, NULL);
2039 gfc_init_se (&lse, NULL);
2040
2041 if (lss == gfc_ss_terminator)
2042 {
2043 gfc_init_block (&body1);
2044 gfc_conv_expr (&rse, expr2);
2045 lse.expr = gfc_build_array_ref (tmp1, count1, NULL);
2046 }
2047 else
2048 {
2049 /* Initialize the loop. */
2050 gfc_init_loopinfo (&loop);
2051
2052 /* We may need LSS to determine the shape of the expression. */
2053 gfc_add_ss_to_loop (&loop, lss);
2054 gfc_add_ss_to_loop (&loop, rss);
2055
2056 gfc_conv_ss_startstride (&loop);
2057 gfc_conv_loop_setup (&loop);
2058
2059 gfc_mark_ss_chain_used (rss, 1);
2060 /* Start the loop body. */
2061 gfc_start_scalarized_body (&loop, &body1);
2062
2063 /* Translate the expression. */
2064 gfc_copy_loopinfo_to_se (&rse, &loop);
2065 rse.ss = rss;
2066 gfc_conv_expr (&rse, expr2);
2067
2068 /* Form the expression of the temporary. */
2069 lse.expr = gfc_build_array_ref (tmp1, count1, NULL);
2070 }
2071
2072 /* Use the scalar assignment. */
2073 lse.string_length = rse.string_length;
2074 tmp = gfc_trans_scalar_assign (&lse, &rse, expr2->ts, true,
2075 expr2->expr_type == EXPR_VARIABLE);
2076
2077 /* Form the mask expression according to the mask tree list. */
2078 if (wheremask)
2079 {
2080 wheremaskexpr = gfc_build_array_ref (wheremask, count3, NULL);
2081 if (invert)
2082 wheremaskexpr = fold_build1 (TRUTH_NOT_EXPR,
2083 TREE_TYPE (wheremaskexpr),
2084 wheremaskexpr);
2085 tmp = fold_build3 (COND_EXPR, void_type_node,
2086 wheremaskexpr, tmp, build_empty_stmt ());
2087 }
2088
2089 gfc_add_expr_to_block (&body1, tmp);
2090
2091 if (lss == gfc_ss_terminator)
2092 {
2093 gfc_add_block_to_block (&block, &body1);
2094
2095 /* Increment count1. */
2096 tmp = fold_build2 (PLUS_EXPR, TREE_TYPE (count1), count1,
2097 gfc_index_one_node);
2098 gfc_add_modify_expr (&block, count1, tmp);
2099 }
2100 else
2101 {
2102 /* Increment count1. */
2103 tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
2104 count1, gfc_index_one_node);
2105 gfc_add_modify_expr (&body1, count1, tmp);
2106
2107 /* Increment count3. */
2108 if (count3)
2109 {
2110 tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
2111 count3, gfc_index_one_node);
2112 gfc_add_modify_expr (&body1, count3, tmp);
2113 }
2114
2115 /* Generate the copying loops. */
2116 gfc_trans_scalarizing_loops (&loop, &body1);
2117
2118 gfc_add_block_to_block (&block, &loop.pre);
2119 gfc_add_block_to_block (&block, &loop.post);
2120
2121 gfc_cleanup_loop (&loop);
2122 /* TODO: Reuse lss and rss when copying temp->lhs. Need to be careful
2123 as tree nodes in SS may not be valid in different scope. */
2124 }
2125
2126 tmp = gfc_finish_block (&block);
2127 return tmp;
2128 }
2129
2130
2131 /* Calculate the size of the temporary needed for the assignment inside a
2132 FORALL construct. LSS and RSS are filled in by this function. */
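/* For example (illustrative), for the assignment a(i, 1:m) = b(1:m, i) inside
   a FORALL over i, the inner temporary holds m elements per FORALL iteration;
   the overall allocation is this size times the iteration count computed by
   compute_overall_iter_number (). */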
2133
2134 static tree
2135 compute_inner_temp_size (gfc_expr *expr1, gfc_expr *expr2,
2136 stmtblock_t * pblock,
2137 gfc_ss **lss, gfc_ss **rss)
2138 {
2139 gfc_loopinfo loop;
2140 tree size;
2141 int i;
2142 int save_flag;
2143 tree tmp;
2144
2145 *lss = gfc_walk_expr (expr1);
2146 *rss = NULL;
2147
2148 size = gfc_index_one_node;
2149 if (*lss != gfc_ss_terminator)
2150 {
2151 gfc_init_loopinfo (&loop);
2152
2153 /* Walk the RHS of the expression. */
2154 *rss = gfc_walk_expr (expr2);
2155 if (*rss == gfc_ss_terminator)
2156 {
2157 /* The rhs is scalar. Add a ss for the expression. */
2158 *rss = gfc_get_ss ();
2159 (*rss)->next = gfc_ss_terminator;
2160 (*rss)->type = GFC_SS_SCALAR;
2161 (*rss)->expr = expr2;
2162 }
2163
2164 /* Associate the SS with the loop. */
2165 gfc_add_ss_to_loop (&loop, *lss);
2166 /* We don't actually need to add the rhs at this point, but it might
2167 make guessing the loop bounds a bit easier. */
2168 gfc_add_ss_to_loop (&loop, *rss);
2169
2170 /* We only want the shape of the expression, not the rest of the junk
2171 generated by the scalarizer. */
2172 loop.array_parameter = 1;
2173
2174 /* Calculate the bounds of the scalarization. */
2175 save_flag = flag_bounds_check;
2176 flag_bounds_check = 0;
2177 gfc_conv_ss_startstride (&loop);
2178 flag_bounds_check = save_flag;
2179 gfc_conv_loop_setup (&loop);
2180
2181 /* Figure out how many elements we need. */
2182 for (i = 0; i < loop.dimen; i++)
2183 {
2184 tmp = fold_build2 (MINUS_EXPR, gfc_array_index_type,
2185 gfc_index_one_node, loop.from[i]);
2186 tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
2187 tmp, loop.to[i]);
2188 size = fold_build2 (MULT_EXPR, gfc_array_index_type, size, tmp);
2189 }
2190 gfc_add_block_to_block (pblock, &loop.pre);
2191 size = gfc_evaluate_now (size, pblock);
2192 gfc_add_block_to_block (pblock, &loop.post);
2193
2194 /* TODO: write a function that cleans up a loopinfo without freeing
2195 the SS chains. Currently a NOP. */
2196 }
2197
2198 return size;
2199 }
2200
2201
2202 /* Calculate the overall iterator number of the nested forall construct.
2203 This routine actually calculates the number of times the body of the
2204 nested forall specified by NESTED_FORALL_INFO is executed and multiplies
2205 that by the expression INNER_SIZE. The BLOCK argument specifies the
2206 block in which to calculate the result, and the optional INNER_SIZE_BODY
2207 argument contains any statements that need to be executed (inside the loop)
2208 to initialize or calculate INNER_SIZE. */
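/* For example, for FORALL (i=1:10, j=1:20) with no mask and a constant
   INNER_SIZE of 5, the result folds to the constant 1000 (= 10*20*5); when
   any bound, the mask or INNER_SIZE is not known at compile time, the
   product is instead accumulated at run time in the "num" variable created
   below. */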
2209
2210 static tree
2211 compute_overall_iter_number (forall_info *nested_forall_info, tree inner_size,
2212 stmtblock_t *inner_size_body, stmtblock_t *block)
2213 {
2214 forall_info *forall_tmp = nested_forall_info;
2215 tree tmp, number;
2216 stmtblock_t body;
2217
2218 /* We can eliminate the innermost unconditional loops with constant
2219 array bounds. */
2220 if (INTEGER_CST_P (inner_size))
2221 {
2222 while (forall_tmp
2223 && !forall_tmp->mask
2224 && INTEGER_CST_P (forall_tmp->size))
2225 {
2226 inner_size = fold_build2 (MULT_EXPR, gfc_array_index_type,
2227 inner_size, forall_tmp->size);
2228 forall_tmp = forall_tmp->prev_nest;
2229 }
2230
2231 /* If there are no loops left, we have our constant result. */
2232 if (!forall_tmp)
2233 return inner_size;
2234 }
2235
2236 /* Otherwise, create a temporary variable to compute the result. */
2237 number = gfc_create_var (gfc_array_index_type, "num");
2238 gfc_add_modify_expr (block, number, gfc_index_zero_node);
2239
2240 gfc_start_block (&body);
2241 if (inner_size_body)
2242 gfc_add_block_to_block (&body, inner_size_body);
2243 if (forall_tmp)
2244 tmp = build2 (PLUS_EXPR, gfc_array_index_type, number,
2245 inner_size);
2246 else
2247 tmp = inner_size;
2248 gfc_add_modify_expr (&body, number, tmp);
2249 tmp = gfc_finish_block (&body);
2250
2251 /* Generate loops. */
2252 if (forall_tmp != NULL)
2253 tmp = gfc_trans_nested_forall_loop (forall_tmp, tmp, 1);
2254
2255 gfc_add_expr_to_block (block, tmp);
2256
2257 return number;
2258 }
2259
2260
2261 /* Allocate a temporary for the FORALL construct. SIZE is the size of the
2262 temporary needed. PTEMP1 is returned so the space can be freed later. */
2263
2264 static tree
2265 allocate_temp_for_forall_nest_1 (tree type, tree size, stmtblock_t * block,
2266 tree * ptemp1)
2267 {
2268 tree bytesize;
2269 tree unit;
2270 tree tmp;
2271
2272 unit = fold_convert (gfc_array_index_type, TYPE_SIZE_UNIT (type));
2273 if (!integer_onep (unit))
2274 bytesize = fold_build2 (MULT_EXPR, gfc_array_index_type, size, unit);
2275 else
2276 bytesize = size;
2277
2278 *ptemp1 = NULL;
2279 tmp = gfc_do_allocate (bytesize, size, ptemp1, block, type);
2280
2281 if (*ptemp1)
2282 tmp = build_fold_indirect_ref (tmp);
2283 return tmp;
2284 }
2285
2286
2287 /* Allocate a temporary for the FORALL construct according to the information
2288 in nested_forall_info. INNER_SIZE is the size of the temporary needed by the
2289 assignment inside the FORALL. PTEMP1 is returned so the space can be freed later. */
2290
2291 static tree
2292 allocate_temp_for_forall_nest (forall_info * nested_forall_info, tree type,
2293 tree inner_size, stmtblock_t * inner_size_body,
2294 stmtblock_t * block, tree * ptemp1)
2295 {
2296 tree size;
2297
2298 /* Calculate the total size of the temporary needed by the FORALL construct. */
2299 size = compute_overall_iter_number (nested_forall_info, inner_size,
2300 inner_size_body, block);
2301
2302 return allocate_temp_for_forall_nest_1 (type, size, block, ptemp1);
2303 }
2304
2305
2306 /* Handle assignments inside FORALL which need a temporary.
2307
2308 forall (i=start:end:stride; maskexpr)
2309 e<i> = f<i>
2310 end forall
2311 (where e,f<i> are arbitrary expressions possibly involving i
2312 and there is a dependency between e<i> and f<i>)
2313 Translates to:
2314 masktmp(:) = maskexpr(:)
2315
2316 maskindex = 0;
2317 count1 = 0;
2318 num = 0;
2319 for (i = start; i <= end; i += stride)
2320 num += SIZE (f<i>)
2321 count1 = 0;
2322 ALLOCATE (tmp(num))
2323 for (i = start; i <= end; i += stride)
2324 {
2325 if (masktmp[maskindex++])
2326 tmp[count1++] = f<i>
2327 }
2328 maskindex = 0;
2329 count1 = 0;
2330 for (i = start; i <= end; i += stride)
2331 {
2332 if (masktmp[maskindex++])
2333 e<i> = tmp[count1++]
2334 }
2335 DEALLOCATE (tmp)
2336 */
2337 static void
2338 gfc_trans_assign_need_temp (gfc_expr * expr1, gfc_expr * expr2,
2339 tree wheremask, bool invert,
2340 forall_info * nested_forall_info,
2341 stmtblock_t * block)
2342 {
2343 tree type;
2344 tree inner_size;
2345 gfc_ss *lss, *rss;
2346 tree count, count1;
2347 tree tmp, tmp1;
2348 tree ptemp1;
2349 stmtblock_t inner_size_body;
2350
2351 /* Create vars. count1 is the current iterator number of the nested
2352 forall. */
2353 count1 = gfc_create_var (gfc_array_index_type, "count1");
2354
2355 /* Count is the wheremask index. */
2356 if (wheremask)
2357 {
2358 count = gfc_create_var (gfc_array_index_type, "count");
2359 gfc_add_modify_expr (block, count, gfc_index_zero_node);
2360 }
2361 else
2362 count = NULL;
2363
2364 /* Initialize count1. */
2365 gfc_add_modify_expr (block, count1, gfc_index_zero_node);
2366
2367 /* Calculate the size of the temporary needed in the assignment. Fill in LSS
2368 and RSS, which are used by generate_loop_for_rhs_to_temp (). */
2369 gfc_init_block (&inner_size_body);
2370 inner_size = compute_inner_temp_size (expr1, expr2, &inner_size_body,
2371 &lss, &rss);
2372
2373 /* The type of the LHS. Used in allocate_temp_for_forall_nest (). */
2374 if (expr1->ts.type == BT_CHARACTER && expr1->ts.cl->length)
2375 {
2376 if (!expr1->ts.cl->backend_decl)
2377 {
2378 gfc_se tse;
2379 gfc_init_se (&tse, NULL);
2380 gfc_conv_expr (&tse, expr1->ts.cl->length);
2381 expr1->ts.cl->backend_decl = tse.expr;
2382 }
2383 type = gfc_get_character_type_len (gfc_default_character_kind,
2384 expr1->ts.cl->backend_decl);
2385 }
2386 else
2387 type = gfc_typenode_for_spec (&expr1->ts);
2388
2389 /* Allocate temporary for nested forall construct according to the
2390 information in nested_forall_info and inner_size. */
2391 tmp1 = allocate_temp_for_forall_nest (nested_forall_info, type, inner_size,
2392 &inner_size_body, block, &ptemp1);
2393
2394 /* Generate code to copy the rhs to the temporary. */
2395 tmp = generate_loop_for_rhs_to_temp (expr2, tmp1, count, count1, lss, rss,
2396 wheremask, invert);
2397
2398 /* Generate body and loops according to the information in
2399 nested_forall_info. */
2400 tmp = gfc_trans_nested_forall_loop (nested_forall_info, tmp, 1);
2401 gfc_add_expr_to_block (block, tmp);
2402
2403 /* Reset count1. */
2404 gfc_add_modify_expr (block, count1, gfc_index_zero_node);
2405
2406 /* Reset count. */
2407 if (wheremask)
2408 gfc_add_modify_expr (block, count, gfc_index_zero_node);
2409
2410 /* Generate code to copy the temporary to the lhs. */
2411 tmp = generate_loop_for_temp_to_lhs (expr1, tmp1, count, count1,
2412 wheremask, invert);
2413
2414 /* Generate body and loops according to the information in
2415 nested_forall_info. */
2416 tmp = gfc_trans_nested_forall_loop (nested_forall_info, tmp, 1);
2417 gfc_add_expr_to_block (block, tmp);
2418
2419 if (ptemp1)
2420 {
2421 /* Free the temporary. */
2422 tmp = gfc_call_free (ptemp1);
2423 gfc_add_expr_to_block (block, tmp);
2424 }
2425 }
2426
2427
2428 /* Translate a pointer assignment inside FORALL which needs a temporary. */
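/* For example (an illustrative case), FORALL (i=1:n) p(i)%ptr => p(n+1-i)%ptr
   has a dependency between the lhs and the rhs, so the rhs pointers (or array
   descriptors, in the array case below) are first captured in a temporary and
   only then assigned to the lhs pointers. */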
2429
2430 static void
2431 gfc_trans_pointer_assign_need_temp (gfc_expr * expr1, gfc_expr * expr2,
2432 forall_info * nested_forall_info,
2433 stmtblock_t * block)
2434 {
2435 tree type;
2436 tree inner_size;
2437 gfc_ss *lss, *rss;
2438 gfc_se lse;
2439 gfc_se rse;
2440 gfc_ss_info *info;
2441 gfc_loopinfo loop;
2442 tree desc;
2443 tree parm;
2444 tree parmtype;
2445 stmtblock_t body;
2446 tree count;
2447 tree tmp, tmp1, ptemp1;
2448
2449 count = gfc_create_var (gfc_array_index_type, "count");
2450 gfc_add_modify_expr (block, count, gfc_index_zero_node);
2451
2452 inner_size = gfc_index_one_node;
2453 lss = gfc_walk_expr (expr1);
2454 rss = gfc_walk_expr (expr2);
2455 if (lss == gfc_ss_terminator)
2456 {
2457 type = gfc_typenode_for_spec (&expr1->ts);
2458 type = build_pointer_type (type);
2459
2460 /* Allocate temporary for nested forall construct according to the
2461 information in nested_forall_info and inner_size. */
2462 tmp1 = allocate_temp_for_forall_nest (nested_forall_info, type,
2463 inner_size, NULL, block, &ptemp1);
2464 gfc_start_block (&body);
2465 gfc_init_se (&lse, NULL);
2466 lse.expr = gfc_build_array_ref (tmp1, count, NULL);
2467 gfc_init_se (&rse, NULL);
2468 rse.want_pointer = 1;
2469 gfc_conv_expr (&rse, expr2);
2470 gfc_add_block_to_block (&body, &rse.pre);
2471 gfc_add_modify_expr (&body, lse.expr,
2472 fold_convert (TREE_TYPE (lse.expr), rse.expr));
2473 gfc_add_block_to_block (&body, &rse.post);
2474
2475 /* Increment count. */
2476 tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
2477 count, gfc_index_one_node);
2478 gfc_add_modify_expr (&body, count, tmp);
2479
2480 tmp = gfc_finish_block (&body);
2481
2482 /* Generate body and loops according to the information in
2483 nested_forall_info. */
2484 tmp = gfc_trans_nested_forall_loop (nested_forall_info, tmp, 1);
2485 gfc_add_expr_to_block (block, tmp);
2486
2487 /* Reset count. */
2488 gfc_add_modify_expr (block, count, gfc_index_zero_node);
2489
2490 gfc_start_block (&body);
2491 gfc_init_se (&lse, NULL);
2492 gfc_init_se (&rse, NULL);
2493 rse.expr = gfc_build_array_ref (tmp1, count, NULL);
2494 lse.want_pointer = 1;
2495 gfc_conv_expr (&lse, expr1);
2496 gfc_add_block_to_block (&body, &lse.pre);
2497 gfc_add_modify_expr (&body, lse.expr, rse.expr);
2498 gfc_add_block_to_block (&body, &lse.post);
2499 /* Increment count. */
2500 tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
2501 count, gfc_index_one_node);
2502 gfc_add_modify_expr (&body, count, tmp);
2503 tmp = gfc_finish_block (&body);
2504
2505 /* Generate body and loops according to the information in
2506 nested_forall_info. */
2507 tmp = gfc_trans_nested_forall_loop (nested_forall_info, tmp, 1);
2508 gfc_add_expr_to_block (block, tmp);
2509 }
2510 else
2511 {
2512 gfc_init_loopinfo (&loop);
2513
2514 /* Associate the SS with the loop. */
2515 gfc_add_ss_to_loop (&loop, rss);
2516
2517 /* Setup the scalarizing loops and bounds. */
2518 gfc_conv_ss_startstride (&loop);
2519
2520 gfc_conv_loop_setup (&loop);
2521
2522 info = &rss->data.info;
2523 desc = info->descriptor;
2524
2525 /* Make a new descriptor. */
2526 parmtype = gfc_get_element_type (TREE_TYPE (desc));
2527 parmtype = gfc_get_array_type_bounds (parmtype, loop.dimen,
2528 loop.from, loop.to, 1);
2529
2530 /* Allocate temporary for nested forall construct. */
2531 tmp1 = allocate_temp_for_forall_nest (nested_forall_info, parmtype,
2532 inner_size, NULL, block, &ptemp1);
2533 gfc_start_block (&body);
2534 gfc_init_se (&lse, NULL);
2535 lse.expr = gfc_build_array_ref (tmp1, count, NULL);
2536 lse.direct_byref = 1;
2537 rss = gfc_walk_expr (expr2);
2538 gfc_conv_expr_descriptor (&lse, expr2, rss);
2539
2540 gfc_add_block_to_block (&body, &lse.pre);
2541 gfc_add_block_to_block (&body, &lse.post);
2542
2543 /* Increment count. */
2544 tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
2545 count, gfc_index_one_node);
2546 gfc_add_modify_expr (&body, count, tmp);
2547
2548 tmp = gfc_finish_block (&body);
2549
2550 /* Generate body and loops according to the information in
2551 nested_forall_info. */
2552 tmp = gfc_trans_nested_forall_loop (nested_forall_info, tmp, 1);
2553 gfc_add_expr_to_block (block, tmp);
2554
2555 /* Reset count. */
2556 gfc_add_modify_expr (block, count, gfc_index_zero_node);
2557
2558 parm = gfc_build_array_ref (tmp1, count, NULL);
2559 lss = gfc_walk_expr (expr1);
2560 gfc_init_se (&lse, NULL);
2561 gfc_conv_expr_descriptor (&lse, expr1, lss);
2562 gfc_add_modify_expr (&lse.pre, lse.expr, parm);
2563 gfc_start_block (&body);
2564 gfc_add_block_to_block (&body, &lse.pre);
2565 gfc_add_block_to_block (&body, &lse.post);
2566
2567 /* Increment count. */
2568 tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
2569 count, gfc_index_one_node);
2570 gfc_add_modify_expr (&body, count, tmp);
2571
2572 tmp = gfc_finish_block (&body);
2573
2574 tmp = gfc_trans_nested_forall_loop (nested_forall_info, tmp, 1);
2575 gfc_add_expr_to_block (block, tmp);
2576 }
2577 /* Free the temporary. */
2578 if (ptemp1)
2579 {
2580 tmp = gfc_call_free (ptemp1);
2581 gfc_add_expr_to_block (block, tmp);
2582 }
2583 }
2584
2585
2586 /* FORALL and WHERE statements are really nasty, especially when you nest
2587 them. The entire rhs of a forall assignment must be evaluated before the
2588 actual assignments are performed. Presumably this also applies to all the
2589 assignments in an inner WHERE statement. */
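/* For example, FORALL (i=1:n) a(i) = a(n+1-i) must read every a(n+1-i)
   before any a(i) is modified, which is why a temporary holding the whole
   rhs may be required. */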
2590
2591 /* Generate code for a FORALL statement. Any temporaries are allocated as a
2592 linear array, relying on the fact that we process in the same order in all
2593 loops.
2594
2595 forall (i=start:end:stride; maskexpr)
2596 e<i> = f<i>
2597 g<i> = h<i>
2598 end forall
2599 (where e,f,g,h<i> are arbitrary expressions possibly involving i)
2600 Translates to:
2601 count = ((end + 1 - start) / stride)
2602 masktmp(:) = maskexpr(:)
2603
2604 maskindex = 0;
2605 for (i = start; i <= end; i += stride)
2606 {
2607 if (masktmp[maskindex++])
2608 e<i> = f<i>
2609 }
2610 maskindex = 0;
2611 for (i = start; i <= end; i += stride)
2612 {
2613 if (masktmp[maskindex++])
2614 g<i> = h<i>
2615 }
2616
2617 Note that this code only works when there are no dependencies.
2618 FORALL loops with array assignments and data dependencies are a real pain,
2619 because the size of the temporary cannot always be determined before the
2620 loop is executed. This problem is compounded by the presence of nested
2621 FORALL constructs.
2622 */
2623
2624 static tree
2625 gfc_trans_forall_1 (gfc_code * code, forall_info * nested_forall_info)
2626 {
2627 stmtblock_t pre;
2628 stmtblock_t post;
2629 stmtblock_t block;
2630 stmtblock_t body;
2631 tree *var;
2632 tree *start;
2633 tree *end;
2634 tree *step;
2635 gfc_expr **varexpr;
2636 tree tmp;
2637 tree assign;
2638 tree size;
2639 tree maskindex;
2640 tree mask;
2641 tree pmask;
2642 int n;
2643 int nvar;
2644 int need_temp;
2645 gfc_forall_iterator *fa;
2646 gfc_se se;
2647 gfc_code *c;
2648 gfc_saved_var *saved_vars;
2649 iter_info *this_forall;
2650 forall_info *info;
2651 bool need_mask;
2652
2653 /* Do nothing if the mask is false. */
2654 if (code->expr
2655 && code->expr->expr_type == EXPR_CONSTANT
2656 && !code->expr->value.logical)
2657 return build_empty_stmt ();
2658
2659 n = 0;
2660 /* Count the number of FORALL indexes. */
2661 for (fa = code->ext.forall_iterator; fa; fa = fa->next)
2662 n++;
2663 nvar = n;
2664
2665 /* Allocate the space for var, start, end, step, varexpr. */
2666 var = (tree *) gfc_getmem (nvar * sizeof (tree));
2667 start = (tree *) gfc_getmem (nvar * sizeof (tree));
2668 end = (tree *) gfc_getmem (nvar * sizeof (tree));
2669 step = (tree *) gfc_getmem (nvar * sizeof (tree));
2670 varexpr = (gfc_expr **) gfc_getmem (nvar * sizeof (gfc_expr *));
2671 saved_vars = (gfc_saved_var *) gfc_getmem (nvar * sizeof (gfc_saved_var));
2672
2673 /* Allocate the space for info. */
2674 info = (forall_info *) gfc_getmem (sizeof (forall_info));
2675
2676 gfc_start_block (&pre);
2677 gfc_init_block (&post);
2678 gfc_init_block (&block);
2679
2680 n = 0;
2681 for (fa = code->ext.forall_iterator; fa; fa = fa->next)
2682 {
2683 gfc_symbol *sym = fa->var->symtree->n.sym;
2684
2685 /* Allocate space for this_forall. */
2686 this_forall = (iter_info *) gfc_getmem (sizeof (iter_info));
2687
2688 /* Create a temporary variable for the FORALL index. */
2689 tmp = gfc_typenode_for_spec (&sym->ts);
2690 var[n] = gfc_create_var (tmp, sym->name);
2691 gfc_shadow_sym (sym, var[n], &saved_vars[n]);
2692
2693 /* Record it in this_forall. */
2694 this_forall->var = var[n];
2695
2696 /* Replace the index symbol's backend_decl with the temporary decl. */
2697 sym->backend_decl = var[n];
2698
2699 /* Work out the start, end and stride for the loop. */
2700 gfc_init_se (&se, NULL);
2701 gfc_conv_expr_val (&se, fa->start);
2702 /* Record it in this_forall. */
2703 this_forall->start = se.expr;
2704 gfc_add_block_to_block (&block, &se.pre);
2705 start[n] = se.expr;
2706
2707 gfc_init_se (&se, NULL);
2708 gfc_conv_expr_val (&se, fa->end);
2709 /* Record it in this_forall. */
2710 this_forall->end = se.expr;
2711 gfc_make_safe_expr (&se);
2712 gfc_add_block_to_block (&block, &se.pre);
2713 end[n] = se.expr;
2714
2715 gfc_init_se (&se, NULL);
2716 gfc_conv_expr_val (&se, fa->stride);
2717 /* Record it in this_forall. */
2718 this_forall->step = se.expr;
2719 gfc_make_safe_expr (&se);
2720 gfc_add_block_to_block (&block, &se.pre);
2721 step[n] = se.expr;
2722
2723 /* Set the NEXT field of this_forall to NULL. */
2724 this_forall->next = NULL;
2725 /* Link this_forall to the info construct. */
2726 if (info->this_loop)
2727 {
2728 iter_info *iter_tmp = info->this_loop;
2729 while (iter_tmp->next != NULL)
2730 iter_tmp = iter_tmp->next;
2731 iter_tmp->next = this_forall;
2732 }
2733 else
2734 info->this_loop = this_forall;
2735
2736 n++;
2737 }
2738 nvar = n;
2739
2740 /* Calculate the size needed for the current forall level. */
2741 size = gfc_index_one_node;
2742 for (n = 0; n < nvar; n++)
2743 {
2744 /* size = (end + step - start) / step. */
2745 tmp = fold_build2 (MINUS_EXPR, TREE_TYPE (start[n]),
2746 step[n], start[n]);
2747 tmp = fold_build2 (PLUS_EXPR, TREE_TYPE (end[n]), end[n], tmp);
2748
2749 tmp = fold_build2 (FLOOR_DIV_EXPR, TREE_TYPE (tmp), tmp, step[n]);
2750 tmp = convert (gfc_array_index_type, tmp);
2751
2752 size = fold_build2 (MULT_EXPR, gfc_array_index_type, size, tmp);
2753 }
2754
2755 /* Record the nvar and size of current forall level. */
2756 info->nvar = nvar;
2757 info->size = size;
2758
2759 if (code->expr)
2760 {
2761 /* If the mask is .true., consider the FORALL unconditional. */
2762 if (code->expr->expr_type == EXPR_CONSTANT
2763 && code->expr->value.logical)
2764 need_mask = false;
2765 else
2766 need_mask = true;
2767 }
2768 else
2769 need_mask = false;
2770
2771 /* First we need to allocate the mask. */
2772 if (need_mask)
2773 {
2774 /* As the mask array can be very big, prefer compact boolean types. */
2775 tree mask_type = gfc_get_logical_type (gfc_logical_kinds[0].kind);
2776 mask = allocate_temp_for_forall_nest (nested_forall_info, mask_type,
2777 size, NULL, &block, &pmask);
2778 maskindex = gfc_create_var_np (gfc_array_index_type, "mi");
2779
2780 /* Record them in the info structure. */
2781 info->maskindex = maskindex;
2782 info->mask = mask;
2783 }
2784 else
2785 {
2786 /* No mask was specified. */
2787 maskindex = NULL_TREE;
2788 mask = pmask = NULL_TREE;
2789 }
2790
2791 /* Link the current forall level to nested_forall_info. */
2792 info->prev_nest = nested_forall_info;
2793 nested_forall_info = info;
2794
2795 /* Copy the mask into a temporary variable if required.
2796 For now we assume a mask temporary is needed. */
2797 if (need_mask)
2798 {
2799 /* As the mask array can be very big, prefer compact boolean types. */
2800 tree mask_type = gfc_get_logical_type (gfc_logical_kinds[0].kind);
2801
2802 gfc_add_modify_expr (&block, maskindex, gfc_index_zero_node);
2803
2804 /* Start of mask assignment loop body. */
2805 gfc_start_block (&body);
2806
2807 /* Evaluate the mask expression. */
2808 gfc_init_se (&se, NULL);
2809 gfc_conv_expr_val (&se, code->expr);
2810 gfc_add_block_to_block (&body, &se.pre);
2811
2812 /* Store the mask. */
2813 se.expr = convert (mask_type, se.expr);
2814
2815 tmp = gfc_build_array_ref (mask, maskindex, NULL);
2816 gfc_add_modify_expr (&body, tmp, se.expr);
2817
2818 /* Advance to the next mask element. */
2819 tmp = build2 (PLUS_EXPR, gfc_array_index_type,
2820 maskindex, gfc_index_one_node);
2821 gfc_add_modify_expr (&body, maskindex, tmp);
2822
2823 /* Generate the loops. */
2824 tmp = gfc_finish_block (&body);
2825 tmp = gfc_trans_nested_forall_loop (info, tmp, 0);
2826 gfc_add_expr_to_block (&block, tmp);
2827 }
2828
2829 c = code->block->next;
2830
2831 /* TODO: loop merging in FORALL statements. */
2832 /* Now that we've got a copy of the mask, generate the assignment loops. */
2833 while (c)
2834 {
2835 switch (c->op)
2836 {
2837 case EXEC_ASSIGN:
2838 /* A scalar or array assignment. Do the simple check for lhs to
2839 rhs dependencies; if one is found, make a temporary for the rhs
2840 and form a second forall block to copy it to the variable. */
2841 need_temp = check_forall_dependencies(c, &pre, &post);
2842
2843 /* Temporaries due to array assignment data dependencies introduce
2844 no end of problems. */
2845 if (need_temp)
2846 gfc_trans_assign_need_temp (c->expr, c->expr2, NULL, false,
2847 nested_forall_info, &block);
2848 else
2849 {
2850 /* Use the normal assignment copying routines. */
2851 assign = gfc_trans_assignment (c->expr, c->expr2, false);
2852
2853 /* Generate body and loops. */
2854 tmp = gfc_trans_nested_forall_loop (nested_forall_info,
2855 assign, 1);
2856 gfc_add_expr_to_block (&block, tmp);
2857 }
2858
2859 /* Cleanup any temporary symtrees that have been made to deal
2860 with dependencies. */
2861 if (new_symtree)
2862 cleanup_forall_symtrees (c);
2863
2864 break;
2865
2866 case EXEC_WHERE:
2867 /* Translate WHERE or WHERE construct nested in FORALL. */
2868 gfc_trans_where_2 (c, NULL, false, nested_forall_info, &block);
2869 break;
2870
2871 /* Pointer assignment inside FORALL. */
2872 case EXEC_POINTER_ASSIGN:
2873 need_temp = gfc_check_dependency (c->expr, c->expr2, 0);
2874 if (need_temp)
2875 gfc_trans_pointer_assign_need_temp (c->expr, c->expr2,
2876 nested_forall_info, &block);
2877 else
2878 {
2879 /* Use the normal assignment copying routines. */
2880 assign = gfc_trans_pointer_assignment (c->expr, c->expr2);
2881
2882 /* Generate body and loops. */
2883 tmp = gfc_trans_nested_forall_loop (nested_forall_info,
2884 assign, 1);
2885 gfc_add_expr_to_block (&block, tmp);
2886 }
2887 break;
2888
2889 case EXEC_FORALL:
2890 tmp = gfc_trans_forall_1 (c, nested_forall_info);
2891 gfc_add_expr_to_block (&block, tmp);
2892 break;
2893
2894 /* Explicit subroutine calls are prevented by the frontend but interface
2895 assignments can legitimately produce them. */
2896 case EXEC_ASSIGN_CALL:
2897 assign = gfc_trans_call (c, true);
2898 tmp = gfc_trans_nested_forall_loop (nested_forall_info, assign, 1);
2899 gfc_add_expr_to_block (&block, tmp);
2900 break;
2901
2902 default:
2903 gcc_unreachable ();
2904 }
2905
2906 c = c->next;
2907 }
2908
2909 /* Restore the original index variables. */
2910 for (fa = code->ext.forall_iterator, n = 0; fa; fa = fa->next, n++)
2911 gfc_restore_sym (fa->var->symtree->n.sym, &saved_vars[n]);
2912
2913 /* Free the space for var, start, end, step, varexpr. */
2914 gfc_free (var);
2915 gfc_free (start);
2916 gfc_free (end);
2917 gfc_free (step);
2918 gfc_free (varexpr);
2919 gfc_free (saved_vars);
2920
2921 /* Free the space for this forall_info. */
2922 gfc_free (info);
2923
2924 if (pmask)
2925 {
2926 /* Free the temporary for the mask. */
2927 tmp = gfc_call_free (pmask);
2928 gfc_add_expr_to_block (&block, tmp);
2929 }
2930 if (maskindex)
2931 pushdecl (maskindex);
2932
2933 gfc_add_block_to_block (&pre, &block);
2934 gfc_add_block_to_block (&pre, &post);
2935
2936 return gfc_finish_block (&pre);
2937 }
2938
2939
2940 /* Translate the FORALL statement or construct. */
2941
2942 tree gfc_trans_forall (gfc_code * code)
2943 {
2944 return gfc_trans_forall_1 (code, NULL);
2945 }
2946
2947
2948 /* Evaluate the WHERE mask expression, copy its value to a temporary.
2949 If the WHERE construct is nested in FORALL, compute the overall temporary
2950 needed by the WHERE mask expression multiplied by the iterator number of
2951 the nested forall.
2952 ME is the WHERE mask expression.
2953 MASK is the current execution mask upon input, whose sense may or may
2954 not be inverted as specified by the INVERT argument.
2955 CMASK is the updated execution mask on output, or NULL if not required.
2956 PMASK is the pending execution mask on output, or NULL if not required.
2957 BLOCK is the block in which to place the condition evaluation loops. */
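/* Conceptually, the loops generated below store, for each element (a sketch):

     cond = ME;
     m = MASK (negated when INVERT is set), or effectively .true. if MASK
         is absent;
     CMASK = m .AND. cond;
     PMASK = m .AND. .NOT. cond;  */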
2958
2959 static void
2960 gfc_evaluate_where_mask (gfc_expr * me, forall_info * nested_forall_info,
2961 tree mask, bool invert, tree cmask, tree pmask,
2962 tree mask_type, stmtblock_t * block)
2963 {
2964 tree tmp, tmp1;
2965 gfc_ss *lss, *rss;
2966 gfc_loopinfo loop;
2967 stmtblock_t body, body1;
2968 tree count, cond, mtmp;
2969 gfc_se lse, rse;
2970
2971 gfc_init_loopinfo (&loop);
2972
2973 lss = gfc_walk_expr (me);
2974 rss = gfc_walk_expr (me);
2975
2976 /* Variable to index the temporary. */
2977 count = gfc_create_var (gfc_array_index_type, "count");
2978 /* Initialize count. */
2979 gfc_add_modify_expr (block, count, gfc_index_zero_node);
2980
2981 gfc_start_block (&body);
2982
2983 gfc_init_se (&rse, NULL);
2984 gfc_init_se (&lse, NULL);
2985
2986 if (lss == gfc_ss_terminator)
2987 {
2988 gfc_init_block (&body1);
2989 }
2990 else
2991 {
2992 /* Initialize the loop. */
2993 gfc_init_loopinfo (&loop);
2994
2995 /* We may need LSS to determine the shape of the expression. */
2996 gfc_add_ss_to_loop (&loop, lss);
2997 gfc_add_ss_to_loop (&loop, rss);
2998
2999 gfc_conv_ss_startstride (&loop);
3000 gfc_conv_loop_setup (&loop);
3001
3002 gfc_mark_ss_chain_used (rss, 1);
3003 /* Start the loop body. */
3004 gfc_start_scalarized_body (&loop, &body1);
3005
3006 /* Translate the expression. */
3007 gfc_copy_loopinfo_to_se (&rse, &loop);
3008 rse.ss = rss;
3009 gfc_conv_expr (&rse, me);
3010 }
3011
3012 /* Variable to evaluate mask condition. */
3013 cond = gfc_create_var (mask_type, "cond");
3014 if (mask && (cmask || pmask))
3015 mtmp = gfc_create_var (mask_type, "mask");
3016 else
  mtmp = NULL_TREE;
3017
3018 gfc_add_block_to_block (&body1, &lse.pre);
3019 gfc_add_block_to_block (&body1, &rse.pre);
3020
3021 gfc_add_modify_expr (&body1, cond, fold_convert (mask_type, rse.expr));
3022
3023 if (mask && (cmask || pmask))
3024 {
3025 tmp = gfc_build_array_ref (mask, count, NULL);
3026 if (invert)
3027 tmp = fold_build1 (TRUTH_NOT_EXPR, mask_type, tmp);
3028 gfc_add_modify_expr (&body1, mtmp, tmp);
3029 }
3030
3031 if (cmask)
3032 {
3033 tmp1 = gfc_build_array_ref (cmask, count, NULL);
3034 tmp = cond;
3035 if (mask)
3036 tmp = build2 (TRUTH_AND_EXPR, mask_type, mtmp, tmp);
3037 gfc_add_modify_expr (&body1, tmp1, tmp);
3038 }
3039
3040 if (pmask)
3041 {
3042 tmp1 = gfc_build_array_ref (pmask, count, NULL);
3043 tmp = build1 (TRUTH_NOT_EXPR, mask_type, cond);
3044 if (mask)
3045 tmp = build2 (TRUTH_AND_EXPR, mask_type, mtmp, tmp);
3046 gfc_add_modify_expr (&body1, tmp1, tmp);
3047 }
3048
3049 gfc_add_block_to_block (&body1, &lse.post);
3050 gfc_add_block_to_block (&body1, &rse.post);
3051
3052 if (lss == gfc_ss_terminator)
3053 {
3054 gfc_add_block_to_block (&body, &body1);
3055 }
3056 else
3057 {
3058 /* Increment count. */
3059 tmp1 = fold_build2 (PLUS_EXPR, gfc_array_index_type, count,
3060 gfc_index_one_node);
3061 gfc_add_modify_expr (&body1, count, tmp1);
3062
3063 /* Generate the copying loops. */
3064 gfc_trans_scalarizing_loops (&loop, &body1);
3065
3066 gfc_add_block_to_block (&body, &loop.pre);
3067 gfc_add_block_to_block (&body, &loop.post);
3068
3069 gfc_cleanup_loop (&loop);
3070 /* TODO: Reuse lss and rss when copying temp->lhs. Need to be careful
3071 as tree nodes in SS may not be valid in different scope. */
3072 }
3073
3074 tmp1 = gfc_finish_block (&body);
3075 /* If the WHERE construct is inside FORALL, fill the full temporary. */
3076 if (nested_forall_info != NULL)
3077 tmp1 = gfc_trans_nested_forall_loop (nested_forall_info, tmp1, 1);
3078
3079 gfc_add_expr_to_block (block, tmp1);
3080 }
3081
3082
3083 /* Translate an assignment statement in a WHERE statement or construct
3084 statement. The MASK expression is used to control which elements
3085 of EXPR1 shall be assigned. The sense of MASK is specified by
3086 INVERT. */
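/* Roughly, each elemental assignment generated below has the shape (a
   sketch; the mask test is negated when INVERT is set):

     if (mask[count1])
       lhs-element = rhs-element;
     count1++;

   COUNT2 indexes the mask again when a loop temporary has to be copied
   back to the lhs. */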
3087
3088 static tree
3089 gfc_trans_where_assign (gfc_expr *expr1, gfc_expr *expr2,
3090 tree mask, bool invert,
3091 tree count1, tree count2,
3092 gfc_symbol *sym)
3093 {
3094 gfc_se lse;
3095 gfc_se rse;
3096 gfc_ss *lss;
3097 gfc_ss *lss_section;
3098 gfc_ss *rss;
3099
3100 gfc_loopinfo loop;
3101 tree tmp;
3102 stmtblock_t block;
3103 stmtblock_t body;
3104 tree index, maskexpr;
3105
3106 #if 0
3107 /* TODO: handle this special case.
3108 Special case a single function returning an array. */
3109 if (expr2->expr_type == EXPR_FUNCTION && expr2->rank > 0)
3110 {
3111 tmp = gfc_trans_arrayfunc_assign (expr1, expr2);
3112 if (tmp)
3113 return tmp;
3114 }
3115 #endif
3116
3117 /* Assignment of the form lhs = rhs. */
3118 gfc_start_block (&block);
3119
3120 gfc_init_se (&lse, NULL);
3121 gfc_init_se (&rse, NULL);
3122
3123 /* Walk the lhs. */
3124 lss = gfc_walk_expr (expr1);
3125 rss = NULL;
3126
3127 /* In each where-assign-stmt, the mask-expr and the variable being
3128 defined shall be arrays of the same shape. */
3129 gcc_assert (lss != gfc_ss_terminator);
3130
3131 /* The assignment needs scalarization. */
3132 lss_section = lss;
3133
3134 /* Find a non-scalar SS from the lhs. */
3135 while (lss_section != gfc_ss_terminator
3136 && lss_section->type != GFC_SS_SECTION)
3137 lss_section = lss_section->next;
3138
3139 gcc_assert (lss_section != gfc_ss_terminator);
3140
3141 /* Initialize the scalarizer. */
3142 gfc_init_loopinfo (&loop);
3143
3144 /* Walk the rhs. */
3145 rss = gfc_walk_expr (expr2);
3146 if (rss == gfc_ss_terminator)
3147 {
3148 /* The rhs is scalar. Add a ss for the expression. */
3149 rss = gfc_get_ss ();
3150 rss->next = gfc_ss_terminator;
3151 rss->type = GFC_SS_SCALAR;
3152 rss->expr = expr2;
3153 }
3154
3155 /* Associate the SS with the loop. */
3156 gfc_add_ss_to_loop (&loop, lss);
3157 gfc_add_ss_to_loop (&loop, rss);
3158
3159 /* Calculate the bounds of the scalarization. */
3160 gfc_conv_ss_startstride (&loop);
3161
3162 /* Resolve any data dependencies in the statement. */
3163 gfc_conv_resolve_dependencies (&loop, lss_section, rss);
3164
3165 /* Setup the scalarizing loops. */
3166 gfc_conv_loop_setup (&loop);
3167
3168 /* Setup the gfc_se structures. */
3169 gfc_copy_loopinfo_to_se (&lse, &loop);
3170 gfc_copy_loopinfo_to_se (&rse, &loop);
3171
3172 rse.ss = rss;
3173 gfc_mark_ss_chain_used (rss, 1);
3174 if (loop.temp_ss == NULL)
3175 {
3176 lse.ss = lss;
3177 gfc_mark_ss_chain_used (lss, 1);
3178 }
3179 else
3180 {
3181 lse.ss = loop.temp_ss;
3182 gfc_mark_ss_chain_used (lss, 3);
3183 gfc_mark_ss_chain_used (loop.temp_ss, 3);
3184 }
3185
3186 /* Start the scalarized loop body. */
3187 gfc_start_scalarized_body (&loop, &body);
3188
3189 /* Translate the expression. */
3190 gfc_conv_expr (&rse, expr2);
3191 if (lss != gfc_ss_terminator && loop.temp_ss != NULL)
3192 {
3193 gfc_conv_tmp_array_ref (&lse);
3194 gfc_advance_se_ss_chain (&lse);
3195 }
3196 else
3197 gfc_conv_expr (&lse, expr1);
3198
3199 /* Form the mask expression according to the mask. */
3200 index = count1;
3201 maskexpr = gfc_build_array_ref (mask, index, NULL);
3202 if (invert)
3203 maskexpr = fold_build1 (TRUTH_NOT_EXPR, TREE_TYPE (maskexpr), maskexpr);
3204
3205 /* Use the scalar assignment as is. */
3206 if (sym == NULL)
3207 tmp = gfc_trans_scalar_assign (&lse, &rse, expr1->ts,
3208 loop.temp_ss != NULL, false);
3209 else
3210 tmp = gfc_conv_operator_assign (&lse, &rse, sym);
3211
3212 tmp = build3_v (COND_EXPR, maskexpr, tmp, build_empty_stmt ());
3213
3214 gfc_add_expr_to_block (&body, tmp);
3215
3216 if (lss == gfc_ss_terminator)
3217 {
3218 /* Increment count1. */
3219 tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
3220 count1, gfc_index_one_node);
3221 gfc_add_modify_expr (&body, count1, tmp);
3222
3223 /* Use the scalar assignment as is. */
3224 gfc_add_block_to_block (&block, &body);
3225 }
3226 else
3227 {
3228 gcc_assert (lse.ss == gfc_ss_terminator
3229 && rse.ss == gfc_ss_terminator);
3230
3231 if (loop.temp_ss != NULL)
3232 {
3233 /* Increment count1 before finishing the main body of a scalarized
3234 expression. */
3235 tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
3236 count1, gfc_index_one_node);
3237 gfc_add_modify_expr (&body, count1, tmp);
3238 gfc_trans_scalarized_loop_boundary (&loop, &body);
3239
3240 /* We need to copy the temporary to the actual lhs. */
3241 gfc_init_se (&lse, NULL);
3242 gfc_init_se (&rse, NULL);
3243 gfc_copy_loopinfo_to_se (&lse, &loop);
3244 gfc_copy_loopinfo_to_se (&rse, &loop);
3245
3246 rse.ss = loop.temp_ss;
3247 lse.ss = lss;
3248
3249 gfc_conv_tmp_array_ref (&rse);
3250 gfc_advance_se_ss_chain (&rse);
3251 gfc_conv_expr (&lse, expr1);
3252
3253 gcc_assert (lse.ss == gfc_ss_terminator
3254 && rse.ss == gfc_ss_terminator);
3255
3256 /* Form the mask expression according to the mask tree list. */
3257 index = count2;
3258 maskexpr = gfc_build_array_ref (mask, index, NULL);
3259 if (invert)
3260 maskexpr = fold_build1 (TRUTH_NOT_EXPR, TREE_TYPE (maskexpr),
3261 maskexpr);
3262
3263 /* Use the scalar assignment as is. */
3264 tmp = gfc_trans_scalar_assign (&lse, &rse, expr1->ts, false, false);
3265 tmp = build3_v (COND_EXPR, maskexpr, tmp, build_empty_stmt ());
3266 gfc_add_expr_to_block (&body, tmp);
3267
3268 /* Increment count2. */
3269 tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
3270 count2, gfc_index_one_node);
3271 gfc_add_modify_expr (&body, count2, tmp);
3272 }
3273 else
3274 {
3275 /* Increment count1. */
3276 tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
3277 count1, gfc_index_one_node);
3278 gfc_add_modify_expr (&body, count1, tmp);
3279 }
3280
3281 /* Generate the copying loops. */
3282 gfc_trans_scalarizing_loops (&loop, &body);
3283
3284 /* Wrap the whole thing up. */
3285 gfc_add_block_to_block (&block, &loop.pre);
3286 gfc_add_block_to_block (&block, &loop.post);
3287 gfc_cleanup_loop (&loop);
3288 }
3289
3290 return gfc_finish_block (&block);
3291 }
3292
3293
3294 /* Translate the WHERE construct or statement.
3295 This function can be called iteratively to translate the nested WHERE
3296 construct or statement.
3297 MASK is the control mask. */
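/* For example (illustrative), for

     WHERE (m1)
       body1
     ELSEWHERE (m2)
       body2
     ELSEWHERE
       body3
     END WHERE

   body1 runs under the control mask m1, body2 under .NOT.m1 .AND. m2, and
   body3 under .NOT.m1 .AND. .NOT.m2; the cmask/pmask temporaries allocated
   below hold these masks elementwise. */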
3298
3299 static void
3300 gfc_trans_where_2 (gfc_code * code, tree mask, bool invert,
3301 forall_info * nested_forall_info, stmtblock_t * block)
3302 {
3303 stmtblock_t inner_size_body;
3304 tree inner_size, size;
3305 gfc_ss *lss, *rss;
3306 tree mask_type;
3307 gfc_expr *expr1;
3308 gfc_expr *expr2;
3309 gfc_code *cblock;
3310 gfc_code *cnext;
3311 tree tmp;
3312 tree count1, count2;
3313 bool need_cmask;
3314 bool need_pmask;
3315 int need_temp;
3316 tree pcmask = NULL_TREE;
3317 tree ppmask = NULL_TREE;
3318 tree cmask = NULL_TREE;
3319 tree pmask = NULL_TREE;
3320 gfc_actual_arglist *arg;
3321
3322 /* The WHERE statement or the WHERE construct statement. */
3323 cblock = code->block;
3324
3325 /* As the mask array can be very big, prefer compact boolean types. */
3326 mask_type = gfc_get_logical_type (gfc_logical_kinds[0].kind);
3327
3328 /* Determine which temporary masks are needed. */
3329 if (!cblock->block)
3330 {
3331 /* One clause: No ELSEWHEREs. */
3332 need_cmask = (cblock->next != 0);
3333 need_pmask = false;
3334 }
3335 else if (cblock->block->block)
3336 {
3337 /* Three or more clauses: Conditional ELSEWHEREs. */
3338 need_cmask = true;
3339 need_pmask = true;
3340 }
3341 else if (cblock->next)
3342 {
3343 /* Two clauses, the first non-empty. */
3344 need_cmask = true;
3345 need_pmask = (mask != NULL_TREE
3346 && cblock->block->next != 0);
3347 }
3348 else if (!cblock->block->next)
3349 {
3350 /* Two clauses, both empty. */
3351 need_cmask = false;
3352 need_pmask = false;
3353 }
3354 /* Two clauses, the first empty, the second non-empty. */
3355 else if (mask)
3356 {
3357 need_cmask = (cblock->block->expr != 0);
3358 need_pmask = true;
3359 }
3360 else
3361 {
3362 need_cmask = true;
3363 need_pmask = false;
3364 }
3365
3366 if (need_cmask || need_pmask)
3367 {
3368 /* Calculate the size of temporary needed by the mask-expr. */
3369 gfc_init_block (&inner_size_body);
3370 inner_size = compute_inner_temp_size (cblock->expr, cblock->expr,
3371 &inner_size_body, &lss, &rss);
3372
3373 /* Calculate the total size of temporary needed. */
3374 size = compute_overall_iter_number (nested_forall_info, inner_size,
3375 &inner_size_body, block);
3376
3377 /* Allocate temporary for WHERE mask if needed. */
3378 if (need_cmask)
3379 cmask = allocate_temp_for_forall_nest_1 (mask_type, size, block,
3380 &pcmask);
3381
3382 /* Allocate temporary for !mask if needed. */
3383 if (need_pmask)
3384 pmask = allocate_temp_for_forall_nest_1 (mask_type, size, block,
3385 &ppmask);
3386 }
3387
3388 while (cblock)
3389 {
3390 /* Each time around this loop, the where clause is conditional
3391 on the value of mask and invert, which are updated at the
3392 bottom of the loop. */
3393
3394 /* Has mask-expr. */
3395 if (cblock->expr)
3396 {
3397 /* Ensure that the WHERE mask will be evaluated exactly once.
3398 If there are no statements in this WHERE/ELSEWHERE clause,
3399 then we don't need to update the control mask (cmask).
3400 If this is the last clause of the WHERE construct, then
3401 we don't need to update the pending control mask (pmask). */
3402 if (mask)
3403 gfc_evaluate_where_mask (cblock->expr, nested_forall_info,
3404 mask, invert,
3405 cblock->next ? cmask : NULL_TREE,
3406 cblock->block ? pmask : NULL_TREE,
3407 mask_type, block);
3408 else
3409 gfc_evaluate_where_mask (cblock->expr, nested_forall_info,
3410 NULL_TREE, false,
3411 (cblock->next || cblock->block)
3412 ? cmask : NULL_TREE,
3413 NULL_TREE, mask_type, block);
3414
3415 invert = false;
3416 }
3417 /* It's a final elsewhere-stmt. No mask-expr is present. */
3418 else
3419 cmask = mask;
3420
3421 /* The body of this where clause is controlled by cmask, with its
3422 sense specified by invert. */
3423
3424 /* Get the assignment statement of a WHERE statement, or the first
3425 statement in where-body-construct of a WHERE construct. */
3426 cnext = cblock->next;
3427 while (cnext)
3428 {
3429 switch (cnext->op)
3430 {
3431 /* WHERE assignment statement. */
3432 case EXEC_ASSIGN_CALL:
3433
3434 arg = cnext->ext.actual;
3435 expr1 = expr2 = NULL;
3436 for (; arg; arg = arg->next)
3437 {
3438 if (!arg->expr)
3439 continue;
3440 if (expr1 == NULL)
3441 expr1 = arg->expr;
3442 else
3443 expr2 = arg->expr;
3444 }
3445 goto evaluate;
3446
3447 case EXEC_ASSIGN:
3448 expr1 = cnext->expr;
3449 expr2 = cnext->expr2;
3450 evaluate:
3451 if (nested_forall_info != NULL)
3452 {
3453 need_temp = gfc_check_dependency (expr1, expr2, 0);
3454 if (need_temp && cnext->op != EXEC_ASSIGN_CALL)
3455 gfc_trans_assign_need_temp (expr1, expr2,
3456 cmask, invert,
3457 nested_forall_info, block);
3458 else
3459 {
3460 /* Variables to control maskexpr. */
3461 count1 = gfc_create_var (gfc_array_index_type, "count1");
3462 count2 = gfc_create_var (gfc_array_index_type, "count2");
3463 gfc_add_modify_expr (block, count1, gfc_index_zero_node);
3464 gfc_add_modify_expr (block, count2, gfc_index_zero_node);
3465
3466 tmp = gfc_trans_where_assign (expr1, expr2,
3467 cmask, invert,
3468 count1, count2,
3469 cnext->resolved_sym);
3470
3471 tmp = gfc_trans_nested_forall_loop (nested_forall_info,
3472 tmp, 1);
3473 gfc_add_expr_to_block (block, tmp);
3474 }
3475 }
3476 else
3477 {
3478 /* Variables to control maskexpr. */
3479 count1 = gfc_create_var (gfc_array_index_type, "count1");
3480 count2 = gfc_create_var (gfc_array_index_type, "count2");
3481 gfc_add_modify_expr (block, count1, gfc_index_zero_node);
3482 gfc_add_modify_expr (block, count2, gfc_index_zero_node);
3483
3484 tmp = gfc_trans_where_assign (expr1, expr2,
3485 cmask, invert,
3486 count1, count2,
3487 cnext->resolved_sym);
3488 gfc_add_expr_to_block (block, tmp);
3489
3490 }
3491 break;
3492
3493 /* WHERE or WHERE construct is part of a where-body-construct. */
3494 case EXEC_WHERE:
3495 gfc_trans_where_2 (cnext, cmask, invert,
3496 nested_forall_info, block);
3497 break;
3498
3499 default:
3500 gcc_unreachable ();
3501 }
3502
3503 /* The next statement within the same where-body-construct. */
3504 cnext = cnext->next;
3505 }
3506 /* The next masked-elsewhere-stmt, elsewhere-stmt, or end-where-stmt. */
3507 cblock = cblock->block;
3508 if (mask == NULL_TREE)
3509 {
3510 /* If we're the initial WHERE, we can simply invert the sense
3511 of the current mask to obtain the "mask" for the remaining
3512 ELSEWHEREs. */
3513 invert = true;
3514 mask = cmask;
3515 }
3516 else
3517 {
3518 /* Otherwise, for nested WHEREs we need to use the pending mask. */
3519 invert = false;
3520 mask = pmask;
3521 }
3522 }
3523
3524 /* If we allocated a pending mask array, deallocate it now. */
3525 if (ppmask)
3526 {
3527 tmp = gfc_call_free (ppmask);
3528 gfc_add_expr_to_block (block, tmp);
3529 }
3530
3531 /* If we allocated a current mask array, deallocate it now. */
3532 if (pcmask)
3533 {
3534 tmp = gfc_call_free (pcmask);
3535 gfc_add_expr_to_block (block, tmp);
3536 }
3537 }
3538
3539 /* Translate a simple WHERE construct or statement without dependencies.
3540 CBLOCK is the "then" clause of the WHERE statement, where CBLOCK->EXPR
3541 is the mask condition, and EBLOCK if non-NULL is the "else" clause.
3542 Currently both CBLOCK and EBLOCK are restricted to single assignments. */
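/* For example (illustrative), WHERE (c > 0) a = b ELSEWHERE a = d END WHERE,
   with no dependencies among a, b, c and d, becomes a single scalarized loop
   containing

     if (c[i] > 0)
       a[i] = b[i];
     else
       a[i] = d[i];

   with no mask temporary at all. */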
3543
3544 static tree
3545 gfc_trans_where_3 (gfc_code * cblock, gfc_code * eblock)
3546 {
3547 stmtblock_t block, body;
3548 gfc_expr *cond, *tdst, *tsrc, *edst, *esrc;
3549 tree tmp, cexpr, tstmt, estmt;
3550 gfc_ss *css, *tdss, *tsss;
3551 gfc_se cse, tdse, tsse, edse, esse;
3552 gfc_loopinfo loop;
3553 gfc_ss *edss = 0;
3554 gfc_ss *esss = 0;
3555
3556 cond = cblock->expr;
3557 tdst = cblock->next->expr;
3558 tsrc = cblock->next->expr2;
3559 edst = eblock ? eblock->next->expr : NULL;
3560 esrc = eblock ? eblock->next->expr2 : NULL;
3561
3562 gfc_start_block (&block);
3563 gfc_init_loopinfo (&loop);
3564
3565 /* Handle the condition. */
3566 gfc_init_se (&cse, NULL);
3567 css = gfc_walk_expr (cond);
3568 gfc_add_ss_to_loop (&loop, css);
3569
3570 /* Handle the then-clause. */
3571 gfc_init_se (&tdse, NULL);
3572 gfc_init_se (&tsse, NULL);
3573 tdss = gfc_walk_expr (tdst);
3574 tsss = gfc_walk_expr (tsrc);
3575 if (tsss == gfc_ss_terminator)
3576 {
3577 tsss = gfc_get_ss ();
3578 tsss->next = gfc_ss_terminator;
3579 tsss->type = GFC_SS_SCALAR;
3580 tsss->expr = tsrc;
3581 }
3582 gfc_add_ss_to_loop (&loop, tdss);
3583 gfc_add_ss_to_loop (&loop, tsss);
3584
3585 if (eblock)
3586 {
3587 /* Handle the else clause. */
3588 gfc_init_se (&edse, NULL);
3589 gfc_init_se (&esse, NULL);
3590 edss = gfc_walk_expr (edst);
3591 esss = gfc_walk_expr (esrc);
3592 if (esss == gfc_ss_terminator)
3593 {
3594 esss = gfc_get_ss ();
3595 esss->next = gfc_ss_terminator;
3596 esss->type = GFC_SS_SCALAR;
3597 esss->expr = esrc;
3598 }
3599 gfc_add_ss_to_loop (&loop, edss);
3600 gfc_add_ss_to_loop (&loop, esss);
3601 }
3602
3603 gfc_conv_ss_startstride (&loop);
3604 gfc_conv_loop_setup (&loop);
3605
3606 gfc_mark_ss_chain_used (css, 1);
3607 gfc_mark_ss_chain_used (tdss, 1);
3608 gfc_mark_ss_chain_used (tsss, 1);
3609 if (eblock)
3610 {
3611 gfc_mark_ss_chain_used (edss, 1);
3612 gfc_mark_ss_chain_used (esss, 1);
3613 }
3614
3615 gfc_start_scalarized_body (&loop, &body);
3616
3617 gfc_copy_loopinfo_to_se (&cse, &loop);
3618 gfc_copy_loopinfo_to_se (&tdse, &loop);
3619 gfc_copy_loopinfo_to_se (&tsse, &loop);
3620 cse.ss = css;
3621 tdse.ss = tdss;
3622 tsse.ss = tsss;
3623 if (eblock)
3624 {
3625 gfc_copy_loopinfo_to_se (&edse, &loop);
3626 gfc_copy_loopinfo_to_se (&esse, &loop);
3627 edse.ss = edss;
3628 esse.ss = esss;
3629 }
3630
3631 gfc_conv_expr (&cse, cond);
3632 gfc_add_block_to_block (&body, &cse.pre);
3633 cexpr = cse.expr;
3634
3635 gfc_conv_expr (&tsse, tsrc);
3636 if (tdss != gfc_ss_terminator && loop.temp_ss != NULL)
3637 {
3638 gfc_conv_tmp_array_ref (&tdse);
3639 gfc_advance_se_ss_chain (&tdse);
3640 }
3641 else
3642 gfc_conv_expr (&tdse, tdst);
3643
3644 if (eblock)
3645 {
3646 gfc_conv_expr (&esse, esrc);
3647 if (edss != gfc_ss_terminator && loop.temp_ss != NULL)
3648 {
3649 gfc_conv_tmp_array_ref (&edse);
3650 gfc_advance_se_ss_chain (&edse);
3651 }
3652 else
3653 gfc_conv_expr (&edse, edst);
3654 }
3655
3656 tstmt = gfc_trans_scalar_assign (&tdse, &tsse, tdst->ts, false, false);
3657 estmt = eblock ? gfc_trans_scalar_assign (&edse, &esse, edst->ts, false, false)
3658 : build_empty_stmt ();
3659 tmp = build3_v (COND_EXPR, cexpr, tstmt, estmt);
3660 gfc_add_expr_to_block (&body, tmp);
3661 gfc_add_block_to_block (&body, &cse.post);
3662
3663 gfc_trans_scalarizing_loops (&loop, &body);
3664 gfc_add_block_to_block (&block, &loop.pre);
3665 gfc_add_block_to_block (&block, &loop.post);
3666 gfc_cleanup_loop (&loop);
3667
3668 return gfc_finish_block (&block);
3669 }
3670
3671 /* As the WHERE or WHERE construct statement can be nested, we call
3672 gfc_trans_where_2 to do the translation, and pass the initial
3673 NULL values for both the control mask and the pending control mask. */
3674
3675 tree
3676 gfc_trans_where (gfc_code * code)
3677 {
3678 stmtblock_t block;
3679 gfc_code *cblock;
3680 gfc_code *eblock;
3681
3682 cblock = code->block;
3683 if (cblock->next
3684 && cblock->next->op == EXEC_ASSIGN
3685 && !cblock->next->next)
3686 {
3687 eblock = cblock->block;
3688 if (!eblock)
3689 {
3690 /* A simple "WHERE (cond) x = y" statement or block is
3691 dependence free if cond is not dependent upon writing x,
3692 and the source y is unaffected by the destination x. */
3693 if (!gfc_check_dependency (cblock->next->expr,
3694 cblock->expr, 0)
3695 && !gfc_check_dependency (cblock->next->expr,
3696 cblock->next->expr2, 0))
3697 return gfc_trans_where_3 (cblock, NULL);
3698 }
3699 else if (!eblock->expr
3700 && !eblock->block
3701 && eblock->next
3702 && eblock->next->op == EXEC_ASSIGN
3703 && !eblock->next->next)
3704 {
3705 /* A simple "WHERE (cond) x1 = y1 ELSEWHERE x2 = y2 ENDWHERE"
3706 block is dependence free if cond is not dependent on writes
3707 to x1 and x2, y1 is not dependent on writes to x2, and y2
3708 is not dependent on writes to x1, and both y's are not
3709 dependent upon their own x's. */
3710 if (!gfc_check_dependency(cblock->next->expr,
3711 cblock->expr, 0)
3712 && !gfc_check_dependency(eblock->next->expr,
3713 cblock->expr, 0)
3714 && !gfc_check_dependency(cblock->next->expr,
3715 eblock->next->expr2, 0)
3716 && !gfc_check_dependency(eblock->next->expr,
3717 cblock->next->expr2, 0)
3718 && !gfc_check_dependency(cblock->next->expr,
3719 cblock->next->expr2, 0)
3720 && !gfc_check_dependency(eblock->next->expr,
3721 eblock->next->expr2, 0))
3722 return gfc_trans_where_3 (cblock, eblock);
3723 }
3724 }
3725
3726 gfc_start_block (&block);
3727
3728 gfc_trans_where_2 (code, NULL, false, NULL, &block);
3729
3730 return gfc_finish_block (&block);
3731 }
3732
3733
3734 /* CYCLE a DO loop. The label decl has already been created by
3735 gfc_trans_do(); it's in TREE_PURPOSE (backend_decl) of the gfc_code
3736 node at the head of the loop. We must mark the label as used. */
3737
3738 tree
3739 gfc_trans_cycle (gfc_code * code)
3740 {
3741 tree cycle_label;
3742
3743 cycle_label = TREE_PURPOSE (code->ext.whichloop->backend_decl);
3744 TREE_USED (cycle_label) = 1;
3745 return build1_v (GOTO_EXPR, cycle_label);
3746 }
3747
3748
3749 /* EXIT a DO loop. Similar to CYCLE, but now the label is in
3750 TREE_VALUE (backend_decl) of the gfc_code node at the head of the
3751 loop. */
3752
3753 tree
3754 gfc_trans_exit (gfc_code * code)
3755 {
3756 tree exit_label;
3757
3758 exit_label = TREE_VALUE (code->ext.whichloop->backend_decl);
3759 TREE_USED (exit_label) = 1;
3760 return build1_v (GOTO_EXPR, exit_label);
3761 }
3762
3763
3764 /* Translate the ALLOCATE statement. */
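/* When STAT= is present, the generated code roughly has the shape (a sketch;
   array objects are handled by gfc_array_allocate instead of the scalar path
   below):

     allocate a1, recording the status in stat;
     if (stat != 0) goto error_label;
     allocate a2, recording the status in stat;
     if (stat != 0) goto error_label;
     ...
   error_label:
     stat-variable = stat;  */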
3765
3766 tree
3767 gfc_trans_allocate (gfc_code * code)
3768 {
3769 gfc_alloc *al;
3770 gfc_expr *expr;
3771 gfc_se se;
3772 tree tmp;
3773 tree parm;
3774 tree stat;
3775 tree pstat;
3776 tree error_label;
3777 stmtblock_t block;
3778
3779 if (!code->ext.alloc_list)
3780 return NULL_TREE;
3781
3782 gfc_start_block (&block);
3783
3784 if (code->expr)
3785 {
3786 tree gfc_int4_type_node = gfc_get_int_type (4);
3787
3788 stat = gfc_create_var (gfc_int4_type_node, "stat");
3789 pstat = build_fold_addr_expr (stat);
3790
3791 error_label = gfc_build_label_decl (NULL_TREE);
3792 TREE_USED (error_label) = 1;
3793 }
3794 else
3795 pstat = stat = error_label = NULL_TREE;
3796
3797 for (al = code->ext.alloc_list; al != NULL; al = al->next)
3798 {
3799 expr = al->expr;
3800
3801 gfc_init_se (&se, NULL);
3802 gfc_start_block (&se.pre);
3803
3804 se.want_pointer = 1;
3805 se.descriptor_only = 1;
3806 gfc_conv_expr (&se, expr);
3807
3808 if (!gfc_array_allocate (&se, expr, pstat))
3809 {
3810 /* A scalar or derived type. */
3811 tmp = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (se.expr)));
3812
3813 if (expr->ts.type == BT_CHARACTER && tmp == NULL_TREE)
3814 tmp = se.string_length;
3815
3816 tmp = gfc_allocate_with_status (&se.pre, tmp, pstat);
3817 tmp = build2 (MODIFY_EXPR, void_type_node, se.expr,
3818 fold_convert (TREE_TYPE (se.expr), tmp));
3819 gfc_add_expr_to_block (&se.pre, tmp);
3820
3821 if (code->expr)
3822 {
3823 tmp = build1_v (GOTO_EXPR, error_label);
3824 parm = fold_build2 (NE_EXPR, boolean_type_node,
3825 stat, build_int_cst (TREE_TYPE (stat), 0));
3826 tmp = fold_build3 (COND_EXPR, void_type_node,
3827 parm, tmp, build_empty_stmt ());
3828 gfc_add_expr_to_block (&se.pre, tmp);
3829 }
3830
3831 if (expr->ts.type == BT_DERIVED && expr->ts.derived->attr.alloc_comp)
3832 {
3833 tmp = build_fold_indirect_ref (se.expr);
3834 tmp = gfc_nullify_alloc_comp (expr->ts.derived, tmp, 0);
3835 gfc_add_expr_to_block (&se.pre, tmp);
3836 }
3837
3838 }
3839
3840 tmp = gfc_finish_block (&se.pre);
3841 gfc_add_expr_to_block (&block, tmp);
3842 }
3843
3844 /* Assign the value to the status variable. */
3845 if (code->expr)
3846 {
3847 tmp = build1_v (LABEL_EXPR, error_label);
3848 gfc_add_expr_to_block (&block, tmp);
3849
3850 gfc_init_se (&se, NULL);
3851 gfc_conv_expr_lhs (&se, code->expr);
3852 tmp = convert (TREE_TYPE (se.expr), stat);
3853 gfc_add_modify_expr (&block, se.expr, tmp);
3854 }
3855
3856 return gfc_finish_block (&block);
3857 }
3858
3859
3860 /* Translate a DEALLOCATE statement.
3861 There are two cases within the for loop:
3862 (1) deallocate(a1, a2, a3) is translated into the following sequence
3863 _gfortran_deallocate(a1, 0B)
3864 _gfortran_deallocate(a2, 0B)
3865 _gfortran_deallocate(a3, 0B)
3866 where the STAT= variable is passed a NULL pointer.
3867 (2) deallocate(a1, a2, a3, stat=i) is translated into the following sequence
3868 astat = 0
3869 _gfortran_deallocate(a1, &stat)
3870 astat = astat + stat
3871 _gfortran_deallocate(a2, &stat)
3872 astat = astat + stat
3873 _gfortran_deallocate(a3, &stat)
3874 astat = astat + stat
3875 In case (1), nothing further needs to be done after the loop. In case (2)
3876 the STAT= variable is set to astat. */
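/* Two details not shown above: a derived-type object with allocatable
   components has those components freed first via
   gfc_deallocate_alloc_comp, and after a scalar deallocation the
   pointer is reset to NULL.  */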
3877 tree
3878 gfc_trans_deallocate (gfc_code * code)
3879 {
3880 gfc_se se;
3881 gfc_alloc *al;
3882 gfc_expr *expr;
3883 tree apstat, astat, pstat, stat, tmp;
3884 stmtblock_t block;
3885
3886 gfc_start_block (&block);
3887
3888 /* Set up the optional STAT= variable. */
3889 if (code->expr)
3890 {
3891 tree gfc_int4_type_node = gfc_get_int_type (4);
3892
3893 /* Variable used with the library call. */
3894 stat = gfc_create_var (gfc_int4_type_node, "stat");
3895 pstat = build_fold_addr_expr (stat);
3896
3897 /* Running total of possible deallocation failures. */
3898 astat = gfc_create_var (gfc_int4_type_node, "astat");
3899 apstat = build_fold_addr_expr (astat);
3900
3901 /* Initialize astat to 0. */
3902 gfc_add_modify_expr (&block, astat, build_int_cst (TREE_TYPE (astat), 0));
3903 }
3904 else
3905 pstat = apstat = stat = astat = NULL_TREE;
3906
3907 for (al = code->ext.alloc_list; al != NULL; al = al->next)
3908 {
3909 expr = al->expr;
3910 gcc_assert (expr->expr_type == EXPR_VARIABLE);
3911
3912 gfc_init_se (&se, NULL);
3913 gfc_start_block (&se.pre);
3914
3915 se.want_pointer = 1;
3916 se.descriptor_only = 1;
3917 gfc_conv_expr (&se, expr);
3918
3919 if (expr->ts.type == BT_DERIVED
3920 && expr->ts.derived->attr.alloc_comp)
3921 {
3922 gfc_ref *ref;
3923 gfc_ref *last = NULL;
3924 for (ref = expr->ref; ref; ref = ref->next)
3925 if (ref->type == REF_COMPONENT)
3926 last = ref;
3927
3928 /* Do not deallocate the components of a derived type
3929 ultimate pointer component. */
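/* E.g. (illustrative) for "deallocate (x%p)" where P is a POINTER
   component, the allocatable components of the object P points to
   are left untouched; likewise when the deallocated object is
   itself a pointer. */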
3930 if (!(last && last->u.c.component->pointer)
3931 && !(!last && expr->symtree->n.sym->attr.pointer))
3932 {
3933 tmp = gfc_deallocate_alloc_comp (expr->ts.derived, se.expr,
3934 expr->rank);
3935 gfc_add_expr_to_block (&se.pre, tmp);
3936 }
3937 }
3938
3939 if (expr->rank)
3940 tmp = gfc_array_deallocate (se.expr, pstat);
3941 else
3942 {
3943 tmp = gfc_deallocate_with_status (se.expr, pstat, false);
3944 gfc_add_expr_to_block (&se.pre, tmp);
3945
3946 tmp = build2 (MODIFY_EXPR, void_type_node,
3947 se.expr, build_int_cst (TREE_TYPE (se.expr), 0));
3948 }
3949
3950 gfc_add_expr_to_block (&se.pre, tmp);
3951
3952 /* Keep track of the number of failed deallocations by adding the stat
3953 of the last deallocation to the running total. */
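/* Assuming the library reports success as a zero stat, astat (and
   hence the STAT= variable) is zero exactly when every deallocation
   succeeded. */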
3954 if (code->expr)
3955 {
3956 apstat = build2 (PLUS_EXPR, TREE_TYPE (stat), astat, stat);
3957 gfc_add_modify_expr (&se.pre, astat, apstat);
3958 }
3959
3960 tmp = gfc_finish_block (&se.pre);
3961 gfc_add_expr_to_block (&block, tmp);
3962
3963 }
3964
3965 /* Assign the value to the status variable. */
3966 if (code->expr)
3967 {
3968 gfc_init_se (&se, NULL);
3969 gfc_conv_expr_lhs (&se, code->expr);
3970 tmp = convert (TREE_TYPE (se.expr), astat);
3971 gfc_add_modify_expr (&block, se.expr, tmp);
3972 }
3973
3974 return gfc_finish_block (&block);
3975 }
3976