/* Statement translation -- generate GCC trees from gfc_code.
   Copyright (C) 2002-2015 Free Software Foundation, Inc.
   Contributed by Paul Brook <paul@nowt.org>
   and Steven Bosscher <s.bosscher@student.tudelft.nl>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
25 #include "coretypes.h"
27 #include "stringpool.h"
31 #include "trans-stmt.h"
32 #include "trans-types.h"
33 #include "trans-array.h"
34 #include "trans-const.h"
36 #include "dependency.h"
/* NOTE(review): extraction-garbled typedef.  Only the 'next' link of the
   FORALL iterator list node is visible here; the remaining fields (the
   iteration variable and its start/end/step trees, presumably) were lost
   by the extractor -- confirm against the full gcc/fortran/trans-stmt.c.  */
39 typedef struct iter_info
45 struct iter_info
*next
;
/* NOTE(review): extraction-garbled typedef.  Only the 'prev_nest' link of
   the per-FORALL-nest bookkeeping record is visible; its other members are
   elided here -- confirm against the full gcc/fortran/trans-stmt.c.  */
49 typedef struct forall_info
56 struct forall_info
*prev_nest
;
/* Forward declaration: gfc_trans_where_2 translates a WHERE construct and
   is defined later in this file (its body is not visible in this chunk).  */
61 static void gfc_trans_where_2 (gfc_code
*, tree
, bool,
62 forall_info
*, stmtblock_t
*);
64 /* Translate a F95 label number to a LABEL_EXPR. */
67 gfc_trans_label_here (gfc_code
* code
)
69 return build1_v (LABEL_EXPR
, gfc_get_label_decl (code
->here
));
73 /* Given a variable expression which has been ASSIGNed to, find the decl
74 containing the auxiliary variables. For variables in common blocks this
78 gfc_conv_label_variable (gfc_se
* se
, gfc_expr
* expr
)
80 gcc_assert (expr
->symtree
->n
.sym
->attr
.assign
== 1);
81 gfc_conv_expr (se
, expr
);
82 /* Deals with variable in common block. Get the field declaration. */
83 if (TREE_CODE (se
->expr
) == COMPONENT_REF
)
84 se
->expr
= TREE_OPERAND (se
->expr
, 1);
85 /* Deals with dummy argument. Get the parameter declaration. */
86 else if (TREE_CODE (se
->expr
) == INDIRECT_REF
)
87 se
->expr
= TREE_OPERAND (se
->expr
, 0);
90 /* Translate a label assignment statement. */
93 gfc_trans_label_assign (gfc_code
* code
)
102 /* Start a new block. */
103 gfc_init_se (&se
, NULL
);
104 gfc_start_block (&se
.pre
);
105 gfc_conv_label_variable (&se
, code
->expr1
);
107 len
= GFC_DECL_STRING_LEN (se
.expr
);
108 addr
= GFC_DECL_ASSIGN_ADDR (se
.expr
);
110 label_tree
= gfc_get_label_decl (code
->label1
);
112 if (code
->label1
->defined
== ST_LABEL_TARGET
113 || code
->label1
->defined
== ST_LABEL_DO_TARGET
)
115 label_tree
= gfc_build_addr_expr (pvoid_type_node
, label_tree
);
116 len_tree
= integer_minus_one_node
;
120 gfc_expr
*format
= code
->label1
->format
;
122 label_len
= format
->value
.character
.length
;
123 len_tree
= build_int_cst (gfc_charlen_type_node
, label_len
);
124 label_tree
= gfc_build_wide_string_const (format
->ts
.kind
, label_len
+ 1,
125 format
->value
.character
.string
);
126 label_tree
= gfc_build_addr_expr (pvoid_type_node
, label_tree
);
129 gfc_add_modify (&se
.pre
, len
, len_tree
);
130 gfc_add_modify (&se
.pre
, addr
, label_tree
);
132 return gfc_finish_block (&se
.pre
);
135 /* Translate a GOTO statement. */
138 gfc_trans_goto (gfc_code
* code
)
140 locus loc
= code
->loc
;
146 if (code
->label1
!= NULL
)
147 return build1_v (GOTO_EXPR
, gfc_get_label_decl (code
->label1
));
150 gfc_init_se (&se
, NULL
);
151 gfc_start_block (&se
.pre
);
152 gfc_conv_label_variable (&se
, code
->expr1
);
153 tmp
= GFC_DECL_STRING_LEN (se
.expr
);
154 tmp
= fold_build2_loc (input_location
, NE_EXPR
, boolean_type_node
, tmp
,
155 build_int_cst (TREE_TYPE (tmp
), -1));
156 gfc_trans_runtime_check (true, false, tmp
, &se
.pre
, &loc
,
157 "Assigned label is not a target label");
159 assigned_goto
= GFC_DECL_ASSIGN_ADDR (se
.expr
);
161 /* We're going to ignore a label list. It does not really change the
162 statement's semantics (because it is just a further restriction on
163 what's legal code); before, we were comparing label addresses here, but
164 that's a very fragile business and may break with optimization. So
167 target
= fold_build1_loc (input_location
, GOTO_EXPR
, void_type_node
,
169 gfc_add_expr_to_block (&se
.pre
, target
);
170 return gfc_finish_block (&se
.pre
);
174 /* Translate an ENTRY statement. Just adds a label for this entry point. */
176 gfc_trans_entry (gfc_code
* code
)
178 return build1_v (LABEL_EXPR
, code
->ext
.entry
->label
);
182 /* Replace a gfc_ss structure by another both in the gfc_se struct
183 and the gfc_loopinfo struct. This is used in gfc_conv_elemental_dependencies
184 to replace a variable ss by the corresponding temporary. */
187 replace_ss (gfc_se
*se
, gfc_ss
*old_ss
, gfc_ss
*new_ss
)
189 gfc_ss
**sess
, **loopss
;
191 /* The old_ss is a ss for a single variable. */
192 gcc_assert (old_ss
->info
->type
== GFC_SS_SECTION
);
194 for (sess
= &(se
->ss
); *sess
!= gfc_ss_terminator
; sess
= &((*sess
)->next
))
197 gcc_assert (*sess
!= gfc_ss_terminator
);
200 new_ss
->next
= old_ss
->next
;
203 for (loopss
= &(se
->loop
->ss
); *loopss
!= gfc_ss_terminator
;
204 loopss
= &((*loopss
)->loop_chain
))
205 if (*loopss
== old_ss
)
207 gcc_assert (*loopss
!= gfc_ss_terminator
);
210 new_ss
->loop_chain
= old_ss
->loop_chain
;
211 new_ss
->loop
= old_ss
->loop
;
213 gfc_free_ss (old_ss
);
217 /* Check for dependencies between INTENT(IN) and INTENT(OUT) arguments of
218 elemental subroutines. Make temporaries for output arguments if any such
219 dependencies are found. Output arguments are chosen because internal_unpack
220 can be used, as is, to copy the result back to the variable. */
222 gfc_conv_elemental_dependencies (gfc_se
* se
, gfc_se
* loopse
,
223 gfc_symbol
* sym
, gfc_actual_arglist
* arg
,
224 gfc_dep_check check_variable
)
226 gfc_actual_arglist
*arg0
;
228 gfc_formal_arglist
*formal
;
236 if (loopse
->ss
== NULL
)
241 formal
= gfc_sym_get_dummy_args (sym
);
243 /* Loop over all the arguments testing for dependencies. */
244 for (; arg
!= NULL
; arg
= arg
->next
, formal
= formal
? formal
->next
: NULL
)
250 /* Obtain the info structure for the current argument. */
251 for (ss
= loopse
->ss
; ss
&& ss
!= gfc_ss_terminator
; ss
= ss
->next
)
252 if (ss
->info
->expr
== e
)
255 /* If there is a dependency, create a temporary and use it
256 instead of the variable. */
257 fsym
= formal
? formal
->sym
: NULL
;
258 if (e
->expr_type
== EXPR_VARIABLE
260 && fsym
->attr
.intent
!= INTENT_IN
261 && gfc_check_fncall_dependency (e
, fsym
->attr
.intent
,
262 sym
, arg0
, check_variable
))
264 tree initial
, temptype
;
265 stmtblock_t temp_post
;
268 tmp_ss
= gfc_get_array_ss (gfc_ss_terminator
, NULL
, ss
->dimen
,
270 gfc_mark_ss_chain_used (tmp_ss
, 1);
271 tmp_ss
->info
->expr
= ss
->info
->expr
;
272 replace_ss (loopse
, ss
, tmp_ss
);
274 /* Obtain the argument descriptor for unpacking. */
275 gfc_init_se (&parmse
, NULL
);
276 parmse
.want_pointer
= 1;
277 gfc_conv_expr_descriptor (&parmse
, e
);
278 gfc_add_block_to_block (&se
->pre
, &parmse
.pre
);
280 /* If we've got INTENT(INOUT) or a derived type with INTENT(OUT),
281 initialize the array temporary with a copy of the values. */
282 if (fsym
->attr
.intent
== INTENT_INOUT
283 || (fsym
->ts
.type
==BT_DERIVED
284 && fsym
->attr
.intent
== INTENT_OUT
))
285 initial
= parmse
.expr
;
286 /* For class expressions, we always initialize with the copy of
288 else if (e
->ts
.type
== BT_CLASS
)
289 initial
= parmse
.expr
;
293 if (e
->ts
.type
!= BT_CLASS
)
295 /* Find the type of the temporary to create; we don't use the type
296 of e itself as this breaks for subcomponent-references in e
297 (where the type of e is that of the final reference, but
298 parmse.expr's type corresponds to the full derived-type). */
299 /* TODO: Fix this somehow so we don't need a temporary of the whole
300 array but instead only the components referenced. */
301 temptype
= TREE_TYPE (parmse
.expr
); /* Pointer to descriptor. */
302 gcc_assert (TREE_CODE (temptype
) == POINTER_TYPE
);
303 temptype
= TREE_TYPE (temptype
);
304 temptype
= gfc_get_element_type (temptype
);
308 /* For class arrays signal that the size of the dynamic type has to
309 be obtained from the vtable, using the 'initial' expression. */
310 temptype
= NULL_TREE
;
312 /* Generate the temporary. Cleaning up the temporary should be the
313 very last thing done, so we add the code to a new block and add it
314 to se->post as last instructions. */
315 size
= gfc_create_var (gfc_array_index_type
, NULL
);
316 data
= gfc_create_var (pvoid_type_node
, NULL
);
317 gfc_init_block (&temp_post
);
318 tmp
= gfc_trans_create_temp_array (&se
->pre
, &temp_post
, tmp_ss
,
319 temptype
, initial
, false, true,
320 false, &arg
->expr
->where
);
321 gfc_add_modify (&se
->pre
, size
, tmp
);
322 tmp
= fold_convert (pvoid_type_node
, tmp_ss
->info
->data
.array
.data
);
323 gfc_add_modify (&se
->pre
, data
, tmp
);
325 /* Update other ss' delta. */
326 gfc_set_delta (loopse
->loop
);
328 /* Copy the result back using unpack..... */
329 if (e
->ts
.type
!= BT_CLASS
)
330 tmp
= build_call_expr_loc (input_location
,
331 gfor_fndecl_in_unpack
, 2, parmse
.expr
, data
);
334 /* ... except for class results where the copy is
336 tmp
= build_fold_indirect_ref_loc (input_location
, parmse
.expr
);
337 tmp
= gfc_conv_descriptor_data_get (tmp
);
338 tmp
= build_call_expr_loc (input_location
,
339 builtin_decl_explicit (BUILT_IN_MEMCPY
),
341 fold_convert (size_type_node
, size
));
343 gfc_add_expr_to_block (&se
->post
, tmp
);
345 /* parmse.pre is already added above. */
346 gfc_add_block_to_block (&se
->post
, &parmse
.post
);
347 gfc_add_block_to_block (&se
->post
, &temp_post
);
353 /* Get the interface symbol for the procedure corresponding to the given call.
354 We can't get the procedure symbol directly as we have to handle the case
355 of (deferred) type-bound procedures. */
358 get_proc_ifc_for_call (gfc_code
*c
)
362 gcc_assert (c
->op
== EXEC_ASSIGN_CALL
|| c
->op
== EXEC_CALL
);
364 sym
= gfc_get_proc_ifc_for_expr (c
->expr1
);
366 /* Fall back/last resort try. */
368 sym
= c
->resolved_sym
;
374 /* Translate the CALL statement. Builds a call to an F95 subroutine. */
377 gfc_trans_call (gfc_code
* code
, bool dependency_check
,
378 tree mask
, tree count1
, bool invert
)
382 int has_alternate_specifier
;
383 gfc_dep_check check_variable
;
384 tree index
= NULL_TREE
;
385 tree maskexpr
= NULL_TREE
;
388 /* A CALL starts a new block because the actual arguments may have to
389 be evaluated first. */
390 gfc_init_se (&se
, NULL
);
391 gfc_start_block (&se
.pre
);
393 gcc_assert (code
->resolved_sym
);
395 ss
= gfc_ss_terminator
;
396 if (code
->resolved_sym
->attr
.elemental
)
397 ss
= gfc_walk_elemental_function_args (ss
, code
->ext
.actual
,
398 get_proc_ifc_for_call (code
),
401 /* Is not an elemental subroutine call with array valued arguments. */
402 if (ss
== gfc_ss_terminator
)
405 /* Translate the call. */
406 has_alternate_specifier
407 = gfc_conv_procedure_call (&se
, code
->resolved_sym
, code
->ext
.actual
,
410 /* A subroutine without side-effect, by definition, does nothing! */
411 TREE_SIDE_EFFECTS (se
.expr
) = 1;
413 /* Chain the pieces together and return the block. */
414 if (has_alternate_specifier
)
416 gfc_code
*select_code
;
418 select_code
= code
->next
;
419 gcc_assert(select_code
->op
== EXEC_SELECT
);
420 sym
= select_code
->expr1
->symtree
->n
.sym
;
421 se
.expr
= convert (gfc_typenode_for_spec (&sym
->ts
), se
.expr
);
422 if (sym
->backend_decl
== NULL
)
423 sym
->backend_decl
= gfc_get_symbol_decl (sym
);
424 gfc_add_modify (&se
.pre
, sym
->backend_decl
, se
.expr
);
427 gfc_add_expr_to_block (&se
.pre
, se
.expr
);
429 gfc_add_block_to_block (&se
.pre
, &se
.post
);
434 /* An elemental subroutine call with array valued arguments has
442 /* gfc_walk_elemental_function_args renders the ss chain in the
443 reverse order to the actual argument order. */
444 ss
= gfc_reverse_ss (ss
);
446 /* Initialize the loop. */
447 gfc_init_se (&loopse
, NULL
);
448 gfc_init_loopinfo (&loop
);
449 gfc_add_ss_to_loop (&loop
, ss
);
451 gfc_conv_ss_startstride (&loop
);
452 /* TODO: gfc_conv_loop_setup generates a temporary for vector
453 subscripts. This could be prevented in the elemental case
454 as temporaries are handled separatedly
455 (below in gfc_conv_elemental_dependencies). */
456 gfc_conv_loop_setup (&loop
, &code
->expr1
->where
);
457 gfc_mark_ss_chain_used (ss
, 1);
459 /* Convert the arguments, checking for dependencies. */
460 gfc_copy_loopinfo_to_se (&loopse
, &loop
);
463 /* For operator assignment, do dependency checking. */
464 if (dependency_check
)
465 check_variable
= ELEM_CHECK_VARIABLE
;
467 check_variable
= ELEM_DONT_CHECK_VARIABLE
;
469 gfc_init_se (&depse
, NULL
);
470 gfc_conv_elemental_dependencies (&depse
, &loopse
, code
->resolved_sym
,
471 code
->ext
.actual
, check_variable
);
473 gfc_add_block_to_block (&loop
.pre
, &depse
.pre
);
474 gfc_add_block_to_block (&loop
.post
, &depse
.post
);
476 /* Generate the loop body. */
477 gfc_start_scalarized_body (&loop
, &body
);
478 gfc_init_block (&block
);
482 /* Form the mask expression according to the mask. */
484 maskexpr
= gfc_build_array_ref (mask
, index
, NULL
);
486 maskexpr
= fold_build1_loc (input_location
, TRUTH_NOT_EXPR
,
487 TREE_TYPE (maskexpr
), maskexpr
);
490 /* Add the subroutine call to the block. */
491 gfc_conv_procedure_call (&loopse
, code
->resolved_sym
,
492 code
->ext
.actual
, code
->expr1
,
497 tmp
= build3_v (COND_EXPR
, maskexpr
, loopse
.expr
,
498 build_empty_stmt (input_location
));
499 gfc_add_expr_to_block (&loopse
.pre
, tmp
);
500 tmp
= fold_build2_loc (input_location
, PLUS_EXPR
,
501 gfc_array_index_type
,
502 count1
, gfc_index_one_node
);
503 gfc_add_modify (&loopse
.pre
, count1
, tmp
);
506 gfc_add_expr_to_block (&loopse
.pre
, loopse
.expr
);
508 gfc_add_block_to_block (&block
, &loopse
.pre
);
509 gfc_add_block_to_block (&block
, &loopse
.post
);
511 /* Finish up the loop block and the loop. */
512 gfc_add_expr_to_block (&body
, gfc_finish_block (&block
));
513 gfc_trans_scalarizing_loops (&loop
, &body
);
514 gfc_add_block_to_block (&se
.pre
, &loop
.pre
);
515 gfc_add_block_to_block (&se
.pre
, &loop
.post
);
516 gfc_add_block_to_block (&se
.pre
, &se
.post
);
517 gfc_cleanup_loop (&loop
);
520 return gfc_finish_block (&se
.pre
);
524 /* Translate the RETURN statement. */
527 gfc_trans_return (gfc_code
* code
)
535 /* If code->expr is not NULL, this return statement must appear
536 in a subroutine and current_fake_result_decl has already
539 result
= gfc_get_fake_result_decl (NULL
, 0);
542 gfc_warning ("An alternate return at %L without a * dummy argument",
543 &code
->expr1
->where
);
544 return gfc_generate_return ();
547 /* Start a new block for this statement. */
548 gfc_init_se (&se
, NULL
);
549 gfc_start_block (&se
.pre
);
551 gfc_conv_expr (&se
, code
->expr1
);
553 /* Note that the actually returned expression is a simple value and
554 does not depend on any pointers or such; thus we can clean-up with
555 se.post before returning. */
556 tmp
= fold_build2_loc (input_location
, MODIFY_EXPR
, TREE_TYPE (result
),
557 result
, fold_convert (TREE_TYPE (result
),
559 gfc_add_expr_to_block (&se
.pre
, tmp
);
560 gfc_add_block_to_block (&se
.pre
, &se
.post
);
562 tmp
= gfc_generate_return ();
563 gfc_add_expr_to_block (&se
.pre
, tmp
);
564 return gfc_finish_block (&se
.pre
);
567 return gfc_generate_return ();
571 /* Translate the PAUSE statement. We have to translate this statement
572 to a runtime library call. */
575 gfc_trans_pause (gfc_code
* code
)
577 tree gfc_int4_type_node
= gfc_get_int_type (4);
581 /* Start a new block for this statement. */
582 gfc_init_se (&se
, NULL
);
583 gfc_start_block (&se
.pre
);
586 if (code
->expr1
== NULL
)
588 tmp
= build_int_cst (gfc_int4_type_node
, 0);
589 tmp
= build_call_expr_loc (input_location
,
590 gfor_fndecl_pause_string
, 2,
591 build_int_cst (pchar_type_node
, 0), tmp
);
593 else if (code
->expr1
->ts
.type
== BT_INTEGER
)
595 gfc_conv_expr (&se
, code
->expr1
);
596 tmp
= build_call_expr_loc (input_location
,
597 gfor_fndecl_pause_numeric
, 1,
598 fold_convert (gfc_int4_type_node
, se
.expr
));
602 gfc_conv_expr_reference (&se
, code
->expr1
);
603 tmp
= build_call_expr_loc (input_location
,
604 gfor_fndecl_pause_string
, 2,
605 se
.expr
, se
.string_length
);
608 gfc_add_expr_to_block (&se
.pre
, tmp
);
610 gfc_add_block_to_block (&se
.pre
, &se
.post
);
612 return gfc_finish_block (&se
.pre
);
616 /* Translate the STOP statement. We have to translate this statement
617 to a runtime library call. */
620 gfc_trans_stop (gfc_code
*code
, bool error_stop
)
622 tree gfc_int4_type_node
= gfc_get_int_type (4);
626 /* Start a new block for this statement. */
627 gfc_init_se (&se
, NULL
);
628 gfc_start_block (&se
.pre
);
630 if (flag_coarray
== GFC_FCOARRAY_LIB
&& !error_stop
)
632 /* Per F2008, 8.5.1 STOP implies a SYNC MEMORY. */
633 tmp
= builtin_decl_explicit (BUILT_IN_SYNC_SYNCHRONIZE
);
634 tmp
= build_call_expr_loc (input_location
, tmp
, 0);
635 gfc_add_expr_to_block (&se
.pre
, tmp
);
637 tmp
= build_call_expr_loc (input_location
, gfor_fndecl_caf_finalize
, 0);
638 gfc_add_expr_to_block (&se
.pre
, tmp
);
641 if (code
->expr1
== NULL
)
643 tmp
= build_int_cst (gfc_int4_type_node
, 0);
644 tmp
= build_call_expr_loc (input_location
,
646 ? (flag_coarray
== GFC_FCOARRAY_LIB
647 ? gfor_fndecl_caf_error_stop_str
648 : gfor_fndecl_error_stop_string
)
649 : gfor_fndecl_stop_string
,
650 2, build_int_cst (pchar_type_node
, 0), tmp
);
652 else if (code
->expr1
->ts
.type
== BT_INTEGER
)
654 gfc_conv_expr (&se
, code
->expr1
);
655 tmp
= build_call_expr_loc (input_location
,
657 ? (flag_coarray
== GFC_FCOARRAY_LIB
658 ? gfor_fndecl_caf_error_stop
659 : gfor_fndecl_error_stop_numeric
)
660 : gfor_fndecl_stop_numeric_f08
, 1,
661 fold_convert (gfc_int4_type_node
, se
.expr
));
665 gfc_conv_expr_reference (&se
, code
->expr1
);
666 tmp
= build_call_expr_loc (input_location
,
668 ? (flag_coarray
== GFC_FCOARRAY_LIB
669 ? gfor_fndecl_caf_error_stop_str
670 : gfor_fndecl_error_stop_string
)
671 : gfor_fndecl_stop_string
,
672 2, se
.expr
, se
.string_length
);
675 gfc_add_expr_to_block (&se
.pre
, tmp
);
677 gfc_add_block_to_block (&se
.pre
, &se
.post
);
679 return gfc_finish_block (&se
.pre
);
684 gfc_trans_lock_unlock (gfc_code
*code
, gfc_exec_op type ATTRIBUTE_UNUSED
)
687 tree stat
= NULL_TREE
, lock_acquired
= NULL_TREE
;
689 /* Short cut: For single images without STAT= or LOCK_ACQUIRED
690 return early. (ERRMSG= is always untouched for -fcoarray=single.) */
691 if (!code
->expr2
&& !code
->expr4
&& flag_coarray
!= GFC_FCOARRAY_LIB
)
694 gfc_init_se (&se
, NULL
);
695 gfc_start_block (&se
.pre
);
699 gcc_assert (code
->expr2
->expr_type
== EXPR_VARIABLE
);
700 gfc_init_se (&argse
, NULL
);
701 gfc_conv_expr_val (&argse
, code
->expr2
);
707 gcc_assert (code
->expr4
->expr_type
== EXPR_VARIABLE
);
708 gfc_init_se (&argse
, NULL
);
709 gfc_conv_expr_val (&argse
, code
->expr4
);
710 lock_acquired
= argse
.expr
;
713 if (stat
!= NULL_TREE
)
714 gfc_add_modify (&se
.pre
, stat
, build_int_cst (TREE_TYPE (stat
), 0));
716 if (lock_acquired
!= NULL_TREE
)
717 gfc_add_modify (&se
.pre
, lock_acquired
,
718 fold_convert (TREE_TYPE (lock_acquired
),
721 return gfc_finish_block (&se
.pre
);
726 gfc_trans_sync (gfc_code
*code
, gfc_exec_op type
)
730 tree images
= NULL_TREE
, stat
= NULL_TREE
,
731 errmsg
= NULL_TREE
, errmsglen
= NULL_TREE
;
733 /* Short cut: For single images without bound checking or without STAT=,
734 return early. (ERRMSG= is always untouched for -fcoarray=single.) */
735 if (!code
->expr2
&& !(gfc_option
.rtcheck
& GFC_RTCHECK_BOUNDS
)
736 && flag_coarray
!= GFC_FCOARRAY_LIB
)
739 gfc_init_se (&se
, NULL
);
740 gfc_start_block (&se
.pre
);
742 if (code
->expr1
&& code
->expr1
->rank
== 0)
744 gfc_init_se (&argse
, NULL
);
745 gfc_conv_expr_val (&argse
, code
->expr1
);
751 gcc_assert (code
->expr2
->expr_type
== EXPR_VARIABLE
);
752 gfc_init_se (&argse
, NULL
);
753 gfc_conv_expr_val (&argse
, code
->expr2
);
757 stat
= null_pointer_node
;
759 if (code
->expr3
&& flag_coarray
== GFC_FCOARRAY_LIB
760 && type
!= EXEC_SYNC_MEMORY
)
762 gcc_assert (code
->expr3
->expr_type
== EXPR_VARIABLE
);
763 gfc_init_se (&argse
, NULL
);
764 gfc_conv_expr (&argse
, code
->expr3
);
765 gfc_conv_string_parameter (&argse
);
766 errmsg
= gfc_build_addr_expr (NULL
, argse
.expr
);
767 errmsglen
= argse
.string_length
;
769 else if (flag_coarray
== GFC_FCOARRAY_LIB
&& type
!= EXEC_SYNC_MEMORY
)
771 errmsg
= null_pointer_node
;
772 errmsglen
= build_int_cst (integer_type_node
, 0);
775 /* Check SYNC IMAGES(imageset) for valid image index.
776 FIXME: Add a check for image-set arrays. */
777 if (code
->expr1
&& (gfc_option
.rtcheck
& GFC_RTCHECK_BOUNDS
)
778 && code
->expr1
->rank
== 0)
781 if (flag_coarray
!= GFC_FCOARRAY_LIB
)
782 cond
= fold_build2_loc (input_location
, NE_EXPR
, boolean_type_node
,
783 images
, build_int_cst (TREE_TYPE (images
), 1));
787 tmp
= build_call_expr_loc (input_location
, gfor_fndecl_caf_num_images
,
788 2, integer_zero_node
,
789 build_int_cst (integer_type_node
, -1));
790 cond
= fold_build2_loc (input_location
, GT_EXPR
, boolean_type_node
,
792 cond2
= fold_build2_loc (input_location
, LT_EXPR
, boolean_type_node
,
794 build_int_cst (TREE_TYPE (images
), 1));
795 cond
= fold_build2_loc (input_location
, TRUTH_OR_EXPR
,
796 boolean_type_node
, cond
, cond2
);
798 gfc_trans_runtime_check (true, false, cond
, &se
.pre
,
799 &code
->expr1
->where
, "Invalid image number "
801 fold_convert (integer_type_node
, images
));
804 /* Per F2008, 8.5.1, a SYNC MEMORY is implied by calling the
805 image control statements SYNC IMAGES and SYNC ALL. */
806 if (flag_coarray
== GFC_FCOARRAY_LIB
)
808 tmp
= builtin_decl_explicit (BUILT_IN_SYNC_SYNCHRONIZE
);
809 tmp
= build_call_expr_loc (input_location
, tmp
, 0);
810 gfc_add_expr_to_block (&se
.pre
, tmp
);
813 if (flag_coarray
!= GFC_FCOARRAY_LIB
|| type
== EXEC_SYNC_MEMORY
)
815 /* Set STAT to zero. */
817 gfc_add_modify (&se
.pre
, stat
, build_int_cst (TREE_TYPE (stat
), 0));
819 else if (type
== EXEC_SYNC_ALL
)
821 /* SYNC ALL => stat == null_pointer_node
822 SYNC ALL(stat=s) => stat has an integer type
824 If "stat" has the wrong integer type, use a temp variable of
825 the right type and later cast the result back into "stat". */
826 if (stat
== null_pointer_node
|| TREE_TYPE (stat
) == integer_type_node
)
828 if (TREE_TYPE (stat
) == integer_type_node
)
829 stat
= gfc_build_addr_expr (NULL
, stat
);
831 tmp
= build_call_expr_loc (input_location
, gfor_fndecl_caf_sync_all
,
832 3, stat
, errmsg
, errmsglen
);
833 gfc_add_expr_to_block (&se
.pre
, tmp
);
837 tree tmp_stat
= gfc_create_var (integer_type_node
, "stat");
839 tmp
= build_call_expr_loc (input_location
, gfor_fndecl_caf_sync_all
,
840 3, gfc_build_addr_expr (NULL
, tmp_stat
),
842 gfc_add_expr_to_block (&se
.pre
, tmp
);
844 gfc_add_modify (&se
.pre
, stat
,
845 fold_convert (TREE_TYPE (stat
), tmp_stat
));
852 gcc_assert (type
== EXEC_SYNC_IMAGES
);
856 len
= build_int_cst (integer_type_node
, -1);
857 images
= null_pointer_node
;
859 else if (code
->expr1
->rank
== 0)
861 len
= build_int_cst (integer_type_node
, 1);
862 images
= gfc_build_addr_expr (NULL_TREE
, images
);
867 if (code
->expr1
->ts
.kind
!= gfc_c_int_kind
)
868 gfc_fatal_error ("Sorry, only support for integer kind %d "
869 "implemented for image-set at %L",
870 gfc_c_int_kind
, &code
->expr1
->where
);
872 gfc_conv_array_parameter (&se
, code
->expr1
, true, NULL
, NULL
, &len
);
875 tmp
= gfc_typenode_for_spec (&code
->expr1
->ts
);
876 if (GFC_ARRAY_TYPE_P (tmp
) || GFC_DESCRIPTOR_TYPE_P (tmp
))
877 tmp
= gfc_get_element_type (tmp
);
879 len
= fold_build2_loc (input_location
, TRUNC_DIV_EXPR
,
880 TREE_TYPE (len
), len
,
881 fold_convert (TREE_TYPE (len
),
882 TYPE_SIZE_UNIT (tmp
)));
883 len
= fold_convert (integer_type_node
, len
);
886 /* SYNC IMAGES(imgs) => stat == null_pointer_node
887 SYNC IMAGES(imgs,stat=s) => stat has an integer type
889 If "stat" has the wrong integer type, use a temp variable of
890 the right type and later cast the result back into "stat". */
891 if (stat
== null_pointer_node
|| TREE_TYPE (stat
) == integer_type_node
)
893 if (TREE_TYPE (stat
) == integer_type_node
)
894 stat
= gfc_build_addr_expr (NULL
, stat
);
896 tmp
= build_call_expr_loc (input_location
, gfor_fndecl_caf_sync_images
,
897 5, fold_convert (integer_type_node
, len
),
898 images
, stat
, errmsg
, errmsglen
);
899 gfc_add_expr_to_block (&se
.pre
, tmp
);
903 tree tmp_stat
= gfc_create_var (integer_type_node
, "stat");
905 tmp
= build_call_expr_loc (input_location
, gfor_fndecl_caf_sync_images
,
906 5, fold_convert (integer_type_node
, len
),
907 images
, gfc_build_addr_expr (NULL
, tmp_stat
),
909 gfc_add_expr_to_block (&se
.pre
, tmp
);
911 gfc_add_modify (&se
.pre
, stat
,
912 fold_convert (TREE_TYPE (stat
), tmp_stat
));
916 return gfc_finish_block (&se
.pre
);
920 /* Generate GENERIC for the IF construct. This function also deals with
921 the simple IF statement, because the front end translates the IF
922 statement into an IF construct.
954 where COND_S is the simplified version of the predicate. PRE_COND_S
955 are the pre side-effects produced by the translation of the
957 We need to build the chain recursively otherwise we run into
958 problems with folding incomplete statements. */
961 gfc_trans_if_1 (gfc_code
* code
)
968 /* Check for an unconditional ELSE clause. */
970 return gfc_trans_code (code
->next
);
972 /* Initialize a statement builder for each block. Puts in NULL_TREEs. */
973 gfc_init_se (&if_se
, NULL
);
974 gfc_start_block (&if_se
.pre
);
976 /* Calculate the IF condition expression. */
977 if (code
->expr1
->where
.lb
)
979 gfc_save_backend_locus (&saved_loc
);
980 gfc_set_backend_locus (&code
->expr1
->where
);
983 gfc_conv_expr_val (&if_se
, code
->expr1
);
985 if (code
->expr1
->where
.lb
)
986 gfc_restore_backend_locus (&saved_loc
);
988 /* Translate the THEN clause. */
989 stmt
= gfc_trans_code (code
->next
);
991 /* Translate the ELSE clause. */
993 elsestmt
= gfc_trans_if_1 (code
->block
);
995 elsestmt
= build_empty_stmt (input_location
);
997 /* Build the condition expression and add it to the condition block. */
998 loc
= code
->expr1
->where
.lb
? code
->expr1
->where
.lb
->location
: input_location
;
999 stmt
= fold_build3_loc (loc
, COND_EXPR
, void_type_node
, if_se
.expr
, stmt
,
1002 gfc_add_expr_to_block (&if_se
.pre
, stmt
);
1004 /* Finish off this statement. */
1005 return gfc_finish_block (&if_se
.pre
);
1009 gfc_trans_if (gfc_code
* code
)
1014 /* Create exit label so it is available for trans'ing the body code. */
1015 exit_label
= gfc_build_label_decl (NULL_TREE
);
1016 code
->exit_label
= exit_label
;
1018 /* Translate the actual code in code->block. */
1019 gfc_init_block (&body
);
1020 gfc_add_expr_to_block (&body
, gfc_trans_if_1 (code
->block
));
1022 /* Add exit label. */
1023 gfc_add_expr_to_block (&body
, build1_v (LABEL_EXPR
, exit_label
));
1025 return gfc_finish_block (&body
);
1029 /* Translate an arithmetic IF expression.
1031 IF (cond) label1, label2, label3 translates to
1043 An optimized version can be generated in case of equal labels.
1044 E.g., if label1 is equal to label2, we can translate it to
1053 gfc_trans_arithmetic_if (gfc_code
* code
)
1061 /* Start a new block. */
1062 gfc_init_se (&se
, NULL
);
1063 gfc_start_block (&se
.pre
);
1065 /* Pre-evaluate COND. */
1066 gfc_conv_expr_val (&se
, code
->expr1
);
1067 se
.expr
= gfc_evaluate_now (se
.expr
, &se
.pre
);
1069 /* Build something to compare with. */
1070 zero
= gfc_build_const (TREE_TYPE (se
.expr
), integer_zero_node
);
1072 if (code
->label1
->value
!= code
->label2
->value
)
1074 /* If (cond < 0) take branch1 else take branch2.
1075 First build jumps to the COND .LT. 0 and the COND .EQ. 0 cases. */
1076 branch1
= build1_v (GOTO_EXPR
, gfc_get_label_decl (code
->label1
));
1077 branch2
= build1_v (GOTO_EXPR
, gfc_get_label_decl (code
->label2
));
1079 if (code
->label1
->value
!= code
->label3
->value
)
1080 tmp
= fold_build2_loc (input_location
, LT_EXPR
, boolean_type_node
,
1083 tmp
= fold_build2_loc (input_location
, NE_EXPR
, boolean_type_node
,
1086 branch1
= fold_build3_loc (input_location
, COND_EXPR
, void_type_node
,
1087 tmp
, branch1
, branch2
);
1090 branch1
= build1_v (GOTO_EXPR
, gfc_get_label_decl (code
->label1
));
1092 if (code
->label1
->value
!= code
->label3
->value
1093 && code
->label2
->value
!= code
->label3
->value
)
1095 /* if (cond <= 0) take branch1 else take branch2. */
1096 branch2
= build1_v (GOTO_EXPR
, gfc_get_label_decl (code
->label3
));
1097 tmp
= fold_build2_loc (input_location
, LE_EXPR
, boolean_type_node
,
1099 branch1
= fold_build3_loc (input_location
, COND_EXPR
, void_type_node
,
1100 tmp
, branch1
, branch2
);
1103 /* Append the COND_EXPR to the evaluation of COND, and return. */
1104 gfc_add_expr_to_block (&se
.pre
, branch1
);
1105 return gfc_finish_block (&se
.pre
);
1109 /* Translate a CRITICAL block. */
1111 gfc_trans_critical (gfc_code
*code
)
1114 tree tmp
, token
= NULL_TREE
;
1116 gfc_start_block (&block
);
1118 if (flag_coarray
== GFC_FCOARRAY_LIB
)
1120 token
= gfc_get_symbol_decl (code
->resolved_sym
);
1121 token
= GFC_TYPE_ARRAY_CAF_TOKEN (TREE_TYPE (token
));
1122 tmp
= build_call_expr_loc (input_location
, gfor_fndecl_caf_lock
, 7,
1123 token
, integer_zero_node
, integer_one_node
,
1124 null_pointer_node
, null_pointer_node
,
1125 null_pointer_node
, integer_zero_node
);
1126 gfc_add_expr_to_block (&block
, tmp
);
1129 tmp
= gfc_trans_code (code
->block
->next
);
1130 gfc_add_expr_to_block (&block
, tmp
);
1132 if (flag_coarray
== GFC_FCOARRAY_LIB
)
1134 tmp
= build_call_expr_loc (input_location
, gfor_fndecl_caf_unlock
, 6,
1135 token
, integer_zero_node
, integer_one_node
,
1136 null_pointer_node
, null_pointer_node
,
1138 gfc_add_expr_to_block (&block
, tmp
);
1142 return gfc_finish_block (&block
);
1146 /* Do proper initialization for ASSOCIATE names. */
1149 trans_associate_var (gfc_symbol
*sym
, gfc_wrapped_block
*block
)
1160 gcc_assert (sym
->assoc
);
1161 e
= sym
->assoc
->target
;
1163 class_target
= (e
->expr_type
== EXPR_VARIABLE
)
1164 && (gfc_is_class_scalar_expr (e
)
1165 || gfc_is_class_array_ref (e
, NULL
));
1167 unlimited
= UNLIMITED_POLY (e
);
1169 /* Do a `pointer assignment' with updated descriptor (or assign descriptor
1170 to array temporary) for arrays with either unknown shape or if associating
1172 if (sym
->attr
.dimension
&& !class_target
1173 && (sym
->as
->type
== AS_DEFERRED
|| sym
->assoc
->variable
))
1177 bool cst_array_ctor
;
1179 desc
= sym
->backend_decl
;
1180 cst_array_ctor
= e
->expr_type
== EXPR_ARRAY
1181 && gfc_constant_array_constructor_p (e
->value
.constructor
);
1183 /* If association is to an expression, evaluate it and create temporary.
1184 Otherwise, get descriptor of target for pointer assignment. */
1185 gfc_init_se (&se
, NULL
);
1186 if (sym
->assoc
->variable
|| cst_array_ctor
)
1188 se
.direct_byref
= 1;
1193 gfc_conv_expr_descriptor (&se
, e
);
1195 /* If we didn't already do the pointer assignment, set associate-name
1196 descriptor to the one generated for the temporary. */
1197 if (!sym
->assoc
->variable
&& !cst_array_ctor
)
1201 gfc_add_modify (&se
.pre
, desc
, se
.expr
);
1203 /* The generated descriptor has lower bound zero (as array
1204 temporary), shift bounds so we get lower bounds of 1. */
1205 for (dim
= 0; dim
< e
->rank
; ++dim
)
1206 gfc_conv_shift_descriptor_lbound (&se
.pre
, desc
,
1207 dim
, gfc_index_one_node
);
1210 /* If this is a subreference array pointer associate name use the
1211 associate variable element size for the value of 'span'. */
1212 if (sym
->attr
.subref_array_pointer
)
1214 gcc_assert (e
->expr_type
== EXPR_VARIABLE
);
1215 tmp
= e
->symtree
->n
.sym
->backend_decl
;
1216 tmp
= gfc_get_element_type (TREE_TYPE (tmp
));
1217 tmp
= fold_convert (gfc_array_index_type
, size_in_bytes (tmp
));
1218 gfc_add_modify (&se
.pre
, GFC_DECL_SPAN(desc
), tmp
);
1221 /* Done, register stuff as init / cleanup code. */
1222 gfc_add_init_cleanup (block
, gfc_finish_block (&se
.pre
),
1223 gfc_finish_block (&se
.post
));
1226 /* Temporaries, arising from TYPE IS, just need the descriptor of class
1227 arrays to be assigned directly. */
1228 else if (class_target
&& sym
->attr
.dimension
1229 && (sym
->ts
.type
== BT_DERIVED
|| unlimited
))
1233 gfc_init_se (&se
, NULL
);
1234 se
.descriptor_only
= 1;
1235 gfc_conv_expr (&se
, e
);
1237 gcc_assert (GFC_DESCRIPTOR_TYPE_P (TREE_TYPE (se
.expr
)));
1238 gcc_assert (GFC_DESCRIPTOR_TYPE_P (TREE_TYPE (sym
->backend_decl
)));
1240 gfc_add_modify (&se
.pre
, sym
->backend_decl
, se
.expr
);
1244 /* Recover the dtype, which has been overwritten by the
1245 assignment from an unlimited polymorphic object. */
1246 tmp
= gfc_conv_descriptor_dtype (sym
->backend_decl
);
1247 gfc_add_modify (&se
.pre
, tmp
,
1248 gfc_get_dtype (TREE_TYPE (sym
->backend_decl
)));
1251 gfc_add_init_cleanup (block
, gfc_finish_block( &se
.pre
),
1252 gfc_finish_block (&se
.post
));
1255 /* Do a scalar pointer assignment; this is for scalar variable targets. */
1256 else if (gfc_is_associate_pointer (sym
))
1260 gcc_assert (!sym
->attr
.dimension
);
1262 gfc_init_se (&se
, NULL
);
1264 /* Class associate-names come this way because they are
1265 unconditionally associate pointers and the symbol is scalar. */
1266 if (sym
->ts
.type
== BT_CLASS
&& CLASS_DATA (sym
)->attr
.dimension
)
1268 /* For a class array we need a descriptor for the selector. */
1269 gfc_conv_expr_descriptor (&se
, e
);
1271 /* Obtain a temporary class container for the result. */
1272 gfc_conv_class_to_class (&se
, e
, sym
->ts
, false, true, false, false);
1273 se
.expr
= build_fold_indirect_ref_loc (input_location
, se
.expr
);
1275 /* Set the offset. */
1276 desc
= gfc_class_data_get (se
.expr
);
1277 offset
= gfc_index_zero_node
;
1278 for (n
= 0; n
< e
->rank
; n
++)
1280 dim
= gfc_rank_cst
[n
];
1281 tmp
= fold_build2_loc (input_location
, MULT_EXPR
,
1282 gfc_array_index_type
,
1283 gfc_conv_descriptor_stride_get (desc
, dim
),
1284 gfc_conv_descriptor_lbound_get (desc
, dim
));
1285 offset
= fold_build2_loc (input_location
, MINUS_EXPR
,
1286 gfc_array_index_type
,
1289 gfc_conv_descriptor_offset_set (&se
.pre
, desc
, offset
);
1291 else if (sym
->ts
.type
== BT_CLASS
&& e
->ts
.type
== BT_CLASS
1292 && CLASS_DATA (e
)->attr
.dimension
)
1294 /* This is bound to be a class array element. */
1295 gfc_conv_expr_reference (&se
, e
);
1296 /* Get the _vptr component of the class object. */
1297 tmp
= gfc_get_vptr_from_expr (se
.expr
);
1298 /* Obtain a temporary class container for the result. */
1299 gfc_conv_derived_to_class (&se
, e
, sym
->ts
, tmp
, false, false);
1300 se
.expr
= build_fold_indirect_ref_loc (input_location
, se
.expr
);
1303 gfc_conv_expr (&se
, e
);
1305 tmp
= TREE_TYPE (sym
->backend_decl
);
1306 tmp
= gfc_build_addr_expr (tmp
, se
.expr
);
1307 gfc_add_modify (&se
.pre
, sym
->backend_decl
, tmp
);
1309 gfc_add_init_cleanup (block
, gfc_finish_block( &se
.pre
),
1310 gfc_finish_block (&se
.post
));
1313 /* Do a simple assignment. This is for scalar expressions, where we
1314 can simply use expression assignment. */
1319 lhs
= gfc_lval_expr_from_sym (sym
);
1320 tmp
= gfc_trans_assignment (lhs
, e
, false, true);
1321 gfc_add_init_cleanup (block
, tmp
, NULL_TREE
);
1324 /* Set the stringlength from the vtable size. */
1325 if (sym
->ts
.type
== BT_CHARACTER
&& sym
->attr
.select_type_temporary
)
1329 gfc_init_se (&se
, NULL
);
1330 gcc_assert (UNLIMITED_POLY (e
->symtree
->n
.sym
));
1331 tmp
= gfc_get_symbol_decl (e
->symtree
->n
.sym
);
1332 tmp
= gfc_vtable_size_get (tmp
);
1333 gfc_get_symbol_decl (sym
);
1334 charlen
= sym
->ts
.u
.cl
->backend_decl
;
1335 gfc_add_modify (&se
.pre
, charlen
,
1336 fold_convert (TREE_TYPE (charlen
), tmp
));
1337 gfc_add_init_cleanup (block
, gfc_finish_block( &se
.pre
),
1338 gfc_finish_block (&se
.post
));
1343 /* Translate a BLOCK construct. This is basically what we would do for a
1347 gfc_trans_block_construct (gfc_code
* code
)
1351 gfc_wrapped_block block
;
1354 gfc_association_list
*ass
;
1356 ns
= code
->ext
.block
.ns
;
1358 sym
= ns
->proc_name
;
1361 /* Process local variables. */
1362 gcc_assert (!sym
->tlink
);
1364 gfc_process_block_locals (ns
);
1366 /* Generate code including exit-label. */
1367 gfc_init_block (&body
);
1368 exit_label
= gfc_build_label_decl (NULL_TREE
);
1369 code
->exit_label
= exit_label
;
1370 gfc_add_expr_to_block (&body
, gfc_trans_code (ns
->code
));
1371 gfc_add_expr_to_block (&body
, build1_v (LABEL_EXPR
, exit_label
));
1373 /* Finish everything. */
1374 gfc_start_wrapped_block (&block
, gfc_finish_block (&body
));
1375 gfc_trans_deferred_vars (sym
, &block
);
1376 for (ass
= code
->ext
.block
.assoc
; ass
; ass
= ass
->next
)
1377 trans_associate_var (ass
->st
->n
.sym
, &block
);
1379 return gfc_finish_wrapped_block (&block
);
1383 /* Translate the simple DO construct. This is where the loop variable has
1384 integer type and step +-1. We can't use this in the general case
1385 because integer overflow and floating point errors could give incorrect
1387 We translate a do loop from:
1389 DO dovar = from, to, step
1395 [Evaluate loop bounds and step]
1397 if ((step > 0) ? (dovar <= to) : (dovar => to))
1403 cond = (dovar == to);
1405 if (cond) goto end_label;
1410 This helps the optimizers by avoiding the extra induction variable
1411 used in the general case. */
1414 gfc_trans_simple_do (gfc_code
* code
, stmtblock_t
*pblock
, tree dovar
,
1415 tree from
, tree to
, tree step
, tree exit_cond
)
1421 tree saved_dovar
= NULL
;
1426 type
= TREE_TYPE (dovar
);
1428 loc
= code
->ext
.iterator
->start
->where
.lb
->location
;
1430 /* Initialize the DO variable: dovar = from. */
1431 gfc_add_modify_loc (loc
, pblock
, dovar
,
1432 fold_convert (TREE_TYPE(dovar
), from
));
1434 /* Save value for do-tinkering checking. */
1435 if (gfc_option
.rtcheck
& GFC_RTCHECK_DO
)
1437 saved_dovar
= gfc_create_var (type
, ".saved_dovar");
1438 gfc_add_modify_loc (loc
, pblock
, saved_dovar
, dovar
);
1441 /* Cycle and exit statements are implemented with gotos. */
1442 cycle_label
= gfc_build_label_decl (NULL_TREE
);
1443 exit_label
= gfc_build_label_decl (NULL_TREE
);
1445 /* Put the labels where they can be found later. See gfc_trans_do(). */
1446 code
->cycle_label
= cycle_label
;
1447 code
->exit_label
= exit_label
;
1450 gfc_start_block (&body
);
1452 /* Main loop body. */
1453 tmp
= gfc_trans_code_cond (code
->block
->next
, exit_cond
);
1454 gfc_add_expr_to_block (&body
, tmp
);
1456 /* Label for cycle statements (if needed). */
1457 if (TREE_USED (cycle_label
))
1459 tmp
= build1_v (LABEL_EXPR
, cycle_label
);
1460 gfc_add_expr_to_block (&body
, tmp
);
1463 /* Check whether someone has modified the loop variable. */
1464 if (gfc_option
.rtcheck
& GFC_RTCHECK_DO
)
1466 tmp
= fold_build2_loc (loc
, NE_EXPR
, boolean_type_node
,
1467 dovar
, saved_dovar
);
1468 gfc_trans_runtime_check (true, false, tmp
, &body
, &code
->loc
,
1469 "Loop variable has been modified");
1472 /* Exit the loop if there is an I/O result condition or error. */
1475 tmp
= build1_v (GOTO_EXPR
, exit_label
);
1476 tmp
= fold_build3_loc (loc
, COND_EXPR
, void_type_node
,
1478 build_empty_stmt (loc
));
1479 gfc_add_expr_to_block (&body
, tmp
);
1482 /* Evaluate the loop condition. */
1483 cond
= fold_build2_loc (loc
, EQ_EXPR
, boolean_type_node
, dovar
,
1485 cond
= gfc_evaluate_now_loc (loc
, cond
, &body
);
1487 /* Increment the loop variable. */
1488 tmp
= fold_build2_loc (loc
, PLUS_EXPR
, type
, dovar
, step
);
1489 gfc_add_modify_loc (loc
, &body
, dovar
, tmp
);
1491 if (gfc_option
.rtcheck
& GFC_RTCHECK_DO
)
1492 gfc_add_modify_loc (loc
, &body
, saved_dovar
, dovar
);
1494 /* The loop exit. */
1495 tmp
= fold_build1_loc (loc
, GOTO_EXPR
, void_type_node
, exit_label
);
1496 TREE_USED (exit_label
) = 1;
1497 tmp
= fold_build3_loc (loc
, COND_EXPR
, void_type_node
,
1498 cond
, tmp
, build_empty_stmt (loc
));
1499 gfc_add_expr_to_block (&body
, tmp
);
1501 /* Finish the loop body. */
1502 tmp
= gfc_finish_block (&body
);
1503 tmp
= fold_build1_loc (loc
, LOOP_EXPR
, void_type_node
, tmp
);
1505 /* Only execute the loop if the number of iterations is positive. */
1506 if (tree_int_cst_sgn (step
) > 0)
1507 cond
= fold_build2_loc (loc
, LE_EXPR
, boolean_type_node
, dovar
,
1510 cond
= fold_build2_loc (loc
, GE_EXPR
, boolean_type_node
, dovar
,
1512 tmp
= fold_build3_loc (loc
, COND_EXPR
, void_type_node
, cond
, tmp
,
1513 build_empty_stmt (loc
));
1514 gfc_add_expr_to_block (pblock
, tmp
);
1516 /* Add the exit label. */
1517 tmp
= build1_v (LABEL_EXPR
, exit_label
);
1518 gfc_add_expr_to_block (pblock
, tmp
);
1520 return gfc_finish_block (pblock
);
1523 /* Translate the DO construct. This obviously is one of the most
1524 important ones to get right with any compiler, but especially
1527 We special case some loop forms as described in gfc_trans_simple_do.
1528 For other cases we implement them with a separate loop count,
1529 as described in the standard.
1531 We translate a do loop from:
1533 DO dovar = from, to, step
1539 [evaluate loop bounds and step]
1540 empty = (step > 0 ? to < from : to > from);
1541 countm1 = (to - from) / step;
1543 if (empty) goto exit_label;
1551 if (countm1t == 0) goto exit_label;
1555 countm1 is an unsigned integer. It is equal to the loop count minus one,
1556 because the loop count itself can overflow. */
1559 gfc_trans_do (gfc_code
* code
, tree exit_cond
)
1563 tree saved_dovar
= NULL
;
1578 gfc_start_block (&block
);
1580 loc
= code
->ext
.iterator
->start
->where
.lb
->location
;
1582 /* Evaluate all the expressions in the iterator. */
1583 gfc_init_se (&se
, NULL
);
1584 gfc_conv_expr_lhs (&se
, code
->ext
.iterator
->var
);
1585 gfc_add_block_to_block (&block
, &se
.pre
);
1587 type
= TREE_TYPE (dovar
);
1589 gfc_init_se (&se
, NULL
);
1590 gfc_conv_expr_val (&se
, code
->ext
.iterator
->start
);
1591 gfc_add_block_to_block (&block
, &se
.pre
);
1592 from
= gfc_evaluate_now (se
.expr
, &block
);
1594 gfc_init_se (&se
, NULL
);
1595 gfc_conv_expr_val (&se
, code
->ext
.iterator
->end
);
1596 gfc_add_block_to_block (&block
, &se
.pre
);
1597 to
= gfc_evaluate_now (se
.expr
, &block
);
1599 gfc_init_se (&se
, NULL
);
1600 gfc_conv_expr_val (&se
, code
->ext
.iterator
->step
);
1601 gfc_add_block_to_block (&block
, &se
.pre
);
1602 step
= gfc_evaluate_now (se
.expr
, &block
);
1604 if (gfc_option
.rtcheck
& GFC_RTCHECK_DO
)
1606 tmp
= fold_build2_loc (input_location
, EQ_EXPR
, boolean_type_node
, step
,
1607 build_zero_cst (type
));
1608 gfc_trans_runtime_check (true, false, tmp
, &block
, &code
->loc
,
1609 "DO step value is zero");
1612 /* Special case simple loops. */
1613 if (TREE_CODE (type
) == INTEGER_TYPE
1614 && (integer_onep (step
)
1615 || tree_int_cst_equal (step
, integer_minus_one_node
)))
1616 return gfc_trans_simple_do (code
, &block
, dovar
, from
, to
, step
, exit_cond
);
1619 if (TREE_CODE (type
) == INTEGER_TYPE
)
1620 utype
= unsigned_type_for (type
);
1622 utype
= unsigned_type_for (gfc_array_index_type
);
1623 countm1
= gfc_create_var (utype
, "countm1");
1625 /* Cycle and exit statements are implemented with gotos. */
1626 cycle_label
= gfc_build_label_decl (NULL_TREE
);
1627 exit_label
= gfc_build_label_decl (NULL_TREE
);
1628 TREE_USED (exit_label
) = 1;
1630 /* Put these labels where they can be found later. */
1631 code
->cycle_label
= cycle_label
;
1632 code
->exit_label
= exit_label
;
1634 /* Initialize the DO variable: dovar = from. */
1635 gfc_add_modify (&block
, dovar
, from
);
1637 /* Save value for do-tinkering checking. */
1638 if (gfc_option
.rtcheck
& GFC_RTCHECK_DO
)
1640 saved_dovar
= gfc_create_var (type
, ".saved_dovar");
1641 gfc_add_modify_loc (loc
, &block
, saved_dovar
, dovar
);
1644 /* Initialize loop count and jump to exit label if the loop is empty.
1645 This code is executed before we enter the loop body. We generate:
1648 countm1 = (to - from) / step;
1654 countm1 = (from - to) / -step;
1660 if (TREE_CODE (type
) == INTEGER_TYPE
)
1662 tree pos
, neg
, tou
, fromu
, stepu
, tmp2
;
1664 /* The distance from FROM to TO cannot always be represented in a signed
1665 type, thus use unsigned arithmetic, also to avoid any undefined
1667 tou
= fold_convert (utype
, to
);
1668 fromu
= fold_convert (utype
, from
);
1669 stepu
= fold_convert (utype
, step
);
1671 /* For a positive step, when to < from, exit, otherwise compute
1672 countm1 = ((unsigned)to - (unsigned)from) / (unsigned)step */
1673 tmp
= fold_build2_loc (loc
, LT_EXPR
, boolean_type_node
, to
, from
);
1674 tmp2
= fold_build2_loc (loc
, TRUNC_DIV_EXPR
, utype
,
1675 fold_build2_loc (loc
, MINUS_EXPR
, utype
,
1678 pos
= build2 (COMPOUND_EXPR
, void_type_node
,
1679 fold_build2 (MODIFY_EXPR
, void_type_node
,
1681 build3_loc (loc
, COND_EXPR
, void_type_node
, tmp
,
1682 build1_loc (loc
, GOTO_EXPR
, void_type_node
,
1683 exit_label
), NULL_TREE
));
1685 /* For a negative step, when to > from, exit, otherwise compute
1686 countm1 = ((unsigned)from - (unsigned)to) / -(unsigned)step */
1687 tmp
= fold_build2_loc (loc
, GT_EXPR
, boolean_type_node
, to
, from
);
1688 tmp2
= fold_build2_loc (loc
, TRUNC_DIV_EXPR
, utype
,
1689 fold_build2_loc (loc
, MINUS_EXPR
, utype
,
1691 fold_build1_loc (loc
, NEGATE_EXPR
, utype
, stepu
));
1692 neg
= build2 (COMPOUND_EXPR
, void_type_node
,
1693 fold_build2 (MODIFY_EXPR
, void_type_node
,
1695 build3_loc (loc
, COND_EXPR
, void_type_node
, tmp
,
1696 build1_loc (loc
, GOTO_EXPR
, void_type_node
,
1697 exit_label
), NULL_TREE
));
1699 tmp
= fold_build2_loc (loc
, LT_EXPR
, boolean_type_node
, step
,
1700 build_int_cst (TREE_TYPE (step
), 0));
1701 tmp
= fold_build3_loc (loc
, COND_EXPR
, void_type_node
, tmp
, neg
, pos
);
1703 gfc_add_expr_to_block (&block
, tmp
);
1709 /* TODO: We could use the same width as the real type.
1710 This would probably cause more problems that it solves
1711 when we implement "long double" types. */
1713 tmp
= fold_build2_loc (loc
, MINUS_EXPR
, type
, to
, from
);
1714 tmp
= fold_build2_loc (loc
, RDIV_EXPR
, type
, tmp
, step
);
1715 tmp
= fold_build1_loc (loc
, FIX_TRUNC_EXPR
, utype
, tmp
);
1716 gfc_add_modify (&block
, countm1
, tmp
);
1718 /* We need a special check for empty loops:
1719 empty = (step > 0 ? to < from : to > from); */
1720 pos_step
= fold_build2_loc (loc
, GT_EXPR
, boolean_type_node
, step
,
1721 build_zero_cst (type
));
1722 tmp
= fold_build3_loc (loc
, COND_EXPR
, boolean_type_node
, pos_step
,
1723 fold_build2_loc (loc
, LT_EXPR
,
1724 boolean_type_node
, to
, from
),
1725 fold_build2_loc (loc
, GT_EXPR
,
1726 boolean_type_node
, to
, from
));
1727 /* If the loop is empty, go directly to the exit label. */
1728 tmp
= fold_build3_loc (loc
, COND_EXPR
, void_type_node
, tmp
,
1729 build1_v (GOTO_EXPR
, exit_label
),
1730 build_empty_stmt (input_location
));
1731 gfc_add_expr_to_block (&block
, tmp
);
1735 gfc_start_block (&body
);
1737 /* Main loop body. */
1738 tmp
= gfc_trans_code_cond (code
->block
->next
, exit_cond
);
1739 gfc_add_expr_to_block (&body
, tmp
);
1741 /* Label for cycle statements (if needed). */
1742 if (TREE_USED (cycle_label
))
1744 tmp
= build1_v (LABEL_EXPR
, cycle_label
);
1745 gfc_add_expr_to_block (&body
, tmp
);
1748 /* Check whether someone has modified the loop variable. */
1749 if (gfc_option
.rtcheck
& GFC_RTCHECK_DO
)
1751 tmp
= fold_build2_loc (loc
, NE_EXPR
, boolean_type_node
, dovar
,
1753 gfc_trans_runtime_check (true, false, tmp
, &body
, &code
->loc
,
1754 "Loop variable has been modified");
1757 /* Exit the loop if there is an I/O result condition or error. */
1760 tmp
= build1_v (GOTO_EXPR
, exit_label
);
1761 tmp
= fold_build3_loc (loc
, COND_EXPR
, void_type_node
,
1763 build_empty_stmt (input_location
));
1764 gfc_add_expr_to_block (&body
, tmp
);
1767 /* Increment the loop variable. */
1768 tmp
= fold_build2_loc (loc
, PLUS_EXPR
, type
, dovar
, step
);
1769 gfc_add_modify_loc (loc
, &body
, dovar
, tmp
);
1771 if (gfc_option
.rtcheck
& GFC_RTCHECK_DO
)
1772 gfc_add_modify_loc (loc
, &body
, saved_dovar
, dovar
);
1774 /* Initialize countm1t. */
1775 tree countm1t
= gfc_create_var (utype
, "countm1t");
1776 gfc_add_modify_loc (loc
, &body
, countm1t
, countm1
);
1778 /* Decrement the loop count. */
1779 tmp
= fold_build2_loc (loc
, MINUS_EXPR
, utype
, countm1
,
1780 build_int_cst (utype
, 1));
1781 gfc_add_modify_loc (loc
, &body
, countm1
, tmp
);
1783 /* End with the loop condition. Loop until countm1t == 0. */
1784 cond
= fold_build2_loc (loc
, EQ_EXPR
, boolean_type_node
, countm1t
,
1785 build_int_cst (utype
, 0));
1786 tmp
= fold_build1_loc (loc
, GOTO_EXPR
, void_type_node
, exit_label
);
1787 tmp
= fold_build3_loc (loc
, COND_EXPR
, void_type_node
,
1788 cond
, tmp
, build_empty_stmt (loc
));
1789 gfc_add_expr_to_block (&body
, tmp
);
1791 /* End of loop body. */
1792 tmp
= gfc_finish_block (&body
);
1794 /* The for loop itself. */
1795 tmp
= fold_build1_loc (loc
, LOOP_EXPR
, void_type_node
, tmp
);
1796 gfc_add_expr_to_block (&block
, tmp
);
1798 /* Add the exit label. */
1799 tmp
= build1_v (LABEL_EXPR
, exit_label
);
1800 gfc_add_expr_to_block (&block
, tmp
);
1802 return gfc_finish_block (&block
);
1806 /* Translate the DO WHILE construct.
1819 if (! cond) goto exit_label;
1825 Because the evaluation of the exit condition `cond' may have side
1826 effects, we can't do much for empty loop bodies. The backend optimizers
1827 should be smart enough to eliminate any dead loops. */
1830 gfc_trans_do_while (gfc_code
* code
)
1838 /* Everything we build here is part of the loop body. */
1839 gfc_start_block (&block
);
1841 /* Cycle and exit statements are implemented with gotos. */
1842 cycle_label
= gfc_build_label_decl (NULL_TREE
);
1843 exit_label
= gfc_build_label_decl (NULL_TREE
);
1845 /* Put the labels where they can be found later. See gfc_trans_do(). */
1846 code
->cycle_label
= cycle_label
;
1847 code
->exit_label
= exit_label
;
1849 /* Create a GIMPLE version of the exit condition. */
1850 gfc_init_se (&cond
, NULL
);
1851 gfc_conv_expr_val (&cond
, code
->expr1
);
1852 gfc_add_block_to_block (&block
, &cond
.pre
);
1853 cond
.expr
= fold_build1_loc (code
->expr1
->where
.lb
->location
,
1854 TRUTH_NOT_EXPR
, TREE_TYPE (cond
.expr
), cond
.expr
);
1856 /* Build "IF (! cond) GOTO exit_label". */
1857 tmp
= build1_v (GOTO_EXPR
, exit_label
);
1858 TREE_USED (exit_label
) = 1;
1859 tmp
= fold_build3_loc (code
->expr1
->where
.lb
->location
, COND_EXPR
,
1860 void_type_node
, cond
.expr
, tmp
,
1861 build_empty_stmt (code
->expr1
->where
.lb
->location
));
1862 gfc_add_expr_to_block (&block
, tmp
);
1864 /* The main body of the loop. */
1865 tmp
= gfc_trans_code (code
->block
->next
);
1866 gfc_add_expr_to_block (&block
, tmp
);
1868 /* Label for cycle statements (if needed). */
1869 if (TREE_USED (cycle_label
))
1871 tmp
= build1_v (LABEL_EXPR
, cycle_label
);
1872 gfc_add_expr_to_block (&block
, tmp
);
1875 /* End of loop body. */
1876 tmp
= gfc_finish_block (&block
);
1878 gfc_init_block (&block
);
1879 /* Build the loop. */
1880 tmp
= fold_build1_loc (code
->expr1
->where
.lb
->location
, LOOP_EXPR
,
1881 void_type_node
, tmp
);
1882 gfc_add_expr_to_block (&block
, tmp
);
1884 /* Add the exit label. */
1885 tmp
= build1_v (LABEL_EXPR
, exit_label
);
1886 gfc_add_expr_to_block (&block
, tmp
);
1888 return gfc_finish_block (&block
);
1892 /* Translate the SELECT CASE construct for INTEGER case expressions,
1893 without killing all potential optimizations. The problem is that
1894 Fortran allows unbounded cases, but the back-end does not, so we
1895 need to intercept those before we enter the equivalent SWITCH_EXPR
1898 For example, we translate this,
1901 CASE (:100,101,105:115)
1911 to the GENERIC equivalent,
1915 case (minimum value for typeof(expr) ... 100:
1921 case 200 ... (maximum value for typeof(expr):
1938 gfc_trans_integer_select (gfc_code
* code
)
1948 gfc_start_block (&block
);
1950 /* Calculate the switch expression. */
1951 gfc_init_se (&se
, NULL
);
1952 gfc_conv_expr_val (&se
, code
->expr1
);
1953 gfc_add_block_to_block (&block
, &se
.pre
);
1955 end_label
= gfc_build_label_decl (NULL_TREE
);
1957 gfc_init_block (&body
);
1959 for (c
= code
->block
; c
; c
= c
->block
)
1961 for (cp
= c
->ext
.block
.case_list
; cp
; cp
= cp
->next
)
1966 /* Assume it's the default case. */
1967 low
= high
= NULL_TREE
;
1971 low
= gfc_conv_mpz_to_tree (cp
->low
->value
.integer
,
1974 /* If there's only a lower bound, set the high bound to the
1975 maximum value of the case expression. */
1977 high
= TYPE_MAX_VALUE (TREE_TYPE (se
.expr
));
1982 /* Three cases are possible here:
1984 1) There is no lower bound, e.g. CASE (:N).
1985 2) There is a lower bound .NE. high bound, that is
1986 a case range, e.g. CASE (N:M) where M>N (we make
1987 sure that M>N during type resolution).
1988 3) There is a lower bound, and it has the same value
1989 as the high bound, e.g. CASE (N:N). This is our
1990 internal representation of CASE(N).
1992 In the first and second case, we need to set a value for
1993 high. In the third case, we don't because the GCC middle
1994 end represents a single case value by just letting high be
1995 a NULL_TREE. We can't do that because we need to be able
1996 to represent unbounded cases. */
2000 && mpz_cmp (cp
->low
->value
.integer
,
2001 cp
->high
->value
.integer
) != 0))
2002 high
= gfc_conv_mpz_to_tree (cp
->high
->value
.integer
,
2005 /* Unbounded case. */
2007 low
= TYPE_MIN_VALUE (TREE_TYPE (se
.expr
));
2010 /* Build a label. */
2011 label
= gfc_build_label_decl (NULL_TREE
);
2013 /* Add this case label.
2014 Add parameter 'label', make it match GCC backend. */
2015 tmp
= build_case_label (low
, high
, label
);
2016 gfc_add_expr_to_block (&body
, tmp
);
2019 /* Add the statements for this case. */
2020 tmp
= gfc_trans_code (c
->next
);
2021 gfc_add_expr_to_block (&body
, tmp
);
2023 /* Break to the end of the construct. */
2024 tmp
= build1_v (GOTO_EXPR
, end_label
);
2025 gfc_add_expr_to_block (&body
, tmp
);
2028 tmp
= gfc_finish_block (&body
);
2029 tmp
= fold_build3_loc (input_location
, SWITCH_EXPR
, NULL_TREE
,
2030 se
.expr
, tmp
, NULL_TREE
);
2031 gfc_add_expr_to_block (&block
, tmp
);
2033 tmp
= build1_v (LABEL_EXPR
, end_label
);
2034 gfc_add_expr_to_block (&block
, tmp
);
2036 return gfc_finish_block (&block
);
2040 /* Translate the SELECT CASE construct for LOGICAL case expressions.
2042 There are only two cases possible here, even though the standard
2043 does allow three cases in a LOGICAL SELECT CASE construct: .TRUE.,
2044 .FALSE., and DEFAULT.
2046 We never generate more than two blocks here. Instead, we always
2047 try to eliminate the DEFAULT case. This way, we can translate this
2048 kind of SELECT construct to a simple
2052 expression in GENERIC. */
2055 gfc_trans_logical_select (gfc_code
* code
)
2058 gfc_code
*t
, *f
, *d
;
2063 /* Assume we don't have any cases at all. */
2066 /* Now see which ones we actually do have. We can have at most two
2067 cases in a single case list: one for .TRUE. and one for .FALSE.
2068 The default case is always separate. If the cases for .TRUE. and
2069 .FALSE. are in the same case list, the block for that case list
2070 always executed, and we don't generate code a COND_EXPR. */
2071 for (c
= code
->block
; c
; c
= c
->block
)
2073 for (cp
= c
->ext
.block
.case_list
; cp
; cp
= cp
->next
)
2077 if (cp
->low
->value
.logical
== 0) /* .FALSE. */
2079 else /* if (cp->value.logical != 0), thus .TRUE. */
2087 /* Start a new block. */
2088 gfc_start_block (&block
);
2090 /* Calculate the switch expression. We always need to do this
2091 because it may have side effects. */
2092 gfc_init_se (&se
, NULL
);
2093 gfc_conv_expr_val (&se
, code
->expr1
);
2094 gfc_add_block_to_block (&block
, &se
.pre
);
2096 if (t
== f
&& t
!= NULL
)
2098 /* Cases for .TRUE. and .FALSE. are in the same block. Just
2099 translate the code for these cases, append it to the current
2101 gfc_add_expr_to_block (&block
, gfc_trans_code (t
->next
));
2105 tree true_tree
, false_tree
, stmt
;
2107 true_tree
= build_empty_stmt (input_location
);
2108 false_tree
= build_empty_stmt (input_location
);
2110 /* If we have a case for .TRUE. and for .FALSE., discard the default case.
2111 Otherwise, if .TRUE. or .FALSE. is missing and there is a default case,
2112 make the missing case the default case. */
2113 if (t
!= NULL
&& f
!= NULL
)
2123 /* Translate the code for each of these blocks, and append it to
2124 the current block. */
2126 true_tree
= gfc_trans_code (t
->next
);
2129 false_tree
= gfc_trans_code (f
->next
);
2131 stmt
= fold_build3_loc (input_location
, COND_EXPR
, void_type_node
,
2132 se
.expr
, true_tree
, false_tree
);
2133 gfc_add_expr_to_block (&block
, stmt
);
2136 return gfc_finish_block (&block
);
2140 /* The jump table types are stored in static variables to avoid
2141 constructing them from scratch every single time. */
2142 static GTY(()) tree select_struct
[2];
2144 /* Translate the SELECT CASE construct for CHARACTER case expressions.
2145 Instead of generating compares and jumps, it is far simpler to
2146 generate a data structure describing the cases in order and call a
2147 library subroutine that locates the right case.
2148 This is particularly true because this is the only case where we
2149 might have to dispose of a temporary.
2150 The library subroutine returns a pointer to jump to or NULL if no
2151 branches are to be taken. */
2154 gfc_trans_character_select (gfc_code
*code
)
2156 tree init
, end_label
, tmp
, type
, case_num
, label
, fndecl
;
2157 stmtblock_t block
, body
;
2162 vec
<constructor_elt
, va_gc
> *inits
= NULL
;
2164 tree pchartype
= gfc_get_pchar_type (code
->expr1
->ts
.kind
);
2166 /* The jump table types are stored in static variables to avoid
2167 constructing them from scratch every single time. */
2168 static tree ss_string1
[2], ss_string1_len
[2];
2169 static tree ss_string2
[2], ss_string2_len
[2];
2170 static tree ss_target
[2];
2172 cp
= code
->block
->ext
.block
.case_list
;
2173 while (cp
->left
!= NULL
)
2176 /* Generate the body */
2177 gfc_start_block (&block
);
2178 gfc_init_se (&expr1se
, NULL
);
2179 gfc_conv_expr_reference (&expr1se
, code
->expr1
);
2181 gfc_add_block_to_block (&block
, &expr1se
.pre
);
2183 end_label
= gfc_build_label_decl (NULL_TREE
);
2185 gfc_init_block (&body
);
2187 /* Attempt to optimize length 1 selects. */
2188 if (integer_onep (expr1se
.string_length
))
2190 for (d
= cp
; d
; d
= d
->right
)
2195 gcc_assert (d
->low
->expr_type
== EXPR_CONSTANT
2196 && d
->low
->ts
.type
== BT_CHARACTER
);
2197 if (d
->low
->value
.character
.length
> 1)
2199 for (i
= 1; i
< d
->low
->value
.character
.length
; i
++)
2200 if (d
->low
->value
.character
.string
[i
] != ' ')
2202 if (i
!= d
->low
->value
.character
.length
)
2204 if (optimize
&& d
->high
&& i
== 1)
2206 gcc_assert (d
->high
->expr_type
== EXPR_CONSTANT
2207 && d
->high
->ts
.type
== BT_CHARACTER
);
2208 if (d
->high
->value
.character
.length
> 1
2209 && (d
->low
->value
.character
.string
[0]
2210 == d
->high
->value
.character
.string
[0])
2211 && d
->high
->value
.character
.string
[1] != ' '
2212 && ((d
->low
->value
.character
.string
[1] < ' ')
2213 == (d
->high
->value
.character
.string
[1]
2223 gcc_assert (d
->high
->expr_type
== EXPR_CONSTANT
2224 && d
->high
->ts
.type
== BT_CHARACTER
);
2225 if (d
->high
->value
.character
.length
> 1)
2227 for (i
= 1; i
< d
->high
->value
.character
.length
; i
++)
2228 if (d
->high
->value
.character
.string
[i
] != ' ')
2230 if (i
!= d
->high
->value
.character
.length
)
2237 tree ctype
= gfc_get_char_type (code
->expr1
->ts
.kind
);
2239 for (c
= code
->block
; c
; c
= c
->block
)
2241 for (cp
= c
->ext
.block
.case_list
; cp
; cp
= cp
->next
)
2247 /* Assume it's the default case. */
2248 low
= high
= NULL_TREE
;
2252 /* CASE ('ab') or CASE ('ab':'az') will never match
2253 any length 1 character. */
2254 if (cp
->low
->value
.character
.length
> 1
2255 && cp
->low
->value
.character
.string
[1] != ' ')
2258 if (cp
->low
->value
.character
.length
> 0)
2259 r
= cp
->low
->value
.character
.string
[0];
2262 low
= build_int_cst (ctype
, r
);
2264 /* If there's only a lower bound, set the high bound
2265 to the maximum value of the case expression. */
2267 high
= TYPE_MAX_VALUE (ctype
);
2273 || (cp
->low
->value
.character
.string
[0]
2274 != cp
->high
->value
.character
.string
[0]))
2276 if (cp
->high
->value
.character
.length
> 0)
2277 r
= cp
->high
->value
.character
.string
[0];
2280 high
= build_int_cst (ctype
, r
);
2283 /* Unbounded case. */
2285 low
= TYPE_MIN_VALUE (ctype
);
2288 /* Build a label. */
2289 label
= gfc_build_label_decl (NULL_TREE
);
2291 /* Add this case label.
2292 Add parameter 'label', make it match GCC backend. */
2293 tmp
= build_case_label (low
, high
, label
);
2294 gfc_add_expr_to_block (&body
, tmp
);
2297 /* Add the statements for this case. */
2298 tmp
= gfc_trans_code (c
->next
);
2299 gfc_add_expr_to_block (&body
, tmp
);
2301 /* Break to the end of the construct. */
2302 tmp
= build1_v (GOTO_EXPR
, end_label
);
2303 gfc_add_expr_to_block (&body
, tmp
);
2306 tmp
= gfc_string_to_single_character (expr1se
.string_length
,
2308 code
->expr1
->ts
.kind
);
2309 case_num
= gfc_create_var (ctype
, "case_num");
2310 gfc_add_modify (&block
, case_num
, tmp
);
2312 gfc_add_block_to_block (&block
, &expr1se
.post
);
2314 tmp
= gfc_finish_block (&body
);
2315 tmp
= fold_build3_loc (input_location
, SWITCH_EXPR
, NULL_TREE
,
2316 case_num
, tmp
, NULL_TREE
);
2317 gfc_add_expr_to_block (&block
, tmp
);
2319 tmp
= build1_v (LABEL_EXPR
, end_label
);
2320 gfc_add_expr_to_block (&block
, tmp
);
2322 return gfc_finish_block (&block
);
2326 if (code
->expr1
->ts
.kind
== 1)
2328 else if (code
->expr1
->ts
.kind
== 4)
2333 if (select_struct
[k
] == NULL
)
2336 select_struct
[k
] = make_node (RECORD_TYPE
);
2338 if (code
->expr1
->ts
.kind
== 1)
2339 TYPE_NAME (select_struct
[k
]) = get_identifier ("_jump_struct_char1");
2340 else if (code
->expr1
->ts
.kind
== 4)
2341 TYPE_NAME (select_struct
[k
]) = get_identifier ("_jump_struct_char4");
2346 #define ADD_FIELD(NAME, TYPE) \
2347 ss_##NAME[k] = gfc_add_field_to_struct (select_struct[k], \
2348 get_identifier (stringize(NAME)), \
2352 ADD_FIELD (string1
, pchartype
);
2353 ADD_FIELD (string1_len
, gfc_charlen_type_node
);
2355 ADD_FIELD (string2
, pchartype
);
2356 ADD_FIELD (string2_len
, gfc_charlen_type_node
);
2358 ADD_FIELD (target
, integer_type_node
);
2361 gfc_finish_type (select_struct
[k
]);
2365 for (d
= cp
; d
; d
= d
->right
)
2368 for (c
= code
->block
; c
; c
= c
->block
)
2370 for (d
= c
->ext
.block
.case_list
; d
; d
= d
->next
)
2372 label
= gfc_build_label_decl (NULL_TREE
);
2373 tmp
= build_case_label ((d
->low
== NULL
&& d
->high
== NULL
)
2375 : build_int_cst (integer_type_node
, d
->n
),
2377 gfc_add_expr_to_block (&body
, tmp
);
2380 tmp
= gfc_trans_code (c
->next
);
2381 gfc_add_expr_to_block (&body
, tmp
);
2383 tmp
= build1_v (GOTO_EXPR
, end_label
);
2384 gfc_add_expr_to_block (&body
, tmp
);
2387 /* Generate the structure describing the branches */
2388 for (d
= cp
; d
; d
= d
->right
)
2390 vec
<constructor_elt
, va_gc
> *node
= NULL
;
2392 gfc_init_se (&se
, NULL
);
2396 CONSTRUCTOR_APPEND_ELT (node
, ss_string1
[k
], null_pointer_node
);
2397 CONSTRUCTOR_APPEND_ELT (node
, ss_string1_len
[k
], integer_zero_node
);
2401 gfc_conv_expr_reference (&se
, d
->low
);
2403 CONSTRUCTOR_APPEND_ELT (node
, ss_string1
[k
], se
.expr
);
2404 CONSTRUCTOR_APPEND_ELT (node
, ss_string1_len
[k
], se
.string_length
);
2407 if (d
->high
== NULL
)
2409 CONSTRUCTOR_APPEND_ELT (node
, ss_string2
[k
], null_pointer_node
);
2410 CONSTRUCTOR_APPEND_ELT (node
, ss_string2_len
[k
], integer_zero_node
);
2414 gfc_init_se (&se
, NULL
);
2415 gfc_conv_expr_reference (&se
, d
->high
);
2417 CONSTRUCTOR_APPEND_ELT (node
, ss_string2
[k
], se
.expr
);
2418 CONSTRUCTOR_APPEND_ELT (node
, ss_string2_len
[k
], se
.string_length
);
2421 CONSTRUCTOR_APPEND_ELT (node
, ss_target
[k
],
2422 build_int_cst (integer_type_node
, d
->n
));
2424 tmp
= build_constructor (select_struct
[k
], node
);
2425 CONSTRUCTOR_APPEND_ELT (inits
, NULL_TREE
, tmp
);
2428 type
= build_array_type (select_struct
[k
],
2429 build_index_type (size_int (n
-1)));
2431 init
= build_constructor (type
, inits
);
2432 TREE_CONSTANT (init
) = 1;
2433 TREE_STATIC (init
) = 1;
2434 /* Create a static variable to hold the jump table. */
2435 tmp
= gfc_create_var (type
, "jumptable");
2436 TREE_CONSTANT (tmp
) = 1;
2437 TREE_STATIC (tmp
) = 1;
2438 TREE_READONLY (tmp
) = 1;
2439 DECL_INITIAL (tmp
) = init
;
2442 /* Build the library call */
2443 init
= gfc_build_addr_expr (pvoid_type_node
, init
);
2445 if (code
->expr1
->ts
.kind
== 1)
2446 fndecl
= gfor_fndecl_select_string
;
2447 else if (code
->expr1
->ts
.kind
== 4)
2448 fndecl
= gfor_fndecl_select_string_char4
;
2452 tmp
= build_call_expr_loc (input_location
,
2454 build_int_cst (gfc_charlen_type_node
, n
),
2455 expr1se
.expr
, expr1se
.string_length
);
2456 case_num
= gfc_create_var (integer_type_node
, "case_num");
2457 gfc_add_modify (&block
, case_num
, tmp
);
2459 gfc_add_block_to_block (&block
, &expr1se
.post
);
2461 tmp
= gfc_finish_block (&body
);
2462 tmp
= fold_build3_loc (input_location
, SWITCH_EXPR
, NULL_TREE
,
2463 case_num
, tmp
, NULL_TREE
);
2464 gfc_add_expr_to_block (&block
, tmp
);
2466 tmp
= build1_v (LABEL_EXPR
, end_label
);
2467 gfc_add_expr_to_block (&block
, tmp
);
2469 return gfc_finish_block (&block
);
2473 /* Translate the three variants of the SELECT CASE construct.
2475 SELECT CASEs with INTEGER case expressions can be translated to an
2476 equivalent GENERIC switch statement, and for LOGICAL case
2477 expressions we build one or two if-else compares.
2479 SELECT CASEs with CHARACTER case expressions are a whole different
2480 story, because they don't exist in GENERIC. So we sort them and
2481 do a binary search at runtime.
2483 Fortran has no BREAK statement, and it does not allow jumps from
2484 one case block to another. That makes things a lot easier for
2488 gfc_trans_select (gfc_code
* code
)
2494 gcc_assert (code
&& code
->expr1
);
2495 gfc_init_block (&block
);
2497 /* Build the exit label and hang it in. */
2498 exit_label
= gfc_build_label_decl (NULL_TREE
);
2499 code
->exit_label
= exit_label
;
2501 /* Empty SELECT constructs are legal. */
2502 if (code
->block
== NULL
)
2503 body
= build_empty_stmt (input_location
);
2505 /* Select the correct translation function. */
2507 switch (code
->expr1
->ts
.type
)
2510 body
= gfc_trans_logical_select (code
);
2514 body
= gfc_trans_integer_select (code
);
2518 body
= gfc_trans_character_select (code
);
2522 gfc_internal_error ("gfc_trans_select(): Bad type for case expr.");
2526 /* Build everything together. */
2527 gfc_add_expr_to_block (&block
, body
);
2528 gfc_add_expr_to_block (&block
, build1_v (LABEL_EXPR
, exit_label
));
2530 return gfc_finish_block (&block
);
2534 /* Traversal function to substitute a replacement symtree if the symbol
2535 in the expression is the same as that passed. f == 2 signals that
2536 that variable itself is not to be checked - only the references.
2537 This group of functions is used when the variable expression in a
2538 FORALL assignment has internal references. For example:
2539 FORALL (i = 1:4) p(p(i)) = i
2540 The only recourse here is to store a copy of 'p' for the index
2543 static gfc_symtree
*new_symtree
;
2544 static gfc_symtree
*old_symtree
;
2547 forall_replace (gfc_expr
*expr
, gfc_symbol
*sym
, int *f
)
2549 if (expr
->expr_type
!= EXPR_VARIABLE
)
2554 else if (expr
->symtree
->n
.sym
== sym
)
2555 expr
->symtree
= new_symtree
;
2561 forall_replace_symtree (gfc_expr
*e
, gfc_symbol
*sym
, int f
)
2563 gfc_traverse_expr (e
, sym
, forall_replace
, f
);
2567 forall_restore (gfc_expr
*expr
,
2568 gfc_symbol
*sym ATTRIBUTE_UNUSED
,
2569 int *f ATTRIBUTE_UNUSED
)
2571 if (expr
->expr_type
!= EXPR_VARIABLE
)
2574 if (expr
->symtree
== new_symtree
)
2575 expr
->symtree
= old_symtree
;
2581 forall_restore_symtree (gfc_expr
*e
)
2583 gfc_traverse_expr (e
, NULL
, forall_restore
, 0);
2587 forall_make_variable_temp (gfc_code
*c
, stmtblock_t
*pre
, stmtblock_t
*post
)
2592 gfc_symbol
*new_sym
;
2593 gfc_symbol
*old_sym
;
2597 /* Build a copy of the lvalue. */
2598 old_symtree
= c
->expr1
->symtree
;
2599 old_sym
= old_symtree
->n
.sym
;
2600 e
= gfc_lval_expr_from_sym (old_sym
);
2601 if (old_sym
->attr
.dimension
)
2603 gfc_init_se (&tse
, NULL
);
2604 gfc_conv_subref_array_arg (&tse
, e
, 0, INTENT_IN
, false);
2605 gfc_add_block_to_block (pre
, &tse
.pre
);
2606 gfc_add_block_to_block (post
, &tse
.post
);
2607 tse
.expr
= build_fold_indirect_ref_loc (input_location
, tse
.expr
);
2609 if (e
->ts
.type
!= BT_CHARACTER
)
2611 /* Use the variable offset for the temporary. */
2612 tmp
= gfc_conv_array_offset (old_sym
->backend_decl
);
2613 gfc_conv_descriptor_offset_set (pre
, tse
.expr
, tmp
);
2618 gfc_init_se (&tse
, NULL
);
2619 gfc_init_se (&rse
, NULL
);
2620 gfc_conv_expr (&rse
, e
);
2621 if (e
->ts
.type
== BT_CHARACTER
)
2623 tse
.string_length
= rse
.string_length
;
2624 tmp
= gfc_get_character_type_len (gfc_default_character_kind
,
2626 tse
.expr
= gfc_conv_string_tmp (&tse
, build_pointer_type (tmp
),
2628 gfc_add_block_to_block (pre
, &tse
.pre
);
2629 gfc_add_block_to_block (post
, &tse
.post
);
2633 tmp
= gfc_typenode_for_spec (&e
->ts
);
2634 tse
.expr
= gfc_create_var (tmp
, "temp");
2637 tmp
= gfc_trans_scalar_assign (&tse
, &rse
, e
->ts
, true,
2638 e
->expr_type
== EXPR_VARIABLE
, true);
2639 gfc_add_expr_to_block (pre
, tmp
);
2643 /* Create a new symbol to represent the lvalue. */
2644 new_sym
= gfc_new_symbol (old_sym
->name
, NULL
);
2645 new_sym
->ts
= old_sym
->ts
;
2646 new_sym
->attr
.referenced
= 1;
2647 new_sym
->attr
.temporary
= 1;
2648 new_sym
->attr
.dimension
= old_sym
->attr
.dimension
;
2649 new_sym
->attr
.flavor
= old_sym
->attr
.flavor
;
2651 /* Use the temporary as the backend_decl. */
2652 new_sym
->backend_decl
= tse
.expr
;
2654 /* Create a fake symtree for it. */
2656 new_symtree
= gfc_new_symtree (&root
, old_sym
->name
);
2657 new_symtree
->n
.sym
= new_sym
;
2658 gcc_assert (new_symtree
== root
);
2660 /* Go through the expression reference replacing the old_symtree
2662 forall_replace_symtree (c
->expr1
, old_sym
, 2);
2664 /* Now we have made this temporary, we might as well use it for
2665 the right hand side. */
2666 forall_replace_symtree (c
->expr2
, old_sym
, 1);
2670 /* Handles dependencies in forall assignments. */
2672 check_forall_dependencies (gfc_code
*c
, stmtblock_t
*pre
, stmtblock_t
*post
)
2679 lsym
= c
->expr1
->symtree
->n
.sym
;
2680 need_temp
= gfc_check_dependency (c
->expr1
, c
->expr2
, 0);
2682 /* Now check for dependencies within the 'variable'
2683 expression itself. These are treated by making a complete
2684 copy of variable and changing all the references to it
2685 point to the copy instead. Note that the shallow copy of
2686 the variable will not suffice for derived types with
2687 pointer components. We therefore leave these to their
2689 if (lsym
->ts
.type
== BT_DERIVED
2690 && lsym
->ts
.u
.derived
->attr
.pointer_comp
)
2694 if (find_forall_index (c
->expr1
, lsym
, 2))
2696 forall_make_variable_temp (c
, pre
, post
);
2700 /* Substrings with dependencies are treated in the same
2702 if (c
->expr1
->ts
.type
== BT_CHARACTER
2704 && c
->expr2
->expr_type
== EXPR_VARIABLE
2705 && lsym
== c
->expr2
->symtree
->n
.sym
)
2707 for (lref
= c
->expr1
->ref
; lref
; lref
= lref
->next
)
2708 if (lref
->type
== REF_SUBSTRING
)
2710 for (rref
= c
->expr2
->ref
; rref
; rref
= rref
->next
)
2711 if (rref
->type
== REF_SUBSTRING
)
2715 && gfc_dep_compare_expr (rref
->u
.ss
.start
, lref
->u
.ss
.start
) < 0)
2717 forall_make_variable_temp (c
, pre
, post
);
2726 cleanup_forall_symtrees (gfc_code
*c
)
2728 forall_restore_symtree (c
->expr1
);
2729 forall_restore_symtree (c
->expr2
);
2730 free (new_symtree
->n
.sym
);
2735 /* Generate the loops for a FORALL block, specified by FORALL_TMP. BODY
2736 is the contents of the FORALL block/stmt to be iterated. MASK_FLAG
2737 indicates whether we should generate code to test the FORALLs mask
2738 array. OUTER is the loop header to be used for initializing mask
2741 The generated loop format is:
2742 count = (end - start + step) / step
2755 gfc_trans_forall_loop (forall_info
*forall_tmp
, tree body
,
2756 int mask_flag
, stmtblock_t
*outer
)
2764 tree var
, start
, end
, step
;
2767 /* Initialize the mask index outside the FORALL nest. */
2768 if (mask_flag
&& forall_tmp
->mask
)
2769 gfc_add_modify (outer
, forall_tmp
->maskindex
, gfc_index_zero_node
);
2771 iter
= forall_tmp
->this_loop
;
2772 nvar
= forall_tmp
->nvar
;
2773 for (n
= 0; n
< nvar
; n
++)
2776 start
= iter
->start
;
2780 exit_label
= gfc_build_label_decl (NULL_TREE
);
2781 TREE_USED (exit_label
) = 1;
2783 /* The loop counter. */
2784 count
= gfc_create_var (TREE_TYPE (var
), "count");
2786 /* The body of the loop. */
2787 gfc_init_block (&block
);
2789 /* The exit condition. */
2790 cond
= fold_build2_loc (input_location
, LE_EXPR
, boolean_type_node
,
2791 count
, build_int_cst (TREE_TYPE (count
), 0));
2792 if (forall_tmp
->do_concurrent
)
2793 cond
= build2 (ANNOTATE_EXPR
, TREE_TYPE (cond
), cond
,
2794 build_int_cst (integer_type_node
,
2795 annot_expr_ivdep_kind
));
2797 tmp
= build1_v (GOTO_EXPR
, exit_label
);
2798 tmp
= fold_build3_loc (input_location
, COND_EXPR
, void_type_node
,
2799 cond
, tmp
, build_empty_stmt (input_location
));
2800 gfc_add_expr_to_block (&block
, tmp
);
2802 /* The main loop body. */
2803 gfc_add_expr_to_block (&block
, body
);
2805 /* Increment the loop variable. */
2806 tmp
= fold_build2_loc (input_location
, PLUS_EXPR
, TREE_TYPE (var
), var
,
2808 gfc_add_modify (&block
, var
, tmp
);
2810 /* Advance to the next mask element. Only do this for the
2812 if (n
== 0 && mask_flag
&& forall_tmp
->mask
)
2814 tree maskindex
= forall_tmp
->maskindex
;
2815 tmp
= fold_build2_loc (input_location
, PLUS_EXPR
, gfc_array_index_type
,
2816 maskindex
, gfc_index_one_node
);
2817 gfc_add_modify (&block
, maskindex
, tmp
);
2820 /* Decrement the loop counter. */
2821 tmp
= fold_build2_loc (input_location
, MINUS_EXPR
, TREE_TYPE (var
), count
,
2822 build_int_cst (TREE_TYPE (var
), 1));
2823 gfc_add_modify (&block
, count
, tmp
);
2825 body
= gfc_finish_block (&block
);
2827 /* Loop var initialization. */
2828 gfc_init_block (&block
);
2829 gfc_add_modify (&block
, var
, start
);
2832 /* Initialize the loop counter. */
2833 tmp
= fold_build2_loc (input_location
, MINUS_EXPR
, TREE_TYPE (var
), step
,
2835 tmp
= fold_build2_loc (input_location
, PLUS_EXPR
, TREE_TYPE (var
), end
,
2837 tmp
= fold_build2_loc (input_location
, TRUNC_DIV_EXPR
, TREE_TYPE (var
),
2839 gfc_add_modify (&block
, count
, tmp
);
2841 /* The loop expression. */
2842 tmp
= build1_v (LOOP_EXPR
, body
);
2843 gfc_add_expr_to_block (&block
, tmp
);
2845 /* The exit label. */
2846 tmp
= build1_v (LABEL_EXPR
, exit_label
);
2847 gfc_add_expr_to_block (&block
, tmp
);
2849 body
= gfc_finish_block (&block
);
2856 /* Generate the body and loops according to MASK_FLAG. If MASK_FLAG
2857 is nonzero, the body is controlled by all masks in the forall nest.
2858 Otherwise, the innermost loop is not controlled by it's mask. This
2859 is used for initializing that mask. */
2862 gfc_trans_nested_forall_loop (forall_info
* nested_forall_info
, tree body
,
2867 forall_info
*forall_tmp
;
2868 tree mask
, maskindex
;
2870 gfc_start_block (&header
);
2872 forall_tmp
= nested_forall_info
;
2873 while (forall_tmp
!= NULL
)
2875 /* Generate body with masks' control. */
2878 mask
= forall_tmp
->mask
;
2879 maskindex
= forall_tmp
->maskindex
;
2881 /* If a mask was specified make the assignment conditional. */
2884 tmp
= gfc_build_array_ref (mask
, maskindex
, NULL
);
2885 body
= build3_v (COND_EXPR
, tmp
, body
,
2886 build_empty_stmt (input_location
));
2889 body
= gfc_trans_forall_loop (forall_tmp
, body
, mask_flag
, &header
);
2890 forall_tmp
= forall_tmp
->prev_nest
;
2894 gfc_add_expr_to_block (&header
, body
);
2895 return gfc_finish_block (&header
);
2899 /* Allocate data for holding a temporary array. Returns either a local
2900 temporary array or a pointer variable. */
2903 gfc_do_allocate (tree bytesize
, tree size
, tree
* pdata
, stmtblock_t
* pblock
,
2910 if (INTEGER_CST_P (size
))
2911 tmp
= fold_build2_loc (input_location
, MINUS_EXPR
, gfc_array_index_type
,
2912 size
, gfc_index_one_node
);
2916 type
= build_range_type (gfc_array_index_type
, gfc_index_zero_node
, tmp
);
2917 type
= build_array_type (elem_type
, type
);
2918 if (gfc_can_put_var_on_stack (bytesize
))
2920 gcc_assert (INTEGER_CST_P (size
));
2921 tmpvar
= gfc_create_var (type
, "temp");
2926 tmpvar
= gfc_create_var (build_pointer_type (type
), "temp");
2927 *pdata
= convert (pvoid_type_node
, tmpvar
);
2929 tmp
= gfc_call_malloc (pblock
, TREE_TYPE (tmpvar
), bytesize
);
2930 gfc_add_modify (pblock
, tmpvar
, tmp
);
2936 /* Generate codes to copy the temporary to the actual lhs. */
2939 generate_loop_for_temp_to_lhs (gfc_expr
*expr
, tree tmp1
, tree count3
,
2940 tree count1
, tree wheremask
, bool invert
)
2944 stmtblock_t block
, body
;
2950 lss
= gfc_walk_expr (expr
);
2952 if (lss
== gfc_ss_terminator
)
2954 gfc_start_block (&block
);
2956 gfc_init_se (&lse
, NULL
);
2958 /* Translate the expression. */
2959 gfc_conv_expr (&lse
, expr
);
2961 /* Form the expression for the temporary. */
2962 tmp
= gfc_build_array_ref (tmp1
, count1
, NULL
);
2964 /* Use the scalar assignment as is. */
2965 gfc_add_block_to_block (&block
, &lse
.pre
);
2966 gfc_add_modify (&block
, lse
.expr
, tmp
);
2967 gfc_add_block_to_block (&block
, &lse
.post
);
2969 /* Increment the count1. */
2970 tmp
= fold_build2_loc (input_location
, PLUS_EXPR
, TREE_TYPE (count1
),
2971 count1
, gfc_index_one_node
);
2972 gfc_add_modify (&block
, count1
, tmp
);
2974 tmp
= gfc_finish_block (&block
);
2978 gfc_start_block (&block
);
2980 gfc_init_loopinfo (&loop1
);
2981 gfc_init_se (&rse
, NULL
);
2982 gfc_init_se (&lse
, NULL
);
2984 /* Associate the lss with the loop. */
2985 gfc_add_ss_to_loop (&loop1
, lss
);
2987 /* Calculate the bounds of the scalarization. */
2988 gfc_conv_ss_startstride (&loop1
);
2989 /* Setup the scalarizing loops. */
2990 gfc_conv_loop_setup (&loop1
, &expr
->where
);
2992 gfc_mark_ss_chain_used (lss
, 1);
2994 /* Start the scalarized loop body. */
2995 gfc_start_scalarized_body (&loop1
, &body
);
2997 /* Setup the gfc_se structures. */
2998 gfc_copy_loopinfo_to_se (&lse
, &loop1
);
3001 /* Form the expression of the temporary. */
3002 if (lss
!= gfc_ss_terminator
)
3003 rse
.expr
= gfc_build_array_ref (tmp1
, count1
, NULL
);
3004 /* Translate expr. */
3005 gfc_conv_expr (&lse
, expr
);
3007 /* Use the scalar assignment. */
3008 rse
.string_length
= lse
.string_length
;
3009 tmp
= gfc_trans_scalar_assign (&lse
, &rse
, expr
->ts
, false, true, true);
3011 /* Form the mask expression according to the mask tree list. */
3014 wheremaskexpr
= gfc_build_array_ref (wheremask
, count3
, NULL
);
3016 wheremaskexpr
= fold_build1_loc (input_location
, TRUTH_NOT_EXPR
,
3017 TREE_TYPE (wheremaskexpr
),
3019 tmp
= fold_build3_loc (input_location
, COND_EXPR
, void_type_node
,
3021 build_empty_stmt (input_location
));
3024 gfc_add_expr_to_block (&body
, tmp
);
3026 /* Increment count1. */
3027 tmp
= fold_build2_loc (input_location
, PLUS_EXPR
, gfc_array_index_type
,
3028 count1
, gfc_index_one_node
);
3029 gfc_add_modify (&body
, count1
, tmp
);
3031 /* Increment count3. */
3034 tmp
= fold_build2_loc (input_location
, PLUS_EXPR
,
3035 gfc_array_index_type
, count3
,
3036 gfc_index_one_node
);
3037 gfc_add_modify (&body
, count3
, tmp
);
3040 /* Generate the copying loops. */
3041 gfc_trans_scalarizing_loops (&loop1
, &body
);
3042 gfc_add_block_to_block (&block
, &loop1
.pre
);
3043 gfc_add_block_to_block (&block
, &loop1
.post
);
3044 gfc_cleanup_loop (&loop1
);
3046 tmp
= gfc_finish_block (&block
);
3052 /* Generate codes to copy rhs to the temporary. TMP1 is the address of
3053 temporary, LSS and RSS are formed in function compute_inner_temp_size(),
3054 and should not be freed. WHEREMASK is the conditional execution mask
3055 whose sense may be inverted by INVERT. */
3058 generate_loop_for_rhs_to_temp (gfc_expr
*expr2
, tree tmp1
, tree count3
,
3059 tree count1
, gfc_ss
*lss
, gfc_ss
*rss
,
3060 tree wheremask
, bool invert
)
3062 stmtblock_t block
, body1
;
3069 gfc_start_block (&block
);
3071 gfc_init_se (&rse
, NULL
);
3072 gfc_init_se (&lse
, NULL
);
3074 if (lss
== gfc_ss_terminator
)
3076 gfc_init_block (&body1
);
3077 gfc_conv_expr (&rse
, expr2
);
3078 lse
.expr
= gfc_build_array_ref (tmp1
, count1
, NULL
);
3082 /* Initialize the loop. */
3083 gfc_init_loopinfo (&loop
);
3085 /* We may need LSS to determine the shape of the expression. */
3086 gfc_add_ss_to_loop (&loop
, lss
);
3087 gfc_add_ss_to_loop (&loop
, rss
);
3089 gfc_conv_ss_startstride (&loop
);
3090 gfc_conv_loop_setup (&loop
, &expr2
->where
);
3092 gfc_mark_ss_chain_used (rss
, 1);
3093 /* Start the loop body. */
3094 gfc_start_scalarized_body (&loop
, &body1
);
3096 /* Translate the expression. */
3097 gfc_copy_loopinfo_to_se (&rse
, &loop
);
3099 gfc_conv_expr (&rse
, expr2
);
3101 /* Form the expression of the temporary. */
3102 lse
.expr
= gfc_build_array_ref (tmp1
, count1
, NULL
);
3105 /* Use the scalar assignment. */
3106 lse
.string_length
= rse
.string_length
;
3107 tmp
= gfc_trans_scalar_assign (&lse
, &rse
, expr2
->ts
, true,
3108 expr2
->expr_type
== EXPR_VARIABLE
, true);
3110 /* Form the mask expression according to the mask tree list. */
3113 wheremaskexpr
= gfc_build_array_ref (wheremask
, count3
, NULL
);
3115 wheremaskexpr
= fold_build1_loc (input_location
, TRUTH_NOT_EXPR
,
3116 TREE_TYPE (wheremaskexpr
),
3118 tmp
= fold_build3_loc (input_location
, COND_EXPR
, void_type_node
,
3120 build_empty_stmt (input_location
));
3123 gfc_add_expr_to_block (&body1
, tmp
);
3125 if (lss
== gfc_ss_terminator
)
3127 gfc_add_block_to_block (&block
, &body1
);
3129 /* Increment count1. */
3130 tmp
= fold_build2_loc (input_location
, PLUS_EXPR
, TREE_TYPE (count1
),
3131 count1
, gfc_index_one_node
);
3132 gfc_add_modify (&block
, count1
, tmp
);
3136 /* Increment count1. */
3137 tmp
= fold_build2_loc (input_location
, PLUS_EXPR
, gfc_array_index_type
,
3138 count1
, gfc_index_one_node
);
3139 gfc_add_modify (&body1
, count1
, tmp
);
3141 /* Increment count3. */
3144 tmp
= fold_build2_loc (input_location
, PLUS_EXPR
,
3145 gfc_array_index_type
,
3146 count3
, gfc_index_one_node
);
3147 gfc_add_modify (&body1
, count3
, tmp
);
3150 /* Generate the copying loops. */
3151 gfc_trans_scalarizing_loops (&loop
, &body1
);
3153 gfc_add_block_to_block (&block
, &loop
.pre
);
3154 gfc_add_block_to_block (&block
, &loop
.post
);
3156 gfc_cleanup_loop (&loop
);
3157 /* TODO: Reuse lss and rss when copying temp->lhs. Need to be careful
3158 as tree nodes in SS may not be valid in different scope. */
3161 tmp
= gfc_finish_block (&block
);
3166 /* Calculate the size of temporary needed in the assignment inside forall.
3167 LSS and RSS are filled in this function. */
3170 compute_inner_temp_size (gfc_expr
*expr1
, gfc_expr
*expr2
,
3171 stmtblock_t
* pblock
,
3172 gfc_ss
**lss
, gfc_ss
**rss
)
3180 *lss
= gfc_walk_expr (expr1
);
3183 size
= gfc_index_one_node
;
3184 if (*lss
!= gfc_ss_terminator
)
3186 gfc_init_loopinfo (&loop
);
3188 /* Walk the RHS of the expression. */
3189 *rss
= gfc_walk_expr (expr2
);
3190 if (*rss
== gfc_ss_terminator
)
3191 /* The rhs is scalar. Add a ss for the expression. */
3192 *rss
= gfc_get_scalar_ss (gfc_ss_terminator
, expr2
);
3194 /* Associate the SS with the loop. */
3195 gfc_add_ss_to_loop (&loop
, *lss
);
3196 /* We don't actually need to add the rhs at this point, but it might
3197 make guessing the loop bounds a bit easier. */
3198 gfc_add_ss_to_loop (&loop
, *rss
);
3200 /* We only want the shape of the expression, not rest of the junk
3201 generated by the scalarizer. */
3202 loop
.array_parameter
= 1;
3204 /* Calculate the bounds of the scalarization. */
3205 save_flag
= gfc_option
.rtcheck
;
3206 gfc_option
.rtcheck
&= ~GFC_RTCHECK_BOUNDS
;
3207 gfc_conv_ss_startstride (&loop
);
3208 gfc_option
.rtcheck
= save_flag
;
3209 gfc_conv_loop_setup (&loop
, &expr2
->where
);
3211 /* Figure out how many elements we need. */
3212 for (i
= 0; i
< loop
.dimen
; i
++)
3214 tmp
= fold_build2_loc (input_location
, MINUS_EXPR
,
3215 gfc_array_index_type
,
3216 gfc_index_one_node
, loop
.from
[i
]);
3217 tmp
= fold_build2_loc (input_location
, PLUS_EXPR
,
3218 gfc_array_index_type
, tmp
, loop
.to
[i
]);
3219 size
= fold_build2_loc (input_location
, MULT_EXPR
,
3220 gfc_array_index_type
, size
, tmp
);
3222 gfc_add_block_to_block (pblock
, &loop
.pre
);
3223 size
= gfc_evaluate_now (size
, pblock
);
3224 gfc_add_block_to_block (pblock
, &loop
.post
);
3226 /* TODO: write a function that cleans up a loopinfo without freeing
3227 the SS chains. Currently a NOP. */
3234 /* Calculate the overall iterator number of the nested forall construct.
3235 This routine actually calculates the number of times the body of the
3236 nested forall specified by NESTED_FORALL_INFO is executed and multiplies
3237 that by the expression INNER_SIZE. The BLOCK argument specifies the
3238 block in which to calculate the result, and the optional INNER_SIZE_BODY
3239 argument contains any statements that need to executed (inside the loop)
3240 to initialize or calculate INNER_SIZE. */
3243 compute_overall_iter_number (forall_info
*nested_forall_info
, tree inner_size
,
3244 stmtblock_t
*inner_size_body
, stmtblock_t
*block
)
3246 forall_info
*forall_tmp
= nested_forall_info
;
3250 /* We can eliminate the innermost unconditional loops with constant
3252 if (INTEGER_CST_P (inner_size
))
3255 && !forall_tmp
->mask
3256 && INTEGER_CST_P (forall_tmp
->size
))
3258 inner_size
= fold_build2_loc (input_location
, MULT_EXPR
,
3259 gfc_array_index_type
,
3260 inner_size
, forall_tmp
->size
);
3261 forall_tmp
= forall_tmp
->prev_nest
;
3264 /* If there are no loops left, we have our constant result. */
3269 /* Otherwise, create a temporary variable to compute the result. */
3270 number
= gfc_create_var (gfc_array_index_type
, "num");
3271 gfc_add_modify (block
, number
, gfc_index_zero_node
);
3273 gfc_start_block (&body
);
3274 if (inner_size_body
)
3275 gfc_add_block_to_block (&body
, inner_size_body
);
3277 tmp
= fold_build2_loc (input_location
, PLUS_EXPR
,
3278 gfc_array_index_type
, number
, inner_size
);
3281 gfc_add_modify (&body
, number
, tmp
);
3282 tmp
= gfc_finish_block (&body
);
3284 /* Generate loops. */
3285 if (forall_tmp
!= NULL
)
3286 tmp
= gfc_trans_nested_forall_loop (forall_tmp
, tmp
, 1);
3288 gfc_add_expr_to_block (block
, tmp
);
3294 /* Allocate temporary for forall construct. SIZE is the size of temporary
3295 needed. PTEMP1 is returned for space free. */
3298 allocate_temp_for_forall_nest_1 (tree type
, tree size
, stmtblock_t
* block
,
3305 unit
= fold_convert (gfc_array_index_type
, TYPE_SIZE_UNIT (type
));
3306 if (!integer_onep (unit
))
3307 bytesize
= fold_build2_loc (input_location
, MULT_EXPR
,
3308 gfc_array_index_type
, size
, unit
);
3313 tmp
= gfc_do_allocate (bytesize
, size
, ptemp1
, block
, type
);
3316 tmp
= build_fold_indirect_ref_loc (input_location
, tmp
);
3321 /* Allocate temporary for forall construct according to the information in
3322 nested_forall_info. INNER_SIZE is the size of temporary needed in the
3323 assignment inside forall. PTEMP1 is returned for space free. */
3326 allocate_temp_for_forall_nest (forall_info
* nested_forall_info
, tree type
,
3327 tree inner_size
, stmtblock_t
* inner_size_body
,
3328 stmtblock_t
* block
, tree
* ptemp1
)
3332 /* Calculate the total size of temporary needed in forall construct. */
3333 size
= compute_overall_iter_number (nested_forall_info
, inner_size
,
3334 inner_size_body
, block
);
3336 return allocate_temp_for_forall_nest_1 (type
, size
, block
, ptemp1
);
3340 /* Handle assignments inside forall which need temporary.
3342 forall (i=start:end:stride; maskexpr)
3345 (where e,f<i> are arbitrary expressions possibly involving i
3346 and there is a dependency between e<i> and f<i>)
3348 masktmp(:) = maskexpr(:)
3353 for (i = start; i <= end; i += stride)
3357 for (i = start; i <= end; i += stride)
3359 if (masktmp[maskindex++])
3360 tmp[count1++] = f<i>
3364 for (i = start; i <= end; i += stride)
3366 if (masktmp[maskindex++])
3367 e<i> = tmp[count1++]
3372 gfc_trans_assign_need_temp (gfc_expr
* expr1
, gfc_expr
* expr2
,
3373 tree wheremask
, bool invert
,
3374 forall_info
* nested_forall_info
,
3375 stmtblock_t
* block
)
3383 stmtblock_t inner_size_body
;
3385 /* Create vars. count1 is the current iterator number of the nested
3387 count1
= gfc_create_var (gfc_array_index_type
, "count1");
3389 /* Count is the wheremask index. */
3392 count
= gfc_create_var (gfc_array_index_type
, "count");
3393 gfc_add_modify (block
, count
, gfc_index_zero_node
);
3398 /* Initialize count1. */
3399 gfc_add_modify (block
, count1
, gfc_index_zero_node
);
3401 /* Calculate the size of temporary needed in the assignment. Return loop, lss
3402 and rss which are used in function generate_loop_for_rhs_to_temp(). */
3403 gfc_init_block (&inner_size_body
);
3404 inner_size
= compute_inner_temp_size (expr1
, expr2
, &inner_size_body
,
3407 /* The type of LHS. Used in function allocate_temp_for_forall_nest */
3408 if (expr1
->ts
.type
== BT_CHARACTER
&& expr1
->ts
.u
.cl
->length
)
3410 if (!expr1
->ts
.u
.cl
->backend_decl
)
3413 gfc_init_se (&tse
, NULL
);
3414 gfc_conv_expr (&tse
, expr1
->ts
.u
.cl
->length
);
3415 expr1
->ts
.u
.cl
->backend_decl
= tse
.expr
;
3417 type
= gfc_get_character_type_len (gfc_default_character_kind
,
3418 expr1
->ts
.u
.cl
->backend_decl
);
3421 type
= gfc_typenode_for_spec (&expr1
->ts
);
3423 /* Allocate temporary for nested forall construct according to the
3424 information in nested_forall_info and inner_size. */
3425 tmp1
= allocate_temp_for_forall_nest (nested_forall_info
, type
, inner_size
,
3426 &inner_size_body
, block
, &ptemp1
);
3428 /* Generate codes to copy rhs to the temporary . */
3429 tmp
= generate_loop_for_rhs_to_temp (expr2
, tmp1
, count
, count1
, lss
, rss
,
3432 /* Generate body and loops according to the information in
3433 nested_forall_info. */
3434 tmp
= gfc_trans_nested_forall_loop (nested_forall_info
, tmp
, 1);
3435 gfc_add_expr_to_block (block
, tmp
);
3438 gfc_add_modify (block
, count1
, gfc_index_zero_node
);
3442 gfc_add_modify (block
, count
, gfc_index_zero_node
);
3444 /* Generate codes to copy the temporary to lhs. */
3445 tmp
= generate_loop_for_temp_to_lhs (expr1
, tmp1
, count
, count1
,
3448 /* Generate body and loops according to the information in
3449 nested_forall_info. */
3450 tmp
= gfc_trans_nested_forall_loop (nested_forall_info
, tmp
, 1);
3451 gfc_add_expr_to_block (block
, tmp
);
3455 /* Free the temporary. */
3456 tmp
= gfc_call_free (ptemp1
);
3457 gfc_add_expr_to_block (block
, tmp
);
3462 /* Translate pointer assignment inside FORALL which need temporary. */
3465 gfc_trans_pointer_assign_need_temp (gfc_expr
* expr1
, gfc_expr
* expr2
,
3466 forall_info
* nested_forall_info
,
3467 stmtblock_t
* block
)
3474 gfc_array_info
*info
;
3481 tree tmp
, tmp1
, ptemp1
;
3483 count
= gfc_create_var (gfc_array_index_type
, "count");
3484 gfc_add_modify (block
, count
, gfc_index_zero_node
);
3486 inner_size
= gfc_index_one_node
;
3487 lss
= gfc_walk_expr (expr1
);
3488 rss
= gfc_walk_expr (expr2
);
3489 if (lss
== gfc_ss_terminator
)
3491 type
= gfc_typenode_for_spec (&expr1
->ts
);
3492 type
= build_pointer_type (type
);
3494 /* Allocate temporary for nested forall construct according to the
3495 information in nested_forall_info and inner_size. */
3496 tmp1
= allocate_temp_for_forall_nest (nested_forall_info
, type
,
3497 inner_size
, NULL
, block
, &ptemp1
);
3498 gfc_start_block (&body
);
3499 gfc_init_se (&lse
, NULL
);
3500 lse
.expr
= gfc_build_array_ref (tmp1
, count
, NULL
);
3501 gfc_init_se (&rse
, NULL
);
3502 rse
.want_pointer
= 1;
3503 gfc_conv_expr (&rse
, expr2
);
3504 gfc_add_block_to_block (&body
, &rse
.pre
);
3505 gfc_add_modify (&body
, lse
.expr
,
3506 fold_convert (TREE_TYPE (lse
.expr
), rse
.expr
));
3507 gfc_add_block_to_block (&body
, &rse
.post
);
3509 /* Increment count. */
3510 tmp
= fold_build2_loc (input_location
, PLUS_EXPR
, gfc_array_index_type
,
3511 count
, gfc_index_one_node
);
3512 gfc_add_modify (&body
, count
, tmp
);
3514 tmp
= gfc_finish_block (&body
);
3516 /* Generate body and loops according to the information in
3517 nested_forall_info. */
3518 tmp
= gfc_trans_nested_forall_loop (nested_forall_info
, tmp
, 1);
3519 gfc_add_expr_to_block (block
, tmp
);
3522 gfc_add_modify (block
, count
, gfc_index_zero_node
);
3524 gfc_start_block (&body
);
3525 gfc_init_se (&lse
, NULL
);
3526 gfc_init_se (&rse
, NULL
);
3527 rse
.expr
= gfc_build_array_ref (tmp1
, count
, NULL
);
3528 lse
.want_pointer
= 1;
3529 gfc_conv_expr (&lse
, expr1
);
3530 gfc_add_block_to_block (&body
, &lse
.pre
);
3531 gfc_add_modify (&body
, lse
.expr
, rse
.expr
);
3532 gfc_add_block_to_block (&body
, &lse
.post
);
3533 /* Increment count. */
3534 tmp
= fold_build2_loc (input_location
, PLUS_EXPR
, gfc_array_index_type
,
3535 count
, gfc_index_one_node
);
3536 gfc_add_modify (&body
, count
, tmp
);
3537 tmp
= gfc_finish_block (&body
);
3539 /* Generate body and loops according to the information in
3540 nested_forall_info. */
3541 tmp
= gfc_trans_nested_forall_loop (nested_forall_info
, tmp
, 1);
3542 gfc_add_expr_to_block (block
, tmp
);
3546 gfc_init_loopinfo (&loop
);
3548 /* Associate the SS with the loop. */
3549 gfc_add_ss_to_loop (&loop
, rss
);
3551 /* Setup the scalarizing loops and bounds. */
3552 gfc_conv_ss_startstride (&loop
);
3554 gfc_conv_loop_setup (&loop
, &expr2
->where
);
3556 info
= &rss
->info
->data
.array
;
3557 desc
= info
->descriptor
;
3559 /* Make a new descriptor. */
3560 parmtype
= gfc_get_element_type (TREE_TYPE (desc
));
3561 parmtype
= gfc_get_array_type_bounds (parmtype
, loop
.dimen
, 0,
3562 loop
.from
, loop
.to
, 1,
3563 GFC_ARRAY_UNKNOWN
, true);
3565 /* Allocate temporary for nested forall construct. */
3566 tmp1
= allocate_temp_for_forall_nest (nested_forall_info
, parmtype
,
3567 inner_size
, NULL
, block
, &ptemp1
);
3568 gfc_start_block (&body
);
3569 gfc_init_se (&lse
, NULL
);
3570 lse
.expr
= gfc_build_array_ref (tmp1
, count
, NULL
);
3571 lse
.direct_byref
= 1;
3572 gfc_conv_expr_descriptor (&lse
, expr2
);
3574 gfc_add_block_to_block (&body
, &lse
.pre
);
3575 gfc_add_block_to_block (&body
, &lse
.post
);
3577 /* Increment count. */
3578 tmp
= fold_build2_loc (input_location
, PLUS_EXPR
, gfc_array_index_type
,
3579 count
, gfc_index_one_node
);
3580 gfc_add_modify (&body
, count
, tmp
);
3582 tmp
= gfc_finish_block (&body
);
3584 /* Generate body and loops according to the information in
3585 nested_forall_info. */
3586 tmp
= gfc_trans_nested_forall_loop (nested_forall_info
, tmp
, 1);
3587 gfc_add_expr_to_block (block
, tmp
);
3590 gfc_add_modify (block
, count
, gfc_index_zero_node
);
3592 parm
= gfc_build_array_ref (tmp1
, count
, NULL
);
3593 gfc_init_se (&lse
, NULL
);
3594 gfc_conv_expr_descriptor (&lse
, expr1
);
3595 gfc_add_modify (&lse
.pre
, lse
.expr
, parm
);
3596 gfc_start_block (&body
);
3597 gfc_add_block_to_block (&body
, &lse
.pre
);
3598 gfc_add_block_to_block (&body
, &lse
.post
);
3600 /* Increment count. */
3601 tmp
= fold_build2_loc (input_location
, PLUS_EXPR
, gfc_array_index_type
,
3602 count
, gfc_index_one_node
);
3603 gfc_add_modify (&body
, count
, tmp
);
3605 tmp
= gfc_finish_block (&body
);
3607 tmp
= gfc_trans_nested_forall_loop (nested_forall_info
, tmp
, 1);
3608 gfc_add_expr_to_block (block
, tmp
);
3610 /* Free the temporary. */
3613 tmp
= gfc_call_free (ptemp1
);
3614 gfc_add_expr_to_block (block
, tmp
);
3619 /* FORALL and WHERE statements are really nasty, especially when you nest
3620 them. All the rhs of a forall assignment must be evaluated before the
3621 actual assignments are performed. Presumably this also applies to all the
3622 assignments in an inner where statement. */
3624 /* Generate code for a FORALL statement. Any temporaries are allocated as a
3625 linear array, relying on the fact that we process in the same order in all
3628 forall (i=start:end:stride; maskexpr)
3632 (where e,f,g,h<i> are arbitrary expressions possibly involving i)
3634 count = ((end + 1 - start) / stride)
3635 masktmp(:) = maskexpr(:)
3638 for (i = start; i <= end; i += stride)
3640 if (masktmp[maskindex++])
3644 for (i = start; i <= end; i += stride)
3646 if (masktmp[maskindex++])
3650 Note that this code only works when there are no dependencies.
3651 Forall loop with array assignments and data dependencies are a real pain,
3652 because the size of the temporary cannot always be determined before the
3653 loop is executed. This problem is compounded by the presence of nested
3658 gfc_trans_forall_1 (gfc_code
* code
, forall_info
* nested_forall_info
)
3675 tree cycle_label
= NULL_TREE
;
3679 gfc_forall_iterator
*fa
;
3682 gfc_saved_var
*saved_vars
;
3683 iter_info
*this_forall
;
3687 /* Do nothing if the mask is false. */
3689 && code
->expr1
->expr_type
== EXPR_CONSTANT
3690 && !code
->expr1
->value
.logical
)
3691 return build_empty_stmt (input_location
);
3694 /* Count the FORALL index number. */
3695 for (fa
= code
->ext
.forall_iterator
; fa
; fa
= fa
->next
)
3699 /* Allocate the space for var, start, end, step, varexpr. */
3700 var
= XCNEWVEC (tree
, nvar
);
3701 start
= XCNEWVEC (tree
, nvar
);
3702 end
= XCNEWVEC (tree
, nvar
);
3703 step
= XCNEWVEC (tree
, nvar
);
3704 varexpr
= XCNEWVEC (gfc_expr
*, nvar
);
3705 saved_vars
= XCNEWVEC (gfc_saved_var
, nvar
);
3707 /* Allocate the space for info. */
3708 info
= XCNEW (forall_info
);
3710 gfc_start_block (&pre
);
3711 gfc_init_block (&post
);
3712 gfc_init_block (&block
);
3715 for (fa
= code
->ext
.forall_iterator
; fa
; fa
= fa
->next
)
3717 gfc_symbol
*sym
= fa
->var
->symtree
->n
.sym
;
3719 /* Allocate space for this_forall. */
3720 this_forall
= XCNEW (iter_info
);
3722 /* Create a temporary variable for the FORALL index. */
3723 tmp
= gfc_typenode_for_spec (&sym
->ts
);
3724 var
[n
] = gfc_create_var (tmp
, sym
->name
);
3725 gfc_shadow_sym (sym
, var
[n
], &saved_vars
[n
]);
3727 /* Record it in this_forall. */
3728 this_forall
->var
= var
[n
];
3730 /* Replace the index symbol's backend_decl with the temporary decl. */
3731 sym
->backend_decl
= var
[n
];
3733 /* Work out the start, end and stride for the loop. */
3734 gfc_init_se (&se
, NULL
);
3735 gfc_conv_expr_val (&se
, fa
->start
);
3736 /* Record it in this_forall. */
3737 this_forall
->start
= se
.expr
;
3738 gfc_add_block_to_block (&block
, &se
.pre
);
3741 gfc_init_se (&se
, NULL
);
3742 gfc_conv_expr_val (&se
, fa
->end
);
3743 /* Record it in this_forall. */
3744 this_forall
->end
= se
.expr
;
3745 gfc_make_safe_expr (&se
);
3746 gfc_add_block_to_block (&block
, &se
.pre
);
3749 gfc_init_se (&se
, NULL
);
3750 gfc_conv_expr_val (&se
, fa
->stride
);
3751 /* Record it in this_forall. */
3752 this_forall
->step
= se
.expr
;
3753 gfc_make_safe_expr (&se
);
3754 gfc_add_block_to_block (&block
, &se
.pre
);
3757 /* Set the NEXT field of this_forall to NULL. */
3758 this_forall
->next
= NULL
;
3759 /* Link this_forall to the info construct. */
3760 if (info
->this_loop
)
3762 iter_info
*iter_tmp
= info
->this_loop
;
3763 while (iter_tmp
->next
!= NULL
)
3764 iter_tmp
= iter_tmp
->next
;
3765 iter_tmp
->next
= this_forall
;
3768 info
->this_loop
= this_forall
;
3774 /* Calculate the size needed for the current forall level. */
3775 size
= gfc_index_one_node
;
3776 for (n
= 0; n
< nvar
; n
++)
3778 /* size = (end + step - start) / step. */
3779 tmp
= fold_build2_loc (input_location
, MINUS_EXPR
, TREE_TYPE (start
[n
]),
3781 tmp
= fold_build2_loc (input_location
, PLUS_EXPR
, TREE_TYPE (end
[n
]),
3783 tmp
= fold_build2_loc (input_location
, FLOOR_DIV_EXPR
, TREE_TYPE (tmp
),
3785 tmp
= convert (gfc_array_index_type
, tmp
);
3787 size
= fold_build2_loc (input_location
, MULT_EXPR
, gfc_array_index_type
,
3791 /* Record the nvar and size of current forall level. */
3797 /* If the mask is .true., consider the FORALL unconditional. */
3798 if (code
->expr1
->expr_type
== EXPR_CONSTANT
3799 && code
->expr1
->value
.logical
)
3807 /* First we need to allocate the mask. */
3810 /* As the mask array can be very big, prefer compact boolean types. */
3811 tree mask_type
= gfc_get_logical_type (gfc_logical_kinds
[0].kind
);
3812 mask
= allocate_temp_for_forall_nest (nested_forall_info
, mask_type
,
3813 size
, NULL
, &block
, &pmask
);
3814 maskindex
= gfc_create_var_np (gfc_array_index_type
, "mi");
3816 /* Record them in the info structure. */
3817 info
->maskindex
= maskindex
;
3822 /* No mask was specified. */
3823 maskindex
= NULL_TREE
;
3824 mask
= pmask
= NULL_TREE
;
3827 /* Link the current forall level to nested_forall_info. */
3828 info
->prev_nest
= nested_forall_info
;
3829 nested_forall_info
= info
;
3831 /* Copy the mask into a temporary variable if required.
3832 For now we assume a mask temporary is needed. */
3835 /* As the mask array can be very big, prefer compact boolean types. */
3836 tree mask_type
= gfc_get_logical_type (gfc_logical_kinds
[0].kind
);
3838 gfc_add_modify (&block
, maskindex
, gfc_index_zero_node
);
3840 /* Start of mask assignment loop body. */
3841 gfc_start_block (&body
);
3843 /* Evaluate the mask expression. */
3844 gfc_init_se (&se
, NULL
);
3845 gfc_conv_expr_val (&se
, code
->expr1
);
3846 gfc_add_block_to_block (&body
, &se
.pre
);
3848 /* Store the mask. */
3849 se
.expr
= convert (mask_type
, se
.expr
);
3851 tmp
= gfc_build_array_ref (mask
, maskindex
, NULL
);
3852 gfc_add_modify (&body
, tmp
, se
.expr
);
3854 /* Advance to the next mask element. */
3855 tmp
= fold_build2_loc (input_location
, PLUS_EXPR
, gfc_array_index_type
,
3856 maskindex
, gfc_index_one_node
);
3857 gfc_add_modify (&body
, maskindex
, tmp
);
3859 /* Generate the loops. */
3860 tmp
= gfc_finish_block (&body
);
3861 tmp
= gfc_trans_nested_forall_loop (info
, tmp
, 0);
3862 gfc_add_expr_to_block (&block
, tmp
);
3865 if (code
->op
== EXEC_DO_CONCURRENT
)
3867 gfc_init_block (&body
);
3868 cycle_label
= gfc_build_label_decl (NULL_TREE
);
3869 code
->cycle_label
= cycle_label
;
3870 tmp
= gfc_trans_code (code
->block
->next
);
3871 gfc_add_expr_to_block (&body
, tmp
);
3873 if (TREE_USED (cycle_label
))
3875 tmp
= build1_v (LABEL_EXPR
, cycle_label
);
3876 gfc_add_expr_to_block (&body
, tmp
);
3879 tmp
= gfc_finish_block (&body
);
3880 nested_forall_info
->do_concurrent
= true;
3881 tmp
= gfc_trans_nested_forall_loop (nested_forall_info
, tmp
, 1);
3882 gfc_add_expr_to_block (&block
, tmp
);
3886 c
= code
->block
->next
;
3888 /* TODO: loop merging in FORALL statements. */
3889 /* Now that we've got a copy of the mask, generate the assignment loops. */
3895 /* A scalar or array assignment. DO the simple check for
3896 lhs to rhs dependencies. These make a temporary for the
3897 rhs and form a second forall block to copy to variable. */
3898 need_temp
= check_forall_dependencies(c
, &pre
, &post
);
3900 /* Temporaries due to array assignment data dependencies introduce
3901 no end of problems. */
3903 gfc_trans_assign_need_temp (c
->expr1
, c
->expr2
, NULL
, false,
3904 nested_forall_info
, &block
);
3907 /* Use the normal assignment copying routines. */
3908 assign
= gfc_trans_assignment (c
->expr1
, c
->expr2
, false, true);
3910 /* Generate body and loops. */
3911 tmp
= gfc_trans_nested_forall_loop (nested_forall_info
,
3913 gfc_add_expr_to_block (&block
, tmp
);
3916 /* Cleanup any temporary symtrees that have been made to deal
3917 with dependencies. */
3919 cleanup_forall_symtrees (c
);
3924 /* Translate WHERE or WHERE construct nested in FORALL. */
3925 gfc_trans_where_2 (c
, NULL
, false, nested_forall_info
, &block
);
3928 /* Pointer assignment inside FORALL. */
3929 case EXEC_POINTER_ASSIGN
:
3930 need_temp
= gfc_check_dependency (c
->expr1
, c
->expr2
, 0);
3932 gfc_trans_pointer_assign_need_temp (c
->expr1
, c
->expr2
,
3933 nested_forall_info
, &block
);
3936 /* Use the normal assignment copying routines. */
3937 assign
= gfc_trans_pointer_assignment (c
->expr1
, c
->expr2
);
3939 /* Generate body and loops. */
3940 tmp
= gfc_trans_nested_forall_loop (nested_forall_info
,
3942 gfc_add_expr_to_block (&block
, tmp
);
3947 tmp
= gfc_trans_forall_1 (c
, nested_forall_info
);
3948 gfc_add_expr_to_block (&block
, tmp
);
3951 /* Explicit subroutine calls are prevented by the frontend but interface
3952 assignments can legitimately produce them. */
3953 case EXEC_ASSIGN_CALL
:
3954 assign
= gfc_trans_call (c
, true, NULL_TREE
, NULL_TREE
, false);
3955 tmp
= gfc_trans_nested_forall_loop (nested_forall_info
, assign
, 1);
3956 gfc_add_expr_to_block (&block
, tmp
);
3967 /* Restore the original index variables. */
3968 for (fa
= code
->ext
.forall_iterator
, n
= 0; fa
; fa
= fa
->next
, n
++)
3969 gfc_restore_sym (fa
->var
->symtree
->n
.sym
, &saved_vars
[n
]);
3971 /* Free the space for var, start, end, step, varexpr. */
3979 for (this_forall
= info
->this_loop
; this_forall
;)
3981 iter_info
*next
= this_forall
->next
;
3986 /* Free the space for this forall_info. */
3991 /* Free the temporary for the mask. */
3992 tmp
= gfc_call_free (pmask
);
3993 gfc_add_expr_to_block (&block
, tmp
);
3996 pushdecl (maskindex
);
3998 gfc_add_block_to_block (&pre
, &block
);
3999 gfc_add_block_to_block (&pre
, &post
);
4001 return gfc_finish_block (&pre
);
4005 /* Translate the FORALL statement or construct. */
4007 tree
gfc_trans_forall (gfc_code
* code
)
4009 return gfc_trans_forall_1 (code
, NULL
);
4013 /* Translate the DO CONCURRENT construct. */
4015 tree
gfc_trans_do_concurrent (gfc_code
* code
)
4017 return gfc_trans_forall_1 (code
, NULL
);
4021 /* Evaluate the WHERE mask expression, copy its value to a temporary.
4022 If the WHERE construct is nested in FORALL, compute the overall temporary
4023 needed by the WHERE mask expression multiplied by the iterator number of
4025 ME is the WHERE mask expression.
4026 MASK is the current execution mask upon input, whose sense may or may
4027 not be inverted as specified by the INVERT argument.
4028 CMASK is the updated execution mask on output, or NULL if not required.
4029 PMASK is the pending execution mask on output, or NULL if not required.
4030 BLOCK is the block in which to place the condition evaluation loops. */
4033 gfc_evaluate_where_mask (gfc_expr
* me
, forall_info
* nested_forall_info
,
4034 tree mask
, bool invert
, tree cmask
, tree pmask
,
4035 tree mask_type
, stmtblock_t
* block
)
4040 stmtblock_t body
, body1
;
4041 tree count
, cond
, mtmp
;
4044 gfc_init_loopinfo (&loop
);
4046 lss
= gfc_walk_expr (me
);
4047 rss
= gfc_walk_expr (me
);
4049 /* Variable to index the temporary. */
4050 count
= gfc_create_var (gfc_array_index_type
, "count");
4051 /* Initialize count. */
4052 gfc_add_modify (block
, count
, gfc_index_zero_node
);
4054 gfc_start_block (&body
);
4056 gfc_init_se (&rse
, NULL
);
4057 gfc_init_se (&lse
, NULL
);
4059 if (lss
== gfc_ss_terminator
)
4061 gfc_init_block (&body1
);
4065 /* Initialize the loop. */
4066 gfc_init_loopinfo (&loop
);
4068 /* We may need LSS to determine the shape of the expression. */
4069 gfc_add_ss_to_loop (&loop
, lss
);
4070 gfc_add_ss_to_loop (&loop
, rss
);
4072 gfc_conv_ss_startstride (&loop
);
4073 gfc_conv_loop_setup (&loop
, &me
->where
);
4075 gfc_mark_ss_chain_used (rss
, 1);
4076 /* Start the loop body. */
4077 gfc_start_scalarized_body (&loop
, &body1
);
4079 /* Translate the expression. */
4080 gfc_copy_loopinfo_to_se (&rse
, &loop
);
4082 gfc_conv_expr (&rse
, me
);
4085 /* Variable to evaluate mask condition. */
4086 cond
= gfc_create_var (mask_type
, "cond");
4087 if (mask
&& (cmask
|| pmask
))
4088 mtmp
= gfc_create_var (mask_type
, "mask");
4089 else mtmp
= NULL_TREE
;
4091 gfc_add_block_to_block (&body1
, &lse
.pre
);
4092 gfc_add_block_to_block (&body1
, &rse
.pre
);
4094 gfc_add_modify (&body1
, cond
, fold_convert (mask_type
, rse
.expr
));
4096 if (mask
&& (cmask
|| pmask
))
4098 tmp
= gfc_build_array_ref (mask
, count
, NULL
);
4100 tmp
= fold_build1_loc (input_location
, TRUTH_NOT_EXPR
, mask_type
, tmp
);
4101 gfc_add_modify (&body1
, mtmp
, tmp
);
4106 tmp1
= gfc_build_array_ref (cmask
, count
, NULL
);
4109 tmp
= fold_build2_loc (input_location
, TRUTH_AND_EXPR
, mask_type
,
4111 gfc_add_modify (&body1
, tmp1
, tmp
);
4116 tmp1
= gfc_build_array_ref (pmask
, count
, NULL
);
4117 tmp
= fold_build1_loc (input_location
, TRUTH_NOT_EXPR
, mask_type
, cond
);
4119 tmp
= fold_build2_loc (input_location
, TRUTH_AND_EXPR
, mask_type
, mtmp
,
4121 gfc_add_modify (&body1
, tmp1
, tmp
);
4124 gfc_add_block_to_block (&body1
, &lse
.post
);
4125 gfc_add_block_to_block (&body1
, &rse
.post
);
4127 if (lss
== gfc_ss_terminator
)
4129 gfc_add_block_to_block (&body
, &body1
);
4133 /* Increment count. */
4134 tmp1
= fold_build2_loc (input_location
, PLUS_EXPR
, gfc_array_index_type
,
4135 count
, gfc_index_one_node
);
4136 gfc_add_modify (&body1
, count
, tmp1
);
4138 /* Generate the copying loops. */
4139 gfc_trans_scalarizing_loops (&loop
, &body1
);
4141 gfc_add_block_to_block (&body
, &loop
.pre
);
4142 gfc_add_block_to_block (&body
, &loop
.post
);
4144 gfc_cleanup_loop (&loop
);
4145 /* TODO: Reuse lss and rss when copying temp->lhs. Need to be careful
4146 as tree nodes in SS may not be valid in different scope. */
4149 tmp1
= gfc_finish_block (&body
);
4150 /* If the WHERE construct is inside FORALL, fill the full temporary. */
4151 if (nested_forall_info
!= NULL
)
4152 tmp1
= gfc_trans_nested_forall_loop (nested_forall_info
, tmp1
, 1);
4154 gfc_add_expr_to_block (block
, tmp1
);
4158 /* Translate an assignment statement in a WHERE statement or construct
4159 statement. The MASK expression is used to control which elements
4160 of EXPR1 shall be assigned. The sense of MASK is specified by
4164 gfc_trans_where_assign (gfc_expr
*expr1
, gfc_expr
*expr2
,
4165 tree mask
, bool invert
,
4166 tree count1
, tree count2
,
4172 gfc_ss
*lss_section
;
4179 tree index
, maskexpr
;
4181 /* A defined assignment. */
4182 if (cnext
&& cnext
->resolved_sym
)
4183 return gfc_trans_call (cnext
, true, mask
, count1
, invert
);
4186 /* TODO: handle this special case.
4187 Special case a single function returning an array. */
4188 if (expr2
->expr_type
== EXPR_FUNCTION
&& expr2
->rank
> 0)
4190 tmp
= gfc_trans_arrayfunc_assign (expr1
, expr2
);
4196 /* Assignment of the form lhs = rhs. */
4197 gfc_start_block (&block
);
4199 gfc_init_se (&lse
, NULL
);
4200 gfc_init_se (&rse
, NULL
);
4203 lss
= gfc_walk_expr (expr1
);
4206 /* In each where-assign-stmt, the mask-expr and the variable being
4207 defined shall be arrays of the same shape. */
4208 gcc_assert (lss
!= gfc_ss_terminator
);
4210 /* The assignment needs scalarization. */
4213 /* Find a non-scalar SS from the lhs. */
4214 while (lss_section
!= gfc_ss_terminator
4215 && lss_section
->info
->type
!= GFC_SS_SECTION
)
4216 lss_section
= lss_section
->next
;
4218 gcc_assert (lss_section
!= gfc_ss_terminator
);
4220 /* Initialize the scalarizer. */
4221 gfc_init_loopinfo (&loop
);
4224 rss
= gfc_walk_expr (expr2
);
4225 if (rss
== gfc_ss_terminator
)
4227 /* The rhs is scalar. Add a ss for the expression. */
4228 rss
= gfc_get_scalar_ss (gfc_ss_terminator
, expr2
);
4229 rss
->info
->where
= 1;
4232 /* Associate the SS with the loop. */
4233 gfc_add_ss_to_loop (&loop
, lss
);
4234 gfc_add_ss_to_loop (&loop
, rss
);
4236 /* Calculate the bounds of the scalarization. */
4237 gfc_conv_ss_startstride (&loop
);
4239 /* Resolve any data dependencies in the statement. */
4240 gfc_conv_resolve_dependencies (&loop
, lss_section
, rss
);
4242 /* Setup the scalarizing loops. */
4243 gfc_conv_loop_setup (&loop
, &expr2
->where
);
4245 /* Setup the gfc_se structures. */
4246 gfc_copy_loopinfo_to_se (&lse
, &loop
);
4247 gfc_copy_loopinfo_to_se (&rse
, &loop
);
4250 gfc_mark_ss_chain_used (rss
, 1);
4251 if (loop
.temp_ss
== NULL
)
4254 gfc_mark_ss_chain_used (lss
, 1);
4258 lse
.ss
= loop
.temp_ss
;
4259 gfc_mark_ss_chain_used (lss
, 3);
4260 gfc_mark_ss_chain_used (loop
.temp_ss
, 3);
4263 /* Start the scalarized loop body. */
4264 gfc_start_scalarized_body (&loop
, &body
);
4266 /* Translate the expression. */
4267 gfc_conv_expr (&rse
, expr2
);
4268 if (lss
!= gfc_ss_terminator
&& loop
.temp_ss
!= NULL
)
4269 gfc_conv_tmp_array_ref (&lse
);
4271 gfc_conv_expr (&lse
, expr1
);
4273 /* Form the mask expression according to the mask. */
4275 maskexpr
= gfc_build_array_ref (mask
, index
, NULL
);
4277 maskexpr
= fold_build1_loc (input_location
, TRUTH_NOT_EXPR
,
4278 TREE_TYPE (maskexpr
), maskexpr
);
4280 /* Use the scalar assignment as is. */
4281 tmp
= gfc_trans_scalar_assign (&lse
, &rse
, expr1
->ts
,
4282 loop
.temp_ss
!= NULL
, false, true);
4284 tmp
= build3_v (COND_EXPR
, maskexpr
, tmp
, build_empty_stmt (input_location
));
4286 gfc_add_expr_to_block (&body
, tmp
);
4288 if (lss
== gfc_ss_terminator
)
4290 /* Increment count1. */
4291 tmp
= fold_build2_loc (input_location
, PLUS_EXPR
, gfc_array_index_type
,
4292 count1
, gfc_index_one_node
);
4293 gfc_add_modify (&body
, count1
, tmp
);
4295 /* Use the scalar assignment as is. */
4296 gfc_add_block_to_block (&block
, &body
);
4300 gcc_assert (lse
.ss
== gfc_ss_terminator
4301 && rse
.ss
== gfc_ss_terminator
);
4303 if (loop
.temp_ss
!= NULL
)
4305 /* Increment count1 before finish the main body of a scalarized
4307 tmp
= fold_build2_loc (input_location
, PLUS_EXPR
,
4308 gfc_array_index_type
, count1
, gfc_index_one_node
);
4309 gfc_add_modify (&body
, count1
, tmp
);
4310 gfc_trans_scalarized_loop_boundary (&loop
, &body
);
4312 /* We need to copy the temporary to the actual lhs. */
4313 gfc_init_se (&lse
, NULL
);
4314 gfc_init_se (&rse
, NULL
);
4315 gfc_copy_loopinfo_to_se (&lse
, &loop
);
4316 gfc_copy_loopinfo_to_se (&rse
, &loop
);
4318 rse
.ss
= loop
.temp_ss
;
4321 gfc_conv_tmp_array_ref (&rse
);
4322 gfc_conv_expr (&lse
, expr1
);
4324 gcc_assert (lse
.ss
== gfc_ss_terminator
4325 && rse
.ss
== gfc_ss_terminator
);
4327 /* Form the mask expression according to the mask tree list. */
4329 maskexpr
= gfc_build_array_ref (mask
, index
, NULL
);
4331 maskexpr
= fold_build1_loc (input_location
, TRUTH_NOT_EXPR
,
4332 TREE_TYPE (maskexpr
), maskexpr
);
4334 /* Use the scalar assignment as is. */
4335 tmp
= gfc_trans_scalar_assign (&lse
, &rse
, expr1
->ts
, false, false,
4337 tmp
= build3_v (COND_EXPR
, maskexpr
, tmp
,
4338 build_empty_stmt (input_location
));
4339 gfc_add_expr_to_block (&body
, tmp
);
4341 /* Increment count2. */
4342 tmp
= fold_build2_loc (input_location
, PLUS_EXPR
,
4343 gfc_array_index_type
, count2
,
4344 gfc_index_one_node
);
4345 gfc_add_modify (&body
, count2
, tmp
);
4349 /* Increment count1. */
4350 tmp
= fold_build2_loc (input_location
, PLUS_EXPR
,
4351 gfc_array_index_type
, count1
,
4352 gfc_index_one_node
);
4353 gfc_add_modify (&body
, count1
, tmp
);
4356 /* Generate the copying loops. */
4357 gfc_trans_scalarizing_loops (&loop
, &body
);
4359 /* Wrap the whole thing up. */
4360 gfc_add_block_to_block (&block
, &loop
.pre
);
4361 gfc_add_block_to_block (&block
, &loop
.post
);
4362 gfc_cleanup_loop (&loop
);
4365 return gfc_finish_block (&block
);
4369 /* Translate the WHERE construct or statement.
4370 This function can be called iteratively to translate the nested WHERE
4371 construct or statement.
4372 MASK is the control mask. */
4375 gfc_trans_where_2 (gfc_code
* code
, tree mask
, bool invert
,
4376 forall_info
* nested_forall_info
, stmtblock_t
* block
)
4378 stmtblock_t inner_size_body
;
4379 tree inner_size
, size
;
4388 tree count1
, count2
;
4392 tree pcmask
= NULL_TREE
;
4393 tree ppmask
= NULL_TREE
;
4394 tree cmask
= NULL_TREE
;
4395 tree pmask
= NULL_TREE
;
4396 gfc_actual_arglist
*arg
;
4398 /* the WHERE statement or the WHERE construct statement. */
4399 cblock
= code
->block
;
4401 /* As the mask array can be very big, prefer compact boolean types. */
4402 mask_type
= gfc_get_logical_type (gfc_logical_kinds
[0].kind
);
4404 /* Determine which temporary masks are needed. */
4407 /* One clause: No ELSEWHEREs. */
4408 need_cmask
= (cblock
->next
!= 0);
4411 else if (cblock
->block
->block
)
4413 /* Three or more clauses: Conditional ELSEWHEREs. */
4417 else if (cblock
->next
)
4419 /* Two clauses, the first non-empty. */
4421 need_pmask
= (mask
!= NULL_TREE
4422 && cblock
->block
->next
!= 0);
4424 else if (!cblock
->block
->next
)
4426 /* Two clauses, both empty. */
4430 /* Two clauses, the first empty, the second non-empty. */
4433 need_cmask
= (cblock
->block
->expr1
!= 0);
4442 if (need_cmask
|| need_pmask
)
4444 /* Calculate the size of temporary needed by the mask-expr. */
4445 gfc_init_block (&inner_size_body
);
4446 inner_size
= compute_inner_temp_size (cblock
->expr1
, cblock
->expr1
,
4447 &inner_size_body
, &lss
, &rss
);
4449 gfc_free_ss_chain (lss
);
4450 gfc_free_ss_chain (rss
);
4452 /* Calculate the total size of temporary needed. */
4453 size
= compute_overall_iter_number (nested_forall_info
, inner_size
,
4454 &inner_size_body
, block
);
4456 /* Check whether the size is negative. */
4457 cond
= fold_build2_loc (input_location
, LE_EXPR
, boolean_type_node
, size
,
4458 gfc_index_zero_node
);
4459 size
= fold_build3_loc (input_location
, COND_EXPR
, gfc_array_index_type
,
4460 cond
, gfc_index_zero_node
, size
);
4461 size
= gfc_evaluate_now (size
, block
);
4463 /* Allocate temporary for WHERE mask if needed. */
4465 cmask
= allocate_temp_for_forall_nest_1 (mask_type
, size
, block
,
4468 /* Allocate temporary for !mask if needed. */
4470 pmask
= allocate_temp_for_forall_nest_1 (mask_type
, size
, block
,
4476 /* Each time around this loop, the where clause is conditional
4477 on the value of mask and invert, which are updated at the
4478 bottom of the loop. */
4480 /* Has mask-expr. */
4483 /* Ensure that the WHERE mask will be evaluated exactly once.
4484 If there are no statements in this WHERE/ELSEWHERE clause,
4485 then we don't need to update the control mask (cmask).
4486 If this is the last clause of the WHERE construct, then
4487 we don't need to update the pending control mask (pmask). */
4489 gfc_evaluate_where_mask (cblock
->expr1
, nested_forall_info
,
4491 cblock
->next
? cmask
: NULL_TREE
,
4492 cblock
->block
? pmask
: NULL_TREE
,
4495 gfc_evaluate_where_mask (cblock
->expr1
, nested_forall_info
,
4497 (cblock
->next
|| cblock
->block
)
4498 ? cmask
: NULL_TREE
,
4499 NULL_TREE
, mask_type
, block
);
4503 /* It's a final elsewhere-stmt. No mask-expr is present. */
4507 /* The body of this where clause are controlled by cmask with
4508 sense specified by invert. */
4510 /* Get the assignment statement of a WHERE statement, or the first
4511 statement in where-body-construct of a WHERE construct. */
4512 cnext
= cblock
->next
;
4517 /* WHERE assignment statement. */
4518 case EXEC_ASSIGN_CALL
:
4520 arg
= cnext
->ext
.actual
;
4521 expr1
= expr2
= NULL
;
4522 for (; arg
; arg
= arg
->next
)
4534 expr1
= cnext
->expr1
;
4535 expr2
= cnext
->expr2
;
4537 if (nested_forall_info
!= NULL
)
4539 need_temp
= gfc_check_dependency (expr1
, expr2
, 0);
4540 if (need_temp
&& cnext
->op
!= EXEC_ASSIGN_CALL
)
4541 gfc_trans_assign_need_temp (expr1
, expr2
,
4543 nested_forall_info
, block
);
4546 /* Variables to control maskexpr. */
4547 count1
= gfc_create_var (gfc_array_index_type
, "count1");
4548 count2
= gfc_create_var (gfc_array_index_type
, "count2");
4549 gfc_add_modify (block
, count1
, gfc_index_zero_node
);
4550 gfc_add_modify (block
, count2
, gfc_index_zero_node
);
4552 tmp
= gfc_trans_where_assign (expr1
, expr2
,
4557 tmp
= gfc_trans_nested_forall_loop (nested_forall_info
,
4559 gfc_add_expr_to_block (block
, tmp
);
4564 /* Variables to control maskexpr. */
4565 count1
= gfc_create_var (gfc_array_index_type
, "count1");
4566 count2
= gfc_create_var (gfc_array_index_type
, "count2");
4567 gfc_add_modify (block
, count1
, gfc_index_zero_node
);
4568 gfc_add_modify (block
, count2
, gfc_index_zero_node
);
4570 tmp
= gfc_trans_where_assign (expr1
, expr2
,
4574 gfc_add_expr_to_block (block
, tmp
);
4579 /* WHERE or WHERE construct is part of a where-body-construct. */
4581 gfc_trans_where_2 (cnext
, cmask
, invert
,
4582 nested_forall_info
, block
);
4589 /* The next statement within the same where-body-construct. */
4590 cnext
= cnext
->next
;
4592 /* The next masked-elsewhere-stmt, elsewhere-stmt, or end-where-stmt. */
4593 cblock
= cblock
->block
;
4594 if (mask
== NULL_TREE
)
4596 /* If we're the initial WHERE, we can simply invert the sense
4597 of the current mask to obtain the "mask" for the remaining
4604 /* Otherwise, for nested WHERE's we need to use the pending mask. */
4610 /* If we allocated a pending mask array, deallocate it now. */
4613 tmp
= gfc_call_free (ppmask
);
4614 gfc_add_expr_to_block (block
, tmp
);
4617 /* If we allocated a current mask array, deallocate it now. */
4620 tmp
= gfc_call_free (pcmask
);
4621 gfc_add_expr_to_block (block
, tmp
);
4625 /* Translate a simple WHERE construct or statement without dependencies.
4626 CBLOCK is the "then" clause of the WHERE statement, where CBLOCK->EXPR
4627 is the mask condition, and EBLOCK if non-NULL is the "else" clause.
4628 Currently both CBLOCK and EBLOCK are restricted to single assignments. */
4631 gfc_trans_where_3 (gfc_code
* cblock
, gfc_code
* eblock
)
4633 stmtblock_t block
, body
;
4634 gfc_expr
*cond
, *tdst
, *tsrc
, *edst
, *esrc
;
4635 tree tmp
, cexpr
, tstmt
, estmt
;
4636 gfc_ss
*css
, *tdss
, *tsss
;
4637 gfc_se cse
, tdse
, tsse
, edse
, esse
;
4642 /* Allow the scalarizer to workshare simple where loops. */
4643 if (ompws_flags
& OMPWS_WORKSHARE_FLAG
)
4644 ompws_flags
|= OMPWS_SCALARIZER_WS
;
4646 cond
= cblock
->expr1
;
4647 tdst
= cblock
->next
->expr1
;
4648 tsrc
= cblock
->next
->expr2
;
4649 edst
= eblock
? eblock
->next
->expr1
: NULL
;
4650 esrc
= eblock
? eblock
->next
->expr2
: NULL
;
4652 gfc_start_block (&block
);
4653 gfc_init_loopinfo (&loop
);
4655 /* Handle the condition. */
4656 gfc_init_se (&cse
, NULL
);
4657 css
= gfc_walk_expr (cond
);
4658 gfc_add_ss_to_loop (&loop
, css
);
4660 /* Handle the then-clause. */
4661 gfc_init_se (&tdse
, NULL
);
4662 gfc_init_se (&tsse
, NULL
);
4663 tdss
= gfc_walk_expr (tdst
);
4664 tsss
= gfc_walk_expr (tsrc
);
4665 if (tsss
== gfc_ss_terminator
)
4667 tsss
= gfc_get_scalar_ss (gfc_ss_terminator
, tsrc
);
4668 tsss
->info
->where
= 1;
4670 gfc_add_ss_to_loop (&loop
, tdss
);
4671 gfc_add_ss_to_loop (&loop
, tsss
);
4675 /* Handle the else clause. */
4676 gfc_init_se (&edse
, NULL
);
4677 gfc_init_se (&esse
, NULL
);
4678 edss
= gfc_walk_expr (edst
);
4679 esss
= gfc_walk_expr (esrc
);
4680 if (esss
== gfc_ss_terminator
)
4682 esss
= gfc_get_scalar_ss (gfc_ss_terminator
, esrc
);
4683 esss
->info
->where
= 1;
4685 gfc_add_ss_to_loop (&loop
, edss
);
4686 gfc_add_ss_to_loop (&loop
, esss
);
4689 gfc_conv_ss_startstride (&loop
);
4690 gfc_conv_loop_setup (&loop
, &tdst
->where
);
4692 gfc_mark_ss_chain_used (css
, 1);
4693 gfc_mark_ss_chain_used (tdss
, 1);
4694 gfc_mark_ss_chain_used (tsss
, 1);
4697 gfc_mark_ss_chain_used (edss
, 1);
4698 gfc_mark_ss_chain_used (esss
, 1);
4701 gfc_start_scalarized_body (&loop
, &body
);
4703 gfc_copy_loopinfo_to_se (&cse
, &loop
);
4704 gfc_copy_loopinfo_to_se (&tdse
, &loop
);
4705 gfc_copy_loopinfo_to_se (&tsse
, &loop
);
4711 gfc_copy_loopinfo_to_se (&edse
, &loop
);
4712 gfc_copy_loopinfo_to_se (&esse
, &loop
);
4717 gfc_conv_expr (&cse
, cond
);
4718 gfc_add_block_to_block (&body
, &cse
.pre
);
4721 gfc_conv_expr (&tsse
, tsrc
);
4722 if (tdss
!= gfc_ss_terminator
&& loop
.temp_ss
!= NULL
)
4723 gfc_conv_tmp_array_ref (&tdse
);
4725 gfc_conv_expr (&tdse
, tdst
);
4729 gfc_conv_expr (&esse
, esrc
);
4730 if (edss
!= gfc_ss_terminator
&& loop
.temp_ss
!= NULL
)
4731 gfc_conv_tmp_array_ref (&edse
);
4733 gfc_conv_expr (&edse
, edst
);
4736 tstmt
= gfc_trans_scalar_assign (&tdse
, &tsse
, tdst
->ts
, false, false, true);
4737 estmt
= eblock
? gfc_trans_scalar_assign (&edse
, &esse
, edst
->ts
, false,
4739 : build_empty_stmt (input_location
);
4740 tmp
= build3_v (COND_EXPR
, cexpr
, tstmt
, estmt
);
4741 gfc_add_expr_to_block (&body
, tmp
);
4742 gfc_add_block_to_block (&body
, &cse
.post
);
4744 gfc_trans_scalarizing_loops (&loop
, &body
);
4745 gfc_add_block_to_block (&block
, &loop
.pre
);
4746 gfc_add_block_to_block (&block
, &loop
.post
);
4747 gfc_cleanup_loop (&loop
);
4749 return gfc_finish_block (&block
);
4752 /* As the WHERE or WHERE construct statement can be nested, we call
4753 gfc_trans_where_2 to do the translation, and pass the initial
4754 NULL values for both the control mask and the pending control mask. */
4757 gfc_trans_where (gfc_code
* code
)
4763 cblock
= code
->block
;
4765 && cblock
->next
->op
== EXEC_ASSIGN
4766 && !cblock
->next
->next
)
4768 eblock
= cblock
->block
;
4771 /* A simple "WHERE (cond) x = y" statement or block is
4772 dependence free if cond is not dependent upon writing x,
4773 and the source y is unaffected by the destination x. */
4774 if (!gfc_check_dependency (cblock
->next
->expr1
,
4776 && !gfc_check_dependency (cblock
->next
->expr1
,
4777 cblock
->next
->expr2
, 0))
4778 return gfc_trans_where_3 (cblock
, NULL
);
4780 else if (!eblock
->expr1
4783 && eblock
->next
->op
== EXEC_ASSIGN
4784 && !eblock
->next
->next
)
4786 /* A simple "WHERE (cond) x1 = y1 ELSEWHERE x2 = y2 ENDWHERE"
4787 block is dependence free if cond is not dependent on writes
4788 to x1 and x2, y1 is not dependent on writes to x2, and y2
4789 is not dependent on writes to x1, and both y's are not
4790 dependent upon their own x's. In addition to this, the
4791 final two dependency checks below exclude all but the same
4792 array reference if the where and elswhere destinations
4793 are the same. In short, this is VERY conservative and this
4794 is needed because the two loops, required by the standard
4795 are coalesced in gfc_trans_where_3. */
4796 if (!gfc_check_dependency (cblock
->next
->expr1
,
4798 && !gfc_check_dependency (eblock
->next
->expr1
,
4800 && !gfc_check_dependency (cblock
->next
->expr1
,
4801 eblock
->next
->expr2
, 1)
4802 && !gfc_check_dependency (eblock
->next
->expr1
,
4803 cblock
->next
->expr2
, 1)
4804 && !gfc_check_dependency (cblock
->next
->expr1
,
4805 cblock
->next
->expr2
, 1)
4806 && !gfc_check_dependency (eblock
->next
->expr1
,
4807 eblock
->next
->expr2
, 1)
4808 && !gfc_check_dependency (cblock
->next
->expr1
,
4809 eblock
->next
->expr1
, 0)
4810 && !gfc_check_dependency (eblock
->next
->expr1
,
4811 cblock
->next
->expr1
, 0))
4812 return gfc_trans_where_3 (cblock
, eblock
);
4816 gfc_start_block (&block
);
4818 gfc_trans_where_2 (code
, NULL
, false, NULL
, &block
);
4820 return gfc_finish_block (&block
);
4824 /* CYCLE a DO loop. The label decl has already been created by
4825 gfc_trans_do(), it's in TREE_PURPOSE (backend_decl) of the gfc_code
4826 node at the head of the loop. We must mark the label as used. */
4829 gfc_trans_cycle (gfc_code
* code
)
4833 cycle_label
= code
->ext
.which_construct
->cycle_label
;
4834 gcc_assert (cycle_label
);
4836 TREE_USED (cycle_label
) = 1;
4837 return build1_v (GOTO_EXPR
, cycle_label
);
4841 /* EXIT a DO loop. Similar to CYCLE, but now the label is in
4842 TREE_VALUE (backend_decl) of the gfc_code node at the head of the
4846 gfc_trans_exit (gfc_code
* code
)
4850 exit_label
= code
->ext
.which_construct
->exit_label
;
4851 gcc_assert (exit_label
);
4853 TREE_USED (exit_label
) = 1;
4854 return build1_v (GOTO_EXPR
, exit_label
);
4858 /* Translate the ALLOCATE statement. */
4861 gfc_trans_allocate (gfc_code
* code
)
4883 tree memsize
= NULL_TREE
;
4884 tree classexpr
= NULL_TREE
;
4886 if (!code
->ext
.alloc
.list
)
4889 stat
= tmp
= memsz
= NULL_TREE
;
4890 label_errmsg
= label_finish
= errmsg
= errlen
= NULL_TREE
;
4892 gfc_init_block (&block
);
4893 gfc_init_block (&post
);
4895 /* STAT= (and maybe ERRMSG=) is present. */
4899 tree gfc_int4_type_node
= gfc_get_int_type (4);
4900 stat
= gfc_create_var (gfc_int4_type_node
, "stat");
4902 /* ERRMSG= only makes sense with STAT=. */
4905 gfc_init_se (&se
, NULL
);
4906 se
.want_pointer
= 1;
4907 gfc_conv_expr_lhs (&se
, code
->expr2
);
4909 errlen
= se
.string_length
;
4913 errmsg
= null_pointer_node
;
4914 errlen
= build_int_cst (gfc_charlen_type_node
, 0);
4917 /* GOTO destinations. */
4918 label_errmsg
= gfc_build_label_decl (NULL_TREE
);
4919 label_finish
= gfc_build_label_decl (NULL_TREE
);
4920 TREE_USED (label_finish
) = 0;
4926 for (al
= code
->ext
.alloc
.list
; al
!= NULL
; al
= al
->next
)
4928 expr
= gfc_copy_expr (al
->expr
);
4930 if (expr
->ts
.type
== BT_CLASS
)
4931 gfc_add_data_component (expr
);
4933 gfc_init_se (&se
, NULL
);
4935 se
.want_pointer
= 1;
4936 se
.descriptor_only
= 1;
4937 gfc_conv_expr (&se
, expr
);
4939 /* Evaluate expr3 just once if not a variable. */
4940 if (al
== code
->ext
.alloc
.list
4941 && al
->expr
->ts
.type
== BT_CLASS
4943 && code
->expr3
->ts
.type
== BT_CLASS
4944 && code
->expr3
->expr_type
!= EXPR_VARIABLE
)
4946 gfc_init_se (&se_sz
, NULL
);
4947 gfc_conv_expr_reference (&se_sz
, code
->expr3
);
4948 gfc_conv_class_to_class (&se_sz
, code
->expr3
,
4949 code
->expr3
->ts
, false, true, false, false);
4950 gfc_add_block_to_block (&se
.pre
, &se_sz
.pre
);
4951 gfc_add_block_to_block (&se
.post
, &se_sz
.post
);
4952 classexpr
= build_fold_indirect_ref_loc (input_location
,
4954 classexpr
= gfc_evaluate_now (classexpr
, &se
.pre
);
4955 memsize
= gfc_vtable_size_get (classexpr
);
4956 memsize
= fold_convert (sizetype
, memsize
);
4960 class_expr
= classexpr
;
4963 if (!gfc_array_allocate (&se
, expr
, stat
, errmsg
, errlen
, label_finish
,
4964 memsz
, &nelems
, code
->expr3
, &code
->ext
.alloc
.ts
))
4966 bool unlimited_char
;
4968 unlimited_char
= UNLIMITED_POLY (al
->expr
)
4969 && ((code
->expr3
&& code
->expr3
->ts
.type
== BT_CHARACTER
)
4970 || (code
->ext
.alloc
.ts
.type
== BT_CHARACTER
4971 && code
->ext
.alloc
.ts
.u
.cl
4972 && code
->ext
.alloc
.ts
.u
.cl
->length
));
4974 /* A scalar or derived type. */
4976 /* Determine allocate size. */
4977 if (al
->expr
->ts
.type
== BT_CLASS
4980 && memsz
== NULL_TREE
)
4982 if (code
->expr3
->ts
.type
== BT_CLASS
)
4984 sz
= gfc_copy_expr (code
->expr3
);
4985 gfc_add_vptr_component (sz
);
4986 gfc_add_size_component (sz
);
4987 gfc_init_se (&se_sz
, NULL
);
4988 gfc_conv_expr (&se_sz
, sz
);
4993 memsz
= TYPE_SIZE_UNIT (gfc_typenode_for_spec (&code
->expr3
->ts
));
4995 else if (((al
->expr
->ts
.type
== BT_CHARACTER
&& al
->expr
->ts
.deferred
)
4996 || unlimited_char
) && code
->expr3
)
4998 if (!code
->expr3
->ts
.u
.cl
->backend_decl
)
5000 /* Convert and use the length expression. */
5001 gfc_init_se (&se_sz
, NULL
);
5002 if (code
->expr3
->expr_type
== EXPR_VARIABLE
5003 || code
->expr3
->expr_type
== EXPR_CONSTANT
)
5005 gfc_conv_expr (&se_sz
, code
->expr3
);
5006 gfc_add_block_to_block (&se
.pre
, &se_sz
.pre
);
5008 = gfc_evaluate_now (se_sz
.string_length
, &se
.pre
);
5009 gfc_add_block_to_block (&se
.pre
, &se_sz
.post
);
5010 memsz
= se_sz
.string_length
;
5012 else if (code
->expr3
->mold
5013 && code
->expr3
->ts
.u
.cl
5014 && code
->expr3
->ts
.u
.cl
->length
)
5016 gfc_conv_expr (&se_sz
, code
->expr3
->ts
.u
.cl
->length
);
5017 gfc_add_block_to_block (&se
.pre
, &se_sz
.pre
);
5018 se_sz
.expr
= gfc_evaluate_now (se_sz
.expr
, &se
.pre
);
5019 gfc_add_block_to_block (&se
.pre
, &se_sz
.post
);
5024 /* This is would be inefficient and possibly could
5025 generate wrong code if the result were not stored
5027 if (slen3
== NULL_TREE
)
5029 gfc_conv_expr (&se_sz
, code
->expr3
);
5030 gfc_add_block_to_block (&se
.pre
, &se_sz
.pre
);
5031 expr3
= gfc_evaluate_now (se_sz
.expr
, &se
.pre
);
5032 gfc_add_block_to_block (&post
, &se_sz
.post
);
5033 slen3
= gfc_evaluate_now (se_sz
.string_length
,
5040 /* Otherwise use the stored string length. */
5041 memsz
= code
->expr3
->ts
.u
.cl
->backend_decl
;
5042 tmp
= al
->expr
->ts
.u
.cl
->backend_decl
;
5044 /* Store the string length. */
5045 if (tmp
&& TREE_CODE (tmp
) == VAR_DECL
)
5046 gfc_add_modify (&se
.pre
, tmp
, fold_convert (TREE_TYPE (tmp
),
5048 else if (al
->expr
->ts
.type
== BT_CHARACTER
5049 && al
->expr
->ts
.deferred
&& se
.string_length
)
5050 gfc_add_modify (&se
.pre
, se
.string_length
,
5051 fold_convert (TREE_TYPE (se
.string_length
),
5054 /* Convert to size in bytes, using the character KIND. */
5056 tmp
= TREE_TYPE (gfc_typenode_for_spec (&code
->expr3
->ts
));
5058 tmp
= TREE_TYPE (gfc_typenode_for_spec (&al
->expr
->ts
));
5059 tmp
= TYPE_SIZE_UNIT (tmp
);
5060 memsz
= fold_build2_loc (input_location
, MULT_EXPR
,
5061 TREE_TYPE (tmp
), tmp
,
5062 fold_convert (TREE_TYPE (tmp
), memsz
));
5064 else if ((al
->expr
->ts
.type
== BT_CHARACTER
&& al
->expr
->ts
.deferred
)
5067 gcc_assert (code
->ext
.alloc
.ts
.u
.cl
&& code
->ext
.alloc
.ts
.u
.cl
->length
);
5068 gfc_init_se (&se_sz
, NULL
);
5069 gfc_conv_expr (&se_sz
, code
->ext
.alloc
.ts
.u
.cl
->length
);
5070 gfc_add_block_to_block (&se
.pre
, &se_sz
.pre
);
5071 se_sz
.expr
= gfc_evaluate_now (se_sz
.expr
, &se
.pre
);
5072 gfc_add_block_to_block (&se
.pre
, &se_sz
.post
);
5073 /* Store the string length. */
5074 tmp
= al
->expr
->ts
.u
.cl
->backend_decl
;
5075 gfc_add_modify (&se
.pre
, tmp
, fold_convert (TREE_TYPE (tmp
),
5077 tmp
= TREE_TYPE (gfc_typenode_for_spec (&code
->ext
.alloc
.ts
));
5078 tmp
= TYPE_SIZE_UNIT (tmp
);
5079 memsz
= fold_build2_loc (input_location
, MULT_EXPR
,
5080 TREE_TYPE (tmp
), tmp
,
5081 fold_convert (TREE_TYPE (se_sz
.expr
),
5084 else if (code
->ext
.alloc
.ts
.type
!= BT_UNKNOWN
)
5085 memsz
= TYPE_SIZE_UNIT (gfc_typenode_for_spec (&code
->ext
.alloc
.ts
));
5086 else if (memsz
== NULL_TREE
)
5087 memsz
= TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (se
.expr
)));
5089 if (expr
->ts
.type
== BT_CHARACTER
&& memsz
== NULL_TREE
)
5091 memsz
= se
.string_length
;
5093 /* Convert to size in bytes, using the character KIND. */
5094 tmp
= TREE_TYPE (gfc_typenode_for_spec (&code
->ext
.alloc
.ts
));
5095 tmp
= TYPE_SIZE_UNIT (tmp
);
5096 memsz
= fold_build2_loc (input_location
, MULT_EXPR
,
5097 TREE_TYPE (tmp
), tmp
,
5098 fold_convert (TREE_TYPE (tmp
), memsz
));
5101 /* Allocate - for non-pointers with re-alloc checking. */
5102 if (gfc_expr_attr (expr
).allocatable
)
5103 gfc_allocate_allocatable (&se
.pre
, se
.expr
, memsz
, NULL_TREE
,
5104 stat
, errmsg
, errlen
, label_finish
, expr
);
5106 gfc_allocate_using_malloc (&se
.pre
, se
.expr
, memsz
, stat
);
5108 if (al
->expr
->ts
.type
== BT_DERIVED
5109 && expr
->ts
.u
.derived
->attr
.alloc_comp
)
5111 tmp
= build_fold_indirect_ref_loc (input_location
, se
.expr
);
5112 tmp
= gfc_nullify_alloc_comp (expr
->ts
.u
.derived
, tmp
, 0);
5113 gfc_add_expr_to_block (&se
.pre
, tmp
);
5117 gfc_add_block_to_block (&block
, &se
.pre
);
5119 /* Error checking -- Note: ERRMSG only makes sense with STAT. */
5122 tmp
= build1_v (GOTO_EXPR
, label_errmsg
);
5123 parm
= fold_build2_loc (input_location
, NE_EXPR
,
5124 boolean_type_node
, stat
,
5125 build_int_cst (TREE_TYPE (stat
), 0));
5126 tmp
= fold_build3_loc (input_location
, COND_EXPR
, void_type_node
,
5127 gfc_unlikely (parm
, PRED_FORTRAN_FAIL_ALLOC
),
5128 tmp
, build_empty_stmt (input_location
));
5129 gfc_add_expr_to_block (&block
, tmp
);
5132 /* We need the vptr of CLASS objects to be initialized. */
5133 e
= gfc_copy_expr (al
->expr
);
5134 if (e
->ts
.type
== BT_CLASS
)
5136 gfc_expr
*lhs
, *rhs
;
5138 gfc_ref
*ref
, *class_ref
, *tail
;
5140 /* Find the last class reference. */
5142 for (ref
= e
->ref
; ref
; ref
= ref
->next
)
5144 if (ref
->type
== REF_COMPONENT
5145 && ref
->u
.c
.component
->ts
.type
== BT_CLASS
)
5148 if (ref
->next
== NULL
)
5152 /* Remove and store all subsequent references after the
5156 tail
= class_ref
->next
;
5157 class_ref
->next
= NULL
;
5165 lhs
= gfc_expr_to_initialize (e
);
5166 gfc_add_vptr_component (lhs
);
5168 /* Remove the _vptr component and restore the original tail
5172 gfc_free_ref_list (class_ref
->next
);
5173 class_ref
->next
= tail
;
5177 gfc_free_ref_list (e
->ref
);
5181 if (class_expr
!= NULL_TREE
)
5183 /* Polymorphic SOURCE: VPTR must be determined at run time. */
5184 gfc_init_se (&lse
, NULL
);
5185 lse
.want_pointer
= 1;
5186 gfc_conv_expr (&lse
, lhs
);
5187 tmp
= gfc_class_vptr_get (class_expr
);
5188 gfc_add_modify (&block
, lse
.expr
,
5189 fold_convert (TREE_TYPE (lse
.expr
), tmp
));
5191 else if (code
->expr3
&& code
->expr3
->ts
.type
== BT_CLASS
)
5193 /* Polymorphic SOURCE: VPTR must be determined at run time. */
5194 rhs
= gfc_copy_expr (code
->expr3
);
5195 gfc_add_vptr_component (rhs
);
5196 tmp
= gfc_trans_pointer_assignment (lhs
, rhs
);
5197 gfc_add_expr_to_block (&block
, tmp
);
5198 gfc_free_expr (rhs
);
5199 rhs
= gfc_expr_to_initialize (e
);
5203 /* VPTR is fixed at compile time. */
5207 ts
= &code
->expr3
->ts
;
5208 else if (e
->ts
.type
== BT_DERIVED
)
5210 else if (code
->ext
.alloc
.ts
.type
== BT_DERIVED
|| UNLIMITED_POLY (al
->expr
))
5211 ts
= &code
->ext
.alloc
.ts
;
5212 else if (e
->ts
.type
== BT_CLASS
)
5213 ts
= &CLASS_DATA (e
)->ts
;
5217 if (ts
->type
== BT_DERIVED
|| UNLIMITED_POLY (e
))
5219 vtab
= gfc_find_vtab (ts
);
5221 gfc_init_se (&lse
, NULL
);
5222 lse
.want_pointer
= 1;
5223 gfc_conv_expr (&lse
, lhs
);
5224 tmp
= gfc_build_addr_expr (NULL_TREE
,
5225 gfc_get_symbol_decl (vtab
));
5226 gfc_add_modify (&block
, lse
.expr
,
5227 fold_convert (TREE_TYPE (lse
.expr
), tmp
));
5230 gfc_free_expr (lhs
);
5235 if (code
->expr3
&& !code
->expr3
->mold
)
5237 /* Initialization via SOURCE block
5238 (or static default initializer). */
5239 gfc_expr
*rhs
= gfc_copy_expr (code
->expr3
);
5240 if (class_expr
!= NULL_TREE
)
5243 to
= TREE_OPERAND (se
.expr
, 0);
5245 tmp
= gfc_copy_class_to_class (class_expr
, to
, nelems
);
5247 else if (al
->expr
->ts
.type
== BT_CLASS
)
5249 gfc_actual_arglist
*actual
;
5252 gfc_ref
*ref
, *dataref
;
5254 /* Do a polymorphic deep copy. */
5255 actual
= gfc_get_actual_arglist ();
5256 actual
->expr
= gfc_copy_expr (rhs
);
5257 if (rhs
->ts
.type
== BT_CLASS
)
5258 gfc_add_data_component (actual
->expr
);
5259 actual
->next
= gfc_get_actual_arglist ();
5260 actual
->next
->expr
= gfc_copy_expr (al
->expr
);
5261 actual
->next
->expr
->ts
.type
= BT_CLASS
;
5262 gfc_add_data_component (actual
->next
->expr
);
5265 /* Make sure we go up through the reference chain to
5266 the _data reference, where the arrayspec is found. */
5267 for (ref
= actual
->next
->expr
->ref
; ref
; ref
= ref
->next
)
5268 if (ref
->type
== REF_COMPONENT
5269 && strcmp (ref
->u
.c
.component
->name
, "_data") == 0)
5272 if (dataref
&& dataref
->u
.c
.component
->as
)
5276 gfc_ref
*ref
= dataref
->next
;
5277 ref
->u
.ar
.type
= AR_SECTION
;
5278 /* We have to set up the array reference to give ranges
5279 in all dimensions and ensure that the end and stride
5280 are set so that the copy can be scalarized. */
5282 for (; dim
< dataref
->u
.c
.component
->as
->rank
; dim
++)
5284 ref
->u
.ar
.dimen_type
[dim
] = DIMEN_RANGE
;
5285 if (ref
->u
.ar
.end
[dim
] == NULL
)
5287 ref
->u
.ar
.end
[dim
] = ref
->u
.ar
.start
[dim
];
5288 temp
= gfc_get_int_expr (gfc_default_integer_kind
,
5289 &al
->expr
->where
, 1);
5290 ref
->u
.ar
.start
[dim
] = temp
;
5292 temp
= gfc_subtract (gfc_copy_expr (ref
->u
.ar
.end
[dim
]),
5293 gfc_copy_expr (ref
->u
.ar
.start
[dim
]));
5294 temp
= gfc_add (gfc_get_int_expr (gfc_default_integer_kind
,
5295 &al
->expr
->where
, 1),
5299 if (rhs
->ts
.type
== BT_CLASS
)
5301 ppc
= gfc_copy_expr (rhs
);
5302 gfc_add_vptr_component (ppc
);
5305 ppc
= gfc_lval_expr_from_sym (gfc_find_vtab (&rhs
->ts
));
5306 gfc_add_component_ref (ppc
, "_copy");
5308 ppc_code
= gfc_get_code (EXEC_CALL
);
5309 ppc_code
->resolved_sym
= ppc
->symtree
->n
.sym
;
5310 /* Although '_copy' is set to be elemental in class.c, it is
5311 not staying that way. Find out why, sometime.... */
5312 ppc_code
->resolved_sym
->attr
.elemental
= 1;
5313 ppc_code
->ext
.actual
= actual
;
5314 ppc_code
->expr1
= ppc
;
5315 /* Since '_copy' is elemental, the scalarizer will take care
5316 of arrays in gfc_trans_call. */
5317 tmp
= gfc_trans_call (ppc_code
, true, NULL
, NULL
, false);
5318 gfc_free_statements (ppc_code
);
5320 else if (expr3
!= NULL_TREE
)
5322 tmp
= build_fold_indirect_ref_loc (input_location
, se
.expr
);
5323 gfc_trans_string_copy (&block
, slen3
, tmp
, code
->expr3
->ts
.kind
,
5324 slen3
, expr3
, code
->expr3
->ts
.kind
);
5329 /* Switch off automatic reallocation since we have just done
5331 int realloc_lhs
= flag_realloc_lhs
;
5332 flag_realloc_lhs
= 0;
5333 tmp
= gfc_trans_assignment (gfc_expr_to_initialize (expr
),
5335 flag_realloc_lhs
= realloc_lhs
;
5337 gfc_free_expr (rhs
);
5338 gfc_add_expr_to_block (&block
, tmp
);
5340 else if (code
->expr3
&& code
->expr3
->mold
5341 && code
->expr3
->ts
.type
== BT_CLASS
)
5343 /* Since the _vptr has already been assigned to the allocate
5344 object, we can use gfc_copy_class_to_class in its
5345 initialization mode. */
5346 tmp
= TREE_OPERAND (se
.expr
, 0);
5347 tmp
= gfc_copy_class_to_class (NULL_TREE
, tmp
, nelems
);
5348 gfc_add_expr_to_block (&block
, tmp
);
5351 gfc_free_expr (expr
);
5357 tmp
= build1_v (LABEL_EXPR
, label_errmsg
);
5358 gfc_add_expr_to_block (&block
, tmp
);
5361 /* ERRMSG - only useful if STAT is present. */
5362 if (code
->expr1
&& code
->expr2
)
5364 const char *msg
= "Attempt to allocate an allocated object";
5365 tree slen
, dlen
, errmsg_str
;
5366 stmtblock_t errmsg_block
;
5368 gfc_init_block (&errmsg_block
);
5370 errmsg_str
= gfc_create_var (pchar_type_node
, "ERRMSG");
5371 gfc_add_modify (&errmsg_block
, errmsg_str
,
5372 gfc_build_addr_expr (pchar_type_node
,
5373 gfc_build_localized_cstring_const (msg
)));
5375 slen
= build_int_cst (gfc_charlen_type_node
, ((int) strlen (msg
)));
5376 dlen
= gfc_get_expr_charlen (code
->expr2
);
5377 slen
= fold_build2_loc (input_location
, MIN_EXPR
, TREE_TYPE (slen
), dlen
,
5380 gfc_trans_string_copy (&errmsg_block
, dlen
, errmsg
, code
->expr2
->ts
.kind
,
5381 slen
, errmsg_str
, gfc_default_character_kind
);
5382 dlen
= gfc_finish_block (&errmsg_block
);
5384 tmp
= fold_build2_loc (input_location
, NE_EXPR
, boolean_type_node
, stat
,
5385 build_int_cst (TREE_TYPE (stat
), 0));
5387 tmp
= build3_v (COND_EXPR
, tmp
, dlen
, build_empty_stmt (input_location
));
5389 gfc_add_expr_to_block (&block
, tmp
);
5395 if (TREE_USED (label_finish
))
5397 tmp
= build1_v (LABEL_EXPR
, label_finish
);
5398 gfc_add_expr_to_block (&block
, tmp
);
5401 gfc_init_se (&se
, NULL
);
5402 gfc_conv_expr_lhs (&se
, code
->expr1
);
5403 tmp
= convert (TREE_TYPE (se
.expr
), stat
);
5404 gfc_add_modify (&block
, se
.expr
, tmp
);
5407 gfc_add_block_to_block (&block
, &se
.post
);
5408 gfc_add_block_to_block (&block
, &post
);
5410 return gfc_finish_block (&block
);
5414 /* Translate a DEALLOCATE statement. */
5417 gfc_trans_deallocate (gfc_code
*code
)
5421 tree apstat
, pstat
, stat
, errmsg
, errlen
, tmp
;
5422 tree label_finish
, label_errmsg
;
5425 pstat
= apstat
= stat
= errmsg
= errlen
= tmp
= NULL_TREE
;
5426 label_finish
= label_errmsg
= NULL_TREE
;
5428 gfc_start_block (&block
);
5430 /* Count the number of failed deallocations. If deallocate() was
5431 called with STAT= , then set STAT to the count. If deallocate
5432 was called with ERRMSG, then set ERRMG to a string. */
5435 tree gfc_int4_type_node
= gfc_get_int_type (4);
5437 stat
= gfc_create_var (gfc_int4_type_node
, "stat");
5438 pstat
= gfc_build_addr_expr (NULL_TREE
, stat
);
5440 /* GOTO destinations. */
5441 label_errmsg
= gfc_build_label_decl (NULL_TREE
);
5442 label_finish
= gfc_build_label_decl (NULL_TREE
);
5443 TREE_USED (label_finish
) = 0;
5446 /* Set ERRMSG - only needed if STAT is available. */
5447 if (code
->expr1
&& code
->expr2
)
5449 gfc_init_se (&se
, NULL
);
5450 se
.want_pointer
= 1;
5451 gfc_conv_expr_lhs (&se
, code
->expr2
);
5453 errlen
= se
.string_length
;
5456 for (al
= code
->ext
.alloc
.list
; al
!= NULL
; al
= al
->next
)
5458 gfc_expr
*expr
= gfc_copy_expr (al
->expr
);
5459 gcc_assert (expr
->expr_type
== EXPR_VARIABLE
);
5461 if (expr
->ts
.type
== BT_CLASS
)
5462 gfc_add_data_component (expr
);
5464 gfc_init_se (&se
, NULL
);
5465 gfc_start_block (&se
.pre
);
5467 se
.want_pointer
= 1;
5468 se
.descriptor_only
= 1;
5469 gfc_conv_expr (&se
, expr
);
5471 if (expr
->rank
|| gfc_is_coarray (expr
))
5473 if (expr
->ts
.type
== BT_DERIVED
&& expr
->ts
.u
.derived
->attr
.alloc_comp
5474 && !gfc_is_finalizable (expr
->ts
.u
.derived
, NULL
))
5477 gfc_ref
*last
= NULL
;
5478 for (ref
= expr
->ref
; ref
; ref
= ref
->next
)
5479 if (ref
->type
== REF_COMPONENT
)
5482 /* Do not deallocate the components of a derived type
5483 ultimate pointer component. */
5484 if (!(last
&& last
->u
.c
.component
->attr
.pointer
)
5485 && !(!last
&& expr
->symtree
->n
.sym
->attr
.pointer
))
5487 tmp
= gfc_deallocate_alloc_comp (expr
->ts
.u
.derived
, se
.expr
,
5489 gfc_add_expr_to_block (&se
.pre
, tmp
);
5492 tmp
= gfc_array_deallocate (se
.expr
, pstat
, errmsg
, errlen
,
5493 label_finish
, expr
);
5494 gfc_add_expr_to_block (&se
.pre
, tmp
);
5495 if (al
->expr
->ts
.type
== BT_CLASS
)
5496 gfc_reset_vptr (&se
.pre
, al
->expr
);
5500 tmp
= gfc_deallocate_scalar_with_status (se
.expr
, pstat
, false,
5501 al
->expr
, al
->expr
->ts
);
5502 gfc_add_expr_to_block (&se
.pre
, tmp
);
5504 /* Set to zero after deallocation. */
5505 tmp
= fold_build2_loc (input_location
, MODIFY_EXPR
, void_type_node
,
5507 build_int_cst (TREE_TYPE (se
.expr
), 0));
5508 gfc_add_expr_to_block (&se
.pre
, tmp
);
5510 if (al
->expr
->ts
.type
== BT_CLASS
)
5511 gfc_reset_vptr (&se
.pre
, al
->expr
);
5518 cond
= fold_build2_loc (input_location
, NE_EXPR
, boolean_type_node
, stat
,
5519 build_int_cst (TREE_TYPE (stat
), 0));
5520 tmp
= fold_build3_loc (input_location
, COND_EXPR
, void_type_node
,
5521 gfc_unlikely (cond
, PRED_FORTRAN_FAIL_ALLOC
),
5522 build1_v (GOTO_EXPR
, label_errmsg
),
5523 build_empty_stmt (input_location
));
5524 gfc_add_expr_to_block (&se
.pre
, tmp
);
5527 tmp
= gfc_finish_block (&se
.pre
);
5528 gfc_add_expr_to_block (&block
, tmp
);
5529 gfc_free_expr (expr
);
5534 tmp
= build1_v (LABEL_EXPR
, label_errmsg
);
5535 gfc_add_expr_to_block (&block
, tmp
);
5538 /* Set ERRMSG - only needed if STAT is available. */
5539 if (code
->expr1
&& code
->expr2
)
5541 const char *msg
= "Attempt to deallocate an unallocated object";
5542 stmtblock_t errmsg_block
;
5543 tree errmsg_str
, slen
, dlen
, cond
;
5545 gfc_init_block (&errmsg_block
);
5547 errmsg_str
= gfc_create_var (pchar_type_node
, "ERRMSG");
5548 gfc_add_modify (&errmsg_block
, errmsg_str
,
5549 gfc_build_addr_expr (pchar_type_node
,
5550 gfc_build_localized_cstring_const (msg
)));
5551 slen
= build_int_cst (gfc_charlen_type_node
, ((int) strlen (msg
)));
5552 dlen
= gfc_get_expr_charlen (code
->expr2
);
5554 gfc_trans_string_copy (&errmsg_block
, dlen
, errmsg
, code
->expr2
->ts
.kind
,
5555 slen
, errmsg_str
, gfc_default_character_kind
);
5556 tmp
= gfc_finish_block (&errmsg_block
);
5558 cond
= fold_build2_loc (input_location
, NE_EXPR
, boolean_type_node
, stat
,
5559 build_int_cst (TREE_TYPE (stat
), 0));
5560 tmp
= fold_build3_loc (input_location
, COND_EXPR
, void_type_node
,
5561 gfc_unlikely (cond
, PRED_FORTRAN_FAIL_ALLOC
), tmp
,
5562 build_empty_stmt (input_location
));
5564 gfc_add_expr_to_block (&block
, tmp
);
5567 if (code
->expr1
&& TREE_USED (label_finish
))
5569 tmp
= build1_v (LABEL_EXPR
, label_finish
);
5570 gfc_add_expr_to_block (&block
, tmp
);
5576 gfc_init_se (&se
, NULL
);
5577 gfc_conv_expr_lhs (&se
, code
->expr1
);
5578 tmp
= convert (TREE_TYPE (se
.expr
), stat
);
5579 gfc_add_modify (&block
, se
.expr
, tmp
);
5582 return gfc_finish_block (&block
);
5585 #include "gt-fortran-trans-stmt.h"