1 /* Array translation routines
2 Copyright (C) 2002-2014 Free Software Foundation, Inc.
3 Contributed by Paul Brook <paul@nowt.org>
4 and Steven Bosscher <s.bosscher@student.tudelft.nl>
5
6 This file is part of GCC.
7
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 3, or (at your option) any later
11 version.
12
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
16 for more details.
17
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
21
22 /* trans-array.c-- Various array related code, including scalarization,
23 allocation, initialization and other support routines. */
24
25 /* How the scalarizer works.
26 In gfortran, array expressions use the same core routines as scalar
27 expressions.
28 First, a Scalarization State (SS) chain is built. This is done by walking
29 the expression tree, and building a linear list of the terms in the
30 expression. As the tree is walked, scalar subexpressions are translated.
31
32 The scalarization parameters are stored in a gfc_loopinfo structure.
33    First the start and stride of each term are calculated by
34 gfc_conv_ss_startstride. During this process the expressions for the array
35 descriptors and data pointers are also translated.
36
37 If the expression is an assignment, we must then resolve any dependencies.
38 In Fortran all the rhs values of an assignment must be evaluated before
39 any assignments take place. This can require a temporary array to store the
40 values. We also require a temporary when we are passing array expressions
41 or vector subscripts as procedure parameters.
42
43 Array sections are passed without copying to a temporary. These use the
44 scalarizer to determine the shape of the section. The flag
45 loop->array_parameter tells the scalarizer that the actual values and loop
46 variables will not be required.
47
48 The function gfc_conv_loop_setup generates the scalarization setup code.
49 It determines the range of the scalarizing loop variables. If a temporary
50 is required, this is created and initialized. Code for scalar expressions
51 taken outside the loop is also generated at this time. Next the offset and
52 scaling required to translate from loop variables to array indices for each
53 term is calculated.
54
55 A call to gfc_start_scalarized_body marks the start of the scalarized
56 expression. This creates a scope and declares the loop variables. Before
57    calling this, gfc_mark_ss_chain_used must be used to indicate which terms
58 will be used inside this loop.
59
60 The scalar gfc_conv_* functions are then used to build the main body of the
61 scalarization loop. Scalarization loop variables and precalculated scalar
62 values are automatically substituted. Note that gfc_advance_se_ss_chain
63 must be used, rather than changing the se->ss directly.
64
65    For assignment expressions requiring a temporary, two sub-loops are
66 generated. The first stores the result of the expression in the temporary,
67 the second copies it to the result. A call to
68 gfc_trans_scalarized_loop_boundary marks the end of the main loop code and
69 the start of the copying loop. The temporary may be less than full rank.
70
71 Finally gfc_trans_scalarizing_loops is called to generate the implicit do
72 loops. The loops are added to the pre chain of the loopinfo. The post
73 chain may still contain cleanup code.
74
75    After the loop code has been added into its parent scope, gfc_cleanup_loop
76 is called to free all the SS allocated by the scalarizer. */
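
/* Illustrative sketch only: a typical caller that scalarizes a single array
   expression EXPR goes through roughly the following sequence (compare
   gfc_trans_array_constructor_subarray later in this file):

     gfc_se se;
     gfc_ss *ss;
     gfc_loopinfo loop;
     stmtblock_t body;

     gfc_init_se (&se, NULL);
     ss = gfc_walk_expr (expr);                   build the SS chain
     gfc_init_loopinfo (&loop);
     gfc_add_ss_to_loop (&loop, ss);              associate it with a loop
     gfc_conv_ss_startstride (&loop);             starts, strides, descriptors
     gfc_conv_loop_setup (&loop, &expr->where);   loop bounds, temporaries
     gfc_mark_ss_chain_used (ss, 1);              used in the main loop body
     gfc_start_scalarized_body (&loop, &body);
     gfc_copy_loopinfo_to_se (&se, &loop);
     se.ss = ss;
     ... build the loop body in BODY using the scalar gfc_conv_* routines ...
     gfc_trans_scalarizing_loops (&loop, &body);
     gfc_add_block_to_block (&loop.pre, &loop.post);
     ... emit gfc_finish_block (&loop.pre) into the surrounding block ...
     gfc_cleanup_loop (&loop);  */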
77
78 #include "config.h"
79 #include "system.h"
80 #include "coretypes.h"
81 #include "tree.h"
82 #include "gimple-expr.h"
83 #include "diagnostic-core.h" /* For internal_error/fatal_error. */
84 #include "flags.h"
85 #include "gfortran.h"
86 #include "constructor.h"
87 #include "trans.h"
88 #include "trans-stmt.h"
89 #include "trans-types.h"
90 #include "trans-array.h"
91 #include "trans-const.h"
92 #include "dependency.h"
93 #include "wide-int.h"
94
95 static bool gfc_get_array_constructor_size (mpz_t *, gfc_constructor_base);
96
97 /* The contents of this structure aren't actually used, just the address. */
98 static gfc_ss gfc_ss_terminator_var;
99 gfc_ss * const gfc_ss_terminator = &gfc_ss_terminator_var;
100
101
102 static tree
103 gfc_array_dataptr_type (tree desc)
104 {
105 return (GFC_TYPE_ARRAY_DATAPTR_TYPE (TREE_TYPE (desc)));
106 }
107
108
109 /* Build expressions to access the members of an array descriptor.
110 It's surprisingly easy to mess up here, so never access
111 an array descriptor by "brute force", always use these
112 functions. This also avoids problems if we change the format
113 of an array descriptor.
114
115 To understand these magic numbers, look at the comments
116 before gfc_build_array_type() in trans-types.c.
117
118 The code within these defines should be the only code which knows the format
119 of an array descriptor.
120
121    Any code that just needs to obtain the bounds of an array should use
122    gfc_conv_array_* rather than the following functions, as these will return
123    known constant values, and work with arrays which do not have descriptors.
124
125 Don't forget to #undef these! */
126
127 #define DATA_FIELD 0
128 #define OFFSET_FIELD 1
129 #define DTYPE_FIELD 2
130 #define DIMENSION_FIELD 3
131 #define CAF_TOKEN_FIELD 4
132
133 #define STRIDE_SUBFIELD 0
134 #define LBOUND_SUBFIELD 1
135 #define UBOUND_SUBFIELD 2
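
/* For orientation only, these indices correspond to a descriptor laid out
   roughly as below; the authoritative layout is built by
   gfc_build_array_type/gfc_get_array_type_bounds in trans-types.c.

     data         (DATA_FIELD, pointer to the first element; its declared
                   type is not the element pointer type, see below)
     offset       (OFFSET_FIELD, index offset applied to subscripts)
     dtype        (DTYPE_FIELD, the rank is extracted from it via
                   GFC_DTYPE_RANK_MASK)
     dim[rank]    (DIMENSION_FIELD, one record per dimension holding
                     stride  (STRIDE_SUBFIELD)
                     lbound  (LBOUND_SUBFIELD)
                     ubound  (UBOUND_SUBFIELD))
     caf_token    (CAF_TOKEN_FIELD, only with -fcoarray=lib)  */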
136
137 /* This provides READ-ONLY access to the data field. The field itself
138 doesn't have the proper type. */
139
140 tree
141 gfc_conv_descriptor_data_get (tree desc)
142 {
143 tree field, type, t;
144
145 type = TREE_TYPE (desc);
146 gcc_assert (GFC_DESCRIPTOR_TYPE_P (type));
147
148 field = TYPE_FIELDS (type);
149 gcc_assert (DATA_FIELD == 0);
150
151 t = fold_build3_loc (input_location, COMPONENT_REF, TREE_TYPE (field), desc,
152 field, NULL_TREE);
153 t = fold_convert (GFC_TYPE_ARRAY_DATAPTR_TYPE (type), t);
154
155 return t;
156 }
157
158 /* This provides WRITE access to the data field.
159 
160    This function gets called through the following macro:
161       gfc_conv_descriptor_data_set.  */
165
166 void
167 gfc_conv_descriptor_data_set (stmtblock_t *block, tree desc, tree value)
168 {
169 tree field, type, t;
170
171 type = TREE_TYPE (desc);
172 gcc_assert (GFC_DESCRIPTOR_TYPE_P (type));
173
174 field = TYPE_FIELDS (type);
175 gcc_assert (DATA_FIELD == 0);
176
177 t = fold_build3_loc (input_location, COMPONENT_REF, TREE_TYPE (field), desc,
178 field, NULL_TREE);
179 gfc_add_modify (block, t, fold_convert (TREE_TYPE (field), value));
180 }
181
182
183 /* This provides address access to the data field. This should only be
184 used by array allocation, passing this on to the runtime. */
185
186 tree
187 gfc_conv_descriptor_data_addr (tree desc)
188 {
189 tree field, type, t;
190
191 type = TREE_TYPE (desc);
192 gcc_assert (GFC_DESCRIPTOR_TYPE_P (type));
193
194 field = TYPE_FIELDS (type);
195 gcc_assert (DATA_FIELD == 0);
196
197 t = fold_build3_loc (input_location, COMPONENT_REF, TREE_TYPE (field), desc,
198 field, NULL_TREE);
199 return gfc_build_addr_expr (NULL_TREE, t);
200 }
201
202 static tree
203 gfc_conv_descriptor_offset (tree desc)
204 {
205 tree type;
206 tree field;
207
208 type = TREE_TYPE (desc);
209 gcc_assert (GFC_DESCRIPTOR_TYPE_P (type));
210
211 field = gfc_advance_chain (TYPE_FIELDS (type), OFFSET_FIELD);
212 gcc_assert (field != NULL_TREE && TREE_TYPE (field) == gfc_array_index_type);
213
214 return fold_build3_loc (input_location, COMPONENT_REF, TREE_TYPE (field),
215 desc, field, NULL_TREE);
216 }
217
218 tree
219 gfc_conv_descriptor_offset_get (tree desc)
220 {
221 return gfc_conv_descriptor_offset (desc);
222 }
223
224 void
225 gfc_conv_descriptor_offset_set (stmtblock_t *block, tree desc,
226 tree value)
227 {
228 tree t = gfc_conv_descriptor_offset (desc);
229 gfc_add_modify (block, t, fold_convert (TREE_TYPE (t), value));
230 }
231
232
233 tree
234 gfc_conv_descriptor_dtype (tree desc)
235 {
236 tree field;
237 tree type;
238
239 type = TREE_TYPE (desc);
240 gcc_assert (GFC_DESCRIPTOR_TYPE_P (type));
241
242 field = gfc_advance_chain (TYPE_FIELDS (type), DTYPE_FIELD);
243 gcc_assert (field != NULL_TREE && TREE_TYPE (field) == gfc_array_index_type);
244
245 return fold_build3_loc (input_location, COMPONENT_REF, TREE_TYPE (field),
246 desc, field, NULL_TREE);
247 }
248
249
250 tree
251 gfc_conv_descriptor_rank (tree desc)
252 {
253 tree tmp;
254 tree dtype;
255
256 dtype = gfc_conv_descriptor_dtype (desc);
257 tmp = build_int_cst (TREE_TYPE (dtype), GFC_DTYPE_RANK_MASK);
258 tmp = fold_build2_loc (input_location, BIT_AND_EXPR, TREE_TYPE (dtype),
259 dtype, tmp);
260 return fold_convert (gfc_get_int_type (gfc_default_integer_kind), tmp);
261 }
262
263
264 tree
265 gfc_get_descriptor_dimension (tree desc)
266 {
267 tree type, field;
268
269 type = TREE_TYPE (desc);
270 gcc_assert (GFC_DESCRIPTOR_TYPE_P (type));
271
272 field = gfc_advance_chain (TYPE_FIELDS (type), DIMENSION_FIELD);
273 gcc_assert (field != NULL_TREE
274 && TREE_CODE (TREE_TYPE (field)) == ARRAY_TYPE
275 && TREE_CODE (TREE_TYPE (TREE_TYPE (field))) == RECORD_TYPE);
276
277 return fold_build3_loc (input_location, COMPONENT_REF, TREE_TYPE (field),
278 desc, field, NULL_TREE);
279 }
280
281
282 static tree
283 gfc_conv_descriptor_dimension (tree desc, tree dim)
284 {
285 tree tmp;
286
287 tmp = gfc_get_descriptor_dimension (desc);
288
289 return gfc_build_array_ref (tmp, dim, NULL);
290 }
291
292
293 tree
294 gfc_conv_descriptor_token (tree desc)
295 {
296 tree type;
297 tree field;
298
299 type = TREE_TYPE (desc);
300 gcc_assert (GFC_DESCRIPTOR_TYPE_P (type));
301 gcc_assert (GFC_TYPE_ARRAY_AKIND (type) == GFC_ARRAY_ALLOCATABLE);
302 gcc_assert (gfc_option.coarray == GFC_FCOARRAY_LIB);
303 field = gfc_advance_chain (TYPE_FIELDS (type), CAF_TOKEN_FIELD);
304
305 /* Should be a restricted pointer - except in the finalization wrapper. */
306 gcc_assert (field != NULL_TREE
307 && (TREE_TYPE (field) == prvoid_type_node
308 || TREE_TYPE (field) == pvoid_type_node));
309
310 return fold_build3_loc (input_location, COMPONENT_REF, TREE_TYPE (field),
311 desc, field, NULL_TREE);
312 }
313
314
315 static tree
316 gfc_conv_descriptor_stride (tree desc, tree dim)
317 {
318 tree tmp;
319 tree field;
320
321 tmp = gfc_conv_descriptor_dimension (desc, dim);
322 field = TYPE_FIELDS (TREE_TYPE (tmp));
323 field = gfc_advance_chain (field, STRIDE_SUBFIELD);
324 gcc_assert (field != NULL_TREE && TREE_TYPE (field) == gfc_array_index_type);
325
326 tmp = fold_build3_loc (input_location, COMPONENT_REF, TREE_TYPE (field),
327 tmp, field, NULL_TREE);
328 return tmp;
329 }
330
331 tree
332 gfc_conv_descriptor_stride_get (tree desc, tree dim)
333 {
334 tree type = TREE_TYPE (desc);
335 gcc_assert (GFC_DESCRIPTOR_TYPE_P (type));
336 if (integer_zerop (dim)
337 && (GFC_TYPE_ARRAY_AKIND (type) == GFC_ARRAY_ALLOCATABLE
338 	  || GFC_TYPE_ARRAY_AKIND (type) == GFC_ARRAY_ASSUMED_SHAPE_CONT
339 	  || GFC_TYPE_ARRAY_AKIND (type) == GFC_ARRAY_ASSUMED_RANK_CONT
340 	  || GFC_TYPE_ARRAY_AKIND (type) == GFC_ARRAY_POINTER_CONT))
341 return gfc_index_one_node;
342
343 return gfc_conv_descriptor_stride (desc, dim);
344 }
345
346 void
347 gfc_conv_descriptor_stride_set (stmtblock_t *block, tree desc,
348 tree dim, tree value)
349 {
350 tree t = gfc_conv_descriptor_stride (desc, dim);
351 gfc_add_modify (block, t, fold_convert (TREE_TYPE (t), value));
352 }
353
354 static tree
355 gfc_conv_descriptor_lbound (tree desc, tree dim)
356 {
357 tree tmp;
358 tree field;
359
360 tmp = gfc_conv_descriptor_dimension (desc, dim);
361 field = TYPE_FIELDS (TREE_TYPE (tmp));
362 field = gfc_advance_chain (field, LBOUND_SUBFIELD);
363 gcc_assert (field != NULL_TREE && TREE_TYPE (field) == gfc_array_index_type);
364
365 tmp = fold_build3_loc (input_location, COMPONENT_REF, TREE_TYPE (field),
366 tmp, field, NULL_TREE);
367 return tmp;
368 }
369
370 tree
371 gfc_conv_descriptor_lbound_get (tree desc, tree dim)
372 {
373 return gfc_conv_descriptor_lbound (desc, dim);
374 }
375
376 void
377 gfc_conv_descriptor_lbound_set (stmtblock_t *block, tree desc,
378 tree dim, tree value)
379 {
380 tree t = gfc_conv_descriptor_lbound (desc, dim);
381 gfc_add_modify (block, t, fold_convert (TREE_TYPE (t), value));
382 }
383
384 static tree
385 gfc_conv_descriptor_ubound (tree desc, tree dim)
386 {
387 tree tmp;
388 tree field;
389
390 tmp = gfc_conv_descriptor_dimension (desc, dim);
391 field = TYPE_FIELDS (TREE_TYPE (tmp));
392 field = gfc_advance_chain (field, UBOUND_SUBFIELD);
393 gcc_assert (field != NULL_TREE && TREE_TYPE (field) == gfc_array_index_type);
394
395 tmp = fold_build3_loc (input_location, COMPONENT_REF, TREE_TYPE (field),
396 tmp, field, NULL_TREE);
397 return tmp;
398 }
399
400 tree
401 gfc_conv_descriptor_ubound_get (tree desc, tree dim)
402 {
403 return gfc_conv_descriptor_ubound (desc, dim);
404 }
405
406 void
407 gfc_conv_descriptor_ubound_set (stmtblock_t *block, tree desc,
408 tree dim, tree value)
409 {
410 tree t = gfc_conv_descriptor_ubound (desc, dim);
411 gfc_add_modify (block, t, fold_convert (TREE_TYPE (t), value));
412 }
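
/* Usage sketch (illustration only): code that needs the extent of dimension
   DIM of descriptor DESC builds it from the accessors above rather than
   poking at the fields directly, e.g.

     tmp = fold_build2_loc (input_location, MINUS_EXPR, gfc_array_index_type,
	     gfc_conv_descriptor_ubound_get (desc, gfc_rank_cst[dim]),
	     gfc_conv_descriptor_lbound_get (desc, gfc_rank_cst[dim]));
     extent = fold_build2_loc (input_location, PLUS_EXPR, gfc_array_index_type,
	     tmp, gfc_index_one_node);

   compare the callee-allocated bounds computation in
   gfc_trans_create_temp_array below, which uses the same getters.  */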
413
414 /* Build a null array descriptor constructor. */
415
416 tree
417 gfc_build_null_descriptor (tree type)
418 {
419 tree field;
420 tree tmp;
421
422 gcc_assert (GFC_DESCRIPTOR_TYPE_P (type));
423 gcc_assert (DATA_FIELD == 0);
424 field = TYPE_FIELDS (type);
425
426 /* Set a NULL data pointer. */
427 tmp = build_constructor_single (type, field, null_pointer_node);
428 TREE_CONSTANT (tmp) = 1;
429 /* All other fields are ignored. */
430
431 return tmp;
432 }
433
434
435 /* Modify a descriptor such that the lbound of a given dimension is the value
436 specified. This also updates ubound and offset accordingly. */
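
/* In other words, for a requested NEW_LBOUND:

     diff    = new_lbound - lbound
     ubound' = ubound + diff
     offset' = offset - diff * stride
     lbound' = new_lbound

   so the address computed for any given element is unchanged.  */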
437
438 void
439 gfc_conv_shift_descriptor_lbound (stmtblock_t* block, tree desc,
440 int dim, tree new_lbound)
441 {
442 tree offs, ubound, lbound, stride;
443 tree diff, offs_diff;
444
445 new_lbound = fold_convert (gfc_array_index_type, new_lbound);
446
447 offs = gfc_conv_descriptor_offset_get (desc);
448 lbound = gfc_conv_descriptor_lbound_get (desc, gfc_rank_cst[dim]);
449 ubound = gfc_conv_descriptor_ubound_get (desc, gfc_rank_cst[dim]);
450 stride = gfc_conv_descriptor_stride_get (desc, gfc_rank_cst[dim]);
451
452 /* Get difference (new - old) by which to shift stuff. */
453 diff = fold_build2_loc (input_location, MINUS_EXPR, gfc_array_index_type,
454 new_lbound, lbound);
455
456 /* Shift ubound and offset accordingly. This has to be done before
457 updating the lbound, as they depend on the lbound expression! */
458 ubound = fold_build2_loc (input_location, PLUS_EXPR, gfc_array_index_type,
459 ubound, diff);
460 gfc_conv_descriptor_ubound_set (block, desc, gfc_rank_cst[dim], ubound);
461 offs_diff = fold_build2_loc (input_location, MULT_EXPR, gfc_array_index_type,
462 diff, stride);
463 offs = fold_build2_loc (input_location, MINUS_EXPR, gfc_array_index_type,
464 offs, offs_diff);
465 gfc_conv_descriptor_offset_set (block, desc, offs);
466
467 /* Finally set lbound to value we want. */
468 gfc_conv_descriptor_lbound_set (block, desc, gfc_rank_cst[dim], new_lbound);
469 }
470
471
472 /* Cleanup those #defines. */
473
474 #undef DATA_FIELD
475 #undef OFFSET_FIELD
476 #undef DTYPE_FIELD
477 #undef DIMENSION_FIELD
478 #undef CAF_TOKEN_FIELD
479 #undef STRIDE_SUBFIELD
480 #undef LBOUND_SUBFIELD
481 #undef UBOUND_SUBFIELD
482
483
484 /* Mark a SS chain as used. Flags specifies in which loops the SS is used.
485 flags & 1 = Main loop body.
486 flags & 2 = temp copy loop. */
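
/* For example, a chain only referenced in the main body is marked with 1
   (as gfc_trans_array_constructor_subarray does below), while a chain used
   both in the main loop and in the temporary-copying loop would be marked
   with 3 (1 | 2).  */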
487
488 void
489 gfc_mark_ss_chain_used (gfc_ss * ss, unsigned flags)
490 {
491 for (; ss != gfc_ss_terminator; ss = ss->next)
492 ss->info->useflags = flags;
493 }
494
495
496 /* Free a gfc_ss chain. */
497
498 void
499 gfc_free_ss_chain (gfc_ss * ss)
500 {
501 gfc_ss *next;
502
503 while (ss != gfc_ss_terminator)
504 {
505 gcc_assert (ss != NULL);
506 next = ss->next;
507 gfc_free_ss (ss);
508 ss = next;
509 }
510 }
511
512
513 static void
514 free_ss_info (gfc_ss_info *ss_info)
515 {
516 int n;
517
518 ss_info->refcount--;
519 if (ss_info->refcount > 0)
520 return;
521
522 gcc_assert (ss_info->refcount == 0);
523
524 switch (ss_info->type)
525 {
526 case GFC_SS_SECTION:
527 for (n = 0; n < GFC_MAX_DIMENSIONS; n++)
528 if (ss_info->data.array.subscript[n])
529 gfc_free_ss_chain (ss_info->data.array.subscript[n]);
530 break;
531
532 default:
533 break;
534 }
535
536 free (ss_info);
537 }
538
539
540 /* Free a SS. */
541
542 void
543 gfc_free_ss (gfc_ss * ss)
544 {
545 free_ss_info (ss->info);
546 free (ss);
547 }
548
549
550 /* Creates and initializes an array type gfc_ss struct. */
551
552 gfc_ss *
553 gfc_get_array_ss (gfc_ss *next, gfc_expr *expr, int dimen, gfc_ss_type type)
554 {
555 gfc_ss *ss;
556 gfc_ss_info *ss_info;
557 int i;
558
559 ss_info = gfc_get_ss_info ();
560 ss_info->refcount++;
561 ss_info->type = type;
562 ss_info->expr = expr;
563
564 ss = gfc_get_ss ();
565 ss->info = ss_info;
566 ss->next = next;
567 ss->dimen = dimen;
568 for (i = 0; i < ss->dimen; i++)
569 ss->dim[i] = i;
570
571 return ss;
572 }
573
574
575 /* Creates and initializes a temporary type gfc_ss struct. */
576
577 gfc_ss *
578 gfc_get_temp_ss (tree type, tree string_length, int dimen)
579 {
580 gfc_ss *ss;
581 gfc_ss_info *ss_info;
582 int i;
583
584 ss_info = gfc_get_ss_info ();
585 ss_info->refcount++;
586 ss_info->type = GFC_SS_TEMP;
587 ss_info->string_length = string_length;
588 ss_info->data.temp.type = type;
589
590 ss = gfc_get_ss ();
591 ss->info = ss_info;
592 ss->next = gfc_ss_terminator;
593 ss->dimen = dimen;
594 for (i = 0; i < ss->dimen; i++)
595 ss->dim[i] = i;
596
597 return ss;
598 }
599
600
601 /* Creates and initializes a scalar type gfc_ss struct. */
602
603 gfc_ss *
604 gfc_get_scalar_ss (gfc_ss *next, gfc_expr *expr)
605 {
606 gfc_ss *ss;
607 gfc_ss_info *ss_info;
608
609 ss_info = gfc_get_ss_info ();
610 ss_info->refcount++;
611 ss_info->type = GFC_SS_SCALAR;
612 ss_info->expr = expr;
613
614 ss = gfc_get_ss ();
615 ss->info = ss_info;
616 ss->next = next;
617
618 return ss;
619 }
620
621
622 /* Free all the SS associated with a loop. */
623
624 void
625 gfc_cleanup_loop (gfc_loopinfo * loop)
626 {
627 gfc_loopinfo *loop_next, **ploop;
628 gfc_ss *ss;
629 gfc_ss *next;
630
631 ss = loop->ss;
632 while (ss != gfc_ss_terminator)
633 {
634 gcc_assert (ss != NULL);
635 next = ss->loop_chain;
636 gfc_free_ss (ss);
637 ss = next;
638 }
639
640 /* Remove reference to self in the parent loop. */
641 if (loop->parent)
642 for (ploop = &loop->parent->nested; *ploop; ploop = &(*ploop)->next)
643 if (*ploop == loop)
644 {
645 *ploop = loop->next;
646 break;
647 }
648
649 /* Free non-freed nested loops. */
650 for (loop = loop->nested; loop; loop = loop_next)
651 {
652 loop_next = loop->next;
653 gfc_cleanup_loop (loop);
654 free (loop);
655 }
656 }
657
658
659 static void
660 set_ss_loop (gfc_ss *ss, gfc_loopinfo *loop)
661 {
662 int n;
663
664 for (; ss != gfc_ss_terminator; ss = ss->next)
665 {
666 ss->loop = loop;
667
668 if (ss->info->type == GFC_SS_SCALAR
669 || ss->info->type == GFC_SS_REFERENCE
670 || ss->info->type == GFC_SS_TEMP)
671 continue;
672
673 for (n = 0; n < GFC_MAX_DIMENSIONS; n++)
674 if (ss->info->data.array.subscript[n] != NULL)
675 set_ss_loop (ss->info->data.array.subscript[n], loop);
676 }
677 }
678
679
680 /* Associate a SS chain with a loop. */
681
682 void
683 gfc_add_ss_to_loop (gfc_loopinfo * loop, gfc_ss * head)
684 {
685 gfc_ss *ss;
686 gfc_loopinfo *nested_loop;
687
688 if (head == gfc_ss_terminator)
689 return;
690
691 set_ss_loop (head, loop);
692
693 ss = head;
694 for (; ss && ss != gfc_ss_terminator; ss = ss->next)
695 {
696 if (ss->nested_ss)
697 {
698 nested_loop = ss->nested_ss->loop;
699
700 /* More than one ss can belong to the same loop. Hence, we add the
701 loop to the chain only if it is different from the previously
702 added one, to avoid duplicate nested loops. */
703 if (nested_loop != loop->nested)
704 {
705 gcc_assert (nested_loop->parent == NULL);
706 nested_loop->parent = loop;
707
708 gcc_assert (nested_loop->next == NULL);
709 nested_loop->next = loop->nested;
710 loop->nested = nested_loop;
711 }
712 else
713 gcc_assert (nested_loop->parent == loop);
714 }
715
716 if (ss->next == gfc_ss_terminator)
717 ss->loop_chain = loop->ss;
718 else
719 ss->loop_chain = ss->next;
720 }
721 gcc_assert (ss == gfc_ss_terminator);
722 loop->ss = head;
723 }
724
725
726 /* Generate an initializer for a static pointer or allocatable array. */
727
728 void
729 gfc_trans_static_array_pointer (gfc_symbol * sym)
730 {
731 tree type;
732
733 gcc_assert (TREE_STATIC (sym->backend_decl));
734 /* Just zero the data member. */
735 type = TREE_TYPE (sym->backend_decl);
736 DECL_INITIAL (sym->backend_decl) = gfc_build_null_descriptor (type);
737 }
738
739
740 /* If the bounds of SE's loop have not yet been set, see if they can be
741 determined from array spec AS, which is the array spec of a called
742 function. MAPPING maps the callee's dummy arguments to the values
743 that the caller is passing. Add any initialization and finalization
744 code to SE. */
745
746 void
747 gfc_set_loop_bounds_from_array_spec (gfc_interface_mapping * mapping,
748 gfc_se * se, gfc_array_spec * as)
749 {
750 int n, dim, total_dim;
751 gfc_se tmpse;
752 gfc_ss *ss;
753 tree lower;
754 tree upper;
755 tree tmp;
756
757 total_dim = 0;
758
759 if (!as || as->type != AS_EXPLICIT)
760 return;
761
762 for (ss = se->ss; ss; ss = ss->parent)
763 {
764 total_dim += ss->loop->dimen;
765 for (n = 0; n < ss->loop->dimen; n++)
766 {
767 /* The bound is known, nothing to do. */
768 if (ss->loop->to[n] != NULL_TREE)
769 continue;
770
771 dim = ss->dim[n];
772 gcc_assert (dim < as->rank);
773 gcc_assert (ss->loop->dimen <= as->rank);
774
775 /* Evaluate the lower bound. */
776 gfc_init_se (&tmpse, NULL);
777 gfc_apply_interface_mapping (mapping, &tmpse, as->lower[dim]);
778 gfc_add_block_to_block (&se->pre, &tmpse.pre);
779 gfc_add_block_to_block (&se->post, &tmpse.post);
780 lower = fold_convert (gfc_array_index_type, tmpse.expr);
781
782 /* ...and the upper bound. */
783 gfc_init_se (&tmpse, NULL);
784 gfc_apply_interface_mapping (mapping, &tmpse, as->upper[dim]);
785 gfc_add_block_to_block (&se->pre, &tmpse.pre);
786 gfc_add_block_to_block (&se->post, &tmpse.post);
787 upper = fold_convert (gfc_array_index_type, tmpse.expr);
788
789 /* Set the upper bound of the loop to UPPER - LOWER. */
790 tmp = fold_build2_loc (input_location, MINUS_EXPR,
791 gfc_array_index_type, upper, lower);
792 tmp = gfc_evaluate_now (tmp, &se->pre);
793 ss->loop->to[n] = tmp;
794 }
795 }
796
797 gcc_assert (total_dim == as->rank);
798 }
799
800
801 /* Generate code to allocate an array temporary, or create a variable to
802 hold the data. If size is NULL, zero the descriptor so that the
803 callee will allocate the array. If DEALLOC is true, also generate code to
804 free the array afterwards.
805
806    If INITIAL is not NULL, it is packed using internal_pack and the result is
807    used as data instead of allocating a fresh, uninitialized area of memory.
808
809 Initialization code is added to PRE and finalization code to POST.
810 DYNAMIC is true if the caller may want to extend the array later
811 using realloc. This prevents us from putting the array on the stack. */
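
/* In outline (illustrative only; the details are in the code below):

     if SIZE is NULL or zero
       descriptor->data = NULL            the callee will allocate
     else if not DYNAMIC, no INITIAL, and the data fits on the stack
       data = local array variable        (DECL_EXPR/alloca for -fstack-arrays)
     else if INITIAL is given
       data = internal_pack (INITIAL)     allocate + copy if already packed
     else
       data = malloc (SIZE)

   and, when DEALLOC is set and the data is not on the stack, a matching
   free is added to POST.  */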
812
813 static void
814 gfc_trans_allocate_array_storage (stmtblock_t * pre, stmtblock_t * post,
815 gfc_array_info * info, tree size, tree nelem,
816 tree initial, bool dynamic, bool dealloc)
817 {
818 tree tmp;
819 tree desc;
820 bool onstack;
821
822 desc = info->descriptor;
823 info->offset = gfc_index_zero_node;
824 if (size == NULL_TREE || integer_zerop (size))
825 {
826 /* A callee allocated array. */
827 gfc_conv_descriptor_data_set (pre, desc, null_pointer_node);
828 onstack = FALSE;
829 }
830 else
831 {
832 /* Allocate the temporary. */
833 onstack = !dynamic && initial == NULL_TREE
834 && (gfc_option.flag_stack_arrays
835 || gfc_can_put_var_on_stack (size));
836
837 if (onstack)
838 {
839 /* Make a temporary variable to hold the data. */
840 tmp = fold_build2_loc (input_location, MINUS_EXPR, TREE_TYPE (nelem),
841 nelem, gfc_index_one_node);
842 tmp = gfc_evaluate_now (tmp, pre);
843 tmp = build_range_type (gfc_array_index_type, gfc_index_zero_node,
844 tmp);
845 tmp = build_array_type (gfc_get_element_type (TREE_TYPE (desc)),
846 tmp);
847 tmp = gfc_create_var (tmp, "A");
848 /* If we're here only because of -fstack-arrays we have to
849 emit a DECL_EXPR to make the gimplifier emit alloca calls. */
850 if (!gfc_can_put_var_on_stack (size))
851 gfc_add_expr_to_block (pre,
852 fold_build1_loc (input_location,
853 DECL_EXPR, TREE_TYPE (tmp),
854 tmp));
855 tmp = gfc_build_addr_expr (NULL_TREE, tmp);
856 gfc_conv_descriptor_data_set (pre, desc, tmp);
857 }
858 else
859 {
860 /* Allocate memory to hold the data or call internal_pack. */
861 if (initial == NULL_TREE)
862 {
863 tmp = gfc_call_malloc (pre, NULL, size);
864 tmp = gfc_evaluate_now (tmp, pre);
865 }
866 else
867 {
868 tree packed;
869 tree source_data;
870 tree was_packed;
871 stmtblock_t do_copying;
872
873 tmp = TREE_TYPE (initial); /* Pointer to descriptor. */
874 gcc_assert (TREE_CODE (tmp) == POINTER_TYPE);
875 tmp = TREE_TYPE (tmp); /* The descriptor itself. */
876 tmp = gfc_get_element_type (tmp);
877 gcc_assert (tmp == gfc_get_element_type (TREE_TYPE (desc)));
878 packed = gfc_create_var (build_pointer_type (tmp), "data");
879
880 tmp = build_call_expr_loc (input_location,
881 gfor_fndecl_in_pack, 1, initial);
882 tmp = fold_convert (TREE_TYPE (packed), tmp);
883 gfc_add_modify (pre, packed, tmp);
884
885 tmp = build_fold_indirect_ref_loc (input_location,
886 initial);
887 source_data = gfc_conv_descriptor_data_get (tmp);
888
889 /* internal_pack may return source->data without any allocation
890 or copying if it is already packed. If that's the case, we
891 need to allocate and copy manually. */
892
893 gfc_start_block (&do_copying);
894 tmp = gfc_call_malloc (&do_copying, NULL, size);
895 tmp = fold_convert (TREE_TYPE (packed), tmp);
896 gfc_add_modify (&do_copying, packed, tmp);
897 tmp = gfc_build_memcpy_call (packed, source_data, size);
898 gfc_add_expr_to_block (&do_copying, tmp);
899
900 was_packed = fold_build2_loc (input_location, EQ_EXPR,
901 boolean_type_node, packed,
902 source_data);
903 tmp = gfc_finish_block (&do_copying);
904 tmp = build3_v (COND_EXPR, was_packed, tmp,
905 build_empty_stmt (input_location));
906 gfc_add_expr_to_block (pre, tmp);
907
908 tmp = fold_convert (pvoid_type_node, packed);
909 }
910
911 gfc_conv_descriptor_data_set (pre, desc, tmp);
912 }
913 }
914 info->data = gfc_conv_descriptor_data_get (desc);
915
916 /* The offset is zero because we create temporaries with a zero
917 lower bound. */
918 gfc_conv_descriptor_offset_set (pre, desc, gfc_index_zero_node);
919
920 if (dealloc && !onstack)
921 {
922 /* Free the temporary. */
923 tmp = gfc_conv_descriptor_data_get (desc);
924 tmp = gfc_call_free (fold_convert (pvoid_type_node, tmp));
925 gfc_add_expr_to_block (post, tmp);
926 }
927 }
928
929
930 /* Get the scalarizer array dimension corresponding to the actual array dimension
931 given by ARRAY_DIM.
932
933 For example, if SS represents the array ref a(1,:,:,1), it is a
934 bidimensional scalarizer array, and the result would be 0 for ARRAY_DIM=1,
935 and 1 for ARRAY_DIM=2.
936 If SS represents transpose(a(:,1,1,:)), it is again a bidimensional
937 scalarizer array, and the result would be 1 for ARRAY_DIM=0 and 0 for
938 ARRAY_DIM=3.
939 If SS represents sum(a(:,:,:,1), dim=1), it is a 2+1-dimensional scalarizer
940 array. If called on the inner ss, the result would be respectively 0,1,2 for
941 ARRAY_DIM=0,1,2. If called on the outer ss, the result would be 0,1
942 for ARRAY_DIM=1,2. */
943
944 static int
945 get_scalarizer_dim_for_array_dim (gfc_ss *ss, int array_dim)
946 {
947 int array_ref_dim;
948 int n;
949
950 array_ref_dim = 0;
951
952 for (; ss; ss = ss->parent)
953 for (n = 0; n < ss->dimen; n++)
954 if (ss->dim[n] < array_dim)
955 array_ref_dim++;
956
957 return array_ref_dim;
958 }
959
960
961 static gfc_ss *
962 innermost_ss (gfc_ss *ss)
963 {
964 while (ss->nested_ss != NULL)
965 ss = ss->nested_ss;
966
967 return ss;
968 }
969
970
971
972 /* Get the array reference dimension corresponding to the given loop dimension.
973    It is different from the true array dimension given by the dim array in
974    the case of a partial array reference (e.g. a(:,:,1,:)).
975    It is different from the loop dimension in the case of a transposed array.
976 */
977
978 static int
979 get_array_ref_dim_for_loop_dim (gfc_ss *ss, int loop_dim)
980 {
981 return get_scalarizer_dim_for_array_dim (innermost_ss (ss),
982 ss->dim[loop_dim]);
983 }
984
985
986 /* Generate code to create and initialize the descriptor for a temporary
987 array. This is used for both temporaries needed by the scalarizer, and
988 functions returning arrays. Adjusts the loop variables to be
989 zero-based, and calculates the loop bounds for callee allocated arrays.
990 Allocate the array unless it's callee allocated (we have a callee
991 allocated array if 'callee_alloc' is true, or if loop->to[n] is
992 NULL_TREE for any n). Also fills in the descriptor, data and offset
993 fields of info if known. Returns the size of the array, or NULL for a
994 callee allocated array.
995
996 'eltype' == NULL signals that the temporary should be a class object.
997 The 'initial' expression is used to obtain the size of the dynamic
998    type; otherwise the allocation and initialization proceed as for any
999    other expression.
1000
1001 PRE, POST, INITIAL, DYNAMIC and DEALLOC are as for
1002 gfc_trans_allocate_array_storage. */
1003
1004 tree
1005 gfc_trans_create_temp_array (stmtblock_t * pre, stmtblock_t * post, gfc_ss * ss,
1006 tree eltype, tree initial, bool dynamic,
1007 bool dealloc, bool callee_alloc, locus * where)
1008 {
1009 gfc_loopinfo *loop;
1010 gfc_ss *s;
1011 gfc_array_info *info;
1012 tree from[GFC_MAX_DIMENSIONS], to[GFC_MAX_DIMENSIONS];
1013 tree type;
1014 tree desc;
1015 tree tmp;
1016 tree size;
1017 tree nelem;
1018 tree cond;
1019 tree or_expr;
1020 tree class_expr = NULL_TREE;
1021 int n, dim, tmp_dim;
1022 int total_dim = 0;
1023
1024 /* This signals a class array for which we need the size of the
1025 dynamic type. Generate an eltype and then the class expression. */
1026 if (eltype == NULL_TREE && initial)
1027 {
1028 gcc_assert (POINTER_TYPE_P (TREE_TYPE (initial)));
1029 class_expr = build_fold_indirect_ref_loc (input_location, initial);
1030 eltype = TREE_TYPE (class_expr);
1031 eltype = gfc_get_element_type (eltype);
1032 /* Obtain the structure (class) expression. */
1033 class_expr = TREE_OPERAND (class_expr, 0);
1034 gcc_assert (class_expr);
1035 }
1036
1037 memset (from, 0, sizeof (from));
1038 memset (to, 0, sizeof (to));
1039
1040 info = &ss->info->data.array;
1041
1042 gcc_assert (ss->dimen > 0);
1043 gcc_assert (ss->loop->dimen == ss->dimen);
1044
1045 if (gfc_option.warn_array_temp && where)
1046 gfc_warning ("Creating array temporary at %L", where);
1047
1048 /* Set the lower bound to zero. */
1049 for (s = ss; s; s = s->parent)
1050 {
1051 loop = s->loop;
1052
1053 total_dim += loop->dimen;
1054 for (n = 0; n < loop->dimen; n++)
1055 {
1056 dim = s->dim[n];
1057
1058 /* Callee allocated arrays may not have a known bound yet. */
1059 if (loop->to[n])
1060 loop->to[n] = gfc_evaluate_now (
1061 fold_build2_loc (input_location, MINUS_EXPR,
1062 gfc_array_index_type,
1063 loop->to[n], loop->from[n]),
1064 pre);
1065 loop->from[n] = gfc_index_zero_node;
1066
1067 	  /* We have just changed the loop bounds; we must clear the
1068 corresponding specloop, so that delta calculation is not skipped
1069 later in gfc_set_delta. */
1070 loop->specloop[n] = NULL;
1071
1072 /* We are constructing the temporary's descriptor based on the loop
1073 dimensions. As the dimensions may be accessed in arbitrary order
1074 	     (think of transpose), the size taken from the n'th loop may not map
1075 	     to the n'th dimension of the array.  We need to reconstruct the loop
1076 	     info in the right order before using it to set the descriptor
1077 	     bounds.  */
1078 tmp_dim = get_scalarizer_dim_for_array_dim (ss, dim);
1079 from[tmp_dim] = loop->from[n];
1080 to[tmp_dim] = loop->to[n];
1081
1082 info->delta[dim] = gfc_index_zero_node;
1083 info->start[dim] = gfc_index_zero_node;
1084 info->end[dim] = gfc_index_zero_node;
1085 info->stride[dim] = gfc_index_one_node;
1086 }
1087 }
1088
1089 /* Initialize the descriptor. */
1090 type =
1091 gfc_get_array_type_bounds (eltype, total_dim, 0, from, to, 1,
1092 GFC_ARRAY_UNKNOWN, true);
1093 desc = gfc_create_var (type, "atmp");
1094 GFC_DECL_PACKED_ARRAY (desc) = 1;
1095
1096 info->descriptor = desc;
1097 size = gfc_index_one_node;
1098
1099 /* Fill in the array dtype. */
1100 tmp = gfc_conv_descriptor_dtype (desc);
1101 gfc_add_modify (pre, tmp, gfc_get_dtype (TREE_TYPE (desc)));
1102
1103 /*
1104 Fill in the bounds and stride. This is a packed array, so:
1105
1106 size = 1;
1107 for (n = 0; n < rank; n++)
1108 {
1109 stride[n] = size
1110 delta = ubound[n] + 1 - lbound[n];
1111 size = size * delta;
1112 }
1113 size = size * sizeof(element);
1114 */
1115
1116 or_expr = NULL_TREE;
1117
1118 /* If there is at least one null loop->to[n], it is a callee allocated
1119 array. */
1120 for (n = 0; n < total_dim; n++)
1121 if (to[n] == NULL_TREE)
1122 {
1123 size = NULL_TREE;
1124 break;
1125 }
1126
1127 if (size == NULL_TREE)
1128 for (s = ss; s; s = s->parent)
1129 for (n = 0; n < s->loop->dimen; n++)
1130 {
1131 dim = get_scalarizer_dim_for_array_dim (ss, s->dim[n]);
1132
1133 /* For a callee allocated array express the loop bounds in terms
1134 of the descriptor fields. */
1135 tmp = fold_build2_loc (input_location,
1136 MINUS_EXPR, gfc_array_index_type,
1137 gfc_conv_descriptor_ubound_get (desc, gfc_rank_cst[dim]),
1138 gfc_conv_descriptor_lbound_get (desc, gfc_rank_cst[dim]));
1139 s->loop->to[n] = tmp;
1140 }
1141 else
1142 {
1143 for (n = 0; n < total_dim; n++)
1144 {
1145 /* Store the stride and bound components in the descriptor. */
1146 gfc_conv_descriptor_stride_set (pre, desc, gfc_rank_cst[n], size);
1147
1148 gfc_conv_descriptor_lbound_set (pre, desc, gfc_rank_cst[n],
1149 gfc_index_zero_node);
1150
1151 gfc_conv_descriptor_ubound_set (pre, desc, gfc_rank_cst[n], to[n]);
1152
1153 tmp = fold_build2_loc (input_location, PLUS_EXPR,
1154 gfc_array_index_type,
1155 to[n], gfc_index_one_node);
1156
1157 /* Check whether the size for this dimension is negative. */
1158 cond = fold_build2_loc (input_location, LE_EXPR, boolean_type_node,
1159 tmp, gfc_index_zero_node);
1160 cond = gfc_evaluate_now (cond, pre);
1161
1162 if (n == 0)
1163 or_expr = cond;
1164 else
1165 or_expr = fold_build2_loc (input_location, TRUTH_OR_EXPR,
1166 boolean_type_node, or_expr, cond);
1167
1168 size = fold_build2_loc (input_location, MULT_EXPR,
1169 gfc_array_index_type, size, tmp);
1170 size = gfc_evaluate_now (size, pre);
1171 }
1172 }
1173
1174 /* Get the size of the array. */
1175 if (size && !callee_alloc)
1176 {
1177 tree elemsize;
1178 /* If or_expr is true, then the extent in at least one
1179 dimension is zero and the size is set to zero. */
1180 size = fold_build3_loc (input_location, COND_EXPR, gfc_array_index_type,
1181 or_expr, gfc_index_zero_node, size);
1182
1183 nelem = size;
1184 if (class_expr == NULL_TREE)
1185 elemsize = fold_convert (gfc_array_index_type,
1186 TYPE_SIZE_UNIT (gfc_get_element_type (type)));
1187 else
1188 elemsize = gfc_vtable_size_get (class_expr);
1189
1190 size = fold_build2_loc (input_location, MULT_EXPR, gfc_array_index_type,
1191 size, elemsize);
1192 }
1193 else
1194 {
1195 nelem = size;
1196 size = NULL_TREE;
1197 }
1198
1199 gfc_trans_allocate_array_storage (pre, post, info, size, nelem, initial,
1200 dynamic, dealloc);
1201
1202 while (ss->parent)
1203 ss = ss->parent;
1204
1205 if (ss->dimen > ss->loop->temp_dim)
1206 ss->loop->temp_dim = ss->dimen;
1207
1208 return size;
1209 }
1210
1211
1212 /* Return the number of iterations in a loop that starts at START,
1213 ends at END, and has step STEP. */
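
/* That is, count = max ((END - START) floor_div STEP + 1, 0); for example
   START = 1, END = 10, STEP = 3 gives floor (9 / 3) + 1 = 4 iterations
   (1, 4, 7, 10).  */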
1214
1215 static tree
1216 gfc_get_iteration_count (tree start, tree end, tree step)
1217 {
1218 tree tmp;
1219 tree type;
1220
1221 type = TREE_TYPE (step);
1222 tmp = fold_build2_loc (input_location, MINUS_EXPR, type, end, start);
1223 tmp = fold_build2_loc (input_location, FLOOR_DIV_EXPR, type, tmp, step);
1224 tmp = fold_build2_loc (input_location, PLUS_EXPR, type, tmp,
1225 build_int_cst (type, 1));
1226 tmp = fold_build2_loc (input_location, MAX_EXPR, type, tmp,
1227 build_int_cst (type, 0));
1228 return fold_convert (gfc_array_index_type, tmp);
1229 }
1230
1231
1232 /* Extend the data in array DESC by EXTRA elements. */
1233
1234 static void
1235 gfc_grow_array (stmtblock_t * pblock, tree desc, tree extra)
1236 {
1237 tree arg0, arg1;
1238 tree tmp;
1239 tree size;
1240 tree ubound;
1241
1242 if (integer_zerop (extra))
1243 return;
1244
1245 ubound = gfc_conv_descriptor_ubound_get (desc, gfc_rank_cst[0]);
1246
1247 /* Add EXTRA to the upper bound. */
1248 tmp = fold_build2_loc (input_location, PLUS_EXPR, gfc_array_index_type,
1249 ubound, extra);
1250 gfc_conv_descriptor_ubound_set (pblock, desc, gfc_rank_cst[0], tmp);
1251
1252 /* Get the value of the current data pointer. */
1253 arg0 = gfc_conv_descriptor_data_get (desc);
1254
1255 /* Calculate the new array size. */
1256 size = TYPE_SIZE_UNIT (gfc_get_element_type (TREE_TYPE (desc)));
1257 tmp = fold_build2_loc (input_location, PLUS_EXPR, gfc_array_index_type,
1258 ubound, gfc_index_one_node);
1259 arg1 = fold_build2_loc (input_location, MULT_EXPR, size_type_node,
1260 fold_convert (size_type_node, tmp),
1261 fold_convert (size_type_node, size));
1262
1263 /* Call the realloc() function. */
1264 tmp = gfc_call_realloc (pblock, arg0, arg1);
1265 gfc_conv_descriptor_data_set (pblock, desc, tmp);
1266 }
1267
1268
1269 /* Return true if the bounds of iterator I can only be determined
1270 at run time. */
1271
1272 static inline bool
1273 gfc_iterator_has_dynamic_bounds (gfc_iterator * i)
1274 {
1275 return (i->start->expr_type != EXPR_CONSTANT
1276 || i->end->expr_type != EXPR_CONSTANT
1277 || i->step->expr_type != EXPR_CONSTANT);
1278 }
1279
1280
1281 /* Split the size of constructor element EXPR into the sum of two terms,
1282 one of which can be determined at compile time and one of which must
1283 be calculated at run time. Set *SIZE to the former and return true
1284 if the latter might be nonzero. */
1285
1286 static bool
1287 gfc_get_array_constructor_element_size (mpz_t * size, gfc_expr * expr)
1288 {
1289 if (expr->expr_type == EXPR_ARRAY)
1290 return gfc_get_array_constructor_size (size, expr->value.constructor);
1291 else if (expr->rank > 0)
1292 {
1293 /* Calculate everything at run time. */
1294 mpz_set_ui (*size, 0);
1295 return true;
1296 }
1297 else
1298 {
1299 /* A single element. */
1300 mpz_set_ui (*size, 1);
1301 return false;
1302 }
1303 }
1304
1305
1306 /* Like gfc_get_array_constructor_element_size, but applied to the whole
1307 of array constructor C. */
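
/* For example (illustration only), for the constructor

     (/ 1, 2, (i, i = 1, n) /)

   with non-constant N, *SIZE is set to 2 and true is returned because the
   implied-do has run-time bounds, whereas (/ 1, 2, 3 /) sets *SIZE to 3 and
   returns false.  */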
1308
1309 static bool
1310 gfc_get_array_constructor_size (mpz_t * size, gfc_constructor_base base)
1311 {
1312 gfc_constructor *c;
1313 gfc_iterator *i;
1314 mpz_t val;
1315 mpz_t len;
1316 bool dynamic;
1317
1318 mpz_set_ui (*size, 0);
1319 mpz_init (len);
1320 mpz_init (val);
1321
1322 dynamic = false;
1323 for (c = gfc_constructor_first (base); c; c = gfc_constructor_next (c))
1324 {
1325 i = c->iterator;
1326 if (i && gfc_iterator_has_dynamic_bounds (i))
1327 dynamic = true;
1328 else
1329 {
1330 dynamic |= gfc_get_array_constructor_element_size (&len, c->expr);
1331 if (i)
1332 {
1333 /* Multiply the static part of the element size by the
1334 number of iterations. */
1335 mpz_sub (val, i->end->value.integer, i->start->value.integer);
1336 mpz_fdiv_q (val, val, i->step->value.integer);
1337 mpz_add_ui (val, val, 1);
1338 if (mpz_sgn (val) > 0)
1339 mpz_mul (len, len, val);
1340 else
1341 mpz_set_ui (len, 0);
1342 }
1343 mpz_add (*size, *size, len);
1344 }
1345 }
1346 mpz_clear (len);
1347 mpz_clear (val);
1348 return dynamic;
1349 }
1350
1351
1352 /* Make sure offset is a variable. */
1353
1354 static void
1355 gfc_put_offset_into_var (stmtblock_t * pblock, tree * poffset,
1356 tree * offsetvar)
1357 {
1358 /* We should have already created the offset variable. We cannot
1359 create it here because we may be in an inner scope. */
1360 gcc_assert (*offsetvar != NULL_TREE);
1361 gfc_add_modify (pblock, *offsetvar, *poffset);
1362 *poffset = *offsetvar;
1363 TREE_USED (*offsetvar) = 1;
1364 }
1365
1366
1367 /* Variables needed for bounds-checking. */
1368 static bool first_len;
1369 static tree first_len_val;
1370 static bool typespec_chararray_ctor;
1371
1372 static void
1373 gfc_trans_array_ctor_element (stmtblock_t * pblock, tree desc,
1374 tree offset, gfc_se * se, gfc_expr * expr)
1375 {
1376 tree tmp;
1377
1378 gfc_conv_expr (se, expr);
1379
1380 /* Store the value. */
1381 tmp = build_fold_indirect_ref_loc (input_location,
1382 gfc_conv_descriptor_data_get (desc));
1383 tmp = gfc_build_array_ref (tmp, offset, NULL);
1384
1385 if (expr->ts.type == BT_CHARACTER)
1386 {
1387 int i = gfc_validate_kind (BT_CHARACTER, expr->ts.kind, false);
1388 tree esize;
1389
1390 esize = size_in_bytes (gfc_get_element_type (TREE_TYPE (desc)));
1391 esize = fold_convert (gfc_charlen_type_node, esize);
1392 esize = fold_build2_loc (input_location, TRUNC_DIV_EXPR,
1393 gfc_charlen_type_node, esize,
1394 build_int_cst (gfc_charlen_type_node,
1395 gfc_character_kinds[i].bit_size / 8));
1396
1397 gfc_conv_string_parameter (se);
1398 if (POINTER_TYPE_P (TREE_TYPE (tmp)))
1399 {
1400 /* The temporary is an array of pointers. */
1401 se->expr = fold_convert (TREE_TYPE (tmp), se->expr);
1402 gfc_add_modify (&se->pre, tmp, se->expr);
1403 }
1404 else
1405 {
1406 /* The temporary is an array of string values. */
1407 tmp = gfc_build_addr_expr (gfc_get_pchar_type (expr->ts.kind), tmp);
1408 /* We know the temporary and the value will be the same length,
1409 	     so we can use memcpy.  */
1410 gfc_trans_string_copy (&se->pre, esize, tmp, expr->ts.kind,
1411 se->string_length, se->expr, expr->ts.kind);
1412 }
1413 if ((gfc_option.rtcheck & GFC_RTCHECK_BOUNDS) && !typespec_chararray_ctor)
1414 {
1415 if (first_len)
1416 {
1417 gfc_add_modify (&se->pre, first_len_val,
1418 se->string_length);
1419 first_len = false;
1420 }
1421 else
1422 {
1423 /* Verify that all constructor elements are of the same
1424 length. */
1425 tree cond = fold_build2_loc (input_location, NE_EXPR,
1426 boolean_type_node, first_len_val,
1427 se->string_length);
1428 gfc_trans_runtime_check
1429 (true, false, cond, &se->pre, &expr->where,
1430 "Different CHARACTER lengths (%ld/%ld) in array constructor",
1431 fold_convert (long_integer_type_node, first_len_val),
1432 fold_convert (long_integer_type_node, se->string_length));
1433 }
1434 }
1435 }
1436 else
1437 {
1438 /* TODO: Should the frontend already have done this conversion? */
1439 se->expr = fold_convert (TREE_TYPE (tmp), se->expr);
1440 gfc_add_modify (&se->pre, tmp, se->expr);
1441 }
1442
1443 gfc_add_block_to_block (pblock, &se->pre);
1444 gfc_add_block_to_block (pblock, &se->post);
1445 }
1446
1447
1448 /* Add the contents of an array to the constructor. DYNAMIC is as for
1449 gfc_trans_array_constructor_value. */
1450
1451 static void
1452 gfc_trans_array_constructor_subarray (stmtblock_t * pblock,
1453 tree type ATTRIBUTE_UNUSED,
1454 tree desc, gfc_expr * expr,
1455 tree * poffset, tree * offsetvar,
1456 bool dynamic)
1457 {
1458 gfc_se se;
1459 gfc_ss *ss;
1460 gfc_loopinfo loop;
1461 stmtblock_t body;
1462 tree tmp;
1463 tree size;
1464 int n;
1465
1466 /* We need this to be a variable so we can increment it. */
1467 gfc_put_offset_into_var (pblock, poffset, offsetvar);
1468
1469 gfc_init_se (&se, NULL);
1470
1471 /* Walk the array expression. */
1472 ss = gfc_walk_expr (expr);
1473 gcc_assert (ss != gfc_ss_terminator);
1474
1475 /* Initialize the scalarizer. */
1476 gfc_init_loopinfo (&loop);
1477 gfc_add_ss_to_loop (&loop, ss);
1478
1479 /* Initialize the loop. */
1480 gfc_conv_ss_startstride (&loop);
1481 gfc_conv_loop_setup (&loop, &expr->where);
1482
1483 /* Make sure the constructed array has room for the new data. */
1484 if (dynamic)
1485 {
1486 /* Set SIZE to the total number of elements in the subarray. */
1487 size = gfc_index_one_node;
1488 for (n = 0; n < loop.dimen; n++)
1489 {
1490 tmp = gfc_get_iteration_count (loop.from[n], loop.to[n],
1491 gfc_index_one_node);
1492 size = fold_build2_loc (input_location, MULT_EXPR,
1493 gfc_array_index_type, size, tmp);
1494 }
1495
1496 /* Grow the constructed array by SIZE elements. */
1497 gfc_grow_array (&loop.pre, desc, size);
1498 }
1499
1500 /* Make the loop body. */
1501 gfc_mark_ss_chain_used (ss, 1);
1502 gfc_start_scalarized_body (&loop, &body);
1503 gfc_copy_loopinfo_to_se (&se, &loop);
1504 se.ss = ss;
1505
1506 gfc_trans_array_ctor_element (&body, desc, *poffset, &se, expr);
1507 gcc_assert (se.ss == gfc_ss_terminator);
1508
1509 /* Increment the offset. */
1510 tmp = fold_build2_loc (input_location, PLUS_EXPR, gfc_array_index_type,
1511 *poffset, gfc_index_one_node);
1512 gfc_add_modify (&body, *poffset, tmp);
1513
1514 /* Finish the loop. */
1515 gfc_trans_scalarizing_loops (&loop, &body);
1516 gfc_add_block_to_block (&loop.pre, &loop.post);
1517 tmp = gfc_finish_block (&loop.pre);
1518 gfc_add_expr_to_block (pblock, tmp);
1519
1520 gfc_cleanup_loop (&loop);
1521 }
1522
1523
1524 /* Assign the values to the elements of an array constructor. DYNAMIC
1525 is true if descriptor DESC only contains enough data for the static
1526 size calculated by gfc_get_array_constructor_size. When true, memory
1527 for the dynamic parts must be allocated using realloc. */
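
/* For an element controlled by an iterator, the generated code has roughly
   this shape (sketch; see the implied-do handling below):

     shadow_loopvar = start;
     loop:
       if (step > 0 ? shadow_loopvar > end : shadow_loopvar < end)
	 goto exit;
       ... grow the array if needed and store the element value(s) ...
       shadow_loopvar = shadow_loopvar + step;
       goto loop;
     exit:  */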
1528
1529 static void
1530 gfc_trans_array_constructor_value (stmtblock_t * pblock, tree type,
1531 tree desc, gfc_constructor_base base,
1532 tree * poffset, tree * offsetvar,
1533 bool dynamic)
1534 {
1535 tree tmp;
1536 tree start = NULL_TREE;
1537 tree end = NULL_TREE;
1538 tree step = NULL_TREE;
1539 stmtblock_t body;
1540 gfc_se se;
1541 mpz_t size;
1542 gfc_constructor *c;
1543
1544 tree shadow_loopvar = NULL_TREE;
1545 gfc_saved_var saved_loopvar;
1546
1547 mpz_init (size);
1548 for (c = gfc_constructor_first (base); c; c = gfc_constructor_next (c))
1549 {
1550 /* If this is an iterator or an array, the offset must be a variable. */
1551 if ((c->iterator || c->expr->rank > 0) && INTEGER_CST_P (*poffset))
1552 gfc_put_offset_into_var (pblock, poffset, offsetvar);
1553
1554 /* Shadowing the iterator avoids changing its value and saves us from
1555 keeping track of it. Further, it makes sure that there's always a
1556 backend-decl for the symbol, even if there wasn't one before,
1557 e.g. in the case of an iterator that appears in a specification
1558 expression in an interface mapping. */
1559 if (c->iterator)
1560 {
1561 gfc_symbol *sym;
1562 tree type;
1563
1564 /* Evaluate loop bounds before substituting the loop variable
1565 in case they depend on it. Such a case is invalid, but it is
1566 not more expensive to do the right thing here.
1567 See PR 44354. */
1568 gfc_init_se (&se, NULL);
1569 gfc_conv_expr_val (&se, c->iterator->start);
1570 gfc_add_block_to_block (pblock, &se.pre);
1571 start = gfc_evaluate_now (se.expr, pblock);
1572
1573 gfc_init_se (&se, NULL);
1574 gfc_conv_expr_val (&se, c->iterator->end);
1575 gfc_add_block_to_block (pblock, &se.pre);
1576 end = gfc_evaluate_now (se.expr, pblock);
1577
1578 gfc_init_se (&se, NULL);
1579 gfc_conv_expr_val (&se, c->iterator->step);
1580 gfc_add_block_to_block (pblock, &se.pre);
1581 step = gfc_evaluate_now (se.expr, pblock);
1582
1583 sym = c->iterator->var->symtree->n.sym;
1584 type = gfc_typenode_for_spec (&sym->ts);
1585
1586 shadow_loopvar = gfc_create_var (type, "shadow_loopvar");
1587 gfc_shadow_sym (sym, shadow_loopvar, &saved_loopvar);
1588 }
1589
1590 gfc_start_block (&body);
1591
1592 if (c->expr->expr_type == EXPR_ARRAY)
1593 {
1594 /* Array constructors can be nested. */
1595 gfc_trans_array_constructor_value (&body, type, desc,
1596 c->expr->value.constructor,
1597 poffset, offsetvar, dynamic);
1598 }
1599 else if (c->expr->rank > 0)
1600 {
1601 gfc_trans_array_constructor_subarray (&body, type, desc, c->expr,
1602 poffset, offsetvar, dynamic);
1603 }
1604 else
1605 {
1606 /* This code really upsets the gimplifier so don't bother for now. */
1607 gfc_constructor *p;
1608 HOST_WIDE_INT n;
1609 HOST_WIDE_INT size;
1610
1611 p = c;
1612 n = 0;
1613 while (p && !(p->iterator || p->expr->expr_type != EXPR_CONSTANT))
1614 {
1615 p = gfc_constructor_next (p);
1616 n++;
1617 }
1618 if (n < 4)
1619 {
1620 /* Scalar values. */
1621 gfc_init_se (&se, NULL);
1622 gfc_trans_array_ctor_element (&body, desc, *poffset,
1623 &se, c->expr);
1624
1625 *poffset = fold_build2_loc (input_location, PLUS_EXPR,
1626 gfc_array_index_type,
1627 *poffset, gfc_index_one_node);
1628 }
1629 else
1630 {
1631 /* Collect multiple scalar constants into a constructor. */
1632 vec<constructor_elt, va_gc> *v = NULL;
1633 tree init;
1634 tree bound;
1635 tree tmptype;
1636 HOST_WIDE_INT idx = 0;
1637
1638 p = c;
1639 /* Count the number of consecutive scalar constants. */
1640 while (p && !(p->iterator
1641 || p->expr->expr_type != EXPR_CONSTANT))
1642 {
1643 gfc_init_se (&se, NULL);
1644 gfc_conv_constant (&se, p->expr);
1645
1646 if (c->expr->ts.type != BT_CHARACTER)
1647 se.expr = fold_convert (type, se.expr);
1648 /* For constant character array constructors we build
1649 an array of pointers. */
1650 else if (POINTER_TYPE_P (type))
1651 se.expr = gfc_build_addr_expr
1652 (gfc_get_pchar_type (p->expr->ts.kind),
1653 se.expr);
1654
1655 CONSTRUCTOR_APPEND_ELT (v,
1656 build_int_cst (gfc_array_index_type,
1657 idx++),
1658 se.expr);
1659 c = p;
1660 p = gfc_constructor_next (p);
1661 }
1662
1663 bound = size_int (n - 1);
1664 /* Create an array type to hold them. */
1665 tmptype = build_range_type (gfc_array_index_type,
1666 gfc_index_zero_node, bound);
1667 tmptype = build_array_type (type, tmptype);
1668
1669 init = build_constructor (tmptype, v);
1670 TREE_CONSTANT (init) = 1;
1671 TREE_STATIC (init) = 1;
1672 /* Create a static variable to hold the data. */
1673 tmp = gfc_create_var (tmptype, "data");
1674 TREE_STATIC (tmp) = 1;
1675 TREE_CONSTANT (tmp) = 1;
1676 TREE_READONLY (tmp) = 1;
1677 DECL_INITIAL (tmp) = init;
1678 init = tmp;
1679
1680 /* Use BUILTIN_MEMCPY to assign the values. */
1681 tmp = gfc_conv_descriptor_data_get (desc);
1682 tmp = build_fold_indirect_ref_loc (input_location,
1683 tmp);
1684 tmp = gfc_build_array_ref (tmp, *poffset, NULL);
1685 tmp = gfc_build_addr_expr (NULL_TREE, tmp);
1686 init = gfc_build_addr_expr (NULL_TREE, init);
1687
1688 size = TREE_INT_CST_LOW (TYPE_SIZE_UNIT (type));
1689 bound = build_int_cst (size_type_node, n * size);
1690 tmp = build_call_expr_loc (input_location,
1691 builtin_decl_explicit (BUILT_IN_MEMCPY),
1692 3, tmp, init, bound);
1693 gfc_add_expr_to_block (&body, tmp);
1694
1695 *poffset = fold_build2_loc (input_location, PLUS_EXPR,
1696 gfc_array_index_type, *poffset,
1697 build_int_cst (gfc_array_index_type, n));
1698 }
1699 if (!INTEGER_CST_P (*poffset))
1700 {
1701 gfc_add_modify (&body, *offsetvar, *poffset);
1702 *poffset = *offsetvar;
1703 }
1704 }
1705
1706 /* The frontend should already have done any expansions
1707 at compile-time. */
1708 if (!c->iterator)
1709 {
1710 /* Pass the code as is. */
1711 tmp = gfc_finish_block (&body);
1712 gfc_add_expr_to_block (pblock, tmp);
1713 }
1714 else
1715 {
1716 /* Build the implied do-loop. */
1717 stmtblock_t implied_do_block;
1718 tree cond;
1719 tree exit_label;
1720 tree loopbody;
1721 tree tmp2;
1722
1723 loopbody = gfc_finish_block (&body);
1724
1725 /* Create a new block that holds the implied-do loop. A temporary
1726 loop-variable is used. */
1727 gfc_start_block(&implied_do_block);
1728
1729 /* Initialize the loop. */
1730 gfc_add_modify (&implied_do_block, shadow_loopvar, start);
1731
1732 /* If this array expands dynamically, and the number of iterations
1733 is not constant, we won't have allocated space for the static
1734 part of C->EXPR's size. Do that now. */
1735 if (dynamic && gfc_iterator_has_dynamic_bounds (c->iterator))
1736 {
1737 /* Get the number of iterations. */
1738 tmp = gfc_get_iteration_count (shadow_loopvar, end, step);
1739
1740 /* Get the static part of C->EXPR's size. */
1741 gfc_get_array_constructor_element_size (&size, c->expr);
1742 tmp2 = gfc_conv_mpz_to_tree (size, gfc_index_integer_kind);
1743
1744 /* Grow the array by TMP * TMP2 elements. */
1745 tmp = fold_build2_loc (input_location, MULT_EXPR,
1746 gfc_array_index_type, tmp, tmp2);
1747 gfc_grow_array (&implied_do_block, desc, tmp);
1748 }
1749
1750 /* Generate the loop body. */
1751 exit_label = gfc_build_label_decl (NULL_TREE);
1752 gfc_start_block (&body);
1753
1754 /* Generate the exit condition. Depending on the sign of
1755 the step variable we have to generate the correct
1756 comparison. */
1757 tmp = fold_build2_loc (input_location, GT_EXPR, boolean_type_node,
1758 step, build_int_cst (TREE_TYPE (step), 0));
1759 cond = fold_build3_loc (input_location, COND_EXPR,
1760 boolean_type_node, tmp,
1761 fold_build2_loc (input_location, GT_EXPR,
1762 boolean_type_node, shadow_loopvar, end),
1763 fold_build2_loc (input_location, LT_EXPR,
1764 boolean_type_node, shadow_loopvar, end));
1765 tmp = build1_v (GOTO_EXPR, exit_label);
1766 TREE_USED (exit_label) = 1;
1767 tmp = build3_v (COND_EXPR, cond, tmp,
1768 build_empty_stmt (input_location));
1769 gfc_add_expr_to_block (&body, tmp);
1770
1771 /* The main loop body. */
1772 gfc_add_expr_to_block (&body, loopbody);
1773
1774 /* Increase loop variable by step. */
1775 tmp = fold_build2_loc (input_location, PLUS_EXPR,
1776 TREE_TYPE (shadow_loopvar), shadow_loopvar,
1777 step);
1778 gfc_add_modify (&body, shadow_loopvar, tmp);
1779
1780 /* Finish the loop. */
1781 tmp = gfc_finish_block (&body);
1782 tmp = build1_v (LOOP_EXPR, tmp);
1783 gfc_add_expr_to_block (&implied_do_block, tmp);
1784
1785 /* Add the exit label. */
1786 tmp = build1_v (LABEL_EXPR, exit_label);
1787 gfc_add_expr_to_block (&implied_do_block, tmp);
1788
1789 /* Finish the implied-do loop. */
1790 tmp = gfc_finish_block(&implied_do_block);
1791 gfc_add_expr_to_block(pblock, tmp);
1792
1793 gfc_restore_sym (c->iterator->var->symtree->n.sym, &saved_loopvar);
1794 }
1795 }
1796 mpz_clear (size);
1797 }
1798
1799
1800 /* A catch-all to obtain the string length for anything that is not
1801 a substring of non-constant length, a constant, array or variable. */
1802
1803 static void
1804 get_array_ctor_all_strlen (stmtblock_t *block, gfc_expr *e, tree *len)
1805 {
1806 gfc_se se;
1807
1808 /* Don't bother if we already know the length is a constant. */
1809 if (*len && INTEGER_CST_P (*len))
1810 return;
1811
1812 if (!e->ref && e->ts.u.cl && e->ts.u.cl->length
1813 && e->ts.u.cl->length->expr_type == EXPR_CONSTANT)
1814 {
1815 /* This is easy. */
1816 gfc_conv_const_charlen (e->ts.u.cl);
1817 *len = e->ts.u.cl->backend_decl;
1818 }
1819 else
1820 {
1821 /* Otherwise, be brutal even if inefficient. */
1822 gfc_init_se (&se, NULL);
1823
1824 /* No function call, in case of side effects. */
1825 se.no_function_call = 1;
1826 if (e->rank == 0)
1827 gfc_conv_expr (&se, e);
1828 else
1829 gfc_conv_expr_descriptor (&se, e);
1830
1831 /* Fix the value. */
1832 *len = gfc_evaluate_now (se.string_length, &se.pre);
1833
1834 gfc_add_block_to_block (block, &se.pre);
1835 gfc_add_block_to_block (block, &se.post);
1836
1837 e->ts.u.cl->backend_decl = *len;
1838 }
1839 }
1840
1841
1842 /* Figure out the string length of a variable reference expression.
1843 Used by get_array_ctor_strlen. */
1844
1845 static void
1846 get_array_ctor_var_strlen (stmtblock_t *block, gfc_expr * expr, tree * len)
1847 {
1848 gfc_ref *ref;
1849 gfc_typespec *ts;
1850 mpz_t char_len;
1851
1852 /* Don't bother if we already know the length is a constant. */
1853 if (*len && INTEGER_CST_P (*len))
1854 return;
1855
1856 ts = &expr->symtree->n.sym->ts;
1857 for (ref = expr->ref; ref; ref = ref->next)
1858 {
1859 switch (ref->type)
1860 {
1861 case REF_ARRAY:
1862 /* Array references don't change the string length. */
1863 break;
1864
1865 case REF_COMPONENT:
1866 /* Use the length of the component. */
1867 ts = &ref->u.c.component->ts;
1868 break;
1869
1870 case REF_SUBSTRING:
1871 if (ref->u.ss.start->expr_type != EXPR_CONSTANT
1872 || ref->u.ss.end->expr_type != EXPR_CONSTANT)
1873 {
1874 /* Note that this might evaluate expr. */
1875 get_array_ctor_all_strlen (block, expr, len);
1876 return;
1877 }
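/* Constant substring bounds: the length is end - start + 1 (e.g. for an
   illustrative reference str(3:7) the length is 5).  It is computed below
   as 1 + end - start using mpz arithmetic before converting to a tree.  */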
1878 mpz_init_set_ui (char_len, 1);
1879 mpz_add (char_len, char_len, ref->u.ss.end->value.integer);
1880 mpz_sub (char_len, char_len, ref->u.ss.start->value.integer);
1881 *len = gfc_conv_mpz_to_tree (char_len, gfc_default_integer_kind);
1882 *len = convert (gfc_charlen_type_node, *len);
1883 mpz_clear (char_len);
1884 return;
1885
1886 default:
1887 gcc_unreachable ();
1888 }
1889 }
1890
1891 *len = ts->u.cl->backend_decl;
1892 }
1893
1894
1895 /* Figure out the string length of a character array constructor.
1896 If LEN is NULL, don't calculate the length; this happens for recursive calls
1897 when a sub-array-constructor is an element but not in the first position,
1898 i.e. when we are not interested in the length.
1899 Returns TRUE if all elements are character constants. */
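/* For instance, for an illustrative constructor (/ 'ab', 'cd', c /) the
   length stored in *LEN is that of the first element ('ab', i.e. 2), and
   the return value is false because c is a variable, not a constant.  */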
1900
1901 bool
1902 get_array_ctor_strlen (stmtblock_t *block, gfc_constructor_base base, tree * len)
1903 {
1904 gfc_constructor *c;
1905 bool is_const;
1906
1907 is_const = TRUE;
1908
1909 if (gfc_constructor_first (base) == NULL)
1910 {
1911 if (len)
1912 *len = build_int_cstu (gfc_charlen_type_node, 0);
1913 return is_const;
1914 }
1915
1916 /* Loop over all constructor elements to find out is_const, but in len we
1917 want to store the length of the first, not the last, element. We can
1918 of course exit the loop as soon as is_const is found to be false. */
1919 for (c = gfc_constructor_first (base);
1920 c && is_const; c = gfc_constructor_next (c))
1921 {
1922 switch (c->expr->expr_type)
1923 {
1924 case EXPR_CONSTANT:
1925 if (len && !(*len && INTEGER_CST_P (*len)))
1926 *len = build_int_cstu (gfc_charlen_type_node,
1927 c->expr->value.character.length);
1928 break;
1929
1930 case EXPR_ARRAY:
1931 if (!get_array_ctor_strlen (block, c->expr->value.constructor, len))
1932 is_const = false;
1933 break;
1934
1935 case EXPR_VARIABLE:
1936 is_const = false;
1937 if (len)
1938 get_array_ctor_var_strlen (block, c->expr, len);
1939 break;
1940
1941 default:
1942 is_const = false;
1943 if (len)
1944 get_array_ctor_all_strlen (block, c->expr, len);
1945 break;
1946 }
1947
1948 /* After the first iteration, we don't want the length modified. */
1949 len = NULL;
1950 }
1951
1952 return is_const;
1953 }
1954
1955 /* Check whether the array constructor BASE consists entirely of constant
1956 elements; if so, return the number of those elements, otherwise
1957 return zero. Note that an empty or NULL array constructor returns zero. */
1958
1959 unsigned HOST_WIDE_INT
1960 gfc_constant_array_constructor_p (gfc_constructor_base base)
1961 {
1962 unsigned HOST_WIDE_INT nelem = 0;
1963
1964 gfc_constructor *c = gfc_constructor_first (base);
1965 while (c)
1966 {
1967 if (c->iterator
1968 || c->expr->rank > 0
1969 || c->expr->expr_type != EXPR_CONSTANT)
1970 return 0;
1971 c = gfc_constructor_next (c);
1972 nelem++;
1973 }
1974 return nelem;
1975 }
1976
1977
1978 /* Given EXPR, the constant array constructor specified by an EXPR_ARRAY,
1979 and the tree type of its elements, TYPE, return a static constant
1980 variable that is compile-time initialized. */
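/* As an illustration, for a constructor such as (/ 1, 2, 3 /) of default
   integer type this produces, roughly, the equivalent of a static read-only
   declaration 'static const int A[3] = {1, 2, 3};' whose DECL_INITIAL holds
   the converted element values.  */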
1981
1982 tree
1983 gfc_build_constant_array_constructor (gfc_expr * expr, tree type)
1984 {
1985 tree tmptype, init, tmp;
1986 HOST_WIDE_INT nelem;
1987 gfc_constructor *c;
1988 gfc_array_spec as;
1989 gfc_se se;
1990 int i;
1991 vec<constructor_elt, va_gc> *v = NULL;
1992
1993 /* First traverse the constructor list, converting the constants
1994 to tree to build an initializer. */
1995 nelem = 0;
1996 c = gfc_constructor_first (expr->value.constructor);
1997 while (c)
1998 {
1999 gfc_init_se (&se, NULL);
2000 gfc_conv_constant (&se, c->expr);
2001 if (c->expr->ts.type != BT_CHARACTER)
2002 se.expr = fold_convert (type, se.expr);
2003 else if (POINTER_TYPE_P (type))
2004 se.expr = gfc_build_addr_expr (gfc_get_pchar_type (c->expr->ts.kind),
2005 se.expr);
2006 CONSTRUCTOR_APPEND_ELT (v, build_int_cst (gfc_array_index_type, nelem),
2007 se.expr);
2008 c = gfc_constructor_next (c);
2009 nelem++;
2010 }
2011
2012 /* Next determine the tree type for the array. We use the gfortran
2013 front-end's gfc_get_nodesc_array_type in order to create a suitable
2014 GFC_ARRAY_TYPE_P that may be used by the scalarizer. */
2015
2016 memset (&as, 0, sizeof (gfc_array_spec));
2017
2018 as.rank = expr->rank;
2019 as.type = AS_EXPLICIT;
2020 if (!expr->shape)
2021 {
2022 as.lower[0] = gfc_get_int_expr (gfc_default_integer_kind, NULL, 0);
2023 as.upper[0] = gfc_get_int_expr (gfc_default_integer_kind,
2024 NULL, nelem - 1);
2025 }
2026 else
2027 for (i = 0; i < expr->rank; i++)
2028 {
2029 int tmp = (int) mpz_get_si (expr->shape[i]);
2030 as.lower[i] = gfc_get_int_expr (gfc_default_integer_kind, NULL, 0);
2031 as.upper[i] = gfc_get_int_expr (gfc_default_integer_kind,
2032 NULL, tmp - 1);
2033 }
2034
2035 tmptype = gfc_get_nodesc_array_type (type, &as, PACKED_STATIC, true);
2036
2037 /* as is not needed anymore. */
2038 for (i = 0; i < as.rank + as.corank; i++)
2039 {
2040 gfc_free_expr (as.lower[i]);
2041 gfc_free_expr (as.upper[i]);
2042 }
2043
2044 init = build_constructor (tmptype, v);
2045
2046 TREE_CONSTANT (init) = 1;
2047 TREE_STATIC (init) = 1;
2048
2049 tmp = gfc_create_var (tmptype, "A");
2050 TREE_STATIC (tmp) = 1;
2051 TREE_CONSTANT (tmp) = 1;
2052 TREE_READONLY (tmp) = 1;
2053 DECL_INITIAL (tmp) = init;
2054
2055 return tmp;
2056 }
2057
2058
2059 /* Translate a constant EXPR_ARRAY array constructor for the scalarizer.
2060 This mostly initializes the scalarizer state info structure with the
2061 appropriate values to directly use the array created by the function
2062 gfc_build_constant_array_constructor. */
2063
2064 static void
2065 trans_constant_array_constructor (gfc_ss * ss, tree type)
2066 {
2067 gfc_array_info *info;
2068 tree tmp;
2069 int i;
2070
2071 tmp = gfc_build_constant_array_constructor (ss->info->expr, type);
2072
2073 info = &ss->info->data.array;
2074
2075 info->descriptor = tmp;
2076 info->data = gfc_build_addr_expr (NULL_TREE, tmp);
2077 info->offset = gfc_index_zero_node;
2078
2079 for (i = 0; i < ss->dimen; i++)
2080 {
2081 info->delta[i] = gfc_index_zero_node;
2082 info->start[i] = gfc_index_zero_node;
2083 info->end[i] = gfc_index_zero_node;
2084 info->stride[i] = gfc_index_one_node;
2085 }
2086 }
2087
2088
2089 static int
2090 get_rank (gfc_loopinfo *loop)
2091 {
2092 int rank;
2093
2094 rank = 0;
2095 for (; loop; loop = loop->parent)
2096 rank += loop->dimen;
2097
2098 return rank;
2099 }
2100
2101
2102 /* Helper routine of gfc_trans_array_constructor to determine if the
2103 bounds of the loop specified by LOOP are constant and simple enough
2104 to use with trans_constant_array_constructor. Returns the
2105 iteration count of the loop if suitable, and NULL_TREE otherwise. */
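/* For example, a loop with bounds 0..3 in one dimension and 0..1 in another
   yields (3 + 1) * (1 + 1) = 8 iterations; if any bound is not an
   INTEGER_CST, NULL_TREE is returned instead.  */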
2106
2107 static tree
2108 constant_array_constructor_loop_size (gfc_loopinfo * l)
2109 {
2110 gfc_loopinfo *loop;
2111 tree size = gfc_index_one_node;
2112 tree tmp;
2113 int i, total_dim;
2114
2115 total_dim = get_rank (l);
2116
2117 for (loop = l; loop; loop = loop->parent)
2118 {
2119 for (i = 0; i < loop->dimen; i++)
2120 {
2121 /* If the bounds aren't constant, return NULL_TREE. */
2122 if (!INTEGER_CST_P (loop->from[i]) || !INTEGER_CST_P (loop->to[i]))
2123 return NULL_TREE;
2124 if (!integer_zerop (loop->from[i]))
2125 {
2126 /* Only allow nonzero "from" in one-dimensional arrays. */
2127 if (total_dim != 1)
2128 return NULL_TREE;
2129 tmp = fold_build2_loc (input_location, MINUS_EXPR,
2130 gfc_array_index_type,
2131 loop->to[i], loop->from[i]);
2132 }
2133 else
2134 tmp = loop->to[i];
2135 tmp = fold_build2_loc (input_location, PLUS_EXPR,
2136 gfc_array_index_type, tmp, gfc_index_one_node);
2137 size = fold_build2_loc (input_location, MULT_EXPR,
2138 gfc_array_index_type, size, tmp);
2139 }
2140 }
2141
2142 return size;
2143 }
2144
2145
2146 static tree *
2147 get_loop_upper_bound_for_array (gfc_ss *array, int array_dim)
2148 {
2149 gfc_ss *ss;
2150 int n;
2151
2152 gcc_assert (array->nested_ss == NULL);
2153
2154 for (ss = array; ss; ss = ss->parent)
2155 for (n = 0; n < ss->loop->dimen; n++)
2156 if (array_dim == get_array_ref_dim_for_loop_dim (ss, n))
2157 return &(ss->loop->to[n]);
2158
2159 gcc_unreachable ();
2160 }
2161
2162
2163 static gfc_loopinfo *
2164 outermost_loop (gfc_loopinfo * loop)
2165 {
2166 while (loop->parent != NULL)
2167 loop = loop->parent;
2168
2169 return loop;
2170 }
2171
2172
2173 /* Array constructors are handled by constructing a temporary, then using that
2174 within the scalarization loop. This is not optimal, but seems by far the
2175 simplest method. */
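/* For instance, for an (illustrative) assignment x(:) = (/ (i*i, i = 1, n), 42 /)
   a temporary large enough for the n + 1 elements is created (and grown at
   run time if necessary), filled by gfc_trans_array_constructor_value, and
   the scalarization loop then reads from that temporary.  */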
2176
2177 static void
2178 trans_array_constructor (gfc_ss * ss, locus * where)
2179 {
2180 gfc_constructor_base c;
2181 tree offset;
2182 tree offsetvar;
2183 tree desc;
2184 tree type;
2185 tree tmp;
2186 tree *loop_ubound0;
2187 bool dynamic;
2188 bool old_first_len, old_typespec_chararray_ctor;
2189 tree old_first_len_val;
2190 gfc_loopinfo *loop, *outer_loop;
2191 gfc_ss_info *ss_info;
2192 gfc_expr *expr;
2193 gfc_ss *s;
2194
2195 /* Save the old values for nested checking. */
2196 old_first_len = first_len;
2197 old_first_len_val = first_len_val;
2198 old_typespec_chararray_ctor = typespec_chararray_ctor;
2199
2200 loop = ss->loop;
2201 outer_loop = outermost_loop (loop);
2202 ss_info = ss->info;
2203 expr = ss_info->expr;
2204
2205 /* Do bounds-checking here and in gfc_trans_array_ctor_element only if no
2206 typespec was given for the array constructor. */
2207 typespec_chararray_ctor = (expr->ts.u.cl
2208 && expr->ts.u.cl->length_from_typespec);
2209
2210 if ((gfc_option.rtcheck & GFC_RTCHECK_BOUNDS)
2211 && expr->ts.type == BT_CHARACTER && !typespec_chararray_ctor)
2212 {
2213 first_len_val = gfc_create_var (gfc_charlen_type_node, "len");
2214 first_len = true;
2215 }
2216
2217 gcc_assert (ss->dimen == ss->loop->dimen);
2218
2219 c = expr->value.constructor;
2220 if (expr->ts.type == BT_CHARACTER)
2221 {
2222 bool const_string;
2223
2224 /* get_array_ctor_strlen walks the elements of the constructor; if a
2225 typespec was given, we already know the string length and want the one
2226 specified there. */
2227 if (typespec_chararray_ctor && expr->ts.u.cl->length
2228 && expr->ts.u.cl->length->expr_type != EXPR_CONSTANT)
2229 {
2230 gfc_se length_se;
2231
2232 const_string = false;
2233 gfc_init_se (&length_se, NULL);
2234 gfc_conv_expr_type (&length_se, expr->ts.u.cl->length,
2235 gfc_charlen_type_node);
2236 ss_info->string_length = length_se.expr;
2237 gfc_add_block_to_block (&outer_loop->pre, &length_se.pre);
2238 gfc_add_block_to_block (&outer_loop->post, &length_se.post);
2239 }
2240 else
2241 const_string = get_array_ctor_strlen (&outer_loop->pre, c,
2242 &ss_info->string_length);
2243
2244 /* Complex character array constructors should have been taken care of
2245 and not end up here. */
2246 gcc_assert (ss_info->string_length);
2247
2248 expr->ts.u.cl->backend_decl = ss_info->string_length;
2249
2250 type = gfc_get_character_type_len (expr->ts.kind, ss_info->string_length);
2251 if (const_string)
2252 type = build_pointer_type (type);
2253 }
2254 else
2255 type = gfc_typenode_for_spec (&expr->ts);
2256
2257 /* See if the constructor determines the loop bounds. */
2258 dynamic = false;
2259
2260 loop_ubound0 = get_loop_upper_bound_for_array (ss, 0);
2261
2262 if (expr->shape && get_rank (loop) > 1 && *loop_ubound0 == NULL_TREE)
2263 {
2264 /* We have a multidimensional parameter. */
2265 for (s = ss; s; s = s->parent)
2266 {
2267 int n;
2268 for (n = 0; n < s->loop->dimen; n++)
2269 {
2270 s->loop->from[n] = gfc_index_zero_node;
2271 s->loop->to[n] = gfc_conv_mpz_to_tree (expr->shape[s->dim[n]],
2272 gfc_index_integer_kind);
2273 s->loop->to[n] = fold_build2_loc (input_location, MINUS_EXPR,
2274 gfc_array_index_type,
2275 s->loop->to[n],
2276 gfc_index_one_node);
2277 }
2278 }
2279 }
2280
2281 if (*loop_ubound0 == NULL_TREE)
2282 {
2283 mpz_t size;
2284
2285 /* We should have a 1-dimensional, zero-based loop. */
2286 gcc_assert (loop->parent == NULL && loop->nested == NULL);
2287 gcc_assert (loop->dimen == 1);
2288 gcc_assert (integer_zerop (loop->from[0]));
2289
2290 /* Split the constructor size into a static part and a dynamic part.
2291 Allocate the static size up-front and record whether the dynamic
2292 size might be nonzero. */
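/* E.g. for an illustrative constructor (/ 1, 2, (i, i = 1, n) /) the
   statically known part of the size is 2, while the remainder depends on
   the run-time value of n, so 'dynamic' is set.  */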
2293 mpz_init (size);
2294 dynamic = gfc_get_array_constructor_size (&size, c);
2295 mpz_sub_ui (size, size, 1);
2296 loop->to[0] = gfc_conv_mpz_to_tree (size, gfc_index_integer_kind);
2297 mpz_clear (size);
2298 }
2299
2300 /* Special case constant array constructors. */
2301 if (!dynamic)
2302 {
2303 unsigned HOST_WIDE_INT nelem = gfc_constant_array_constructor_p (c);
2304 if (nelem > 0)
2305 {
2306 tree size = constant_array_constructor_loop_size (loop);
2307 if (size && compare_tree_int (size, nelem) == 0)
2308 {
2309 trans_constant_array_constructor (ss, type);
2310 goto finish;
2311 }
2312 }
2313 }
2314
2315 gfc_trans_create_temp_array (&outer_loop->pre, &outer_loop->post, ss, type,
2316 NULL_TREE, dynamic, true, false, where);
2317
2318 desc = ss_info->data.array.descriptor;
2319 offset = gfc_index_zero_node;
2320 offsetvar = gfc_create_var_np (gfc_array_index_type, "offset");
2321 TREE_NO_WARNING (offsetvar) = 1;
2322 TREE_USED (offsetvar) = 0;
2323 gfc_trans_array_constructor_value (&outer_loop->pre, type, desc, c,
2324 &offset, &offsetvar, dynamic);
2325
2326 /* If the array grows dynamically, the upper bound of the loop variable
2327 is determined by the array's final upper bound. */
2328 if (dynamic)
2329 {
2330 tmp = fold_build2_loc (input_location, MINUS_EXPR,
2331 gfc_array_index_type,
2332 offsetvar, gfc_index_one_node);
2333 tmp = gfc_evaluate_now (tmp, &outer_loop->pre);
2334 gfc_conv_descriptor_ubound_set (&loop->pre, desc, gfc_rank_cst[0], tmp);
2335 if (*loop_ubound0 && TREE_CODE (*loop_ubound0) == VAR_DECL)
2336 gfc_add_modify (&outer_loop->pre, *loop_ubound0, tmp);
2337 else
2338 *loop_ubound0 = tmp;
2339 }
2340
2341 if (TREE_USED (offsetvar))
2342 pushdecl (offsetvar);
2343 else
2344 gcc_assert (INTEGER_CST_P (offset));
2345
2346 #if 0
2347 /* Disable bound checking for now because it's probably broken. */
2348 if (gfc_option.rtcheck & GFC_RTCHECK_BOUNDS)
2349 {
2350 gcc_unreachable ();
2351 }
2352 #endif
2353
2354 finish:
2355 /* Restore old values of globals. */
2356 first_len = old_first_len;
2357 first_len_val = old_first_len_val;
2358 typespec_chararray_ctor = old_typespec_chararray_ctor;
2359 }
2360
2361
2362 /* INFO describes a GFC_SS_SECTION in loop LOOP, and this function is
2363 called after evaluating all of INFO's vector dimensions. Go through
2364 each such vector dimension and see if we can now fill in any missing
2365 loop bounds. */
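/* For a reference such as a(v), where v is a rank-one vector subscript,
   the missing loop bound becomes ubound(v) - lbound(v), giving a zero-based
   loop over the elements of v.  */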
2366
2367 static void
2368 set_vector_loop_bounds (gfc_ss * ss)
2369 {
2370 gfc_loopinfo *loop, *outer_loop;
2371 gfc_array_info *info;
2372 gfc_se se;
2373 tree tmp;
2374 tree desc;
2375 tree zero;
2376 int n;
2377 int dim;
2378
2379 outer_loop = outermost_loop (ss->loop);
2380
2381 info = &ss->info->data.array;
2382
2383 for (; ss; ss = ss->parent)
2384 {
2385 loop = ss->loop;
2386
2387 for (n = 0; n < loop->dimen; n++)
2388 {
2389 dim = ss->dim[n];
2390 if (info->ref->u.ar.dimen_type[dim] != DIMEN_VECTOR
2391 || loop->to[n] != NULL)
2392 continue;
2393
2394 /* Loop variable N indexes vector dimension DIM, and we don't
2395 yet know the upper bound of loop variable N. Set it to the
2396 difference between the vector's upper and lower bounds. */
2397 gcc_assert (loop->from[n] == gfc_index_zero_node);
2398 gcc_assert (info->subscript[dim]
2399 && info->subscript[dim]->info->type == GFC_SS_VECTOR);
2400
2401 gfc_init_se (&se, NULL);
2402 desc = info->subscript[dim]->info->data.array.descriptor;
2403 zero = gfc_rank_cst[0];
2404 tmp = fold_build2_loc (input_location, MINUS_EXPR,
2405 gfc_array_index_type,
2406 gfc_conv_descriptor_ubound_get (desc, zero),
2407 gfc_conv_descriptor_lbound_get (desc, zero));
2408 tmp = gfc_evaluate_now (tmp, &outer_loop->pre);
2409 loop->to[n] = tmp;
2410 }
2411 }
2412 }
2413
2414
2415 /* Add the pre and post chains for all the scalar expressions in a SS chain
2416 to LOOP. This is called after the loop parameters have been calculated,
2417 but before the actual scalarizing loops. */
2418
2419 static void
2420 gfc_add_loop_ss_code (gfc_loopinfo * loop, gfc_ss * ss, bool subscript,
2421 locus * where)
2422 {
2423 gfc_loopinfo *nested_loop, *outer_loop;
2424 gfc_se se;
2425 gfc_ss_info *ss_info;
2426 gfc_array_info *info;
2427 gfc_expr *expr;
2428 int n;
2429
2430 /* Don't evaluate the arguments for realloc_lhs_loop_for_fcn_call; otherwise,
2431 arguments could get evaluated multiple times. */
2432 if (ss->is_alloc_lhs)
2433 return;
2434
2435 outer_loop = outermost_loop (loop);
2436
2437 /* TODO: This can generate bad code if there are ordering dependencies,
2438 e.g., a callee allocated function and an unknown size constructor. */
2439 gcc_assert (ss != NULL);
2440
2441 for (; ss != gfc_ss_terminator; ss = ss->loop_chain)
2442 {
2443 gcc_assert (ss);
2444
2445 /* Cross loop arrays are handled from within the most nested loop. */
2446 if (ss->nested_ss != NULL)
2447 continue;
2448
2449 ss_info = ss->info;
2450 expr = ss_info->expr;
2451 info = &ss_info->data.array;
2452
2453 switch (ss_info->type)
2454 {
2455 case GFC_SS_SCALAR:
2456 /* Scalar expression. Evaluate this now. This includes elemental
2457 dimension indices, but not array section bounds. */
2458 gfc_init_se (&se, NULL);
2459 gfc_conv_expr (&se, expr);
2460 gfc_add_block_to_block (&outer_loop->pre, &se.pre);
2461
2462 if (expr->ts.type != BT_CHARACTER)
2463 {
2464 /* Move the evaluation of scalar expressions outside the
2465 scalarization loop, except for WHERE assignments. */
2466 if (subscript)
2467 se.expr = convert (gfc_array_index_type, se.expr);
2468 if (!ss_info->where)
2469 se.expr = gfc_evaluate_now (se.expr, &outer_loop->pre);
2470 gfc_add_block_to_block (&outer_loop->pre, &se.post);
2471 }
2472 else
2473 gfc_add_block_to_block (&outer_loop->post, &se.post);
2474
2475 ss_info->data.scalar.value = se.expr;
2476 ss_info->string_length = se.string_length;
2477 break;
2478
2479 case GFC_SS_REFERENCE:
2480 /* Scalar argument to elemental procedure. */
2481 gfc_init_se (&se, NULL);
2482 if (ss_info->can_be_null_ref)
2483 {
2484 /* If the actual argument can be absent (in other words, it can
2485 be a NULL reference), don't try to evaluate it; pass instead
2486 the reference directly. */
2487 gfc_conv_expr_reference (&se, expr);
2488 }
2489 else
2490 {
2491 /* Otherwise, evaluate the argument outside the loop and pass
2492 a reference to the value. */
2493 gfc_conv_expr (&se, expr);
2494 }
2495
2496 /* Ensure that a pointer to the string is stored. */
2497 if (expr->ts.type == BT_CHARACTER)
2498 gfc_conv_string_parameter (&se);
2499
2500 gfc_add_block_to_block (&outer_loop->pre, &se.pre);
2501 gfc_add_block_to_block (&outer_loop->post, &se.post);
2502 if (gfc_is_class_scalar_expr (expr))
2503 /* This is necessary because the dynamic type will always be
2504 larger than the declared type. In consequence, assigning
2505 the value to a temporary could segfault.
2506 OOP-TODO: see if this is generally correct or if the value
2507 has to be written to an allocated temporary, whose address
2508 is passed via ss_info. */
2509 ss_info->data.scalar.value = se.expr;
2510 else
2511 ss_info->data.scalar.value = gfc_evaluate_now (se.expr,
2512 &outer_loop->pre);
2513
2514 ss_info->string_length = se.string_length;
2515 break;
2516
2517 case GFC_SS_SECTION:
2518 /* Add the expressions for scalar and vector subscripts. */
2519 for (n = 0; n < GFC_MAX_DIMENSIONS; n++)
2520 if (info->subscript[n])
2521 gfc_add_loop_ss_code (loop, info->subscript[n], true, where);
2522
2523 set_vector_loop_bounds (ss);
2524 break;
2525
2526 case GFC_SS_VECTOR:
2527 /* Get the vector's descriptor and store it in SS. */
2528 gfc_init_se (&se, NULL);
2529 gfc_conv_expr_descriptor (&se, expr);
2530 gfc_add_block_to_block (&outer_loop->pre, &se.pre);
2531 gfc_add_block_to_block (&outer_loop->post, &se.post);
2532 info->descriptor = se.expr;
2533 break;
2534
2535 case GFC_SS_INTRINSIC:
2536 gfc_add_intrinsic_ss_code (loop, ss);
2537 break;
2538
2539 case GFC_SS_FUNCTION:
2540 /* Array function return value. We call the function and save its
2541 result in a temporary for use inside the loop. */
2542 gfc_init_se (&se, NULL);
2543 se.loop = loop;
2544 se.ss = ss;
2545 gfc_conv_expr (&se, expr);
2546 gfc_add_block_to_block (&outer_loop->pre, &se.pre);
2547 gfc_add_block_to_block (&outer_loop->post, &se.post);
2548 ss_info->string_length = se.string_length;
2549 break;
2550
2551 case GFC_SS_CONSTRUCTOR:
2552 if (expr->ts.type == BT_CHARACTER
2553 && ss_info->string_length == NULL
2554 && expr->ts.u.cl
2555 && expr->ts.u.cl->length)
2556 {
2557 gfc_init_se (&se, NULL);
2558 gfc_conv_expr_type (&se, expr->ts.u.cl->length,
2559 gfc_charlen_type_node);
2560 ss_info->string_length = se.expr;
2561 gfc_add_block_to_block (&outer_loop->pre, &se.pre);
2562 gfc_add_block_to_block (&outer_loop->post, &se.post);
2563 }
2564 trans_array_constructor (ss, where);
2565 break;
2566
2567 case GFC_SS_TEMP:
2568 case GFC_SS_COMPONENT:
2569 /* Do nothing. These are handled elsewhere. */
2570 break;
2571
2572 default:
2573 gcc_unreachable ();
2574 }
2575 }
2576
2577 if (!subscript)
2578 for (nested_loop = loop->nested; nested_loop;
2579 nested_loop = nested_loop->next)
2580 gfc_add_loop_ss_code (nested_loop, nested_loop->ss, subscript, where);
2581 }
2582
2583
2584 /* Translate expressions for the descriptor and data pointer of a SS. */
2585 /*GCC ARRAYS*/
2586
2587 static void
2588 gfc_conv_ss_descriptor (stmtblock_t * block, gfc_ss * ss, int base)
2589 {
2590 gfc_se se;
2591 gfc_ss_info *ss_info;
2592 gfc_array_info *info;
2593 tree tmp;
2594
2595 ss_info = ss->info;
2596 info = &ss_info->data.array;
2597
2598 /* Get the descriptor for the array to be scalarized. */
2599 gcc_assert (ss_info->expr->expr_type == EXPR_VARIABLE);
2600 gfc_init_se (&se, NULL);
2601 se.descriptor_only = 1;
2602 gfc_conv_expr_lhs (&se, ss_info->expr);
2603 gfc_add_block_to_block (block, &se.pre);
2604 info->descriptor = se.expr;
2605 ss_info->string_length = se.string_length;
2606
2607 if (base)
2608 {
2609 /* Also the data pointer. */
2610 tmp = gfc_conv_array_data (se.expr);
2611 /* If this is a variable or address of a variable we use it directly.
2612 Otherwise we must evaluate it now to avoid breaking dependency
2613 analysis by pulling the expressions for elemental array indices
2614 inside the loop. */
2615 if (!(DECL_P (tmp)
2616 || (TREE_CODE (tmp) == ADDR_EXPR
2617 && DECL_P (TREE_OPERAND (tmp, 0)))))
2618 tmp = gfc_evaluate_now (tmp, block);
2619 info->data = tmp;
2620
2621 tmp = gfc_conv_array_offset (se.expr);
2622 info->offset = gfc_evaluate_now (tmp, block);
2623
2624 /* Make absolutely sure that the saved_offset is indeed saved
2625 so that the variable is still accessible after the loops
2626 are translated. */
2627 info->saved_offset = info->offset;
2628 }
2629 }
2630
2631
2632 /* Initialize a gfc_loopinfo structure. */
2633
2634 void
2635 gfc_init_loopinfo (gfc_loopinfo * loop)
2636 {
2637 int n;
2638
2639 memset (loop, 0, sizeof (gfc_loopinfo));
2640 gfc_init_block (&loop->pre);
2641 gfc_init_block (&loop->post);
2642
2643 /* Initially scalarize in order and default to no loop reversal. */
2644 for (n = 0; n < GFC_MAX_DIMENSIONS; n++)
2645 {
2646 loop->order[n] = n;
2647 loop->reverse[n] = GFC_INHIBIT_REVERSE;
2648 }
2649
2650 loop->ss = gfc_ss_terminator;
2651 }
2652
2653
2654 /* Copies the loop variable info to a gfc_se structure. Does not copy the SS
2655 chain. */
2656
2657 void
2658 gfc_copy_loopinfo_to_se (gfc_se * se, gfc_loopinfo * loop)
2659 {
2660 se->loop = loop;
2661 }
2662
2663
2664 /* Return an expression for the data pointer of an array. */
2665
2666 tree
2667 gfc_conv_array_data (tree descriptor)
2668 {
2669 tree type;
2670
2671 type = TREE_TYPE (descriptor);
2672 if (GFC_ARRAY_TYPE_P (type))
2673 {
2674 if (TREE_CODE (type) == POINTER_TYPE)
2675 return descriptor;
2676 else
2677 {
2678 /* Descriptorless arrays. */
2679 return gfc_build_addr_expr (NULL_TREE, descriptor);
2680 }
2681 }
2682 else
2683 return gfc_conv_descriptor_data_get (descriptor);
2684 }
2685
2686
2687 /* Return an expression for the base offset of an array. */
2688
2689 tree
2690 gfc_conv_array_offset (tree descriptor)
2691 {
2692 tree type;
2693
2694 type = TREE_TYPE (descriptor);
2695 if (GFC_ARRAY_TYPE_P (type))
2696 return GFC_TYPE_ARRAY_OFFSET (type);
2697 else
2698 return gfc_conv_descriptor_offset_get (descriptor);
2699 }
2700
2701
2702 /* Get an expression for the array stride. */
2703
2704 tree
2705 gfc_conv_array_stride (tree descriptor, int dim)
2706 {
2707 tree tmp;
2708 tree type;
2709
2710 type = TREE_TYPE (descriptor);
2711
2712 /* For descriptorless arrays the stride is stored in the array type. */
2713 tmp = GFC_TYPE_ARRAY_STRIDE (type, dim);
2714 if (tmp != NULL_TREE)
2715 return tmp;
2716
2717 tmp = gfc_conv_descriptor_stride_get (descriptor, gfc_rank_cst[dim]);
2718 return tmp;
2719 }
2720
2721
2722 /* Like gfc_conv_array_stride, but for the lower bound. */
2723
2724 tree
2725 gfc_conv_array_lbound (tree descriptor, int dim)
2726 {
2727 tree tmp;
2728 tree type;
2729
2730 type = TREE_TYPE (descriptor);
2731
2732 tmp = GFC_TYPE_ARRAY_LBOUND (type, dim);
2733 if (tmp != NULL_TREE)
2734 return tmp;
2735
2736 tmp = gfc_conv_descriptor_lbound_get (descriptor, gfc_rank_cst[dim]);
2737 return tmp;
2738 }
2739
2740
2741 /* Like gfc_conv_array_stride, but for the upper bound. */
2742
2743 tree
2744 gfc_conv_array_ubound (tree descriptor, int dim)
2745 {
2746 tree tmp;
2747 tree type;
2748
2749 type = TREE_TYPE (descriptor);
2750
2751 tmp = GFC_TYPE_ARRAY_UBOUND (type, dim);
2752 if (tmp != NULL_TREE)
2753 return tmp;
2754
2755 /* This should only ever happen when passing an assumed shape array
2756 as an actual parameter. The value will never be used. */
2757 if (GFC_ARRAY_TYPE_P (TREE_TYPE (descriptor)))
2758 return gfc_index_zero_node;
2759
2760 tmp = gfc_conv_descriptor_ubound_get (descriptor, gfc_rank_cst[dim]);
2761 return tmp;
2762 }
2763
2764
2765 /* Generate code to perform an array index bound check. */
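/* In outline, the emitted checks are (illustrative sketch)
     if (index < lbound) runtime_error (...);
     if (index > ubound) runtime_error (...);
   with the upper-bound test only present when CHECK_UPPER is set.  */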
2766
2767 static tree
2768 trans_array_bound_check (gfc_se * se, gfc_ss *ss, tree index, int n,
2769 locus * where, bool check_upper)
2770 {
2771 tree fault;
2772 tree tmp_lo, tmp_up;
2773 tree descriptor;
2774 char *msg;
2775 const char * name = NULL;
2776
2777 if (!(gfc_option.rtcheck & GFC_RTCHECK_BOUNDS))
2778 return index;
2779
2780 descriptor = ss->info->data.array.descriptor;
2781
2782 index = gfc_evaluate_now (index, &se->pre);
2783
2784 /* We find a name for the error message. */
2785 name = ss->info->expr->symtree->n.sym->name;
2786 gcc_assert (name != NULL);
2787
2788 if (TREE_CODE (descriptor) == VAR_DECL)
2789 name = IDENTIFIER_POINTER (DECL_NAME (descriptor));
2790
2791 /* If upper bound is present, include both bounds in the error message. */
2792 if (check_upper)
2793 {
2794 tmp_lo = gfc_conv_array_lbound (descriptor, n);
2795 tmp_up = gfc_conv_array_ubound (descriptor, n);
2796
2797 if (name)
2798 asprintf (&msg, "Index '%%ld' of dimension %d of array '%s' "
2799 "outside of expected range (%%ld:%%ld)", n+1, name);
2800 else
2801 asprintf (&msg, "Index '%%ld' of dimension %d "
2802 "outside of expected range (%%ld:%%ld)", n+1);
2803
2804 fault = fold_build2_loc (input_location, LT_EXPR, boolean_type_node,
2805 index, tmp_lo);
2806 gfc_trans_runtime_check (true, false, fault, &se->pre, where, msg,
2807 fold_convert (long_integer_type_node, index),
2808 fold_convert (long_integer_type_node, tmp_lo),
2809 fold_convert (long_integer_type_node, tmp_up));
2810 fault = fold_build2_loc (input_location, GT_EXPR, boolean_type_node,
2811 index, tmp_up);
2812 gfc_trans_runtime_check (true, false, fault, &se->pre, where, msg,
2813 fold_convert (long_integer_type_node, index),
2814 fold_convert (long_integer_type_node, tmp_lo),
2815 fold_convert (long_integer_type_node, tmp_up));
2816 free (msg);
2817 }
2818 else
2819 {
2820 tmp_lo = gfc_conv_array_lbound (descriptor, n);
2821
2822 if (name)
2823 asprintf (&msg, "Index '%%ld' of dimension %d of array '%s' "
2824 "below lower bound of %%ld", n+1, name);
2825 else
2826 asprintf (&msg, "Index '%%ld' of dimension %d "
2827 "below lower bound of %%ld", n+1);
2828
2829 fault = fold_build2_loc (input_location, LT_EXPR, boolean_type_node,
2830 index, tmp_lo);
2831 gfc_trans_runtime_check (true, false, fault, &se->pre, where, msg,
2832 fold_convert (long_integer_type_node, index),
2833 fold_convert (long_integer_type_node, tmp_lo));
2834 free (msg);
2835 }
2836
2837 return index;
2838 }
2839
2840
2841 /* Return the offset for an index. Performs bound checking for elemental
2842 dimensions. Single element references are processed separately.
2843 DIM is the array dimension, I is the loop dimension. */
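/* E.g. for a section a(3:9:2) scalarized with a zero-based loop variable i,
   the index is roughly delta + i * stride (here 3 + 2 * i in array index
   space), which is then multiplied by the descriptor stride below.  */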
2844
2845 static tree
2846 conv_array_index_offset (gfc_se * se, gfc_ss * ss, int dim, int i,
2847 gfc_array_ref * ar, tree stride)
2848 {
2849 gfc_array_info *info;
2850 tree index;
2851 tree desc;
2852 tree data;
2853
2854 info = &ss->info->data.array;
2855
2856 /* Get the index into the array for this dimension. */
2857 if (ar)
2858 {
2859 gcc_assert (ar->type != AR_ELEMENT);
2860 switch (ar->dimen_type[dim])
2861 {
2862 case DIMEN_THIS_IMAGE:
2863 gcc_unreachable ();
2864 break;
2865 case DIMEN_ELEMENT:
2866 /* Elemental dimension. */
2867 gcc_assert (info->subscript[dim]
2868 && info->subscript[dim]->info->type == GFC_SS_SCALAR);
2869 /* We've already translated this value outside the loop. */
2870 index = info->subscript[dim]->info->data.scalar.value;
2871
2872 index = trans_array_bound_check (se, ss, index, dim, &ar->where,
2873 ar->as->type != AS_ASSUMED_SIZE
2874 || dim < ar->dimen - 1);
2875 break;
2876
2877 case DIMEN_VECTOR:
2878 gcc_assert (info && se->loop);
2879 gcc_assert (info->subscript[dim]
2880 && info->subscript[dim]->info->type == GFC_SS_VECTOR);
2881 desc = info->subscript[dim]->info->data.array.descriptor;
2882
2883 /* Get a zero-based index into the vector. */
2884 index = fold_build2_loc (input_location, MINUS_EXPR,
2885 gfc_array_index_type,
2886 se->loop->loopvar[i], se->loop->from[i]);
2887
2888 /* Multiply the index by the stride. */
2889 index = fold_build2_loc (input_location, MULT_EXPR,
2890 gfc_array_index_type,
2891 index, gfc_conv_array_stride (desc, 0));
2892
2893 /* Read the vector to get an index into info->descriptor. */
2894 data = build_fold_indirect_ref_loc (input_location,
2895 gfc_conv_array_data (desc));
2896 index = gfc_build_array_ref (data, index, NULL);
2897 index = gfc_evaluate_now (index, &se->pre);
2898 index = fold_convert (gfc_array_index_type, index);
2899
2900 /* Do any bounds checking on the final info->descriptor index. */
2901 index = trans_array_bound_check (se, ss, index, dim, &ar->where,
2902 ar->as->type != AS_ASSUMED_SIZE
2903 || dim < ar->dimen - 1);
2904 break;
2905
2906 case DIMEN_RANGE:
2907 /* Scalarized dimension. */
2908 gcc_assert (info && se->loop);
2909
2910 /* Multiply the loop variable by the stride and delta. */
2911 index = se->loop->loopvar[i];
2912 if (!integer_onep (info->stride[dim]))
2913 index = fold_build2_loc (input_location, MULT_EXPR,
2914 gfc_array_index_type, index,
2915 info->stride[dim]);
2916 if (!integer_zerop (info->delta[dim]))
2917 index = fold_build2_loc (input_location, PLUS_EXPR,
2918 gfc_array_index_type, index,
2919 info->delta[dim]);
2920 break;
2921
2922 default:
2923 gcc_unreachable ();
2924 }
2925 }
2926 else
2927 {
2928 /* Temporary array or derived type component. */
2929 gcc_assert (se->loop);
2930 index = se->loop->loopvar[se->loop->order[i]];
2931
2932 /* Pointer functions can have stride[0] different from unity.
2933 Use the stride returned by the function call and stored in
2934 the descriptor for the temporary. */
2935 if (se->ss && se->ss->info->type == GFC_SS_FUNCTION
2936 && se->ss->info->expr
2937 && se->ss->info->expr->symtree
2938 && se->ss->info->expr->symtree->n.sym->result
2939 && se->ss->info->expr->symtree->n.sym->result->attr.pointer)
2940 stride = gfc_conv_descriptor_stride_get (info->descriptor,
2941 gfc_rank_cst[dim]);
2942
2943 if (!integer_zerop (info->delta[dim]))
2944 index = fold_build2_loc (input_location, PLUS_EXPR,
2945 gfc_array_index_type, index, info->delta[dim]);
2946 }
2947
2948 /* Multiply by the stride. */
2949 if (!integer_onep (stride))
2950 index = fold_build2_loc (input_location, MULT_EXPR, gfc_array_index_type,
2951 index, stride);
2952
2953 return index;
2954 }
2955
2956
2957 /* Build a scalarized array reference using the vptr 'size'. */
2958
2959 static bool
2960 build_class_array_ref (gfc_se *se, tree base, tree index)
2961 {
2962 tree type;
2963 tree size;
2964 tree offset;
2965 tree decl;
2966 tree tmp;
2967 gfc_expr *expr = se->ss->info->expr;
2968 gfc_ref *ref;
2969 gfc_ref *class_ref;
2970 gfc_typespec *ts;
2971
2972 if (expr == NULL || expr->ts.type != BT_CLASS)
2973 return false;
2974
2975 if (expr->symtree && expr->symtree->n.sym->ts.type == BT_CLASS)
2976 ts = &expr->symtree->n.sym->ts;
2977 else
2978 ts = NULL;
2979 class_ref = NULL;
2980
2981 for (ref = expr->ref; ref; ref = ref->next)
2982 {
2983 if (ref->type == REF_COMPONENT
2984 && ref->u.c.component->ts.type == BT_CLASS
2985 && ref->next && ref->next->type == REF_COMPONENT
2986 && strcmp (ref->next->u.c.component->name, "_data") == 0
2987 && ref->next->next
2988 && ref->next->next->type == REF_ARRAY
2989 && ref->next->next->u.ar.type != AR_ELEMENT)
2990 {
2991 ts = &ref->u.c.component->ts;
2992 class_ref = ref;
2993 break;
2994 }
2995 }
2996
2997 if (ts == NULL)
2998 return false;
2999
3000 if (class_ref == NULL && expr->symtree->n.sym->attr.function
3001 && expr->symtree->n.sym == expr->symtree->n.sym->result)
3002 {
3003 gcc_assert (expr->symtree->n.sym->backend_decl == current_function_decl);
3004 decl = gfc_get_fake_result_decl (expr->symtree->n.sym, 0);
3005 }
3006 else if (class_ref == NULL)
3007 decl = expr->symtree->n.sym->backend_decl;
3008 else
3009 {
3010 /* Remove everything after the last class reference, convert the
3011 expression and then restore its tail end once more. */
3012 gfc_se tmpse;
3013 ref = class_ref->next;
3014 class_ref->next = NULL;
3015 gfc_init_se (&tmpse, NULL);
3016 gfc_conv_expr (&tmpse, expr);
3017 decl = tmpse.expr;
3018 class_ref->next = ref;
3019 }
3020
3021 size = gfc_vtable_size_get (decl);
3022
3023 /* Build the address of the element. */
3024 type = TREE_TYPE (TREE_TYPE (base));
3025 size = fold_convert (TREE_TYPE (index), size);
3026 offset = fold_build2_loc (input_location, MULT_EXPR,
3027 gfc_array_index_type,
3028 index, size);
3029 tmp = gfc_build_addr_expr (pvoid_type_node, base);
3030 tmp = fold_build_pointer_plus_loc (input_location, tmp, offset);
3031 tmp = fold_convert (build_pointer_type (type), tmp);
3032
3033 /* Return the element in the se expression. */
3034 se->expr = build_fold_indirect_ref_loc (input_location, tmp);
3035 return true;
3036 }
3037
3038
3039 /* Build a scalarized reference to an array. */
3040
3041 static void
3042 gfc_conv_scalarized_array_ref (gfc_se * se, gfc_array_ref * ar)
3043 {
3044 gfc_array_info *info;
3045 tree decl = NULL_TREE;
3046 tree index;
3047 tree tmp;
3048 gfc_ss *ss;
3049 gfc_expr *expr;
3050 int n;
3051
3052 ss = se->ss;
3053 expr = ss->info->expr;
3054 info = &ss->info->data.array;
3055 if (ar)
3056 n = se->loop->order[0];
3057 else
3058 n = 0;
3059
3060 index = conv_array_index_offset (se, ss, ss->dim[n], n, ar, info->stride0);
3061 /* Add the offset for this dimension to the stored offset for all other
3062 dimensions. */
3063 if (!integer_zerop (info->offset))
3064 index = fold_build2_loc (input_location, PLUS_EXPR, gfc_array_index_type,
3065 index, info->offset);
3066
3067 if (expr && is_subref_array (expr))
3068 decl = expr->symtree->n.sym->backend_decl;
3069
3070 tmp = build_fold_indirect_ref_loc (input_location, info->data);
3071
3072 /* Use the vptr 'size' field to access the element of a class
3073 array. */
3074 if (build_class_array_ref (se, tmp, index))
3075 return;
3076
3077 se->expr = gfc_build_array_ref (tmp, index, decl);
3078 }
3079
3080
3081 /* Translate access of temporary array. */
3082
3083 void
3084 gfc_conv_tmp_array_ref (gfc_se * se)
3085 {
3086 se->string_length = se->ss->info->string_length;
3087 gfc_conv_scalarized_array_ref (se, NULL);
3088 gfc_advance_se_ss_chain (se);
3089 }
3090
3091 /* Add T to the offset pair *OFFSET, *CST_OFFSET. */
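/* Constant terms are accumulated into *CST_OFFSET with int_const_binop so
   they fold at compile time; everything else is chained onto *OFFSET with
   PLUS_EXPR.  */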
3092
3093 static void
3094 add_to_offset (tree *cst_offset, tree *offset, tree t)
3095 {
3096 if (TREE_CODE (t) == INTEGER_CST)
3097 *cst_offset = int_const_binop (PLUS_EXPR, *cst_offset, t);
3098 else
3099 {
3100 if (!integer_zerop (*offset))
3101 *offset = fold_build2_loc (input_location, PLUS_EXPR,
3102 gfc_array_index_type, *offset, t);
3103 else
3104 *offset = t;
3105 }
3106 }
3107
3108
3109 static tree
3110 build_array_ref (tree desc, tree offset, tree decl)
3111 {
3112 tree tmp;
3113 tree type;
3114
3115 /* Class container types do not always have GFC_CLASS_TYPE_P set,
3116 but the canonical type does. */
3117 if (GFC_DESCRIPTOR_TYPE_P (TREE_TYPE (desc))
3118 && TREE_CODE (desc) == COMPONENT_REF)
3119 {
3120 type = TREE_TYPE (TREE_OPERAND (desc, 0));
3121 if (TYPE_CANONICAL (type)
3122 && GFC_CLASS_TYPE_P (TYPE_CANONICAL (type)))
3123 type = TYPE_CANONICAL (type);
3124 }
3125 else
3126 type = NULL;
3127
3128 /* Class array references need special treatment because the size of
3129 the dynamic type has to be used to address the element. */
3130 if (type && GFC_CLASS_TYPE_P (type))
3131 {
3132 type = gfc_get_element_type (TREE_TYPE (desc));
3133 tmp = TREE_OPERAND (desc, 0);
3134 tmp = gfc_get_class_array_ref (offset, tmp);
3135 tmp = fold_convert (build_pointer_type (type), tmp);
3136 tmp = build_fold_indirect_ref_loc (input_location, tmp);
3137 return tmp;
3138 }
3139
3140 tmp = gfc_conv_array_data (desc);
3141 tmp = build_fold_indirect_ref_loc (input_location, tmp);
3142 tmp = gfc_build_array_ref (tmp, offset, decl);
3143 return tmp;
3144 }
3145
3146
3147 /* Build an array reference. se->expr already holds the array descriptor.
3148 This should be either a variable, indirect variable reference or component
3149 reference. For arrays which do not have a descriptor, se->expr will be
3150 the data pointer.
3151 a(i, j, k) = base[offset + i * stride[0] + j * stride[1] + k * stride[2]] */
3152
3153 void
3154 gfc_conv_array_ref (gfc_se * se, gfc_array_ref * ar, gfc_expr *expr,
3155 locus * where)
3156 {
3157 int n;
3158 tree offset, cst_offset;
3159 tree tmp;
3160 tree stride;
3161 gfc_se indexse;
3162 gfc_se tmpse;
3163 gfc_symbol * sym = expr->symtree->n.sym;
3164 char *var_name = NULL;
3165
3166 if (ar->dimen == 0)
3167 {
3168 gcc_assert (ar->codimen);
3169
3170 if (GFC_DESCRIPTOR_TYPE_P (TREE_TYPE (se->expr)))
3171 se->expr = build_fold_indirect_ref (gfc_conv_array_data (se->expr));
3172 else
3173 {
3174 if (GFC_ARRAY_TYPE_P (TREE_TYPE (se->expr))
3175 && TREE_CODE (TREE_TYPE (se->expr)) == POINTER_TYPE)
3176 se->expr = build_fold_indirect_ref_loc (input_location, se->expr);
3177
3178 /* Use the actual tree type and not the wrapped coarray. */
3179 if (!se->want_pointer)
3180 se->expr = fold_convert (TYPE_MAIN_VARIANT (TREE_TYPE (se->expr)),
3181 se->expr);
3182 }
3183
3184 return;
3185 }
3186
3187 /* Handle scalarized references separately. */
3188 if (ar->type != AR_ELEMENT)
3189 {
3190 gfc_conv_scalarized_array_ref (se, ar);
3191 gfc_advance_se_ss_chain (se);
3192 return;
3193 }
3194
3195 if (gfc_option.rtcheck & GFC_RTCHECK_BOUNDS)
3196 {
3197 size_t len;
3198 gfc_ref *ref;
3199
3200 len = strlen (sym->name) + 1;
3201 for (ref = expr->ref; ref; ref = ref->next)
3202 {
3203 if (ref->type == REF_ARRAY && &ref->u.ar == ar)
3204 break;
3205 if (ref->type == REF_COMPONENT)
3206 len += 1 + strlen (ref->u.c.component->name);
3207 }
3208
3209 var_name = XALLOCAVEC (char, len);
3210 strcpy (var_name, sym->name);
3211
3212 for (ref = expr->ref; ref; ref = ref->next)
3213 {
3214 if (ref->type == REF_ARRAY && &ref->u.ar == ar)
3215 break;
3216 if (ref->type == REF_COMPONENT)
3217 {
3218 strcat (var_name, "%%");
3219 strcat (var_name, ref->u.c.component->name);
3220 }
3221 }
3222 }
3223
3224 cst_offset = offset = gfc_index_zero_node;
3225 add_to_offset (&cst_offset, &offset, gfc_conv_array_offset (se->expr));
3226
3227 /* Calculate the offsets from all the dimensions. Make sure to associate
3228 the final offset so that we form a chain of loop invariant summands. */
3229 for (n = ar->dimen - 1; n >= 0; n--)
3230 {
3231 /* Calculate the index for this dimension. */
3232 gfc_init_se (&indexse, se);
3233 gfc_conv_expr_type (&indexse, ar->start[n], gfc_array_index_type);
3234 gfc_add_block_to_block (&se->pre, &indexse.pre);
3235
3236 if (gfc_option.rtcheck & GFC_RTCHECK_BOUNDS)
3237 {
3238 /* Check array bounds. */
3239 tree cond;
3240 char *msg;
3241
3242 /* Evaluate the indexse.expr only once. */
3243 indexse.expr = save_expr (indexse.expr);
3244
3245 /* Lower bound. */
3246 tmp = gfc_conv_array_lbound (se->expr, n);
3247 if (sym->attr.temporary)
3248 {
3249 gfc_init_se (&tmpse, se);
3250 gfc_conv_expr_type (&tmpse, ar->as->lower[n],
3251 gfc_array_index_type);
3252 gfc_add_block_to_block (&se->pre, &tmpse.pre);
3253 tmp = tmpse.expr;
3254 }
3255
3256 cond = fold_build2_loc (input_location, LT_EXPR, boolean_type_node,
3257 indexse.expr, tmp);
3258 asprintf (&msg, "Index '%%ld' of dimension %d of array '%s' "
3259 "below lower bound of %%ld", n+1, var_name);
3260 gfc_trans_runtime_check (true, false, cond, &se->pre, where, msg,
3261 fold_convert (long_integer_type_node,
3262 indexse.expr),
3263 fold_convert (long_integer_type_node, tmp));
3264 free (msg);
3265
3266 /* Upper bound, but not for the last dimension of assumed-size
3267 arrays. */
3268 if (n < ar->dimen - 1 || ar->as->type != AS_ASSUMED_SIZE)
3269 {
3270 tmp = gfc_conv_array_ubound (se->expr, n);
3271 if (sym->attr.temporary)
3272 {
3273 gfc_init_se (&tmpse, se);
3274 gfc_conv_expr_type (&tmpse, ar->as->upper[n],
3275 gfc_array_index_type);
3276 gfc_add_block_to_block (&se->pre, &tmpse.pre);
3277 tmp = tmpse.expr;
3278 }
3279
3280 cond = fold_build2_loc (input_location, GT_EXPR,
3281 boolean_type_node, indexse.expr, tmp);
3282 asprintf (&msg, "Index '%%ld' of dimension %d of array '%s' "
3283 "above upper bound of %%ld", n+1, var_name);
3284 gfc_trans_runtime_check (true, false, cond, &se->pre, where, msg,
3285 fold_convert (long_integer_type_node,
3286 indexse.expr),
3287 fold_convert (long_integer_type_node, tmp));
3288 free (msg);
3289 }
3290 }
3291
3292 /* Multiply the index by the stride. */
3293 stride = gfc_conv_array_stride (se->expr, n);
3294 tmp = fold_build2_loc (input_location, MULT_EXPR, gfc_array_index_type,
3295 indexse.expr, stride);
3296
3297 /* And add it to the total. */
3298 add_to_offset (&cst_offset, &offset, tmp);
3299 }
3300
3301 if (!integer_zerop (cst_offset))
3302 offset = fold_build2_loc (input_location, PLUS_EXPR,
3303 gfc_array_index_type, offset, cst_offset);
3304
3305 se->expr = build_array_ref (se->expr, offset, sym->backend_decl);
3306 }
3307
3308
3309 /* Add the offset corresponding to array's ARRAY_DIM dimension and loop's
3310 LOOP_DIM dimension (if any) to array's offset. */
3311
3312 static void
3313 add_array_offset (stmtblock_t *pblock, gfc_loopinfo *loop, gfc_ss *ss,
3314 gfc_array_ref *ar, int array_dim, int loop_dim)
3315 {
3316 gfc_se se;
3317 gfc_array_info *info;
3318 tree stride, index;
3319
3320 info = &ss->info->data.array;
3321
3322 gfc_init_se (&se, NULL);
3323 se.loop = loop;
3324 se.expr = info->descriptor;
3325 stride = gfc_conv_array_stride (info->descriptor, array_dim);
3326 index = conv_array_index_offset (&se, ss, array_dim, loop_dim, ar, stride);
3327 gfc_add_block_to_block (pblock, &se.pre);
3328
3329 info->offset = fold_build2_loc (input_location, PLUS_EXPR,
3330 gfc_array_index_type,
3331 info->offset, index);
3332 info->offset = gfc_evaluate_now (info->offset, pblock);
3333 }
3334
3335
3336 /* Generate the code to be executed immediately before entering a
3337 scalarization loop. */
3338
3339 static void
3340 gfc_trans_preloop_setup (gfc_loopinfo * loop, int dim, int flag,
3341 stmtblock_t * pblock)
3342 {
3343 tree stride;
3344 gfc_ss_info *ss_info;
3345 gfc_array_info *info;
3346 gfc_ss_type ss_type;
3347 gfc_ss *ss, *pss;
3348 gfc_loopinfo *ploop;
3349 gfc_array_ref *ar;
3350 int i;
3351
3352 /* This code will be executed before entering the scalarization loop
3353 for this dimension. */
3354 for (ss = loop->ss; ss != gfc_ss_terminator; ss = ss->loop_chain)
3355 {
3356 ss_info = ss->info;
3357
3358 if ((ss_info->useflags & flag) == 0)
3359 continue;
3360
3361 ss_type = ss_info->type;
3362 if (ss_type != GFC_SS_SECTION
3363 && ss_type != GFC_SS_FUNCTION
3364 && ss_type != GFC_SS_CONSTRUCTOR
3365 && ss_type != GFC_SS_COMPONENT)
3366 continue;
3367
3368 info = &ss_info->data.array;
3369
3370 gcc_assert (dim < ss->dimen);
3371 gcc_assert (ss->dimen == loop->dimen);
3372
3373 if (info->ref)
3374 ar = &info->ref->u.ar;
3375 else
3376 ar = NULL;
3377
3378 if (dim == loop->dimen - 1 && loop->parent != NULL)
3379 {
3380 /* If we are in the outermost dimension of this loop, the previous
3381 dimension is handled by the parent loop. */
3382 gcc_assert (ss->parent != NULL);
3383
3384 pss = ss->parent;
3385 ploop = loop->parent;
3386
3387 /* ss and ss->parent refer to the same array. */
3388 gcc_assert (ss_info == pss->info);
3389 }
3390 else
3391 {
3392 ploop = loop;
3393 pss = ss;
3394 }
3395
3396 if (dim == loop->dimen - 1)
3397 i = 0;
3398 else
3399 i = dim + 1;
3400
3401 /* For the time being, there is no loop reordering. */
3402 gcc_assert (i == ploop->order[i]);
3403 i = ploop->order[i];
3404
3405 if (dim == loop->dimen - 1 && loop->parent == NULL)
3406 {
3407 stride = gfc_conv_array_stride (info->descriptor,
3408 innermost_ss (ss)->dim[i]);
3409
3410 /* Calculate the stride of the innermost loop. Hopefully this will
3411 allow the backend optimizers to do their work more
3412 effectively. */
3413 info->stride0 = gfc_evaluate_now (stride, pblock);
3414
3415 /* For the outermost loop calculate the offset due to any
3416 elemental dimensions. It will have been initialized with the
3417 base offset of the array. */
3418 if (info->ref)
3419 {
3420 for (i = 0; i < ar->dimen; i++)
3421 {
3422 if (ar->dimen_type[i] != DIMEN_ELEMENT)
3423 continue;
3424
3425 add_array_offset (pblock, loop, ss, ar, i, /* unused */ -1);
3426 }
3427 }
3428 }
3429 else
3430 /* Add the offset for the previous loop dimension. */
3431 add_array_offset (pblock, ploop, ss, ar, pss->dim[i], i);
3432
3433 /* Remember this offset for the second loop. */
3434 if (dim == loop->temp_dim - 1 && loop->parent == NULL)
3435 info->saved_offset = info->offset;
3436 }
3437 }
3438
3439
3440 /* Start a scalarized expression. Creates a scope and declares loop
3441 variables. */
3442
3443 void
3444 gfc_start_scalarized_body (gfc_loopinfo * loop, stmtblock_t * pbody)
3445 {
3446 int dim;
3447 int n;
3448 int flags;
3449
3450 gcc_assert (!loop->array_parameter);
3451
3452 for (dim = loop->dimen - 1; dim >= 0; dim--)
3453 {
3454 n = loop->order[dim];
3455
3456 gfc_start_block (&loop->code[n]);
3457
3458 /* Create the loop variable. */
3459 loop->loopvar[n] = gfc_create_var (gfc_array_index_type, "S");
3460
3461 if (dim < loop->temp_dim)
3462 flags = 3;
3463 else
3464 flags = 1;
3465 /* Calculate values that will be constant within this loop. */
3466 gfc_trans_preloop_setup (loop, dim, flags, &loop->code[n]);
3467 }
3468 gfc_start_block (pbody);
3469 }
3470
3471
3472 /* Generates the actual loop code for a scalarization loop. */
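/* Outside of an OpenMP workshare, the generated loop has roughly the shape
     S = from;
     loop { if (S > to) goto exit; body; S = S + 1; } exit:;
   with the comparison and increment reversed when the loop runs backwards.  */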
3473
3474 void
3475 gfc_trans_scalarized_loop_end (gfc_loopinfo * loop, int n,
3476 stmtblock_t * pbody)
3477 {
3478 stmtblock_t block;
3479 tree cond;
3480 tree tmp;
3481 tree loopbody;
3482 tree exit_label;
3483 tree stmt;
3484 tree init;
3485 tree incr;
3486
3487 if ((ompws_flags & (OMPWS_WORKSHARE_FLAG | OMPWS_SCALARIZER_WS))
3488 == (OMPWS_WORKSHARE_FLAG | OMPWS_SCALARIZER_WS)
3489 && n == loop->dimen - 1)
3490 {
3491 /* We create an OMP_FOR construct for the outermost scalarized loop. */
3492 init = make_tree_vec (1);
3493 cond = make_tree_vec (1);
3494 incr = make_tree_vec (1);
3495
3496 /* Cycle statement is implemented with a goto. Exit statement must not
3497 be present for this loop. */
3498 exit_label = gfc_build_label_decl (NULL_TREE);
3499 TREE_USED (exit_label) = 1;
3500
3501 /* Label for cycle statements (if needed). */
3502 tmp = build1_v (LABEL_EXPR, exit_label);
3503 gfc_add_expr_to_block (pbody, tmp);
3504
3505 stmt = make_node (OMP_FOR);
3506
3507 TREE_TYPE (stmt) = void_type_node;
3508 OMP_FOR_BODY (stmt) = loopbody = gfc_finish_block (pbody);
3509
3510 OMP_FOR_CLAUSES (stmt) = build_omp_clause (input_location,
3511 OMP_CLAUSE_SCHEDULE);
3512 OMP_CLAUSE_SCHEDULE_KIND (OMP_FOR_CLAUSES (stmt))
3513 = OMP_CLAUSE_SCHEDULE_STATIC;
3514 if (ompws_flags & OMPWS_NOWAIT)
3515 OMP_CLAUSE_CHAIN (OMP_FOR_CLAUSES (stmt))
3516 = build_omp_clause (input_location, OMP_CLAUSE_NOWAIT);
3517
3518 /* Initialize the loopvar. */
3519 TREE_VEC_ELT (init, 0) = build2_v (MODIFY_EXPR, loop->loopvar[n],
3520 loop->from[n]);
3521 OMP_FOR_INIT (stmt) = init;
3522 /* The exit condition. */
3523 TREE_VEC_ELT (cond, 0) = build2_loc (input_location, LE_EXPR,
3524 boolean_type_node,
3525 loop->loopvar[n], loop->to[n]);
3526 SET_EXPR_LOCATION (TREE_VEC_ELT (cond, 0), input_location);
3527 OMP_FOR_COND (stmt) = cond;
3528 /* Increment the loopvar. */
3529 tmp = build2_loc (input_location, PLUS_EXPR, gfc_array_index_type,
3530 loop->loopvar[n], gfc_index_one_node);
3531 TREE_VEC_ELT (incr, 0) = fold_build2_loc (input_location, MODIFY_EXPR,
3532 void_type_node, loop->loopvar[n], tmp);
3533 OMP_FOR_INCR (stmt) = incr;
3534
3535 ompws_flags &= ~OMPWS_CURR_SINGLEUNIT;
3536 gfc_add_expr_to_block (&loop->code[n], stmt);
3537 }
3538 else
3539 {
3540 bool reverse_loop = (loop->reverse[n] == GFC_REVERSE_SET)
3541 && (loop->temp_ss == NULL);
3542
3543 loopbody = gfc_finish_block (pbody);
3544
3545 if (reverse_loop)
3546 {
3547 tmp = loop->from[n];
3548 loop->from[n] = loop->to[n];
3549 loop->to[n] = tmp;
3550 }
3551
3552 /* Initialize the loopvar. */
3553 if (loop->loopvar[n] != loop->from[n])
3554 gfc_add_modify (&loop->code[n], loop->loopvar[n], loop->from[n]);
3555
3556 exit_label = gfc_build_label_decl (NULL_TREE);
3557
3558 /* Generate the loop body. */
3559 gfc_init_block (&block);
3560
3561 /* The exit condition. */
3562 cond = fold_build2_loc (input_location, reverse_loop ? LT_EXPR : GT_EXPR,
3563 boolean_type_node, loop->loopvar[n], loop->to[n]);
3564 tmp = build1_v (GOTO_EXPR, exit_label);
3565 TREE_USED (exit_label) = 1;
3566 tmp = build3_v (COND_EXPR, cond, tmp, build_empty_stmt (input_location));
3567 gfc_add_expr_to_block (&block, tmp);
3568
3569 /* The main body. */
3570 gfc_add_expr_to_block (&block, loopbody);
3571
3572 /* Increment the loopvar. */
3573 tmp = fold_build2_loc (input_location,
3574 reverse_loop ? MINUS_EXPR : PLUS_EXPR,
3575 gfc_array_index_type, loop->loopvar[n],
3576 gfc_index_one_node);
3577
3578 gfc_add_modify (&block, loop->loopvar[n], tmp);
3579
3580 /* Build the loop. */
3581 tmp = gfc_finish_block (&block);
3582 tmp = build1_v (LOOP_EXPR, tmp);
3583 gfc_add_expr_to_block (&loop->code[n], tmp);
3584
3585 /* Add the exit label. */
3586 tmp = build1_v (LABEL_EXPR, exit_label);
3587 gfc_add_expr_to_block (&loop->code[n], tmp);
3588 }
3589
3590 }
3591
3592
3593 /* Finishes and generates the loops for a scalarized expression. */
3594
3595 void
3596 gfc_trans_scalarizing_loops (gfc_loopinfo * loop, stmtblock_t * body)
3597 {
3598 int dim;
3599 int n;
3600 gfc_ss *ss;
3601 stmtblock_t *pblock;
3602 tree tmp;
3603
3604 pblock = body;
3605 /* Generate the loops. */
3606 for (dim = 0; dim < loop->dimen; dim++)
3607 {
3608 n = loop->order[dim];
3609 gfc_trans_scalarized_loop_end (loop, n, pblock);
3610 loop->loopvar[n] = NULL_TREE;
3611 pblock = &loop->code[n];
3612 }
3613
3614 tmp = gfc_finish_block (pblock);
3615 gfc_add_expr_to_block (&loop->pre, tmp);
3616
3617 /* Clear all the used flags. */
3618 for (ss = loop->ss; ss != gfc_ss_terminator; ss = ss->loop_chain)
3619 if (ss->parent == NULL)
3620 ss->info->useflags = 0;
3621 }
3622
3623
3624 /* Finish the main body of a scalarized expression, and start the secondary
3625 copying body. */
3626
3627 void
3628 gfc_trans_scalarized_loop_boundary (gfc_loopinfo * loop, stmtblock_t * body)
3629 {
3630 int dim;
3631 int n;
3632 stmtblock_t *pblock;
3633 gfc_ss *ss;
3634
3635 pblock = body;
3636 /* We finish as many loops as are used by the temporary. */
3637 for (dim = 0; dim < loop->temp_dim - 1; dim++)
3638 {
3639 n = loop->order[dim];
3640 gfc_trans_scalarized_loop_end (loop, n, pblock);
3641 loop->loopvar[n] = NULL_TREE;
3642 pblock = &loop->code[n];
3643 }
3644
3645 /* We don't want to finish the outermost loop entirely. */
3646 n = loop->order[loop->temp_dim - 1];
3647 gfc_trans_scalarized_loop_end (loop, n, pblock);
3648
3649 /* Restore the initial offsets. */
3650 for (ss = loop->ss; ss != gfc_ss_terminator; ss = ss->loop_chain)
3651 {
3652 gfc_ss_type ss_type;
3653 gfc_ss_info *ss_info;
3654
3655 ss_info = ss->info;
3656
3657 if ((ss_info->useflags & 2) == 0)
3658 continue;
3659
3660 ss_type = ss_info->type;
3661 if (ss_type != GFC_SS_SECTION
3662 && ss_type != GFC_SS_FUNCTION
3663 && ss_type != GFC_SS_CONSTRUCTOR
3664 && ss_type != GFC_SS_COMPONENT)
3665 continue;
3666
3667 ss_info->data.array.offset = ss_info->data.array.saved_offset;
3668 }
3669
3670 /* Restart all the inner loops we just finished. */
3671 for (dim = loop->temp_dim - 2; dim >= 0; dim--)
3672 {
3673 n = loop->order[dim];
3674
3675 gfc_start_block (&loop->code[n]);
3676
3677 loop->loopvar[n] = gfc_create_var (gfc_array_index_type, "Q");
3678
3679 gfc_trans_preloop_setup (loop, dim, 2, &loop->code[n]);
3680 }
3681
3682 /* Start a block for the secondary copying code. */
3683 gfc_start_block (body);
3684 }
3685
3686
3687 /* Precalculate (either lower or upper) bound of an array section.
3688 BLOCK: Block in which the (pre)calculation code will go.
3689 BOUNDS[DIM]: Where the bound value will be stored once evaluated.
3690 VALUES[DIM]: Specified bound (NULL <=> unspecified).
3691 DESC: Array descriptor from which the bound will be picked if unspecified
3692 (either lower or upper bound according to LBOUND). */
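/* For a section such as a(:n), the lower bound is unspecified and is taken
   from the descriptor via gfc_conv_array_lbound, while the upper bound is
   the evaluated expression n.  */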
3693
3694 static void
3695 evaluate_bound (stmtblock_t *block, tree *bounds, gfc_expr ** values,
3696 tree desc, int dim, bool lbound)
3697 {
3698 gfc_se se;
3699 gfc_expr * input_val = values[dim];
3700 tree *output = &bounds[dim];
3701
3702
3703 if (input_val)
3704 {
3705 /* Specified section bound. */
3706 gfc_init_se (&se, NULL);
3707 gfc_conv_expr_type (&se, input_val, gfc_array_index_type);
3708 gfc_add_block_to_block (block, &se.pre);
3709 *output = se.expr;
3710 }
3711 else
3712 {
3713 /* No specific bound specified so use the bound of the array. */
3714 *output = lbound ? gfc_conv_array_lbound (desc, dim) :
3715 gfc_conv_array_ubound (desc, dim);
3716 }
3717 *output = gfc_evaluate_now (*output, block);
3718 }
3719
3720
3721 /* Calculate the lower bound of an array section. */
3722
3723 static void
3724 gfc_conv_section_startstride (stmtblock_t * block, gfc_ss * ss, int dim)
3725 {
3726 gfc_expr *stride = NULL;
3727 tree desc;
3728 gfc_se se;
3729 gfc_array_info *info;
3730 gfc_array_ref *ar;
3731
3732 gcc_assert (ss->info->type == GFC_SS_SECTION);
3733
3734 info = &ss->info->data.array;
3735 ar = &info->ref->u.ar;
3736
3737 if (ar->dimen_type[dim] == DIMEN_VECTOR)
3738 {
3739 /* We use a zero-based index to access the vector. */
3740 info->start[dim] = gfc_index_zero_node;
3741 info->end[dim] = NULL;
3742 info->stride[dim] = gfc_index_one_node;
3743 return;
3744 }
3745
3746 gcc_assert (ar->dimen_type[dim] == DIMEN_RANGE
3747 || ar->dimen_type[dim] == DIMEN_THIS_IMAGE);
3748 desc = info->descriptor;
3749 stride = ar->stride[dim];
3750
3751 /* Calculate the start of the range. For vector subscripts this will
3752 be the range of the vector. */
3753 evaluate_bound (block, info->start, ar->start, desc, dim, true);
3754
3755 /* Similarly calculate the end. Although this is not used in the
3756 scalarizer, it is needed when checking bounds and where the end
3757 is an expression with side-effects. */
3758 evaluate_bound (block, info->end, ar->end, desc, dim, false);
3759
3760 /* Calculate the stride. */
3761 if (stride == NULL)
3762 info->stride[dim] = gfc_index_one_node;
3763 else
3764 {
3765 gfc_init_se (&se, NULL);
3766 gfc_conv_expr_type (&se, stride, gfc_array_index_type);
3767 gfc_add_block_to_block (block, &se.pre);
3768 info->stride[dim] = gfc_evaluate_now (se.expr, block);
3769 }
3770 }
3771
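/* For a section reference such as

     a(3:n:2)

   the code above records start = 3, end = n and stride = 2 for that
   dimension (evaluating n only once), whereas for a vector subscript
   like a(v) it records a zero-based start and unit stride, the actual
   indices being looked up through the vector later.  */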
3772
3773 /* Calculates the range start and stride for a SS chain. Also gets the
3774 descriptor and data pointer. The range of vector subscripts is the size
3775 of the vector. Array bounds are also checked. */
3776
3777 void
3778 gfc_conv_ss_startstride (gfc_loopinfo * loop)
3779 {
3780 int n;
3781 tree tmp;
3782 gfc_ss *ss;
3783 tree desc;
3784
3785 gfc_loopinfo * const outer_loop = outermost_loop (loop);
3786
3787 loop->dimen = 0;
3788 /* Determine the rank of the loop. */
3789 for (ss = loop->ss; ss != gfc_ss_terminator; ss = ss->loop_chain)
3790 {
3791 switch (ss->info->type)
3792 {
3793 case GFC_SS_SECTION:
3794 case GFC_SS_CONSTRUCTOR:
3795 case GFC_SS_FUNCTION:
3796 case GFC_SS_COMPONENT:
3797 loop->dimen = ss->dimen;
3798 goto done;
3799
3800 /* As usual, lbound and ubound are exceptions! */
3801 case GFC_SS_INTRINSIC:
3802 switch (ss->info->expr->value.function.isym->id)
3803 {
3804 case GFC_ISYM_LBOUND:
3805 case GFC_ISYM_UBOUND:
3806 case GFC_ISYM_LCOBOUND:
3807 case GFC_ISYM_UCOBOUND:
3808 case GFC_ISYM_THIS_IMAGE:
3809 loop->dimen = ss->dimen;
3810 goto done;
3811
3812 default:
3813 break;
3814 }
3815
3816 default:
3817 break;
3818 }
3819 }
3820
3821 /* We should have determined the rank of the expression by now. If
3822 not, that's bad news. */
3823 gcc_unreachable ();
3824
3825 done:
3826 /* Loop over all the SS in the chain. */
3827 for (ss = loop->ss; ss != gfc_ss_terminator; ss = ss->loop_chain)
3828 {
3829 gfc_ss_info *ss_info;
3830 gfc_array_info *info;
3831 gfc_expr *expr;
3832
3833 ss_info = ss->info;
3834 expr = ss_info->expr;
3835 info = &ss_info->data.array;
3836
3837 if (expr && expr->shape && !info->shape)
3838 info->shape = expr->shape;
3839
3840 switch (ss_info->type)
3841 {
3842 case GFC_SS_SECTION:
3843 /* Get the descriptor for the array. If the array spans several
3844 loops, we already got the descriptor in the outermost loop. */
3845 if (ss->parent == NULL)
3846 gfc_conv_ss_descriptor (&outer_loop->pre, ss,
3847 !loop->array_parameter);
3848
3849 for (n = 0; n < ss->dimen; n++)
3850 gfc_conv_section_startstride (&outer_loop->pre, ss, ss->dim[n]);
3851 break;
3852
3853 case GFC_SS_INTRINSIC:
3854 switch (expr->value.function.isym->id)
3855 {
3856 /* Fall through to supply start and stride. */
3857 case GFC_ISYM_LBOUND:
3858 case GFC_ISYM_UBOUND:
3859 {
3860 gfc_expr *arg;
3861
3862 /* This is the variant without DIM=... */
3863 gcc_assert (expr->value.function.actual->next->expr == NULL);
3864
3865 arg = expr->value.function.actual->expr;
3866 if (arg->rank == -1)
3867 {
3868 gfc_se se;
3869 tree rank, tmp;
3870
3871 /* The rank (hence the return value's shape) is unknown,
3872 so we have to retrieve it. */
3873 gfc_init_se (&se, NULL);
3874 se.descriptor_only = 1;
3875 gfc_conv_expr (&se, arg);
3876 /* This is a bare variable, so there is no preliminary
3877 or cleanup code. */
3878 gcc_assert (se.pre.head == NULL_TREE
3879 && se.post.head == NULL_TREE);
3880 rank = gfc_conv_descriptor_rank (se.expr);
3881 tmp = fold_build2_loc (input_location, MINUS_EXPR,
3882 gfc_array_index_type,
3883 fold_convert (gfc_array_index_type,
3884 rank),
3885 gfc_index_one_node);
3886 info->end[0] = gfc_evaluate_now (tmp, &outer_loop->pre);
3887 info->start[0] = gfc_index_zero_node;
3888 info->stride[0] = gfc_index_one_node;
3889 continue;
3890 }
3891 /* Otherwise fall through to the GFC_SS_FUNCTION handling below. */
3892 }
3893 case GFC_ISYM_LCOBOUND:
3894 case GFC_ISYM_UCOBOUND:
3895 case GFC_ISYM_THIS_IMAGE:
3896 break;
3897
3898 default:
3899 continue;
3900 }
3901
3902 case GFC_SS_CONSTRUCTOR:
3903 case GFC_SS_FUNCTION:
3904 for (n = 0; n < ss->dimen; n++)
3905 {
3906 int dim = ss->dim[n];
3907
3908 info->start[dim] = gfc_index_zero_node;
3909 info->end[dim] = gfc_index_zero_node;
3910 info->stride[dim] = gfc_index_one_node;
3911 }
3912 break;
3913
3914 default:
3915 break;
3916 }
3917 }
3918
3919 /* The rest is just runtime bound checking. */
3920 if (gfc_option.rtcheck & GFC_RTCHECK_BOUNDS)
3921 {
3922 stmtblock_t block;
3923 tree lbound, ubound;
3924 tree end;
3925 tree size[GFC_MAX_DIMENSIONS];
3926 tree stride_pos, stride_neg, non_zerosized, tmp2, tmp3;
3927 gfc_array_info *info;
3928 char *msg;
3929 int dim;
3930
3931 gfc_start_block (&block);
3932
3933 for (n = 0; n < loop->dimen; n++)
3934 size[n] = NULL_TREE;
3935
3936 for (ss = loop->ss; ss != gfc_ss_terminator; ss = ss->loop_chain)
3937 {
3938 stmtblock_t inner;
3939 gfc_ss_info *ss_info;
3940 gfc_expr *expr;
3941 locus *expr_loc;
3942 const char *expr_name;
3943
3944 ss_info = ss->info;
3945 if (ss_info->type != GFC_SS_SECTION)
3946 continue;
3947
3948 /* Catch allocatable lhs in f2003. */
3949 if (gfc_option.flag_realloc_lhs && ss->is_alloc_lhs)
3950 continue;
3951
3952 expr = ss_info->expr;
3953 expr_loc = &expr->where;
3954 expr_name = expr->symtree->name;
3955
3956 gfc_start_block (&inner);
3957
3958 /* TODO: range checking for mapped dimensions. */
3959 info = &ss_info->data.array;
3960
3961 /* This code only checks ranges. Elemental and vector
3962 dimensions are checked later. */
3963 for (n = 0; n < loop->dimen; n++)
3964 {
3965 bool check_upper;
3966
3967 dim = ss->dim[n];
3968 if (info->ref->u.ar.dimen_type[dim] != DIMEN_RANGE)
3969 continue;
3970
3971 if (dim == info->ref->u.ar.dimen - 1
3972 && info->ref->u.ar.as->type == AS_ASSUMED_SIZE)
3973 check_upper = false;
3974 else
3975 check_upper = true;
3976
3977 /* Zero stride is not allowed. */
3978 tmp = fold_build2_loc (input_location, EQ_EXPR, boolean_type_node,
3979 info->stride[dim], gfc_index_zero_node);
3980 asprintf (&msg, "Zero stride is not allowed, for dimension %d "
3981 "of array '%s'", dim + 1, expr_name);
3982 gfc_trans_runtime_check (true, false, tmp, &inner,
3983 expr_loc, msg);
3984 free (msg);
3985
3986 desc = info->descriptor;
3987
3988 /* This is the run-time equivalent of resolve.c's
3989 check_dimension(). The logic is more readable there
3990 than it is here, with all the trees. */
3991 lbound = gfc_conv_array_lbound (desc, dim);
3992 end = info->end[dim];
3993 if (check_upper)
3994 ubound = gfc_conv_array_ubound (desc, dim);
3995 else
3996 ubound = NULL;
3997
3998 /* non_zerosized is true when the selected range is not
3999 empty. */
4000 stride_pos = fold_build2_loc (input_location, GT_EXPR,
4001 boolean_type_node, info->stride[dim],
4002 gfc_index_zero_node);
4003 tmp = fold_build2_loc (input_location, LE_EXPR, boolean_type_node,
4004 info->start[dim], end);
4005 stride_pos = fold_build2_loc (input_location, TRUTH_AND_EXPR,
4006 boolean_type_node, stride_pos, tmp);
4007
4008 stride_neg = fold_build2_loc (input_location, LT_EXPR,
4009 boolean_type_node,
4010 info->stride[dim], gfc_index_zero_node);
4011 tmp = fold_build2_loc (input_location, GE_EXPR, boolean_type_node,
4012 info->start[dim], end);
4013 stride_neg = fold_build2_loc (input_location, TRUTH_AND_EXPR,
4014 boolean_type_node,
4015 stride_neg, tmp);
4016 non_zerosized = fold_build2_loc (input_location, TRUTH_OR_EXPR,
4017 boolean_type_node,
4018 stride_pos, stride_neg);
4019
4020 /* Check the start of the range against the lower and upper
4021 bounds of the array, if the range is not empty.
4022 If upper bound is present, include both bounds in the
4023 error message. */
4024 if (check_upper)
4025 {
4026 tmp = fold_build2_loc (input_location, LT_EXPR,
4027 boolean_type_node,
4028 info->start[dim], lbound);
4029 tmp = fold_build2_loc (input_location, TRUTH_AND_EXPR,
4030 boolean_type_node,
4031 non_zerosized, tmp);
4032 tmp2 = fold_build2_loc (input_location, GT_EXPR,
4033 boolean_type_node,
4034 info->start[dim], ubound);
4035 tmp2 = fold_build2_loc (input_location, TRUTH_AND_EXPR,
4036 boolean_type_node,
4037 non_zerosized, tmp2);
4038 asprintf (&msg, "Index '%%ld' of dimension %d of array '%s' "
4039 "outside of expected range (%%ld:%%ld)",
4040 dim + 1, expr_name);
4041 gfc_trans_runtime_check (true, false, tmp, &inner,
4042 expr_loc, msg,
4043 fold_convert (long_integer_type_node, info->start[dim]),
4044 fold_convert (long_integer_type_node, lbound),
4045 fold_convert (long_integer_type_node, ubound));
4046 gfc_trans_runtime_check (true, false, tmp2, &inner,
4047 expr_loc, msg,
4048 fold_convert (long_integer_type_node, info->start[dim]),
4049 fold_convert (long_integer_type_node, lbound),
4050 fold_convert (long_integer_type_node, ubound));
4051 free (msg);
4052 }
4053 else
4054 {
4055 tmp = fold_build2_loc (input_location, LT_EXPR,
4056 boolean_type_node,
4057 info->start[dim], lbound);
4058 tmp = fold_build2_loc (input_location, TRUTH_AND_EXPR,
4059 boolean_type_node, non_zerosized, tmp);
4060 asprintf (&msg, "Index '%%ld' of dimension %d of array '%s' "
4061 "below lower bound of %%ld",
4062 dim + 1, expr_name);
4063 gfc_trans_runtime_check (true, false, tmp, &inner,
4064 expr_loc, msg,
4065 fold_convert (long_integer_type_node, info->start[dim]),
4066 fold_convert (long_integer_type_node, lbound));
4067 free (msg);
4068 }
4069
4070 /* Compute the last element of the range, which is not
4071 necessarily "end" (think 0:5:3, which doesn't contain 5)
4072 and check it against both lower and upper bounds. */
4073
4074 tmp = fold_build2_loc (input_location, MINUS_EXPR,
4075 gfc_array_index_type, end,
4076 info->start[dim]);
4077 tmp = fold_build2_loc (input_location, TRUNC_MOD_EXPR,
4078 gfc_array_index_type, tmp,
4079 info->stride[dim]);
4080 tmp = fold_build2_loc (input_location, MINUS_EXPR,
4081 gfc_array_index_type, end, tmp);
4082 tmp2 = fold_build2_loc (input_location, LT_EXPR,
4083 boolean_type_node, tmp, lbound);
4084 tmp2 = fold_build2_loc (input_location, TRUTH_AND_EXPR,
4085 boolean_type_node, non_zerosized, tmp2);
4086 if (check_upper)
4087 {
4088 tmp3 = fold_build2_loc (input_location, GT_EXPR,
4089 boolean_type_node, tmp, ubound);
4090 tmp3 = fold_build2_loc (input_location, TRUTH_AND_EXPR,
4091 boolean_type_node, non_zerosized, tmp3);
4092 asprintf (&msg, "Index '%%ld' of dimension %d of array '%s' "
4093 "outside of expected range (%%ld:%%ld)",
4094 dim + 1, expr_name);
4095 gfc_trans_runtime_check (true, false, tmp2, &inner,
4096 expr_loc, msg,
4097 fold_convert (long_integer_type_node, tmp),
4098 fold_convert (long_integer_type_node, ubound),
4099 fold_convert (long_integer_type_node, lbound));
4100 gfc_trans_runtime_check (true, false, tmp3, &inner,
4101 expr_loc, msg,
4102 fold_convert (long_integer_type_node, tmp),
4103 fold_convert (long_integer_type_node, ubound),
4104 fold_convert (long_integer_type_node, lbound));
4105 free (msg);
4106 }
4107 else
4108 {
4109 asprintf (&msg, "Index '%%ld' of dimension %d of array '%s' "
4110 "below lower bound of %%ld",
4111 dim + 1, expr_name);
4112 gfc_trans_runtime_check (true, false, tmp2, &inner,
4113 expr_loc, msg,
4114 fold_convert (long_integer_type_node, tmp),
4115 fold_convert (long_integer_type_node, lbound));
4116 free (msg);
4117 }
4118
4119 /* Check the section sizes match. */
4120 tmp = fold_build2_loc (input_location, MINUS_EXPR,
4121 gfc_array_index_type, end,
4122 info->start[dim]);
4123 tmp = fold_build2_loc (input_location, FLOOR_DIV_EXPR,
4124 gfc_array_index_type, tmp,
4125 info->stride[dim]);
4126 tmp = fold_build2_loc (input_location, PLUS_EXPR,
4127 gfc_array_index_type,
4128 gfc_index_one_node, tmp);
4129 tmp = fold_build2_loc (input_location, MAX_EXPR,
4130 gfc_array_index_type, tmp,
4131 build_int_cst (gfc_array_index_type, 0));
4132 /* We remember the size of the first section, and check all the
4133 others against this. */
4134 if (size[n])
4135 {
4136 tmp3 = fold_build2_loc (input_location, NE_EXPR,
4137 boolean_type_node, tmp, size[n]);
4138 asprintf (&msg, "Array bound mismatch for dimension %d "
4139 "of array '%s' (%%ld/%%ld)",
4140 dim + 1, expr_name);
4141
4142 gfc_trans_runtime_check (true, false, tmp3, &inner,
4143 expr_loc, msg,
4144 fold_convert (long_integer_type_node, tmp),
4145 fold_convert (long_integer_type_node, size[n]));
4146
4147 free (msg);
4148 }
4149 else
4150 size[n] = gfc_evaluate_now (tmp, &inner);
4151 }
4152
4153 tmp = gfc_finish_block (&inner);
4154
4155 /* For optional arguments, only check bounds if the argument is
4156 present. */
4157 if (expr->symtree->n.sym->attr.optional
4158 || expr->symtree->n.sym->attr.not_always_present)
4159 tmp = build3_v (COND_EXPR,
4160 gfc_conv_expr_present (expr->symtree->n.sym),
4161 tmp, build_empty_stmt (input_location));
4162
4163 gfc_add_expr_to_block (&block, tmp);
4164
4165 }
4166
4167 tmp = gfc_finish_block (&block);
4168 gfc_add_expr_to_block (&outer_loop->pre, tmp);
4169 }
4170
4171 for (loop = loop->nested; loop; loop = loop->next)
4172 gfc_conv_ss_startstride (loop);
4173 }
4174
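/* As a rough illustration of the -fcheck=bounds code generated above:

     real :: a(10)
     ... a(0:12:3) ...

   produces run-time tests that the stride is nonzero, that the first
   element referenced (0) and the last element actually reached (12) lie
   within 1..10, and that the extents of the sections appearing in one
   expression agree; any failure calls the runtime error routine with a
   message naming the offending dimension and array.  */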
4175 /* Return true if both symbols could refer to the same data object. Does
4176 not take account of aliasing due to equivalence statements. */
4177
4178 static int
4179 symbols_could_alias (gfc_symbol *lsym, gfc_symbol *rsym, bool lsym_pointer,
4180 bool lsym_target, bool rsym_pointer, bool rsym_target)
4181 {
4182 /* Aliasing isn't possible if the symbols have different base types. */
4183 if (gfc_compare_types (&lsym->ts, &rsym->ts) == 0)
4184 return 0;
4185
4186 /* Pointers can point to other pointers and target objects. */
4187
4188 if ((lsym_pointer && (rsym_pointer || rsym_target))
4189 || (rsym_pointer && (lsym_pointer || lsym_target)))
4190 return 1;
4191
4192 /* Special case: Argument association, cf. F90 12.4.1.6, F2003 12.4.1.7
4193 and F2008 12.5.2.13 items 3b and 4b. The pointer case (a) is already
4194 checked above. */
4195 if (lsym_target && rsym_target
4196 && ((lsym->attr.dummy && !lsym->attr.contiguous
4197 && (!lsym->attr.dimension || lsym->as->type == AS_ASSUMED_SHAPE))
4198 || (rsym->attr.dummy && !rsym->attr.contiguous
4199 && (!rsym->attr.dimension
4200 || rsym->as->type == AS_ASSUMED_SHAPE))))
4201 return 1;
4202
4203 return 0;
4204 }
4205
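/* For example, with

     real, pointer :: p(:)
     real, target  :: t(10)

   symbols_could_alias (p, t, ...) returns 1, because a pointer may be
   associated with any target of a compatible type, while two ordinary
   local variables of unrelated types compare as non-aliasing (0).  */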
4206
4207 /* Return true if the two SS could be aliased, i.e. both point to the same data
4208 object. */
4209 /* TODO: resolve aliases based on frontend expressions. */
4210
4211 static int
4212 gfc_could_be_alias (gfc_ss * lss, gfc_ss * rss)
4213 {
4214 gfc_ref *lref;
4215 gfc_ref *rref;
4216 gfc_expr *lexpr, *rexpr;
4217 gfc_symbol *lsym;
4218 gfc_symbol *rsym;
4219 bool lsym_pointer, lsym_target, rsym_pointer, rsym_target;
4220
4221 lexpr = lss->info->expr;
4222 rexpr = rss->info->expr;
4223
4224 lsym = lexpr->symtree->n.sym;
4225 rsym = rexpr->symtree->n.sym;
4226
4227 lsym_pointer = lsym->attr.pointer;
4228 lsym_target = lsym->attr.target;
4229 rsym_pointer = rsym->attr.pointer;
4230 rsym_target = rsym->attr.target;
4231
4232 if (symbols_could_alias (lsym, rsym, lsym_pointer, lsym_target,
4233 rsym_pointer, rsym_target))
4234 return 1;
4235
4236 if (rsym->ts.type != BT_DERIVED && rsym->ts.type != BT_CLASS
4237 && lsym->ts.type != BT_DERIVED && lsym->ts.type != BT_CLASS)
4238 return 0;
4239
4240 /* For derived types we must check all the component types. We can ignore
4241 array references as these will have the same base type as the previous
4242 component ref. */
4243 for (lref = lexpr->ref; lref != lss->info->data.array.ref; lref = lref->next)
4244 {
4245 if (lref->type != REF_COMPONENT)
4246 continue;
4247
4248 lsym_pointer = lsym_pointer || lref->u.c.sym->attr.pointer;
4249 lsym_target = lsym_target || lref->u.c.sym->attr.target;
4250
4251 if (symbols_could_alias (lref->u.c.sym, rsym, lsym_pointer, lsym_target,
4252 rsym_pointer, rsym_target))
4253 return 1;
4254
4255 if ((lsym_pointer && (rsym_pointer || rsym_target))
4256 || (rsym_pointer && (lsym_pointer || lsym_target)))
4257 {
4258 if (gfc_compare_types (&lref->u.c.component->ts,
4259 &rsym->ts))
4260 return 1;
4261 }
4262
4263 for (rref = rexpr->ref; rref != rss->info->data.array.ref;
4264 rref = rref->next)
4265 {
4266 if (rref->type != REF_COMPONENT)
4267 continue;
4268
4269 rsym_pointer = rsym_pointer || rref->u.c.sym->attr.pointer;
4270 rsym_target = rsym_target || rref->u.c.sym->attr.target;
4271
4272 if (symbols_could_alias (lref->u.c.sym, rref->u.c.sym,
4273 lsym_pointer, lsym_target,
4274 rsym_pointer, rsym_target))
4275 return 1;
4276
4277 if ((lsym_pointer && (rsym_pointer || rsym_target))
4278 || (rsym_pointer && (lsym_pointer || lsym_target)))
4279 {
4280 if (gfc_compare_types (&lref->u.c.component->ts,
4281 &rref->u.c.sym->ts))
4282 return 1;
4283 if (gfc_compare_types (&lref->u.c.sym->ts,
4284 &rref->u.c.component->ts))
4285 return 1;
4286 if (gfc_compare_types (&lref->u.c.component->ts,
4287 &rref->u.c.component->ts))
4288 return 1;
4289 }
4290 }
4291 }
4292
4293 lsym_pointer = lsym->attr.pointer;
4294 lsym_target = lsym->attr.target;
4295 rsym_pointer = rsym->attr.pointer;
4296 rsym_target = rsym->attr.target;
4297
4298 for (rref = rexpr->ref; rref != rss->info->data.array.ref; rref = rref->next)
4299 {
4300 if (rref->type != REF_COMPONENT)
4301 break;
4302
4303 rsym_pointer = rsym_pointer || rref->u.c.sym->attr.pointer;
4304 rsym_target = rsym_target || rref->u.c.sym->attr.target;
4305
4306 if (symbols_could_alias (rref->u.c.sym, lsym,
4307 lsym_pointer, lsym_target,
4308 rsym_pointer, rsym_target))
4309 return 1;
4310
4311 if ((lsym_pointer && (rsym_pointer || rsym_target))
4312 || (rsym_pointer && (lsym_pointer || lsym_target)))
4313 {
4314 if (gfc_compare_types (&lsym->ts, &rref->u.c.component->ts))
4315 return 1;
4316 }
4317 }
4318
4319 return 0;
4320 }
4321
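/* The component walk above matters for cases like

     type t
       real, pointer :: d(:)
     end type
     type(t), target :: x
     real, pointer :: q(:)

   where an assignment involving q and x%d cannot be disambiguated from
   the base symbols alone; each component reference on either side is
   therefore checked against the other side's symbols and components.  */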
4322
4323 /* Resolve array data dependencies. Creates a temporary if required. */
4324 /* TODO: Calc dependencies with gfc_expr rather than gfc_ss, and move to
4325 dependency.c. */
4326
4327 void
4328 gfc_conv_resolve_dependencies (gfc_loopinfo * loop, gfc_ss * dest,
4329 gfc_ss * rss)
4330 {
4331 gfc_ss *ss;
4332 gfc_ref *lref;
4333 gfc_ref *rref;
4334 gfc_expr *dest_expr;
4335 gfc_expr *ss_expr;
4336 int nDepend = 0;
4337 int i, j;
4338
4339 loop->temp_ss = NULL;
4340 dest_expr = dest->info->expr;
4341
4342 for (ss = rss; ss != gfc_ss_terminator; ss = ss->next)
4343 {
4344 ss_expr = ss->info->expr;
4345
4346 if (ss->info->type != GFC_SS_SECTION)
4347 {
4348 if (gfc_option.flag_realloc_lhs
4349 && dest_expr != ss_expr
4350 && gfc_is_reallocatable_lhs (dest_expr)
4351 && ss_expr->rank)
4352 nDepend = gfc_check_dependency (dest_expr, ss_expr, true);
4353
4354 continue;
4355 }
4356
4357 if (dest_expr->symtree->n.sym != ss_expr->symtree->n.sym)
4358 {
4359 if (gfc_could_be_alias (dest, ss)
4360 || gfc_are_equivalenced_arrays (dest_expr, ss_expr))
4361 {
4362 nDepend = 1;
4363 break;
4364 }
4365 }
4366 else
4367 {
4368 lref = dest_expr->ref;
4369 rref = ss_expr->ref;
4370
4371 nDepend = gfc_dep_resolver (lref, rref, &loop->reverse[0]);
4372
4373 if (nDepend == 1)
4374 break;
4375
4376 for (i = 0; i < dest->dimen; i++)
4377 for (j = 0; j < ss->dimen; j++)
4378 if (i != j
4379 && dest->dim[i] == ss->dim[j])
4380 {
4381 /* If we don't access array elements in the same order,
4382 there is a dependency. */
4383 nDepend = 1;
4384 goto temporary;
4385 }
4386 #if 0
4387 /* TODO : loop shifting. */
4388 if (nDepend == 1)
4389 {
4390 /* Mark the dimensions for LOOP SHIFTING */
4391 for (n = 0; n < loop->dimen; n++)
4392 {
4393 int dim = dest->data.info.dim[n];
4394
4395 if (lref->u.ar.dimen_type[dim] == DIMEN_VECTOR)
4396 depends[n] = 2;
4397 else if (! gfc_is_same_range (&lref->u.ar,
4398 &rref->u.ar, dim, 0))
4399 depends[n] = 1;
4400 }
4401
4402 /* Put all the dimensions with dependencies in the
4403 innermost loops. */
4404 dim = 0;
4405 for (n = 0; n < loop->dimen; n++)
4406 {
4407 gcc_assert (loop->order[n] == n);
4408 if (depends[n])
4409 loop->order[dim++] = n;
4410 }
4411 for (n = 0; n < loop->dimen; n++)
4412 {
4413 if (! depends[n])
4414 loop->order[dim++] = n;
4415 }
4416
4417 gcc_assert (dim == loop->dimen);
4418 break;
4419 }
4420 #endif
4421 }
4422 }
4423
4424 temporary:
4425
4426 if (nDepend == 1)
4427 {
4428 tree base_type = gfc_typenode_for_spec (&dest_expr->ts);
4429 if (GFC_ARRAY_TYPE_P (base_type)
4430 || GFC_DESCRIPTOR_TYPE_P (base_type))
4431 base_type = gfc_get_element_type (base_type);
4432 loop->temp_ss = gfc_get_temp_ss (base_type, dest->info->string_length,
4433 loop->dimen);
4434 gfc_add_ss_to_loop (loop, loop->temp_ss);
4435 }
4436 else
4437 loop->temp_ss = NULL;
4438 }
4439
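/* For instance, the overlapping assignment

     a(2:10) = a(1:9)

   is detected by gfc_dep_resolver from the left and right reference
   chains; depending on the overlap it is handled either by reversing
   the scalarization loop or, as set up above, by evaluating the RHS
   into a temporary of the element type before copying it to the LHS.  */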
4440
4441 /* Browse through each array's information from the scalarizer and set the loop
4442 bounds according to the "best" one (per dimension), i.e. the one which
4443 provides the most information (constant bounds, shape, etc.). */
4444
4445 static void
4446 set_loop_bounds (gfc_loopinfo *loop)
4447 {
4448 int n, dim, spec_dim;
4449 gfc_array_info *info;
4450 gfc_array_info *specinfo;
4451 gfc_ss *ss;
4452 tree tmp;
4453 gfc_ss **loopspec;
4454 bool dynamic[GFC_MAX_DIMENSIONS];
4455 mpz_t *cshape;
4456 mpz_t i;
4457 bool nonoptional_arr;
4458
4459 gfc_loopinfo * const outer_loop = outermost_loop (loop);
4460
4461 loopspec = loop->specloop;
4462
4463 mpz_init (i);
4464 for (n = 0; n < loop->dimen; n++)
4465 {
4466 loopspec[n] = NULL;
4467 dynamic[n] = false;
4468
4469 /* If there are both optional and nonoptional array arguments, scalarize
4470 over the nonoptional; otherwise, it does not matter as then all
4471 (optional) arrays have to be present per F2008, 12.5.2.12p3(6). */
4472
4473 nonoptional_arr = false;
4474
4475 for (ss = loop->ss; ss != gfc_ss_terminator; ss = ss->loop_chain)
4476 if (ss->info->type != GFC_SS_SCALAR && ss->info->type != GFC_SS_TEMP
4477 && ss->info->type != GFC_SS_REFERENCE && !ss->info->can_be_null_ref)
4478 {
4479 nonoptional_arr = true;
4480 break;
4481 }
4482
4483 /* We use one SS term, and use that to determine the bounds of the
4484 loop for this dimension. We try to pick the simplest term. */
4485 for (ss = loop->ss; ss != gfc_ss_terminator; ss = ss->loop_chain)
4486 {
4487 gfc_ss_type ss_type;
4488
4489 ss_type = ss->info->type;
4490 if (ss_type == GFC_SS_SCALAR
4491 || ss_type == GFC_SS_TEMP
4492 || ss_type == GFC_SS_REFERENCE
4493 || (ss->info->can_be_null_ref && nonoptional_arr))
4494 continue;
4495
4496 info = &ss->info->data.array;
4497 dim = ss->dim[n];
4498
4499 if (loopspec[n] != NULL)
4500 {
4501 specinfo = &loopspec[n]->info->data.array;
4502 spec_dim = loopspec[n]->dim[n];
4503 }
4504 else
4505 {
4506 /* Silence uninitialized warnings. */
4507 specinfo = NULL;
4508 spec_dim = 0;
4509 }
4510
4511 if (info->shape)
4512 {
4513 gcc_assert (info->shape[dim]);
4514 /* The frontend has worked out the size for us. */
4515 if (!loopspec[n]
4516 || !specinfo->shape
4517 || !integer_zerop (specinfo->start[spec_dim]))
4518 /* Prefer zero-based descriptors if possible. */
4519 loopspec[n] = ss;
4520 continue;
4521 }
4522
4523 if (ss_type == GFC_SS_CONSTRUCTOR)
4524 {
4525 gfc_constructor_base base;
4526 /* An unknown size constructor will always be rank one.
4527 Higher rank constructors will either have known shape,
4528 or still be wrapped in a call to reshape. */
4529 gcc_assert (loop->dimen == 1);
4530
4531 /* Always prefer to use the constructor bounds if the size
4532 can be determined at compile time. Prefer not to otherwise,
4533 since the general case involves realloc, and it's better to
4534 avoid that overhead if possible. */
4535 base = ss->info->expr->value.constructor;
4536 dynamic[n] = gfc_get_array_constructor_size (&i, base);
4537 if (!dynamic[n] || !loopspec[n])
4538 loopspec[n] = ss;
4539 continue;
4540 }
4541
4542 /* Avoid using an allocatable lhs in an assignment, since
4543 there might be a reallocation coming. */
4544 if (loopspec[n] && ss->is_alloc_lhs)
4545 continue;
4546
4547 if (!loopspec[n])
4548 loopspec[n] = ss;
4549 /* Criteria for choosing a loop specifier (most important first):
4550 doesn't need realloc
4551 stride of one
4552 known stride
4553 known lower bound
4554 known upper bound
4555 */
4556 else if (loopspec[n]->info->type == GFC_SS_CONSTRUCTOR && dynamic[n])
4557 loopspec[n] = ss;
4558 else if (integer_onep (info->stride[dim])
4559 && !integer_onep (specinfo->stride[spec_dim]))
4560 loopspec[n] = ss;
4561 else if (INTEGER_CST_P (info->stride[dim])
4562 && !INTEGER_CST_P (specinfo->stride[spec_dim]))
4563 loopspec[n] = ss;
4564 else if (INTEGER_CST_P (info->start[dim])
4565 && !INTEGER_CST_P (specinfo->start[spec_dim])
4566 && integer_onep (info->stride[dim])
4567 == integer_onep (specinfo->stride[spec_dim])
4568 && INTEGER_CST_P (info->stride[dim])
4569 == INTEGER_CST_P (specinfo->stride[spec_dim]))
4570 loopspec[n] = ss;
4571 /* We don't work out the upper bound.
4572 else if (INTEGER_CST_P (info->finish[n])
4573 && ! INTEGER_CST_P (specinfo->finish[n]))
4574 loopspec[n] = ss; */
4575 }
4576
4577 /* We should have found the scalarization loop specifier. If not,
4578 that's bad news. */
4579 gcc_assert (loopspec[n]);
4580
4581 info = &loopspec[n]->info->data.array;
4582 dim = loopspec[n]->dim[n];
4583
4584 /* Set the extents of this range. */
4585 cshape = info->shape;
4586 if (cshape && INTEGER_CST_P (info->start[dim])
4587 && INTEGER_CST_P (info->stride[dim]))
4588 {
4589 loop->from[n] = info->start[dim];
4590 mpz_set (i, cshape[get_array_ref_dim_for_loop_dim (loopspec[n], n)]);
4591 mpz_sub_ui (i, i, 1);
4592 /* To = from + (size - 1) * stride. */
4593 tmp = gfc_conv_mpz_to_tree (i, gfc_index_integer_kind);
4594 if (!integer_onep (info->stride[dim]))
4595 tmp = fold_build2_loc (input_location, MULT_EXPR,
4596 gfc_array_index_type, tmp,
4597 info->stride[dim]);
4598 loop->to[n] = fold_build2_loc (input_location, PLUS_EXPR,
4599 gfc_array_index_type,
4600 loop->from[n], tmp);
4601 }
4602 else
4603 {
4604 loop->from[n] = info->start[dim];
4605 switch (loopspec[n]->info->type)
4606 {
4607 case GFC_SS_CONSTRUCTOR:
4608 /* The upper bound is calculated when we expand the
4609 constructor. */
4610 gcc_assert (loop->to[n] == NULL_TREE);
4611 break;
4612
4613 case GFC_SS_SECTION:
4614 /* Use the end expression if it exists and is not constant,
4615 so that it is only evaluated once. */
4616 loop->to[n] = info->end[dim];
4617 break;
4618
4619 case GFC_SS_FUNCTION:
4620 /* The loop bound will be set when we generate the call. */
4621 gcc_assert (loop->to[n] == NULL_TREE);
4622 break;
4623
4624 case GFC_SS_INTRINSIC:
4625 {
4626 gfc_expr *expr = loopspec[n]->info->expr;
4627
4628 /* The {l,u}bound of an assumed rank. */
4629 gcc_assert ((expr->value.function.isym->id == GFC_ISYM_LBOUND
4630 || expr->value.function.isym->id == GFC_ISYM_UBOUND)
4631 && expr->value.function.actual->next->expr == NULL
4632 && expr->value.function.actual->expr->rank == -1);
4633
4634 loop->to[n] = info->end[dim];
4635 break;
4636 }
4637
4638 default:
4639 gcc_unreachable ();
4640 }
4641 }
4642
4643 /* Transform everything so we have a simple incrementing variable. */
4644 if (integer_onep (info->stride[dim]))
4645 info->delta[dim] = gfc_index_zero_node;
4646 else
4647 {
4648 /* Set the delta for this section. */
4649 info->delta[dim] = gfc_evaluate_now (loop->from[n], &outer_loop->pre);
4650 /* Number of iterations is (end - start + step) / step.
4651 With start = 0, this simplifies to
4652 last = end / step;
4653 for (i = 0; i <= last; i++) {...}. */
4654 tmp = fold_build2_loc (input_location, MINUS_EXPR,
4655 gfc_array_index_type, loop->to[n],
4656 loop->from[n]);
4657 tmp = fold_build2_loc (input_location, FLOOR_DIV_EXPR,
4658 gfc_array_index_type, tmp, info->stride[dim]);
4659 tmp = fold_build2_loc (input_location, MAX_EXPR, gfc_array_index_type,
4660 tmp, build_int_cst (gfc_array_index_type, -1));
4661 loop->to[n] = gfc_evaluate_now (tmp, &outer_loop->pre);
4662 /* Make the loop variable start at 0. */
4663 loop->from[n] = gfc_index_zero_node;
4664 }
4665 }
4666 mpz_clear (i);
4667
4668 for (loop = loop->nested; loop; loop = loop->next)
4669 set_loop_bounds (loop);
4670 }
4671
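/* A worked example of the normalization above: for the section a(3:11:2)
   the chosen term gives from = 3, to = 11, stride = 2.  Since the stride
   is not one, delta is set to 3, to becomes (11 - 3) / 2 = 4 and from
   becomes 0, so the loop variable simply runs 0..4; the original indices
   are later recovered, roughly, as loopvar * 2 + delta, i.e. 3, 5, 7, 9,
   11.  */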
4672
4673 /* Initialize the scalarization loop. Creates the loop variables. Determines
4674 the range of the loop variables. Creates a temporary if required.
4675 Also generates code for scalar expressions which have been
4676 moved outside the loop. */
4677
4678 void
4679 gfc_conv_loop_setup (gfc_loopinfo * loop, locus * where)
4680 {
4681 gfc_ss *tmp_ss;
4682 tree tmp;
4683
4684 set_loop_bounds (loop);
4685
4686 /* Add all the scalar code that can be taken out of the loops.
4687 This may include calculating the loop bounds, so do it before
4688 allocating the temporary. */
4689 gfc_add_loop_ss_code (loop, loop->ss, false, where);
4690
4691 tmp_ss = loop->temp_ss;
4692 /* If we want a temporary then create it. */
4693 if (tmp_ss != NULL)
4694 {
4695 gfc_ss_info *tmp_ss_info;
4696
4697 tmp_ss_info = tmp_ss->info;
4698 gcc_assert (tmp_ss_info->type == GFC_SS_TEMP);
4699 gcc_assert (loop->parent == NULL);
4700
4701 /* Make absolutely sure that this is a complete type. */
4702 if (tmp_ss_info->string_length)
4703 tmp_ss_info->data.temp.type
4704 = gfc_get_character_type_len_for_eltype
4705 (TREE_TYPE (tmp_ss_info->data.temp.type),
4706 tmp_ss_info->string_length);
4707
4708 tmp = tmp_ss_info->data.temp.type;
4709 memset (&tmp_ss_info->data.array, 0, sizeof (gfc_array_info));
4710 tmp_ss_info->type = GFC_SS_SECTION;
4711
4712 gcc_assert (tmp_ss->dimen != 0);
4713
4714 gfc_trans_create_temp_array (&loop->pre, &loop->post, tmp_ss, tmp,
4715 NULL_TREE, false, true, false, where);
4716 }
4717
4718 /* For array parameters we don't have loop variables, so don't calculate the
4719 translations. */
4720 if (!loop->array_parameter)
4721 gfc_set_delta (loop);
4722 }
4723
4724
4725 /* Calculates how to transform from loop variables to array indices for each
4726 array: once loop bounds are chosen, sets the difference (DELTA field) between
4727 loop bounds and array reference bounds, for each array info. */
4728
4729 void
4730 gfc_set_delta (gfc_loopinfo *loop)
4731 {
4732 gfc_ss *ss, **loopspec;
4733 gfc_array_info *info;
4734 tree tmp;
4735 int n, dim;
4736
4737 gfc_loopinfo * const outer_loop = outermost_loop (loop);
4738
4739 loopspec = loop->specloop;
4740
4741 /* Calculate the translation from loop variables to array indices. */
4742 for (ss = loop->ss; ss != gfc_ss_terminator; ss = ss->loop_chain)
4743 {
4744 gfc_ss_type ss_type;
4745
4746 ss_type = ss->info->type;
4747 if (ss_type != GFC_SS_SECTION
4748 && ss_type != GFC_SS_COMPONENT
4749 && ss_type != GFC_SS_CONSTRUCTOR)
4750 continue;
4751
4752 info = &ss->info->data.array;
4753
4754 for (n = 0; n < ss->dimen; n++)
4755 {
4756 /* If this term specifies the loop range, the delta is already set. */
4757 if (loopspec[n] != ss)
4758 {
4759 dim = ss->dim[n];
4760
4761 /* Calculate the offset relative to the loop variable.
4762 First multiply by the stride. */
4763 tmp = loop->from[n];
4764 if (!integer_onep (info->stride[dim]))
4765 tmp = fold_build2_loc (input_location, MULT_EXPR,
4766 gfc_array_index_type,
4767 tmp, info->stride[dim]);
4768
4769 /* Then subtract this from our starting value. */
4770 tmp = fold_build2_loc (input_location, MINUS_EXPR,
4771 gfc_array_index_type,
4772 info->start[dim], tmp);
4773
4774 info->delta[dim] = gfc_evaluate_now (tmp, &outer_loop->pre);
4775 }
4776 }
4777 }
4778
4779 for (loop = loop->nested; loop; loop = loop->next)
4780 gfc_set_delta (loop);
4781 }
4782
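/* Roughly, for an array term that did not determine the loop bounds the
   element index ends up as loopvar * stride + delta.  E.g. if the loop
   runs 1..5 (bounds taken from some other term with unit stride) and
   this term is the section b(3:11:2), then delta = 3 - 1 * 2 = 1, and
   loopvar = 1..5 yields the indices 3, 5, 7, 9, 11.  */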
4783
4784 /* Calculate the size of a given array dimension from the bounds. This
4785 is simply (ubound - lbound + 1) if this expression is positive
4786 or 0 if it is negative (pick either one if it is zero). Optionally
4787 (if or_expr is present) OR the (expression != 0) condition to it. */
4788
4789 tree
4790 gfc_conv_array_extent_dim (tree lbound, tree ubound, tree* or_expr)
4791 {
4792 tree res;
4793 tree cond;
4794
4795 /* Calculate (ubound - lbound + 1). */
4796 res = fold_build2_loc (input_location, MINUS_EXPR, gfc_array_index_type,
4797 ubound, lbound);
4798 res = fold_build2_loc (input_location, PLUS_EXPR, gfc_array_index_type, res,
4799 gfc_index_one_node);
4800
4801 /* Check whether the size for this dimension is negative. */
4802 cond = fold_build2_loc (input_location, LE_EXPR, boolean_type_node, res,
4803 gfc_index_zero_node);
4804 res = fold_build3_loc (input_location, COND_EXPR, gfc_array_index_type, cond,
4805 gfc_index_zero_node, res);
4806
4807 /* Build OR expression. */
4808 if (or_expr)
4809 *or_expr = fold_build2_loc (input_location, TRUTH_OR_EXPR,
4810 boolean_type_node, *or_expr, cond);
4811
4812 return res;
4813 }
4814
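/* E.g. lbound = 2, ubound = 6 gives an extent of 5, while lbound = 5,
   ubound = 2 gives -2, which is clamped to 0; in the latter case
   *or_expr (when supplied) is OR-ed with true, marking the array as
   empty.  */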
4815
4816 /* For an array descriptor, get the total number of elements. This is just
4817 the product of the extents along from_dim to to_dim. */
4818
4819 static tree
4820 gfc_conv_descriptor_size_1 (tree desc, int from_dim, int to_dim)
4821 {
4822 tree res;
4823 int dim;
4824
4825 res = gfc_index_one_node;
4826
4827 for (dim = from_dim; dim < to_dim; ++dim)
4828 {
4829 tree lbound;
4830 tree ubound;
4831 tree extent;
4832
4833 lbound = gfc_conv_descriptor_lbound_get (desc, gfc_rank_cst[dim]);
4834 ubound = gfc_conv_descriptor_ubound_get (desc, gfc_rank_cst[dim]);
4835
4836 extent = gfc_conv_array_extent_dim (lbound, ubound, NULL);
4837 res = fold_build2_loc (input_location, MULT_EXPR, gfc_array_index_type,
4838 res, extent);
4839 }
4840
4841 return res;
4842 }
4843
4844
4845 /* Full size of an array. */
4846
4847 tree
4848 gfc_conv_descriptor_size (tree desc, int rank)
4849 {
4850 return gfc_conv_descriptor_size_1 (desc, 0, rank);
4851 }
4852
4853
4854 /* Size of a coarray for all dimensions but the last. */
4855
4856 tree
4857 gfc_conv_descriptor_cosize (tree desc, int rank, int corank)
4858 {
4859 return gfc_conv_descriptor_size_1 (desc, rank, rank + corank - 1);
4860 }
4861
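/* For a descriptor describing a(2:4, 0:1), gfc_conv_descriptor_size
   multiplies the per-dimension extents 3 and 2 to give 6 elements;
   gfc_conv_descriptor_cosize does the same over the codimensions,
   excluding the last one.  */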
4862
4863 /* Fills in an array descriptor and returns the size of the array.
4864 The size will be a simple_val, i.e. a variable or a constant. Also
4865 calculates the offset of the base. The pointer argument OVERFLOW,
4866 which should be of integer type, is incremented if an overflow
4867 occurs during the size calculation.
4868 {
4869 stride = 1;
4870 offset = 0;
4871 for (n = 0; n < rank; n++)
4872 {
4873 a.lbound[n] = specified_lower_bound;
4874 offset = offset - a.lbound[n] * stride;
4875 size = 1 - lbound;
4876 a.ubound[n] = specified_upper_bound;
4877 a.stride[n] = stride;
4878 size = size >= 0 ? ubound + size : 0; //size = ubound + 1 - lbound
4879 overflow += size == 0 ? 0: (MAX/size < stride ? 1: 0);
4880 stride = stride * size;
4881 }
4882 for (n = rank; n < rank+corank; n++)
4883 (Set lcobound/ucobound as above.)
4884 element_size = sizeof (array element);
4885 if (!rank)
4886 return element_size
4887 stride = (size_t) stride;
4888 overflow += element_size == 0 ? 0: (MAX/element_size < stride ? 1: 0);
4889 stride = stride * element_size;
4890 return (stride);
4891 } */
4892 /*GCC ARRAYS*/
4893
4894 static tree
4895 gfc_array_init_size (tree descriptor, int rank, int corank, tree * poffset,
4896 gfc_expr ** lower, gfc_expr ** upper, stmtblock_t * pblock,
4897 stmtblock_t * descriptor_block, tree * overflow,
4898 tree expr3_elem_size, tree *nelems, gfc_expr *expr3,
4899 gfc_typespec *ts)
4900 {
4901 tree type;
4902 tree tmp;
4903 tree size;
4904 tree offset;
4905 tree stride;
4906 tree element_size;
4907 tree or_expr;
4908 tree thencase;
4909 tree elsecase;
4910 tree cond;
4911 tree var;
4912 stmtblock_t thenblock;
4913 stmtblock_t elseblock;
4914 gfc_expr *ubound;
4915 gfc_se se;
4916 int n;
4917
4918 type = TREE_TYPE (descriptor);
4919
4920 stride = gfc_index_one_node;
4921 offset = gfc_index_zero_node;
4922
4923 /* Set the dtype. */
4924 tmp = gfc_conv_descriptor_dtype (descriptor);
4925 gfc_add_modify (descriptor_block, tmp, gfc_get_dtype (TREE_TYPE (descriptor)));
4926
4927 or_expr = boolean_false_node;
4928
4929 for (n = 0; n < rank; n++)
4930 {
4931 tree conv_lbound;
4932 tree conv_ubound;
4933
4934 /* We have 3 possibilities for determining the size of the array:
4935 lower == NULL => lbound = 1, ubound = upper[n]
4936 upper[n] == NULL => lbound = 1, ubound = lower[n]
4937 upper[n] != NULL => lbound = lower[n], ubound = upper[n] */
4938 ubound = upper[n];
4939
4940 /* Set lower bound. */
4941 gfc_init_se (&se, NULL);
4942 if (lower == NULL)
4943 se.expr = gfc_index_one_node;
4944 else
4945 {
4946 gcc_assert (lower[n]);
4947 if (ubound)
4948 {
4949 gfc_conv_expr_type (&se, lower[n], gfc_array_index_type);
4950 gfc_add_block_to_block (pblock, &se.pre);
4951 }
4952 else
4953 {
4954 se.expr = gfc_index_one_node;
4955 ubound = lower[n];
4956 }
4957 }
4958 gfc_conv_descriptor_lbound_set (descriptor_block, descriptor,
4959 gfc_rank_cst[n], se.expr);
4960 conv_lbound = se.expr;
4961
4962 /* Work out the offset contribution of this dimension. */
4963 tmp = fold_build2_loc (input_location, MULT_EXPR, gfc_array_index_type,
4964 se.expr, stride);
4965 offset = fold_build2_loc (input_location, MINUS_EXPR,
4966 gfc_array_index_type, offset, tmp);
4967
4968 /* Set upper bound. */
4969 gfc_init_se (&se, NULL);
4970 gcc_assert (ubound);
4971 gfc_conv_expr_type (&se, ubound, gfc_array_index_type);
4972 gfc_add_block_to_block (pblock, &se.pre);
4973
4974 gfc_conv_descriptor_ubound_set (descriptor_block, descriptor,
4975 gfc_rank_cst[n], se.expr);
4976 conv_ubound = se.expr;
4977
4978 /* Store the stride. */
4979 gfc_conv_descriptor_stride_set (descriptor_block, descriptor,
4980 gfc_rank_cst[n], stride);
4981
4982 /* Calculate size and check whether extent is negative. */
4983 size = gfc_conv_array_extent_dim (conv_lbound, conv_ubound, &or_expr);
4984 size = gfc_evaluate_now (size, pblock);
4985
4986 /* Check whether multiplying the stride by the number of
4987 elements in this dimension would overflow. We must also check
4988 whether the current dimension has zero size in order to avoid
4989 division by zero.
4990 */
4991 tmp = fold_build2_loc (input_location, TRUNC_DIV_EXPR,
4992 gfc_array_index_type,
4993 fold_convert (gfc_array_index_type,
4994 TYPE_MAX_VALUE (gfc_array_index_type)),
4995 size);
4996 cond = gfc_unlikely (fold_build2_loc (input_location, LT_EXPR,
4997 boolean_type_node, tmp, stride),
4998 PRED_FORTRAN_OVERFLOW);
4999 tmp = fold_build3_loc (input_location, COND_EXPR, integer_type_node, cond,
5000 integer_one_node, integer_zero_node);
5001 cond = gfc_unlikely (fold_build2_loc (input_location, EQ_EXPR,
5002 boolean_type_node, size,
5003 gfc_index_zero_node),
5004 PRED_FORTRAN_SIZE_ZERO);
5005 tmp = fold_build3_loc (input_location, COND_EXPR, integer_type_node, cond,
5006 integer_zero_node, tmp);
5007 tmp = fold_build2_loc (input_location, PLUS_EXPR, integer_type_node,
5008 *overflow, tmp);
5009 *overflow = gfc_evaluate_now (tmp, pblock);
5010
5011 /* Multiply the stride by the number of elements in this dimension. */
5012 stride = fold_build2_loc (input_location, MULT_EXPR,
5013 gfc_array_index_type, stride, size);
5014 stride = gfc_evaluate_now (stride, pblock);
5015 }
5016
5017 for (n = rank; n < rank + corank; n++)
5018 {
5019 ubound = upper[n];
5020
5021 /* Set lower bound. */
5022 gfc_init_se (&se, NULL);
5023 if (lower == NULL || lower[n] == NULL)
5024 {
5025 gcc_assert (n == rank + corank - 1);
5026 se.expr = gfc_index_one_node;
5027 }
5028 else
5029 {
5030 if (ubound || n == rank + corank - 1)
5031 {
5032 gfc_conv_expr_type (&se, lower[n], gfc_array_index_type);
5033 gfc_add_block_to_block (pblock, &se.pre);
5034 }
5035 else
5036 {
5037 se.expr = gfc_index_one_node;
5038 ubound = lower[n];
5039 }
5040 }
5041 gfc_conv_descriptor_lbound_set (descriptor_block, descriptor,
5042 gfc_rank_cst[n], se.expr);
5043
5044 if (n < rank + corank - 1)
5045 {
5046 gfc_init_se (&se, NULL);
5047 gcc_assert (ubound);
5048 gfc_conv_expr_type (&se, ubound, gfc_array_index_type);
5049 gfc_add_block_to_block (pblock, &se.pre);
5050 gfc_conv_descriptor_ubound_set (descriptor_block, descriptor,
5051 gfc_rank_cst[n], se.expr);
5052 }
5053 }
5054
5055 /* The stride is the number of elements in the array, so multiply by the
5056 size of an element to get the total size. Obviously, if there is a
5057 SOURCE expression (expr3) we must use its element size. */
5058 if (expr3_elem_size != NULL_TREE)
5059 tmp = expr3_elem_size;
5060 else if (expr3 != NULL)
5061 {
5062 if (expr3->ts.type == BT_CLASS)
5063 {
5064 gfc_se se_sz;
5065 gfc_expr *sz = gfc_copy_expr (expr3);
5066 gfc_add_vptr_component (sz);
5067 gfc_add_size_component (sz);
5068 gfc_init_se (&se_sz, NULL);
5069 gfc_conv_expr (&se_sz, sz);
5070 gfc_free_expr (sz);
5071 tmp = se_sz.expr;
5072 }
5073 else
5074 {
5075 tmp = gfc_typenode_for_spec (&expr3->ts);
5076 tmp = TYPE_SIZE_UNIT (tmp);
5077 }
5078 }
5079 else if (ts->type != BT_UNKNOWN && ts->type != BT_CHARACTER)
5080 /* FIXME: Properly handle characters. See PR 57456. */
5081 tmp = TYPE_SIZE_UNIT (gfc_typenode_for_spec (ts));
5082 else
5083 tmp = TYPE_SIZE_UNIT (gfc_get_element_type (type));
5084
5085 /* Convert to size_t. */
5086 element_size = fold_convert (size_type_node, tmp);
5087
5088 if (rank == 0)
5089 return element_size;
5090
5091 *nelems = gfc_evaluate_now (stride, pblock);
5092 stride = fold_convert (size_type_node, stride);
5093
5094 /* First check for overflow. Since an array of type character can
5095 have zero element_size, we must check for that before
5096 dividing. */
5097 tmp = fold_build2_loc (input_location, TRUNC_DIV_EXPR,
5098 size_type_node,
5099 TYPE_MAX_VALUE (size_type_node), element_size);
5100 cond = gfc_unlikely (fold_build2_loc (input_location, LT_EXPR,
5101 boolean_type_node, tmp, stride),
5102 PRED_FORTRAN_OVERFLOW);
5103 tmp = fold_build3_loc (input_location, COND_EXPR, integer_type_node, cond,
5104 integer_one_node, integer_zero_node);
5105 cond = gfc_unlikely (fold_build2_loc (input_location, EQ_EXPR,
5106 boolean_type_node, element_size,
5107 build_int_cst (size_type_node, 0)),
5108 PRED_FORTRAN_SIZE_ZERO);
5109 tmp = fold_build3_loc (input_location, COND_EXPR, integer_type_node, cond,
5110 integer_zero_node, tmp);
5111 tmp = fold_build2_loc (input_location, PLUS_EXPR, integer_type_node,
5112 *overflow, tmp);
5113 *overflow = gfc_evaluate_now (tmp, pblock);
5114
5115 size = fold_build2_loc (input_location, MULT_EXPR, size_type_node,
5116 stride, element_size);
5117
5118 if (poffset != NULL)
5119 {
5120 offset = gfc_evaluate_now (offset, pblock);
5121 *poffset = offset;
5122 }
5123
5124 if (integer_zerop (or_expr))
5125 return size;
5126 if (integer_onep (or_expr))
5127 return build_int_cst (size_type_node, 0);
5128
5129 var = gfc_create_var (TREE_TYPE (size), "size");
5130 gfc_start_block (&thenblock);
5131 gfc_add_modify (&thenblock, var, build_int_cst (size_type_node, 0));
5132 thencase = gfc_finish_block (&thenblock);
5133
5134 gfc_start_block (&elseblock);
5135 gfc_add_modify (&elseblock, var, size);
5136 elsecase = gfc_finish_block (&elseblock);
5137
5138 tmp = gfc_evaluate_now (or_expr, pblock);
5139 tmp = build3_v (COND_EXPR, tmp, thencase, elsecase);
5140 gfc_add_expr_to_block (pblock, tmp);
5141
5142 return var;
5143 }
5144
5145
5146 /* Initializes the descriptor and generates a call to _gfor_allocate. Does
5147 the work for an ALLOCATE statement. */
5148 /*GCC ARRAYS*/
5149
5150 bool
5151 gfc_array_allocate (gfc_se * se, gfc_expr * expr, tree status, tree errmsg,
5152 tree errlen, tree label_finish, tree expr3_elem_size,
5153 tree *nelems, gfc_expr *expr3, gfc_typespec *ts)
5154 {
5155 tree tmp;
5156 tree pointer;
5157 tree offset = NULL_TREE;
5158 tree token = NULL_TREE;
5159 tree size;
5160 tree msg;
5161 tree error = NULL_TREE;
5162 tree overflow; /* Boolean storing whether size calculation overflows. */
5163 tree var_overflow = NULL_TREE;
5164 tree cond;
5165 tree set_descriptor;
5166 stmtblock_t set_descriptor_block;
5167 stmtblock_t elseblock;
5168 gfc_expr **lower;
5169 gfc_expr **upper;
5170 gfc_ref *ref, *prev_ref = NULL;
5171 bool allocatable, coarray, dimension;
5172
5173 ref = expr->ref;
5174
5175 /* Find the last reference in the chain. */
5176 while (ref && ref->next != NULL)
5177 {
5178 gcc_assert (ref->type != REF_ARRAY || ref->u.ar.type == AR_ELEMENT
5179 || (ref->u.ar.dimen == 0 && ref->u.ar.codimen > 0));
5180 prev_ref = ref;
5181 ref = ref->next;
5182 }
5183
5184 if (ref == NULL || ref->type != REF_ARRAY)
5185 return false;
5186
5187 if (!prev_ref)
5188 {
5189 allocatable = expr->symtree->n.sym->attr.allocatable;
5190 coarray = expr->symtree->n.sym->attr.codimension;
5191 dimension = expr->symtree->n.sym->attr.dimension;
5192 }
5193 else
5194 {
5195 allocatable = prev_ref->u.c.component->attr.allocatable;
5196 coarray = prev_ref->u.c.component->attr.codimension;
5197 dimension = prev_ref->u.c.component->attr.dimension;
5198 }
5199
5200 if (!dimension)
5201 gcc_assert (coarray);
5202
5203 /* Figure out the size of the array. */
5204 switch (ref->u.ar.type)
5205 {
5206 case AR_ELEMENT:
5207 if (!coarray)
5208 {
5209 lower = NULL;
5210 upper = ref->u.ar.start;
5211 break;
5212 }
5213 /* Fall through. */
5214
5215 case AR_SECTION:
5216 lower = ref->u.ar.start;
5217 upper = ref->u.ar.end;
5218 break;
5219
5220 case AR_FULL:
5221 gcc_assert (ref->u.ar.as->type == AS_EXPLICIT);
5222
5223 lower = ref->u.ar.as->lower;
5224 upper = ref->u.ar.as->upper;
5225 break;
5226
5227 default:
5228 gcc_unreachable ();
5229 break;
5230 }
5231
5232 overflow = integer_zero_node;
5233
5234 gfc_init_block (&set_descriptor_block);
5235 size = gfc_array_init_size (se->expr, ref->u.ar.as->rank,
5236 ref->u.ar.as->corank, &offset, lower, upper,
5237 &se->pre, &set_descriptor_block, &overflow,
5238 expr3_elem_size, nelems, expr3, ts);
5239
5240 if (dimension)
5241 {
5242 var_overflow = gfc_create_var (integer_type_node, "overflow");
5243 gfc_add_modify (&se->pre, var_overflow, overflow);
5244
5245 if (status == NULL_TREE)
5246 {
5247 /* Generate the block of code handling overflow. */
5248 msg = gfc_build_addr_expr (pchar_type_node,
5249 gfc_build_localized_cstring_const
5250 ("Integer overflow when calculating the amount of "
5251 "memory to allocate"));
5252 error = build_call_expr_loc (input_location,
5253 gfor_fndecl_runtime_error, 1, msg);
5254 }
5255 else
5256 {
5257 tree status_type = TREE_TYPE (status);
5258 stmtblock_t set_status_block;
5259
5260 gfc_start_block (&set_status_block);
5261 gfc_add_modify (&set_status_block, status,
5262 build_int_cst (status_type, LIBERROR_ALLOCATION));
5263 error = gfc_finish_block (&set_status_block);
5264 }
5265 }
5266
5267 gfc_start_block (&elseblock);
5268
5269 /* Allocate memory to store the data. */
5270 if (POINTER_TYPE_P (TREE_TYPE (se->expr)))
5271 se->expr = build_fold_indirect_ref_loc (input_location, se->expr);
5272
5273 pointer = gfc_conv_descriptor_data_get (se->expr);
5274 STRIP_NOPS (pointer);
5275
5276 if (coarray && gfc_option.coarray == GFC_FCOARRAY_LIB)
5277 token = gfc_build_addr_expr (NULL_TREE,
5278 gfc_conv_descriptor_token (se->expr));
5279
5280 /* The allocatable variant takes the old pointer as first argument. */
5281 if (allocatable)
5282 gfc_allocate_allocatable (&elseblock, pointer, size, token,
5283 status, errmsg, errlen, label_finish, expr);
5284 else
5285 gfc_allocate_using_malloc (&elseblock, pointer, size, status);
5286
5287 if (dimension)
5288 {
5289 cond = gfc_unlikely (fold_build2_loc (input_location, NE_EXPR,
5290 boolean_type_node, var_overflow, integer_zero_node),
5291 PRED_FORTRAN_OVERFLOW);
5292 tmp = fold_build3_loc (input_location, COND_EXPR, void_type_node, cond,
5293 error, gfc_finish_block (&elseblock));
5294 }
5295 else
5296 tmp = gfc_finish_block (&elseblock);
5297
5298 gfc_add_expr_to_block (&se->pre, tmp);
5299
5300 /* Update the array descriptors. */
5301 if (dimension)
5302 gfc_conv_descriptor_offset_set (&set_descriptor_block, se->expr, offset);
5303
5304 set_descriptor = gfc_finish_block (&set_descriptor_block);
5305 if (status != NULL_TREE)
5306 {
5307 cond = fold_build2_loc (input_location, EQ_EXPR,
5308 boolean_type_node, status,
5309 build_int_cst (TREE_TYPE (status), 0));
5310 gfc_add_expr_to_block (&se->pre,
5311 fold_build3_loc (input_location, COND_EXPR, void_type_node,
5312 gfc_likely (cond, PRED_FORTRAN_FAIL_ALLOC),
5313 set_descriptor,
5314 build_empty_stmt (input_location)));
5315 }
5316 else
5317 gfc_add_expr_to_block (&se->pre, set_descriptor);
5318
5319 if ((expr->ts.type == BT_DERIVED)
5320 && expr->ts.u.derived->attr.alloc_comp)
5321 {
5322 tmp = gfc_nullify_alloc_comp (expr->ts.u.derived, se->expr,
5323 ref->u.ar.as->rank);
5324 gfc_add_expr_to_block (&se->pre, tmp);
5325 }
5326
5327 return true;
5328 }
5329
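/* As a sketch, for a statement like

     allocate (a(n,m), stat=ierr)

   the code above evaluates the bounds n and m, computes the total size
   with overflow checks on each multiplication, obtains memory through
   the allocatable or plain-malloc path, and updates the descriptor's
   bounds, strides and offset only once the allocation has succeeded
   (stat == 0).  */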
5330
5331 /* Deallocate an array variable. Also used when an allocated variable goes
5332 out of scope. */
5333 /*GCC ARRAYS*/
5334
5335 tree
5336 gfc_array_deallocate (tree descriptor, tree pstat, tree errmsg, tree errlen,
5337 tree label_finish, gfc_expr* expr)
5338 {
5339 tree var;
5340 tree tmp;
5341 stmtblock_t block;
5342 bool coarray = gfc_is_coarray (expr);
5343
5344 gfc_start_block (&block);
5345
5346 /* Get a pointer to the data. */
5347 var = gfc_conv_descriptor_data_get (descriptor);
5348 STRIP_NOPS (var);
5349
5350 /* Parameter is the address of the data component. */
5351 tmp = gfc_deallocate_with_status (coarray ? descriptor : var, pstat, errmsg,
5352 errlen, label_finish, false, expr, coarray);
5353 gfc_add_expr_to_block (&block, tmp);
5354
5355 /* Zero the data pointer; only for coarrays can an error occur, in which
5356 case the allocation status must not be changed. */
5357 tmp = fold_build2_loc (input_location, MODIFY_EXPR, void_type_node,
5358 var, build_int_cst (TREE_TYPE (var), 0));
5359 if (pstat != NULL_TREE && coarray && gfc_option.coarray == GFC_FCOARRAY_LIB)
5360 {
5361 tree cond;
5362 tree stat = build_fold_indirect_ref_loc (input_location, pstat);
5363
5364 cond = fold_build2_loc (input_location, EQ_EXPR, boolean_type_node,
5365 stat, build_int_cst (TREE_TYPE (stat), 0));
5366 tmp = fold_build3_loc (input_location, COND_EXPR, void_type_node,
5367 cond, tmp, build_empty_stmt (input_location));
5368 }
5369
5370 gfc_add_expr_to_block (&block, tmp);
5371
5372 return gfc_finish_block (&block);
5373 }
5374
5375
5376 /* Create an array constructor from an initialization expression.
5377 We assume the frontend already did any expansions and conversions. */
5378
5379 tree
5380 gfc_conv_array_initializer (tree type, gfc_expr * expr)
5381 {
5382 gfc_constructor *c;
5383 tree tmp;
5384 offset_int wtmp;
5385 gfc_se se;
5386 tree index, range;
5387 vec<constructor_elt, va_gc> *v = NULL;
5388
5389 if (expr->expr_type == EXPR_VARIABLE
5390 && expr->symtree->n.sym->attr.flavor == FL_PARAMETER
5391 && expr->symtree->n.sym->value)
5392 expr = expr->symtree->n.sym->value;
5393
5394 switch (expr->expr_type)
5395 {
5396 case EXPR_CONSTANT:
5397 case EXPR_STRUCTURE:
5398 /* A single scalar or derived type value. Create an array with all
5399 elements equal to that value. */
5400 gfc_init_se (&se, NULL);
5401
5402 if (expr->expr_type == EXPR_CONSTANT)
5403 gfc_conv_constant (&se, expr);
5404 else
5405 gfc_conv_structure (&se, expr, 1);
5406
5407 wtmp = wi::to_offset (TYPE_MAX_VALUE (TYPE_DOMAIN (type))) + 1;
5408 gcc_assert (wtmp != 0);
5409 /* This will probably eat buckets of memory for large arrays. */
5410 while (wtmp != 0)
5411 {
5412 CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, se.expr);
5413 wtmp -= 1;
5414 }
5415 break;
5416
5417 case EXPR_ARRAY:
5418 /* Create a vector of all the elements. */
5419 for (c = gfc_constructor_first (expr->value.constructor);
5420 c; c = gfc_constructor_next (c))
5421 {
5422 if (c->iterator)
5423 {
5424 /* Problems occur when we get something like
5425 integer :: a(lots) = (/(i, i=1, lots)/) */
5426 gfc_fatal_error ("The number of elements in the array constructor "
5427 "at %L requires an increase of the allowed %d "
5428 "upper limit. See -fmax-array-constructor "
5429 "option", &expr->where,
5430 gfc_option.flag_max_array_constructor);
5431 return NULL_TREE;
5432 }
5433 if (mpz_cmp_si (c->offset, 0) != 0)
5434 index = gfc_conv_mpz_to_tree (c->offset, gfc_index_integer_kind);
5435 else
5436 index = NULL_TREE;
5437
5438 if (mpz_cmp_si (c->repeat, 1) > 0)
5439 {
5440 tree tmp1, tmp2;
5441 mpz_t maxval;
5442
5443 mpz_init (maxval);
5444 mpz_add (maxval, c->offset, c->repeat);
5445 mpz_sub_ui (maxval, maxval, 1);
5446 tmp2 = gfc_conv_mpz_to_tree (maxval, gfc_index_integer_kind);
5447 if (mpz_cmp_si (c->offset, 0) != 0)
5448 {
5449 mpz_add_ui (maxval, c->offset, 1);
5450 tmp1 = gfc_conv_mpz_to_tree (maxval, gfc_index_integer_kind);
5451 }
5452 else
5453 tmp1 = gfc_conv_mpz_to_tree (c->offset, gfc_index_integer_kind);
5454
5455 range = fold_build2 (RANGE_EXPR, gfc_array_index_type, tmp1, tmp2);
5456 mpz_clear (maxval);
5457 }
5458 else
5459 range = NULL;
5460
5461 gfc_init_se (&se, NULL);
5462 switch (c->expr->expr_type)
5463 {
5464 case EXPR_CONSTANT:
5465 gfc_conv_constant (&se, c->expr);
5466 break;
5467
5468 case EXPR_STRUCTURE:
5469 gfc_conv_structure (&se, c->expr, 1);
5470 break;
5471
5472 default:
5473 /* Catch those occasional beasts that do not simplify
5474 for one reason or another, assuming that if they violate
5475 the standard the frontend will catch them. */
5476 gfc_conv_expr (&se, c->expr);
5477 break;
5478 }
5479
5480 if (range == NULL_TREE)
5481 CONSTRUCTOR_APPEND_ELT (v, index, se.expr);
5482 else
5483 {
5484 if (index != NULL_TREE)
5485 CONSTRUCTOR_APPEND_ELT (v, index, se.expr);
5486 CONSTRUCTOR_APPEND_ELT (v, range, se.expr);
5487 }
5488 }
5489 break;
5490
5491 case EXPR_NULL:
5492 return gfc_build_null_descriptor (type);
5493
5494 default:
5495 gcc_unreachable ();
5496 }
5497
5498 /* Create a constructor from the list of elements. */
5499 tmp = build_constructor (type, v);
5500 TREE_CONSTANT (tmp) = 1;
5501 return tmp;
5502 }
5503
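/* Roughly, two typical inputs to gfc_conv_array_initializer:

     integer, parameter :: p(4) = (/ 11, 22, 33, 44 /)   ! EXPR_ARRAY
     real :: zeros(100) = 0.0                            ! EXPR_CONSTANT

   the first becomes a CONSTRUCTOR listing each element (with RANGE_EXPR
   entries for repeated values), the second a CONSTRUCTOR that repeats
   the single scalar value for every element of the type's domain.  */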
5504
5505 /* Generate code to evaluate non-constant coarray cobounds. */
5506
5507 void
5508 gfc_trans_array_cobounds (tree type, stmtblock_t * pblock,
5509 const gfc_symbol *sym)
5510 {
5511 int dim;
5512 tree ubound;
5513 tree lbound;
5514 gfc_se se;
5515 gfc_array_spec *as;
5516
5517 as = sym->as;
5518
5519 for (dim = as->rank; dim < as->rank + as->corank; dim++)
5520 {
5521 /* Evaluate non-constant array bound expressions. */
5522 lbound = GFC_TYPE_ARRAY_LBOUND (type, dim);
5523 if (as->lower[dim] && !INTEGER_CST_P (lbound))
5524 {
5525 gfc_init_se (&se, NULL);
5526 gfc_conv_expr_type (&se, as->lower[dim], gfc_array_index_type);
5527 gfc_add_block_to_block (pblock, &se.pre);
5528 gfc_add_modify (pblock, lbound, se.expr);
5529 }
5530 ubound = GFC_TYPE_ARRAY_UBOUND (type, dim);
5531 if (as->upper[dim] && !INTEGER_CST_P (ubound))
5532 {
5533 gfc_init_se (&se, NULL);
5534 gfc_conv_expr_type (&se, as->upper[dim], gfc_array_index_type);
5535 gfc_add_block_to_block (pblock, &se.pre);
5536 gfc_add_modify (pblock, ubound, se.expr);
5537 }
5538 }
5539 }
5540
5541
5542 /* Generate code to evaluate non-constant array bounds. Sets *poffset and
5543 returns the size (in elements) of the array. */
5544
5545 static tree
5546 gfc_trans_array_bounds (tree type, gfc_symbol * sym, tree * poffset,
5547 stmtblock_t * pblock)
5548 {
5549 gfc_array_spec *as;
5550 tree size;
5551 tree stride;
5552 tree offset;
5553 tree ubound;
5554 tree lbound;
5555 tree tmp;
5556 gfc_se se;
5557
5558 int dim;
5559
5560 as = sym->as;
5561
5562 size = gfc_index_one_node;
5563 offset = gfc_index_zero_node;
5564 for (dim = 0; dim < as->rank; dim++)
5565 {
5566 /* Evaluate non-constant array bound expressions. */
5567 lbound = GFC_TYPE_ARRAY_LBOUND (type, dim);
5568 if (as->lower[dim] && !INTEGER_CST_P (lbound))
5569 {
5570 gfc_init_se (&se, NULL);
5571 gfc_conv_expr_type (&se, as->lower[dim], gfc_array_index_type);
5572 gfc_add_block_to_block (pblock, &se.pre);
5573 gfc_add_modify (pblock, lbound, se.expr);
5574 }
5575 ubound = GFC_TYPE_ARRAY_UBOUND (type, dim);
5576 if (as->upper[dim] && !INTEGER_CST_P (ubound))
5577 {
5578 gfc_init_se (&se, NULL);
5579 gfc_conv_expr_type (&se, as->upper[dim], gfc_array_index_type);
5580 gfc_add_block_to_block (pblock, &se.pre);
5581 gfc_add_modify (pblock, ubound, se.expr);
5582 }
5583 /* The offset of this dimension. offset = offset - lbound * stride. */
5584 tmp = fold_build2_loc (input_location, MULT_EXPR, gfc_array_index_type,
5585 lbound, size);
5586 offset = fold_build2_loc (input_location, MINUS_EXPR, gfc_array_index_type,
5587 offset, tmp);
5588
5589 /* The size of this dimension, and the stride of the next. */
5590 if (dim + 1 < as->rank)
5591 stride = GFC_TYPE_ARRAY_STRIDE (type, dim + 1);
5592 else
5593 stride = GFC_TYPE_ARRAY_SIZE (type);
5594
5595 if (ubound != NULL_TREE && !(stride && INTEGER_CST_P (stride)))
5596 {
5597 /* Calculate stride = size * (ubound + 1 - lbound). */
5598 tmp = fold_build2_loc (input_location, MINUS_EXPR,
5599 gfc_array_index_type,
5600 gfc_index_one_node, lbound);
5601 tmp = fold_build2_loc (input_location, PLUS_EXPR,
5602 gfc_array_index_type, ubound, tmp);
5603 tmp = fold_build2_loc (input_location, MULT_EXPR,
5604 gfc_array_index_type, size, tmp);
5605 if (stride)
5606 gfc_add_modify (pblock, stride, tmp);
5607 else
5608 stride = gfc_evaluate_now (tmp, pblock);
5609
5610 /* Make sure that negative size arrays are translated
5611 to being zero size. */
5612 tmp = fold_build2_loc (input_location, GE_EXPR, boolean_type_node,
5613 stride, gfc_index_zero_node);
5614 tmp = fold_build3_loc (input_location, COND_EXPR,
5615 gfc_array_index_type, tmp,
5616 stride, gfc_index_zero_node);
5617 gfc_add_modify (pblock, stride, tmp);
5618 }
5619
5620 size = stride;
5621 }
5622
5623 gfc_trans_array_cobounds (type, pblock, sym);
5624 gfc_trans_vla_type_sizes (sym, pblock);
5625
5626 *poffset = offset;
5627 return size;
5628 }
5629
5630
5631 /* Generate code to initialize/allocate an array variable. */
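/* For instance (hypothetical example), an automatic array such as
     subroutine sub (n)
       integer :: n
       real :: work(n)
   ends up here: with -fstack-arrays the storage becomes a stack object
   declared via a DECL_EXPR, otherwise the bounds are evaluated and the
   data is malloc'ed on entry and freed again on exit.  Cray pointees get
   no storage at all.  */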
5632
5633 void
5634 gfc_trans_auto_array_allocation (tree decl, gfc_symbol * sym,
5635 gfc_wrapped_block * block)
5636 {
5637 stmtblock_t init;
5638 tree type;
5639 tree tmp = NULL_TREE;
5640 tree size;
5641 tree offset;
5642 tree space;
5643 tree inittree;
5644 bool onstack;
5645
5646 gcc_assert (!(sym->attr.pointer || sym->attr.allocatable));
5647
5648 /* Do nothing for USEd variables. */
5649 if (sym->attr.use_assoc)
5650 return;
5651
5652 type = TREE_TYPE (decl);
5653 gcc_assert (GFC_ARRAY_TYPE_P (type));
5654 onstack = TREE_CODE (type) != POINTER_TYPE;
5655
5656 gfc_init_block (&init);
5657
5658 /* Evaluate character string length. */
5659 if (sym->ts.type == BT_CHARACTER
5660 && onstack && !INTEGER_CST_P (sym->ts.u.cl->backend_decl))
5661 {
5662 gfc_conv_string_length (sym->ts.u.cl, NULL, &init);
5663
5664 gfc_trans_vla_type_sizes (sym, &init);
5665
5666 /* Emit a DECL_EXPR for this variable, which will cause the
5667 gimplifier to allocate storage, and all that good stuff. */
5668 tmp = fold_build1_loc (input_location, DECL_EXPR, TREE_TYPE (decl), decl);
5669 gfc_add_expr_to_block (&init, tmp);
5670 }
5671
5672 if (onstack)
5673 {
5674 gfc_add_init_cleanup (block, gfc_finish_block (&init), NULL_TREE);
5675 return;
5676 }
5677
5678 type = TREE_TYPE (type);
5679
5680 gcc_assert (!sym->attr.use_assoc);
5681 gcc_assert (!TREE_STATIC (decl));
5682 gcc_assert (!sym->module);
5683
5684 if (sym->ts.type == BT_CHARACTER
5685 && !INTEGER_CST_P (sym->ts.u.cl->backend_decl))
5686 gfc_conv_string_length (sym->ts.u.cl, NULL, &init);
5687
5688 size = gfc_trans_array_bounds (type, sym, &offset, &init);
5689
5690 /* Don't actually allocate space for Cray Pointees. */
5691 if (sym->attr.cray_pointee)
5692 {
5693 if (TREE_CODE (GFC_TYPE_ARRAY_OFFSET (type)) == VAR_DECL)
5694 gfc_add_modify (&init, GFC_TYPE_ARRAY_OFFSET (type), offset);
5695
5696 gfc_add_init_cleanup (block, gfc_finish_block (&init), NULL_TREE);
5697 return;
5698 }
5699
5700 if (gfc_option.flag_stack_arrays)
5701 {
5702 gcc_assert (TREE_CODE (TREE_TYPE (decl)) == POINTER_TYPE);
5703 space = build_decl (sym->declared_at.lb->location,
5704 VAR_DECL, create_tmp_var_name ("A"),
5705 TREE_TYPE (TREE_TYPE (decl)));
5706 gfc_trans_vla_type_sizes (sym, &init);
5707 }
5708 else
5709 {
5710 /* The size is the number of elements in the array, so multiply by the
5711 size of an element to get the total size. */
5712 tmp = TYPE_SIZE_UNIT (gfc_get_element_type (type));
5713 size = fold_build2_loc (input_location, MULT_EXPR, gfc_array_index_type,
5714 size, fold_convert (gfc_array_index_type, tmp));
5715
5716 /* Allocate memory to hold the data. */
5717 tmp = gfc_call_malloc (&init, TREE_TYPE (decl), size);
5718 gfc_add_modify (&init, decl, tmp);
5719
5720 /* Free the temporary. */
5721 tmp = gfc_call_free (convert (pvoid_type_node, decl));
5722 space = NULL_TREE;
5723 }
5724
5725 /* Set offset of the array. */
5726 if (TREE_CODE (GFC_TYPE_ARRAY_OFFSET (type)) == VAR_DECL)
5727 gfc_add_modify (&init, GFC_TYPE_ARRAY_OFFSET (type), offset);
5728
5729 /* Automatic arrays should not have initializers. */
5730 gcc_assert (!sym->value);
5731
5732 inittree = gfc_finish_block (&init);
5733
5734 if (space)
5735 {
5736 tree addr;
5737 pushdecl (space);
5738
5739 /* Don't create a new scope; emit the DECL_EXPR in exactly the scope
5740 where space is also located. */
5741 gfc_init_block (&init);
5742 tmp = fold_build1_loc (input_location, DECL_EXPR,
5743 TREE_TYPE (space), space);
5744 gfc_add_expr_to_block (&init, tmp);
5745 addr = fold_build1_loc (sym->declared_at.lb->location,
5746 ADDR_EXPR, TREE_TYPE (decl), space);
5747 gfc_add_modify (&init, decl, addr);
5748 gfc_add_init_cleanup (block, gfc_finish_block (&init), NULL_TREE);
5749 tmp = NULL_TREE;
5750 }
5751 gfc_add_init_cleanup (block, inittree, tmp);
5752 }
5753
5754
5755 /* Generate entry and exit code for g77 calling convention arrays. */
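/* Hypothetical example: a descriptorless ("g77-style") dummy such as
     subroutine s (a, n)
       real :: a(n)
   receives only a data pointer, so the entry code built here evaluates
   the bounds, sets the offset, and assigns the pointer from the saved
   descriptor when the parameter is not used directly.  For OPTIONAL
   dummies the whole block is guarded by a presence check.  */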
5756
5757 void
5758 gfc_trans_g77_array (gfc_symbol * sym, gfc_wrapped_block * block)
5759 {
5760 tree parm;
5761 tree type;
5762 locus loc;
5763 tree offset;
5764 tree tmp;
5765 tree stmt;
5766 stmtblock_t init;
5767
5768 gfc_save_backend_locus (&loc);
5769 gfc_set_backend_locus (&sym->declared_at);
5770
5771 /* Descriptor type. */
5772 parm = sym->backend_decl;
5773 type = TREE_TYPE (parm);
5774 gcc_assert (GFC_ARRAY_TYPE_P (type));
5775
5776 gfc_start_block (&init);
5777
5778 if (sym->ts.type == BT_CHARACTER
5779 && TREE_CODE (sym->ts.u.cl->backend_decl) == VAR_DECL)
5780 gfc_conv_string_length (sym->ts.u.cl, NULL, &init);
5781
5782 /* Evaluate the bounds of the array. */
5783 gfc_trans_array_bounds (type, sym, &offset, &init);
5784
5785 /* Set the offset. */
5786 if (TREE_CODE (GFC_TYPE_ARRAY_OFFSET (type)) == VAR_DECL)
5787 gfc_add_modify (&init, GFC_TYPE_ARRAY_OFFSET (type), offset);
5788
5789 /* Set the pointer itself if we aren't using the parameter directly. */
5790 if (TREE_CODE (parm) != PARM_DECL)
5791 {
5792 tmp = convert (TREE_TYPE (parm), GFC_DECL_SAVED_DESCRIPTOR (parm));
5793 gfc_add_modify (&init, parm, tmp);
5794 }
5795 stmt = gfc_finish_block (&init);
5796
5797 gfc_restore_backend_locus (&loc);
5798
5799 /* Add the initialization code to the start of the function. */
5800
5801 if (sym->attr.optional || sym->attr.not_always_present)
5802 {
5803 tmp = gfc_conv_expr_present (sym);
5804 stmt = build3_v (COND_EXPR, tmp, stmt, build_empty_stmt (input_location));
5805 }
5806
5807 gfc_add_init_cleanup (block, stmt, NULL_TREE);
5808 }
5809
5810
5811 /* Modify the descriptor of an array parameter so that it has the
5812 correct lower bound. Also move the upper bound accordingly.
5813 If the array is not packed, it will be copied into a temporary.
5814 For each dimension we set the new lower and upper bounds. Then we copy the
5815 stride and calculate the offset for this dimension. We also work out
5816 what the stride of a packed array would be, and see if the two match.
5817 If the array needs repacking, we set the stride to the values we just
5818 calculated, recalculate the offset and copy the array data.
5819 Code is also added to copy the data back at the end of the function.
5820 */
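/* Rough sketch (illustrative only): when the incoming descriptor's first
   stride can differ from one, e.g. because the caller passed a strided
   section like a(1:19:2), the entry code built here may call
   _gfortran_internal_pack to obtain contiguous storage.  The matching
   cleanup copies the data back with _gfortran_internal_unpack (unless
   the dummy is INTENT(IN)) and frees the temporary, and is guarded so
   it only runs when a repack actually happened.  */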
5821
5822 void
5823 gfc_trans_dummy_array_bias (gfc_symbol * sym, tree tmpdesc,
5824 gfc_wrapped_block * block)
5825 {
5826 tree size;
5827 tree type;
5828 tree offset;
5829 locus loc;
5830 stmtblock_t init;
5831 tree stmtInit, stmtCleanup;
5832 tree lbound;
5833 tree ubound;
5834 tree dubound;
5835 tree dlbound;
5836 tree dumdesc;
5837 tree tmp;
5838 tree stride, stride2;
5839 tree stmt_packed;
5840 tree stmt_unpacked;
5841 tree partial;
5842 gfc_se se;
5843 int n;
5844 int checkparm;
5845 int no_repack;
5846 bool optional_arg;
5847
5848 /* Do nothing for pointer and allocatable arrays. */
5849 if (sym->attr.pointer || sym->attr.allocatable)
5850 return;
5851
5852 if (sym->attr.dummy && gfc_is_nodesc_array (sym))
5853 {
5854 gfc_trans_g77_array (sym, block);
5855 return;
5856 }
5857
5858 gfc_save_backend_locus (&loc);
5859 gfc_set_backend_locus (&sym->declared_at);
5860
5861 /* Descriptor type. */
5862 type = TREE_TYPE (tmpdesc);
5863 gcc_assert (GFC_ARRAY_TYPE_P (type));
5864 dumdesc = GFC_DECL_SAVED_DESCRIPTOR (tmpdesc);
5865 dumdesc = build_fold_indirect_ref_loc (input_location, dumdesc);
5866 gfc_start_block (&init);
5867
5868 if (sym->ts.type == BT_CHARACTER
5869 && TREE_CODE (sym->ts.u.cl->backend_decl) == VAR_DECL)
5870 gfc_conv_string_length (sym->ts.u.cl, NULL, &init);
5871
5872 checkparm = (sym->as->type == AS_EXPLICIT
5873 && (gfc_option.rtcheck & GFC_RTCHECK_BOUNDS));
5874
5875 no_repack = !(GFC_DECL_PACKED_ARRAY (tmpdesc)
5876 || GFC_DECL_PARTIAL_PACKED_ARRAY (tmpdesc));
5877
5878 if (GFC_DECL_PARTIAL_PACKED_ARRAY (tmpdesc))
5879 {
5880 /* For non-constant shape arrays we only check if the first dimension
5881 is contiguous. Repacking higher dimensions wouldn't gain us
5882 anything as we still don't know the array stride. */
5883 partial = gfc_create_var (boolean_type_node, "partial");
5884 TREE_USED (partial) = 1;
5885 tmp = gfc_conv_descriptor_stride_get (dumdesc, gfc_rank_cst[0]);
5886 tmp = fold_build2_loc (input_location, EQ_EXPR, boolean_type_node, tmp,
5887 gfc_index_one_node);
5888 gfc_add_modify (&init, partial, tmp);
5889 }
5890 else
5891 partial = NULL_TREE;
5892
5893 /* The naming of stmt_unpacked and stmt_packed may be counter-intuitive
5894 here; however, I think it does the right thing. */
5895 if (no_repack)
5896 {
5897 /* Set the first stride. */
5898 stride = gfc_conv_descriptor_stride_get (dumdesc, gfc_rank_cst[0]);
5899 stride = gfc_evaluate_now (stride, &init);
5900
5901 tmp = fold_build2_loc (input_location, EQ_EXPR, boolean_type_node,
5902 stride, gfc_index_zero_node);
5903 tmp = fold_build3_loc (input_location, COND_EXPR, gfc_array_index_type,
5904 tmp, gfc_index_one_node, stride);
5905 stride = GFC_TYPE_ARRAY_STRIDE (type, 0);
5906 gfc_add_modify (&init, stride, tmp);
5907
5908 /* Allow the user to disable array repacking. */
5909 stmt_unpacked = NULL_TREE;
5910 }
5911 else
5912 {
5913 gcc_assert (integer_onep (GFC_TYPE_ARRAY_STRIDE (type, 0)));
5914 /* A library call to repack the array if necessary. */
5915 tmp = GFC_DECL_SAVED_DESCRIPTOR (tmpdesc);
5916 stmt_unpacked = build_call_expr_loc (input_location,
5917 gfor_fndecl_in_pack, 1, tmp);
5918
5919 stride = gfc_index_one_node;
5920
5921 if (gfc_option.warn_array_temp)
5922 gfc_warning ("Creating array temporary at %L", &loc);
5923 }
5924
5925 /* This is for the case where the array data is used directly without
5926 calling the repack function. */
5927 if (no_repack || partial != NULL_TREE)
5928 stmt_packed = gfc_conv_descriptor_data_get (dumdesc);
5929 else
5930 stmt_packed = NULL_TREE;
5931
5932 /* Assign the data pointer. */
5933 if (stmt_packed != NULL_TREE && stmt_unpacked != NULL_TREE)
5934 {
5935 /* Don't repack unknown shape arrays when the first stride is 1. */
5936 tmp = fold_build3_loc (input_location, COND_EXPR, TREE_TYPE (stmt_packed),
5937 partial, stmt_packed, stmt_unpacked);
5938 }
5939 else
5940 tmp = stmt_packed != NULL_TREE ? stmt_packed : stmt_unpacked;
5941 gfc_add_modify (&init, tmpdesc, fold_convert (type, tmp));
5942
5943 offset = gfc_index_zero_node;
5944 size = gfc_index_one_node;
5945
5946 /* Evaluate the bounds of the array. */
5947 for (n = 0; n < sym->as->rank; n++)
5948 {
5949 if (checkparm || !sym->as->upper[n])
5950 {
5951 /* Get the bounds of the actual parameter. */
5952 dubound = gfc_conv_descriptor_ubound_get (dumdesc, gfc_rank_cst[n]);
5953 dlbound = gfc_conv_descriptor_lbound_get (dumdesc, gfc_rank_cst[n]);
5954 }
5955 else
5956 {
5957 dubound = NULL_TREE;
5958 dlbound = NULL_TREE;
5959 }
5960
5961 lbound = GFC_TYPE_ARRAY_LBOUND (type, n);
5962 if (!INTEGER_CST_P (lbound))
5963 {
5964 gfc_init_se (&se, NULL);
5965 gfc_conv_expr_type (&se, sym->as->lower[n],
5966 gfc_array_index_type);
5967 gfc_add_block_to_block (&init, &se.pre);
5968 gfc_add_modify (&init, lbound, se.expr);
5969 }
5970
5971 ubound = GFC_TYPE_ARRAY_UBOUND (type, n);
5972 /* Set the desired upper bound. */
5973 if (sym->as->upper[n])
5974 {
5975 /* We know what we want the upper bound to be. */
5976 if (!INTEGER_CST_P (ubound))
5977 {
5978 gfc_init_se (&se, NULL);
5979 gfc_conv_expr_type (&se, sym->as->upper[n],
5980 gfc_array_index_type);
5981 gfc_add_block_to_block (&init, &se.pre);
5982 gfc_add_modify (&init, ubound, se.expr);
5983 }
5984
5985 /* Check the sizes match. */
5986 if (checkparm)
5987 {
5988 /* Check (ubound(a) - lbound(a) == ubound(b) - lbound(b)). */
5989 char * msg;
5990 tree temp;
5991
5992 temp = fold_build2_loc (input_location, MINUS_EXPR,
5993 gfc_array_index_type, ubound, lbound);
5994 temp = fold_build2_loc (input_location, PLUS_EXPR,
5995 gfc_array_index_type,
5996 gfc_index_one_node, temp);
5997 stride2 = fold_build2_loc (input_location, MINUS_EXPR,
5998 gfc_array_index_type, dubound,
5999 dlbound);
6000 stride2 = fold_build2_loc (input_location, PLUS_EXPR,
6001 gfc_array_index_type,
6002 gfc_index_one_node, stride2);
6003 tmp = fold_build2_loc (input_location, NE_EXPR,
6004 gfc_array_index_type, temp, stride2);
6005 asprintf (&msg, "Dimension %d of array '%s' has extent "
6006 "%%ld instead of %%ld", n+1, sym->name);
6007
6008 gfc_trans_runtime_check (true, false, tmp, &init, &loc, msg,
6009 fold_convert (long_integer_type_node, temp),
6010 fold_convert (long_integer_type_node, stride2));
6011
6012 free (msg);
6013 }
6014 }
6015 else
6016 {
6017 /* For assumed-shape arrays, move the upper bound by the same amount
6018 as the lower bound. */
6019 tmp = fold_build2_loc (input_location, MINUS_EXPR,
6020 gfc_array_index_type, dubound, dlbound);
6021 tmp = fold_build2_loc (input_location, PLUS_EXPR,
6022 gfc_array_index_type, tmp, lbound);
6023 gfc_add_modify (&init, ubound, tmp);
6024 }
6025 /* The offset of this dimension. offset = offset - lbound * stride. */
6026 tmp = fold_build2_loc (input_location, MULT_EXPR, gfc_array_index_type,
6027 lbound, stride);
6028 offset = fold_build2_loc (input_location, MINUS_EXPR,
6029 gfc_array_index_type, offset, tmp);
6030
6031 /* The size of this dimension, and the stride of the next. */
6032 if (n + 1 < sym->as->rank)
6033 {
6034 stride = GFC_TYPE_ARRAY_STRIDE (type, n + 1);
6035
6036 if (no_repack || partial != NULL_TREE)
6037 stmt_unpacked =
6038 gfc_conv_descriptor_stride_get (dumdesc, gfc_rank_cst[n+1]);
6039
6040 /* Figure out the stride if not a known constant. */
6041 if (!INTEGER_CST_P (stride))
6042 {
6043 if (no_repack)
6044 stmt_packed = NULL_TREE;
6045 else
6046 {
6047 /* Calculate stride = size * (ubound + 1 - lbound). */
6048 tmp = fold_build2_loc (input_location, MINUS_EXPR,
6049 gfc_array_index_type,
6050 gfc_index_one_node, lbound);
6051 tmp = fold_build2_loc (input_location, PLUS_EXPR,
6052 gfc_array_index_type, ubound, tmp);
6053 size = fold_build2_loc (input_location, MULT_EXPR,
6054 gfc_array_index_type, size, tmp);
6055 stmt_packed = size;
6056 }
6057
6058 /* Assign the stride. */
6059 if (stmt_packed != NULL_TREE && stmt_unpacked != NULL_TREE)
6060 tmp = fold_build3_loc (input_location, COND_EXPR,
6061 gfc_array_index_type, partial,
6062 stmt_unpacked, stmt_packed);
6063 else
6064 tmp = (stmt_packed != NULL_TREE) ? stmt_packed : stmt_unpacked;
6065 gfc_add_modify (&init, stride, tmp);
6066 }
6067 }
6068 else
6069 {
6070 stride = GFC_TYPE_ARRAY_SIZE (type);
6071
6072 if (stride && !INTEGER_CST_P (stride))
6073 {
6074 /* Calculate size = stride * (ubound + 1 - lbound). */
6075 tmp = fold_build2_loc (input_location, MINUS_EXPR,
6076 gfc_array_index_type,
6077 gfc_index_one_node, lbound);
6078 tmp = fold_build2_loc (input_location, PLUS_EXPR,
6079 gfc_array_index_type,
6080 ubound, tmp);
6081 tmp = fold_build2_loc (input_location, MULT_EXPR,
6082 gfc_array_index_type,
6083 GFC_TYPE_ARRAY_STRIDE (type, n), tmp);
6084 gfc_add_modify (&init, stride, tmp);
6085 }
6086 }
6087 }
6088
6089 gfc_trans_array_cobounds (type, &init, sym);
6090
6091 /* Set the offset. */
6092 if (TREE_CODE (GFC_TYPE_ARRAY_OFFSET (type)) == VAR_DECL)
6093 gfc_add_modify (&init, GFC_TYPE_ARRAY_OFFSET (type), offset);
6094
6095 gfc_trans_vla_type_sizes (sym, &init);
6096
6097 stmtInit = gfc_finish_block (&init);
6098
6099 /* Only do the entry/initialization code if the arg is present. */
6100 dumdesc = GFC_DECL_SAVED_DESCRIPTOR (tmpdesc);
6101 optional_arg = (sym->attr.optional
6102 || (sym->ns->proc_name->attr.entry_master
6103 && sym->attr.dummy));
6104 if (optional_arg)
6105 {
6106 tmp = gfc_conv_expr_present (sym);
6107 stmtInit = build3_v (COND_EXPR, tmp, stmtInit,
6108 build_empty_stmt (input_location));
6109 }
6110
6111 /* Cleanup code. */
6112 if (no_repack)
6113 stmtCleanup = NULL_TREE;
6114 else
6115 {
6116 stmtblock_t cleanup;
6117 gfc_start_block (&cleanup);
6118
6119 if (sym->attr.intent != INTENT_IN)
6120 {
6121 /* Copy the data back. */
6122 tmp = build_call_expr_loc (input_location,
6123 gfor_fndecl_in_unpack, 2, dumdesc, tmpdesc);
6124 gfc_add_expr_to_block (&cleanup, tmp);
6125 }
6126
6127 /* Free the temporary. */
6128 tmp = gfc_call_free (tmpdesc);
6129 gfc_add_expr_to_block (&cleanup, tmp);
6130
6131 stmtCleanup = gfc_finish_block (&cleanup);
6132
6133 /* Only do the cleanup if the array was repacked. */
6134 tmp = build_fold_indirect_ref_loc (input_location, dumdesc);
6135 tmp = gfc_conv_descriptor_data_get (tmp);
6136 tmp = fold_build2_loc (input_location, NE_EXPR, boolean_type_node,
6137 tmp, tmpdesc);
6138 stmtCleanup = build3_v (COND_EXPR, tmp, stmtCleanup,
6139 build_empty_stmt (input_location));
6140
6141 if (optional_arg)
6142 {
6143 tmp = gfc_conv_expr_present (sym);
6144 stmtCleanup = build3_v (COND_EXPR, tmp, stmtCleanup,
6145 build_empty_stmt (input_location));
6146 }
6147 }
6148
6149 /* We don't need to free any memory allocated by internal_pack as it will
6150 be freed at the end of the function by pop_context. */
6151 gfc_add_init_cleanup (block, stmtInit, stmtCleanup);
6152
6153 gfc_restore_backend_locus (&loc);
6154 }
6155
6156
6157 /* Calculate the overall offset, including subreferences. */
6158 static void
6159 gfc_get_dataptr_offset (stmtblock_t *block, tree parm, tree desc, tree offset,
6160 bool subref, gfc_expr *expr)
6161 {
6162 tree tmp;
6163 tree field;
6164 tree stride;
6165 tree index;
6166 gfc_ref *ref;
6167 gfc_se start;
6168 int n;
6169
6170 /* If offset is NULL and this is not a subreferenced array, there is
6171 nothing to do. */
6172 if (offset == NULL_TREE)
6173 {
6174 if (subref)
6175 offset = gfc_index_zero_node;
6176 else
6177 return;
6178 }
6179
6180 tmp = build_array_ref (desc, offset, NULL);
6181
6182 /* Offset the data pointer for pointer assignments from arrays with
6183 subreferences; e.g. my_integer => my_type(:)%integer_component. */
6184 if (subref)
6185 {
6186 /* Go past the array reference. */
6187 for (ref = expr->ref; ref; ref = ref->next)
6188 if (ref->type == REF_ARRAY &&
6189 ref->u.ar.type != AR_ELEMENT)
6190 {
6191 ref = ref->next;
6192 break;
6193 }
6194
6195 /* Calculate the offset for each subsequent subreference. */
6196 for (; ref; ref = ref->next)
6197 {
6198 switch (ref->type)
6199 {
6200 case REF_COMPONENT:
6201 field = ref->u.c.component->backend_decl;
6202 gcc_assert (field && TREE_CODE (field) == FIELD_DECL);
6203 tmp = fold_build3_loc (input_location, COMPONENT_REF,
6204 TREE_TYPE (field),
6205 tmp, field, NULL_TREE);
6206 break;
6207
6208 case REF_SUBSTRING:
6209 gcc_assert (TREE_CODE (TREE_TYPE (tmp)) == ARRAY_TYPE);
6210 gfc_init_se (&start, NULL);
6211 gfc_conv_expr_type (&start, ref->u.ss.start, gfc_charlen_type_node);
6212 gfc_add_block_to_block (block, &start.pre);
6213 tmp = gfc_build_array_ref (tmp, start.expr, NULL);
6214 break;
6215
6216 case REF_ARRAY:
6217 gcc_assert (TREE_CODE (TREE_TYPE (tmp)) == ARRAY_TYPE
6218 && ref->u.ar.type == AR_ELEMENT);
6219
6220 /* TODO - Add bounds checking. */
6221 stride = gfc_index_one_node;
6222 index = gfc_index_zero_node;
6223 for (n = 0; n < ref->u.ar.dimen; n++)
6224 {
6225 tree itmp;
6226 tree jtmp;
6227
6228 /* Update the index. */
6229 gfc_init_se (&start, NULL);
6230 gfc_conv_expr_type (&start, ref->u.ar.start[n], gfc_array_index_type);
6231 itmp = gfc_evaluate_now (start.expr, block);
6232 gfc_init_se (&start, NULL);
6233 gfc_conv_expr_type (&start, ref->u.ar.as->lower[n], gfc_array_index_type);
6234 jtmp = gfc_evaluate_now (start.expr, block);
6235 itmp = fold_build2_loc (input_location, MINUS_EXPR,
6236 gfc_array_index_type, itmp, jtmp);
6237 itmp = fold_build2_loc (input_location, MULT_EXPR,
6238 gfc_array_index_type, itmp, stride);
6239 index = fold_build2_loc (input_location, PLUS_EXPR,
6240 gfc_array_index_type, itmp, index);
6241 index = gfc_evaluate_now (index, block);
6242
6243 /* Update the stride. */
6244 gfc_init_se (&start, NULL);
6245 gfc_conv_expr_type (&start, ref->u.ar.as->upper[n], gfc_array_index_type);
6246 itmp = fold_build2_loc (input_location, MINUS_EXPR,
6247 gfc_array_index_type, start.expr,
6248 jtmp);
6249 itmp = fold_build2_loc (input_location, PLUS_EXPR,
6250 gfc_array_index_type,
6251 gfc_index_one_node, itmp);
6252 stride = fold_build2_loc (input_location, MULT_EXPR,
6253 gfc_array_index_type, stride, itmp);
6254 stride = gfc_evaluate_now (stride, block);
6255 }
6256
6257 /* Apply the index to obtain the array element. */
6258 tmp = gfc_build_array_ref (tmp, index, NULL);
6259 break;
6260
6261 default:
6262 gcc_unreachable ();
6263 break;
6264 }
6265 }
6266 }
6267
6268 /* Set the target data pointer. */
6269 offset = gfc_build_addr_expr (gfc_array_dataptr_type (desc), tmp);
6270 gfc_conv_descriptor_data_set (block, parm, offset);
6271 }
6272
6273
6274 /* gfc_conv_expr_descriptor needs the string length of an expression
6275 so that the size of the temporary can be obtained. This is done
6276 by adding up the string lengths of all the elements in the
6277 expression. Functions with non-constant expressions have their
6278 string lengths mapped onto the actual arguments using the
6279 interface mapping machinery in trans-expr.c. */
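/* For instance (illustrative only), for an expression like
     c // f (x)
   where f is a character function whose result length depends on its
   dummy arguments, the concatenation's length becomes
   len(c) + len(f(x)); the function's length expression is evaluated
   after mapping the dummies onto the actual argument x and is clamped
   to be non-negative.  */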
6280 static void
6281 get_array_charlen (gfc_expr *expr, gfc_se *se)
6282 {
6283 gfc_interface_mapping mapping;
6284 gfc_formal_arglist *formal;
6285 gfc_actual_arglist *arg;
6286 gfc_se tse;
6287
6288 if (expr->ts.u.cl->length
6289 && gfc_is_constant_expr (expr->ts.u.cl->length))
6290 {
6291 if (!expr->ts.u.cl->backend_decl)
6292 gfc_conv_string_length (expr->ts.u.cl, expr, &se->pre);
6293 return;
6294 }
6295
6296 switch (expr->expr_type)
6297 {
6298 case EXPR_OP:
6299 get_array_charlen (expr->value.op.op1, se);
6300
6301 /* For parentheses the expression ts.u.cl is identical. */
6302 if (expr->value.op.op == INTRINSIC_PARENTHESES)
6303 return;
6304
6305 expr->ts.u.cl->backend_decl =
6306 gfc_create_var (gfc_charlen_type_node, "sln");
6307
6308 if (expr->value.op.op2)
6309 {
6310 get_array_charlen (expr->value.op.op2, se);
6311
6312 gcc_assert (expr->value.op.op == INTRINSIC_CONCAT);
6313
6314 /* Add the string lengths and assign them to the expression
6315 string length backend declaration. */
6316 gfc_add_modify (&se->pre, expr->ts.u.cl->backend_decl,
6317 fold_build2_loc (input_location, PLUS_EXPR,
6318 gfc_charlen_type_node,
6319 expr->value.op.op1->ts.u.cl->backend_decl,
6320 expr->value.op.op2->ts.u.cl->backend_decl));
6321 }
6322 else
6323 gfc_add_modify (&se->pre, expr->ts.u.cl->backend_decl,
6324 expr->value.op.op1->ts.u.cl->backend_decl);
6325 break;
6326
6327 case EXPR_FUNCTION:
6328 if (expr->value.function.esym == NULL
6329 || expr->ts.u.cl->length->expr_type == EXPR_CONSTANT)
6330 {
6331 gfc_conv_string_length (expr->ts.u.cl, expr, &se->pre);
6332 break;
6333 }
6334
6335 /* Map expressions involving the dummy arguments onto the actual
6336 argument expressions. */
6337 gfc_init_interface_mapping (&mapping);
6338 formal = gfc_sym_get_dummy_args (expr->symtree->n.sym);
6339 arg = expr->value.function.actual;
6340
6341 /* Set se = NULL in the calls to the interface mapping, to suppress any
6342 backend stuff. */
6343 for (; arg != NULL; arg = arg->next, formal = formal ? formal->next : NULL)
6344 {
6345 if (!arg->expr)
6346 continue;
6347 if (formal->sym)
6348 gfc_add_interface_mapping (&mapping, formal->sym, NULL, arg->expr);
6349 }
6350
6351 gfc_init_se (&tse, NULL);
6352
6353 /* Build the expression for the character length and convert it. */
6354 gfc_apply_interface_mapping (&mapping, &tse, expr->ts.u.cl->length);
6355
6356 gfc_add_block_to_block (&se->pre, &tse.pre);
6357 gfc_add_block_to_block (&se->post, &tse.post);
6358 tse.expr = fold_convert (gfc_charlen_type_node, tse.expr);
6359 tse.expr = fold_build2_loc (input_location, MAX_EXPR,
6360 gfc_charlen_type_node, tse.expr,
6361 build_int_cst (gfc_charlen_type_node, 0));
6362 expr->ts.u.cl->backend_decl = tse.expr;
6363 gfc_free_interface_mapping (&mapping);
6364 break;
6365
6366 default:
6367 gfc_conv_string_length (expr->ts.u.cl, expr, &se->pre);
6368 break;
6369 }
6370 }
6371
6372
6373 /* Helper function to check dimensions. */
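/* Returns true when the scalarizer dimensions are permuted with respect
   to the array dimensions; e.g. for a rank-2 TRANSPOSE argument the SS
   typically has dim[] = {1, 0} (given here only as an illustration).  */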
6374 static bool
6375 transposed_dims (gfc_ss *ss)
6376 {
6377 int n;
6378
6379 for (n = 0; n < ss->dimen; n++)
6380 if (ss->dim[n] != n)
6381 return true;
6382 return false;
6383 }
6384
6385
6386 /* Convert the last ref of a scalar coarray from an AR_ELEMENT to an
6387 AR_FULL, suitable for the scalarizer. */
6388
6389 static gfc_ss *
6390 walk_coarray (gfc_expr *e)
6391 {
6392 gfc_ss *ss;
6393
6394 gcc_assert (gfc_get_corank (e) > 0);
6395
6396 ss = gfc_walk_expr (e);
6397
6398 /* Fix scalar coarray. */
6399 if (ss == gfc_ss_terminator)
6400 {
6401 gfc_ref *ref;
6402
6403 ref = e->ref;
6404 while (ref)
6405 {
6406 if (ref->type == REF_ARRAY
6407 && ref->u.ar.codimen > 0)
6408 break;
6409
6410 ref = ref->next;
6411 }
6412
6413 gcc_assert (ref != NULL);
6414 if (ref->u.ar.type == AR_ELEMENT)
6415 ref->u.ar.type = AR_SECTION;
6416 ss = gfc_reverse_ss (gfc_walk_array_ref (ss, e, ref));
6417 }
6418
6419 return ss;
6420 }
6421
6422
6423 /* Convert an array for passing as an actual argument. Expressions and
6424 vector subscripts are evaluated and stored in a temporary, which is then
6425 passed. For whole arrays the descriptor is passed. For array sections
6426 a modified copy of the descriptor is passed, but using the original data.
6427
6428 This function is also used for array pointer assignments, and there
6429 are three cases:
6430
6431 - se->want_pointer && !se->direct_byref
6432 EXPR is an actual argument. On exit, se->expr contains a
6433 pointer to the array descriptor.
6434
6435 - !se->want_pointer && !se->direct_byref
6436 EXPR is an actual argument to an intrinsic function or the
6437 left-hand side of a pointer assignment. On exit, se->expr
6438 contains the descriptor for EXPR.
6439
6440 - !se->want_pointer && se->direct_byref
6441 EXPR is the right-hand side of a pointer assignment and
6442 se->expr is the descriptor for the previously-evaluated
6443 left-hand side. The function creates an assignment from
6444 EXPR to se->expr.
6445
6446
6447 The se->force_tmp flag disables the non-copying descriptor optimization
6448 that is used for transpose. It may be used in cases where there is an
6449 alias between the transpose argument and another argument in the same
6450 function call. */
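/* Hypothetical Fortran fragments for the cases above (illustration
   only):
     call sub (a(2:n))   ! EXPR as an actual argument
     p => a(2:n)         ! EXPR as the RHS of a pointer assignment
                         ! (se->direct_byref)
   In the second case the descriptor already built for p is filled in
   from the section of a without copying any data.  */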
6451
6452 void
6453 gfc_conv_expr_descriptor (gfc_se *se, gfc_expr *expr)
6454 {
6455 gfc_ss *ss;
6456 gfc_ss_type ss_type;
6457 gfc_ss_info *ss_info;
6458 gfc_loopinfo loop;
6459 gfc_array_info *info;
6460 int need_tmp;
6461 int n;
6462 tree tmp;
6463 tree desc;
6464 stmtblock_t block;
6465 tree start;
6466 tree offset;
6467 int full;
6468 bool subref_array_target = false;
6469 gfc_expr *arg, *ss_expr;
6470
6471 if (se->want_coarray)
6472 ss = walk_coarray (expr);
6473 else
6474 ss = gfc_walk_expr (expr);
6475
6476 gcc_assert (ss != NULL);
6477 gcc_assert (ss != gfc_ss_terminator);
6478
6479 ss_info = ss->info;
6480 ss_type = ss_info->type;
6481 ss_expr = ss_info->expr;
6482
6483 /* Special case: TRANSPOSE which needs no temporary. */
6484 while (expr->expr_type == EXPR_FUNCTION && expr->value.function.isym
6485 && NULL != (arg = gfc_get_noncopying_intrinsic_argument (expr)))
6486 {
6487 /* This is a call to transpose which has already been handled by the
6488 scalarizer, so that we just need to get its argument's descriptor. */
6489 gcc_assert (expr->value.function.isym->id == GFC_ISYM_TRANSPOSE);
6490 expr = expr->value.function.actual->expr;
6491 }
6492
6493 /* Special case things we know we can pass easily. */
6494 switch (expr->expr_type)
6495 {
6496 case EXPR_VARIABLE:
6497 /* If we have a linear array section, we can pass it directly.
6498 Otherwise we need to copy it into a temporary. */
6499
6500 gcc_assert (ss_type == GFC_SS_SECTION);
6501 gcc_assert (ss_expr == expr);
6502 info = &ss_info->data.array;
6503
6504 /* Get the descriptor for the array. */
6505 gfc_conv_ss_descriptor (&se->pre, ss, 0);
6506 desc = info->descriptor;
6507
6508 subref_array_target = se->direct_byref && is_subref_array (expr);
6509 need_tmp = gfc_ref_needs_temporary_p (expr->ref)
6510 && !subref_array_target;
6511
6512 if (se->force_tmp)
6513 need_tmp = 1;
6514
6515 if (need_tmp)
6516 full = 0;
6517 else if (GFC_ARRAY_TYPE_P (TREE_TYPE (desc)))
6518 {
6519 /* Create a new descriptor if the array doesn't have one. */
6520 full = 0;
6521 }
6522 else if (info->ref->u.ar.type == AR_FULL || se->descriptor_only)
6523 full = 1;
6524 else if (se->direct_byref)
6525 full = 0;
6526 else
6527 full = gfc_full_array_ref_p (info->ref, NULL);
6528
6529 if (full && !transposed_dims (ss))
6530 {
6531 if (se->direct_byref && !se->byref_noassign)
6532 {
6533 /* Copy the descriptor for pointer assignments. */
6534 gfc_add_modify (&se->pre, se->expr, desc);
6535
6536 /* Add any offsets from subreferences. */
6537 gfc_get_dataptr_offset (&se->pre, se->expr, desc, NULL_TREE,
6538 subref_array_target, expr);
6539 }
6540 else if (se->want_pointer)
6541 {
6542 /* We pass full arrays directly. This means that pointers and
6543 allocatable arrays should also work. */
6544 se->expr = gfc_build_addr_expr (NULL_TREE, desc);
6545 }
6546 else
6547 {
6548 se->expr = desc;
6549 }
6550
6551 if (expr->ts.type == BT_CHARACTER)
6552 se->string_length = gfc_get_expr_charlen (expr);
6553
6554 gfc_free_ss_chain (ss);
6555 return;
6556 }
6557 break;
6558
6559 case EXPR_FUNCTION:
6560 /* A transformational function return value will be a temporary
6561 array descriptor. We still need to go through the scalarizer
6562 to create the descriptor. Elemental functions are handled as
6563 arbitrary expressions, i.e. copy to a temporary. */
6564
6565 if (se->direct_byref)
6566 {
6567 gcc_assert (ss_type == GFC_SS_FUNCTION && ss_expr == expr);
6568
6569 /* For pointer assignments pass the descriptor directly. */
6570 if (se->ss == NULL)
6571 se->ss = ss;
6572 else
6573 gcc_assert (se->ss == ss);
6574 se->expr = gfc_build_addr_expr (NULL_TREE, se->expr);
6575 gfc_conv_expr (se, expr);
6576 gfc_free_ss_chain (ss);
6577 return;
6578 }
6579
6580 if (ss_expr != expr || ss_type != GFC_SS_FUNCTION)
6581 {
6582 if (ss_expr != expr)
6583 /* Elemental function. */
6584 gcc_assert ((expr->value.function.esym != NULL
6585 && expr->value.function.esym->attr.elemental)
6586 || (expr->value.function.isym != NULL
6587 && expr->value.function.isym->elemental)
6588 || gfc_inline_intrinsic_function_p (expr));
6589 else
6590 gcc_assert (ss_type == GFC_SS_INTRINSIC);
6591
6592 need_tmp = 1;
6593 if (expr->ts.type == BT_CHARACTER
6594 && expr->ts.u.cl->length->expr_type != EXPR_CONSTANT)
6595 get_array_charlen (expr, se);
6596
6597 info = NULL;
6598 }
6599 else
6600 {
6601 /* Transformational function. */
6602 info = &ss_info->data.array;
6603 need_tmp = 0;
6604 }
6605 break;
6606
6607 case EXPR_ARRAY:
6608 /* Constant array constructors don't need a temporary. */
6609 if (ss_type == GFC_SS_CONSTRUCTOR
6610 && expr->ts.type != BT_CHARACTER
6611 && gfc_constant_array_constructor_p (expr->value.constructor))
6612 {
6613 need_tmp = 0;
6614 info = &ss_info->data.array;
6615 }
6616 else
6617 {
6618 need_tmp = 1;
6619 info = NULL;
6620 }
6621 break;
6622
6623 default:
6624 /* Something complicated. Copy it into a temporary. */
6625 need_tmp = 1;
6626 info = NULL;
6627 break;
6628 }
6629
6630 /* If we are creating a temporary, we don't need to bother about aliases
6631 anymore. */
6632 if (need_tmp)
6633 se->force_tmp = 0;
6634
6635 gfc_init_loopinfo (&loop);
6636
6637 /* Associate the SS with the loop. */
6638 gfc_add_ss_to_loop (&loop, ss);
6639
6640 /* Tell the scalarizer not to bother creating loop variables, etc. */
6641 if (!need_tmp)
6642 loop.array_parameter = 1;
6643 else
6644 /* The right-hand side of a pointer assignment mustn't use a temporary. */
6645 gcc_assert (!se->direct_byref);
6646
6647 /* Setup the scalarizing loops and bounds. */
6648 gfc_conv_ss_startstride (&loop);
6649
6650 if (need_tmp)
6651 {
6652 if (expr->ts.type == BT_CHARACTER && !expr->ts.u.cl->backend_decl)
6653 get_array_charlen (expr, se);
6654
6655 /* Tell the scalarizer to make a temporary. */
6656 loop.temp_ss = gfc_get_temp_ss (gfc_typenode_for_spec (&expr->ts),
6657 ((expr->ts.type == BT_CHARACTER)
6658 ? expr->ts.u.cl->backend_decl
6659 : NULL),
6660 loop.dimen);
6661
6662 se->string_length = loop.temp_ss->info->string_length;
6663 gcc_assert (loop.temp_ss->dimen == loop.dimen);
6664 gfc_add_ss_to_loop (&loop, loop.temp_ss);
6665 }
6666
6667 gfc_conv_loop_setup (&loop, & expr->where);
6668
6669 if (need_tmp)
6670 {
6671 /* Copy into a temporary and pass that. We don't need to copy the data
6672 back because expressions and vector subscripts must be INTENT_IN. */
6673 /* TODO: Optimize passing function return values. */
6674 gfc_se lse;
6675 gfc_se rse;
6676
6677 /* Start the copying loops. */
6678 gfc_mark_ss_chain_used (loop.temp_ss, 1);
6679 gfc_mark_ss_chain_used (ss, 1);
6680 gfc_start_scalarized_body (&loop, &block);
6681
6682 /* Copy each data element. */
6683 gfc_init_se (&lse, NULL);
6684 gfc_copy_loopinfo_to_se (&lse, &loop);
6685 gfc_init_se (&rse, NULL);
6686 gfc_copy_loopinfo_to_se (&rse, &loop);
6687
6688 lse.ss = loop.temp_ss;
6689 rse.ss = ss;
6690
6691 gfc_conv_scalarized_array_ref (&lse, NULL);
6692 if (expr->ts.type == BT_CHARACTER)
6693 {
6694 gfc_conv_expr (&rse, expr);
6695 if (POINTER_TYPE_P (TREE_TYPE (rse.expr)))
6696 rse.expr = build_fold_indirect_ref_loc (input_location,
6697 rse.expr);
6698 }
6699 else
6700 gfc_conv_expr_val (&rse, expr);
6701
6702 gfc_add_block_to_block (&block, &rse.pre);
6703 gfc_add_block_to_block (&block, &lse.pre);
6704
6705 lse.string_length = rse.string_length;
6706 tmp = gfc_trans_scalar_assign (&lse, &rse, expr->ts, true,
6707 expr->expr_type == EXPR_VARIABLE
6708 || expr->expr_type == EXPR_ARRAY, true);
6709 gfc_add_expr_to_block (&block, tmp);
6710
6711 /* Finish the copying loops. */
6712 gfc_trans_scalarizing_loops (&loop, &block);
6713
6714 desc = loop.temp_ss->info->data.array.descriptor;
6715 }
6716 else if (expr->expr_type == EXPR_FUNCTION && !transposed_dims (ss))
6717 {
6718 desc = info->descriptor;
6719 se->string_length = ss_info->string_length;
6720 }
6721 else
6722 {
6723 /* We pass sections without copying to a temporary. Make a new
6724 descriptor and point it at the section we want. The loop variable
6725 limits will be the limits of the section.
6726 A function may decide to repack the array to speed up access, but
6727 we're not bothered about that here. */
6728 int dim, ndim, codim;
6729 tree parm;
6730 tree parmtype;
6731 tree stride;
6732 tree from;
6733 tree to;
6734 tree base;
6735
6736 ndim = info->ref ? info->ref->u.ar.dimen : ss->dimen;
6737
6738 if (se->want_coarray)
6739 {
6740 gfc_array_ref *ar = &info->ref->u.ar;
6741
6742 codim = gfc_get_corank (expr);
6743 for (n = 0; n < codim - 1; n++)
6744 {
6745 /* Make sure we are not lost somehow. */
6746 gcc_assert (ar->dimen_type[n + ndim] == DIMEN_THIS_IMAGE);
6747
6748 /* Make sure the call to gfc_conv_section_startstride won't
6749 generate unnecessary code to calculate stride. */
6750 gcc_assert (ar->stride[n + ndim] == NULL);
6751
6752 gfc_conv_section_startstride (&loop.pre, ss, n + ndim);
6753 loop.from[n + loop.dimen] = info->start[n + ndim];
6754 loop.to[n + loop.dimen] = info->end[n + ndim];
6755 }
6756
6757 gcc_assert (n == codim - 1);
6758 evaluate_bound (&loop.pre, info->start, ar->start,
6759 info->descriptor, n + ndim, true);
6760 loop.from[n + loop.dimen] = info->start[n + ndim];
6761 }
6762 else
6763 codim = 0;
6764
6765 /* Set the string_length for a character array. */
6766 if (expr->ts.type == BT_CHARACTER)
6767 se->string_length = gfc_get_expr_charlen (expr);
6768
6769 desc = info->descriptor;
6770 if (se->direct_byref && !se->byref_noassign)
6771 {
6772 /* For pointer assignments we fill in the destination. */
6773 parm = se->expr;
6774 parmtype = TREE_TYPE (parm);
6775 }
6776 else
6777 {
6778 /* Otherwise make a new one. */
6779 parmtype = gfc_get_element_type (TREE_TYPE (desc));
6780 parmtype = gfc_get_array_type_bounds (parmtype, loop.dimen, codim,
6781 loop.from, loop.to, 0,
6782 GFC_ARRAY_UNKNOWN, false);
6783 parm = gfc_create_var (parmtype, "parm");
6784 }
6785
6786 offset = gfc_index_zero_node;
6787
6788 /* The following can be somewhat confusing. We have two
6789 descriptors, a new one and the original array.
6790 {parm, parmtype, dim} refer to the new one.
6791 {desc, type, n, loop} refer to the original, which may be
6792 a descriptorless array.
6793 The bounds of the scalarization are the bounds of the section.
6794 We don't have to worry about numeric overflows when calculating
6795 the offsets because all elements are within the array data. */
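/* Worked example (illustrative only): for a section a(3:9:2) of an
   array with lower bound 1 and stride 1, the new descriptor gets
   lbound 1, ubound 4 (four elements: 3, 5, 7, 9), stride 2 (original
   stride times the section stride), and its data pointer is made to
   point at a(3).  */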
6796
6797 /* Set the dtype. */
6798 tmp = gfc_conv_descriptor_dtype (parm);
6799 gfc_add_modify (&loop.pre, tmp, gfc_get_dtype (parmtype));
6800
6801 /* For assignments to a pointer, set the offset to zero only if it is
6802 not the full array. */
6803 if ((se->direct_byref || se->use_offset)
6804 && ((info->ref && info->ref->u.ar.type != AR_FULL)
6805 || (expr->expr_type == EXPR_ARRAY && se->use_offset)))
6806 base = gfc_index_zero_node;
6807 else if (GFC_ARRAY_TYPE_P (TREE_TYPE (desc)))
6808 base = gfc_evaluate_now (gfc_conv_array_offset (desc), &loop.pre);
6809 else
6810 base = NULL_TREE;
6811
6812 for (n = 0; n < ndim; n++)
6813 {
6814 stride = gfc_conv_array_stride (desc, n);
6815
6816 /* Work out the offset. */
6817 if (info->ref
6818 && info->ref->u.ar.dimen_type[n] == DIMEN_ELEMENT)
6819 {
6820 gcc_assert (info->subscript[n]
6821 && info->subscript[n]->info->type == GFC_SS_SCALAR);
6822 start = info->subscript[n]->info->data.scalar.value;
6823 }
6824 else
6825 {
6826 /* Evaluate and remember the start of the section. */
6827 start = info->start[n];
6828 stride = gfc_evaluate_now (stride, &loop.pre);
6829 }
6830
6831 tmp = gfc_conv_array_lbound (desc, n);
6832 tmp = fold_build2_loc (input_location, MINUS_EXPR, TREE_TYPE (tmp),
6833 start, tmp);
6834 tmp = fold_build2_loc (input_location, MULT_EXPR, TREE_TYPE (tmp),
6835 tmp, stride);
6836 offset = fold_build2_loc (input_location, PLUS_EXPR, TREE_TYPE (tmp),
6837 offset, tmp);
6838
6839 if (info->ref
6840 && info->ref->u.ar.dimen_type[n] == DIMEN_ELEMENT)
6841 {
6842 /* For elemental dimensions, we only need the offset. */
6843 continue;
6844 }
6845
6846 /* Vector subscripts need copying and are handled elsewhere. */
6847 if (info->ref)
6848 gcc_assert (info->ref->u.ar.dimen_type[n] == DIMEN_RANGE);
6849
6850 /* Look for the corresponding scalarizer dimension: dim. */
6851 for (dim = 0; dim < ndim; dim++)
6852 if (ss->dim[dim] == n)
6853 break;
6854
6855 /* Loop exited early: the DIM being looked for has been found. */
6856 gcc_assert (dim < ndim);
6857
6858 /* Set the new lower bound. */
6859 from = loop.from[dim];
6860 to = loop.to[dim];
6861
6862 /* If we have an array section or are assigning, make sure that
6863 the lower bound is 1. References to the full
6864 array should otherwise keep the original bounds. */
6865 if ((!info->ref
6866 || info->ref->u.ar.type != AR_FULL)
6867 && !integer_onep (from))
6868 {
6869 tmp = fold_build2_loc (input_location, MINUS_EXPR,
6870 gfc_array_index_type, gfc_index_one_node,
6871 from);
6872 to = fold_build2_loc (input_location, PLUS_EXPR,
6873 gfc_array_index_type, to, tmp);
6874 from = gfc_index_one_node;
6875 }
6876 gfc_conv_descriptor_lbound_set (&loop.pre, parm,
6877 gfc_rank_cst[dim], from);
6878
6879 /* Set the new upper bound. */
6880 gfc_conv_descriptor_ubound_set (&loop.pre, parm,
6881 gfc_rank_cst[dim], to);
6882
6883 /* Multiply the stride by the section stride to get the
6884 total stride. */
6885 stride = fold_build2_loc (input_location, MULT_EXPR,
6886 gfc_array_index_type,
6887 stride, info->stride[n]);
6888
6889 if (se->direct_byref
6890 && ((info->ref && info->ref->u.ar.type != AR_FULL)
6891 || (expr->expr_type == EXPR_ARRAY && se->use_offset)))
6892 {
6893 base = fold_build2_loc (input_location, MINUS_EXPR,
6894 TREE_TYPE (base), base, stride);
6895 }
6896 else if (GFC_ARRAY_TYPE_P (TREE_TYPE (desc)) || se->use_offset)
6897 {
6898 tmp = gfc_conv_array_lbound (desc, n);
6899 tmp = fold_build2_loc (input_location, MINUS_EXPR,
6900 TREE_TYPE (base), tmp, loop.from[dim]);
6901 tmp = fold_build2_loc (input_location, MULT_EXPR,
6902 TREE_TYPE (base), tmp,
6903 gfc_conv_array_stride (desc, n));
6904 base = fold_build2_loc (input_location, PLUS_EXPR,
6905 TREE_TYPE (base), tmp, base);
6906 }
6907
6908 /* Store the new stride. */
6909 gfc_conv_descriptor_stride_set (&loop.pre, parm,
6910 gfc_rank_cst[dim], stride);
6911 }
6912
6913 for (n = loop.dimen; n < loop.dimen + codim; n++)
6914 {
6915 from = loop.from[n];
6916 to = loop.to[n];
6917 gfc_conv_descriptor_lbound_set (&loop.pre, parm,
6918 gfc_rank_cst[n], from);
6919 if (n < loop.dimen + codim - 1)
6920 gfc_conv_descriptor_ubound_set (&loop.pre, parm,
6921 gfc_rank_cst[n], to);
6922 }
6923
6924 if (se->data_not_needed)
6925 gfc_conv_descriptor_data_set (&loop.pre, parm,
6926 gfc_index_zero_node);
6927 else
6928 /* Point the data pointer at the 1st element in the section. */
6929 gfc_get_dataptr_offset (&loop.pre, parm, desc, offset,
6930 subref_array_target, expr);
6931
6932 if (((se->direct_byref || GFC_ARRAY_TYPE_P (TREE_TYPE (desc)))
6933 && !se->data_not_needed)
6934 || (se->use_offset && base != NULL_TREE))
6935 {
6936 /* Set the offset. */
6937 gfc_conv_descriptor_offset_set (&loop.pre, parm, base);
6938 }
6939 else
6940 {
6941 /* Only the callee knows what the correct offset is, so just set
6942 it to zero here. */
6943 gfc_conv_descriptor_offset_set (&loop.pre, parm, gfc_index_zero_node);
6944 }
6945 desc = parm;
6946 }
6947
6948 if (!se->direct_byref || se->byref_noassign)
6949 {
6950 /* Get a pointer to the new descriptor. */
6951 if (se->want_pointer)
6952 se->expr = gfc_build_addr_expr (NULL_TREE, desc);
6953 else
6954 se->expr = desc;
6955 }
6956
6957 gfc_add_block_to_block (&se->pre, &loop.pre);
6958 gfc_add_block_to_block (&se->post, &loop.post);
6959
6960 /* Cleanup the scalarizer. */
6961 gfc_cleanup_loop (&loop);
6962 }
6963
6964 /* Helper function for gfc_conv_array_parameter if array size needs to be
6965 computed. */
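/* Schematically, the size computed here is in bytes: for rank 1 it is
   max (ubound - lbound + 1, 0) * sizeof (element); for higher ranks the
   element count comes from the library function size0, and for
   descriptorless arrays the stored array size is used directly.  */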
6966
6967 static void
6968 array_parameter_size (tree desc, gfc_expr *expr, tree *size)
6969 {
6970 tree elem;
6971 if (GFC_ARRAY_TYPE_P (TREE_TYPE (desc)))
6972 *size = GFC_TYPE_ARRAY_SIZE (TREE_TYPE (desc));
6973 else if (expr->rank > 1)
6974 *size = build_call_expr_loc (input_location,
6975 gfor_fndecl_size0, 1,
6976 gfc_build_addr_expr (NULL, desc));
6977 else
6978 {
6979 tree ubound = gfc_conv_descriptor_ubound_get (desc, gfc_index_zero_node);
6980 tree lbound = gfc_conv_descriptor_lbound_get (desc, gfc_index_zero_node);
6981
6982 *size = fold_build2_loc (input_location, MINUS_EXPR,
6983 gfc_array_index_type, ubound, lbound);
6984 *size = fold_build2_loc (input_location, PLUS_EXPR, gfc_array_index_type,
6985 *size, gfc_index_one_node);
6986 *size = fold_build2_loc (input_location, MAX_EXPR, gfc_array_index_type,
6987 *size, gfc_index_zero_node);
6988 }
6989 elem = TYPE_SIZE_UNIT (gfc_get_element_type (TREE_TYPE (desc)));
6990 *size = fold_build2_loc (input_location, MULT_EXPR, gfc_array_index_type,
6991 *size, fold_convert (gfc_array_index_type, elem));
6992 }
6993
6994 /* Convert an array for passing as an actual parameter. */
6995 /* TODO: Optimize passing g77 arrays. */
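/* Schematic examples (hypothetical, for illustration):
     call ext (a)          ! contiguous full array: pass the data pointer
     call ext (a(1:n:2))   ! non-contiguous section to a g77-style dummy:
                           ! pack into a temporary, unpack after the call
   In the second case a "Creating array temporary" warning and an
   optional runtime check may be emitted, and the copy-back is skipped
   when the dummy is known to be INTENT(IN).  */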
6996
6997 void
6998 gfc_conv_array_parameter (gfc_se * se, gfc_expr * expr, bool g77,
6999 const gfc_symbol *fsym, const char *proc_name,
7000 tree *size)
7001 {
7002 tree ptr;
7003 tree desc;
7004 tree tmp = NULL_TREE;
7005 tree stmt;
7006 tree parent = DECL_CONTEXT (current_function_decl);
7007 bool full_array_var;
7008 bool this_array_result;
7009 bool contiguous;
7010 bool no_pack;
7011 bool array_constructor;
7012 bool good_allocatable;
7013 bool ultimate_ptr_comp;
7014 bool ultimate_alloc_comp;
7015 gfc_symbol *sym;
7016 stmtblock_t block;
7017 gfc_ref *ref;
7018
7019 ultimate_ptr_comp = false;
7020 ultimate_alloc_comp = false;
7021
7022 for (ref = expr->ref; ref; ref = ref->next)
7023 {
7024 if (ref->next == NULL)
7025 break;
7026
7027 if (ref->type == REF_COMPONENT)
7028 {
7029 ultimate_ptr_comp = ref->u.c.component->attr.pointer;
7030 ultimate_alloc_comp = ref->u.c.component->attr.allocatable;
7031 }
7032 }
7033
7034 full_array_var = false;
7035 contiguous = false;
7036
7037 if (expr->expr_type == EXPR_VARIABLE && ref && !ultimate_ptr_comp)
7038 full_array_var = gfc_full_array_ref_p (ref, &contiguous);
7039
7040 sym = full_array_var ? expr->symtree->n.sym : NULL;
7041
7042 /* The symbol should have an array specification. */
7043 gcc_assert (!sym || sym->as || ref->u.ar.as);
7044
7045 if (expr->expr_type == EXPR_ARRAY && expr->ts.type == BT_CHARACTER)
7046 {
7047 get_array_ctor_strlen (&se->pre, expr->value.constructor, &tmp);
7048 expr->ts.u.cl->backend_decl = tmp;
7049 se->string_length = tmp;
7050 }
7051
7052 /* Is this the result of the enclosing procedure? */
7053 this_array_result = (full_array_var && sym->attr.flavor == FL_PROCEDURE);
7054 if (this_array_result
7055 && (sym->backend_decl != current_function_decl)
7056 && (sym->backend_decl != parent))
7057 this_array_result = false;
7058
7059 /* Pass the address of the array if it is not a pointer or assumed-shape array. */
7060 if (full_array_var && g77 && !this_array_result
7061 && sym->ts.type != BT_DERIVED && sym->ts.type != BT_CLASS)
7062 {
7063 tmp = gfc_get_symbol_decl (sym);
7064
7065 if (sym->ts.type == BT_CHARACTER)
7066 se->string_length = sym->ts.u.cl->backend_decl;
7067
7068 if (!sym->attr.pointer
7069 && sym->as
7070 && sym->as->type != AS_ASSUMED_SHAPE
7071 && sym->as->type != AS_DEFERRED
7072 && sym->as->type != AS_ASSUMED_RANK
7073 && !sym->attr.allocatable)
7074 {
7075 /* Some variables are declared directly, others are declared as
7076 pointers and allocated on the heap. */
7077 if (sym->attr.dummy || POINTER_TYPE_P (TREE_TYPE (tmp)))
7078 se->expr = tmp;
7079 else
7080 se->expr = gfc_build_addr_expr (NULL_TREE, tmp);
7081 if (size)
7082 array_parameter_size (tmp, expr, size);
7083 return;
7084 }
7085
7086 if (sym->attr.allocatable)
7087 {
7088 if (sym->attr.dummy || sym->attr.result)
7089 {
7090 gfc_conv_expr_descriptor (se, expr);
7091 tmp = se->expr;
7092 }
7093 if (size)
7094 array_parameter_size (tmp, expr, size);
7095 se->expr = gfc_conv_array_data (tmp);
7096 return;
7097 }
7098 }
7099
7100 /* A convenient reduction in scope. */
7101 contiguous = g77 && !this_array_result && contiguous;
7102
7103 /* There is no need to pack and unpack the array if it is contiguous
7104 and not a deferred- or assumed-shape array, or if it is simply
7105 contiguous. */
7106 no_pack = ((sym && sym->as
7107 && !sym->attr.pointer
7108 && sym->as->type != AS_DEFERRED
7109 && sym->as->type != AS_ASSUMED_RANK
7110 && sym->as->type != AS_ASSUMED_SHAPE)
7111 ||
7112 (ref && ref->u.ar.as
7113 && ref->u.ar.as->type != AS_DEFERRED
7114 && ref->u.ar.as->type != AS_ASSUMED_RANK
7115 && ref->u.ar.as->type != AS_ASSUMED_SHAPE)
7116 ||
7117 gfc_is_simply_contiguous (expr, false));
7118
7119 no_pack = contiguous && no_pack;
7120
7121 /* Array constructors are always contiguous and do not need packing. */
7122 array_constructor = g77 && !this_array_result && expr->expr_type == EXPR_ARRAY;
7123
7124 /* Same is true of contiguous sections from allocatable variables. */
7125 good_allocatable = contiguous
7126 && expr->symtree
7127 && expr->symtree->n.sym->attr.allocatable;
7128
7129 /* Or ultimate allocatable components. */
7130 ultimate_alloc_comp = contiguous && ultimate_alloc_comp;
7131
7132 if (no_pack || array_constructor || good_allocatable || ultimate_alloc_comp)
7133 {
7134 gfc_conv_expr_descriptor (se, expr);
7135 if (expr->ts.type == BT_CHARACTER)
7136 se->string_length = expr->ts.u.cl->backend_decl;
7137 if (size)
7138 array_parameter_size (se->expr, expr, size);
7139 se->expr = gfc_conv_array_data (se->expr);
7140 return;
7141 }
7142
7143 if (this_array_result)
7144 {
7145 /* Result of the enclosing function. */
7146 gfc_conv_expr_descriptor (se, expr);
7147 if (size)
7148 array_parameter_size (se->expr, expr, size);
7149 se->expr = gfc_build_addr_expr (NULL_TREE, se->expr);
7150
7151 if (g77 && TREE_TYPE (TREE_TYPE (se->expr)) != NULL_TREE
7152 && GFC_DESCRIPTOR_TYPE_P (TREE_TYPE (TREE_TYPE (se->expr))))
7153 se->expr = gfc_conv_array_data (build_fold_indirect_ref_loc (input_location,
7154 se->expr));
7155
7156 return;
7157 }
7158 else
7159 {
7160 /* Every other type of array. */
7161 se->want_pointer = 1;
7162 gfc_conv_expr_descriptor (se, expr);
7163 if (size)
7164 array_parameter_size (build_fold_indirect_ref_loc (input_location,
7165 se->expr),
7166 expr, size);
7167 }
7168
7169 /* Deallocate the allocatable components of structures that are
7170 not variables. */
7171 if ((expr->ts.type == BT_DERIVED || expr->ts.type == BT_CLASS)
7172 && expr->ts.u.derived->attr.alloc_comp
7173 && expr->expr_type != EXPR_VARIABLE)
7174 {
7175 tmp = build_fold_indirect_ref_loc (input_location, se->expr);
7176 tmp = gfc_deallocate_alloc_comp (expr->ts.u.derived, tmp, expr->rank);
7177
7178 /* The components shall be deallocated before their containing entity. */
7179 gfc_prepend_expr_to_block (&se->post, tmp);
7180 }
7181
7182 if (g77 || (fsym && fsym->attr.contiguous
7183 && !gfc_is_simply_contiguous (expr, false)))
7184 {
7185 tree origptr = NULL_TREE;
7186
7187 desc = se->expr;
7188
7189 /* For contiguous arrays, save the original value of the descriptor. */
7190 if (!g77)
7191 {
7192 origptr = gfc_create_var (pvoid_type_node, "origptr");
7193 tmp = build_fold_indirect_ref_loc (input_location, desc);
7194 tmp = gfc_conv_array_data (tmp);
7195 tmp = fold_build2_loc (input_location, MODIFY_EXPR,
7196 TREE_TYPE (origptr), origptr,
7197 fold_convert (TREE_TYPE (origptr), tmp));
7198 gfc_add_expr_to_block (&se->pre, tmp);
7199 }
7200
7201 /* Repack the array. */
7202 if (gfc_option.warn_array_temp)
7203 {
7204 if (fsym)
7205 gfc_warning ("Creating array temporary at %L for argument '%s'",
7206 &expr->where, fsym->name);
7207 else
7208 gfc_warning ("Creating array temporary at %L", &expr->where);
7209 }
7210
7211 ptr = build_call_expr_loc (input_location,
7212 gfor_fndecl_in_pack, 1, desc);
7213
7214 if (fsym && fsym->attr.optional && sym && sym->attr.optional)
7215 {
7216 tmp = gfc_conv_expr_present (sym);
7217 ptr = build3_loc (input_location, COND_EXPR, TREE_TYPE (se->expr),
7218 tmp, fold_convert (TREE_TYPE (se->expr), ptr),
7219 fold_convert (TREE_TYPE (se->expr), null_pointer_node));
7220 }
7221
7222 ptr = gfc_evaluate_now (ptr, &se->pre);
7223
7224 /* Use the packed data for the actual argument, except for contiguous arrays,
7225 where the descriptor's data component is set. */
7226 if (g77)
7227 se->expr = ptr;
7228 else
7229 {
7230 tmp = build_fold_indirect_ref_loc (input_location, desc);
7231
7232 gfc_ss * ss = gfc_walk_expr (expr);
7233 if (!transposed_dims (ss))
7234 gfc_conv_descriptor_data_set (&se->pre, tmp, ptr);
7235 else
7236 {
7237 tree old_field, new_field;
7238
7239 /* The original descriptor has transposed dims so we can't reuse
7240 it directly; we have to create a new one. */
7241 tree old_desc = tmp;
7242 tree new_desc = gfc_create_var (TREE_TYPE (old_desc), "arg_desc");
7243
7244 old_field = gfc_conv_descriptor_dtype (old_desc);
7245 new_field = gfc_conv_descriptor_dtype (new_desc);
7246 gfc_add_modify (&se->pre, new_field, old_field);
7247
7248 old_field = gfc_conv_descriptor_offset (old_desc);
7249 new_field = gfc_conv_descriptor_offset (new_desc);
7250 gfc_add_modify (&se->pre, new_field, old_field);
7251
7252 for (int i = 0; i < expr->rank; i++)
7253 {
7254 old_field = gfc_conv_descriptor_dimension (old_desc,
7255 gfc_rank_cst[get_array_ref_dim_for_loop_dim (ss, i)]);
7256 new_field = gfc_conv_descriptor_dimension (new_desc,
7257 gfc_rank_cst[i]);
7258 gfc_add_modify (&se->pre, new_field, old_field);
7259 }
7260
7261 if (gfc_option.coarray == GFC_FCOARRAY_LIB
7262 && GFC_DESCRIPTOR_TYPE_P (TREE_TYPE (old_desc))
7263 && GFC_TYPE_ARRAY_AKIND (TREE_TYPE (old_desc))
7264 == GFC_ARRAY_ALLOCATABLE)
7265 {
7266 old_field = gfc_conv_descriptor_token (old_desc);
7267 new_field = gfc_conv_descriptor_token (new_desc);
7268 gfc_add_modify (&se->pre, new_field, old_field);
7269 }
7270
7271 gfc_conv_descriptor_data_set (&se->pre, new_desc, ptr);
7272 se->expr = gfc_build_addr_expr (NULL_TREE, new_desc);
7273 }
7274 gfc_free_ss (ss);
7275 }
7276
7277 if (gfc_option.rtcheck & GFC_RTCHECK_ARRAY_TEMPS)
7278 {
7279 char * msg;
7280
7281 if (fsym && proc_name)
7282 asprintf (&msg, "An array temporary was created for argument "
7283 "'%s' of procedure '%s'", fsym->name, proc_name);
7284 else
7285 asprintf (&msg, "An array temporary was created");
7286
7287 tmp = build_fold_indirect_ref_loc (input_location,
7288 desc);
7289 tmp = gfc_conv_array_data (tmp);
7290 tmp = fold_build2_loc (input_location, NE_EXPR, boolean_type_node,
7291 fold_convert (TREE_TYPE (tmp), ptr), tmp);
7292
7293 if (fsym && fsym->attr.optional && sym && sym->attr.optional)
7294 tmp = fold_build2_loc (input_location, TRUTH_AND_EXPR,
7295 boolean_type_node,
7296 gfc_conv_expr_present (sym), tmp);
7297
7298 gfc_trans_runtime_check (false, true, tmp, &se->pre,
7299 &expr->where, msg);
7300 free (msg);
7301 }
7302
7303 gfc_start_block (&block);
7304
7305 /* Copy the data back. */
7306 if (fsym == NULL || fsym->attr.intent != INTENT_IN)
7307 {
7308 tmp = build_call_expr_loc (input_location,
7309 gfor_fndecl_in_unpack, 2, desc, ptr);
7310 gfc_add_expr_to_block (&block, tmp);
7311 }
7312
7313 /* Free the temporary. */
7314 tmp = gfc_call_free (convert (pvoid_type_node, ptr));
7315 gfc_add_expr_to_block (&block, tmp);
7316
7317 stmt = gfc_finish_block (&block);
7318
7319 gfc_init_block (&block);
7320 /* Only if it was repacked. This code needs to be executed before the
7321 loop cleanup code. */
7322 tmp = build_fold_indirect_ref_loc (input_location,
7323 desc);
7324 tmp = gfc_conv_array_data (tmp);
7325 tmp = fold_build2_loc (input_location, NE_EXPR, boolean_type_node,
7326 fold_convert (TREE_TYPE (tmp), ptr), tmp);
7327
7328 if (fsym && fsym->attr.optional && sym && sym->attr.optional)
7329 tmp = fold_build2_loc (input_location, TRUTH_AND_EXPR,
7330 boolean_type_node,
7331 gfc_conv_expr_present (sym), tmp);
7332
7333 tmp = build3_v (COND_EXPR, tmp, stmt, build_empty_stmt (input_location));
7334
7335 gfc_add_expr_to_block (&block, tmp);
7336 gfc_add_block_to_block (&block, &se->post);
7337
7338 gfc_init_block (&se->post);
7339
7340 /* Reset the descriptor pointer. */
7341 if (!g77)
7342 {
7343 tmp = build_fold_indirect_ref_loc (input_location, desc);
7344 gfc_conv_descriptor_data_set (&se->post, tmp, origptr);
7345 }
7346
7347 gfc_add_block_to_block (&se->post, &block);
7348 }
7349 }
7350
7351
7352 /* Generate code to deallocate an array, if it is allocated. */
7353
7354 tree
7355 gfc_trans_dealloc_allocated (tree descriptor, bool coarray, gfc_expr *expr)
7356 {
7357 tree tmp;
7358 tree var;
7359 stmtblock_t block;
7360
7361 gfc_start_block (&block);
7362
7363 var = gfc_conv_descriptor_data_get (descriptor);
7364 STRIP_NOPS (var);
7365
7366 /* Call array_deallocate with an int * present in the second argument.
7367 Although it is ignored here, its presence ensures that arrays that
7368 are already deallocated are ignored. */
7369 tmp = gfc_deallocate_with_status (coarray ? descriptor : var, NULL_TREE,
7370 NULL_TREE, NULL_TREE, NULL_TREE, true,
7371 expr, coarray);
7372 gfc_add_expr_to_block (&block, tmp);
7373
7374 /* Zero the data pointer. */
7375 tmp = fold_build2_loc (input_location, MODIFY_EXPR, void_type_node,
7376 var, build_int_cst (TREE_TYPE (var), 0));
7377 gfc_add_expr_to_block (&block, tmp);
7378
7379 return gfc_finish_block (&block);
7380 }
7381
7382
7383 /* This helper function calculates the size (in elements) of a full array. */
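/* Schematically: size = (ubound(last) - lbound(last) + 1) * stride(last),
   i.e. the total number of elements of a contiguous descriptor array.  */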
7384
7385 static tree
7386 get_full_array_size (stmtblock_t *block, tree decl, int rank)
7387 {
7388 tree idx;
7389 tree nelems;
7390 tree tmp;
7391 idx = gfc_rank_cst[rank - 1];
7392 nelems = gfc_conv_descriptor_ubound_get (decl, idx);
7393 tmp = gfc_conv_descriptor_lbound_get (decl, idx);
7394 tmp = fold_build2_loc (input_location, MINUS_EXPR, gfc_array_index_type,
7395 nelems, tmp);
7396 tmp = fold_build2_loc (input_location, PLUS_EXPR, gfc_array_index_type,
7397 tmp, gfc_index_one_node);
7398 tmp = gfc_evaluate_now (tmp, block);
7399
7400 nelems = gfc_conv_descriptor_stride_get (decl, idx);
7401 tmp = fold_build2_loc (input_location, MULT_EXPR, gfc_array_index_type,
7402 nelems, tmp);
7403 return gfc_evaluate_now (tmp, block);
7404 }
7405
7406
7407 /* Allocate dest to the same size as src, and copy src -> dest.
7408 If no_malloc is set, only the copy is done. */
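/* As a rough sketch, the tree built for the non-descriptor case has
   the shape

       if (src != NULL)
         {
           dest = malloc (size);        (skipped when no_malloc is set)
           memcpy (dest, src, size);
         }
       else
         dest = NULL;

   and the descriptor case does the same through the data pointers,
   with size = number of elements * element size (or str_sz).  */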
7409
7410 static tree
7411 duplicate_allocatable (tree dest, tree src, tree type, int rank,
7412 bool no_malloc, tree str_sz)
7413 {
7414 tree tmp;
7415 tree size;
7416 tree nelems;
7417 tree null_cond;
7418 tree null_data;
7419 stmtblock_t block;
7420
7421 /* If the source is null, set the destination to null; otherwise
7422 allocate memory for the destination and copy the data across. */
7423 gfc_init_block (&block);
7424
7425 if (!GFC_DESCRIPTOR_TYPE_P (TREE_TYPE (dest)))
7426 {
7427 tmp = null_pointer_node;
7428 tmp = fold_build2_loc (input_location, MODIFY_EXPR, type, dest, tmp);
7429 gfc_add_expr_to_block (&block, tmp);
7430 null_data = gfc_finish_block (&block);
7431
7432 gfc_init_block (&block);
7433 if (str_sz != NULL_TREE)
7434 size = str_sz;
7435 else
7436 size = TYPE_SIZE_UNIT (TREE_TYPE (type));
7437
7438 if (!no_malloc)
7439 {
7440 tmp = gfc_call_malloc (&block, type, size);
7441 tmp = fold_build2_loc (input_location, MODIFY_EXPR, void_type_node,
7442 dest, fold_convert (type, tmp));
7443 gfc_add_expr_to_block (&block, tmp);
7444 }
7445
7446 tmp = builtin_decl_explicit (BUILT_IN_MEMCPY);
7447 tmp = build_call_expr_loc (input_location, tmp, 3, dest, src,
7448 fold_convert (size_type_node, size));
7449 }
7450 else
7451 {
7452 gfc_conv_descriptor_data_set (&block, dest, null_pointer_node);
7453 null_data = gfc_finish_block (&block);
7454
7455 gfc_init_block (&block);
7456 if (rank)
7457 nelems = get_full_array_size (&block, src, rank);
7458 else
7459 nelems = gfc_index_one_node;
7460
7461 if (str_sz != NULL_TREE)
7462 tmp = fold_convert (gfc_array_index_type, str_sz);
7463 else
7464 tmp = fold_convert (gfc_array_index_type,
7465 TYPE_SIZE_UNIT (gfc_get_element_type (type)));
7466 size = fold_build2_loc (input_location, MULT_EXPR, gfc_array_index_type,
7467 nelems, tmp);
7468 if (!no_malloc)
7469 {
7470 tmp = TREE_TYPE (gfc_conv_descriptor_data_get (src));
7471 tmp = gfc_call_malloc (&block, tmp, size);
7472 gfc_conv_descriptor_data_set (&block, dest, tmp);
7473 }
7474
7475 /* We know the temporary and the value will be the same length,
7476 so we can use memcpy. */
7477 tmp = builtin_decl_explicit (BUILT_IN_MEMCPY);
7478 tmp = build_call_expr_loc (input_location,
7479 tmp, 3, gfc_conv_descriptor_data_get (dest),
7480 gfc_conv_descriptor_data_get (src),
7481 fold_convert (size_type_node, size));
7482 }
7483
7484 gfc_add_expr_to_block (&block, tmp);
7485 tmp = gfc_finish_block (&block);
7486
7487 /* Null the destination if the source is null; otherwise do
7488 the allocate and copy. */
7489 if (!GFC_DESCRIPTOR_TYPE_P (TREE_TYPE (src)))
7490 null_cond = src;
7491 else
7492 null_cond = gfc_conv_descriptor_data_get (src);
7493
7494 null_cond = convert (pvoid_type_node, null_cond);
7495 null_cond = fold_build2_loc (input_location, NE_EXPR, boolean_type_node,
7496 null_cond, null_pointer_node);
7497 return build3_v (COND_EXPR, null_cond, tmp, null_data);
7498 }
7499
7500
7501 /* Allocate dest to the same size as src, and copy data src -> dest. */
7502
7503 tree
7504 gfc_duplicate_allocatable (tree dest, tree src, tree type, int rank)
7505 {
7506 return duplicate_allocatable (dest, src, type, rank, false, NULL_TREE);
7507 }
7508
7509
7510 /* Copy data src -> dest. */
7511
7512 tree
7513 gfc_copy_allocatable_data (tree dest, tree src, tree type, int rank)
7514 {
7515 return duplicate_allocatable (dest, src, type, rank, true, NULL_TREE);
7516 }
7517
7518
7519 /* Recursively traverse an object of derived type, generating code to
7520 deallocate, nullify or copy allocatable components. This is the workhorse
7521 function for the functions named in this enum. */
7522
7523 enum {DEALLOCATE_ALLOC_COMP = 1, DEALLOCATE_ALLOC_COMP_NO_CAF,
7524 NULLIFY_ALLOC_COMP, COPY_ALLOC_COMP, COPY_ONLY_ALLOC_COMP,
7525 COPY_ALLOC_COMP_CAF};
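/* For example, given

       type t
         integer, allocatable :: a(:)
       end type t
       type(t) :: x

   NULLIFY_ALLOC_COMP nullifies x%a's data pointer, the DEALLOCATE_*
   purposes free x%a when it is allocated, and the COPY_* purposes
   allocate and copy the component for intrinsic assignment.  */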
7526
7527 static tree
7528 structure_alloc_comps (gfc_symbol * der_type, tree decl,
7529 tree dest, int rank, int purpose)
7530 {
7531 gfc_component *c;
7532 gfc_loopinfo loop;
7533 stmtblock_t fnblock;
7534 stmtblock_t loopbody;
7535 stmtblock_t tmpblock;
7536 tree decl_type;
7537 tree tmp;
7538 tree comp;
7539 tree dcmp;
7540 tree nelems;
7541 tree index;
7542 tree var;
7543 tree cdecl;
7544 tree ctype;
7545 tree vref, dref;
7546 tree null_cond = NULL_TREE;
7547 bool called_dealloc_with_status;
7548
7549 gfc_init_block (&fnblock);
7550
7551 decl_type = TREE_TYPE (decl);
7552
7553 if ((POINTER_TYPE_P (decl_type) && rank != 0)
7554 || (TREE_CODE (decl_type) == REFERENCE_TYPE && rank == 0))
7555 decl = build_fold_indirect_ref_loc (input_location, decl);
7556
7557 /* Just in case it gets dereferenced. */
7558 decl_type = TREE_TYPE (decl);
7559
7560 /* If this is an array of derived types with allocatable components
7561 build a loop and recursively call this function. */
7562 if (TREE_CODE (decl_type) == ARRAY_TYPE
7563 || (GFC_DESCRIPTOR_TYPE_P (decl_type) && rank != 0))
7564 {
7565 tmp = gfc_conv_array_data (decl);
7566 var = build_fold_indirect_ref_loc (input_location,
7567 tmp);
7568
7569 /* Get the number of elements - 1 and set the counter. */
7570 if (GFC_DESCRIPTOR_TYPE_P (decl_type))
7571 {
7572 /* Use the descriptor for an allocatable array. Since this
7573 is a full array reference, we only need the descriptor
7574 information from dimension = rank. */
7575 tmp = get_full_array_size (&fnblock, decl, rank);
7576 tmp = fold_build2_loc (input_location, MINUS_EXPR,
7577 gfc_array_index_type, tmp,
7578 gfc_index_one_node);
7579
7580 null_cond = gfc_conv_descriptor_data_get (decl);
7581 null_cond = fold_build2_loc (input_location, NE_EXPR,
7582 boolean_type_node, null_cond,
7583 build_int_cst (TREE_TYPE (null_cond), 0));
7584 }
7585 else
7586 {
7587 /* Otherwise use the TYPE_DOMAIN information. */
7588 tmp = array_type_nelts (decl_type);
7589 tmp = fold_convert (gfc_array_index_type, tmp);
7590 }
7591
7592 /* Remember that this is, in fact, the no. of elements - 1. */
7593 nelems = gfc_evaluate_now (tmp, &fnblock);
7594 index = gfc_create_var (gfc_array_index_type, "S");
7595
7596 /* Build the body of the loop. */
7597 gfc_init_block (&loopbody);
7598
7599 vref = gfc_build_array_ref (var, index, NULL);
7600
7601 if (purpose == COPY_ALLOC_COMP)
7602 {
7603 if (GFC_DESCRIPTOR_TYPE_P (TREE_TYPE (dest)))
7604 {
7605 tmp = gfc_duplicate_allocatable (dest, decl, decl_type, rank);
7606 gfc_add_expr_to_block (&fnblock, tmp);
7607 }
7608 tmp = build_fold_indirect_ref_loc (input_location,
7609 gfc_conv_array_data (dest));
7610 dref = gfc_build_array_ref (tmp, index, NULL);
7611 tmp = structure_alloc_comps (der_type, vref, dref, rank, purpose);
7612 }
7613 else if (purpose == COPY_ONLY_ALLOC_COMP)
7614 {
7615 tmp = build_fold_indirect_ref_loc (input_location,
7616 gfc_conv_array_data (dest));
7617 dref = gfc_build_array_ref (tmp, index, NULL);
7618 tmp = structure_alloc_comps (der_type, vref, dref, rank,
7619 COPY_ALLOC_COMP);
7620 }
7621 else
7622 tmp = structure_alloc_comps (der_type, vref, NULL_TREE, rank, purpose);
7623
7624 gfc_add_expr_to_block (&loopbody, tmp);
7625
7626 /* Build the loop and return. */
7627 gfc_init_loopinfo (&loop);
7628 loop.dimen = 1;
7629 loop.from[0] = gfc_index_zero_node;
7630 loop.loopvar[0] = index;
7631 loop.to[0] = nelems;
7632 gfc_trans_scalarizing_loops (&loop, &loopbody);
7633 gfc_add_block_to_block (&fnblock, &loop.pre);
7634
7635 tmp = gfc_finish_block (&fnblock);
7636 if (null_cond != NULL_TREE)
7637 tmp = build3_v (COND_EXPR, null_cond, tmp,
7638 build_empty_stmt (input_location));
7639
7640 return tmp;
7641 }
7642
7643 /* Otherwise, act on the components or recursively call self to
7644 act on a chain of components. */
7645 for (c = der_type->components; c; c = c->next)
7646 {
7647 bool cmp_has_alloc_comps = (c->ts.type == BT_DERIVED
7648 || c->ts.type == BT_CLASS)
7649 && c->ts.u.derived->attr.alloc_comp;
7650 cdecl = c->backend_decl;
7651 ctype = TREE_TYPE (cdecl);
7652
7653 switch (purpose)
7654 {
7655 case DEALLOCATE_ALLOC_COMP:
7656 case DEALLOCATE_ALLOC_COMP_NO_CAF:
7657
7658 /* gfc_deallocate_scalar_with_status calls gfc_deallocate_alloc_comp
7659 (i.e. this function) so generate all the calls and suppress the
7660 recursion from here, if necessary. */
7661 called_dealloc_with_status = false;
7662 gfc_init_block (&tmpblock);
7663
7664 if ((c->ts.type == BT_DERIVED && !c->attr.pointer)
7665 || (c->ts.type == BT_CLASS && !CLASS_DATA (c)->attr.class_pointer))
7666 {
7667 comp = fold_build3_loc (input_location, COMPONENT_REF, ctype,
7668 decl, cdecl, NULL_TREE);
7669
7670 /* The finalizer frees allocatable components. */
7671 called_dealloc_with_status
7672 = gfc_add_comp_finalizer_call (&tmpblock, comp, c,
7673 purpose == DEALLOCATE_ALLOC_COMP);
7674 }
7675 else
7676 comp = NULL_TREE;
7677
7678 if (c->attr.allocatable && !c->attr.proc_pointer
7679 && (c->attr.dimension
7680 || (c->attr.codimension
7681 && purpose != DEALLOCATE_ALLOC_COMP_NO_CAF)))
7682 {
7683 if (comp == NULL_TREE)
7684 comp = fold_build3_loc (input_location, COMPONENT_REF, ctype,
7685 decl, cdecl, NULL_TREE);
7686 tmp = gfc_trans_dealloc_allocated (comp, c->attr.codimension, NULL);
7687 gfc_add_expr_to_block (&tmpblock, tmp);
7688 }
7689 else if (c->attr.allocatable && !c->attr.codimension)
7690 {
7691 /* Allocatable scalar components. */
7692 if (comp == NULL_TREE)
7693 comp = fold_build3_loc (input_location, COMPONENT_REF, ctype,
7694 decl, cdecl, NULL_TREE);
7695
7696 tmp = gfc_deallocate_scalar_with_status (comp, NULL, true, NULL,
7697 c->ts);
7698 gfc_add_expr_to_block (&tmpblock, tmp);
7699 called_dealloc_with_status = true;
7700
7701 tmp = fold_build2_loc (input_location, MODIFY_EXPR,
7702 void_type_node, comp,
7703 build_int_cst (TREE_TYPE (comp), 0));
7704 gfc_add_expr_to_block (&tmpblock, tmp);
7705 }
7706 else if (c->ts.type == BT_CLASS && CLASS_DATA (c)->attr.allocatable
7707 && (!CLASS_DATA (c)->attr.codimension
7708 || purpose != DEALLOCATE_ALLOC_COMP_NO_CAF))
7709 {
7710 /* Allocatable CLASS components. */
7711
7712 /* Add reference to '_data' component. */
7713 tmp = CLASS_DATA (c)->backend_decl;
7714 comp = fold_build3_loc (input_location, COMPONENT_REF,
7715 TREE_TYPE (tmp), comp, tmp, NULL_TREE);
7716
7717 if (GFC_DESCRIPTOR_TYPE_P (TREE_TYPE (comp)))
7718 tmp = gfc_trans_dealloc_allocated (comp,
7719 CLASS_DATA (c)->attr.codimension, NULL);
7720 else
7721 {
7722 tmp = gfc_deallocate_scalar_with_status (comp, NULL_TREE, true, NULL,
7723 CLASS_DATA (c)->ts);
7724 gfc_add_expr_to_block (&tmpblock, tmp);
7725 called_dealloc_with_status = true;
7726
7727 tmp = fold_build2_loc (input_location, MODIFY_EXPR,
7728 void_type_node, comp,
7729 build_int_cst (TREE_TYPE (comp), 0));
7730 }
7731 gfc_add_expr_to_block (&tmpblock, tmp);
7732 }
7733
7734 if (cmp_has_alloc_comps
7735 && !c->attr.pointer
7736 && !called_dealloc_with_status)
7737 {
7738 /* Do not deallocate the components of ultimate pointer
7739 components, or recursively call self if a call has already been made
7740 to gfc_trans_dealloc_allocated. */
7741 comp = fold_build3_loc (input_location, COMPONENT_REF, ctype,
7742 decl, cdecl, NULL_TREE);
7743 rank = c->as ? c->as->rank : 0;
7744 tmp = structure_alloc_comps (c->ts.u.derived, comp, NULL_TREE,
7745 rank, purpose);
7746 gfc_add_expr_to_block (&fnblock, tmp);
7747 }
7748
7749 /* Now add the deallocation of this component. */
7750 gfc_add_block_to_block (&fnblock, &tmpblock);
7751 break;
7752
7753 case NULLIFY_ALLOC_COMP:
7754 if (c->attr.pointer)
7755 continue;
7756 else if (c->attr.allocatable
7757 && (c->attr.dimension|| c->attr.codimension))
7758 {
7759 comp = fold_build3_loc (input_location, COMPONENT_REF, ctype,
7760 decl, cdecl, NULL_TREE);
7761 gfc_conv_descriptor_data_set (&fnblock, comp, null_pointer_node);
7762 }
7763 else if (c->attr.allocatable)
7764 {
7765 /* Allocatable scalar components. */
7766 comp = fold_build3_loc (input_location, COMPONENT_REF, ctype,
7767 decl, cdecl, NULL_TREE);
7768 tmp = fold_build2_loc (input_location, MODIFY_EXPR,
7769 void_type_node, comp,
7770 build_int_cst (TREE_TYPE (comp), 0));
7771 gfc_add_expr_to_block (&fnblock, tmp);
7772 if (gfc_deferred_strlen (c, &comp))
7773 {
7774 comp = fold_build3_loc (input_location, COMPONENT_REF,
7775 TREE_TYPE (comp),
7776 decl, comp, NULL_TREE);
7777 tmp = fold_build2_loc (input_location, MODIFY_EXPR,
7778 TREE_TYPE (comp), comp,
7779 build_int_cst (TREE_TYPE (comp), 0));
7780 gfc_add_expr_to_block (&fnblock, tmp);
7781 }
7782 }
7783 else if (c->ts.type == BT_CLASS && CLASS_DATA (c)->attr.allocatable)
7784 {
7785 /* Allocatable CLASS components. */
7786 comp = fold_build3_loc (input_location, COMPONENT_REF, ctype,
7787 decl, cdecl, NULL_TREE);
7788 /* Add reference to '_data' component. */
7789 tmp = CLASS_DATA (c)->backend_decl;
7790 comp = fold_build3_loc (input_location, COMPONENT_REF,
7791 TREE_TYPE (tmp), comp, tmp, NULL_TREE);
7792 if (GFC_DESCRIPTOR_TYPE_P (TREE_TYPE (comp)))
7793 gfc_conv_descriptor_data_set (&fnblock, comp, null_pointer_node);
7794 else
7795 {
7796 tmp = fold_build2_loc (input_location, MODIFY_EXPR,
7797 void_type_node, comp,
7798 build_int_cst (TREE_TYPE (comp), 0));
7799 gfc_add_expr_to_block (&fnblock, tmp);
7800 }
7801 }
7802 else if (cmp_has_alloc_comps)
7803 {
7804 comp = fold_build3_loc (input_location, COMPONENT_REF, ctype,
7805 decl, cdecl, NULL_TREE);
7806 rank = c->as ? c->as->rank : 0;
7807 tmp = structure_alloc_comps (c->ts.u.derived, comp, NULL_TREE,
7808 rank, purpose);
7809 gfc_add_expr_to_block (&fnblock, tmp);
7810 }
7811 break;
7812
7813 case COPY_ALLOC_COMP_CAF:
7814 if (!c->attr.codimension
7815 && (c->ts.type != BT_CLASS || CLASS_DATA (c)->attr.coarray_comp)
7816 && (c->ts.type != BT_DERIVED
7817 || !c->ts.u.derived->attr.coarray_comp))
7818 continue;
7819
7820 comp = fold_build3_loc (input_location, COMPONENT_REF, ctype, decl,
7821 cdecl, NULL_TREE);
7822 dcmp = fold_build3_loc (input_location, COMPONENT_REF, ctype, dest,
7823 cdecl, NULL_TREE);
7824
7825 if (c->attr.codimension)
7826 {
7827 if (c->ts.type == BT_CLASS)
7828 {
7829 comp = gfc_class_data_get (comp);
7830 dcmp = gfc_class_data_get (dcmp);
7831 }
7832 gfc_conv_descriptor_data_set (&fnblock, dcmp,
7833 gfc_conv_descriptor_data_get (comp));
7834 }
7835 else
7836 {
7837 tmp = structure_alloc_comps (c->ts.u.derived, comp, dcmp,
7838 rank, purpose);
7839 gfc_add_expr_to_block (&fnblock, tmp);
7840
7841 }
7842 break;
7843
7844 case COPY_ALLOC_COMP:
7845 if (c->attr.pointer)
7846 continue;
7847
7848 /* We need source and destination components. */
7849 comp = fold_build3_loc (input_location, COMPONENT_REF, ctype, decl,
7850 cdecl, NULL_TREE);
7851 dcmp = fold_build3_loc (input_location, COMPONENT_REF, ctype, dest,
7852 cdecl, NULL_TREE);
7853 dcmp = fold_convert (TREE_TYPE (comp), dcmp);
7854
7855 if (c->ts.type == BT_CLASS && CLASS_DATA (c)->attr.allocatable)
7856 {
7857 tree ftn_tree;
7858 tree size;
7859 tree dst_data;
7860 tree src_data;
7861 tree null_data;
7862
7863 dst_data = gfc_class_data_get (dcmp);
7864 src_data = gfc_class_data_get (comp);
7865 size = fold_convert (size_type_node, gfc_vtable_size_get (comp));
7866
7867 if (CLASS_DATA (c)->attr.dimension)
7868 {
7869 nelems = gfc_conv_descriptor_size (src_data,
7870 CLASS_DATA (c)->as->rank);
7871 size = fold_build2_loc (input_location, MULT_EXPR,
7872 size_type_node, size,
7873 fold_convert (size_type_node,
7874 nelems));
7875 }
7876 else
7877 nelems = build_int_cst (size_type_node, 1);
7878
7879 if (CLASS_DATA (c)->attr.dimension
7880 || CLASS_DATA (c)->attr.codimension)
7881 {
7882 src_data = gfc_conv_descriptor_data_get (src_data);
7883 dst_data = gfc_conv_descriptor_data_get (dst_data);
7884 }
7885
7886 gfc_init_block (&tmpblock);
7887
7888 /* Coarray components have to have the same allocation status and
7889 shape/type-parameter/effective-type on the LHS and RHS of an
7890 intrinsic assignment. Hence, we did not deallocate them - and
7891 do not allocate them here. */
7892 if (!CLASS_DATA (c)->attr.codimension)
7893 {
7894 ftn_tree = builtin_decl_explicit (BUILT_IN_MALLOC);
7895 tmp = build_call_expr_loc (input_location, ftn_tree, 1, size);
7896 gfc_add_modify (&tmpblock, dst_data,
7897 fold_convert (TREE_TYPE (dst_data), tmp));
7898 }
7899
7900 tmp = gfc_copy_class_to_class (comp, dcmp, nelems);
7901 gfc_add_expr_to_block (&tmpblock, tmp);
7902 tmp = gfc_finish_block (&tmpblock);
7903
7904 gfc_init_block (&tmpblock);
7905 gfc_add_modify (&tmpblock, dst_data,
7906 fold_convert (TREE_TYPE (dst_data),
7907 null_pointer_node));
7908 null_data = gfc_finish_block (&tmpblock);
7909
7910 null_cond = fold_build2_loc (input_location, NE_EXPR,
7911 boolean_type_node, src_data,
7912 null_pointer_node);
7913
7914 gfc_add_expr_to_block (&fnblock, build3_v (COND_EXPR, null_cond,
7915 tmp, null_data));
7916 continue;
7917 }
7918
7919 if (gfc_deferred_strlen (c, &tmp))
7920 {
7921 tree len, size;
7922 len = tmp;
7923 tmp = fold_build3_loc (input_location, COMPONENT_REF,
7924 TREE_TYPE (len),
7925 decl, len, NULL_TREE);
7926 len = fold_build3_loc (input_location, COMPONENT_REF,
7927 TREE_TYPE (len),
7928 dest, len, NULL_TREE);
7929 tmp = fold_build2_loc (input_location, MODIFY_EXPR,
7930 TREE_TYPE (len), len, tmp);
7931 gfc_add_expr_to_block (&fnblock, tmp);
7932 size = size_of_string_in_bytes (c->ts.kind, len);
7933 tmp = duplicate_allocatable (dcmp, comp, ctype, rank,
7934 false, size);
7935 gfc_add_expr_to_block (&fnblock, tmp);
7936 }
7937 else if (c->attr.allocatable && !c->attr.proc_pointer
7938 && !cmp_has_alloc_comps)
7939 {
7940 rank = c->as ? c->as->rank : 0;
7941 if (c->attr.codimension)
7942 tmp = gfc_copy_allocatable_data (dcmp, comp, ctype, rank);
7943 else
7944 tmp = gfc_duplicate_allocatable (dcmp, comp, ctype, rank);
7945 gfc_add_expr_to_block (&fnblock, tmp);
7946 }
7947
7948 if (cmp_has_alloc_comps)
7949 {
7950 rank = c->as ? c->as->rank : 0;
7951 tmp = fold_convert (TREE_TYPE (dcmp), comp);
7952 gfc_add_modify (&fnblock, dcmp, tmp);
7953 tmp = structure_alloc_comps (c->ts.u.derived, comp, dcmp,
7954 rank, purpose);
7955 gfc_add_expr_to_block (&fnblock, tmp);
7956 }
7957 break;
7958
7959 default:
7960 gcc_unreachable ();
7961 break;
7962 }
7963 }
7964
7965 return gfc_finish_block (&fnblock);
7966 }
7967
7968 /* Recursively traverse an object of derived type, generating code to
7969 nullify allocatable components. */
7970
7971 tree
7972 gfc_nullify_alloc_comp (gfc_symbol * der_type, tree decl, int rank)
7973 {
7974 return structure_alloc_comps (der_type, decl, NULL_TREE, rank,
7975 NULLIFY_ALLOC_COMP);
7976 }
7977
7978
7979 /* Recursively traverse an object of derived type, generating code to
7980 deallocate allocatable components. */
7981
7982 tree
7983 gfc_deallocate_alloc_comp (gfc_symbol * der_type, tree decl, int rank)
7984 {
7985 return structure_alloc_comps (der_type, decl, NULL_TREE, rank,
7986 DEALLOCATE_ALLOC_COMP);
7987 }
7988
7989
7990 /* Recursively traverse an object of derived type, generating code to
7991 deallocate allocatable components. But do not deallocate coarrays.
7992 To be used for intrinsic assignment, which may not change the allocation
7993 status of coarrays. */
7994
7995 tree
7996 gfc_deallocate_alloc_comp_no_caf (gfc_symbol * der_type, tree decl, int rank)
7997 {
7998 return structure_alloc_comps (der_type, decl, NULL_TREE, rank,
7999 DEALLOCATE_ALLOC_COMP_NO_CAF);
8000 }
8001
8002
8003 tree
8004 gfc_reassign_alloc_comp_caf (gfc_symbol *der_type, tree decl, tree dest)
8005 {
8006 return structure_alloc_comps (der_type, decl, dest, 0, COPY_ALLOC_COMP_CAF);
8007 }
8008
8009
8010 /* Recursively traverse an object of derived type, generating code to
8011 copy it and its allocatable components. */
8012
8013 tree
8014 gfc_copy_alloc_comp (gfc_symbol * der_type, tree decl, tree dest, int rank)
8015 {
8016 return structure_alloc_comps (der_type, decl, dest, rank, COPY_ALLOC_COMP);
8017 }
8018
8019
8020 /* Recursively traverse an object of derived type, generating code to
8021 copy only its allocatable components. */
8022
8023 tree
8024 gfc_copy_only_alloc_comp (gfc_symbol * der_type, tree decl, tree dest, int rank)
8025 {
8026 return structure_alloc_comps (der_type, decl, dest, rank, COPY_ONLY_ALLOC_COMP);
8027 }
8028
8029
8030 /* Returns the value of LBOUND for an expression. This could be broken out
8031 from gfc_conv_intrinsic_bound but this seemed to be simpler. This is
8032 called by gfc_alloc_allocatable_for_assignment. */
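/* In outline: for a descriptor-backed expression the dimension's lower
   bound is returned when the dimension is the last one of an
   assumed-size array, or when the stride is negative, or when the
   stride is non-negative and ubound >= lbound; otherwise the result is
   1, matching LBOUND's behaviour for empty extents.  */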
8033 static tree
8034 get_std_lbound (gfc_expr *expr, tree desc, int dim, bool assumed_size)
8035 {
8036 tree lbound;
8037 tree ubound;
8038 tree stride;
8039 tree cond, cond1, cond3, cond4;
8040 tree tmp;
8041 gfc_ref *ref;
8042
8043 if (GFC_DESCRIPTOR_TYPE_P (TREE_TYPE (desc)))
8044 {
8045 tmp = gfc_rank_cst[dim];
8046 lbound = gfc_conv_descriptor_lbound_get (desc, tmp);
8047 ubound = gfc_conv_descriptor_ubound_get (desc, tmp);
8048 stride = gfc_conv_descriptor_stride_get (desc, tmp);
8049 cond1 = fold_build2_loc (input_location, GE_EXPR, boolean_type_node,
8050 ubound, lbound);
8051 cond3 = fold_build2_loc (input_location, GE_EXPR, boolean_type_node,
8052 stride, gfc_index_zero_node);
8053 cond3 = fold_build2_loc (input_location, TRUTH_AND_EXPR,
8054 boolean_type_node, cond3, cond1);
8055 cond4 = fold_build2_loc (input_location, LT_EXPR, boolean_type_node,
8056 stride, gfc_index_zero_node);
8057 if (assumed_size)
8058 cond = fold_build2_loc (input_location, EQ_EXPR, boolean_type_node,
8059 tmp, build_int_cst (gfc_array_index_type,
8060 expr->rank - 1));
8061 else
8062 cond = boolean_false_node;
8063
8064 cond1 = fold_build2_loc (input_location, TRUTH_OR_EXPR,
8065 boolean_type_node, cond3, cond4);
8066 cond = fold_build2_loc (input_location, TRUTH_OR_EXPR,
8067 boolean_type_node, cond, cond1);
8068
8069 return fold_build3_loc (input_location, COND_EXPR,
8070 gfc_array_index_type, cond,
8071 lbound, gfc_index_one_node);
8072 }
8073
8074 if (expr->expr_type == EXPR_FUNCTION)
8075 {
8076 /* A conversion function, so use the argument. */
8077 gcc_assert (expr->value.function.isym
8078 && expr->value.function.isym->conversion);
8079 expr = expr->value.function.actual->expr;
8080 }
8081
8082 if (expr->expr_type == EXPR_VARIABLE)
8083 {
8084 tmp = TREE_TYPE (expr->symtree->n.sym->backend_decl);
8085 for (ref = expr->ref; ref; ref = ref->next)
8086 {
8087 if (ref->type == REF_COMPONENT
8088 && ref->u.c.component->as
8089 && ref->next
8090 && ref->next->u.ar.type == AR_FULL)
8091 tmp = TREE_TYPE (ref->u.c.component->backend_decl);
8092 }
8093 return GFC_TYPE_ARRAY_LBOUND(tmp, dim);
8094 }
8095
8096 return gfc_index_one_node;
8097 }
8098
8099
8100 /* Returns true if an expression represents an lhs that can be reallocated
8101 on assignment. */
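/* Illustrative examples: with

       integer, allocatable :: a(:)
       type(t) :: d    ! t has a component "integer, allocatable :: b(:)"

   both "a = ..." and "d%b = ..." have reallocatable lhs's, whereas a
   section such as "a(1:3) = ..." or a pointer lhs does not.  */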
8102
8103 bool
8104 gfc_is_reallocatable_lhs (gfc_expr *expr)
8105 {
8106 gfc_ref * ref;
8107
8108 if (!expr->ref)
8109 return false;
8110
8111 /* An allocatable variable. */
8112 if (expr->symtree->n.sym->attr.allocatable
8113 && expr->ref
8114 && expr->ref->type == REF_ARRAY
8115 && expr->ref->u.ar.type == AR_FULL)
8116 return true;
8117
8118 /* All that can be left are allocatable components. */
8119 if ((expr->symtree->n.sym->ts.type != BT_DERIVED
8120 && expr->symtree->n.sym->ts.type != BT_CLASS)
8121 || !expr->symtree->n.sym->ts.u.derived->attr.alloc_comp)
8122 return false;
8123
8124 /* Find a component ref followed by an array reference. */
8125 for (ref = expr->ref; ref; ref = ref->next)
8126 if (ref->next
8127 && ref->type == REF_COMPONENT
8128 && ref->next->type == REF_ARRAY
8129 && !ref->next->next)
8130 break;
8131
8132 if (!ref)
8133 return false;
8134
8135 /* Return true if valid reallocatable lhs. */
8136 if (ref->u.c.component->attr.allocatable
8137 && ref->next->u.ar.type == AR_FULL)
8138 return true;
8139
8140 return false;
8141 }
8142
8143
8144 /* Allocate the lhs of an assignment to an allocatable array, otherwise
8145 reallocate it. */
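/* For instance, with

       integer, allocatable :: a(:)
       a = [1, 2, 3]

   the code built here tests whether a's data pointer is NULL or its
   shape differs from the rhs; if so the new bounds, strides and size
   are computed and malloc or realloc is called, otherwise the
   (re)allocation is skipped.  */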
8146
8147 tree
8148 gfc_alloc_allocatable_for_assignment (gfc_loopinfo *loop,
8149 gfc_expr *expr1,
8150 gfc_expr *expr2)
8151 {
8152 stmtblock_t realloc_block;
8153 stmtblock_t alloc_block;
8154 stmtblock_t fblock;
8155 gfc_ss *rss;
8156 gfc_ss *lss;
8157 gfc_array_info *linfo;
8158 tree realloc_expr;
8159 tree alloc_expr;
8160 tree size1;
8161 tree size2;
8162 tree array1;
8163 tree cond_null;
8164 tree cond;
8165 tree tmp;
8166 tree tmp2;
8167 tree lbound;
8168 tree ubound;
8169 tree desc;
8170 tree old_desc;
8171 tree desc2;
8172 tree offset;
8173 tree jump_label1;
8174 tree jump_label2;
8175 tree neq_size;
8176 tree lbd;
8177 int n;
8178 int dim;
8179 gfc_array_spec * as;
8180
8181 /* x = f(...) with x allocatable. In this case, expr1 is the rhs.
8182 Find the lhs expression in the loop chain and set expr1 and
8183 expr2 accordingly. */
8184 if (expr1->expr_type == EXPR_FUNCTION && expr2 == NULL)
8185 {
8186 expr2 = expr1;
8187 /* Find the ss for the lhs. */
8188 lss = loop->ss;
8189 for (; lss && lss != gfc_ss_terminator; lss = lss->loop_chain)
8190 if (lss->info->expr && lss->info->expr->expr_type == EXPR_VARIABLE)
8191 break;
8192 if (lss == gfc_ss_terminator)
8193 return NULL_TREE;
8194 expr1 = lss->info->expr;
8195 }
8196
8197 /* Bail out if this is not a valid allocate on assignment. */
8198 if (!gfc_is_reallocatable_lhs (expr1)
8199 || (expr2 && !expr2->rank))
8200 return NULL_TREE;
8201
8202 /* Find the ss for the lhs. */
8203 lss = loop->ss;
8204 for (; lss && lss != gfc_ss_terminator; lss = lss->loop_chain)
8205 if (lss->info->expr == expr1)
8206 break;
8207
8208 if (lss == gfc_ss_terminator)
8209 return NULL_TREE;
8210
8211 linfo = &lss->info->data.array;
8212
8213 /* Find an ss for the rhs. For operator expressions, we see the
8214 ss's for the operands. Any one of these will do. */
8215 rss = loop->ss;
8216 for (; rss && rss != gfc_ss_terminator; rss = rss->loop_chain)
8217 if (rss->info->expr != expr1 && rss != loop->temp_ss)
8218 break;
8219
8220 if (expr2 && rss == gfc_ss_terminator)
8221 return NULL_TREE;
8222
8223 gfc_start_block (&fblock);
8224
8225 /* Since the lhs is allocatable, this must be a descriptor type.
8226 Get the data and array size. */
8227 desc = linfo->descriptor;
8228 gcc_assert (GFC_DESCRIPTOR_TYPE_P (TREE_TYPE (desc)));
8229 array1 = gfc_conv_descriptor_data_get (desc);
8230
8231 /* 7.4.1.3 "If variable is an allocated allocatable variable, it is
8232 deallocated if expr is an array of different shape or any of the
8233 corresponding length type parameter values of variable and expr
8234 differ." This assures F95 compatibility. */
8235 jump_label1 = gfc_build_label_decl (NULL_TREE);
8236 jump_label2 = gfc_build_label_decl (NULL_TREE);
8237
8238 /* Allocate if data is NULL. */
8239 cond_null = fold_build2_loc (input_location, EQ_EXPR, boolean_type_node,
8240 array1, build_int_cst (TREE_TYPE (array1), 0));
8241 tmp = build3_v (COND_EXPR, cond_null,
8242 build1_v (GOTO_EXPR, jump_label1),
8243 build_empty_stmt (input_location));
8244 gfc_add_expr_to_block (&fblock, tmp);
8245
8246 /* Get arrayspec if expr is a full array. */
8247 if (expr2 && expr2->expr_type == EXPR_FUNCTION
8248 && expr2->value.function.isym
8249 && expr2->value.function.isym->conversion)
8250 {
8251 /* For conversion functions, take the arg. */
8252 gfc_expr *arg = expr2->value.function.actual->expr;
8253 as = gfc_get_full_arrayspec_from_expr (arg);
8254 }
8255 else if (expr2)
8256 as = gfc_get_full_arrayspec_from_expr (expr2);
8257 else
8258 as = NULL;
8259
8260 /* If the lhs shape is not the same as the rhs, jump to setting the
8261 bounds and doing the reallocation....... */
8262 for (n = 0; n < expr1->rank; n++)
8263 {
8264 /* Check the shape. */
8265 lbound = gfc_conv_descriptor_lbound_get (desc, gfc_rank_cst[n]);
8266 ubound = gfc_conv_descriptor_ubound_get (desc, gfc_rank_cst[n]);
8267 tmp = fold_build2_loc (input_location, MINUS_EXPR,
8268 gfc_array_index_type,
8269 loop->to[n], loop->from[n]);
8270 tmp = fold_build2_loc (input_location, PLUS_EXPR,
8271 gfc_array_index_type,
8272 tmp, lbound);
8273 tmp = fold_build2_loc (input_location, MINUS_EXPR,
8274 gfc_array_index_type,
8275 tmp, ubound);
8276 cond = fold_build2_loc (input_location, NE_EXPR,
8277 boolean_type_node,
8278 tmp, gfc_index_zero_node);
8279 tmp = build3_v (COND_EXPR, cond,
8280 build1_v (GOTO_EXPR, jump_label1),
8281 build_empty_stmt (input_location));
8282 gfc_add_expr_to_block (&fblock, tmp);
8283 }
8284
8285 /* ....else jump past the (re)alloc code. */
8286 tmp = build1_v (GOTO_EXPR, jump_label2);
8287 gfc_add_expr_to_block (&fblock, tmp);
8288
8289 /* Add the label to start automatic (re)allocation. */
8290 tmp = build1_v (LABEL_EXPR, jump_label1);
8291 gfc_add_expr_to_block (&fblock, tmp);
8292
8293 /* If the lhs has not been allocated, its bounds will not have been
8294 initialized and so its size is set to zero. */
8295 size1 = gfc_create_var (gfc_array_index_type, NULL);
8296 gfc_init_block (&alloc_block);
8297 gfc_add_modify (&alloc_block, size1, gfc_index_zero_node);
8298 gfc_init_block (&realloc_block);
8299 gfc_add_modify (&realloc_block, size1,
8300 gfc_conv_descriptor_size (desc, expr1->rank));
8301 tmp = build3_v (COND_EXPR, cond_null,
8302 gfc_finish_block (&alloc_block),
8303 gfc_finish_block (&realloc_block));
8304 gfc_add_expr_to_block (&fblock, tmp);
8305
8306 /* Get the rhs size and fix it. */
8307 if (expr2)
8308 desc2 = rss->info->data.array.descriptor;
8309 else
8310 desc2 = NULL_TREE;
8311
8312 size2 = gfc_index_one_node;
8313 for (n = 0; n < expr2->rank; n++)
8314 {
8315 tmp = fold_build2_loc (input_location, MINUS_EXPR,
8316 gfc_array_index_type,
8317 loop->to[n], loop->from[n]);
8318 tmp = fold_build2_loc (input_location, PLUS_EXPR,
8319 gfc_array_index_type,
8320 tmp, gfc_index_one_node);
8321 size2 = fold_build2_loc (input_location, MULT_EXPR,
8322 gfc_array_index_type,
8323 tmp, size2);
8324 }
8325 size2 = gfc_evaluate_now (size2, &fblock);
8326
8327 cond = fold_build2_loc (input_location, NE_EXPR, boolean_type_node,
8328 size1, size2);
8329 neq_size = gfc_evaluate_now (cond, &fblock);
8330
8331 /* Deallocation of allocatable components will have to occur on
8332 reallocation. Fix the old descriptor now. */
8333 if ((expr1->ts.type == BT_DERIVED)
8334 && expr1->ts.u.derived->attr.alloc_comp)
8335 old_desc = gfc_evaluate_now (desc, &fblock);
8336 else
8337 old_desc = NULL_TREE;
8338
8339 /* Now modify the lhs descriptor and the associated scalarizer
8340 variables. F2003 7.4.1.3: "If variable is or becomes an
8341 unallocated allocatable variable, then it is allocated with each
8342 deferred type parameter equal to the corresponding type parameters
8343 of expr , with the shape of expr , and with each lower bound equal
8344 to the corresponding element of LBOUND(expr)."
8345 Reuse size1 to keep a dimension-by-dimension track of the
8346 stride of the new array. */
8347 size1 = gfc_index_one_node;
8348 offset = gfc_index_zero_node;
8349
8350 for (n = 0; n < expr2->rank; n++)
8351 {
8352 tmp = fold_build2_loc (input_location, MINUS_EXPR,
8353 gfc_array_index_type,
8354 loop->to[n], loop->from[n]);
8355 tmp = fold_build2_loc (input_location, PLUS_EXPR,
8356 gfc_array_index_type,
8357 tmp, gfc_index_one_node);
8358
8359 lbound = gfc_index_one_node;
8360 ubound = tmp;
8361
8362 if (as)
8363 {
8364 lbd = get_std_lbound (expr2, desc2, n,
8365 as->type == AS_ASSUMED_SIZE);
8366 ubound = fold_build2_loc (input_location,
8367 MINUS_EXPR,
8368 gfc_array_index_type,
8369 ubound, lbound);
8370 ubound = fold_build2_loc (input_location,
8371 PLUS_EXPR,
8372 gfc_array_index_type,
8373 ubound, lbd);
8374 lbound = lbd;
8375 }
8376
8377 gfc_conv_descriptor_lbound_set (&fblock, desc,
8378 gfc_rank_cst[n],
8379 lbound);
8380 gfc_conv_descriptor_ubound_set (&fblock, desc,
8381 gfc_rank_cst[n],
8382 ubound);
8383 gfc_conv_descriptor_stride_set (&fblock, desc,
8384 gfc_rank_cst[n],
8385 size1);
8386 lbound = gfc_conv_descriptor_lbound_get (desc,
8387 gfc_rank_cst[n]);
8388 tmp2 = fold_build2_loc (input_location, MULT_EXPR,
8389 gfc_array_index_type,
8390 lbound, size1);
8391 offset = fold_build2_loc (input_location, MINUS_EXPR,
8392 gfc_array_index_type,
8393 offset, tmp2);
8394 size1 = fold_build2_loc (input_location, MULT_EXPR,
8395 gfc_array_index_type,
8396 tmp, size1);
8397 }
8398
8399 /* Set the lhs descriptor and scalarizer offsets. For rank > 1,
8400 the array offset is saved and the info.offset is used for a
8401 running offset. Use the saved_offset instead. */
8402 tmp = gfc_conv_descriptor_offset (desc);
8403 gfc_add_modify (&fblock, tmp, offset);
8404 if (linfo->saved_offset
8405 && TREE_CODE (linfo->saved_offset) == VAR_DECL)
8406 gfc_add_modify (&fblock, linfo->saved_offset, tmp);
8407
8408 /* Now set the deltas for the lhs. */
8409 for (n = 0; n < expr1->rank; n++)
8410 {
8411 tmp = gfc_conv_descriptor_lbound_get (desc, gfc_rank_cst[n]);
8412 dim = lss->dim[n];
8413 tmp = fold_build2_loc (input_location, MINUS_EXPR,
8414 gfc_array_index_type, tmp,
8415 loop->from[dim]);
8416 if (linfo->delta[dim]
8417 && TREE_CODE (linfo->delta[dim]) == VAR_DECL)
8418 gfc_add_modify (&fblock, linfo->delta[dim], tmp);
8419 }
8420
8421 /* Get the new lhs size in bytes. */
8422 if (expr1->ts.type == BT_CHARACTER && expr1->ts.deferred)
8423 {
8424 if (expr2->ts.deferred)
8425 {
8426 if (TREE_CODE (expr2->ts.u.cl->backend_decl) == VAR_DECL)
8427 tmp = expr2->ts.u.cl->backend_decl;
8428 else
8429 tmp = rss->info->string_length;
8430 }
8431 else
8432 {
8433 tmp = expr2->ts.u.cl->backend_decl;
8434 tmp = fold_convert (TREE_TYPE (expr1->ts.u.cl->backend_decl), tmp);
8435 }
8436
8437 if (expr1->ts.u.cl->backend_decl
8438 && TREE_CODE (expr1->ts.u.cl->backend_decl) == VAR_DECL)
8439 gfc_add_modify (&fblock, expr1->ts.u.cl->backend_decl, tmp);
8440 else
8441 gfc_add_modify (&fblock, lss->info->string_length, tmp);
8442 }
8443 else if (expr1->ts.type == BT_CHARACTER && expr1->ts.u.cl->backend_decl)
8444 {
8445 tmp = TYPE_SIZE_UNIT (TREE_TYPE (gfc_typenode_for_spec (&expr1->ts)));
8446 tmp = fold_build2_loc (input_location, MULT_EXPR,
8447 gfc_array_index_type, tmp,
8448 expr1->ts.u.cl->backend_decl);
8449 }
8450 else
8451 tmp = TYPE_SIZE_UNIT (gfc_typenode_for_spec (&expr1->ts));
8452 tmp = fold_convert (gfc_array_index_type, tmp);
8453 size2 = fold_build2_loc (input_location, MULT_EXPR,
8454 gfc_array_index_type,
8455 tmp, size2);
8456 size2 = fold_convert (size_type_node, size2);
8457 size2 = fold_build2_loc (input_location, MAX_EXPR, size_type_node,
8458 size2, size_one_node);
8459 size2 = gfc_evaluate_now (size2, &fblock);
8460
8461 /* Realloc expression. Note that the scalarizer uses desc.data
8462 in the array reference - (*desc.data)[<element>]. */
8463 gfc_init_block (&realloc_block);
8464
8465 if ((expr1->ts.type == BT_DERIVED)
8466 && expr1->ts.u.derived->attr.alloc_comp)
8467 {
8468 tmp = gfc_deallocate_alloc_comp_no_caf (expr1->ts.u.derived, old_desc,
8469 expr1->rank);
8470 gfc_add_expr_to_block (&realloc_block, tmp);
8471 }
8472
8473 tmp = build_call_expr_loc (input_location,
8474 builtin_decl_explicit (BUILT_IN_REALLOC), 2,
8475 fold_convert (pvoid_type_node, array1),
8476 size2);
8477 gfc_conv_descriptor_data_set (&realloc_block,
8478 desc, tmp);
8479
8480 if ((expr1->ts.type == BT_DERIVED)
8481 && expr1->ts.u.derived->attr.alloc_comp)
8482 {
8483 tmp = gfc_nullify_alloc_comp (expr1->ts.u.derived, desc,
8484 expr1->rank);
8485 gfc_add_expr_to_block (&realloc_block, tmp);
8486 }
8487
8488 realloc_expr = gfc_finish_block (&realloc_block);
8489
8490 /* Only reallocate if sizes are different. */
8491 tmp = build3_v (COND_EXPR, neq_size, realloc_expr,
8492 build_empty_stmt (input_location));
8493 realloc_expr = tmp;
8494
8495
8496 /* Malloc expression. */
8497 gfc_init_block (&alloc_block);
8498 tmp = build_call_expr_loc (input_location,
8499 builtin_decl_explicit (BUILT_IN_MALLOC),
8500 1, size2);
8501 gfc_conv_descriptor_data_set (&alloc_block,
8502 desc, tmp);
8503 tmp = gfc_conv_descriptor_dtype (desc);
8504 gfc_add_modify (&alloc_block, tmp, gfc_get_dtype (TREE_TYPE (desc)));
8505 if ((expr1->ts.type == BT_DERIVED)
8506 && expr1->ts.u.derived->attr.alloc_comp)
8507 {
8508 tmp = gfc_nullify_alloc_comp (expr1->ts.u.derived, desc,
8509 expr1->rank);
8510 gfc_add_expr_to_block (&alloc_block, tmp);
8511 }
8512 alloc_expr = gfc_finish_block (&alloc_block);
8513
8514 /* Malloc if not allocated; realloc otherwise. */
8515 tmp = build_int_cst (TREE_TYPE (array1), 0);
8516 cond = fold_build2_loc (input_location, EQ_EXPR,
8517 boolean_type_node,
8518 array1, tmp);
8519 tmp = build3_v (COND_EXPR, cond, alloc_expr, realloc_expr);
8520 gfc_add_expr_to_block (&fblock, tmp);
8521
8522 /* Make sure that the scalarizer data pointer is updated. */
8523 if (linfo->data
8524 && TREE_CODE (linfo->data) == VAR_DECL)
8525 {
8526 tmp = gfc_conv_descriptor_data_get (desc);
8527 gfc_add_modify (&fblock, linfo->data, tmp);
8528 }
8529
8530 /* Add the exit label. */
8531 tmp = build1_v (LABEL_EXPR, jump_label2);
8532 gfc_add_expr_to_block (&fblock, tmp);
8533
8534 return gfc_finish_block (&fblock);
8535 }
8536
8537
8538 /* NULLIFY an allocatable/pointer array on function entry, free it on exit.
8539 Do likewise, recursively if necessary, with the allocatable components of
8540 derived types. */
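/* For a local variable such as

       real, allocatable :: work(:)

   this emits "work.data = NULL" in the initialization block and the
   matching deallocation (including that of any allocatable
   components) in the cleanup block, unless the symbol is SAVEd, a
   dummy or otherwise handled specially below.  */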
8541
8542 void
8543 gfc_trans_deferred_array (gfc_symbol * sym, gfc_wrapped_block * block)
8544 {
8545 tree type;
8546 tree tmp;
8547 tree descriptor;
8548 stmtblock_t init;
8549 stmtblock_t cleanup;
8550 locus loc;
8551 int rank;
8552 bool sym_has_alloc_comp, has_finalizer;
8553
8554 sym_has_alloc_comp = (sym->ts.type == BT_DERIVED
8555 || sym->ts.type == BT_CLASS)
8556 && sym->ts.u.derived->attr.alloc_comp;
8557 has_finalizer = sym->ts.type == BT_CLASS || sym->ts.type == BT_DERIVED
8558 ? gfc_is_finalizable (sym->ts.u.derived, NULL) : false;
8559
8560 /* Make sure the frontend gets these right. */
8561 gcc_assert (sym->attr.pointer || sym->attr.allocatable || sym_has_alloc_comp
8562 || has_finalizer);
8563
8564 gfc_save_backend_locus (&loc);
8565 gfc_set_backend_locus (&sym->declared_at);
8566 gfc_init_block (&init);
8567
8568 gcc_assert (TREE_CODE (sym->backend_decl) == VAR_DECL
8569 || TREE_CODE (sym->backend_decl) == PARM_DECL);
8570
8571 if (sym->ts.type == BT_CHARACTER
8572 && !INTEGER_CST_P (sym->ts.u.cl->backend_decl))
8573 {
8574 gfc_conv_string_length (sym->ts.u.cl, NULL, &init);
8575 gfc_trans_vla_type_sizes (sym, &init);
8576 }
8577
8578 /* Dummy, use-associated and result variables don't need anything special. */
8579 if (sym->attr.dummy || sym->attr.use_assoc || sym->attr.result)
8580 {
8581 gfc_add_init_cleanup (block, gfc_finish_block (&init), NULL_TREE);
8582 gfc_restore_backend_locus (&loc);
8583 return;
8584 }
8585
8586 descriptor = sym->backend_decl;
8587
8588 /* Although static, derived types with default initializers and
8589 allocatable components must not be nulled wholesale; instead they
8590 are treated component by component. */
8591 if (TREE_STATIC (descriptor) && !sym_has_alloc_comp && !has_finalizer)
8592 {
8593 /* SAVEd variables are not freed on exit. */
8594 gfc_trans_static_array_pointer (sym);
8595
8596 gfc_add_init_cleanup (block, gfc_finish_block (&init), NULL_TREE);
8597 gfc_restore_backend_locus (&loc);
8598 return;
8599 }
8600
8601 /* Get the descriptor type. */
8602 type = TREE_TYPE (sym->backend_decl);
8603
8604 if ((sym_has_alloc_comp || (has_finalizer && sym->ts.type != BT_CLASS))
8605 && !(sym->attr.pointer || sym->attr.allocatable))
8606 {
8607 if (!sym->attr.save
8608 && !(TREE_STATIC (sym->backend_decl) && sym->attr.is_main_program))
8609 {
8610 if (sym->value == NULL
8611 || !gfc_has_default_initializer (sym->ts.u.derived))
8612 {
8613 rank = sym->as ? sym->as->rank : 0;
8614 tmp = gfc_nullify_alloc_comp (sym->ts.u.derived,
8615 descriptor, rank);
8616 gfc_add_expr_to_block (&init, tmp);
8617 }
8618 else
8619 gfc_init_default_dt (sym, &init, false);
8620 }
8621 }
8622 else if (!GFC_DESCRIPTOR_TYPE_P (type))
8623 {
8624 /* If the backend_decl is not a descriptor, we must have a pointer
8625 to one. */
8626 descriptor = build_fold_indirect_ref_loc (input_location,
8627 sym->backend_decl);
8628 type = TREE_TYPE (descriptor);
8629 }
8630
8631 /* NULLIFY the data pointer. */
8632 if (GFC_DESCRIPTOR_TYPE_P (type) && !sym->attr.save)
8633 gfc_conv_descriptor_data_set (&init, descriptor, null_pointer_node);
8634
8635 gfc_restore_backend_locus (&loc);
8636 gfc_init_block (&cleanup);
8637
8638 /* Allocatable arrays need to be freed when they go out of scope.
8639 The allocatable components of pointers must not be touched. */
8640 if (!sym->attr.allocatable && has_finalizer && sym->ts.type != BT_CLASS
8641 && !sym->attr.pointer && !sym->attr.artificial && !sym->attr.save
8642 && !sym->ns->proc_name->attr.is_main_program)
8643 {
8644 gfc_expr *e;
8645 sym->attr.referenced = 1;
8646 e = gfc_lval_expr_from_sym (sym);
8647 gfc_add_finalizer_call (&cleanup, e);
8648 gfc_free_expr (e);
8649 }
8650 else if ((!sym->attr.allocatable || !has_finalizer)
8651 && sym_has_alloc_comp && !(sym->attr.function || sym->attr.result)
8652 && !sym->attr.pointer && !sym->attr.save
8653 && !sym->ns->proc_name->attr.is_main_program)
8654 {
8655 int rank;
8656 rank = sym->as ? sym->as->rank : 0;
8657 tmp = gfc_deallocate_alloc_comp (sym->ts.u.derived, descriptor, rank);
8658 gfc_add_expr_to_block (&cleanup, tmp);
8659 }
8660
8661 if (sym->attr.allocatable && (sym->attr.dimension || sym->attr.codimension)
8662 && !sym->attr.save && !sym->attr.result
8663 && !sym->ns->proc_name->attr.is_main_program)
8664 {
8665 gfc_expr *e;
8666 e = has_finalizer ? gfc_lval_expr_from_sym (sym) : NULL;
8667 tmp = gfc_trans_dealloc_allocated (sym->backend_decl,
8668 sym->attr.codimension, e);
8669 if (e)
8670 gfc_free_expr (e);
8671 gfc_add_expr_to_block (&cleanup, tmp);
8672 }
8673
8674 gfc_add_init_cleanup (block, gfc_finish_block (&init),
8675 gfc_finish_block (&cleanup));
8676 }
8677
8678 /************ Expression Walking Functions ******************/
8679
8680 /* Walk a variable reference.
8681
8682 Possible extension - multiple component subscripts.
8683 x(:,:) = foo%a(:)%b(:)
8684 Transforms to
8685 forall (i=..., j=...)
8686 x(i,j) = foo%a(j)%b(i)
8687 end forall
8688 This adds a fair amount of complexity because you need to deal with more
8689 than one ref. Maybe handle in a similar manner to vector subscripts.
8690 Maybe not worth the effort. */
8691
8692
8693 static gfc_ss *
8694 gfc_walk_variable_expr (gfc_ss * ss, gfc_expr * expr)
8695 {
8696 gfc_ref *ref;
8697
8698 for (ref = expr->ref; ref; ref = ref->next)
8699 if (ref->type == REF_ARRAY && ref->u.ar.type != AR_ELEMENT)
8700 break;
8701
8702 return gfc_walk_array_ref (ss, expr, ref);
8703 }
8704
8705
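/* Walk the reference chain REF belonging to EXPR, adding an array SS
   for each full-array, section or vector-subscripted reference and a
   scalar SS for substring bounds and elemental subscripts.  */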
8706 gfc_ss *
8707 gfc_walk_array_ref (gfc_ss * ss, gfc_expr * expr, gfc_ref * ref)
8708 {
8709 gfc_array_ref *ar;
8710 gfc_ss *newss;
8711 int n;
8712
8713 for (; ref; ref = ref->next)
8714 {
8715 if (ref->type == REF_SUBSTRING)
8716 {
8717 ss = gfc_get_scalar_ss (ss, ref->u.ss.start);
8718 ss = gfc_get_scalar_ss (ss, ref->u.ss.end);
8719 }
8720
8721 /* We're only interested in array sections from now on. */
8722 if (ref->type != REF_ARRAY)
8723 continue;
8724
8725 ar = &ref->u.ar;
8726
8727 switch (ar->type)
8728 {
8729 case AR_ELEMENT:
8730 for (n = ar->dimen - 1; n >= 0; n--)
8731 ss = gfc_get_scalar_ss (ss, ar->start[n]);
8732 break;
8733
8734 case AR_FULL:
8735 newss = gfc_get_array_ss (ss, expr, ar->as->rank, GFC_SS_SECTION);
8736 newss->info->data.array.ref = ref;
8737
8738 /* Make sure array is the same as array(:,:), so that
8739 we don't need to special-case all the time. */
8740 ar->dimen = ar->as->rank;
8741 for (n = 0; n < ar->dimen; n++)
8742 {
8743 ar->dimen_type[n] = DIMEN_RANGE;
8744
8745 gcc_assert (ar->start[n] == NULL);
8746 gcc_assert (ar->end[n] == NULL);
8747 gcc_assert (ar->stride[n] == NULL);
8748 }
8749 ss = newss;
8750 break;
8751
8752 case AR_SECTION:
8753 newss = gfc_get_array_ss (ss, expr, 0, GFC_SS_SECTION);
8754 newss->info->data.array.ref = ref;
8755
8756 /* We add SS chains for all the subscripts in the section. */
8757 for (n = 0; n < ar->dimen; n++)
8758 {
8759 gfc_ss *indexss;
8760
8761 switch (ar->dimen_type[n])
8762 {
8763 case DIMEN_ELEMENT:
8764 /* Add SS for elemental (scalar) subscripts. */
8765 gcc_assert (ar->start[n]);
8766 indexss = gfc_get_scalar_ss (gfc_ss_terminator, ar->start[n]);
8767 indexss->loop_chain = gfc_ss_terminator;
8768 newss->info->data.array.subscript[n] = indexss;
8769 break;
8770
8771 case DIMEN_RANGE:
8772 /* We don't add anything for sections, just remember this
8773 dimension for later. */
8774 newss->dim[newss->dimen] = n;
8775 newss->dimen++;
8776 break;
8777
8778 case DIMEN_VECTOR:
8779 /* Create a GFC_SS_VECTOR index in which we can store
8780 the vector's descriptor. */
8781 indexss = gfc_get_array_ss (gfc_ss_terminator, ar->start[n],
8782 1, GFC_SS_VECTOR);
8783 indexss->loop_chain = gfc_ss_terminator;
8784 newss->info->data.array.subscript[n] = indexss;
8785 newss->dim[newss->dimen] = n;
8786 newss->dimen++;
8787 break;
8788
8789 default:
8790 /* We should know what sort of section it is by now. */
8791 gcc_unreachable ();
8792 }
8793 }
8794 /* We should have at least one non-elemental dimension,
8795 unless we are creating a descriptor for a (scalar) coarray. */
8796 gcc_assert (newss->dimen > 0
8797 || newss->info->data.array.ref->u.ar.as->corank > 0);
8798 ss = newss;
8799 break;
8800
8801 default:
8802 /* We should know what sort of section it is by now. */
8803 gcc_unreachable ();
8804 }
8805
8806 }
8807 return ss;
8808 }
8809
8810
8811 /* Walk an operator expression. If only one operand of a binary expression is
8812 scalar, we must also add the scalar term to the SS chain. */
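/* E.g. for "a + 1.0" with array A, walking "1.0" adds nothing, so a
   GFC_SS_SCALAR entry is created for it here and spliced in next to
   A's section, letting the scalarizer substitute the precomputed
   scalar value inside the loop body.  */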
8813
8814 static gfc_ss *
8815 gfc_walk_op_expr (gfc_ss * ss, gfc_expr * expr)
8816 {
8817 gfc_ss *head;
8818 gfc_ss *head2;
8819
8820 head = gfc_walk_subexpr (ss, expr->value.op.op1);
8821 if (expr->value.op.op2 == NULL)
8822 head2 = head;
8823 else
8824 head2 = gfc_walk_subexpr (head, expr->value.op.op2);
8825
8826 /* All operands are scalar. Pass back and let the caller deal with it. */
8827 if (head2 == ss)
8828 return head2;
8829
8830 /* All operands require scalarization. */
8831 if (head != ss && (expr->value.op.op2 == NULL || head2 != head))
8832 return head2;
8833
8834 /* One of the operands needs scalarization, the other is scalar.
8835 Create a gfc_ss for the scalar expression. */
8836 if (head == ss)
8837 {
8838 /* First operand is scalar. We build the chain in reverse order, so
8839 add the scalar SS after the second operand. */
8840 head = head2;
8841 while (head && head->next != ss)
8842 head = head->next;
8843 /* Check we haven't somehow broken the chain. */
8844 gcc_assert (head);
8845 head->next = gfc_get_scalar_ss (ss, expr->value.op.op1);
8846 }
8847 else /* head2 == head */
8848 {
8849 gcc_assert (head2 == head);
8850 /* Second operand is scalar. */
8851 head2 = gfc_get_scalar_ss (head2, expr->value.op.op2);
8852 }
8853
8854 return head2;
8855 }
8856
8857
8858 /* Reverse a SS chain. */
8859
8860 gfc_ss *
8861 gfc_reverse_ss (gfc_ss * ss)
8862 {
8863 gfc_ss *next;
8864 gfc_ss *head;
8865
8866 gcc_assert (ss != NULL);
8867
8868 head = gfc_ss_terminator;
8869 while (ss != gfc_ss_terminator)
8870 {
8871 next = ss->next;
8872 /* Check we didn't somehow break the chain. */
8873 gcc_assert (next != NULL);
8874 ss->next = head;
8875 head = ss;
8876 ss = next;
8877 }
8878
8879 return (head);
8880 }
8881
8882
8883 /* Given an expression referring to a procedure, return the symbol of its
8884 interface. We can't get the procedure symbol directly as we have to handle
8885 the case of (deferred) type-bound procedures. */
8886
8887 gfc_symbol *
8888 gfc_get_proc_ifc_for_expr (gfc_expr *procedure_ref)
8889 {
8890 gfc_symbol *sym;
8891 gfc_ref *ref;
8892
8893 if (procedure_ref == NULL)
8894 return NULL;
8895
8896 /* Normal procedure case. */
8897 sym = procedure_ref->symtree->n.sym;
8898
8899 /* Typebound procedure case. */
8900 for (ref = procedure_ref->ref; ref; ref = ref->next)
8901 {
8902 if (ref->type == REF_COMPONENT
8903 && ref->u.c.component->attr.proc_pointer)
8904 sym = ref->u.c.component->ts.interface;
8905 else
8906 sym = NULL;
8907 }
8908
8909 return sym;
8910 }
8911
8912
8913 /* Walk the arguments of an elemental function.
8914 PROC_IFC is used to check whether an argument is permitted to be absent. If
8915 it is NULL, we don't do the check and the argument is assumed to be present.
8916 */
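/* For an elemental reference like "f (a, 2)" with array A, an SS is
   built for A as usual while the literal 2 gets a scalar SS of the
   requested TYPE; if every actual argument turns out to be scalar the
   chain is freed again and the call is handled as an ordinary scalar
   expression by the caller.  */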
8917
8918 gfc_ss *
8919 gfc_walk_elemental_function_args (gfc_ss * ss, gfc_actual_arglist *arg,
8920 gfc_symbol *proc_ifc, gfc_ss_type type)
8921 {
8922 gfc_formal_arglist *dummy_arg;
8923 int scalar;
8924 gfc_ss *head;
8925 gfc_ss *tail;
8926 gfc_ss *newss;
8927
8928 head = gfc_ss_terminator;
8929 tail = NULL;
8930
8931 if (proc_ifc)
8932 dummy_arg = gfc_sym_get_dummy_args (proc_ifc);
8933 else
8934 dummy_arg = NULL;
8935
8936 scalar = 1;
8937 for (; arg; arg = arg->next)
8938 {
8939 if (!arg->expr || arg->expr->expr_type == EXPR_NULL)
8940 continue;
8941
8942 newss = gfc_walk_subexpr (head, arg->expr);
8943 if (newss == head)
8944 {
8945 /* Scalar argument. */
8946 gcc_assert (type == GFC_SS_SCALAR || type == GFC_SS_REFERENCE);
8947 newss = gfc_get_scalar_ss (head, arg->expr);
8948 newss->info->type = type;
8949
8950 }
8951 else
8952 scalar = 0;
8953
8954 if (dummy_arg != NULL
8955 && dummy_arg->sym->attr.optional
8956 && arg->expr->expr_type == EXPR_VARIABLE
8957 && (gfc_expr_attr (arg->expr).optional
8958 || gfc_expr_attr (arg->expr).allocatable
8959 || gfc_expr_attr (arg->expr).pointer))
8960 newss->info->can_be_null_ref = true;
8961
8962 head = newss;
8963 if (!tail)
8964 {
8965 tail = head;
8966 while (tail->next != gfc_ss_terminator)
8967 tail = tail->next;
8968 }
8969
8970 if (dummy_arg != NULL)
8971 dummy_arg = dummy_arg->next;
8972 }
8973
8974 if (scalar)
8975 {
8976 /* If all the arguments are scalar we don't need the argument SS. */
8977 gfc_free_ss_chain (head);
8978 /* Pass it back. */
8979 return ss;
8980 }
8981
8982 /* Add it onto the existing chain. */
8983 tail->next = ss;
8984 return head;
8985 }
8986
8987
8988 /* Walk a function call. Scalar functions are passed back, and taken out of
8989 scalarization loops. For elemental functions we walk their arguments.
8990 The result of functions returning arrays is stored in a temporary outside
8991 the loop, so that the function is only called once. Hence we do not need
8992 to walk their arguments. */
8993
8994 static gfc_ss *
8995 gfc_walk_function_expr (gfc_ss * ss, gfc_expr * expr)
8996 {
8997 gfc_intrinsic_sym *isym;
8998 gfc_symbol *sym;
8999 gfc_component *comp = NULL;
9000
9001 isym = expr->value.function.isym;
9002
9003 /* Handle intrinsic functions separately. */
9004 if (isym)
9005 return gfc_walk_intrinsic_function (ss, expr, isym);
9006
9007 sym = expr->value.function.esym;
9008 if (!sym)
9009 sym = expr->symtree->n.sym;
9010
9011 /* A function that returns arrays. */
9012 comp = gfc_get_proc_ptr_comp (expr);
9013 if ((!comp && gfc_return_by_reference (sym) && sym->result->attr.dimension)
9014 || (comp && comp->attr.dimension))
9015 return gfc_get_array_ss (ss, expr, expr->rank, GFC_SS_FUNCTION);
9016
9017 /* Walk the parameters of an elemental function. For now we always pass
9018 by reference. */
9019 if (sym->attr.elemental || (comp && comp->attr.elemental))
9020 return gfc_walk_elemental_function_args (ss, expr->value.function.actual,
9021 gfc_get_proc_ifc_for_expr (expr),
9022 GFC_SS_REFERENCE);
9023
9024 /* Scalar functions are OK as these are evaluated outside the scalarization
9025 loop. Pass back and let the caller deal with it. */
9026 return ss;
9027 }
9028
9029
9030 /* An array temporary is constructed for array constructors. */
9031
9032 static gfc_ss *
9033 gfc_walk_array_constructor (gfc_ss * ss, gfc_expr * expr)
9034 {
9035 return gfc_get_array_ss (ss, expr, expr->rank, GFC_SS_CONSTRUCTOR);
9036 }
9037
9038
9039 /* Walk an expression. Add walked expressions to the head of the SS chain.
9040 A wholly scalar expression will not be added. */
9041
9042 gfc_ss *
9043 gfc_walk_subexpr (gfc_ss * ss, gfc_expr * expr)
9044 {
9045 gfc_ss *head;
9046
9047 switch (expr->expr_type)
9048 {
9049 case EXPR_VARIABLE:
9050 head = gfc_walk_variable_expr (ss, expr);
9051 return head;
9052
9053 case EXPR_OP:
9054 head = gfc_walk_op_expr (ss, expr);
9055 return head;
9056
9057 case EXPR_FUNCTION:
9058 head = gfc_walk_function_expr (ss, expr);
9059 return head;
9060
9061 case EXPR_CONSTANT:
9062 case EXPR_NULL:
9063 case EXPR_STRUCTURE:
9064 /* Pass back and let the caller deal with it. */
9065 break;
9066
9067 case EXPR_ARRAY:
9068 head = gfc_walk_array_constructor (ss, expr);
9069 return head;
9070
9071 case EXPR_SUBSTRING:
9072 /* Pass back and let the caller deal with it. */
9073 break;
9074
9075 default:
9076 internal_error ("bad expression type during walk (%d)",
9077 expr->expr_type);
9078 }
9079 return ss;
9080 }
9081
9082
9083 /* Entry point for expression walking.
9084 A return value equal to the passed chain means this is
9085 a scalar expression. It is up to the caller to take whatever action is
9086 necessary to translate these. */
9087
9088 gfc_ss *
9089 gfc_walk_expr (gfc_expr * expr)
9090 {
9091 gfc_ss *res;
9092
9093 res = gfc_walk_subexpr (gfc_ss_terminator, expr);
9094 return gfc_reverse_ss (res);
9095 }