/****************************************************************************
 *                                                                          *
 *                         GNAT COMPILER COMPONENTS                         *
 *                                                                          *
 *                               U T I L S 2                                *
 *                                                                          *
 *                          C Implementation File                           *
 *                                                                          *
 *          Copyright (C) 1992-2011, Free Software Foundation, Inc.         *
 *                                                                          *
 * GNAT is free software; you can redistribute it and/or modify it under    *
 * terms of the GNU General Public License as published by the Free Soft-   *
 * ware Foundation; either version 3, or (at your option) any later ver-    *
 * sion.  GNAT is distributed in the hope that it will be useful, but WITH- *
 * OUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY   *
 * or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License *
 * for more details.  You should have received a copy of the GNU General    *
 * Public License along with GCC; see the file COPYING3.  If not see        *
 * <http://www.gnu.org/licenses/>.                                          *
 *                                                                          *
 * GNAT was originally developed by the GNAT team at New York University.   *
 * Extensive contributions were provided by Ada Core Technologies Inc.      *
 *                                                                          *
 ****************************************************************************/

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "flags.h"
#include "ggc.h"
#include "output.h"
#include "tree-inline.h"

#include "ada.h"
#include "types.h"
#include "atree.h"
#include "elists.h"
#include "namet.h"
#include "nlists.h"
#include "snames.h"
#include "stringt.h"
#include "uintp.h"
#include "fe.h"
#include "sinfo.h"
#include "einfo.h"
#include "ada-tree.h"
#include "gigi.h"

/* Return the base type of TYPE.  */
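/* For illustration: an Ada subtype of Integer is represented as an
   INTEGER_TYPE whose TREE_TYPE points to the type it constrains, so the
   loop below walks up that chain and returns the type for Integer.  */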

tree
get_base_type (tree type)
{
  if (TREE_CODE (type) == RECORD_TYPE
      && TYPE_JUSTIFIED_MODULAR_P (type))
    type = TREE_TYPE (TYPE_FIELDS (type));

  while (TREE_TYPE (type)
         && (TREE_CODE (type) == INTEGER_TYPE
             || TREE_CODE (type) == REAL_TYPE))
    type = TREE_TYPE (type);

  return type;
}
\f
/* EXP is a GCC tree representing an address.  See if we can find how
   strictly the object at that address is aligned.  Return that alignment
   in bits.  If we don't know anything about the alignment, return 0.  */
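/* Illustrative example: if EXP is the INTEGER_CST 20 used as an address,
   its lowest set bit is 4 (20 & -20 == 4), so the object may be assumed
   to lie on a 4-byte boundary and, with BITS_PER_UNIT == 8, the function
   returns 32.  */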

unsigned int
known_alignment (tree exp)
{
  unsigned int this_alignment;
  unsigned int lhs, rhs;

  switch (TREE_CODE (exp))
    {
    CASE_CONVERT:
    case VIEW_CONVERT_EXPR:
    case NON_LVALUE_EXPR:
      /* Conversions between pointers and integers don't change the alignment
         of the underlying object.  */
      this_alignment = known_alignment (TREE_OPERAND (exp, 0));
      break;

    case COMPOUND_EXPR:
      /* The value of a COMPOUND_EXPR is that of its second operand.  */
      this_alignment = known_alignment (TREE_OPERAND (exp, 1));
      break;

    case PLUS_EXPR:
    case MINUS_EXPR:
      /* If two addresses are added, the alignment of the result is the
         minimum of the two alignments.  */
      lhs = known_alignment (TREE_OPERAND (exp, 0));
      rhs = known_alignment (TREE_OPERAND (exp, 1));
      this_alignment = MIN (lhs, rhs);
      break;

    case POINTER_PLUS_EXPR:
      lhs = known_alignment (TREE_OPERAND (exp, 0));
      rhs = known_alignment (TREE_OPERAND (exp, 1));
      /* If we don't know the alignment of the offset, we assume that
         of the base.  */
      if (rhs == 0)
        this_alignment = lhs;
      else
        this_alignment = MIN (lhs, rhs);
      break;

    case COND_EXPR:
      /* If there is a choice between two values, use the smaller one.  */
      lhs = known_alignment (TREE_OPERAND (exp, 1));
      rhs = known_alignment (TREE_OPERAND (exp, 2));
      this_alignment = MIN (lhs, rhs);
      break;

    case INTEGER_CST:
      {
        unsigned HOST_WIDE_INT c = TREE_INT_CST_LOW (exp);
        /* The first part of this represents the lowest bit in the constant,
           but it is originally in bytes, not bits.  */
        this_alignment = MIN (BITS_PER_UNIT * (c & -c), BIGGEST_ALIGNMENT);
      }
      break;

    case MULT_EXPR:
      /* If we know the alignment of just one side, use it.  Otherwise,
         use the product of the alignments.  */
      lhs = known_alignment (TREE_OPERAND (exp, 0));
      rhs = known_alignment (TREE_OPERAND (exp, 1));

      if (lhs == 0)
        this_alignment = rhs;
      else if (rhs == 0)
        this_alignment = lhs;
      else
        this_alignment = MIN (lhs * rhs, BIGGEST_ALIGNMENT);
      break;

    case BIT_AND_EXPR:
      /* A bit-and expression is as aligned as the maximum alignment of the
         operands.  We typically get here for a complex lhs and a constant
         negative power of two on the rhs to force an explicit alignment, so
         don't bother looking at the lhs.  */
      this_alignment = known_alignment (TREE_OPERAND (exp, 1));
      break;

    case ADDR_EXPR:
      this_alignment = expr_align (TREE_OPERAND (exp, 0));
      break;

    case CALL_EXPR:
      {
        tree t = maybe_inline_call_in_expr (exp);
        if (t)
          return known_alignment (t);
      }

      /* Fall through...  */

    default:
      /* For other pointer expressions, we assume that the pointed-to object
         is at least as aligned as the pointed-to type.  Beware that we can
         have a dummy type here (e.g. a Taft Amendment type), for which the
         alignment is meaningless and should be ignored.  */
      if (POINTER_TYPE_P (TREE_TYPE (exp))
          && !TYPE_IS_DUMMY_P (TREE_TYPE (TREE_TYPE (exp))))
        this_alignment = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
      else
        this_alignment = 0;
      break;
    }

  return this_alignment;
}
\f
/* We have a comparison or assignment operation on two types, T1 and T2, which
   are either both array types or both record types.  T1 is assumed to be for
   the left-hand side operand, and T2 for the right-hand side.  Return the
   type that both operands should be converted to for the operation, if any.
   Otherwise return zero.  */

static tree
find_common_type (tree t1, tree t2)
{
  /* ??? As of today, various constructs lead here with types of different
     sizes even when both are constant (e.g. tagged types, packable vs regular
     component types, padded vs unpadded types, ...).  While some of these
     would better be handled upstream (types should be made consistent before
     calling into build_binary_op), some others are really expected and we
     have to be careful.  */

  /* We must avoid writing more than what the target can hold if this is for
     an assignment and the case of tagged types is handled in build_binary_op,
     so we use the lhs type if it is known to be smaller or of constant size
     and the rhs type is not, whatever the modes.  We also force t1 in case of
     constant size equality to minimize occurrences of view conversions on the
     lhs of an assignment, except for the case of record types with a variant
     part on the lhs but not on the rhs to make the conversion simpler.  */
  if (TREE_CONSTANT (TYPE_SIZE (t1))
      && (!TREE_CONSTANT (TYPE_SIZE (t2))
          || tree_int_cst_lt (TYPE_SIZE (t1), TYPE_SIZE (t2))
          || (TYPE_SIZE (t1) == TYPE_SIZE (t2)
              && !(TREE_CODE (t1) == RECORD_TYPE
                   && TREE_CODE (t2) == RECORD_TYPE
                   && get_variant_part (t1) != NULL_TREE
                   && get_variant_part (t2) == NULL_TREE))))
    return t1;

  /* Otherwise, if the lhs type is non-BLKmode, use it.  Note that we know
     that we will not have any alignment problems since, if we did, the
     non-BLKmode type could not have been used.  */
  if (TYPE_MODE (t1) != BLKmode)
    return t1;

  /* If the rhs type is of constant size, use it whatever the modes.  At
     this point it is known to be smaller, or of constant size and the
     lhs type is not.  */
  if (TREE_CONSTANT (TYPE_SIZE (t2)))
    return t2;

  /* Otherwise, if the rhs type is non-BLKmode, use it.  */
  if (TYPE_MODE (t2) != BLKmode)
    return t2;

  /* In this case, both types have variable size and BLKmode.  It's
     probably best to leave the "type mismatch" because changing it
     could cause a bad self-referential reference.  */
  return NULL_TREE;
}
\f
/* Return an expression tree representing an equality comparison of A1 and A2,
   two objects of type ARRAY_TYPE.  The result should be of type RESULT_TYPE.

   Two arrays are equal in one of two ways: (1) if both have zero length in
   some dimension (not necessarily the same dimension) or (2) if the lengths
   in each dimension are equal and the data is equal.  We perform the length
   tests in as efficient a manner as possible.  */
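/* Illustrative shape of the expression built below for one-dimensional
   arrays A1 and A2 with lengths L1 and L2 (names are ours, not GCC's):

     (L1 == L2 && A1 == A2) || (L1 == 0 && L2 == 0)

   with each zero-length test taking the form best suited to what is
   known at compile time about the corresponding dimension.  */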

static tree
compare_arrays (location_t loc, tree result_type, tree a1, tree a2)
{
  tree result = convert (result_type, boolean_true_node);
  tree a1_is_null = convert (result_type, boolean_false_node);
  tree a2_is_null = convert (result_type, boolean_false_node);
  tree t1 = TREE_TYPE (a1);
  tree t2 = TREE_TYPE (a2);
  bool a1_side_effects_p = TREE_SIDE_EFFECTS (a1);
  bool a2_side_effects_p = TREE_SIDE_EFFECTS (a2);
  bool length_zero_p = false;

  /* If either operand has side-effects, they have to be evaluated only once
     in spite of the multiple references to the operand in the comparison.  */
  if (a1_side_effects_p)
    a1 = gnat_protect_expr (a1);

  if (a2_side_effects_p)
    a2 = gnat_protect_expr (a2);

  /* Process each dimension separately and compare the lengths.  If any
     dimension has a length known to be zero, set LENGTH_ZERO_P to true
     in order to suppress the comparison of the data at the end.  */
  while (TREE_CODE (t1) == ARRAY_TYPE && TREE_CODE (t2) == ARRAY_TYPE)
    {
      tree lb1 = TYPE_MIN_VALUE (TYPE_DOMAIN (t1));
      tree ub1 = TYPE_MAX_VALUE (TYPE_DOMAIN (t1));
      tree lb2 = TYPE_MIN_VALUE (TYPE_DOMAIN (t2));
      tree ub2 = TYPE_MAX_VALUE (TYPE_DOMAIN (t2));
      tree length1 = size_binop (PLUS_EXPR, size_binop (MINUS_EXPR, ub1, lb1),
                                 size_one_node);
      tree length2 = size_binop (PLUS_EXPR, size_binop (MINUS_EXPR, ub2, lb2),
                                 size_one_node);
      tree comparison, this_a1_is_null, this_a2_is_null;

      /* If the length of the first array is a constant, swap our operands
         unless the length of the second array is the constant zero.  */
      if (TREE_CODE (length1) == INTEGER_CST && !integer_zerop (length2))
        {
          tree tem;
          bool btem;

          tem = a1, a1 = a2, a2 = tem;
          tem = t1, t1 = t2, t2 = tem;
          tem = lb1, lb1 = lb2, lb2 = tem;
          tem = ub1, ub1 = ub2, ub2 = tem;
          tem = length1, length1 = length2, length2 = tem;
          tem = a1_is_null, a1_is_null = a2_is_null, a2_is_null = tem;
          btem = a1_side_effects_p, a1_side_effects_p = a2_side_effects_p,
          a2_side_effects_p = btem;
        }

      /* If the length of the second array is the constant zero, we can just
         use the original stored bounds for the first array and see whether
         last < first holds.  */
      if (integer_zerop (length2))
        {
          length_zero_p = true;

          ub1 = TYPE_MAX_VALUE (TYPE_INDEX_TYPE (TYPE_DOMAIN (t1)));
          lb1 = TYPE_MIN_VALUE (TYPE_INDEX_TYPE (TYPE_DOMAIN (t1)));

          comparison = fold_build2_loc (loc, LT_EXPR, result_type, ub1, lb1);
          comparison = SUBSTITUTE_PLACEHOLDER_IN_EXPR (comparison, a1);
          if (EXPR_P (comparison))
            SET_EXPR_LOCATION (comparison, loc);

          this_a1_is_null = comparison;
          this_a2_is_null = convert (result_type, boolean_true_node);
        }

      /* Otherwise, if the length is some other constant value, we know that
         this dimension in the second array cannot be superflat, so we can
         just use its length computed from the actual stored bounds.  */
      else if (TREE_CODE (length2) == INTEGER_CST)
        {
          tree bt;

          ub1 = TYPE_MAX_VALUE (TYPE_INDEX_TYPE (TYPE_DOMAIN (t1)));
          lb1 = TYPE_MIN_VALUE (TYPE_INDEX_TYPE (TYPE_DOMAIN (t1)));
          /* Note that we know that UB2 and LB2 are constant and hence
             cannot contain a PLACEHOLDER_EXPR.  */
          ub2 = TYPE_MAX_VALUE (TYPE_INDEX_TYPE (TYPE_DOMAIN (t2)));
          lb2 = TYPE_MIN_VALUE (TYPE_INDEX_TYPE (TYPE_DOMAIN (t2)));
          bt = get_base_type (TREE_TYPE (ub1));

          comparison
            = fold_build2_loc (loc, EQ_EXPR, result_type,
                               build_binary_op (MINUS_EXPR, bt, ub1, lb1),
                               build_binary_op (MINUS_EXPR, bt, ub2, lb2));
          comparison = SUBSTITUTE_PLACEHOLDER_IN_EXPR (comparison, a1);
          if (EXPR_P (comparison))
            SET_EXPR_LOCATION (comparison, loc);

          this_a1_is_null
            = fold_build2_loc (loc, LT_EXPR, result_type, ub1, lb1);

          this_a2_is_null = convert (result_type, boolean_false_node);
        }

      /* Otherwise, compare the computed lengths.  */
      else
        {
          length1 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (length1, a1);
          length2 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (length2, a2);

          comparison
            = fold_build2_loc (loc, EQ_EXPR, result_type, length1, length2);

          /* If the length expression is of the form (cond ? val : 0), assume
             that cond is equivalent to (length != 0).  That's guaranteed by
             construction of the array types in gnat_to_gnu_entity.  */
          if (TREE_CODE (length1) == COND_EXPR
              && integer_zerop (TREE_OPERAND (length1, 2)))
            this_a1_is_null
              = invert_truthvalue_loc (loc, TREE_OPERAND (length1, 0));
          else
            this_a1_is_null = fold_build2_loc (loc, EQ_EXPR, result_type,
                                               length1, size_zero_node);

          /* Likewise for the second array.  */
          if (TREE_CODE (length2) == COND_EXPR
              && integer_zerop (TREE_OPERAND (length2, 2)))
            this_a2_is_null
              = invert_truthvalue_loc (loc, TREE_OPERAND (length2, 0));
          else
            this_a2_is_null = fold_build2_loc (loc, EQ_EXPR, result_type,
                                               length2, size_zero_node);
        }

      /* Append expressions for this dimension to the final expressions.  */
      result = build_binary_op (TRUTH_ANDIF_EXPR, result_type,
                                result, comparison);

      a1_is_null = build_binary_op (TRUTH_ORIF_EXPR, result_type,
                                    this_a1_is_null, a1_is_null);

      a2_is_null = build_binary_op (TRUTH_ORIF_EXPR, result_type,
                                    this_a2_is_null, a2_is_null);

      t1 = TREE_TYPE (t1);
      t2 = TREE_TYPE (t2);
    }

  /* Unless the length of some dimension is known to be zero, compare the
     data in the array.  */
  if (!length_zero_p)
    {
      tree type = find_common_type (TREE_TYPE (a1), TREE_TYPE (a2));
      tree comparison;

      if (type)
        {
          a1 = convert (type, a1);
          a2 = convert (type, a2);
        }

      comparison = fold_build2_loc (loc, EQ_EXPR, result_type, a1, a2);

      result
        = build_binary_op (TRUTH_ANDIF_EXPR, result_type, result, comparison);
    }

  /* The result is also true if both lengths are zero.  */
  result = build_binary_op (TRUTH_ORIF_EXPR, result_type,
                            build_binary_op (TRUTH_ANDIF_EXPR, result_type,
                                             a1_is_null, a2_is_null),
                            result);

  /* If either operand has side-effects, they have to be evaluated before
     starting the comparison above since the place they would be otherwise
     evaluated could be wrong.  */
  if (a1_side_effects_p)
    result = build2 (COMPOUND_EXPR, result_type, a1, result);

  if (a2_side_effects_p)
    result = build2 (COMPOUND_EXPR, result_type, a2, result);

  return result;
}

/* Return an expression tree representing an equality comparison of P1 and P2,
   two objects of fat pointer type.  The result should be of type RESULT_TYPE.

   Two fat pointers are equal in one of two ways: (1) if both have a null
   pointer to the array or (2) if they contain the same pair of pointers.
   We perform the comparison in as efficient a manner as possible.  */
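/* Note on the null case: if both array pointers turn out to be null at run
   time, SAME_ARRAY and P1_ARRAY_IS_NULL built below are both true, so the
   returned expression is true without the bounds being examined.  */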

static tree
compare_fat_pointers (location_t loc, tree result_type, tree p1, tree p2)
{
  tree p1_array, p2_array, p1_bounds, p2_bounds, same_array, same_bounds;
  tree p1_array_is_null, p2_array_is_null;

  /* If either operand has side-effects, they have to be evaluated only once
     in spite of the multiple references to the operand in the comparison.  */
  p1 = gnat_protect_expr (p1);
  p2 = gnat_protect_expr (p2);

  /* The constant folder doesn't fold fat pointer types so we do it here.  */
  if (TREE_CODE (p1) == CONSTRUCTOR)
    p1_array = VEC_index (constructor_elt, CONSTRUCTOR_ELTS (p1), 0)->value;
  else
    p1_array = build_component_ref (p1, NULL_TREE,
                                    TYPE_FIELDS (TREE_TYPE (p1)), true);

  p1_array_is_null
    = fold_build2_loc (loc, EQ_EXPR, result_type, p1_array,
                       fold_convert_loc (loc, TREE_TYPE (p1_array),
                                         null_pointer_node));

  if (TREE_CODE (p2) == CONSTRUCTOR)
    p2_array = VEC_index (constructor_elt, CONSTRUCTOR_ELTS (p2), 0)->value;
  else
    p2_array = build_component_ref (p2, NULL_TREE,
                                    TYPE_FIELDS (TREE_TYPE (p2)), true);

  p2_array_is_null
    = fold_build2_loc (loc, EQ_EXPR, result_type, p2_array,
                       fold_convert_loc (loc, TREE_TYPE (p2_array),
                                         null_pointer_node));

  /* If one of the pointers to the array is null, just compare the other.  */
  if (integer_zerop (p1_array))
    return p2_array_is_null;
  else if (integer_zerop (p2_array))
    return p1_array_is_null;

  /* Otherwise, do the fully-fledged comparison.  */
  same_array
    = fold_build2_loc (loc, EQ_EXPR, result_type, p1_array, p2_array);

  if (TREE_CODE (p1) == CONSTRUCTOR)
    p1_bounds = VEC_index (constructor_elt, CONSTRUCTOR_ELTS (p1), 1)->value;
  else
    p1_bounds
      = build_component_ref (p1, NULL_TREE,
                             DECL_CHAIN (TYPE_FIELDS (TREE_TYPE (p1))), true);

  if (TREE_CODE (p2) == CONSTRUCTOR)
    p2_bounds = VEC_index (constructor_elt, CONSTRUCTOR_ELTS (p2), 1)->value;
  else
    p2_bounds
      = build_component_ref (p2, NULL_TREE,
                             DECL_CHAIN (TYPE_FIELDS (TREE_TYPE (p2))), true);

  same_bounds
    = fold_build2_loc (loc, EQ_EXPR, result_type, p1_bounds, p2_bounds);

  /* P1_ARRAY == P2_ARRAY && (P1_ARRAY == NULL || P1_BOUNDS == P2_BOUNDS).  */
  return build_binary_op (TRUTH_ANDIF_EXPR, result_type, same_array,
                          build_binary_op (TRUTH_ORIF_EXPR, result_type,
                                           p1_array_is_null, same_bounds));
}
\f
/* Compute the result of applying OP_CODE to LHS and RHS, where both are of
   type TYPE.  We know that TYPE is a modular type with a nonbinary
   modulus.  */
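/* Worked example (illustrative): for a modular type with modulus 7, the
   addition LHS + 3 is first rewritten below as the subtraction LHS - 4
   (because 7 - 3 = 4 and the two are congruent modulo 7), and the modulus
   is added back if the intermediate result is negative.  A multiplication
   such as 5 * 6 = 30 is instead reduced with a TRUNC_MOD_EXPR, giving
   30 mod 7 = 2, in the narrowest type that can hold the product.  */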

static tree
nonbinary_modular_operation (enum tree_code op_code, tree type, tree lhs,
                             tree rhs)
{
  tree modulus = TYPE_MODULUS (type);
  unsigned int needed_precision = tree_floor_log2 (modulus) + 1;
  unsigned int precision;
  bool unsignedp = true;
  tree op_type = type;
  tree result;

  /* If this is an addition of a constant, convert it to a subtraction
     of a constant since we can do that faster.  */
  if (op_code == PLUS_EXPR && TREE_CODE (rhs) == INTEGER_CST)
    {
      rhs = fold_build2 (MINUS_EXPR, type, modulus, rhs);
      op_code = MINUS_EXPR;
    }

  /* For the logical operations, we only need PRECISION bits.  For addition
     and subtraction, we need one more, and for multiplication we need twice
     as many.  But we never want to make a size smaller than our own.  */
  if (op_code == PLUS_EXPR || op_code == MINUS_EXPR)
    needed_precision += 1;
  else if (op_code == MULT_EXPR)
    needed_precision *= 2;

  precision = MAX (needed_precision, TYPE_PRECISION (op_type));

  /* Unsigned will do for everything but subtraction.  */
  if (op_code == MINUS_EXPR)
    unsignedp = false;

  /* If our type is the wrong signedness or isn't wide enough, make a new
     type and convert both our operands to it.  */
  if (TYPE_PRECISION (op_type) < precision
      || TYPE_UNSIGNED (op_type) != unsignedp)
    {
      /* Copy the node so we ensure it can be modified to make it modular.  */
      op_type = copy_node (gnat_type_for_size (precision, unsignedp));
      modulus = convert (op_type, modulus);
      SET_TYPE_MODULUS (op_type, modulus);
      TYPE_MODULAR_P (op_type) = 1;
      lhs = convert (op_type, lhs);
      rhs = convert (op_type, rhs);
    }

  /* Do the operation, then we'll fix it up.  */
  result = fold_build2 (op_code, op_type, lhs, rhs);

  /* For multiplication, we have no choice but to do a full modulus
     operation.  However, we want to do this in the narrowest
     possible size.  */
  if (op_code == MULT_EXPR)
    {
      tree div_type = copy_node (gnat_type_for_size (needed_precision, 1));
      modulus = convert (div_type, modulus);
      SET_TYPE_MODULUS (div_type, modulus);
      TYPE_MODULAR_P (div_type) = 1;
      result = convert (op_type,
                        fold_build2 (TRUNC_MOD_EXPR, div_type,
                                     convert (div_type, result), modulus));
    }

  /* For subtraction, add the modulus back if we are negative.  */
  else if (op_code == MINUS_EXPR)
    {
      result = gnat_protect_expr (result);
      result = fold_build3 (COND_EXPR, op_type,
                            fold_build2 (LT_EXPR, boolean_type_node, result,
                                         convert (op_type, integer_zero_node)),
                            fold_build2 (PLUS_EXPR, op_type, result, modulus),
                            result);
    }

  /* For the other operations, subtract the modulus if we are >= it.  */
  else
    {
      result = gnat_protect_expr (result);
      result = fold_build3 (COND_EXPR, op_type,
                            fold_build2 (GE_EXPR, boolean_type_node,
                                         result, modulus),
                            fold_build2 (MINUS_EXPR, op_type,
                                         result, modulus),
                            result);
    }

  return convert (type, result);
}
\f
/* Make a binary operation of kind OP_CODE.  RESULT_TYPE is the type
   desired for the result.  Usually the operation is to be performed
   in that type.  For INIT_EXPR and MODIFY_EXPR, RESULT_TYPE must be
   NULL_TREE.  For ARRAY_REF, RESULT_TYPE may be NULL_TREE, in which
   case the type to be used will be derived from the operands.

   This function is very much unlike the ones for C and C++ since we
   have already done any type conversion and matching required.  All we
   have to do here is validate the work done by SEM and handle subtypes.  */
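/* Illustrative usage sketch (operand names are ours): adding two Integer
   operands A and B goes through build_binary_op (PLUS_EXPR, T, A, B) with
   T the GCC type for Integer; the default case below asserts that T is
   the common base type of A and B and converts both operands to it.  */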

tree
build_binary_op (enum tree_code op_code, tree result_type,
                 tree left_operand, tree right_operand)
{
  tree left_type = TREE_TYPE (left_operand);
  tree right_type = TREE_TYPE (right_operand);
  tree left_base_type = get_base_type (left_type);
  tree right_base_type = get_base_type (right_type);
  tree operation_type = result_type;
  tree best_type = NULL_TREE;
  tree modulus, result;
  bool has_side_effects = false;

  if (operation_type
      && TREE_CODE (operation_type) == RECORD_TYPE
      && TYPE_JUSTIFIED_MODULAR_P (operation_type))
    operation_type = TREE_TYPE (TYPE_FIELDS (operation_type));

  if (operation_type
      && TREE_CODE (operation_type) == INTEGER_TYPE
      && TYPE_EXTRA_SUBTYPE_P (operation_type))
    operation_type = get_base_type (operation_type);

  modulus = (operation_type
             && TREE_CODE (operation_type) == INTEGER_TYPE
             && TYPE_MODULAR_P (operation_type)
             ? TYPE_MODULUS (operation_type) : NULL_TREE);

  switch (op_code)
    {
    case INIT_EXPR:
    case MODIFY_EXPR:
#ifdef ENABLE_CHECKING
      gcc_assert (result_type == NULL_TREE);
#endif
      /* If there were integral or pointer conversions on the LHS, remove
         them; we'll be putting them back below if needed.  Likewise for
         conversions between array and record types, except for justified
         modular types.  But don't do this if the right operand is not
         BLKmode (for packed arrays) unless we are not changing the mode.  */
      while ((CONVERT_EXPR_P (left_operand)
              || TREE_CODE (left_operand) == VIEW_CONVERT_EXPR)
             && (((INTEGRAL_TYPE_P (left_type)
                   || POINTER_TYPE_P (left_type))
                  && (INTEGRAL_TYPE_P (TREE_TYPE
                                       (TREE_OPERAND (left_operand, 0)))
                      || POINTER_TYPE_P (TREE_TYPE
                                         (TREE_OPERAND (left_operand, 0)))))
                 || (((TREE_CODE (left_type) == RECORD_TYPE
                       && !TYPE_JUSTIFIED_MODULAR_P (left_type))
                      || TREE_CODE (left_type) == ARRAY_TYPE)
                     && ((TREE_CODE (TREE_TYPE
                                     (TREE_OPERAND (left_operand, 0)))
                          == RECORD_TYPE)
                         || (TREE_CODE (TREE_TYPE
                                        (TREE_OPERAND (left_operand, 0)))
                             == ARRAY_TYPE))
                     && (TYPE_MODE (right_type) == BLKmode
                         || (TYPE_MODE (left_type)
                             == TYPE_MODE (TREE_TYPE
                                           (TREE_OPERAND
                                            (left_operand, 0))))))))
        {
          left_operand = TREE_OPERAND (left_operand, 0);
          left_type = TREE_TYPE (left_operand);
        }

      /* If a class-wide type may be involved, force use of the RHS type.  */
      if ((TREE_CODE (right_type) == RECORD_TYPE
           || TREE_CODE (right_type) == UNION_TYPE)
          && TYPE_ALIGN_OK (right_type))
        operation_type = right_type;

      /* If we are copying between padded objects with compatible types, use
         the padded view of the objects, this is very likely more efficient.
         Likewise for a padded object that is assigned a constructor, if we
         can convert the constructor to the inner type, to avoid putting a
         VIEW_CONVERT_EXPR on the LHS.  But don't do so if we wouldn't have
         actually copied anything.  */
      else if (TYPE_IS_PADDING_P (left_type)
               && TREE_CONSTANT (TYPE_SIZE (left_type))
               && ((TREE_CODE (right_operand) == COMPONENT_REF
                    && TYPE_IS_PADDING_P
                       (TREE_TYPE (TREE_OPERAND (right_operand, 0)))
                    && gnat_types_compatible_p
                       (left_type,
                        TREE_TYPE (TREE_OPERAND (right_operand, 0))))
                   || (TREE_CODE (right_operand) == CONSTRUCTOR
                       && !CONTAINS_PLACEHOLDER_P
                           (DECL_SIZE (TYPE_FIELDS (left_type)))))
               && !integer_zerop (TYPE_SIZE (right_type)))
        operation_type = left_type;

      /* If we have a call to a function that returns an unconstrained type
         with default discriminant on the RHS, use the RHS type (which is
         padded) as we cannot compute the size of the actual assignment.  */
      else if (TREE_CODE (right_operand) == CALL_EXPR
               && TYPE_IS_PADDING_P (right_type)
               && CONTAINS_PLACEHOLDER_P
                  (TYPE_SIZE (TREE_TYPE (TYPE_FIELDS (right_type)))))
        operation_type = right_type;

      /* Find the best type to use for copying between aggregate types.  */
      else if (((TREE_CODE (left_type) == ARRAY_TYPE
                 && TREE_CODE (right_type) == ARRAY_TYPE)
                || (TREE_CODE (left_type) == RECORD_TYPE
                    && TREE_CODE (right_type) == RECORD_TYPE))
               && (best_type = find_common_type (left_type, right_type)))
        operation_type = best_type;

      /* Otherwise use the LHS type.  */
      else
        operation_type = left_type;

      /* Ensure everything on the LHS is valid.  If we have a field reference,
         strip anything that get_inner_reference can handle.  Then remove any
         conversions between types having the same code and mode.  And mark
         VIEW_CONVERT_EXPRs with TREE_ADDRESSABLE.  When done, we must have
         either an INDIRECT_REF, a NULL_EXPR or a DECL node.  */
      result = left_operand;
      while (true)
        {
          tree restype = TREE_TYPE (result);

          if (TREE_CODE (result) == COMPONENT_REF
              || TREE_CODE (result) == ARRAY_REF
              || TREE_CODE (result) == ARRAY_RANGE_REF)
            while (handled_component_p (result))
              result = TREE_OPERAND (result, 0);
          else if (TREE_CODE (result) == REALPART_EXPR
                   || TREE_CODE (result) == IMAGPART_EXPR
                   || (CONVERT_EXPR_P (result)
                       && (((TREE_CODE (restype)
                             == TREE_CODE (TREE_TYPE
                                           (TREE_OPERAND (result, 0))))
                            && (TYPE_MODE (TREE_TYPE
                                           (TREE_OPERAND (result, 0)))
                                == TYPE_MODE (restype)))
                           || TYPE_ALIGN_OK (restype))))
            result = TREE_OPERAND (result, 0);
          else if (TREE_CODE (result) == VIEW_CONVERT_EXPR)
            {
              TREE_ADDRESSABLE (result) = 1;
              result = TREE_OPERAND (result, 0);
            }
          else
            break;
        }

      gcc_assert (TREE_CODE (result) == INDIRECT_REF
                  || TREE_CODE (result) == NULL_EXPR
                  || DECL_P (result));

      /* Convert the right operand to the operation type unless it is
         either already of the correct type or if the type involves a
         placeholder, since the RHS may not have the same record type.  */
      if (operation_type != right_type
          && !CONTAINS_PLACEHOLDER_P (TYPE_SIZE (operation_type)))
        {
          right_operand = convert (operation_type, right_operand);
          right_type = operation_type;
        }

      /* If the left operand is not of the same type as the operation
         type, wrap it up in a VIEW_CONVERT_EXPR.  */
      if (left_type != operation_type)
        left_operand = unchecked_convert (operation_type, left_operand, false);

      has_side_effects = true;
      modulus = NULL_TREE;
      break;

    case ARRAY_REF:
      if (!operation_type)
        operation_type = TREE_TYPE (left_type);

      /* ... fall through ... */

    case ARRAY_RANGE_REF:
      /* First look through conversions between type variants.  Note that
         this changes neither the operation type nor the type domain.  */
      if (TREE_CODE (left_operand) == VIEW_CONVERT_EXPR
          && TYPE_MAIN_VARIANT (TREE_TYPE (TREE_OPERAND (left_operand, 0)))
             == TYPE_MAIN_VARIANT (left_type))
        {
          left_operand = TREE_OPERAND (left_operand, 0);
          left_type = TREE_TYPE (left_operand);
        }

      /* For a range, make sure the element type is consistent.  */
      if (op_code == ARRAY_RANGE_REF
          && TREE_TYPE (operation_type) != TREE_TYPE (left_type))
        operation_type = build_array_type (TREE_TYPE (left_type),
                                           TYPE_DOMAIN (operation_type));

      /* Then convert the right operand to its base type.  This will prevent
         unneeded sign conversions when sizetype is wider than integer.  */
      right_operand = convert (right_base_type, right_operand);
      right_operand = convert_to_index_type (right_operand);
      modulus = NULL_TREE;
      break;

    case TRUTH_ANDIF_EXPR:
    case TRUTH_ORIF_EXPR:
    case TRUTH_AND_EXPR:
    case TRUTH_OR_EXPR:
    case TRUTH_XOR_EXPR:
#ifdef ENABLE_CHECKING
      gcc_assert (TREE_CODE (get_base_type (result_type)) == BOOLEAN_TYPE);
#endif
      operation_type = left_base_type;
      left_operand = convert (operation_type, left_operand);
      right_operand = convert (operation_type, right_operand);
      break;

    case GE_EXPR:
    case LE_EXPR:
    case GT_EXPR:
    case LT_EXPR:
    case EQ_EXPR:
    case NE_EXPR:
#ifdef ENABLE_CHECKING
      gcc_assert (TREE_CODE (get_base_type (result_type)) == BOOLEAN_TYPE);
#endif
      /* If either operand is a NULL_EXPR, just return a new one.  */
      if (TREE_CODE (left_operand) == NULL_EXPR)
        return build2 (op_code, result_type,
                       build1 (NULL_EXPR, integer_type_node,
                               TREE_OPERAND (left_operand, 0)),
                       integer_zero_node);

      else if (TREE_CODE (right_operand) == NULL_EXPR)
        return build2 (op_code, result_type,
                       build1 (NULL_EXPR, integer_type_node,
                               TREE_OPERAND (right_operand, 0)),
                       integer_zero_node);

      /* If either object is a justified modular type, get the
         fields from within.  */
      if (TREE_CODE (left_type) == RECORD_TYPE
          && TYPE_JUSTIFIED_MODULAR_P (left_type))
        {
          left_operand = convert (TREE_TYPE (TYPE_FIELDS (left_type)),
                                  left_operand);
          left_type = TREE_TYPE (left_operand);
          left_base_type = get_base_type (left_type);
        }

      if (TREE_CODE (right_type) == RECORD_TYPE
          && TYPE_JUSTIFIED_MODULAR_P (right_type))
        {
          right_operand = convert (TREE_TYPE (TYPE_FIELDS (right_type)),
                                   right_operand);
          right_type = TREE_TYPE (right_operand);
          right_base_type = get_base_type (right_type);
        }

      /* If both objects are arrays, compare them specially.  */
      if ((TREE_CODE (left_type) == ARRAY_TYPE
           || (TREE_CODE (left_type) == INTEGER_TYPE
               && TYPE_HAS_ACTUAL_BOUNDS_P (left_type)))
          && (TREE_CODE (right_type) == ARRAY_TYPE
              || (TREE_CODE (right_type) == INTEGER_TYPE
                  && TYPE_HAS_ACTUAL_BOUNDS_P (right_type))))
        {
          result = compare_arrays (input_location,
                                   result_type, left_operand, right_operand);
          if (op_code == NE_EXPR)
            result = invert_truthvalue_loc (EXPR_LOCATION (result), result);
          else
            gcc_assert (op_code == EQ_EXPR);

          return result;
        }

      /* Otherwise, the base types must be the same, unless they are both fat
         pointer types or record types.  In the latter case, use the best type
         and convert both operands to that type.  */
      if (left_base_type != right_base_type)
        {
          if (TYPE_IS_FAT_POINTER_P (left_base_type)
              && TYPE_IS_FAT_POINTER_P (right_base_type))
            {
              gcc_assert (TYPE_MAIN_VARIANT (left_base_type)
                          == TYPE_MAIN_VARIANT (right_base_type));
              best_type = left_base_type;
            }

          else if (TREE_CODE (left_base_type) == RECORD_TYPE
                   && TREE_CODE (right_base_type) == RECORD_TYPE)
            {
              /* The only way this is permitted is if both types have the same
                 name.  In that case, one of them must not be self-referential.
                 Use it as the best type.  Even better with a fixed size.  */
              gcc_assert (TYPE_NAME (left_base_type)
                          && TYPE_NAME (left_base_type)
                             == TYPE_NAME (right_base_type));

              if (TREE_CONSTANT (TYPE_SIZE (left_base_type)))
                best_type = left_base_type;
              else if (TREE_CONSTANT (TYPE_SIZE (right_base_type)))
                best_type = right_base_type;
              else if (!CONTAINS_PLACEHOLDER_P (TYPE_SIZE (left_base_type)))
                best_type = left_base_type;
              else if (!CONTAINS_PLACEHOLDER_P (TYPE_SIZE (right_base_type)))
                best_type = right_base_type;
              else
                gcc_unreachable ();
            }

          else
            gcc_unreachable ();

          left_operand = convert (best_type, left_operand);
          right_operand = convert (best_type, right_operand);
        }
      else
        {
          left_operand = convert (left_base_type, left_operand);
          right_operand = convert (right_base_type, right_operand);
        }

      /* If both objects are fat pointers, compare them specially.  */
      if (TYPE_IS_FAT_POINTER_P (left_base_type))
        {
          result
            = compare_fat_pointers (input_location,
                                    result_type, left_operand, right_operand);
          if (op_code == NE_EXPR)
            result = invert_truthvalue_loc (EXPR_LOCATION (result), result);
          else
            gcc_assert (op_code == EQ_EXPR);

          return result;
        }

      modulus = NULL_TREE;
      break;

    case LSHIFT_EXPR:
    case RSHIFT_EXPR:
    case LROTATE_EXPR:
    case RROTATE_EXPR:
      /* The RHS of a shift can be any type.  Also, ignore any modulus
         (we used to abort, but this is needed for unchecked conversion
         to modular types).  Otherwise, processing is the same as normal.  */
      gcc_assert (operation_type == left_base_type);
      modulus = NULL_TREE;
      left_operand = convert (operation_type, left_operand);
      break;

    case BIT_AND_EXPR:
    case BIT_IOR_EXPR:
    case BIT_XOR_EXPR:
      /* For binary modulus, if the inputs are in range, so are the
         outputs.  */
      if (modulus && integer_pow2p (modulus))
        modulus = NULL_TREE;
      goto common;

    case COMPLEX_EXPR:
      gcc_assert (TREE_TYPE (result_type) == left_base_type
                  && TREE_TYPE (result_type) == right_base_type);
      left_operand = convert (left_base_type, left_operand);
      right_operand = convert (right_base_type, right_operand);
      break;

    case TRUNC_DIV_EXPR:  case TRUNC_MOD_EXPR:
    case CEIL_DIV_EXPR:   case CEIL_MOD_EXPR:
    case FLOOR_DIV_EXPR:  case FLOOR_MOD_EXPR:
    case ROUND_DIV_EXPR:  case ROUND_MOD_EXPR:
      /* These always produce results lower than either operand.  */
      modulus = NULL_TREE;
      goto common;

    case POINTER_PLUS_EXPR:
      gcc_assert (operation_type == left_base_type
                  && sizetype == right_base_type);
      left_operand = convert (operation_type, left_operand);
      right_operand = convert (sizetype, right_operand);
      break;

    case PLUS_NOMOD_EXPR:
    case MINUS_NOMOD_EXPR:
      if (op_code == PLUS_NOMOD_EXPR)
        op_code = PLUS_EXPR;
      else
        op_code = MINUS_EXPR;
      modulus = NULL_TREE;

      /* ... fall through ... */

    case PLUS_EXPR:
    case MINUS_EXPR:
      /* Avoid doing arithmetic in ENUMERAL_TYPE or BOOLEAN_TYPE like the
         other compilers.  Contrary to C, Ada doesn't allow arithmetic in
         these types but can generate addition/subtraction for Succ/Pred.  */
      if (operation_type
          && (TREE_CODE (operation_type) == ENUMERAL_TYPE
              || TREE_CODE (operation_type) == BOOLEAN_TYPE))
        operation_type = left_base_type = right_base_type
          = gnat_type_for_mode (TYPE_MODE (operation_type),
                                TYPE_UNSIGNED (operation_type));

      /* ... fall through ... */

    default:
    common:
      /* The result type should be the same as the base types of both
         operands (and they should be the same).  Convert everything to
         the result type.  */
      gcc_assert (operation_type == left_base_type
                  && left_base_type == right_base_type);
      left_operand = convert (operation_type, left_operand);
      right_operand = convert (operation_type, right_operand);
    }

  if (modulus && !integer_pow2p (modulus))
    {
      result = nonbinary_modular_operation (op_code, operation_type,
                                            left_operand, right_operand);
      modulus = NULL_TREE;
    }
  /* If either operand is a NULL_EXPR, just return a new one.  */
  else if (TREE_CODE (left_operand) == NULL_EXPR)
    return build1 (NULL_EXPR, operation_type, TREE_OPERAND (left_operand, 0));
  else if (TREE_CODE (right_operand) == NULL_EXPR)
    return build1 (NULL_EXPR, operation_type, TREE_OPERAND (right_operand, 0));
  else if (op_code == ARRAY_REF || op_code == ARRAY_RANGE_REF)
    result = fold (build4 (op_code, operation_type, left_operand,
                           right_operand, NULL_TREE, NULL_TREE));
  else if (op_code == INIT_EXPR || op_code == MODIFY_EXPR)
    result = build2 (op_code, void_type_node, left_operand, right_operand);
  else
    result
      = fold_build2 (op_code, operation_type, left_operand, right_operand);

  if (TREE_CONSTANT (result))
    ;
  else if (op_code == ARRAY_REF || op_code == ARRAY_RANGE_REF)
    {
      TREE_THIS_NOTRAP (result) = 1;
      if (TYPE_VOLATILE (operation_type))
        TREE_THIS_VOLATILE (result) = 1;
    }
  else
    TREE_CONSTANT (result)
      |= (TREE_CONSTANT (left_operand) && TREE_CONSTANT (right_operand));

  TREE_SIDE_EFFECTS (result) |= has_side_effects;

  /* If we are working with modular types, perform the MOD operation
     if something above hasn't eliminated the need for it.  */
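  /* E.g. (illustrative) for a binary modular type with modulus 16, an
     addition 12 + 7 = 19 is reduced here to 19 mod 16 = 3; with an
     unsigned operation type the FLOOR_MOD_EXPR amounts to masking with
     the modulus minus one.  */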
  if (modulus)
    result = fold_build2 (FLOOR_MOD_EXPR, operation_type, result,
                          convert (operation_type, modulus));

  if (result_type && result_type != operation_type)
    result = convert (result_type, result);

  return result;
}
\f
/* Similar, but for unary operations.  */

tree
build_unary_op (enum tree_code op_code, tree result_type, tree operand)
{
  tree type = TREE_TYPE (operand);
  tree base_type = get_base_type (type);
  tree operation_type = result_type;
  tree result;

  if (operation_type
      && TREE_CODE (operation_type) == RECORD_TYPE
      && TYPE_JUSTIFIED_MODULAR_P (operation_type))
    operation_type = TREE_TYPE (TYPE_FIELDS (operation_type));

  if (operation_type
      && TREE_CODE (operation_type) == INTEGER_TYPE
      && TYPE_EXTRA_SUBTYPE_P (operation_type))
    operation_type = get_base_type (operation_type);

  switch (op_code)
    {
    case REALPART_EXPR:
    case IMAGPART_EXPR:
      if (!operation_type)
        result_type = operation_type = TREE_TYPE (type);
      else
        gcc_assert (result_type == TREE_TYPE (type));

      result = fold_build1 (op_code, operation_type, operand);
      break;

    case TRUTH_NOT_EXPR:
#ifdef ENABLE_CHECKING
      gcc_assert (TREE_CODE (get_base_type (result_type)) == BOOLEAN_TYPE);
#endif
      result = invert_truthvalue_loc (EXPR_LOCATION (operand), operand);
      /* When not optimizing, fold the result as invert_truthvalue_loc
         doesn't fold the result of comparisons.  This is intended to undo
         the trick used for boolean rvalues in gnat_to_gnu.  */
      if (!optimize)
        result = fold (result);
      break;

    case ATTR_ADDR_EXPR:
    case ADDR_EXPR:
      switch (TREE_CODE (operand))
        {
        case INDIRECT_REF:
        case UNCONSTRAINED_ARRAY_REF:
          result = TREE_OPERAND (operand, 0);

          /* Make sure the type here is a pointer, not a reference.
             GCC wants pointer types for function addresses.  */
          if (!result_type)
            result_type = build_pointer_type (type);

          /* If the underlying object can alias everything, propagate the
             property since we are effectively retrieving the object.  */
          if (POINTER_TYPE_P (TREE_TYPE (result))
              && TYPE_REF_CAN_ALIAS_ALL (TREE_TYPE (result)))
            {
              if (TREE_CODE (result_type) == POINTER_TYPE
                  && !TYPE_REF_CAN_ALIAS_ALL (result_type))
                result_type
                  = build_pointer_type_for_mode (TREE_TYPE (result_type),
                                                 TYPE_MODE (result_type),
                                                 true);
              else if (TREE_CODE (result_type) == REFERENCE_TYPE
                       && !TYPE_REF_CAN_ALIAS_ALL (result_type))
                result_type
                  = build_reference_type_for_mode (TREE_TYPE (result_type),
                                                   TYPE_MODE (result_type),
                                                   true);
            }
          break;

        case NULL_EXPR:
          result = operand;
          TREE_TYPE (result) = type = build_pointer_type (type);
          break;

        case COMPOUND_EXPR:
          /* Fold a compound expression if it has unconstrained array type
             since the middle-end cannot handle it.  But we don't do it in
             the general case because it may introduce aliasing issues if the
             first operand is an indirect assignment and the second operand
             the corresponding address, e.g. for an allocator.  */
          if (TREE_CODE (type) == UNCONSTRAINED_ARRAY_TYPE)
            {
              result = build_unary_op (ADDR_EXPR, result_type,
                                       TREE_OPERAND (operand, 1));
              result = build2 (COMPOUND_EXPR, TREE_TYPE (result),
                               TREE_OPERAND (operand, 0), result);
              break;
            }
          goto common;

        case ARRAY_REF:
        case ARRAY_RANGE_REF:
        case COMPONENT_REF:
        case BIT_FIELD_REF:
          /* If this is for 'Address, find the address of the prefix and add
             the offset to the field.  Otherwise, do this the normal way.  */
          if (op_code == ATTR_ADDR_EXPR)
            {
              HOST_WIDE_INT bitsize;
              HOST_WIDE_INT bitpos;
              tree offset, inner;
              enum machine_mode mode;
              int unsignedp, volatilep;

              inner = get_inner_reference (operand, &bitsize, &bitpos, &offset,
                                           &mode, &unsignedp, &volatilep,
                                           false);

              /* If INNER is a padding type whose field has a self-referential
                 size, convert to that inner type.  We know the offset is zero
                 and we need to have that type visible.  */
              if (TYPE_IS_PADDING_P (TREE_TYPE (inner))
                  && CONTAINS_PLACEHOLDER_P
                     (TYPE_SIZE (TREE_TYPE (TYPE_FIELDS
                                            (TREE_TYPE (inner))))))
                inner = convert (TREE_TYPE (TYPE_FIELDS (TREE_TYPE (inner))),
                                 inner);

              /* Compute the offset as a byte offset from INNER.  */
              if (!offset)
                offset = size_zero_node;

              offset = size_binop (PLUS_EXPR, offset,
                                   size_int (bitpos / BITS_PER_UNIT));

              /* Take the address of INNER, convert the offset to void *, and
                 add them.  It will later be converted to the desired result
                 type, if any.  */
              inner = build_unary_op (ADDR_EXPR, NULL_TREE, inner);
              inner = convert (ptr_void_type_node, inner);
              result = build_binary_op (POINTER_PLUS_EXPR, ptr_void_type_node,
                                        inner, offset);
              result = convert (build_pointer_type (TREE_TYPE (operand)),
                                result);
              break;
            }
          goto common;

        case CONSTRUCTOR:
          /* If this is just a constructor for a padded record, we can
             just take the address of the single field and convert it to
             a pointer to our type.  */
          if (TYPE_IS_PADDING_P (type))
            {
              result = VEC_index (constructor_elt,
                                  CONSTRUCTOR_ELTS (operand),
                                  0)->value;
              result = convert (build_pointer_type (TREE_TYPE (operand)),
                                build_unary_op (ADDR_EXPR, NULL_TREE, result));
              break;
            }

          goto common;

        case NOP_EXPR:
          if (AGGREGATE_TYPE_P (type)
              && AGGREGATE_TYPE_P (TREE_TYPE (TREE_OPERAND (operand, 0))))
            return build_unary_op (ADDR_EXPR, result_type,
                                   TREE_OPERAND (operand, 0));

          /* ... fall through ... */

        case VIEW_CONVERT_EXPR:
          /* If this is just a variant conversion or if the conversion doesn't
             change the mode, get the result type from this type and go down.
             This is needed for conversions of CONST_DECLs, to eventually get
             to the address of their CORRESPONDING_VARs.  */
          if ((TYPE_MAIN_VARIANT (type)
               == TYPE_MAIN_VARIANT (TREE_TYPE (TREE_OPERAND (operand, 0))))
              || (TYPE_MODE (type) != BLKmode
                  && (TYPE_MODE (type)
                      == TYPE_MODE (TREE_TYPE (TREE_OPERAND (operand, 0))))))
            return build_unary_op (ADDR_EXPR,
                                   (result_type ? result_type
                                    : build_pointer_type (type)),
                                   TREE_OPERAND (operand, 0));
          goto common;

        case CONST_DECL:
          operand = DECL_CONST_CORRESPONDING_VAR (operand);

          /* ... fall through ... */

        default:
        common:

          /* If we are taking the address of a padded record whose field
             contains a template, take the address of the template.  */
          if (TYPE_IS_PADDING_P (type)
              && TREE_CODE (TREE_TYPE (TYPE_FIELDS (type))) == RECORD_TYPE
              && TYPE_CONTAINS_TEMPLATE_P (TREE_TYPE (TYPE_FIELDS (type))))
            {
              type = TREE_TYPE (TYPE_FIELDS (type));
              operand = convert (type, operand);
            }

          gnat_mark_addressable (operand);
          result = build_fold_addr_expr (operand);
        }

      TREE_CONSTANT (result) = staticp (operand) || TREE_CONSTANT (operand);
      break;

    case INDIRECT_REF:
      {
        tree t = remove_conversions (operand, false);
        bool can_never_be_null = DECL_P (t) && DECL_CAN_NEVER_BE_NULL_P (t);

        /* If TYPE is a thin pointer, first convert to the fat pointer.  */
        if (TYPE_IS_THIN_POINTER_P (type)
            && TYPE_UNCONSTRAINED_ARRAY (TREE_TYPE (type)))
          {
            operand = convert
              (TREE_TYPE (TYPE_UNCONSTRAINED_ARRAY (TREE_TYPE (type))),
               operand);
            type = TREE_TYPE (operand);
          }

        /* If we want to refer to an unconstrained array, use the appropriate
           expression.  But this will never survive down to the back-end.  */
        if (TYPE_IS_FAT_POINTER_P (type))
          {
            result = build1 (UNCONSTRAINED_ARRAY_REF,
                             TYPE_UNCONSTRAINED_ARRAY (type), operand);
            TREE_READONLY (result)
              = TYPE_READONLY (TYPE_UNCONSTRAINED_ARRAY (type));
          }

        /* If we are dereferencing an ADDR_EXPR, return its operand.  */
        else if (TREE_CODE (operand) == ADDR_EXPR)
          result = TREE_OPERAND (operand, 0);

        /* Otherwise, build and fold the indirect reference.  */
        else
          {
            result = build_fold_indirect_ref (operand);
            TREE_READONLY (result) = TYPE_READONLY (TREE_TYPE (type));
          }

        if (!TYPE_IS_FAT_POINTER_P (type) && TYPE_VOLATILE (TREE_TYPE (type)))
          {
            TREE_SIDE_EFFECTS (result) = 1;
            if (TREE_CODE (result) == INDIRECT_REF)
              TREE_THIS_VOLATILE (result) = TYPE_VOLATILE (TREE_TYPE (result));
          }

        if ((TREE_CODE (result) == INDIRECT_REF
             || TREE_CODE (result) == UNCONSTRAINED_ARRAY_REF)
            && can_never_be_null)
          TREE_THIS_NOTRAP (result) = 1;

        break;
      }

    case NEGATE_EXPR:
    case BIT_NOT_EXPR:
      {
        tree modulus = ((operation_type
                         && TREE_CODE (operation_type) == INTEGER_TYPE
                         && TYPE_MODULAR_P (operation_type))
                        ? TYPE_MODULUS (operation_type) : NULL_TREE);
        int mod_pow2 = modulus && integer_pow2p (modulus);

        /* If this is a modular type, there are various possibilities
           depending on the operation and whether the modulus is a
           power of two or not.  */

        if (modulus)
          {
            gcc_assert (operation_type == base_type);
            operand = convert (operation_type, operand);

            /* The fastest in the negate case for binary modulus is
               the straightforward code; the TRUNC_MOD_EXPR below
               is an AND operation.  */
            if (op_code == NEGATE_EXPR && mod_pow2)
              result = fold_build2 (TRUNC_MOD_EXPR, operation_type,
                                    fold_build1 (NEGATE_EXPR, operation_type,
                                                 operand),
                                    modulus);

            /* For the nonbinary negate case, return zero for a zero operand,
               else return the modulus minus the operand.  If the modulus
               is a power of two minus one, we can do the subtraction
               as an XOR since it is equivalent and faster on most machines. */
            else if (op_code == NEGATE_EXPR && !mod_pow2)
              {
                if (integer_pow2p (fold_build2 (PLUS_EXPR, operation_type,
                                                modulus,
                                                convert (operation_type,
                                                         integer_one_node))))
                  result = fold_build2 (BIT_XOR_EXPR, operation_type,
                                        operand, modulus);
                else
                  result = fold_build2 (MINUS_EXPR, operation_type,
                                        modulus, operand);

                result = fold_build3 (COND_EXPR, operation_type,
                                      fold_build2 (NE_EXPR,
                                                   boolean_type_node,
                                                   operand,
                                                   convert
                                                   (operation_type,
                                                    integer_zero_node)),
                                      result, operand);
              }
            else
              {
                /* For the NOT cases, we need a constant equal to
                   the modulus minus one.  For a binary modulus, we
                   XOR against the constant and subtract the operand from
                   that constant for a nonbinary modulus.  */

                tree cnst = fold_build2 (MINUS_EXPR, operation_type, modulus,
                                         convert (operation_type,
                                                  integer_one_node));

                if (mod_pow2)
                  result = fold_build2 (BIT_XOR_EXPR, operation_type,
                                        operand, cnst);
                else
                  result = fold_build2 (MINUS_EXPR, operation_type,
                                        cnst, operand);
              }

            break;
          }
      }

      /* ... fall through ... */

    default:
      gcc_assert (operation_type == base_type);
      result = fold_build1 (op_code, operation_type,
                            convert (operation_type, operand));
    }

  if (result_type && TREE_TYPE (result) != result_type)
    result = convert (result_type, result);

  return result;
}
\f
/* Similar, but for COND_EXPR.  */

tree
build_cond_expr (tree result_type, tree condition_operand,
                 tree true_operand, tree false_operand)
{
  bool addr_p = false;
  tree result;

  /* The front-end verified that the result, true and false operands have
     the same base type.  Convert everything to the result type.  */
  true_operand = convert (result_type, true_operand);
  false_operand = convert (result_type, false_operand);

  /* If the result type is unconstrained, take the address of the operands and
     then dereference the result.  Likewise if the result type is passed by
     reference, but this is natively handled in the gimplifier.  */
  if (TREE_CODE (result_type) == UNCONSTRAINED_ARRAY_TYPE
      || CONTAINS_PLACEHOLDER_P (TYPE_SIZE (result_type)))
    {
      result_type = build_pointer_type (result_type);
      true_operand = build_unary_op (ADDR_EXPR, result_type, true_operand);
      false_operand = build_unary_op (ADDR_EXPR, result_type, false_operand);
      addr_p = true;
    }

  result = fold_build3 (COND_EXPR, result_type, condition_operand,
                        true_operand, false_operand);

  /* If we have a common SAVE_EXPR (possibly surrounded by arithmetic)
     in both arms, make sure it gets evaluated by moving it ahead of the
     conditional expression.  This is necessary because it is evaluated
     in only one place at run time and would otherwise be uninitialized
     in one of the arms.  */
  true_operand = skip_simple_arithmetic (true_operand);
  false_operand = skip_simple_arithmetic (false_operand);

  if (true_operand == false_operand && TREE_CODE (true_operand) == SAVE_EXPR)
    result = build2 (COMPOUND_EXPR, result_type, true_operand, result);

  if (addr_p)
    result = build_unary_op (INDIRECT_REF, NULL_TREE, result);

  return result;
}

/* Similar, but for COMPOUND_EXPR.  */

tree
build_compound_expr (tree result_type, tree stmt_operand, tree expr_operand)
{
  bool addr_p = false;
  tree result;

  /* If the result type is unconstrained, take the address of the operand and
     then dereference the result.  Likewise if the result type is passed by
     reference, but this is natively handled in the gimplifier.  */
  if (TREE_CODE (result_type) == UNCONSTRAINED_ARRAY_TYPE
      || CONTAINS_PLACEHOLDER_P (TYPE_SIZE (result_type)))
    {
      result_type = build_pointer_type (result_type);
      expr_operand = build_unary_op (ADDR_EXPR, result_type, expr_operand);
      addr_p = true;
    }

  result = fold_build2 (COMPOUND_EXPR, result_type, stmt_operand,
                        expr_operand);

  if (addr_p)
    result = build_unary_op (INDIRECT_REF, NULL_TREE, result);

  return result;
}
\f
/* Conveniently construct a function call expression.  FNDECL names the
   function to be called, N is the number of arguments, and the "..."
   parameters are the argument expressions.  Unlike build_call_expr
   this doesn't fold the call, hence it will always return a CALL_EXPR.  */
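/* For instance, the raise machinery below uses calls of the form
   build_call_n_expr (fndecl, 2, filename_address, line_number_cst),
   where the two argument names are ours, to build the unfolded
   CALL_EXPR "fndecl (filename, line)".  */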

tree
build_call_n_expr (tree fndecl, int n, ...)
{
  va_list ap;
  tree fntype = TREE_TYPE (fndecl);
  tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);

  va_start (ap, n);
  fn = build_call_valist (TREE_TYPE (fntype), fn, n, ap);
  va_end (ap);
  return fn;
}
\f
/* Call a function that raises an exception and pass the line number and file
   name, if requested.  MSG says which exception function to call.

   GNAT_NODE is the gnat node conveying the source location for which the
   error should be signaled, or Empty in which case the error is signaled on
   the current ref_file_name/input_line.

   KIND says which kind of exception this is for
   (N_Raise_{Constraint,Storage,Program}_Error).  */
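/* In the non-goto case, the call built below has the overall shape
   "FNDECL (&filename, line)", where the file name falls back to
   ref_filename and the line to input_line when no usable source
   location can be taken from GNAT_NODE.  */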
1519
1520 tree
1521 build_call_raise (int msg, Node_Id gnat_node, char kind)
1522 {
1523 tree fndecl = gnat_raise_decls[msg];
1524 tree label = get_exception_label (kind);
1525 tree filename;
1526 int line_number;
1527 const char *str;
1528 int len;
1529
1530 /* If this is to be done as a goto, handle that case. */
1531 if (label)
1532 {
1533 Entity_Id local_raise = Get_Local_Raise_Call_Entity ();
1534 tree gnu_result = build1 (GOTO_EXPR, void_type_node, label);
1535
1536 /* If Local_Raise is present, generate
1537 Local_Raise (exception'Identity); */
1538 if (Present (local_raise))
1539 {
1540 tree gnu_local_raise
1541 = gnat_to_gnu_entity (local_raise, NULL_TREE, 0);
1542 tree gnu_exception_entity
1543 = gnat_to_gnu_entity (Get_RT_Exception_Entity (msg), NULL_TREE, 0);
1544 tree gnu_call
1545 = build_call_n_expr (gnu_local_raise, 1,
1546 build_unary_op (ADDR_EXPR, NULL_TREE,
1547 gnu_exception_entity));
1548
1549 gnu_result = build2 (COMPOUND_EXPR, void_type_node,
1550 gnu_call, gnu_result);}
1551
1552 return gnu_result;
1553 }
1554
1555 str
1556 = (Debug_Flag_NN || Exception_Locations_Suppressed)
1557 ? ""
1558 : (gnat_node != Empty && Sloc (gnat_node) != No_Location)
1559 ? IDENTIFIER_POINTER
1560 (get_identifier (Get_Name_String
1561 (Debug_Source_Name
1562 (Get_Source_File_Index (Sloc (gnat_node))))))
1563 : ref_filename;
1564
1565 len = strlen (str);
1566 filename = build_string (len, str);
1567 line_number
1568 = (gnat_node != Empty && Sloc (gnat_node) != No_Location)
1569 ? Get_Logical_Line_Number (Sloc(gnat_node)) : input_line;
1570
1571 TREE_TYPE (filename) = build_array_type (unsigned_char_type_node,
1572 build_index_type (size_int (len)));
1573
1574 return
1575 build_call_n_expr (fndecl, 2,
1576 build1 (ADDR_EXPR,
1577 build_pointer_type (unsigned_char_type_node),
1578 filename),
1579 build_int_cst (NULL_TREE, line_number));
1580 }
1581
1582 /* Similar to build_call_raise, for an index or range check exception as
1583 determined by MSG, with extra information generated of the form
1584 "INDEX out of range FIRST..LAST". */
1585
1586 tree
1587 build_call_raise_range (int msg, Node_Id gnat_node,
1588 tree index, tree first, tree last)
1589 {
1590 tree fndecl = gnat_raise_decls_ext[msg];
1591 tree filename;
1592 int line_number, column_number;
1593 const char *str;
1594 int len;
1595
1596 str
1597 = (Debug_Flag_NN || Exception_Locations_Suppressed)
1598 ? ""
1599 : (gnat_node != Empty && Sloc (gnat_node) != No_Location)
1600 ? IDENTIFIER_POINTER
1601 (get_identifier (Get_Name_String
1602 (Debug_Source_Name
1603 (Get_Source_File_Index (Sloc (gnat_node))))))
1604 : ref_filename;
1605
1606 len = strlen (str);
1607 filename = build_string (len, str);
1608 if (gnat_node != Empty && Sloc (gnat_node) != No_Location)
1609 {
1610 line_number = Get_Logical_Line_Number (Sloc (gnat_node));
1611 column_number = Get_Column_Number (Sloc (gnat_node));
1612 }
1613 else
1614 {
1615 line_number = input_line;
1616 column_number = 0;
1617 }
1618
1619 TREE_TYPE (filename) = build_array_type (unsigned_char_type_node,
1620 build_index_type (size_int (len)));
1621
1622 return
1623 build_call_n_expr (fndecl, 6,
1624 build1 (ADDR_EXPR,
1625 build_pointer_type (unsigned_char_type_node),
1626 filename),
1627 build_int_cst (NULL_TREE, line_number),
1628 build_int_cst (NULL_TREE, column_number),
1629 convert (integer_type_node, index),
1630 convert (integer_type_node, first),
1631 convert (integer_type_node, last));
1632 }
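
/* Illustrative shape of the call built above, with a schematic routine
   name:

     __gnat_rcheck_NN_ext ("file.adb", line, column, index, first, last);

   which lets the runtime report "INDEX out of range FIRST..LAST".  */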
1633
1634 /* Similar to build_call_raise, with extra information about the column
1635 where the check failed. */
1636
1637 tree
1638 build_call_raise_column (int msg, Node_Id gnat_node)
1639 {
1640 tree fndecl = gnat_raise_decls_ext[msg];
1641 tree filename;
1642 int line_number, column_number;
1643 const char *str;
1644 int len;
1645
1646 str
1647 = (Debug_Flag_NN || Exception_Locations_Suppressed)
1648 ? ""
1649 : (gnat_node != Empty && Sloc (gnat_node) != No_Location)
1650 ? IDENTIFIER_POINTER
1651 (get_identifier (Get_Name_String
1652 (Debug_Source_Name
1653 (Get_Source_File_Index (Sloc (gnat_node))))))
1654 : ref_filename;
1655
1656 len = strlen (str);
1657 filename = build_string (len, str);
1658 if (gnat_node != Empty && Sloc (gnat_node) != No_Location)
1659 {
1660 line_number = Get_Logical_Line_Number (Sloc (gnat_node));
1661 column_number = Get_Column_Number (Sloc (gnat_node));
1662 }
1663 else
1664 {
1665 line_number = input_line;
1666 column_number = 0;
1667 }
1668
1669 TREE_TYPE (filename) = build_array_type (unsigned_char_type_node,
1670 build_index_type (size_int (len)));
1671
1672 return
1673 build_call_n_expr (fndecl, 3,
1674 build1 (ADDR_EXPR,
1675 build_pointer_type (unsigned_char_type_node),
1676 filename),
1677 build_int_cst (NULL_TREE, line_number),
1678 build_int_cst (NULL_TREE, column_number));
1679 }
1680 \f
1681 /* qsort comparator for the bit positions of two constructor elements
1682 for record components. */
1683
1684 static int
1685 compare_elmt_bitpos (const PTR rt1, const PTR rt2)
1686 {
1687 const constructor_elt * const elmt1 = (const constructor_elt * const) rt1;
1688 const constructor_elt * const elmt2 = (const constructor_elt * const) rt2;
1689 const_tree const field1 = elmt1->index;
1690 const_tree const field2 = elmt2->index;
1691 const int ret
1692 = tree_int_cst_compare (bit_position (field1), bit_position (field2));
1693
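/* When the bit positions are equal, fall back on DECL_UID so that the
sort is deterministic. */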
1694 return ret ? ret : (int) (DECL_UID (field1) - DECL_UID (field2));
1695 }
1696
1697 /* Return a CONSTRUCTOR of TYPE whose elements are given by the vector V. */
1698
1699 tree
1700 gnat_build_constructor (tree type, VEC(constructor_elt,gc) *v)
1701 {
1702 bool allconstant = (TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST);
1703 bool side_effects = false;
1704 tree result, obj, val;
1705 unsigned int n_elmts;
1706
1707 /* Scan the elements to see if they are all constant or if any has side
1708 effects, to let us set global flags on the resulting constructor. Count
1709 the elements along the way for possible sorting purposes below. */
1710 FOR_EACH_CONSTRUCTOR_ELT (v, n_elmts, obj, val)
1711 {
1712 /* The predicate must be in keeping with output_constructor. */
1713 if (!TREE_CONSTANT (val)
1714 || (TREE_CODE (type) == RECORD_TYPE
1715 && CONSTRUCTOR_BITFIELD_P (obj)
1716 && !initializer_constant_valid_for_bitfield_p (val))
1717 || !initializer_constant_valid_p (val, TREE_TYPE (val)))
1718 allconstant = false;
1719
1720 if (TREE_SIDE_EFFECTS (val))
1721 side_effects = true;
1722 }
1723
1724 /* For record types with constant components only, sort the field list
1725 by increasing bit position. This is necessary to ensure the
1726 constructor can be output as static data. */
1727 if (allconstant && TREE_CODE (type) == RECORD_TYPE && n_elmts > 1)
1728 VEC_qsort (constructor_elt, v, compare_elmt_bitpos);
1729
1730 result = build_constructor (type, v);
1731 TREE_CONSTANT (result) = TREE_STATIC (result) = allconstant;
1732 TREE_SIDE_EFFECTS (result) = side_effects;
1733 TREE_READONLY (result) = TYPE_READONLY (type) || allconstant;
1734 return result;
1735 }
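
/* For instance, an aggregate of a fixed-size record whose components are
   all literals yields a constructor with TREE_CONSTANT and TREE_STATIC
   set, which lets it be output directly as static data (illustrative
   example).  */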
1736 \f
1737 /* Return a COMPONENT_REF to access a field that is given by COMPONENT,
1738 an IDENTIFIER_NODE giving the name of the field, or FIELD, a FIELD_DECL,
1739 for the field. Don't fold the result if NO_FOLD_P is true.
1740
1741 We also handle the fact that we might have been passed a pointer to the
1742 actual record and know how to look for fields in variant parts. */
1743
1744 static tree
1745 build_simple_component_ref (tree record_variable, tree component,
1746 tree field, bool no_fold_p)
1747 {
1748 tree record_type = TYPE_MAIN_VARIANT (TREE_TYPE (record_variable));
1749 tree ref, inner_variable;
1750
1751 gcc_assert (RECORD_OR_UNION_TYPE_P (record_type)
1752 && COMPLETE_TYPE_P (record_type)
1753 && (component == NULL_TREE) != (field == NULL_TREE));
1754
1755 /* If no field was specified, look for a field with the specified name in
1756 the current record only. */
1757 if (!field)
1758 for (field = TYPE_FIELDS (record_type);
1759 field;
1760 field = DECL_CHAIN (field))
1761 if (DECL_NAME (field) == component)
1762 break;
1763
1764 if (!field)
1765 return NULL_TREE;
1766
1767 /* If this field is not in the specified record, see if we can find a field
1768 in the specified record whose original field is the same as this one. */
1769 if (DECL_CONTEXT (field) != record_type)
1770 {
1771 tree new_field;
1772
1773 /* First loop through normal components. */
1774 for (new_field = TYPE_FIELDS (record_type);
1775 new_field;
1776 new_field = DECL_CHAIN (new_field))
1777 if (SAME_FIELD_P (field, new_field))
1778 break;
1779
1780 /* Next, see if we're looking for an inherited component in an extension.
1781 If so, look through the extension directly. */
1782 if (!new_field
1783 && TREE_CODE (record_variable) == VIEW_CONVERT_EXPR
1784 && TYPE_ALIGN_OK (record_type)
1785 && TREE_CODE (TREE_TYPE (TREE_OPERAND (record_variable, 0)))
1786 == RECORD_TYPE
1787 && TYPE_ALIGN_OK (TREE_TYPE (TREE_OPERAND (record_variable, 0))))
1788 {
1789 ref = build_simple_component_ref (TREE_OPERAND (record_variable, 0),
1790 NULL_TREE, field, no_fold_p);
1791 if (ref)
1792 return ref;
1793 }
1794
1795 /* Next, loop through DECL_INTERNAL_P components if we haven't found the
1796 component in the first search. Doing this search in two steps is
1797 required to avoid hidden homonymous fields in the _Parent field. */
1798 if (!new_field)
1799 for (new_field = TYPE_FIELDS (record_type);
1800 new_field;
1801 new_field = DECL_CHAIN (new_field))
1802 if (DECL_INTERNAL_P (new_field))
1803 {
1804 tree field_ref
1805 = build_simple_component_ref (record_variable,
1806 NULL_TREE, new_field, no_fold_p);
1807 ref = build_simple_component_ref (field_ref, NULL_TREE, field,
1808 no_fold_p);
1809 if (ref)
1810 return ref;
1811 }
1812
1813 field = new_field;
1814 }
1815
1816 if (!field)
1817 return NULL_TREE;
1818
1819 /* If the field's offset has overflowed, do not try to access it, as doing
1820 so may trigger sanity checks deeper in the back-end. Note that we don't
1821 need to warn since this will be done on trying to declare the object. */
1822 if (TREE_CODE (DECL_FIELD_OFFSET (field)) == INTEGER_CST
1823 && TREE_OVERFLOW (DECL_FIELD_OFFSET (field)))
1824 return NULL_TREE;
1825
1826 /* Look through conversions between type variants. This is transparent as
1827 far as the field is concerned. */
1828 if (TREE_CODE (record_variable) == VIEW_CONVERT_EXPR
1829 && TYPE_MAIN_VARIANT (TREE_TYPE (TREE_OPERAND (record_variable, 0)))
1830 == record_type)
1831 inner_variable = TREE_OPERAND (record_variable, 0);
1832 else
1833 inner_variable = record_variable;
1834
1835 ref = build3 (COMPONENT_REF, TREE_TYPE (field), inner_variable, field,
1836 NULL_TREE);
1837
1838 if (TREE_READONLY (record_variable)
1839 || TREE_READONLY (field)
1840 || TYPE_READONLY (record_type))
1841 TREE_READONLY (ref) = 1;
1842
1843 if (TREE_THIS_VOLATILE (record_variable)
1844 || TREE_THIS_VOLATILE (field)
1845 || TYPE_VOLATILE (record_type))
1846 TREE_THIS_VOLATILE (ref) = 1;
1847
1848 if (no_fold_p)
1849 return ref;
1850
1851 /* The generic folder may punt in this case because the inner array type
1852 can be self-referential, but folding is in fact not problematic. */
1853 if (TREE_CODE (record_variable) == CONSTRUCTOR
1854 && TYPE_CONTAINS_TEMPLATE_P (TREE_TYPE (record_variable)))
1855 {
1856 VEC(constructor_elt,gc) *elts = CONSTRUCTOR_ELTS (record_variable);
1857 unsigned HOST_WIDE_INT idx;
1858 tree index, value;
1859 FOR_EACH_CONSTRUCTOR_ELT (elts, idx, index, value)
1860 if (index == field)
1861 return value;
1862 return ref;
1863 }
1864
1865 return fold (ref);
1866 }
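
/* To summarize the lookup order implemented above: 1) the fields of
   RECORD_TYPE itself, 2) fields of RECORD_TYPE sharing the same original
   field, 3) the parent part through a VIEW_CONVERT_EXPR to an extension,
   4) recursion into DECL_INTERNAL_P fields such as the variant part.  */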
1867 \f
1868 /* Like build_simple_component_ref, except that we give an error if the
1869 reference could not be found. */
1870
1871 tree
1872 build_component_ref (tree record_variable, tree component,
1873 tree field, bool no_fold_p)
1874 {
1875 tree ref = build_simple_component_ref (record_variable, component, field,
1876 no_fold_p);
1877
1878 if (ref)
1879 return ref;
1880
1881 /* If FIELD was specified, assume this is an invalid user field so raise
1882 Constraint_Error. Otherwise, we have no type to return so abort. */
1883 gcc_assert (field);
1884 return build1 (NULL_EXPR, TREE_TYPE (field),
1885 build_call_raise (CE_Discriminant_Check_Failed, Empty,
1886 N_Raise_Constraint_Error));
1887 }
1888 \f
1889 /* Helper for build_call_alloc_dealloc, with arguments to be interpreted
1890 identically. Process the case where a GNAT_PROC to call is provided. */
1891
1892 static inline tree
1893 build_call_alloc_dealloc_proc (tree gnu_obj, tree gnu_size, tree gnu_type,
1894 Entity_Id gnat_proc, Entity_Id gnat_pool)
1895 {
1896 tree gnu_proc = gnat_to_gnu (gnat_proc);
1897 tree gnu_call;
1898
1899 /* The storage pools are obviously always tagged types, but the
1900 secondary stack uses the same mechanism and is not tagged. */
1901 if (Is_Tagged_Type (Etype (gnat_pool)))
1902 {
1903 /* The size is the third parameter; the alignment, passed as the
1904 fourth parameter, has the same type. */
1905 Entity_Id gnat_size_type
1906 = Etype (Next_Formal (Next_Formal (First_Formal (gnat_proc))));
1907 tree gnu_size_type = gnat_to_gnu_type (gnat_size_type);
1908
1909 tree gnu_pool = gnat_to_gnu (gnat_pool);
1910 tree gnu_pool_addr = build_unary_op (ADDR_EXPR, NULL_TREE, gnu_pool);
1911 tree gnu_align = size_int (TYPE_ALIGN (gnu_type) / BITS_PER_UNIT);
1912
1913 gnu_size = convert (gnu_size_type, gnu_size);
1914 gnu_align = convert (gnu_size_type, gnu_align);
1915
1916 /* The first arg is always the address of the storage pool; for a
1917 deallocator, the address of the object comes next; the size and
1918 alignment come last in both cases. */
1919 if (gnu_obj)
1920 gnu_call = build_call_n_expr (gnu_proc, 4, gnu_pool_addr, gnu_obj,
1921 gnu_size, gnu_align);
1922 else
1923 gnu_call = build_call_n_expr (gnu_proc, 3, gnu_pool_addr,
1924 gnu_size, gnu_align);
1925 }
1926
1927 /* Secondary stack case. */
1928 else
1929 {
1930 /* The size is the second parameter. */
1931 Entity_Id gnat_size_type
1932 = Etype (Next_Formal (First_Formal (gnat_proc)));
1933 tree gnu_size_type = gnat_to_gnu_type (gnat_size_type);
1934
1935 gnu_size = convert (gnu_size_type, gnu_size);
1936
1937 /* For a deallocator, the first arg is the address of the object and
1938 the size comes next; for an allocator, the size is the only arg. */
1939 if (gnu_obj)
1940 gnu_call = build_call_n_expr (gnu_proc, 2, gnu_obj, gnu_size);
1941 else
1942 gnu_call = build_call_n_expr (gnu_proc, 1, gnu_size);
1943 }
1944
1945 return gnu_call;
1946 }
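
/* Illustrative shapes of the calls built above:

     proc (&pool, size, align)         storage pool allocation
     proc (&pool, obj, size, align)    storage pool deallocation
     proc (size)                       secondary stack allocation
     proc (obj, size)                  secondary stack release  */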
1947
1948 /* Helper for build_call_alloc_dealloc, to build and return an allocator for
1949 DATA_SIZE bytes aimed at containing a DATA_TYPE object, using the default
1950 __gnat_malloc allocator. Honor DATA_TYPE alignments greater than what the
1951 latter offers. */
1952
1953 static inline tree
1954 maybe_wrap_malloc (tree data_size, tree data_type, Node_Id gnat_node)
1955 {
1956 /* When the DATA_TYPE alignment is stricter than what malloc offers
1957 (super-aligned case), we allocate an "aligning" wrapper type and return
1958 the address of its single data field with the malloc's return value
1959 stored just in front. */
1960
1961 unsigned int data_align = TYPE_ALIGN (data_type);
1962 unsigned int system_allocator_alignment
1963 = get_target_system_allocator_alignment () * BITS_PER_UNIT;
1964
1965 tree aligning_type
1966 = ((data_align > system_allocator_alignment)
1967 ? make_aligning_type (data_type, data_align, data_size,
1968 system_allocator_alignment,
1969 POINTER_SIZE / BITS_PER_UNIT)
1970 : NULL_TREE);
1971
1972 tree size_to_malloc
1973 = aligning_type ? TYPE_SIZE_UNIT (aligning_type) : data_size;
1974
1975 tree malloc_ptr;
1976
1977 /* On VMS, if pointers are 64-bit and the type of the allocator has a
1978 32-bit size or convention C, allocate 32-bit memory. */
1979 if (TARGET_ABI_OPEN_VMS
1980 && POINTER_SIZE == 64
1981 && Nkind (gnat_node) == N_Allocator
1982 && (UI_To_Int (Esize (Etype (gnat_node))) == 32
1983 || Convention (Etype (gnat_node)) == Convention_C))
1984 malloc_ptr = build_call_n_expr (malloc32_decl, 1, size_to_malloc);
1985 else
1986 malloc_ptr = build_call_n_expr (malloc_decl, 1, size_to_malloc);
1987
1988 if (aligning_type)
1989 {
1990 /* Latch malloc's return value and get a pointer to the aligning field
1991 first. */
1992 tree storage_ptr = gnat_protect_expr (malloc_ptr);
1993
1994 tree aligning_record_addr
1995 = convert (build_pointer_type (aligning_type), storage_ptr);
1996
1997 tree aligning_record
1998 = build_unary_op (INDIRECT_REF, NULL_TREE, aligning_record_addr);
1999
2000 tree aligning_field
2001 = build_component_ref (aligning_record, NULL_TREE,
2002 TYPE_FIELDS (aligning_type), false);
2003
2004 tree aligning_field_addr
2005 = build_unary_op (ADDR_EXPR, NULL_TREE, aligning_field);
2006
2007 /* Then arrange to store the allocator's return value just in front
2008 of the data and return the address of the latter. */
2009 tree storage_ptr_slot_addr
2010 = build_binary_op (POINTER_PLUS_EXPR, ptr_void_type_node,
2011 convert (ptr_void_type_node, aligning_field_addr),
2012 size_int (-(HOST_WIDE_INT) POINTER_SIZE
2013 / BITS_PER_UNIT));
2014
2015 tree storage_ptr_slot
2016 = build_unary_op (INDIRECT_REF, NULL_TREE,
2017 convert (build_pointer_type (ptr_void_type_node),
2018 storage_ptr_slot_addr));
2019
2020 return
2021 build2 (COMPOUND_EXPR, TREE_TYPE (aligning_field_addr),
2022 build_binary_op (MODIFY_EXPR, NULL_TREE,
2023 storage_ptr_slot, storage_ptr),
2024 aligning_field_addr);
2025 }
2026 else
2027 return malloc_ptr;
2028 }
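
/* Sketch of the super-aligned layout built above:

     malloc's return value          address returned to the caller
            |                                |
            v                                v
            +---------+------------------+------------- ... -+
            | padding | saved malloc ptr |       data        |
            +---------+------------------+------------- ... -+

   The pointer-sized slot just in front of the data holds the value that
   maybe_wrap_free later passes back to free.  */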
2029
2030 /* Helper for build_call_alloc_dealloc, to release a DATA_TYPE object
2031 designated by DATA_PTR using the __gnat_free entry point. */
2032
2033 static inline tree
2034 maybe_wrap_free (tree data_ptr, tree data_type)
2035 {
2036 /* In the regular alignment case, we pass the data pointer straight to free.
2037 In the super-aligned case, we need to retrieve the initial allocator
2038 return value, stored in front of the data block at allocation time. */
2039
2040 unsigned int data_align = TYPE_ALIGN (data_type);
2041 unsigned int system_allocator_alignment
2042 = get_target_system_allocator_alignment () * BITS_PER_UNIT;
2043
2044 tree free_ptr;
2045
2046 if (data_align > system_allocator_alignment)
2047 {
2048 /* DATA_FRONT_PTR (void *)
2049 = (void *)DATA_PTR - sizeof (void *) */
2050 tree data_front_ptr
2051 = build_binary_op
2052 (POINTER_PLUS_EXPR, ptr_void_type_node,
2053 convert (ptr_void_type_node, data_ptr),
2054 size_int (-(HOST_WIDE_INT) POINTER_SIZE / BITS_PER_UNIT));
2055
2056 /* FREE_PTR (void *) = *(void **)DATA_FRONT_PTR */
2057 free_ptr
2058 = build_unary_op
2059 (INDIRECT_REF, NULL_TREE,
2060 convert (build_pointer_type (ptr_void_type_node), data_front_ptr));
2061 }
2062 else
2063 free_ptr = data_ptr;
2064
2065 return build_call_n_expr (free_decl, 1, free_ptr);
2066 }
2067
2068 /* Build a GCC tree to call an allocation or deallocation function.
2069 If GNU_OBJ is nonzero, it is an object to deallocate. Otherwise,
2070 generate an allocator.
2071
2072 GNU_SIZE is the number of bytes to allocate and GNU_TYPE is the contained
2073 object type, used to determine the to-be-honored address alignment.
2074 GNAT_PROC, if present, is a procedure to call and GNAT_POOL is the storage
2075 pool to use. If not present, malloc and free are used. GNAT_NODE is used
2076 to provide an error location for restriction violation messages. */
2077
2078 tree
2079 build_call_alloc_dealloc (tree gnu_obj, tree gnu_size, tree gnu_type,
2080 Entity_Id gnat_proc, Entity_Id gnat_pool,
2081 Node_Id gnat_node)
2082 {
2083 gnu_size = SUBSTITUTE_PLACEHOLDER_IN_EXPR (gnu_size, gnu_obj);
2084
2085 /* Explicit proc to call?  This one is assumed to deal with the type
2086 alignment constraints. */
2087 if (Present (gnat_proc))
2088 return build_call_alloc_dealloc_proc (gnu_obj, gnu_size, gnu_type,
2089 gnat_proc, gnat_pool);
2090
2091 /* Otherwise, object to "free" or "malloc" with possible special processing
2092 for alignments stricter than what the default allocator honors. */
2093 else if (gnu_obj)
2094 return maybe_wrap_free (gnu_obj, gnu_type);
2095 else
2096 {
2097 /* Assert that we can no longer be called with this special pool. */
2098 gcc_assert (gnat_pool != -1);
2099
2100 /* Check that we aren't violating the associated restriction. */
2101 if (!(Nkind (gnat_node) == N_Allocator && Comes_From_Source (gnat_node)))
2102 Check_No_Implicit_Heap_Alloc (gnat_node);
2103
2104 return maybe_wrap_malloc (gnu_size, gnu_type, gnat_node);
2105 }
2106 }
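
/* Dispatch summary for the above (the default entry points are those of
   the GNAT runtime):

     GNAT_PROC present          ->  pool or secondary stack routine
     GNU_OBJ set, no GNAT_PROC  ->  __gnat_free, via maybe_wrap_free
     otherwise                  ->  __gnat_malloc, via maybe_wrap_malloc  */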
2107 \f
2108 /* Build a GCC tree that corresponds to allocating an object of TYPE whose
2109 initial value is INIT, if INIT is nonzero. Convert the expression to
2110 RESULT_TYPE, which must be some pointer type, and return the result.
2111
2112 GNAT_PROC and GNAT_POOL optionally give the procedure to call and
2113 the storage pool to use. GNAT_NODE is used to provide an error
2114 location for restriction violation messages. If IGNORE_INIT_TYPE is
2115 true, ignore the type of INIT for the purpose of determining the size;
2116 this will cause the maximum size to be allocated if TYPE is of
2117 self-referential size. */
2118
2119 tree
2120 build_allocator (tree type, tree init, tree result_type, Entity_Id gnat_proc,
2121 Entity_Id gnat_pool, Node_Id gnat_node, bool ignore_init_type)
2122 {
2123 tree size, storage, storage_deref, storage_init;
2124
2125 /* If the initializer, if present, is a NULL_EXPR, just return a new one. */
2126 if (init && TREE_CODE (init) == NULL_EXPR)
2127 return build1 (NULL_EXPR, result_type, TREE_OPERAND (init, 0));
2128
2129 /* If the initializer, if present, is a COND_EXPR, deal with each branch. */
2130 else if (init && TREE_CODE (init) == COND_EXPR)
2131 return build3 (COND_EXPR, result_type, TREE_OPERAND (init, 0),
2132 build_allocator (type, TREE_OPERAND (init, 1), result_type,
2133 gnat_proc, gnat_pool, gnat_node,
2134 ignore_init_type),
2135 build_allocator (type, TREE_OPERAND (init, 2), result_type,
2136 gnat_proc, gnat_pool, gnat_node,
2137 ignore_init_type));
2138
2139 /* If RESULT_TYPE is a fat or thin pointer, set SIZE to be the sum of the
2140 sizes of the object and its template. Allocate the whole thing and
2141 fill in the parts that are known. */
2142 else if (TYPE_IS_FAT_OR_THIN_POINTER_P (result_type))
2143 {
2144 tree storage_type
2145 = build_unc_object_type_from_ptr (result_type, type,
2146 get_identifier ("ALLOC"), false);
2147 tree template_type = TREE_TYPE (TYPE_FIELDS (storage_type));
2148 tree storage_ptr_type = build_pointer_type (storage_type);
2149
2150 size = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_SIZE_UNIT (storage_type),
2151 init);
2152
2153 /* If the size overflows, pass -1 so Storage_Error will be raised. */
2154 if (TREE_CODE (size) == INTEGER_CST && TREE_OVERFLOW (size))
2155 size = ssize_int (-1);
2156
2157 storage = build_call_alloc_dealloc (NULL_TREE, size, storage_type,
2158 gnat_proc, gnat_pool, gnat_node);
2159 storage = convert (storage_ptr_type, gnat_protect_expr (storage));
2160 storage_deref = build_unary_op (INDIRECT_REF, NULL_TREE, storage);
2161 TREE_THIS_NOTRAP (storage_deref) = 1;
2162
2163 /* If there is an initializing expression, then make a constructor for
2164 the entire object including the bounds and copy it into the object.
2165 If there is no initializing expression, just set the bounds. */
2166 if (init)
2167 {
2168 VEC(constructor_elt,gc) *v = VEC_alloc (constructor_elt, gc, 2);
2169
2170 CONSTRUCTOR_APPEND_ELT (v, TYPE_FIELDS (storage_type),
2171 build_template (template_type, type, init));
2172 CONSTRUCTOR_APPEND_ELT (v, DECL_CHAIN (TYPE_FIELDS (storage_type)),
2173 init);
2174 storage_init
2175 = build_binary_op (MODIFY_EXPR, NULL_TREE, storage_deref,
2176 gnat_build_constructor (storage_type, v));
2177 }
2178 else
2179 storage_init
2180 = build_binary_op (MODIFY_EXPR, NULL_TREE,
2181 build_component_ref (storage_deref, NULL_TREE,
2182 TYPE_FIELDS (storage_type),
2183 false),
2184 build_template (template_type, type, NULL_TREE));
2185
2186 return build2 (COMPOUND_EXPR, result_type,
2187 storage_init, convert (result_type, storage));
2188 }
2189
2190 size = TYPE_SIZE_UNIT (type);
2191
2192 /* If we have an initializing expression, see if its size is simpler
2193 than the size from the type. */
2194 if (!ignore_init_type && init && TYPE_SIZE_UNIT (TREE_TYPE (init))
2195 && (TREE_CODE (TYPE_SIZE_UNIT (TREE_TYPE (init))) == INTEGER_CST
2196 || CONTAINS_PLACEHOLDER_P (size)))
2197 size = TYPE_SIZE_UNIT (TREE_TYPE (init));
2198
2199 /* If the size is still self-referential, reference the initializing
2200 expression, if it is present. If not, this must have been a
2201 call to allocate a library-level object, in which case we use
2202 the maximum size. */
2203 if (CONTAINS_PLACEHOLDER_P (size))
2204 {
2205 if (!ignore_init_type && init)
2206 size = substitute_placeholder_in_expr (size, init);
2207 else
2208 size = max_size (size, true);
2209 }
2210
2211 /* If the size overflows, pass -1 so Storage_Error will be raised. */
2212 if (TREE_CODE (size) == INTEGER_CST && TREE_OVERFLOW (size))
2213 size = ssize_int (-1);
2214
2215 storage = convert (result_type,
2216 build_call_alloc_dealloc (NULL_TREE, size, type,
2217 gnat_proc, gnat_pool,
2218 gnat_node));
2219
2220 /* If we have an initial value, protect the new address, assign the value
2221 and return the address with a COMPOUND_EXPR. */
2222 if (init)
2223 {
2224 storage = gnat_protect_expr (storage);
2225 storage_deref = build_unary_op (INDIRECT_REF, NULL_TREE, storage);
2226 TREE_THIS_NOTRAP (storage_deref) = 1;
2227 storage_init
2228 = build_binary_op (MODIFY_EXPR, NULL_TREE, storage_deref, init);
2229 return build2 (COMPOUND_EXPR, result_type, storage_init, storage);
2230 }
2231
2232 return storage;
2233 }
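
/* In the unconstrained case above, the expansion amounts to the following
   sketch, where the template holds the bounds:

     storage = allocate (sizeof (template) + sizeof (data));
     *storage = {template, init};     [just the template if INIT is null]
     (result_type) storage;  */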
2234 \f
2235 /* Indicate that we need to take the address of T and that it therefore
2236 should not be allocated in a register. Returns true if successful. */
2237
2238 bool
2239 gnat_mark_addressable (tree t)
2240 {
2241 while (true)
2242 switch (TREE_CODE (t))
2243 {
2244 case ADDR_EXPR:
2245 case COMPONENT_REF:
2246 case ARRAY_REF:
2247 case ARRAY_RANGE_REF:
2248 case REALPART_EXPR:
2249 case IMAGPART_EXPR:
2250 case VIEW_CONVERT_EXPR:
2251 case NON_LVALUE_EXPR:
2252 CASE_CONVERT:
2253 t = TREE_OPERAND (t, 0);
2254 break;
2255
2256 case COMPOUND_EXPR:
2257 t = TREE_OPERAND (t, 1);
2258 break;
2259
2260 case CONSTRUCTOR:
2261 TREE_ADDRESSABLE (t) = 1;
2262 return true;
2263
2264 case VAR_DECL:
2265 case PARM_DECL:
2266 case RESULT_DECL:
2267 TREE_ADDRESSABLE (t) = 1;
2268 return true;
2269
2270 case FUNCTION_DECL:
2271 TREE_ADDRESSABLE (t) = 1;
2272 return true;
2273
2274 case CONST_DECL:
2275 return DECL_CONST_CORRESPONDING_VAR (t)
2276 && gnat_mark_addressable (DECL_CONST_CORRESPONDING_VAR (t));
2277
2278 default:
2279 return true;
2280 }
2281 }
2282 \f
2283 /* Save EXP for later use or reuse. This is equivalent to save_expr in tree.c
2284 but we know how to handle our own nodes. */
2285
2286 tree
2287 gnat_save_expr (tree exp)
2288 {
2289 tree type = TREE_TYPE (exp);
2290 enum tree_code code = TREE_CODE (exp);
2291
2292 if (TREE_CONSTANT (exp) || code == SAVE_EXPR || code == NULL_EXPR)
2293 return exp;
2294
2295 if (code == UNCONSTRAINED_ARRAY_REF)
2296 {
2297 tree t = build1 (code, type, gnat_save_expr (TREE_OPERAND (exp, 0)));
2298 TREE_READONLY (t) = TYPE_READONLY (type);
2299 return t;
2300 }
2301
2302 /* If this is a COMPONENT_REF of a fat pointer, save the entire fat pointer.
2303 This may be more efficient, but will also allow us to more easily find
2304 the match for the PLACEHOLDER_EXPR. */
2305 if (code == COMPONENT_REF
2306 && TYPE_IS_FAT_POINTER_P (TREE_TYPE (TREE_OPERAND (exp, 0))))
2307 return build3 (code, type, gnat_save_expr (TREE_OPERAND (exp, 0)),
2308 TREE_OPERAND (exp, 1), TREE_OPERAND (exp, 2));
2309
2310 return save_expr (exp);
2311 }
2312
2313 /* Protect EXP for immediate reuse. This is a variant of gnat_save_expr that
2314 is optimized under the assumption that EXP's value doesn't change before
2315 its subsequent reuse(s) except through its potential reevaluation. */
2316
2317 tree
2318 gnat_protect_expr (tree exp)
2319 {
2320 tree type = TREE_TYPE (exp);
2321 enum tree_code code = TREE_CODE (exp);
2322
2323 if (TREE_CONSTANT (exp) || code == SAVE_EXPR || code == NULL_EXPR)
2324 return exp;
2325
2326 /* If EXP has no side effects, we theoretically don't need to do anything.
2327 However, we may be recursively passed more and more complex expressions
2328 involving checks which will be reused multiple times and eventually be
2329 unshared for gimplification; in order to avoid a complexity explosion
2330 at that point, we protect any expressions more complex than a simple
2331 arithmetic expression. */
2332 if (!TREE_SIDE_EFFECTS (exp))
2333 {
2334 tree inner = skip_simple_arithmetic (exp);
2335 if (!EXPR_P (inner) || REFERENCE_CLASS_P (inner))
2336 return exp;
2337 }
2338
2339 /* If this is a conversion, protect what's inside the conversion. */
2340 if (code == NON_LVALUE_EXPR
2341 || CONVERT_EXPR_CODE_P (code)
2342 || code == VIEW_CONVERT_EXPR)
2343 return build1 (code, type, gnat_protect_expr (TREE_OPERAND (exp, 0)));
2344
2345 /* If we're indirectly referencing something, we only need to protect the
2346 address since the data itself can't change in these situations. */
2347 if (code == INDIRECT_REF || code == UNCONSTRAINED_ARRAY_REF)
2348 {
2349 tree t = build1 (code, type, gnat_protect_expr (TREE_OPERAND (exp, 0)));
2350 TREE_READONLY (t) = TYPE_READONLY (type);
2351 return t;
2352 }
2353
2354 /* If this is a COMPONENT_REF of a fat pointer, save the entire fat pointer.
2355 This may be more efficient, but will also allow us to more easily find
2356 the match for the PLACEHOLDER_EXPR. */
2357 if (code == COMPONENT_REF
2358 && TYPE_IS_FAT_POINTER_P (TREE_TYPE (TREE_OPERAND (exp, 0))))
2359 return build3 (code, type, gnat_protect_expr (TREE_OPERAND (exp, 0)),
2360 TREE_OPERAND (exp, 1), TREE_OPERAND (exp, 2));
2361
2362 /* If this is a fat pointer or something that can be placed in a register,
2363 just make a SAVE_EXPR. Likewise for a CALL_EXPR as large objects are
2364 returned via invisible reference in most ABIs so the temporary will
2365 directly be filled by the callee. */
2366 if (TYPE_IS_FAT_POINTER_P (type)
2367 || TYPE_MODE (type) != BLKmode
2368 || code == CALL_EXPR)
2369 return save_expr (exp);
2370
2371 /* Otherwise, protect the address of EXP and dereference it. */
2372 return
2373 build_unary_op (INDIRECT_REF, type,
2374 save_expr (build_unary_op (ADDR_EXPR,
2375 build_reference_type (type),
2376 exp)));
2377 }
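
/* The fallback case above amounts to *SAVE_EXPR (&EXP): the address is
   saved in a temporary and the data is fetched again through it on every
   use.  */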
2378
2379 /* This is equivalent to stabilize_reference_1 in tree.c but we take an extra
2380 argument to force evaluation of everything. */
2381
2382 static tree
2383 gnat_stabilize_reference_1 (tree e, bool force)
2384 {
2385 enum tree_code code = TREE_CODE (e);
2386 tree type = TREE_TYPE (e);
2387 tree result;
2388
2389 /* We cannot ignore const expressions because one might be a reference
2390 to a const array whose index contains side-effects. But we can
2391 ignore things that are actual constants or that have already been
2392 handled by this function. */
2393 if (TREE_CONSTANT (e) || code == SAVE_EXPR)
2394 return e;
2395
2396 switch (TREE_CODE_CLASS (code))
2397 {
2398 case tcc_exceptional:
2399 case tcc_declaration:
2400 case tcc_comparison:
2401 case tcc_expression:
2402 case tcc_reference:
2403 case tcc_vl_exp:
2404 /* If this is a COMPONENT_REF of a fat pointer, save the entire
2405 fat pointer. This may be more efficient, but will also allow
2406 us to more easily find the match for the PLACEHOLDER_EXPR. */
2407 if (code == COMPONENT_REF
2408 && TYPE_IS_FAT_POINTER_P (TREE_TYPE (TREE_OPERAND (e, 0))))
2409 result
2410 = build3 (code, type,
2411 gnat_stabilize_reference_1 (TREE_OPERAND (e, 0), force),
2412 TREE_OPERAND (e, 1), TREE_OPERAND (e, 2));
2413 /* If the expression has side-effects, then encase it in a SAVE_EXPR
2414 so that it will only be evaluated once. */
2415 /* The tcc_reference and tcc_comparison classes could be handled as
2416 below, but it is generally faster to only evaluate them once. */
2417 else if (TREE_SIDE_EFFECTS (e) || force)
2418 return save_expr (e);
2419 else
2420 return e;
2421 break;
2422
2423 case tcc_binary:
2424 /* Recursively stabilize each operand. */
2425 result
2426 = build2 (code, type,
2427 gnat_stabilize_reference_1 (TREE_OPERAND (e, 0), force),
2428 gnat_stabilize_reference_1 (TREE_OPERAND (e, 1), force));
2429 break;
2430
2431 case tcc_unary:
2432 /* Recursively stabilize each operand. */
2433 result
2434 = build1 (code, type,
2435 gnat_stabilize_reference_1 (TREE_OPERAND (e, 0), force));
2436 break;
2437
2438 default:
2439 gcc_unreachable ();
2440 }
2441
2442 /* See similar handling in gnat_stabilize_reference. */
2443 TREE_READONLY (result) = TREE_READONLY (e);
2444 TREE_SIDE_EFFECTS (result) |= TREE_SIDE_EFFECTS (e);
2445 TREE_THIS_VOLATILE (result) = TREE_THIS_VOLATILE (e);
2446
2447 if (code == INDIRECT_REF
2448 || code == UNCONSTRAINED_ARRAY_REF
2449 || code == ARRAY_REF
2450 || code == ARRAY_RANGE_REF)
2451 TREE_THIS_NOTRAP (result) = TREE_THIS_NOTRAP (e);
2452
2453 return result;
2454 }
2455
2456 /* This is equivalent to stabilize_reference in tree.c but we know how to
2457 handle our own nodes and we take extra arguments. FORCE says whether to
2458 force evaluation of everything. We set SUCCESS to true unless we walk
2459 through something we don't know how to stabilize. */
2460
2461 tree
2462 gnat_stabilize_reference (tree ref, bool force, bool *success)
2463 {
2464 tree type = TREE_TYPE (ref);
2465 enum tree_code code = TREE_CODE (ref);
2466 tree result;
2467
2468 /* Assume we'll succeed unless proven otherwise. */
2469 if (success)
2470 *success = true;
2471
2472 switch (code)
2473 {
2474 case CONST_DECL:
2475 case VAR_DECL:
2476 case PARM_DECL:
2477 case RESULT_DECL:
2478 /* No action is needed in this case. */
2479 return ref;
2480
2481 case ADDR_EXPR:
2482 CASE_CONVERT:
2483 case FLOAT_EXPR:
2484 case FIX_TRUNC_EXPR:
2485 case VIEW_CONVERT_EXPR:
2486 result
2487 = build1 (code, type,
2488 gnat_stabilize_reference (TREE_OPERAND (ref, 0), force,
2489 success));
2490 break;
2491
2492 case INDIRECT_REF:
2493 case UNCONSTRAINED_ARRAY_REF:
2494 result = build1 (code, type,
2495 gnat_stabilize_reference_1 (TREE_OPERAND (ref, 0),
2496 force));
2497 break;
2498
2499 case COMPONENT_REF:
2500 result = build3 (COMPONENT_REF, type,
2501 gnat_stabilize_reference (TREE_OPERAND (ref, 0), force,
2502 success),
2503 TREE_OPERAND (ref, 1), NULL_TREE);
2504 break;
2505
2506 case BIT_FIELD_REF:
2507 result = build3 (BIT_FIELD_REF, type,
2508 gnat_stabilize_reference (TREE_OPERAND (ref, 0), force,
2509 success),
2510 gnat_stabilize_reference_1 (TREE_OPERAND (ref, 1),
2511 force),
2512 gnat_stabilize_reference_1 (TREE_OPERAND (ref, 2),
2513 force));
2514 break;
2515
2516 case ARRAY_REF:
2517 case ARRAY_RANGE_REF:
2518 result = build4 (code, type,
2519 gnat_stabilize_reference (TREE_OPERAND (ref, 0), force,
2520 success),
2521 gnat_stabilize_reference_1 (TREE_OPERAND (ref, 1),
2522 force),
2523 NULL_TREE, NULL_TREE);
2524 break;
2525
2526 case CALL_EXPR:
2527 result = gnat_stabilize_reference_1 (ref, force);
2528 break;
2529
2530 case COMPOUND_EXPR:
2531 result = build2 (COMPOUND_EXPR, type,
2532 gnat_stabilize_reference (TREE_OPERAND (ref, 0), force,
2533 success),
2534 gnat_stabilize_reference (TREE_OPERAND (ref, 1), force,
2535 success));
2536 break;
2537
2538 case CONSTRUCTOR:
2539 /* Constructors with 1 element are used extensively to formally
2540 convert objects to special wrapping types. */
2541 if (TREE_CODE (type) == RECORD_TYPE
2542 && VEC_length (constructor_elt, CONSTRUCTOR_ELTS (ref)) == 1)
2543 {
2544 tree index
2545 = VEC_index (constructor_elt, CONSTRUCTOR_ELTS (ref), 0)->index;
2546 tree value
2547 = VEC_index (constructor_elt, CONSTRUCTOR_ELTS (ref), 0)->value;
2548 result
2549 = build_constructor_single (type, index,
2550 gnat_stabilize_reference_1 (value,
2551 force));
2552 }
2553 else
2554 {
2555 if (success)
2556 *success = false;
2557 return ref;
2558 }
2559 break;
2560
2561 case ERROR_MARK:
2562 ref = error_mark_node;
2563
2564 /* ... fall through to failure ... */
2565
2566 /* If REF isn't a kind of lvalue we recognize, make no change.
2567 The caller should recognize the error for an invalid lvalue. */
2568 default:
2569 if (success)
2570 *success = false;
2571 return ref;
2572 }
2573
2574 /* TREE_THIS_VOLATILE and TREE_SIDE_EFFECTS set on the initial expression
2575 may not be sustained across some paths, such as the way via build1 for
2576 INDIRECT_REF. We reset those flags here in the general case, which is
2577 consistent with the GCC version of this routine.
2578
2579 Special care should be taken regarding TREE_SIDE_EFFECTS, because some
2580 paths introduce side-effects where there was none initially (e.g. if a
2581 SAVE_EXPR is built) and we also want to keep track of that. */
2582 TREE_READONLY (result) = TREE_READONLY (ref);
2583 TREE_SIDE_EFFECTS (result) |= TREE_SIDE_EFFECTS (ref);
2584 TREE_THIS_VOLATILE (result) = TREE_THIS_VOLATILE (ref);
2585
2586 if (code == INDIRECT_REF
2587 || code == UNCONSTRAINED_ARRAY_REF
2588 || code == ARRAY_REF
2589 || code == ARRAY_RANGE_REF)
2590 TREE_THIS_NOTRAP (result) = TREE_THIS_NOTRAP (ref);
2591
2592 return result;
2593 }
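
/* For instance, stabilizing A(I).F, i.e. a COMPONENT_REF atop an
   ARRAY_REF, rebuilds both nodes: the base A is handled recursively and
   the index I goes through gnat_stabilize_reference_1, which saves it in
   a temporary if it has side-effects or if FORCE is set.  */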
2594
2595 /* If EXPR is an expression that is invariant in the current function, in the
2596 sense that it can be evaluated anywhere in the function and any number of
2597 times, return EXPR or an equivalent expression. Otherwise return NULL. */
2598
2599 tree
2600 gnat_invariant_expr (tree expr)
2601 {
2602 tree type = TREE_TYPE (expr), t;
2603
2604 expr = remove_conversions (expr, false);
2605
2606 while ((TREE_CODE (expr) == CONST_DECL
2607 || (TREE_CODE (expr) == VAR_DECL && TREE_READONLY (expr)))
2608 && decl_function_context (expr) == current_function_decl
2609 && DECL_INITIAL (expr))
2610 expr = remove_conversions (DECL_INITIAL (expr), false);
2611
2612 if (TREE_CONSTANT (expr))
2613 return fold_convert (type, expr);
2614
2615 t = expr;
2616
2617 while (true)
2618 {
2619 switch (TREE_CODE (t))
2620 {
2621 case COMPONENT_REF:
2622 if (TREE_OPERAND (t, 2) != NULL_TREE)
2623 return NULL_TREE;
2624 break;
2625
2626 case ARRAY_REF:
2627 case ARRAY_RANGE_REF:
2628 if (!TREE_CONSTANT (TREE_OPERAND (t, 1))
2629 || TREE_OPERAND (t, 2) != NULL_TREE
2630 || TREE_OPERAND (t, 3) != NULL_TREE)
2631 return NULL_TREE;
2632 break;
2633
2634 case BIT_FIELD_REF:
2635 case VIEW_CONVERT_EXPR:
2636 case REALPART_EXPR:
2637 case IMAGPART_EXPR:
2638 break;
2639
2640 case INDIRECT_REF:
2641 if (!TREE_READONLY (t)
2642 || TREE_SIDE_EFFECTS (t)
2643 || !TREE_THIS_NOTRAP (t))
2644 return NULL_TREE;
2645 break;
2646
2647 default:
2648 goto object;
2649 }
2650
2651 t = TREE_OPERAND (t, 0);
2652 }
2653
2654 object:
2655 if (TREE_SIDE_EFFECTS (t))
2656 return NULL_TREE;
2657
2658 if (TREE_CODE (t) == CONST_DECL
2659 && (DECL_EXTERNAL (t)
2660 || decl_function_context (t) != current_function_decl))
2661 return fold_convert (type, expr);
2662
2663 if (!TREE_READONLY (t))
2664 return NULL_TREE;
2665
2666 if (TREE_CODE (t) == PARM_DECL)
2667 return fold_convert (type, expr);
2668
2669 if (TREE_CODE (t) == VAR_DECL
2670 && (DECL_EXTERNAL (t)
2671 || decl_function_context (t) != current_function_decl))
2672 return fold_convert (type, expr);
2673
2674 return NULL_TREE;
2675 }
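
/* For instance, dereferencing a read-only PARM_DECL through a
   TREE_THIS_NOTRAP INDIRECT_REF qualifies, whereas any reference whose
   base is writable in the current function, or whose array index is not
   constant, yields NULL_TREE.  */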