1 /****************************************************************************
3 * GNAT COMPILER COMPONENTS *
7 * C Implementation File *
9 * Copyright (C) 1992-2013, Free Software Foundation, Inc. *
11 * GNAT is free software; you can redistribute it and/or modify it under *
12 * terms of the GNU General Public License as published by the Free Soft- *
13 * ware Foundation; either version 3, or (at your option) any later ver- *
14 * sion. GNAT is distributed in the hope that it will be useful, but WITH- *
15 * OUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY *
16 * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License *
17 * for more details. You should have received a copy of the GNU General *
18 * Public License along with GCC; see the file COPYING3. If not see *
19 * <http://www.gnu.org/licenses/>. *
21 * GNAT was originally developed by the GNAT team at New York University. *
22 * Extensive contributions were provided by Ada Core Technologies Inc. *
24 ****************************************************************************/
28 #include "coretypes.h"
33 #include "diagnostic-core.h"
39 #include "common/common-target.h"
40 #include "langhooks.h"
42 #include "diagnostic.h"
44 #include "tree-dump.h"
45 #include "tree-inline.h"
46 #include "tree-iterator.h"
/* NOTE (review): garbled extraction; original source lines are missing between
   the embedded line numbers (e.g. the `force_global' variable the first comment
   documents) -- restore from upstream gcc/ada/gcc-interface/utils.c.  */
62 /* If nonzero, pretend we are allocating at global level. */
65 /* The default alignment of "double" floating-point types, i.e. floating
66 point types whose size is equal to 64 bits, or 0 if this alignment is
67 not specifically capped. */
68 int double_float_alignment
;
70 /* The default alignment of "double" or larger scalar types, i.e. scalar
71 types whose size is greater or equal to 64 bits, or 0 if this alignment
72 is not specifically capped. */
73 int double_scalar_alignment
;
75 /* Tree nodes for the various types and decls we create. */
76 tree gnat_std_decls
[(int) ADT_LAST
];
78 /* Functions to call for each of the possible raise reasons. */
79 tree gnat_raise_decls
[(int) LAST_REASON_CODE
+ 1];
81 /* Likewise, but with extra info for each of the possible raise reasons. */
82 tree gnat_raise_decls_ext
[(int) LAST_REASON_CODE
+ 1];
/* NOTE (review): garbled extraction; each prototype below is wrapped across
   several physical lines -- restore from upstream gcc/ada/gcc-interface/utils.c.
   These are the handlers referenced by gnat_internal_attribute_table.  */
84 /* Forward declarations for handlers of attributes. */
85 static tree
handle_const_attribute (tree
*, tree
, tree
, int, bool *);
86 static tree
handle_nothrow_attribute (tree
*, tree
, tree
, int, bool *);
87 static tree
handle_pure_attribute (tree
*, tree
, tree
, int, bool *);
88 static tree
handle_novops_attribute (tree
*, tree
, tree
, int, bool *);
89 static tree
handle_nonnull_attribute (tree
*, tree
, tree
, int, bool *);
90 static tree
handle_sentinel_attribute (tree
*, tree
, tree
, int, bool *);
91 static tree
handle_noreturn_attribute (tree
*, tree
, tree
, int, bool *);
92 static tree
handle_leaf_attribute (tree
*, tree
, tree
, int, bool *);
93 static tree
handle_malloc_attribute (tree
*, tree
, tree
, int, bool *);
94 static tree
handle_type_generic_attribute (tree
*, tree
, tree
, int, bool *);
95 static tree
handle_vector_size_attribute (tree
*, tree
, tree
, int, bool *);
96 static tree
handle_vector_type_attribute (tree
*, tree
, tree
, int, bool *);
98 /* Fake handler for attributes we don't properly support, typically because
99 they'd require dragging a lot of the common-c front-end circuitry. */
100 static tree
fake_attribute_handler (tree
*, tree
, tree
, int, bool *);
/* NOTE (review): garbled extraction; the initializer is incomplete (the opening
   brace after `=', several `false },' entry terminators and the closing `};'
   are among the missing original lines) -- restore from upstream
   gcc/ada/gcc-interface/utils.c.  */
102 /* Table of machine-independent internal attributes for Ada. We support
103 this minimal set of attributes to accommodate the needs of builtins. */
104 const struct attribute_spec gnat_internal_attribute_table
[] =
106 /* { name, min_len, max_len, decl_req, type_req, fn_type_req, handler,
107 affects_type_identity } */
108 { "const", 0, 0, true, false, false, handle_const_attribute
,
110 { "nothrow", 0, 0, true, false, false, handle_nothrow_attribute
,
112 { "pure", 0, 0, true, false, false, handle_pure_attribute
,
114 { "no vops", 0, 0, true, false, false, handle_novops_attribute
,
116 { "nonnull", 0, -1, false, true, true, handle_nonnull_attribute
,
118 { "sentinel", 0, 1, false, true, true, handle_sentinel_attribute
,
120 { "noreturn", 0, 0, true, false, false, handle_noreturn_attribute
,
122 { "leaf", 0, 0, true, false, false, handle_leaf_attribute
,
124 { "malloc", 0, 0, true, false, false, handle_malloc_attribute
,
126 { "type generic", 0, 0, false, true, true, handle_type_generic_attribute
,
129 { "vector_size", 1, 1, false, true, false, handle_vector_size_attribute
,
131 { "vector_type", 0, 0, false, true, false, handle_vector_type_attribute
,
133 { "may_alias", 0, 0, false, true, false, NULL
, false },
135 /* ??? format and format_arg are heavy and not supported, which actually
136 prevents support for stdio builtins, which we however declare as part
137 of the common builtins.def contents. */
138 { "format", 3, 3, false, true, true, fake_attribute_handler
, false },
139 { "format_arg", 1, 1, false, true, true, fake_attribute_handler
, false },
141 { NULL
, 0, 0, false, false, false, NULL
, false }
/* NOTE (review): garbled extraction -- original lines are missing here;
   restore from upstream gcc/ada/gcc-interface/utils.c.
   GC-managed GNAT-entity -> GCC-tree maps, indexed by (entity - First_Node_Id);
   the accessor macros below are the only way the tables are read/written.  */
144 /* Associates a GNAT tree node to a GCC tree node. It is used in
145 `save_gnu_tree', `get_gnu_tree' and `present_gnu_tree'. See documentation
146 of `save_gnu_tree' for more info. */
147 static GTY((length ("max_gnat_nodes"))) tree
*associate_gnat_to_gnu
;
149 #define GET_GNU_TREE(GNAT_ENTITY) \
150 associate_gnat_to_gnu[(GNAT_ENTITY) - First_Node_Id]
152 #define SET_GNU_TREE(GNAT_ENTITY,VAL) \
153 associate_gnat_to_gnu[(GNAT_ENTITY) - First_Node_Id] = (VAL)
155 #define PRESENT_GNU_TREE(GNAT_ENTITY) \
156 (associate_gnat_to_gnu[(GNAT_ENTITY) - First_Node_Id] != NULL_TREE)
158 /* Associates a GNAT entity to a GCC tree node used as a dummy, if any. */
159 static GTY((length ("max_gnat_nodes"))) tree
*dummy_node_table
;
161 #define GET_DUMMY_NODE(GNAT_ENTITY) \
162 dummy_node_table[(GNAT_ENTITY) - First_Node_Id]
164 #define SET_DUMMY_NODE(GNAT_ENTITY,VAL) \
165 dummy_node_table[(GNAT_ENTITY) - First_Node_Id] = (VAL)
167 #define PRESENT_DUMMY_NODE(GNAT_ENTITY) \
168 (dummy_node_table[(GNAT_ENTITY) - First_Node_Id] != NULL_TREE)
/* NOTE (review): garbled extraction; the comment's closing sentence and the
   end of line 174 are missing -- restore from upstream
   gcc/ada/gcc-interface/utils.c.  */
170 /* This variable keeps a table for types for each precision so that we only
171 allocate each of them once. Signed and unsigned types are kept separate.
173 Note that these types are only used when fold-const requests something
174 special. Perhaps we should NOT share these types; we'll see how it
176 static GTY(()) tree signed_and_unsigned_types
[2 * MAX_BITS_PER_WORD
+ 1][2];
178 /* Likewise for float types, but record these by mode. */
179 static GTY(()) tree float_types
[NUM_MACHINE_MODES
];
/* NOTE (review): garbled extraction; the `block' and `jmpbuf_decl' member
   declarations of struct gnat_binding_level and its closing `};' are among the
   missing original lines (the surviving comments at 187/189-190 document them)
   -- restore from upstream gcc/ada/gcc-interface/utils.c.  */
181 /* For each binding contour we allocate a binding_level structure to indicate
182 the binding depth. */
184 struct GTY((chain_next ("%h.chain"))) gnat_binding_level
{
185 /* The binding level containing this one (the enclosing binding level). */
186 struct gnat_binding_level
*chain
;
187 /* The BLOCK node for this level. */
189 /* If nonzero, the setjmp buffer that needs to be updated for any
190 variable-sized definition within this context. */
194 /* The binding level currently in effect. */
195 static GTY(()) struct gnat_binding_level
*current_binding_level
;
197 /* A chain of gnat_binding_level structures awaiting reuse. */
198 static GTY((deletable
)) struct gnat_binding_level
*free_binding_level
;
200 /* The context to be used for global declarations. */
201 static GTY(()) tree global_context
;
203 /* An array of global declarations. */
204 static GTY(()) vec
<tree
, va_gc
> *global_decls
;
206 /* An array of builtin function declarations. */
207 static GTY(()) vec
<tree
, va_gc
> *builtin_decls
;
209 /* An array of global renaming pointers. */
210 static GTY(()) vec
<tree
, va_gc
> *global_renaming_pointers
;
212 /* A chain of unused BLOCK nodes. */
213 static GTY((deletable
)) tree free_block_chain
;
/* NOTE (review): garbled extraction; the members of struct pad_type_hash
   (presumably the `hash' and `type' fields read by the hash callbacks below)
   and its closing `};' are missing -- restore from upstream
   gcc/ada/gcc-interface/utils.c.  */
215 static int pad_type_hash_marked_p (const void *p
);
216 static hashval_t
pad_type_hash_hash (const void *p
);
217 static int pad_type_hash_eq (const void *p1
, const void *p2
);
219 /* A hash table of padded types. It is modelled on the generic type
220 hash table in tree.c, which must thus be used as a reference. */
221 struct GTY(()) pad_type_hash
{
226 static GTY ((if_marked ("pad_type_hash_marked_p"),
227 param_is (struct pad_type_hash
)))
228 htab_t pad_type_hash_table
;
230 static tree
merge_sizes (tree
, tree
, tree
, bool, bool);
231 static tree
compute_related_constant (tree
, tree
);
232 static tree
split_plus (tree
, tree
*);
233 static tree
float_type_for_precision (int, enum machine_mode
);
234 static tree
convert_to_fat_pointer (tree
, tree
);
235 static bool potential_alignment_gap (tree
, tree
, tree
);
/* NOTE (review): garbled extraction; the `void' return-type line and the
   function braces are missing -- restore from upstream
   gcc/ada/gcc-interface/utils.c.  Allocates the two GC'd entity maps and the
   padded-type hash table; pairs with destroy_gnat_utils below.  */
237 /* Initialize data structures of the utils.c module. */
240 init_gnat_utils (void)
242 /* Initialize the association of GNAT nodes to GCC trees. */
243 associate_gnat_to_gnu
= ggc_alloc_cleared_vec_tree (max_gnat_nodes
);
245 /* Initialize the association of GNAT nodes to GCC trees as dummies. */
246 dummy_node_table
= ggc_alloc_cleared_vec_tree (max_gnat_nodes
);
248 /* Initialize the hash table of padded types. */
249 pad_type_hash_table
= htab_create_ggc (512, pad_type_hash_hash
,
250 pad_type_hash_eq
, 0);
/* NOTE (review): garbled extraction; return type and braces are missing --
   restore from upstream gcc/ada/gcc-interface/utils.c.  Frees everything
   init_gnat_utils allocated and NULLs the pointers so stale GC roots are not
   kept alive.  */
253 /* Destroy data structures of the utils.c module. */
256 destroy_gnat_utils (void)
258 /* Destroy the association of GNAT nodes to GCC trees. */
259 ggc_free (associate_gnat_to_gnu
);
260 associate_gnat_to_gnu
= NULL
;
262 /* Destroy the association of GNAT nodes to GCC trees as dummies. */
263 ggc_free (dummy_node_table
);
264 dummy_node_table
= NULL
;
266 /* Destroy the hash table of padded types. */
267 htab_delete (pad_type_hash_table
);
268 pad_type_hash_table
= NULL
;
270 /* Invalidate the global renaming pointers. */
271 invalidate_global_renaming_pointers ();
/* NOTE (review): garbled extraction; return type, braces and the tail of the
   comment at line 285 are missing -- restore from upstream
   gcc/ada/gcc-interface/utils.c.  Records GNU_DECL in the entity map via
   SET_GNU_TREE after asserting no conflicting prior association.  */
274 /* GNAT_ENTITY is a GNAT tree node for an entity. Associate GNU_DECL, a GCC
275 tree node, with GNAT_ENTITY. If GNU_DECL is not a ..._DECL node, abort.
276 If NO_CHECK is true, the latter check is suppressed.
278 If GNU_DECL is zero, reset a previous association. */
281 save_gnu_tree (Entity_Id gnat_entity
, tree gnu_decl
, bool no_check
)
283 /* Check that GNAT_ENTITY is not already defined and that it is being set
284 to something which is a decl. If that is not the case, this usually
285 means GNAT_ENTITY is defined twice, but occasionally is due to some
287 gcc_assert (!(gnu_decl
288 && (PRESENT_GNU_TREE (gnat_entity
)
289 || (!no_check
&& !DECL_P (gnu_decl
)))));
291 SET_GNU_TREE (gnat_entity
, gnu_decl
);
/* NOTE (review): garbled extraction; return type and braces are missing --
   restore from upstream gcc/ada/gcc-interface/utils.c.  Asserting accessor
   around GET_GNU_TREE.  */
294 /* GNAT_ENTITY is a GNAT tree node for an entity. Return the GCC tree node
295 that was associated with it. If there is no such tree node, abort.
297 In some cases, such as delayed elaboration or expressions that need to
298 be elaborated only once, GNAT_ENTITY is really not an entity. */
301 get_gnu_tree (Entity_Id gnat_entity
)
303 gcc_assert (PRESENT_GNU_TREE (gnat_entity
));
304 return GET_GNU_TREE (gnat_entity
);
/* NOTE (review): garbled extraction; return type and braces are missing --
   restore from upstream gcc/ada/gcc-interface/utils.c.  Thin wrapper over the
   PRESENT_GNU_TREE macro.  */
307 /* Return nonzero if a GCC tree has been associated with GNAT_ENTITY. */
310 present_gnu_tree (Entity_Id gnat_entity
)
312 return PRESENT_GNU_TREE (gnat_entity
);
/* NOTE (review): garbled extraction; missing original lines include the return
   type, braces, the `gnu_type' local declaration, the ENUMERAL_TYPE fallback
   arm of the make_node call and the final return -- restore from upstream
   gcc/ada/gcc-interface/utils.c.  Creates (or reuses) a placeholder type for
   GNAT_TYPE's underlying type and caches it in dummy_node_table.  */
315 /* Make a dummy type corresponding to GNAT_TYPE. */
318 make_dummy_type (Entity_Id gnat_type
)
320 Entity_Id gnat_underlying
= Gigi_Equivalent_Type (gnat_type
);
323 /* If there is an equivalent type, get its underlying type. */
324 if (Present (gnat_underlying
))
325 gnat_underlying
= Gigi_Equivalent_Type (Underlying_Type (gnat_underlying
));
327 /* If there was no equivalent type (can only happen when just annotating
328 types) or underlying type, go back to the original type. */
329 if (No (gnat_underlying
))
330 gnat_underlying
= gnat_type
;
332 /* If it there already a dummy type, use that one. Else make one. */
333 if (PRESENT_DUMMY_NODE (gnat_underlying
))
334 return GET_DUMMY_NODE (gnat_underlying
);
336 /* If this is a record, make a RECORD_TYPE or UNION_TYPE; else make
338 gnu_type
= make_node (Is_Record_Type (gnat_underlying
)
339 ? tree_code_for_record_type (gnat_underlying
)
341 TYPE_NAME (gnu_type
) = get_entity_name (gnat_type
);
342 TYPE_DUMMY_P (gnu_type
) = 1;
343 TYPE_STUB_DECL (gnu_type
)
344 = create_type_stub_decl (TYPE_NAME (gnu_type
), gnu_type
);
345 if (Is_By_Reference_Type (gnat_underlying
))
346 TYPE_BY_REFERENCE_P (gnu_type
) = 1;
348 SET_DUMMY_NODE (gnat_underlying
, gnu_type
);
/* NOTE (review): garbled extraction; return type and braces are missing --
   restore from upstream gcc/ada/gcc-interface/utils.c.  Thin wrapper over the
   GET_DUMMY_NODE macro.  */
353 /* Return the dummy type that was made for GNAT_TYPE, if any. */
356 get_dummy_type (Entity_Id gnat_type
)
358 return GET_DUMMY_NODE (gnat_type
);
/* NOTE (review): garbled extraction; missing original lines include the return
   type, braces, the tail of the comment at line 381-382, the second argument
   of the create_type_stub_decl call and the `DECL_CHAIN (fields)' lvalue the
   orphaned `= create_field_decl (... "P_BOUNDS" ...)' at 389 was assigned to
   -- restore from upstream gcc/ada/gcc-interface/utils.c.  Builds the XUB/XUA/
   XUP/XUT dummy companion types for an unconstrained array designated type.  */
361 /* Build dummy fat and thin pointer types whose designated type is specified
362 by GNAT_DESIG_TYPE/GNU_DESIG_TYPE and attach them to the latter. */
365 build_dummy_unc_pointer_types (Entity_Id gnat_desig_type
, tree gnu_desig_type
)
367 tree gnu_template_type
, gnu_ptr_template
, gnu_array_type
, gnu_ptr_array
;
368 tree gnu_fat_type
, fields
, gnu_object_type
;
370 gnu_template_type
= make_node (RECORD_TYPE
);
371 TYPE_NAME (gnu_template_type
) = create_concat_name (gnat_desig_type
, "XUB");
372 TYPE_DUMMY_P (gnu_template_type
) = 1;
373 gnu_ptr_template
= build_pointer_type (gnu_template_type
);
375 gnu_array_type
= make_node (ENUMERAL_TYPE
);
376 TYPE_NAME (gnu_array_type
) = create_concat_name (gnat_desig_type
, "XUA");
377 TYPE_DUMMY_P (gnu_array_type
) = 1;
378 gnu_ptr_array
= build_pointer_type (gnu_array_type
);
380 gnu_fat_type
= make_node (RECORD_TYPE
);
381 /* Build a stub DECL to trigger the special processing for fat pointer types
383 TYPE_NAME (gnu_fat_type
)
384 = create_type_stub_decl (create_concat_name (gnat_desig_type
, "XUP"),
386 fields
= create_field_decl (get_identifier ("P_ARRAY"), gnu_ptr_array
,
387 gnu_fat_type
, NULL_TREE
, NULL_TREE
, 0, 0);
389 = create_field_decl (get_identifier ("P_BOUNDS"), gnu_ptr_template
,
390 gnu_fat_type
, NULL_TREE
, NULL_TREE
, 0, 0);
391 finish_fat_pointer_type (gnu_fat_type
, fields
);
392 SET_TYPE_UNCONSTRAINED_ARRAY (gnu_fat_type
, gnu_desig_type
);
393 /* Suppress debug info until after the type is completed. */
394 TYPE_DECL_SUPPRESS_DEBUG (TYPE_STUB_DECL (gnu_fat_type
)) = 1;
396 gnu_object_type
= make_node (RECORD_TYPE
);
397 TYPE_NAME (gnu_object_type
) = create_concat_name (gnat_desig_type
, "XUT");
398 TYPE_DUMMY_P (gnu_object_type
) = 1;
400 TYPE_POINTER_TO (gnu_desig_type
) = gnu_fat_type
;
401 TYPE_OBJECT_RECORD_TYPE (gnu_desig_type
) = gnu_object_type
;
/* NOTE (review): garbled extraction; return type and braces are missing --
   restore from upstream gcc/ada/gcc-interface/utils.c.  True when forced
   global (force_global) or when no function is being compiled.  */
404 /* Return true if we are in the global binding level. */
407 global_bindings_p (void)
409 return force_global
|| current_function_decl
== NULL_TREE
;
/* NOTE (review): garbled extraction; return type, braces and the `else' lines
   pairing with the two if-statements (before lines 426 and 436) are missing --
   restore from upstream gcc/ada/gcc-interface/utils.c.  Pushes a new
   gnat_binding_level, recycling structures/BLOCKs from the free lists when
   available.  */
412 /* Enter a new binding level. */
415 gnat_pushlevel (void)
417 struct gnat_binding_level
*newlevel
= NULL
;
419 /* Reuse a struct for this binding level, if there is one. */
420 if (free_binding_level
)
422 newlevel
= free_binding_level
;
423 free_binding_level
= free_binding_level
->chain
;
426 newlevel
= ggc_alloc_gnat_binding_level ();
428 /* Use a free BLOCK, if any; otherwise, allocate one. */
429 if (free_block_chain
)
431 newlevel
->block
= free_block_chain
;
432 free_block_chain
= BLOCK_CHAIN (free_block_chain
);
433 BLOCK_CHAIN (newlevel
->block
) = NULL_TREE
;
436 newlevel
->block
= make_node (BLOCK
);
438 /* Point the BLOCK we just made to its parent. */
439 if (current_binding_level
)
440 BLOCK_SUPERCONTEXT (newlevel
->block
) = current_binding_level
->block
;
442 BLOCK_VARS (newlevel
->block
) = NULL_TREE
;
443 BLOCK_SUBBLOCKS (newlevel
->block
) = NULL_TREE
;
444 TREE_USED (newlevel
->block
) = 1;
446 /* Add this level to the front of the chain (stack) of active levels. */
447 newlevel
->chain
= current_binding_level
;
448 newlevel
->jmpbuf_decl
= NULL_TREE
;
449 current_binding_level
= newlevel
;
/* NOTE (review): garbled extraction; return type and braces are missing --
   restore from upstream gcc/ada/gcc-interface/utils.c.  Ties the current
   level's BLOCK to FNDECL in both directions.  */
452 /* Set SUPERCONTEXT of the BLOCK for the current binding level to FNDECL
453 and point FNDECL to this BLOCK. */
456 set_current_block_context (tree fndecl
)
458 BLOCK_SUPERCONTEXT (current_binding_level
->block
) = fndecl
;
459 DECL_INITIAL (fndecl
) = current_binding_level
->block
;
460 set_block_for_group (current_binding_level
->block
);
/* NOTE (review): garbled extraction; return type and braces are missing --
   restore from upstream gcc/ada/gcc-interface/utils.c.  Simple setter for the
   current level's jmpbuf_decl.  */
463 /* Set the jmpbuf_decl for the current binding level to DECL. */
466 set_block_jmpbuf_decl (tree decl
)
468 current_binding_level
->jmpbuf_decl
= decl
;
/* NOTE (review): garbled extraction; return type and braces are missing --
   restore from upstream gcc/ada/gcc-interface/utils.c.  Simple getter for the
   current level's jmpbuf_decl.  */
471 /* Get the jmpbuf_decl, if any, for the current binding level. */
474 get_block_jmpbuf_decl (void)
476 return current_binding_level
->jmpbuf_decl
;
/* NOTE (review): garbled extraction; the function header line (presumably
   `void gnat_poplevel (void)' at original lines 480-483), braces, the empty
   FUNCTION_DECL branch body and the final `else' arm's opening are missing --
   restore from upstream gcc/ada/gcc-interface/utils.c.  Pops the current
   binding level, recycling empty BLOCKs onto free_block_chain and the level
   struct onto free_binding_level.  */
479 /* Exit a binding level. Set any BLOCK into the current code group. */
484 struct gnat_binding_level
*level
= current_binding_level
;
485 tree block
= level
->block
;
487 BLOCK_VARS (block
) = nreverse (BLOCK_VARS (block
));
488 BLOCK_SUBBLOCKS (block
) = blocks_nreverse (BLOCK_SUBBLOCKS (block
));
490 /* If this is a function-level BLOCK don't do anything. Otherwise, if there
491 are no variables free the block and merge its subblocks into those of its
492 parent block. Otherwise, add it to the list of its parent. */
493 if (TREE_CODE (BLOCK_SUPERCONTEXT (block
)) == FUNCTION_DECL
)
495 else if (BLOCK_VARS (block
) == NULL_TREE
)
497 BLOCK_SUBBLOCKS (level
->chain
->block
)
498 = block_chainon (BLOCK_SUBBLOCKS (block
),
499 BLOCK_SUBBLOCKS (level
->chain
->block
));
500 BLOCK_CHAIN (block
) = free_block_chain
;
501 free_block_chain
= block
;
505 BLOCK_CHAIN (block
) = BLOCK_SUBBLOCKS (level
->chain
->block
);
506 BLOCK_SUBBLOCKS (level
->chain
->block
) = block
;
507 TREE_USED (block
) = 1;
508 set_block_for_group (block
);
511 /* Free this binding structure. */
512 current_binding_level
= level
->chain
;
513 level
->chain
= free_binding_level
;
514 free_binding_level
= level
;
/* NOTE (review): garbled extraction; the function header (presumably
   `void gnat_zaplevel (void)' around original lines 518-521) and braces are
   missing -- restore from upstream gcc/ada/gcc-interface/utils.c.  Like the
   pop above but discards the BLOCK outright onto free_block_chain.  */
517 /* Exit a binding level and discard the associated BLOCK. */
522 struct gnat_binding_level
*level
= current_binding_level
;
523 tree block
= level
->block
;
525 BLOCK_CHAIN (block
) = free_block_chain
;
526 free_block_chain
= block
;
528 /* Free this binding structure. */
529 current_binding_level
= level
->chain
;
530 level
->chain
= free_binding_level
;
531 free_binding_level
= level
;
/* NOTE (review): garbled extraction; missing original lines include the return
   type, braces, several `else' keywords (before lines 549, 580, 584 and 631),
   an if-statement guarding the global_context creation at 544, and whole
   statements implied by gaps such as 635->640 -- restore from upstream
   gcc/ada/gcc-interface/utils.c.  Registers DECL in the proper scope (globals
   list, builtins list, or current BLOCK) and manages type naming.  */
534 /* Record DECL as belonging to the current lexical scope and use GNAT_NODE
535 for location information and flag propagation. */
538 gnat_pushdecl (tree decl
, Node_Id gnat_node
)
540 /* If DECL is public external or at top level, it has global context. */
541 if ((TREE_PUBLIC (decl
) && DECL_EXTERNAL (decl
)) || global_bindings_p ())
544 global_context
= build_translation_unit_decl (NULL_TREE
);
545 DECL_CONTEXT (decl
) = global_context
;
549 DECL_CONTEXT (decl
) = current_function_decl
;
551 /* Functions imported in another function are not really nested.
552 For really nested functions mark them initially as needing
553 a static chain for uses of that flag before unnesting;
554 lower_nested_functions will then recompute it. */
555 if (TREE_CODE (decl
) == FUNCTION_DECL
&& !TREE_PUBLIC (decl
))
556 DECL_STATIC_CHAIN (decl
) = 1;
559 TREE_NO_WARNING (decl
) = (No (gnat_node
) || Warnings_Off (gnat_node
));
561 /* Set the location of DECL and emit a declaration for it. */
562 if (Present (gnat_node
))
563 Sloc_to_locus (Sloc (gnat_node
), &DECL_SOURCE_LOCATION (decl
));
565 add_decl_expr (decl
, gnat_node
);
567 /* Put the declaration on the list. The list of declarations is in reverse
568 order. The list will be reversed later. Put global declarations in the
569 globals list and local ones in the current block. But skip TYPE_DECLs
570 for UNCONSTRAINED_ARRAY_TYPE in both cases, as they will cause trouble
571 with the debugger and aren't needed anyway. */
572 if (!(TREE_CODE (decl
) == TYPE_DECL
573 && TREE_CODE (TREE_TYPE (decl
)) == UNCONSTRAINED_ARRAY_TYPE
))
575 if (DECL_EXTERNAL (decl
))
577 if (TREE_CODE (decl
) == FUNCTION_DECL
&& DECL_BUILT_IN (decl
))
578 vec_safe_push (builtin_decls
, decl
);
580 else if (global_bindings_p ())
581 vec_safe_push (global_decls
, decl
);
584 DECL_CHAIN (decl
) = BLOCK_VARS (current_binding_level
->block
);
585 BLOCK_VARS (current_binding_level
->block
) = decl
;
589 /* For the declaration of a type, set its name if it either is not already
590 set or if the previous type name was not derived from a source name.
591 We'd rather have the type named with a real name and all the pointer
592 types to the same object have the same POINTER_TYPE node. Code in the
593 equivalent function of c-decl.c makes a copy of the type node here, but
594 that may cause us trouble with incomplete types. We make an exception
595 for fat pointer types because the compiler automatically builds them
596 for unconstrained array types and the debugger uses them to represent
597 both these and pointers to these. */
598 if (TREE_CODE (decl
) == TYPE_DECL
&& DECL_NAME (decl
))
600 tree t
= TREE_TYPE (decl
);
602 if (!(TYPE_NAME (t
) && TREE_CODE (TYPE_NAME (t
)) == TYPE_DECL
))
604 /* Array and pointer types aren't "tagged" types so we force the
605 type to be associated with its typedef in the DWARF back-end,
606 in order to make sure that the latter is always preserved. */
607 if (!DECL_ARTIFICIAL (decl
)
608 && (TREE_CODE (t
) == ARRAY_TYPE
609 || TREE_CODE (t
) == POINTER_TYPE
))
611 tree tt
= build_distinct_type_copy (t
);
612 if (TREE_CODE (t
) == POINTER_TYPE
)
613 TYPE_NEXT_PTR_TO (t
) = tt
;
614 TYPE_NAME (tt
) = DECL_NAME (decl
);
615 TYPE_CONTEXT (tt
) = DECL_CONTEXT (decl
);
616 TYPE_STUB_DECL (tt
) = TYPE_STUB_DECL (t
);
617 DECL_ORIGINAL_TYPE (decl
) = tt
;
620 else if (TYPE_IS_FAT_POINTER_P (t
))
622 /* We need a variant for the placeholder machinery to work. */
623 tree tt
= build_variant_type_copy (t
);
624 TYPE_NAME (tt
) = decl
;
625 TYPE_CONTEXT (tt
) = DECL_CONTEXT (decl
);
626 TREE_USED (tt
) = TREE_USED (t
);
627 TREE_TYPE (decl
) = tt
;
628 if (DECL_ORIGINAL_TYPE (TYPE_NAME (t
)))
629 DECL_ORIGINAL_TYPE (decl
) = DECL_ORIGINAL_TYPE (TYPE_NAME (t
));
631 DECL_ORIGINAL_TYPE (decl
) = t
;
632 DECL_ARTIFICIAL (decl
) = 0;
635 else if (DECL_ARTIFICIAL (TYPE_NAME (t
)) && !DECL_ARTIFICIAL (decl
))
640 /* Propagate the name to all the anonymous variants. This is needed
641 for the type qualifiers machinery to work properly. */
643 for (t
= TYPE_MAIN_VARIANT (t
); t
; t
= TYPE_NEXT_VARIANT (t
))
644 if (!(TYPE_NAME (t
) && TREE_CODE (TYPE_NAME (t
)) == TYPE_DECL
))
646 TYPE_NAME (t
) = decl
;
647 TYPE_CONTEXT (t
) = DECL_CONTEXT (decl
);
/* NOTE (review): garbled extraction; missing original lines include the return
   type, braces, the `record_addr_st' local declaration (only its initializer
   at 669 survives), a trailing argument of the create_field_decl call at 721,
   part of the TYPE_SIZE computation at 730-732 and the final return -- restore
   from upstream gcc/ada/gcc-interface/utils.c.  Builds a one-field wrapper
   record whose field position over-aligns TYPE at runtime.  */
652 /* Create a record type that contains a SIZE bytes long field of TYPE with a
653 starting bit position so that it is aligned to ALIGN bits, and leaving at
654 least ROOM bytes free before the field. BASE_ALIGN is the alignment the
655 record is guaranteed to get. GNAT_NODE is used for the position of the
656 associated TYPE_DECL. */
659 make_aligning_type (tree type
, unsigned int align
, tree size
,
660 unsigned int base_align
, int room
, Node_Id gnat_node
)
662 /* We will be crafting a record type with one field at a position set to be
663 the next multiple of ALIGN past record'address + room bytes. We use a
664 record placeholder to express record'address. */
665 tree record_type
= make_node (RECORD_TYPE
);
666 tree record
= build0 (PLACEHOLDER_EXPR
, record_type
);
669 = convert (sizetype
, build_unary_op (ADDR_EXPR
, NULL_TREE
, record
));
671 /* The diagram below summarizes the shape of what we manipulate:
673 <--------- pos ---------->
674 { +------------+-------------+-----------------+
675 record =>{ |############| ... | field (type) |
676 { +------------+-------------+-----------------+
677 |<-- room -->|<- voffset ->|<---- size ----->|
680 record_addr vblock_addr
682 Every length is in sizetype bytes there, except "pos" which has to be
683 set as a bit position in the GCC tree for the record. */
684 tree room_st
= size_int (room
);
685 tree vblock_addr_st
= size_binop (PLUS_EXPR
, record_addr_st
, room_st
);
686 tree voffset_st
, pos
, field
;
688 tree name
= TYPE_NAME (type
);
690 if (TREE_CODE (name
) == TYPE_DECL
)
691 name
= DECL_NAME (name
);
692 name
= concat_name (name
, "ALIGN");
693 TYPE_NAME (record_type
) = name
;
695 /* Compute VOFFSET and then POS. The next byte position multiple of some
696 alignment after some address is obtained by "and"ing the alignment minus
697 1 with the two's complement of the address. */
698 voffset_st
= size_binop (BIT_AND_EXPR
,
699 fold_build1 (NEGATE_EXPR
, sizetype
, vblock_addr_st
),
700 size_int ((align
/ BITS_PER_UNIT
) - 1));
702 /* POS = (ROOM + VOFFSET) * BIT_PER_UNIT, in bitsizetype. */
703 pos
= size_binop (MULT_EXPR
,
704 convert (bitsizetype
,
705 size_binop (PLUS_EXPR
, room_st
, voffset_st
)),
708 /* Craft the GCC record representation. We exceptionally do everything
709 manually here because 1) our generic circuitry is not quite ready to
710 handle the complex position/size expressions we are setting up, 2) we
711 have a strong simplifying factor at hand: we know the maximum possible
712 value of voffset, and 3) we have to set/reset at least the sizes in
713 accordance with this maximum value anyway, as we need them to convey
714 what should be "alloc"ated for this type.
716 Use -1 as the 'addressable' indication for the field to prevent the
717 creation of a bitfield. We don't need one, it would have damaging
718 consequences on the alignment computation, and create_field_decl would
719 make one without this special argument, for instance because of the
720 complex position expression. */
721 field
= create_field_decl (get_identifier ("F"), type
, record_type
, size
,
723 TYPE_FIELDS (record_type
) = field
;
725 TYPE_ALIGN (record_type
) = base_align
;
726 TYPE_USER_ALIGN (record_type
) = 1;
728 TYPE_SIZE (record_type
)
729 = size_binop (PLUS_EXPR
,
730 size_binop (MULT_EXPR
, convert (bitsizetype
, size
),
732 bitsize_int (align
+ room
* BITS_PER_UNIT
));
733 TYPE_SIZE_UNIT (record_type
)
734 = size_binop (PLUS_EXPR
, size
,
735 size_int (room
+ align
/ BITS_PER_UNIT
));
737 SET_TYPE_MODE (record_type
, BLKmode
);
738 relate_alias_sets (record_type
, type
, ALIAS_SET_COPY
);
740 /* Declare it now since it will never be declared otherwise. This is
741 necessary to ensure that its subtrees are properly marked. */
742 create_type_decl (name
, record_type
, true, false, gnat_node
);
/* NOTE (review): garbled extraction; missing original lines include the return
   type, braces, early `return type;' statements implied by the gaps after
   lines 762, 792 and 799, the outer `align' declaration, several `else'
   keywords, the `new_field' assignment lvalue at 834-835 and the closing
   return -- restore from upstream gcc/ada/gcc-interface/utils.c.  Recursively
   rebuilds an aggregate with tighter alignment/size so it can get a
   non-BLKmode or a smaller footprint when packed.  */
747 /* TYPE is a RECORD_TYPE, UNION_TYPE or QUAL_UNION_TYPE that is being used
748 as the field type of a packed record if IN_RECORD is true, or as the
749 component type of a packed array if IN_RECORD is false. See if we can
750 rewrite it either as a type that has a non-BLKmode, which we can pack
751 tighter in the packed record case, or as a smaller type. If so, return
752 the new type. If not, return the original type. */
755 make_packable_type (tree type
, bool in_record
)
757 unsigned HOST_WIDE_INT size
= tree_low_cst (TYPE_SIZE (type
), 1);
758 unsigned HOST_WIDE_INT new_size
;
759 tree new_type
, old_field
, field_list
= NULL_TREE
;
762 /* No point in doing anything if the size is zero. */
766 new_type
= make_node (TREE_CODE (type
));
768 /* Copy the name and flags from the old type to that of the new.
769 Note that we rely on the pointer equality created here for
770 TYPE_NAME to look through conversions in various places. */
771 TYPE_NAME (new_type
) = TYPE_NAME (type
);
772 TYPE_JUSTIFIED_MODULAR_P (new_type
) = TYPE_JUSTIFIED_MODULAR_P (type
);
773 TYPE_CONTAINS_TEMPLATE_P (new_type
) = TYPE_CONTAINS_TEMPLATE_P (type
);
774 if (TREE_CODE (type
) == RECORD_TYPE
)
775 TYPE_PADDING_P (new_type
) = TYPE_PADDING_P (type
);
777 /* If we are in a record and have a small size, set the alignment to
778 try for an integral mode. Otherwise set it to try for a smaller
779 type with BLKmode. */
780 if (in_record
&& size
<= MAX_FIXED_MODE_SIZE
)
782 align
= ceil_pow2 (size
);
783 TYPE_ALIGN (new_type
) = align
;
784 new_size
= (size
+ align
- 1) & -align
;
788 unsigned HOST_WIDE_INT align
;
790 /* Do not try to shrink the size if the RM size is not constant. */
791 if (TYPE_CONTAINS_TEMPLATE_P (type
)
792 || !host_integerp (TYPE_ADA_SIZE (type
), 1))
795 /* Round the RM size up to a unit boundary to get the minimal size
796 for a BLKmode record. Give up if it's already the size. */
797 new_size
= TREE_INT_CST_LOW (TYPE_ADA_SIZE (type
));
798 new_size
= (new_size
+ BITS_PER_UNIT
- 1) & -BITS_PER_UNIT
;
799 if (new_size
== size
)
802 align
= new_size
& -new_size
;
803 TYPE_ALIGN (new_type
) = MIN (TYPE_ALIGN (type
), align
);
806 TYPE_USER_ALIGN (new_type
) = 1;
808 /* Now copy the fields, keeping the position and size as we don't want
809 to change the layout by propagating the packedness downwards. */
810 for (old_field
= TYPE_FIELDS (type
); old_field
;
811 old_field
= DECL_CHAIN (old_field
))
813 tree new_field_type
= TREE_TYPE (old_field
);
814 tree new_field
, new_size
;
816 if (RECORD_OR_UNION_TYPE_P (new_field_type
)
817 && !TYPE_FAT_POINTER_P (new_field_type
)
818 && host_integerp (TYPE_SIZE (new_field_type
), 1))
819 new_field_type
= make_packable_type (new_field_type
, true);
821 /* However, for the last field in a not already packed record type
822 that is of an aggregate type, we need to use the RM size in the
823 packable version of the record type, see finish_record_type. */
824 if (!DECL_CHAIN (old_field
)
825 && !TYPE_PACKED (type
)
826 && RECORD_OR_UNION_TYPE_P (new_field_type
)
827 && !TYPE_FAT_POINTER_P (new_field_type
)
828 && !TYPE_CONTAINS_TEMPLATE_P (new_field_type
)
829 && TYPE_ADA_SIZE (new_field_type
))
830 new_size
= TYPE_ADA_SIZE (new_field_type
);
832 new_size
= DECL_SIZE (old_field
);
835 = create_field_decl (DECL_NAME (old_field
), new_field_type
, new_type
,
836 new_size
, bit_position (old_field
),
838 !DECL_NONADDRESSABLE_P (old_field
));
840 DECL_INTERNAL_P (new_field
) = DECL_INTERNAL_P (old_field
);
841 SET_DECL_ORIGINAL_FIELD_TO_FIELD (new_field
, old_field
);
842 if (TREE_CODE (new_type
) == QUAL_UNION_TYPE
)
843 DECL_QUALIFIER (new_field
) = DECL_QUALIFIER (old_field
);
845 DECL_CHAIN (new_field
) = field_list
;
846 field_list
= new_field
;
849 finish_record_type (new_type
, nreverse (field_list
), 2, false);
850 relate_alias_sets (new_type
, type
, ALIAS_SET_COPY
);
851 SET_DECL_PARALLEL_TYPE (TYPE_STUB_DECL (new_type
),
852 DECL_PARALLEL_TYPE (TYPE_STUB_DECL (type
)));
854 /* If this is a padding record, we never want to make the size smaller
855 than what was specified. For QUAL_UNION_TYPE, also copy the size. */
856 if (TYPE_IS_PADDING_P (type
) || TREE_CODE (type
) == QUAL_UNION_TYPE
)
858 TYPE_SIZE (new_type
) = TYPE_SIZE (type
);
859 TYPE_SIZE_UNIT (new_type
) = TYPE_SIZE_UNIT (type
);
864 TYPE_SIZE (new_type
) = bitsize_int (new_size
);
865 TYPE_SIZE_UNIT (new_type
)
866 = size_int ((new_size
+ BITS_PER_UNIT
- 1) / BITS_PER_UNIT
);
869 if (!TYPE_CONTAINS_TEMPLATE_P (type
))
870 SET_TYPE_ADA_SIZE (new_type
, TYPE_ADA_SIZE (type
));
872 compute_record_mode (new_type
);
874 /* Try harder to get a packable type if necessary, for example
875 in case the record itself contains a BLKmode field. */
876 if (in_record
&& TYPE_MODE (new_type
) == BLKmode
)
877 SET_TYPE_MODE (new_type
,
878 mode_for_size_tree (TYPE_SIZE (new_type
), MODE_INT
, 1));
880 /* If neither the mode nor the size has shrunk, return the old type. */
881 if (TYPE_MODE (new_type
) == BLKmode
&& new_size
>= size
)
/* NOTE (review): garbled extraction; missing original lines include the return
   type, braces, several locals (`biased_p', `new_type'), the switch's case
   labels and `break's, early `return type;' statements implied by the gaps
   after lines 901, 914 and 922, the lvalues of the orphaned `=' lines at 953
   and 964, and the default return -- restore from upstream
   gcc/ada/gcc-interface/utils.c.  Derives an integral or pointer type sized
   to SIZE_TREE, handling biased, fat- and thin-pointer cases.  */
887 /* Given a type TYPE, return a new type whose size is appropriate for SIZE.
888 If TYPE is the best type, return it. Otherwise, make a new type. We
889 only support new integral and pointer types. FOR_BIASED is true if
890 we are making a biased type. */
893 make_type_from_size (tree type
, tree size_tree
, bool for_biased
)
895 unsigned HOST_WIDE_INT size
;
899 /* If size indicates an error, just return TYPE to avoid propagating
900 the error. Likewise if it's too large to represent. */
901 if (!size_tree
|| !host_integerp (size_tree
, 1))
904 size
= tree_low_cst (size_tree
, 1);
906 switch (TREE_CODE (type
))
911 biased_p
= (TREE_CODE (type
) == INTEGER_TYPE
912 && TYPE_BIASED_REPRESENTATION_P (type
));
914 /* Integer types with precision 0 are forbidden. */
918 /* Only do something if the type isn't a packed array type and doesn't
919 already have the proper size and the size isn't too large. */
920 if (TYPE_IS_PACKED_ARRAY_TYPE_P (type
)
921 || (TYPE_PRECISION (type
) == size
&& biased_p
== for_biased
)
922 || size
> LONG_LONG_TYPE_SIZE
)
925 biased_p
|= for_biased
;
926 if (TYPE_UNSIGNED (type
) || biased_p
)
927 new_type
= make_unsigned_type (size
);
929 new_type
= make_signed_type (size
);
930 TREE_TYPE (new_type
) = TREE_TYPE (type
) ? TREE_TYPE (type
) : type
;
931 SET_TYPE_RM_MIN_VALUE (new_type
,
932 convert (TREE_TYPE (new_type
),
933 TYPE_MIN_VALUE (type
)));
934 SET_TYPE_RM_MAX_VALUE (new_type
,
935 convert (TREE_TYPE (new_type
),
936 TYPE_MAX_VALUE (type
)));
937 /* Copy the name to show that it's essentially the same type and
938 not a subrange type. */
939 TYPE_NAME (new_type
) = TYPE_NAME (type
);
940 TYPE_BIASED_REPRESENTATION_P (new_type
) = biased_p
;
941 SET_TYPE_RM_SIZE (new_type
, bitsize_int (size
));
945 /* Do something if this is a fat pointer, in which case we
946 may need to return the thin pointer. */
947 if (TYPE_FAT_POINTER_P (type
) && size
< POINTER_SIZE
* 2)
949 enum machine_mode p_mode
= mode_for_size (size
, MODE_INT
, 0);
950 if (!targetm
.valid_pointer_mode (p_mode
))
953 build_pointer_type_for_mode
954 (TYPE_OBJECT_RECORD_TYPE (TYPE_UNCONSTRAINED_ARRAY (type
)),
960 /* Only do something if this is a thin pointer, in which case we
961 may need to return the fat pointer. */
962 if (TYPE_IS_THIN_POINTER_P (type
) && size
>= POINTER_SIZE
* 2)
964 build_pointer_type (TYPE_UNCONSTRAINED_ARRAY (TREE_TYPE (type
)));
/* NOTE (review): garbled extraction; return type and braces are missing --
   restore from upstream gcc/ada/gcc-interface/utils.c.  if_marked callback of
   pad_type_hash_table: keep an entry while GC still marks its type.  */
974 /* See if the data pointed to by the hash table slot is marked. */
977 pad_type_hash_marked_p (const void *p
)
979 const_tree
const type
= ((const struct pad_type_hash
*) p
)->type
;
981 return ggc_marked_p (type
);
984 /* Return the cached hash value. */
987 pad_type_hash_hash (const void *p
)
989 return ((const struct pad_type_hash
*) p
)->hash
;
992 /* Return 1 iff the padded types are equivalent. */
995 pad_type_hash_eq (const void *p1
, const void *p2
)
997 const struct pad_type_hash
*const t1
= (const struct pad_type_hash
*) p1
;
998 const struct pad_type_hash
*const t2
= (const struct pad_type_hash
*) p2
;
1001 if (t1
->hash
!= t2
->hash
)
1007 /* We consider that the padded types are equivalent if they pad the same
1008 type and have the same size, alignment and RM size. Taking the mode
1009 into account is redundant since it is determined by the others. */
1011 TREE_TYPE (TYPE_FIELDS (type1
)) == TREE_TYPE (TYPE_FIELDS (type2
))
1012 && TYPE_SIZE (type1
) == TYPE_SIZE (type2
)
1013 && TYPE_ALIGN (type1
) == TYPE_ALIGN (type2
)
1014 && TYPE_ADA_SIZE (type1
) == TYPE_ADA_SIZE (type2
);
1017 /* Ensure that TYPE has SIZE and ALIGN. Make and return a new padded type
1018 if needed. We have already verified that SIZE and TYPE are large enough.
1019 GNAT_ENTITY is used to name the resulting record and to issue a warning.
1020 IS_COMPONENT_TYPE is true if this is being done for the component type of
1021 an array. IS_USER_TYPE is true if the original type needs to be completed.
1022 DEFINITION is true if this type is being defined. SET_RM_SIZE is true if
1023 the RM size of the resulting type is to be set to SIZE too. */
1026 maybe_pad_type (tree type
, tree size
, unsigned int align
,
1027 Entity_Id gnat_entity
, bool is_component_type
,
1028 bool is_user_type
, bool definition
, bool set_rm_size
)
1030 tree orig_size
= TYPE_SIZE (type
);
1033 /* If TYPE is a padded type, see if it agrees with any size and alignment
1034 we were given. If so, return the original type. Otherwise, strip
1035 off the padding, since we will either be returning the inner type
1036 or repadding it. If no size or alignment is specified, use that of
1037 the original padded type. */
1038 if (TYPE_IS_PADDING_P (type
))
1041 || operand_equal_p (round_up (size
,
1042 MAX (align
, TYPE_ALIGN (type
))),
1043 round_up (TYPE_SIZE (type
),
1044 MAX (align
, TYPE_ALIGN (type
))),
1046 && (align
== 0 || align
== TYPE_ALIGN (type
)))
1050 size
= TYPE_SIZE (type
);
1052 align
= TYPE_ALIGN (type
);
1054 type
= TREE_TYPE (TYPE_FIELDS (type
));
1055 orig_size
= TYPE_SIZE (type
);
1058 /* If the size is either not being changed or is being made smaller (which
1059 is not done here and is only valid for bitfields anyway), show the size
1060 isn't changing. Likewise, clear the alignment if it isn't being
1061 changed. Then return if we aren't doing anything. */
1063 && (operand_equal_p (size
, orig_size
, 0)
1064 || (TREE_CODE (orig_size
) == INTEGER_CST
1065 && tree_int_cst_lt (size
, orig_size
))))
1068 if (align
== TYPE_ALIGN (type
))
1071 if (align
== 0 && !size
)
1074 /* If requested, complete the original type and give it a name. */
1076 create_type_decl (get_entity_name (gnat_entity
), type
,
1077 !Comes_From_Source (gnat_entity
),
1079 && TREE_CODE (TYPE_NAME (type
)) == TYPE_DECL
1080 && DECL_IGNORED_P (TYPE_NAME (type
))),
1083 /* We used to modify the record in place in some cases, but that could
1084 generate incorrect debugging information. So make a new record
1086 record
= make_node (RECORD_TYPE
);
1087 TYPE_PADDING_P (record
) = 1;
1089 if (Present (gnat_entity
))
1090 TYPE_NAME (record
) = create_concat_name (gnat_entity
, "PAD");
1092 TYPE_ALIGN (record
) = align
;
1093 TYPE_SIZE (record
) = size
? size
: orig_size
;
1094 TYPE_SIZE_UNIT (record
)
1095 = convert (sizetype
,
1096 size_binop (CEIL_DIV_EXPR
, TYPE_SIZE (record
),
1097 bitsize_unit_node
));
1099 /* If we are changing the alignment and the input type is a record with
1100 BLKmode and a small constant size, try to make a form that has an
1101 integral mode. This might allow the padding record to also have an
1102 integral mode, which will be much more efficient. There is no point
1103 in doing so if a size is specified unless it is also a small constant
1104 size and it is incorrect to do so if we cannot guarantee that the mode
1105 will be naturally aligned since the field must always be addressable.
1107 ??? This might not always be a win when done for a stand-alone object:
1108 since the nominal and the effective type of the object will now have
1109 different modes, a VIEW_CONVERT_EXPR will be required for converting
1110 between them and it might be hard to overcome afterwards, including
1111 at the RTL level when the stand-alone object is accessed as a whole. */
1113 && RECORD_OR_UNION_TYPE_P (type
)
1114 && TYPE_MODE (type
) == BLKmode
1115 && !TYPE_BY_REFERENCE_P (type
)
1116 && TREE_CODE (orig_size
) == INTEGER_CST
1117 && !TREE_OVERFLOW (orig_size
)
1118 && compare_tree_int (orig_size
, MAX_FIXED_MODE_SIZE
) <= 0
1120 || (TREE_CODE (size
) == INTEGER_CST
1121 && compare_tree_int (size
, MAX_FIXED_MODE_SIZE
) <= 0)))
1123 tree packable_type
= make_packable_type (type
, true);
1124 if (TYPE_MODE (packable_type
) != BLKmode
1125 && align
>= TYPE_ALIGN (packable_type
))
1126 type
= packable_type
;
1129 /* Now create the field with the original size. */
1130 field
= create_field_decl (get_identifier ("F"), type
, record
, orig_size
,
1131 bitsize_zero_node
, 0, 1);
1132 DECL_INTERNAL_P (field
) = 1;
1134 /* Do not emit debug info until after the auxiliary record is built. */
1135 finish_record_type (record
, field
, 1, false);
1137 /* Set the RM size if requested. */
1140 SET_TYPE_ADA_SIZE (record
, size
? size
: orig_size
);
1142 /* If the padded type is complete and has constant size, we canonicalize
1143 it by means of the hash table. This is consistent with the language
1144 semantics and ensures that gigi and the middle-end have a common view
1145 of these padded types. */
1146 if (TREE_CONSTANT (TYPE_SIZE (record
)))
1149 struct pad_type_hash in
, *h
;
1152 hashcode
= iterative_hash_object (TYPE_HASH (type
), 0);
1153 hashcode
= iterative_hash_expr (TYPE_SIZE (record
), hashcode
);
1154 hashcode
= iterative_hash_hashval_t (TYPE_ALIGN (record
), hashcode
);
1155 hashcode
= iterative_hash_expr (TYPE_ADA_SIZE (record
), hashcode
);
1159 h
= (struct pad_type_hash
*)
1160 htab_find_with_hash (pad_type_hash_table
, &in
, hashcode
);
1167 h
= ggc_alloc_pad_type_hash ();
1170 loc
= htab_find_slot_with_hash (pad_type_hash_table
, h
, hashcode
,
1176 /* Unless debugging information isn't being written for the input type,
1177 write a record that shows what we are a subtype of and also make a
1178 variable that indicates our size, if still variable. */
1179 if (TREE_CODE (orig_size
) != INTEGER_CST
1180 && TYPE_NAME (record
)
1182 && !(TREE_CODE (TYPE_NAME (type
)) == TYPE_DECL
1183 && DECL_IGNORED_P (TYPE_NAME (type
))))
1185 tree marker
= make_node (RECORD_TYPE
);
1186 tree name
= TYPE_NAME (record
);
1187 tree orig_name
= TYPE_NAME (type
);
1189 if (TREE_CODE (name
) == TYPE_DECL
)
1190 name
= DECL_NAME (name
);
1192 if (TREE_CODE (orig_name
) == TYPE_DECL
)
1193 orig_name
= DECL_NAME (orig_name
);
1195 TYPE_NAME (marker
) = concat_name (name
, "XVS");
1196 finish_record_type (marker
,
1197 create_field_decl (orig_name
,
1198 build_reference_type (type
),
1199 marker
, NULL_TREE
, NULL_TREE
,
1203 add_parallel_type (record
, marker
);
1205 if (definition
&& size
&& TREE_CODE (size
) != INTEGER_CST
)
1206 TYPE_SIZE_UNIT (marker
)
1207 = create_var_decl (concat_name (name
, "XVZ"), NULL_TREE
, sizetype
,
1208 TYPE_SIZE_UNIT (record
), false, false, false,
1209 false, NULL
, gnat_entity
);
1212 rest_of_record_type_compilation (record
);
1215 /* If the size was widened explicitly, maybe give a warning. Take the
1216 original size as the maximum size of the input if there was an
1217 unconstrained record involved and round it up to the specified alignment,
1218 if one was specified. But don't do it if we are just annotating types
1219 and the type is tagged, since tagged types aren't fully laid out in this
1222 || TREE_CODE (size
) == COND_EXPR
1223 || TREE_CODE (size
) == MAX_EXPR
1225 || (type_annotate_only
&& Is_Tagged_Type (Etype (gnat_entity
))))
1228 if (CONTAINS_PLACEHOLDER_P (orig_size
))
1229 orig_size
= max_size (orig_size
, true);
1232 orig_size
= round_up (orig_size
, align
);
1234 if (!operand_equal_p (size
, orig_size
, 0)
1235 && !(TREE_CODE (size
) == INTEGER_CST
1236 && TREE_CODE (orig_size
) == INTEGER_CST
1237 && (TREE_OVERFLOW (size
)
1238 || TREE_OVERFLOW (orig_size
)
1239 || tree_int_cst_lt (size
, orig_size
))))
1241 Node_Id gnat_error_node
= Empty
;
1243 if (Is_Packed_Array_Type (gnat_entity
))
1244 gnat_entity
= Original_Array_Type (gnat_entity
);
1246 if ((Ekind (gnat_entity
) == E_Component
1247 || Ekind (gnat_entity
) == E_Discriminant
)
1248 && Present (Component_Clause (gnat_entity
)))
1249 gnat_error_node
= Last_Bit (Component_Clause (gnat_entity
));
1250 else if (Present (Size_Clause (gnat_entity
)))
1251 gnat_error_node
= Expression (Size_Clause (gnat_entity
));
1253 /* Generate message only for entities that come from source, since
1254 if we have an entity created by expansion, the message will be
1255 generated for some other corresponding source entity. */
1256 if (Comes_From_Source (gnat_entity
))
1258 if (Present (gnat_error_node
))
1259 post_error_ne_tree ("{^ }bits of & unused?",
1260 gnat_error_node
, gnat_entity
,
1261 size_diffop (size
, orig_size
));
1262 else if (is_component_type
)
1263 post_error_ne_tree ("component of& padded{ by ^ bits}?",
1264 gnat_entity
, gnat_entity
,
1265 size_diffop (size
, orig_size
));
1272 /* Relate the alias sets of GNU_NEW_TYPE and GNU_OLD_TYPE according to OP.
1273 If this is a multi-dimensional array type, do this recursively.
1276 - ALIAS_SET_COPY: the new set is made a copy of the old one.
1277 - ALIAS_SET_SUPERSET: the new set is made a superset of the old one.
1278 - ALIAS_SET_SUBSET: the new set is made a subset of the old one. */
1281 relate_alias_sets (tree gnu_new_type
, tree gnu_old_type
, enum alias_set_op op
)
1283 /* Remove any padding from GNU_OLD_TYPE. It doesn't matter in the case
1284 of a one-dimensional array, since the padding has the same alias set
1285 as the field type, but if it's a multi-dimensional array, we need to
1286 see the inner types. */
1287 while (TREE_CODE (gnu_old_type
) == RECORD_TYPE
1288 && (TYPE_JUSTIFIED_MODULAR_P (gnu_old_type
)
1289 || TYPE_PADDING_P (gnu_old_type
)))
1290 gnu_old_type
= TREE_TYPE (TYPE_FIELDS (gnu_old_type
));
1292 /* Unconstrained array types are deemed incomplete and would thus be given
1293 alias set 0. Retrieve the underlying array type. */
1294 if (TREE_CODE (gnu_old_type
) == UNCONSTRAINED_ARRAY_TYPE
)
1296 = TREE_TYPE (TREE_TYPE (TYPE_FIELDS (TREE_TYPE (gnu_old_type
))));
1297 if (TREE_CODE (gnu_new_type
) == UNCONSTRAINED_ARRAY_TYPE
)
1299 = TREE_TYPE (TREE_TYPE (TYPE_FIELDS (TREE_TYPE (gnu_new_type
))));
1301 if (TREE_CODE (gnu_new_type
) == ARRAY_TYPE
1302 && TREE_CODE (TREE_TYPE (gnu_new_type
)) == ARRAY_TYPE
1303 && TYPE_MULTI_ARRAY_P (TREE_TYPE (gnu_new_type
)))
1304 relate_alias_sets (TREE_TYPE (gnu_new_type
), TREE_TYPE (gnu_old_type
), op
);
1308 case ALIAS_SET_COPY
:
1309 /* The alias set shouldn't be copied between array types with different
1310 aliasing settings because this can break the aliasing relationship
1311 between the array type and its element type. */
1312 #ifndef ENABLE_CHECKING
1313 if (flag_strict_aliasing
)
1315 gcc_assert (!(TREE_CODE (gnu_new_type
) == ARRAY_TYPE
1316 && TREE_CODE (gnu_old_type
) == ARRAY_TYPE
1317 && TYPE_NONALIASED_COMPONENT (gnu_new_type
)
1318 != TYPE_NONALIASED_COMPONENT (gnu_old_type
)));
1320 TYPE_ALIAS_SET (gnu_new_type
) = get_alias_set (gnu_old_type
);
1323 case ALIAS_SET_SUBSET
:
1324 case ALIAS_SET_SUPERSET
:
1326 alias_set_type old_set
= get_alias_set (gnu_old_type
);
1327 alias_set_type new_set
= get_alias_set (gnu_new_type
);
1329 /* Do nothing if the alias sets conflict. This ensures that we
1330 never call record_alias_subset several times for the same pair
1331 or at all for alias set 0. */
1332 if (!alias_sets_conflict_p (old_set
, new_set
))
1334 if (op
== ALIAS_SET_SUBSET
)
1335 record_alias_subset (old_set
, new_set
);
1337 record_alias_subset (new_set
, old_set
);
1346 record_component_aliases (gnu_new_type
);
1349 /* Record TYPE as a builtin type for Ada. NAME is the name of the type.
1350 ARTIFICIAL_P is true if it's a type that was generated by the compiler. */
1353 record_builtin_type (const char *name
, tree type
, bool artificial_p
)
1355 tree type_decl
= build_decl (input_location
,
1356 TYPE_DECL
, get_identifier (name
), type
);
1357 DECL_ARTIFICIAL (type_decl
) = artificial_p
;
1358 TYPE_ARTIFICIAL (type
) = artificial_p
;
1359 gnat_pushdecl (type_decl
, Empty
);
1361 if (debug_hooks
->type_decl
)
1362 debug_hooks
->type_decl (type_decl
, false);
1365 /* Given a record type RECORD_TYPE and a list of FIELD_DECL nodes FIELD_LIST,
1366 finish constructing the record type as a fat pointer type. */
1369 finish_fat_pointer_type (tree record_type
, tree field_list
)
1371 /* Make sure we can put it into a register. */
1372 if (STRICT_ALIGNMENT
)
1373 TYPE_ALIGN (record_type
) = MIN (BIGGEST_ALIGNMENT
, 2 * POINTER_SIZE
);
1375 /* Show what it really is. */
1376 TYPE_FAT_POINTER_P (record_type
) = 1;
1378 /* Do not emit debug info for it since the types of its fields may still be
1379 incomplete at this point. */
1380 finish_record_type (record_type
, field_list
, 0, false);
1382 /* Force type_contains_placeholder_p to return true on it. Although the
1383 PLACEHOLDER_EXPRs are referenced only indirectly, this isn't a pointer
1384 type but the representation of the unconstrained array. */
1385 TYPE_CONTAINS_PLACEHOLDER_INTERNAL (record_type
) = 2;
1388 /* Given a record type RECORD_TYPE and a list of FIELD_DECL nodes FIELD_LIST,
1389 finish constructing the record or union type. If REP_LEVEL is zero, this
1390 record has no representation clause and so will be entirely laid out here.
1391 If REP_LEVEL is one, this record has a representation clause and has been
1392 laid out already; only set the sizes and alignment. If REP_LEVEL is two,
1393 this record is derived from a parent record and thus inherits its layout;
1394 only make a pass on the fields to finalize them. DEBUG_INFO_P is true if
1395 we need to write debug information about this type. */
1398 finish_record_type (tree record_type
, tree field_list
, int rep_level
,
1401 enum tree_code code
= TREE_CODE (record_type
);
1402 tree name
= TYPE_NAME (record_type
);
1403 tree ada_size
= bitsize_zero_node
;
1404 tree size
= bitsize_zero_node
;
1405 bool had_size
= TYPE_SIZE (record_type
) != 0;
1406 bool had_size_unit
= TYPE_SIZE_UNIT (record_type
) != 0;
1407 bool had_align
= TYPE_ALIGN (record_type
) != 0;
1410 TYPE_FIELDS (record_type
) = field_list
;
1412 /* Always attach the TYPE_STUB_DECL for a record type. It is required to
1413 generate debug info and have a parallel type. */
1414 if (name
&& TREE_CODE (name
) == TYPE_DECL
)
1415 name
= DECL_NAME (name
);
1416 TYPE_STUB_DECL (record_type
) = create_type_stub_decl (name
, record_type
);
1418 /* Globally initialize the record first. If this is a rep'ed record,
1419 that just means some initializations; otherwise, layout the record. */
1422 TYPE_ALIGN (record_type
) = MAX (BITS_PER_UNIT
, TYPE_ALIGN (record_type
));
1425 TYPE_SIZE_UNIT (record_type
) = size_zero_node
;
1428 TYPE_SIZE (record_type
) = bitsize_zero_node
;
1430 /* For all-repped records with a size specified, lay the QUAL_UNION_TYPE
1431 out just like a UNION_TYPE, since the size will be fixed. */
1432 else if (code
== QUAL_UNION_TYPE
)
1437 /* Ensure there isn't a size already set. There can be in an error
1438 case where there is a rep clause but all fields have errors and
1439 no longer have a position. */
1440 TYPE_SIZE (record_type
) = 0;
1442 /* Ensure we use the traditional GCC layout for bitfields when we need
1443 to pack the record type or have a representation clause. The other
1444 possible layout (Microsoft C compiler), if available, would prevent
1445 efficient packing in almost all cases. */
1446 #ifdef TARGET_MS_BITFIELD_LAYOUT
1447 if (TARGET_MS_BITFIELD_LAYOUT
&& TYPE_PACKED (record_type
))
1448 decl_attributes (&record_type
,
1449 tree_cons (get_identifier ("gcc_struct"),
1450 NULL_TREE
, NULL_TREE
),
1451 ATTR_FLAG_TYPE_IN_PLACE
);
1454 layout_type (record_type
);
1457 /* At this point, the position and size of each field is known. It was
1458 either set before entry by a rep clause, or by laying out the type above.
1460 We now run a pass over the fields (in reverse order for QUAL_UNION_TYPEs)
1461 to compute the Ada size; the GCC size and alignment (for rep'ed records
1462 that are not padding types); and the mode (for rep'ed records). We also
1463 clear the DECL_BIT_FIELD indication for the cases we know have not been
1464 handled yet, and adjust DECL_NONADDRESSABLE_P accordingly. */
1466 if (code
== QUAL_UNION_TYPE
)
1467 field_list
= nreverse (field_list
);
1469 for (field
= field_list
; field
; field
= DECL_CHAIN (field
))
1471 tree type
= TREE_TYPE (field
);
1472 tree pos
= bit_position (field
);
1473 tree this_size
= DECL_SIZE (field
);
1476 if (RECORD_OR_UNION_TYPE_P (type
)
1477 && !TYPE_FAT_POINTER_P (type
)
1478 && !TYPE_CONTAINS_TEMPLATE_P (type
)
1479 && TYPE_ADA_SIZE (type
))
1480 this_ada_size
= TYPE_ADA_SIZE (type
);
1482 this_ada_size
= this_size
;
1484 /* Clear DECL_BIT_FIELD for the cases layout_decl does not handle. */
1485 if (DECL_BIT_FIELD (field
)
1486 && operand_equal_p (this_size
, TYPE_SIZE (type
), 0))
1488 unsigned int align
= TYPE_ALIGN (type
);
1490 /* In the general case, type alignment is required. */
1491 if (value_factor_p (pos
, align
))
1493 /* The enclosing record type must be sufficiently aligned.
1494 Otherwise, if no alignment was specified for it and it
1495 has been laid out already, bump its alignment to the
1496 desired one if this is compatible with its size. */
1497 if (TYPE_ALIGN (record_type
) >= align
)
1499 DECL_ALIGN (field
) = MAX (DECL_ALIGN (field
), align
);
1500 DECL_BIT_FIELD (field
) = 0;
1504 && value_factor_p (TYPE_SIZE (record_type
), align
))
1506 TYPE_ALIGN (record_type
) = align
;
1507 DECL_ALIGN (field
) = MAX (DECL_ALIGN (field
), align
);
1508 DECL_BIT_FIELD (field
) = 0;
1512 /* In the non-strict alignment case, only byte alignment is. */
1513 if (!STRICT_ALIGNMENT
1514 && DECL_BIT_FIELD (field
)
1515 && value_factor_p (pos
, BITS_PER_UNIT
))
1516 DECL_BIT_FIELD (field
) = 0;
1519 /* If we still have DECL_BIT_FIELD set at this point, we know that the
1520 field is technically not addressable. Except that it can actually
1521 be addressed if it is BLKmode and happens to be properly aligned. */
1522 if (DECL_BIT_FIELD (field
)
1523 && !(DECL_MODE (field
) == BLKmode
1524 && value_factor_p (pos
, BITS_PER_UNIT
)))
1525 DECL_NONADDRESSABLE_P (field
) = 1;
1527 /* A type must be as aligned as its most aligned field that is not
1528 a bit-field. But this is already enforced by layout_type. */
1529 if (rep_level
> 0 && !DECL_BIT_FIELD (field
))
1530 TYPE_ALIGN (record_type
)
1531 = MAX (TYPE_ALIGN (record_type
), DECL_ALIGN (field
));
1536 ada_size
= size_binop (MAX_EXPR
, ada_size
, this_ada_size
);
1537 size
= size_binop (MAX_EXPR
, size
, this_size
);
1540 case QUAL_UNION_TYPE
:
1542 = fold_build3 (COND_EXPR
, bitsizetype
, DECL_QUALIFIER (field
),
1543 this_ada_size
, ada_size
);
1544 size
= fold_build3 (COND_EXPR
, bitsizetype
, DECL_QUALIFIER (field
),
1549 /* Since we know here that all fields are sorted in order of
1550 increasing bit position, the size of the record is one
1551 higher than the ending bit of the last field processed
1552 unless we have a rep clause, since in that case we might
1553 have a field outside a QUAL_UNION_TYPE that has a higher ending
1554 position. So use a MAX in that case. Also, if this field is a
1555 QUAL_UNION_TYPE, we need to take into account the previous size in
1556 the case of empty variants. */
1558 = merge_sizes (ada_size
, pos
, this_ada_size
,
1559 TREE_CODE (type
) == QUAL_UNION_TYPE
, rep_level
> 0);
1561 = merge_sizes (size
, pos
, this_size
,
1562 TREE_CODE (type
) == QUAL_UNION_TYPE
, rep_level
> 0);
1570 if (code
== QUAL_UNION_TYPE
)
1571 nreverse (field_list
);
1575 /* If this is a padding record, we never want to make the size smaller
1576 than what was specified in it, if any. */
1577 if (TYPE_IS_PADDING_P (record_type
) && TYPE_SIZE (record_type
))
1578 size
= TYPE_SIZE (record_type
);
1580 /* Now set any of the values we've just computed that apply. */
1581 if (!TYPE_FAT_POINTER_P (record_type
)
1582 && !TYPE_CONTAINS_TEMPLATE_P (record_type
))
1583 SET_TYPE_ADA_SIZE (record_type
, ada_size
);
1587 tree size_unit
= had_size_unit
1588 ? TYPE_SIZE_UNIT (record_type
)
1589 : convert (sizetype
,
1590 size_binop (CEIL_DIV_EXPR
, size
,
1591 bitsize_unit_node
));
1592 unsigned int align
= TYPE_ALIGN (record_type
);
1594 TYPE_SIZE (record_type
) = variable_size (round_up (size
, align
));
1595 TYPE_SIZE_UNIT (record_type
)
1596 = variable_size (round_up (size_unit
, align
/ BITS_PER_UNIT
));
1598 compute_record_mode (record_type
);
1603 rest_of_record_type_compilation (record_type
);
1606 /* Append PARALLEL_TYPE on the chain of parallel types of TYPE. */
1609 add_parallel_type (tree type
, tree parallel_type
)
1611 tree decl
= TYPE_STUB_DECL (type
);
1613 while (DECL_PARALLEL_TYPE (decl
))
1614 decl
= TYPE_STUB_DECL (DECL_PARALLEL_TYPE (decl
));
1616 SET_DECL_PARALLEL_TYPE (decl
, parallel_type
);
1619 /* Return true if TYPE has a parallel type. */
1622 has_parallel_type (tree type
)
1624 tree decl
= TYPE_STUB_DECL (type
);
1626 return DECL_PARALLEL_TYPE (decl
) != NULL_TREE
;
1629 /* Wrap up compilation of RECORD_TYPE, i.e. output all the debug information
1630 associated with it. It need not be invoked directly in most cases since
1631 finish_record_type takes care of doing so, but this can be necessary if
1632 a parallel type is to be attached to the record type. */
1635 rest_of_record_type_compilation (tree record_type
)
1637 bool var_size
= false;
1640 /* If this is a padded type, the bulk of the debug info has already been
1641 generated for the field's type. */
1642 if (TYPE_IS_PADDING_P (record_type
))
1645 /* If the type already has a parallel type (XVS type), then we're done. */
1646 if (has_parallel_type (record_type
))
1649 for (field
= TYPE_FIELDS (record_type
); field
; field
= DECL_CHAIN (field
))
1651 /* We need to make an XVE/XVU record if any field has variable size,
1652 whether or not the record does. For example, if we have a union,
1653 it may be that all fields, rounded up to the alignment, have the
1654 same size, in which case we'll use that size. But the debug
1655 output routines (except Dwarf2) won't be able to output the fields,
1656 so we need to make the special record. */
1657 if (TREE_CODE (DECL_SIZE (field
)) != INTEGER_CST
1658 /* If a field has a non-constant qualifier, the record will have
1659 variable size too. */
1660 || (TREE_CODE (record_type
) == QUAL_UNION_TYPE
1661 && TREE_CODE (DECL_QUALIFIER (field
)) != INTEGER_CST
))
1668 /* If this record type is of variable size, make a parallel record type that
1669 will tell the debugger how the former is laid out (see exp_dbug.ads). */
1672 tree new_record_type
1673 = make_node (TREE_CODE (record_type
) == QUAL_UNION_TYPE
1674 ? UNION_TYPE
: TREE_CODE (record_type
));
1675 tree orig_name
= TYPE_NAME (record_type
), new_name
;
1676 tree last_pos
= bitsize_zero_node
;
1677 tree old_field
, prev_old_field
= NULL_TREE
;
1679 if (TREE_CODE (orig_name
) == TYPE_DECL
)
1680 orig_name
= DECL_NAME (orig_name
);
1683 = concat_name (orig_name
, TREE_CODE (record_type
) == QUAL_UNION_TYPE
1685 TYPE_NAME (new_record_type
) = new_name
;
1686 TYPE_ALIGN (new_record_type
) = BIGGEST_ALIGNMENT
;
1687 TYPE_STUB_DECL (new_record_type
)
1688 = create_type_stub_decl (new_name
, new_record_type
);
1689 DECL_IGNORED_P (TYPE_STUB_DECL (new_record_type
))
1690 = DECL_IGNORED_P (TYPE_STUB_DECL (record_type
));
1691 TYPE_SIZE (new_record_type
) = size_int (TYPE_ALIGN (record_type
));
1692 TYPE_SIZE_UNIT (new_record_type
)
1693 = size_int (TYPE_ALIGN (record_type
) / BITS_PER_UNIT
);
1695 /* Now scan all the fields, replacing each field with a new
1696 field corresponding to the new encoding. */
1697 for (old_field
= TYPE_FIELDS (record_type
); old_field
;
1698 old_field
= DECL_CHAIN (old_field
))
1700 tree field_type
= TREE_TYPE (old_field
);
1701 tree field_name
= DECL_NAME (old_field
);
1703 tree curpos
= bit_position (old_field
);
1705 unsigned int align
= 0;
1708 /* See how the position was modified from the last position.
1710 There are two basic cases we support: a value was added
1711 to the last position or the last position was rounded to
1712 a boundary and they something was added. Check for the
1713 first case first. If not, see if there is any evidence
1714 of rounding. If so, round the last position and try
1717 If this is a union, the position can be taken as zero. */
1719 /* Some computations depend on the shape of the position expression,
1720 so strip conversions to make sure it's exposed. */
1721 curpos
= remove_conversions (curpos
, true);
1723 if (TREE_CODE (new_record_type
) == UNION_TYPE
)
1724 pos
= bitsize_zero_node
, align
= 0;
1726 pos
= compute_related_constant (curpos
, last_pos
);
1728 if (!pos
&& TREE_CODE (curpos
) == MULT_EXPR
1729 && host_integerp (TREE_OPERAND (curpos
, 1), 1))
1731 tree offset
= TREE_OPERAND (curpos
, 0);
1732 align
= tree_low_cst (TREE_OPERAND (curpos
, 1), 1);
1734 /* An offset which is a bitwise AND with a mask increases the
1735 alignment according to the number of trailing zeros. */
1736 offset
= remove_conversions (offset
, true);
1737 if (TREE_CODE (offset
) == BIT_AND_EXPR
1738 && TREE_CODE (TREE_OPERAND (offset
, 1)) == INTEGER_CST
)
1740 unsigned HOST_WIDE_INT mask
1741 = TREE_INT_CST_LOW (TREE_OPERAND (offset
, 1));
1744 for (i
= 0; i
< HOST_BITS_PER_WIDE_INT
; i
++)
1753 pos
= compute_related_constant (curpos
,
1754 round_up (last_pos
, align
));
1756 else if (!pos
&& TREE_CODE (curpos
) == PLUS_EXPR
1757 && TREE_CODE (TREE_OPERAND (curpos
, 1)) == INTEGER_CST
1758 && TREE_CODE (TREE_OPERAND (curpos
, 0)) == MULT_EXPR
1759 && host_integerp (TREE_OPERAND
1760 (TREE_OPERAND (curpos
, 0), 1),
1765 (TREE_OPERAND (TREE_OPERAND (curpos
, 0), 1), 1);
1766 pos
= compute_related_constant (curpos
,
1767 round_up (last_pos
, align
));
1769 else if (potential_alignment_gap (prev_old_field
, old_field
,
1772 align
= TYPE_ALIGN (field_type
);
1773 pos
= compute_related_constant (curpos
,
1774 round_up (last_pos
, align
));
1777 /* If we can't compute a position, set it to zero.
1779 ??? We really should abort here, but it's too much work
1780 to get this correct for all cases. */
1783 pos
= bitsize_zero_node
;
1785 /* See if this type is variable-sized and make a pointer type
1786 and indicate the indirection if so. Beware that the debug
1787 back-end may adjust the position computed above according
1788 to the alignment of the field type, i.e. the pointer type
1789 in this case, if we don't preventively counter that. */
1790 if (TREE_CODE (DECL_SIZE (old_field
)) != INTEGER_CST
)
1792 field_type
= build_pointer_type (field_type
);
1793 if (align
!= 0 && TYPE_ALIGN (field_type
) > align
)
1795 field_type
= copy_node (field_type
);
1796 TYPE_ALIGN (field_type
) = align
;
1801 /* Make a new field name, if necessary. */
1802 if (var
|| align
!= 0)
1807 sprintf (suffix
, "XV%c%u", var
? 'L' : 'A',
1808 align
/ BITS_PER_UNIT
);
1810 strcpy (suffix
, "XVL");
1812 field_name
= concat_name (field_name
, suffix
);
1816 = create_field_decl (field_name
, field_type
, new_record_type
,
1817 DECL_SIZE (old_field
), pos
, 0, 0);
1818 DECL_CHAIN (new_field
) = TYPE_FIELDS (new_record_type
);
1819 TYPE_FIELDS (new_record_type
) = new_field
;
1821 /* If old_field is a QUAL_UNION_TYPE, take its size as being
1822 zero. The only time it's not the last field of the record
1823 is when there are other components at fixed positions after
1824 it (meaning there was a rep clause for every field) and we
1825 want to be able to encode them. */
1826 last_pos
= size_binop (PLUS_EXPR
, bit_position (old_field
),
1827 (TREE_CODE (TREE_TYPE (old_field
))
1830 : DECL_SIZE (old_field
));
1831 prev_old_field
= old_field
;
1834 TYPE_FIELDS (new_record_type
) = nreverse (TYPE_FIELDS (new_record_type
));
1836 add_parallel_type (record_type
, new_record_type
);
1840 /* Utility function of above to merge LAST_SIZE, the previous size of a record
1841 with FIRST_BIT and SIZE that describe a field. SPECIAL is true if this
1842 represents a QUAL_UNION_TYPE in which case we must look for COND_EXPRs and
1843 replace a value of zero with the old size. If HAS_REP is true, we take the
1844 MAX of the end position of this field with LAST_SIZE. In all other cases,
1845 we use FIRST_BIT plus SIZE. Return an expression for the size. */
1848 merge_sizes (tree last_size
, tree first_bit
, tree size
, bool special
,
1851 tree type
= TREE_TYPE (last_size
);
1854 if (!special
|| TREE_CODE (size
) != COND_EXPR
)
1856 new_size
= size_binop (PLUS_EXPR
, first_bit
, size
);
1858 new_size
= size_binop (MAX_EXPR
, last_size
, new_size
);
1862 new_size
= fold_build3 (COND_EXPR
, type
, TREE_OPERAND (size
, 0),
1863 integer_zerop (TREE_OPERAND (size
, 1))
1864 ? last_size
: merge_sizes (last_size
, first_bit
,
1865 TREE_OPERAND (size
, 1),
1867 integer_zerop (TREE_OPERAND (size
, 2))
1868 ? last_size
: merge_sizes (last_size
, first_bit
,
1869 TREE_OPERAND (size
, 2),
1872 /* We don't need any NON_VALUE_EXPRs and they can confuse us (especially
1873 when fed through substitute_in_expr) into thinking that a constant
1874 size is not constant. */
1875 while (TREE_CODE (new_size
) == NON_LVALUE_EXPR
)
1876 new_size
= TREE_OPERAND (new_size
, 0);
1881 /* Utility function of above to see if OP0 and OP1, both of SIZETYPE, are
1882 related by the addition of a constant. Return that constant if so. */
1885 compute_related_constant (tree op0
, tree op1
)
1887 tree op0_var
, op1_var
;
1888 tree op0_con
= split_plus (op0
, &op0_var
);
1889 tree op1_con
= split_plus (op1
, &op1_var
);
1890 tree result
= size_binop (MINUS_EXPR
, op0_con
, op1_con
);
1892 if (operand_equal_p (op0_var
, op1_var
, 0))
1894 else if (operand_equal_p (op0
, size_binop (PLUS_EXPR
, op1_var
, result
), 0))
1900 /* Utility function of above to split a tree OP which may be a sum, into a
1901 constant part, which is returned, and a variable part, which is stored
1902 in *PVAR. *PVAR may be bitsize_zero_node. All operations must be of
1906 split_plus (tree in
, tree
*pvar
)
1908 /* Strip conversions in order to ease the tree traversal and maximize the
1909 potential for constant or plus/minus discovery. We need to be careful
1910 to always return and set *pvar to bitsizetype trees, but it's worth
1912 in
= remove_conversions (in
, false);
1914 *pvar
= convert (bitsizetype
, in
);
1916 if (TREE_CODE (in
) == INTEGER_CST
)
1918 *pvar
= bitsize_zero_node
;
1919 return convert (bitsizetype
, in
);
1921 else if (TREE_CODE (in
) == PLUS_EXPR
|| TREE_CODE (in
) == MINUS_EXPR
)
1923 tree lhs_var
, rhs_var
;
1924 tree lhs_con
= split_plus (TREE_OPERAND (in
, 0), &lhs_var
);
1925 tree rhs_con
= split_plus (TREE_OPERAND (in
, 1), &rhs_var
);
1927 if (lhs_var
== TREE_OPERAND (in
, 0)
1928 && rhs_var
== TREE_OPERAND (in
, 1))
1929 return bitsize_zero_node
;
1931 *pvar
= size_binop (TREE_CODE (in
), lhs_var
, rhs_var
);
1932 return size_binop (TREE_CODE (in
), lhs_con
, rhs_con
);
1935 return bitsize_zero_node
;
1938 /* Return a FUNCTION_TYPE node. RETURN_TYPE is the type returned by the
1939 subprogram. If it is VOID_TYPE, then we are dealing with a procedure,
1940 otherwise we are dealing with a function. PARAM_DECL_LIST is a list of
1941 PARM_DECL nodes that are the subprogram parameters. CICO_LIST is the
1942 copy-in/copy-out list to be stored into the TYPE_CICO_LIST field.
1943 RETURN_UNCONSTRAINED_P is true if the function returns an unconstrained
1944 object. RETURN_BY_DIRECT_REF_P is true if the function returns by direct
1945 reference. RETURN_BY_INVISI_REF_P is true if the function returns by
1946 invisible reference. */
1949 create_subprog_type (tree return_type
, tree param_decl_list
, tree cico_list
,
1950 bool return_unconstrained_p
, bool return_by_direct_ref_p
,
1951 bool return_by_invisi_ref_p
)
1953 /* A list of the data type nodes of the subprogram formal parameters.
1954 This list is generated by traversing the input list of PARM_DECL
1956 vec
<tree
, va_gc
> *param_type_list
= NULL
;
1959 for (t
= param_decl_list
; t
; t
= DECL_CHAIN (t
))
1960 vec_safe_push (param_type_list
, TREE_TYPE (t
));
1962 type
= build_function_type_vec (return_type
, param_type_list
);
1964 /* TYPE may have been shared since GCC hashes types. If it has a different
1965 CICO_LIST, make a copy. Likewise for the various flags. */
1966 if (!fntype_same_flags_p (type
, cico_list
, return_unconstrained_p
,
1967 return_by_direct_ref_p
, return_by_invisi_ref_p
))
1969 type
= copy_type (type
);
1970 TYPE_CI_CO_LIST (type
) = cico_list
;
1971 TYPE_RETURN_UNCONSTRAINED_P (type
) = return_unconstrained_p
;
1972 TYPE_RETURN_BY_DIRECT_REF_P (type
) = return_by_direct_ref_p
;
1973 TREE_ADDRESSABLE (type
) = return_by_invisi_ref_p
;
1979 /* Return a copy of TYPE but safe to modify in any way. */
1982 copy_type (tree type
)
1984 tree new_type
= copy_node (type
);
1986 /* Unshare the language-specific data. */
1987 if (TYPE_LANG_SPECIFIC (type
))
1989 TYPE_LANG_SPECIFIC (new_type
) = NULL
;
1990 SET_TYPE_LANG_SPECIFIC (new_type
, GET_TYPE_LANG_SPECIFIC (type
));
1993 /* And the contents of the language-specific slot if needed. */
1994 if ((INTEGRAL_TYPE_P (type
) || TREE_CODE (type
) == REAL_TYPE
)
1995 && TYPE_RM_VALUES (type
))
1997 TYPE_RM_VALUES (new_type
) = NULL_TREE
;
1998 SET_TYPE_RM_SIZE (new_type
, TYPE_RM_SIZE (type
));
1999 SET_TYPE_RM_MIN_VALUE (new_type
, TYPE_RM_MIN_VALUE (type
));
2000 SET_TYPE_RM_MAX_VALUE (new_type
, TYPE_RM_MAX_VALUE (type
));
2003 /* copy_node clears this field instead of copying it, because it is
2004 aliased with TREE_CHAIN. */
2005 TYPE_STUB_DECL (new_type
) = TYPE_STUB_DECL (type
);
2007 TYPE_POINTER_TO (new_type
) = 0;
2008 TYPE_REFERENCE_TO (new_type
) = 0;
2009 TYPE_MAIN_VARIANT (new_type
) = new_type
;
2010 TYPE_NEXT_VARIANT (new_type
) = 0;
2015 /* Return a subtype of sizetype with range MIN to MAX and whose
2016 TYPE_INDEX_TYPE is INDEX. GNAT_NODE is used for the position
2017 of the associated TYPE_DECL. */
2020 create_index_type (tree min
, tree max
, tree index
, Node_Id gnat_node
)
2022 /* First build a type for the desired range. */
2023 tree type
= build_nonshared_range_type (sizetype
, min
, max
);
2025 /* Then set the index type. */
2026 SET_TYPE_INDEX_TYPE (type
, index
);
2027 create_type_decl (NULL_TREE
, type
, true, false, gnat_node
);
2032 /* Return a subtype of TYPE with range MIN to MAX. If TYPE is NULL,
2033 sizetype is used. */
2036 create_range_type (tree type
, tree min
, tree max
)
2040 if (type
== NULL_TREE
)
2043 /* First build a type with the base range. */
2044 range_type
= build_nonshared_range_type (type
, TYPE_MIN_VALUE (type
),
2045 TYPE_MAX_VALUE (type
));
2047 /* Then set the actual range. */
2048 SET_TYPE_RM_MIN_VALUE (range_type
, convert (type
, min
));
2049 SET_TYPE_RM_MAX_VALUE (range_type
, convert (type
, max
));
2054 /* Return a TYPE_DECL node suitable for the TYPE_STUB_DECL field of a type.
2055 TYPE_NAME gives the name of the type and TYPE is a ..._TYPE node giving
2059 create_type_stub_decl (tree type_name
, tree type
)
2061 /* Using a named TYPE_DECL ensures that a type name marker is emitted in
2062 STABS while setting DECL_ARTIFICIAL ensures that no DW_TAG_typedef is
2063 emitted in DWARF. */
2064 tree type_decl
= build_decl (input_location
,
2065 TYPE_DECL
, type_name
, type
);
2066 DECL_ARTIFICIAL (type_decl
) = 1;
2067 TYPE_ARTIFICIAL (type
) = 1;
2071 /* Return a TYPE_DECL node. TYPE_NAME gives the name of the type and TYPE
2072 is a ..._TYPE node giving its data type. ARTIFICIAL_P is true if this
2073 is a declaration that was generated by the compiler. DEBUG_INFO_P is
2074 true if we need to write debug information about this type. GNAT_NODE
2075 is used for the position of the decl. */
2078 create_type_decl (tree type_name
, tree type
, bool artificial_p
,
2079 bool debug_info_p
, Node_Id gnat_node
)
2081 enum tree_code code
= TREE_CODE (type
);
2082 bool named
= TYPE_NAME (type
) && TREE_CODE (TYPE_NAME (type
)) == TYPE_DECL
;
2085 /* Only the builtin TYPE_STUB_DECL should be used for dummy types. */
2086 gcc_assert (!TYPE_IS_DUMMY_P (type
));
2088 /* If the type hasn't been named yet, we're naming it; preserve an existing
2089 TYPE_STUB_DECL that has been attached to it for some purpose. */
2090 if (!named
&& TYPE_STUB_DECL (type
))
2092 type_decl
= TYPE_STUB_DECL (type
);
2093 DECL_NAME (type_decl
) = type_name
;
2096 type_decl
= build_decl (input_location
, TYPE_DECL
, type_name
, type
);
2098 DECL_ARTIFICIAL (type_decl
) = artificial_p
;
2099 TYPE_ARTIFICIAL (type
) = artificial_p
;
2101 /* Add this decl to the current binding level. */
2102 gnat_pushdecl (type_decl
, gnat_node
);
2104 /* If we're naming the type, equate the TYPE_STUB_DECL to the name.
2105 This causes the name to be also viewed as a "tag" by the debug
2106 back-end, with the advantage that no DW_TAG_typedef is emitted
2107 for artificial "tagged" types in DWARF. */
2109 TYPE_STUB_DECL (type
) = type_decl
;
2111 /* Do not generate debug info for UNCONSTRAINED_ARRAY_TYPE that the
2112 back-end doesn't support, and for others if we don't need to. */
2113 if (code
== UNCONSTRAINED_ARRAY_TYPE
|| !debug_info_p
)
2114 DECL_IGNORED_P (type_decl
) = 1;
2119 /* Return a VAR_DECL or CONST_DECL node.
2121 VAR_NAME gives the name of the variable. ASM_NAME is its assembler name
2122 (if provided). TYPE is its data type (a GCC ..._TYPE node). VAR_INIT is
2123 the GCC tree for an optional initial expression; NULL_TREE if none.
2125 CONST_FLAG is true if this variable is constant, in which case we might
2126 return a CONST_DECL node unless CONST_DECL_ALLOWED_P is false.
2128 PUBLIC_FLAG is true if this is for a reference to a public entity or for a
2129 definition to be made visible outside of the current compilation unit, for
2130 instance variable definitions in a package specification.
2132 EXTERN_FLAG is true when processing an external variable declaration (as
2133 opposed to a definition: no storage is to be allocated for the variable).
2135 STATIC_FLAG is only relevant when not at top level. In that case
2136 it indicates whether to always allocate storage to the variable.
2138 GNAT_NODE is used for the position of the decl. */
2141 create_var_decl_1 (tree var_name
, tree asm_name
, tree type
, tree var_init
,
2142 bool const_flag
, bool public_flag
, bool extern_flag
,
2143 bool static_flag
, bool const_decl_allowed_p
,
2144 struct attrib
*attr_list
, Node_Id gnat_node
)
2146 /* Whether the initializer is a constant initializer. At the global level
2147 or for an external object or an object to be allocated in static memory,
2148 we check that it is a valid constant expression for use in initializing
2149 a static variable; otherwise, we only check that it is constant. */
2152 && gnat_types_compatible_p (type
, TREE_TYPE (var_init
))
2153 && (global_bindings_p () || extern_flag
|| static_flag
2154 ? initializer_constant_valid_p (var_init
, TREE_TYPE (var_init
)) != 0
2155 : TREE_CONSTANT (var_init
)));
2157 /* Whether we will make TREE_CONSTANT the DECL we produce here, in which
2158 case the initializer may be used in-lieu of the DECL node (as done in
2159 Identifier_to_gnu). This is useful to prevent the need of elaboration
2160 code when an identifier for which such a decl is made is in turn used as
2161 an initializer. We used to rely on CONST vs VAR_DECL for this purpose,
2162 but extra constraints apply to this choice (see below) and are not
2163 relevant to the distinction we wish to make. */
2164 bool constant_p
= const_flag
&& init_const
;
2166 /* The actual DECL node. CONST_DECL was initially intended for enumerals
2167 and may be used for scalars in general but not for aggregates. */
2169 = build_decl (input_location
,
2170 (constant_p
&& const_decl_allowed_p
2171 && !AGGREGATE_TYPE_P (type
)) ? CONST_DECL
: VAR_DECL
,
2174 /* If this is external, throw away any initializations (they will be done
2175 elsewhere) unless this is a constant for which we would like to remain
2176 able to get the initializer. If we are defining a global here, leave a
2177 constant initialization and save any variable elaborations for the
2178 elaboration routine. If we are just annotating types, throw away the
2179 initialization if it isn't a constant. */
2180 if ((extern_flag
&& !constant_p
)
2181 || (type_annotate_only
&& var_init
&& !TREE_CONSTANT (var_init
)))
2182 var_init
= NULL_TREE
;
2184 /* At the global level, an initializer requiring code to be generated
2185 produces elaboration statements. Check that such statements are allowed,
2186 that is, not violating a No_Elaboration_Code restriction. */
2187 if (global_bindings_p () && var_init
!= 0 && !init_const
)
2188 Check_Elaboration_Code_Allowed (gnat_node
);
2190 DECL_INITIAL (var_decl
) = var_init
;
2191 TREE_READONLY (var_decl
) = const_flag
;
2192 DECL_EXTERNAL (var_decl
) = extern_flag
;
2193 TREE_PUBLIC (var_decl
) = public_flag
|| extern_flag
;
2194 TREE_CONSTANT (var_decl
) = constant_p
;
2195 TREE_THIS_VOLATILE (var_decl
) = TREE_SIDE_EFFECTS (var_decl
)
2196 = TYPE_VOLATILE (type
);
2198 /* Ada doesn't feature Fortran-like COMMON variables so we shouldn't
2199 try to fiddle with DECL_COMMON. However, on platforms that don't
2200 support global BSS sections, uninitialized global variables would
2201 go in DATA instead, thus increasing the size of the executable. */
2203 && TREE_CODE (var_decl
) == VAR_DECL
2204 && TREE_PUBLIC (var_decl
)
2205 && !have_global_bss_p ())
2206 DECL_COMMON (var_decl
) = 1;
2208 /* At the global binding level, we need to allocate static storage for the
2209 variable if it isn't external. Otherwise, we allocate automatic storage
2210 unless requested not to. */
2211 TREE_STATIC (var_decl
)
2212 = !extern_flag
&& (static_flag
|| global_bindings_p ());
2214 /* For an external constant whose initializer is not absolute, do not emit
2215 debug info. In DWARF this would mean a global relocation in a read-only
2216 section which runs afoul of the PE-COFF run-time relocation mechanism. */
2220 && initializer_constant_valid_p (var_init
, TREE_TYPE (var_init
))
2221 != null_pointer_node
)
2222 DECL_IGNORED_P (var_decl
) = 1;
2224 if (TREE_SIDE_EFFECTS (var_decl
))
2225 TREE_ADDRESSABLE (var_decl
) = 1;
2227 /* ??? Some attributes cannot be applied to CONST_DECLs. */
2228 if (TREE_CODE (var_decl
) == VAR_DECL
)
2229 process_attributes (&var_decl
, &attr_list
, true, gnat_node
);
2231 /* Add this decl to the current binding level. */
2232 gnat_pushdecl (var_decl
, gnat_node
);
2234 if (TREE_CODE (var_decl
) == VAR_DECL
)
2237 SET_DECL_ASSEMBLER_NAME (var_decl
, asm_name
);
2239 if (global_bindings_p ())
2240 rest_of_decl_compilation (var_decl
, true, 0);
2246 /* Return true if TYPE, an aggregate type, contains (or is) an array. */
2249 aggregate_type_contains_array_p (tree type
)
2251 switch (TREE_CODE (type
))
2255 case QUAL_UNION_TYPE
:
2258 for (field
= TYPE_FIELDS (type
); field
; field
= DECL_CHAIN (field
))
2259 if (AGGREGATE_TYPE_P (TREE_TYPE (field
))
2260 && aggregate_type_contains_array_p (TREE_TYPE (field
)))
2273 /* Return a FIELD_DECL node. FIELD_NAME is the field's name, FIELD_TYPE is
2274 its type and RECORD_TYPE is the type of the enclosing record. If SIZE is
2275 nonzero, it is the specified size of the field. If POS is nonzero, it is
2276 the bit position. PACKED is 1 if the enclosing record is packed, -1 if it
2277 has Component_Alignment of Storage_Unit. If ADDRESSABLE is nonzero, it
2278 means we are allowed to take the address of the field; if it is negative,
2279 we should not make a bitfield, which is used by make_aligning_type. */
2282 create_field_decl (tree field_name
, tree field_type
, tree record_type
,
2283 tree size
, tree pos
, int packed
, int addressable
)
2285 tree field_decl
= build_decl (input_location
,
2286 FIELD_DECL
, field_name
, field_type
);
2288 DECL_CONTEXT (field_decl
) = record_type
;
2289 TREE_READONLY (field_decl
) = TYPE_READONLY (field_type
);
2291 /* If FIELD_TYPE is BLKmode, we must ensure this is aligned to at least a
2292 byte boundary since GCC cannot handle less-aligned BLKmode bitfields.
2293 Likewise for an aggregate without specified position that contains an
2294 array, because in this case slices of variable length of this array
2295 must be handled by GCC and variable-sized objects need to be aligned
2296 to at least a byte boundary. */
2297 if (packed
&& (TYPE_MODE (field_type
) == BLKmode
2299 && AGGREGATE_TYPE_P (field_type
)
2300 && aggregate_type_contains_array_p (field_type
))))
2301 DECL_ALIGN (field_decl
) = BITS_PER_UNIT
;
2303 /* If a size is specified, use it. Otherwise, if the record type is packed
2304 compute a size to use, which may differ from the object's natural size.
2305 We always set a size in this case to trigger the checks for bitfield
2306 creation below, which is typically required when no position has been
2309 size
= convert (bitsizetype
, size
);
2310 else if (packed
== 1)
2312 size
= rm_size (field_type
);
2313 if (TYPE_MODE (field_type
) == BLKmode
)
2314 size
= round_up (size
, BITS_PER_UNIT
);
2317 /* If we may, according to ADDRESSABLE, make a bitfield if a size is
2318 specified for two reasons: first if the size differs from the natural
2319 size. Second, if the alignment is insufficient. There are a number of
2320 ways the latter can be true.
2322 We never make a bitfield if the type of the field has a nonconstant size,
2323 because no such entity requiring bitfield operations should reach here.
2325 We do *preventively* make a bitfield when there might be the need for it
2326 but we don't have all the necessary information to decide, as is the case
2327 of a field with no specified position in a packed record.
2329 We also don't look at STRICT_ALIGNMENT here, and rely on later processing
2330 in layout_decl or finish_record_type to clear the bit_field indication if
2331 it is in fact not needed. */
2332 if (addressable
>= 0
2334 && TREE_CODE (size
) == INTEGER_CST
2335 && TREE_CODE (TYPE_SIZE (field_type
)) == INTEGER_CST
2336 && (!tree_int_cst_equal (size
, TYPE_SIZE (field_type
))
2337 || (pos
&& !value_factor_p (pos
, TYPE_ALIGN (field_type
)))
2339 || (TYPE_ALIGN (record_type
) != 0
2340 && TYPE_ALIGN (record_type
) < TYPE_ALIGN (field_type
))))
2342 DECL_BIT_FIELD (field_decl
) = 1;
2343 DECL_SIZE (field_decl
) = size
;
2344 if (!packed
&& !pos
)
2346 if (TYPE_ALIGN (record_type
) != 0
2347 && TYPE_ALIGN (record_type
) < TYPE_ALIGN (field_type
))
2348 DECL_ALIGN (field_decl
) = TYPE_ALIGN (record_type
);
2350 DECL_ALIGN (field_decl
) = TYPE_ALIGN (field_type
);
2354 DECL_PACKED (field_decl
) = pos
? DECL_BIT_FIELD (field_decl
) : packed
;
2356 /* Bump the alignment if need be, either for bitfield/packing purposes or
2357 to satisfy the type requirements if no such consideration applies. When
2358 we get the alignment from the type, indicate if this is from an explicit
2359 user request, which prevents stor-layout from lowering it later on. */
2361 unsigned int bit_align
2362 = (DECL_BIT_FIELD (field_decl
) ? 1
2363 : packed
&& TYPE_MODE (field_type
) != BLKmode
? BITS_PER_UNIT
: 0);
2365 if (bit_align
> DECL_ALIGN (field_decl
))
2366 DECL_ALIGN (field_decl
) = bit_align
;
2367 else if (!bit_align
&& TYPE_ALIGN (field_type
) > DECL_ALIGN (field_decl
))
2369 DECL_ALIGN (field_decl
) = TYPE_ALIGN (field_type
);
2370 DECL_USER_ALIGN (field_decl
) = TYPE_USER_ALIGN (field_type
);
2376 /* We need to pass in the alignment the DECL is known to have.
2377 This is the lowest-order bit set in POS, but no more than
2378 the alignment of the record, if one is specified. Note
2379 that an alignment of 0 is taken as infinite. */
2380 unsigned int known_align
;
2382 if (host_integerp (pos
, 1))
2383 known_align
= tree_low_cst (pos
, 1) & - tree_low_cst (pos
, 1);
2385 known_align
= BITS_PER_UNIT
;
2387 if (TYPE_ALIGN (record_type
)
2388 && (known_align
== 0 || known_align
> TYPE_ALIGN (record_type
)))
2389 known_align
= TYPE_ALIGN (record_type
);
2391 layout_decl (field_decl
, known_align
);
2392 SET_DECL_OFFSET_ALIGN (field_decl
,
2393 host_integerp (pos
, 1) ? BIGGEST_ALIGNMENT
2395 pos_from_bit (&DECL_FIELD_OFFSET (field_decl
),
2396 &DECL_FIELD_BIT_OFFSET (field_decl
),
2397 DECL_OFFSET_ALIGN (field_decl
), pos
);
2400 /* In addition to what our caller says, claim the field is addressable if we
2401 know that its type is not suitable.
2403 The field may also be "technically" nonaddressable, meaning that even if
2404 we attempt to take the field's address we will actually get the address
2405 of a copy. This is the case for true bitfields, but the DECL_BIT_FIELD
2406 value we have at this point is not accurate enough, so we don't account
2407 for this here and let finish_record_type decide. */
2408 if (!addressable
&& !type_for_nonaliased_component_p (field_type
))
2411 DECL_NONADDRESSABLE_P (field_decl
) = !addressable
;
2416 /* Return a PARM_DECL node. PARAM_NAME is the name of the parameter and
2417 PARAM_TYPE is its type. READONLY is true if the parameter is readonly
2418 (either an In parameter or an address of a pass-by-ref parameter). */
2421 create_param_decl (tree param_name
, tree param_type
, bool readonly
)
2423 tree param_decl
= build_decl (input_location
,
2424 PARM_DECL
, param_name
, param_type
);
2426 /* Honor TARGET_PROMOTE_PROTOTYPES like the C compiler, as not doing so
2427 can lead to various ABI violations. */
2428 if (targetm
.calls
.promote_prototypes (NULL_TREE
)
2429 && INTEGRAL_TYPE_P (param_type
)
2430 && TYPE_PRECISION (param_type
) < TYPE_PRECISION (integer_type_node
))
2432 /* We have to be careful about biased types here. Make a subtype
2433 of integer_type_node with the proper biasing. */
2434 if (TREE_CODE (param_type
) == INTEGER_TYPE
2435 && TYPE_BIASED_REPRESENTATION_P (param_type
))
2438 = make_unsigned_type (TYPE_PRECISION (integer_type_node
));
2439 TREE_TYPE (subtype
) = integer_type_node
;
2440 TYPE_BIASED_REPRESENTATION_P (subtype
) = 1;
2441 SET_TYPE_RM_MIN_VALUE (subtype
, TYPE_MIN_VALUE (param_type
));
2442 SET_TYPE_RM_MAX_VALUE (subtype
, TYPE_MAX_VALUE (param_type
));
2443 param_type
= subtype
;
2446 param_type
= integer_type_node
;
2449 DECL_ARG_TYPE (param_decl
) = param_type
;
2450 TREE_READONLY (param_decl
) = readonly
;
2454 /* Process the attributes in ATTR_LIST for NODE, which is either a DECL or
2455 a TYPE. If IN_PLACE is true, the tree pointed to by NODE should not be
2456 changed. GNAT_NODE is used for the position of error messages. */
2459 process_attributes (tree
*node
, struct attrib
**attr_list
, bool in_place
,
2462 struct attrib
*attr
;
2464 for (attr
= *attr_list
; attr
; attr
= attr
->next
)
2467 case ATTR_MACHINE_ATTRIBUTE
:
2468 Sloc_to_locus (Sloc (gnat_node
), &input_location
);
2469 decl_attributes (node
, tree_cons (attr
->name
, attr
->args
, NULL_TREE
),
2470 in_place
? ATTR_FLAG_TYPE_IN_PLACE
: 0);
2473 case ATTR_LINK_ALIAS
:
2474 if (!DECL_EXTERNAL (*node
))
2476 TREE_STATIC (*node
) = 1;
2477 assemble_alias (*node
, attr
->name
);
2481 case ATTR_WEAK_EXTERNAL
:
2483 declare_weak (*node
);
2485 post_error ("?weak declarations not supported on this target",
2489 case ATTR_LINK_SECTION
:
2490 if (targetm_common
.have_named_sections
)
2492 DECL_SECTION_NAME (*node
)
2493 = build_string (IDENTIFIER_LENGTH (attr
->name
),
2494 IDENTIFIER_POINTER (attr
->name
));
2495 DECL_COMMON (*node
) = 0;
2498 post_error ("?section attributes are not supported for this target",
2502 case ATTR_LINK_CONSTRUCTOR
:
2503 DECL_STATIC_CONSTRUCTOR (*node
) = 1;
2504 TREE_USED (*node
) = 1;
2507 case ATTR_LINK_DESTRUCTOR
:
2508 DECL_STATIC_DESTRUCTOR (*node
) = 1;
2509 TREE_USED (*node
) = 1;
2512 case ATTR_THREAD_LOCAL_STORAGE
:
2513 DECL_TLS_MODEL (*node
) = decl_default_tls_model (*node
);
2514 DECL_COMMON (*node
) = 0;
2521 /* Record DECL as a global renaming pointer. */
2524 record_global_renaming_pointer (tree decl
)
2526 gcc_assert (!DECL_LOOP_PARM_P (decl
) && DECL_RENAMED_OBJECT (decl
));
2527 vec_safe_push (global_renaming_pointers
, decl
);
2530 /* Invalidate the global renaming pointers. */
2533 invalidate_global_renaming_pointers (void)
2538 if (global_renaming_pointers
== NULL
)
2541 FOR_EACH_VEC_ELT (*global_renaming_pointers
, i
, iter
)
2542 SET_DECL_RENAMED_OBJECT (iter
, NULL_TREE
);
2544 vec_free (global_renaming_pointers
);
2547 /* Return true if VALUE is a known to be a multiple of FACTOR, which must be
2551 value_factor_p (tree value
, HOST_WIDE_INT factor
)
2553 if (host_integerp (value
, 1))
2554 return tree_low_cst (value
, 1) % factor
== 0;
2556 if (TREE_CODE (value
) == MULT_EXPR
)
2557 return (value_factor_p (TREE_OPERAND (value
, 0), factor
)
2558 || value_factor_p (TREE_OPERAND (value
, 1), factor
));
2563 /* Given two consecutive field decls PREV_FIELD and CURR_FIELD, return true
2564 unless we can prove these 2 fields are laid out in such a way that no gap
2565 exist between the end of PREV_FIELD and the beginning of CURR_FIELD. OFFSET
2566 is the distance in bits between the end of PREV_FIELD and the starting
2567 position of CURR_FIELD. It is ignored if null. */
2570 potential_alignment_gap (tree prev_field
, tree curr_field
, tree offset
)
2572 /* If this is the first field of the record, there cannot be any gap */
2576 /* If the previous field is a union type, then return False: The only
2577 time when such a field is not the last field of the record is when
2578 there are other components at fixed positions after it (meaning there
2579 was a rep clause for every field), in which case we don't want the
2580 alignment constraint to override them. */
2581 if (TREE_CODE (TREE_TYPE (prev_field
)) == QUAL_UNION_TYPE
)
2584 /* If the distance between the end of prev_field and the beginning of
2585 curr_field is constant, then there is a gap if the value of this
2586 constant is not null. */
2587 if (offset
&& host_integerp (offset
, 1))
2588 return !integer_zerop (offset
);
2590 /* If the size and position of the previous field are constant,
2591 then check the sum of this size and position. There will be a gap
2592 iff it is not multiple of the current field alignment. */
2593 if (host_integerp (DECL_SIZE (prev_field
), 1)
2594 && host_integerp (bit_position (prev_field
), 1))
2595 return ((tree_low_cst (bit_position (prev_field
), 1)
2596 + tree_low_cst (DECL_SIZE (prev_field
), 1))
2597 % DECL_ALIGN (curr_field
) != 0);
2599 /* If both the position and size of the previous field are multiples
2600 of the current field alignment, there cannot be any gap. */
2601 if (value_factor_p (bit_position (prev_field
), DECL_ALIGN (curr_field
))
2602 && value_factor_p (DECL_SIZE (prev_field
), DECL_ALIGN (curr_field
)))
2605 /* Fallback, return that there may be a potential gap */
2609 /* Return a LABEL_DECL with LABEL_NAME. GNAT_NODE is used for the position
2613 create_label_decl (tree label_name
, Node_Id gnat_node
)
2616 = build_decl (input_location
, LABEL_DECL
, label_name
, void_type_node
);
2618 DECL_MODE (label_decl
) = VOIDmode
;
2620 /* Add this decl to the current binding level. */
2621 gnat_pushdecl (label_decl
, gnat_node
);
2626 /* Return a FUNCTION_DECL node. SUBPROG_NAME is the name of the subprogram,
2627 ASM_NAME is its assembler name, SUBPROG_TYPE is its type (a FUNCTION_TYPE
2628 node), PARAM_DECL_LIST is the list of the subprogram arguments (a list of
2629 PARM_DECL nodes chained through the DECL_CHAIN field).
2631 INLINE_STATUS, PUBLIC_FLAG, EXTERN_FLAG, ARTIFICIAL_FLAG and ATTR_LIST are
2632 used to set the appropriate fields in the FUNCTION_DECL. GNAT_NODE is
2633 used for the position of the decl. */
2636 create_subprog_decl (tree subprog_name
, tree asm_name
, tree subprog_type
,
2637 tree param_decl_list
, enum inline_status_t inline_status
,
2638 bool public_flag
, bool extern_flag
, bool artificial_flag
,
2639 struct attrib
*attr_list
, Node_Id gnat_node
)
2641 tree subprog_decl
= build_decl (input_location
, FUNCTION_DECL
, subprog_name
,
2643 tree result_decl
= build_decl (input_location
, RESULT_DECL
, NULL_TREE
,
2644 TREE_TYPE (subprog_type
));
2645 DECL_ARGUMENTS (subprog_decl
) = param_decl_list
;
2647 /* If this is a non-inline function nested inside an inlined external
2648 function, we cannot honor both requests without cloning the nested
2649 function in the current unit since it is private to the other unit.
2650 We could inline the nested function as well but it's probably better
2651 to err on the side of too little inlining. */
2652 if (inline_status
!= is_enabled
2654 && current_function_decl
2655 && DECL_DECLARED_INLINE_P (current_function_decl
)
2656 && DECL_EXTERNAL (current_function_decl
))
2657 DECL_DECLARED_INLINE_P (current_function_decl
) = 0;
2659 DECL_ARTIFICIAL (subprog_decl
) = artificial_flag
;
2660 DECL_EXTERNAL (subprog_decl
) = extern_flag
;
2662 switch (inline_status
)
2665 DECL_UNINLINABLE (subprog_decl
) = 1;
2672 DECL_DECLARED_INLINE_P (subprog_decl
) = 1;
2673 DECL_NO_INLINE_WARNING_P (subprog_decl
) = artificial_flag
;
2680 TREE_PUBLIC (subprog_decl
) = public_flag
;
2681 TREE_READONLY (subprog_decl
) = TYPE_READONLY (subprog_type
);
2682 TREE_THIS_VOLATILE (subprog_decl
) = TYPE_VOLATILE (subprog_type
);
2683 TREE_SIDE_EFFECTS (subprog_decl
) = TYPE_VOLATILE (subprog_type
);
2685 DECL_ARTIFICIAL (result_decl
) = 1;
2686 DECL_IGNORED_P (result_decl
) = 1;
2687 DECL_BY_REFERENCE (result_decl
) = TREE_ADDRESSABLE (subprog_type
);
2688 DECL_RESULT (subprog_decl
) = result_decl
;
2692 SET_DECL_ASSEMBLER_NAME (subprog_decl
, asm_name
);
2694 /* The expand_main_function circuitry expects "main_identifier_node" to
2695 designate the DECL_NAME of the 'main' entry point, in turn expected
2696 to be declared as the "main" function literally by default. Ada
2697 program entry points are typically declared with a different name
2698 within the binder generated file, exported as 'main' to satisfy the
2699 system expectations. Force main_identifier_node in this case. */
2700 if (asm_name
== main_identifier_node
)
2701 DECL_NAME (subprog_decl
) = main_identifier_node
;
2704 process_attributes (&subprog_decl
, &attr_list
, true, gnat_node
);
2706 /* Add this decl to the current binding level. */
2707 gnat_pushdecl (subprog_decl
, gnat_node
);
2709 /* Output the assembler code and/or RTL for the declaration. */
2710 rest_of_decl_compilation (subprog_decl
, global_bindings_p (), 0);
2712 return subprog_decl
;
2715 /* Set up the framework for generating code for SUBPROG_DECL, a subprogram
2716 body. This routine needs to be invoked before processing the declarations
2717 appearing in the subprogram. */
2720 begin_subprog_body (tree subprog_decl
)
2724 announce_function (subprog_decl
);
2726 /* This function is being defined. */
2727 TREE_STATIC (subprog_decl
) = 1;
2729 current_function_decl
= subprog_decl
;
2731 /* Enter a new binding level and show that all the parameters belong to
2735 for (param_decl
= DECL_ARGUMENTS (subprog_decl
); param_decl
;
2736 param_decl
= DECL_CHAIN (param_decl
))
2737 DECL_CONTEXT (param_decl
) = subprog_decl
;
2739 make_decl_rtl (subprog_decl
);
2742 /* Finish translating the current subprogram and set its BODY. */
2745 end_subprog_body (tree body
)
2747 tree fndecl
= current_function_decl
;
2749 /* Attach the BLOCK for this level to the function and pop the level. */
2750 BLOCK_SUPERCONTEXT (current_binding_level
->block
) = fndecl
;
2751 DECL_INITIAL (fndecl
) = current_binding_level
->block
;
2754 /* Mark the RESULT_DECL as being in this subprogram. */
2755 DECL_CONTEXT (DECL_RESULT (fndecl
)) = fndecl
;
2757 /* The body should be a BIND_EXPR whose BLOCK is the top-level one. */
2758 if (TREE_CODE (body
) == BIND_EXPR
)
2760 BLOCK_SUPERCONTEXT (BIND_EXPR_BLOCK (body
)) = fndecl
;
2761 DECL_INITIAL (fndecl
) = BIND_EXPR_BLOCK (body
);
2764 DECL_SAVED_TREE (fndecl
) = body
;
2766 current_function_decl
= decl_function_context (fndecl
);
2769 /* Wrap up compilation of SUBPROG_DECL, a subprogram body. */
2772 rest_of_subprog_body_compilation (tree subprog_decl
)
2774 /* We cannot track the location of errors past this point. */
2775 error_gnat_node
= Empty
;
2777 /* If we're only annotating types, don't actually compile this function. */
2778 if (type_annotate_only
)
2781 /* Dump functions before gimplification. */
2782 dump_function (TDI_original
, subprog_decl
);
2784 if (!decl_function_context (subprog_decl
))
2785 cgraph_finalize_function (subprog_decl
, false);
2787 /* Register this function with cgraph just far enough to get it
2788 added to our parent's nested function list. */
2789 (void) cgraph_get_create_node (subprog_decl
);
2793 gnat_builtin_function (tree decl
)
2795 gnat_pushdecl (decl
, Empty
);
2799 /* Return an integer type with the number of bits of precision given by
2800 PRECISION. UNSIGNEDP is nonzero if the type is unsigned; otherwise
2801 it is a signed type. */
2804 gnat_type_for_size (unsigned precision
, int unsignedp
)
2809 if (precision
<= 2 * MAX_BITS_PER_WORD
2810 && signed_and_unsigned_types
[precision
][unsignedp
])
2811 return signed_and_unsigned_types
[precision
][unsignedp
];
2814 t
= make_unsigned_type (precision
);
2816 t
= make_signed_type (precision
);
2818 if (precision
<= 2 * MAX_BITS_PER_WORD
)
2819 signed_and_unsigned_types
[precision
][unsignedp
] = t
;
2823 sprintf (type_name
, "%sSIGNED_%u", unsignedp
? "UN" : "", precision
);
2824 TYPE_NAME (t
) = get_identifier (type_name
);
2830 /* Likewise for floating-point types. */
2833 float_type_for_precision (int precision
, enum machine_mode mode
)
2838 if (float_types
[(int) mode
])
2839 return float_types
[(int) mode
];
2841 float_types
[(int) mode
] = t
= make_node (REAL_TYPE
);
2842 TYPE_PRECISION (t
) = precision
;
2845 gcc_assert (TYPE_MODE (t
) == mode
);
2848 sprintf (type_name
, "FLOAT_%d", precision
);
2849 TYPE_NAME (t
) = get_identifier (type_name
);
2855 /* Return a data type that has machine mode MODE. UNSIGNEDP selects
2856 an unsigned type; otherwise a signed type is returned. */
2859 gnat_type_for_mode (enum machine_mode mode
, int unsignedp
)
2861 if (mode
== BLKmode
)
2864 if (mode
== VOIDmode
)
2865 return void_type_node
;
2867 if (COMPLEX_MODE_P (mode
))
2870 if (SCALAR_FLOAT_MODE_P (mode
))
2871 return float_type_for_precision (GET_MODE_PRECISION (mode
), mode
);
2873 if (SCALAR_INT_MODE_P (mode
))
2874 return gnat_type_for_size (GET_MODE_BITSIZE (mode
), unsignedp
);
2876 if (VECTOR_MODE_P (mode
))
2878 enum machine_mode inner_mode
= GET_MODE_INNER (mode
);
2879 tree inner_type
= gnat_type_for_mode (inner_mode
, unsignedp
);
2881 return build_vector_type_for_mode (inner_type
, mode
);
2887 /* Return the unsigned version of a TYPE_NODE, a scalar type. */
2890 gnat_unsigned_type (tree type_node
)
2892 tree type
= gnat_type_for_size (TYPE_PRECISION (type_node
), 1);
2894 if (TREE_CODE (type_node
) == INTEGER_TYPE
&& TYPE_MODULAR_P (type_node
))
2896 type
= copy_node (type
);
2897 TREE_TYPE (type
) = type_node
;
2899 else if (TREE_TYPE (type_node
)
2900 && TREE_CODE (TREE_TYPE (type_node
)) == INTEGER_TYPE
2901 && TYPE_MODULAR_P (TREE_TYPE (type_node
)))
2903 type
= copy_node (type
);
2904 TREE_TYPE (type
) = TREE_TYPE (type_node
);
2910 /* Return the signed version of a TYPE_NODE, a scalar type. */
2913 gnat_signed_type (tree type_node
)
2915 tree type
= gnat_type_for_size (TYPE_PRECISION (type_node
), 0);
2917 if (TREE_CODE (type_node
) == INTEGER_TYPE
&& TYPE_MODULAR_P (type_node
))
2919 type
= copy_node (type
);
2920 TREE_TYPE (type
) = type_node
;
2922 else if (TREE_TYPE (type_node
)
2923 && TREE_CODE (TREE_TYPE (type_node
)) == INTEGER_TYPE
2924 && TYPE_MODULAR_P (TREE_TYPE (type_node
)))
2926 type
= copy_node (type
);
2927 TREE_TYPE (type
) = TREE_TYPE (type_node
);
2933 /* Return 1 if the types T1 and T2 are compatible, i.e. if they can be
2934 transparently converted to each other. */
2937 gnat_types_compatible_p (tree t1
, tree t2
)
2939 enum tree_code code
;
2941 /* This is the default criterion. */
2942 if (TYPE_MAIN_VARIANT (t1
) == TYPE_MAIN_VARIANT (t2
))
2945 /* We only check structural equivalence here. */
2946 if ((code
= TREE_CODE (t1
)) != TREE_CODE (t2
))
2949 /* Vector types are also compatible if they have the same number of subparts
2950 and the same form of (scalar) element type. */
2951 if (code
== VECTOR_TYPE
2952 && TYPE_VECTOR_SUBPARTS (t1
) == TYPE_VECTOR_SUBPARTS (t2
)
2953 && TREE_CODE (TREE_TYPE (t1
)) == TREE_CODE (TREE_TYPE (t2
))
2954 && TYPE_PRECISION (TREE_TYPE (t1
)) == TYPE_PRECISION (TREE_TYPE (t2
)))
2957 /* Array types are also compatible if they are constrained and have the same
2958 domain(s) and the same component type. */
2959 if (code
== ARRAY_TYPE
2960 && (TYPE_DOMAIN (t1
) == TYPE_DOMAIN (t2
)
2961 || (TYPE_DOMAIN (t1
)
2963 && tree_int_cst_equal (TYPE_MIN_VALUE (TYPE_DOMAIN (t1
)),
2964 TYPE_MIN_VALUE (TYPE_DOMAIN (t2
)))
2965 && tree_int_cst_equal (TYPE_MAX_VALUE (TYPE_DOMAIN (t1
)),
2966 TYPE_MAX_VALUE (TYPE_DOMAIN (t2
)))))
2967 && (TREE_TYPE (t1
) == TREE_TYPE (t2
)
2968 || (TREE_CODE (TREE_TYPE (t1
)) == ARRAY_TYPE
2969 && gnat_types_compatible_p (TREE_TYPE (t1
), TREE_TYPE (t2
)))))
2975 /* Return true if EXPR is a useless type conversion. */
2978 gnat_useless_type_conversion (tree expr
)
2980 if (CONVERT_EXPR_P (expr
)
2981 || TREE_CODE (expr
) == VIEW_CONVERT_EXPR
2982 || TREE_CODE (expr
) == NON_LVALUE_EXPR
)
2983 return gnat_types_compatible_p (TREE_TYPE (expr
),
2984 TREE_TYPE (TREE_OPERAND (expr
, 0)));
2989 /* Return true if T, a FUNCTION_TYPE, has the specified list of flags. */
2992 fntype_same_flags_p (const_tree t
, tree cico_list
, bool return_unconstrained_p
,
2993 bool return_by_direct_ref_p
, bool return_by_invisi_ref_p
)
2995 return TYPE_CI_CO_LIST (t
) == cico_list
2996 && TYPE_RETURN_UNCONSTRAINED_P (t
) == return_unconstrained_p
2997 && TYPE_RETURN_BY_DIRECT_REF_P (t
) == return_by_direct_ref_p
2998 && TREE_ADDRESSABLE (t
) == return_by_invisi_ref_p
;
3001 /* EXP is an expression for the size of an object. If this size contains
3002 discriminant references, replace them with the maximum (if MAX_P) or
3003 minimum (if !MAX_P) possible value of the discriminant. */
3006 max_size (tree exp
, bool max_p
)
3008 enum tree_code code
= TREE_CODE (exp
);
3009 tree type
= TREE_TYPE (exp
);
3011 switch (TREE_CODE_CLASS (code
))
3013 case tcc_declaration
:
3018 if (code
== CALL_EXPR
)
3023 t
= maybe_inline_call_in_expr (exp
);
3025 return max_size (t
, max_p
);
3027 n
= call_expr_nargs (exp
);
3029 argarray
= XALLOCAVEC (tree
, n
);
3030 for (i
= 0; i
< n
; i
++)
3031 argarray
[i
] = max_size (CALL_EXPR_ARG (exp
, i
), max_p
);
3032 return build_call_array (type
, CALL_EXPR_FN (exp
), n
, argarray
);
3037 /* If this contains a PLACEHOLDER_EXPR, it is the thing we want to
3038 modify. Otherwise, we treat it like a variable. */
3039 if (!CONTAINS_PLACEHOLDER_P (exp
))
3042 type
= TREE_TYPE (TREE_OPERAND (exp
, 1));
3044 max_size (max_p
? TYPE_MAX_VALUE (type
) : TYPE_MIN_VALUE (type
), true);
3046 case tcc_comparison
:
3047 return max_p
? size_one_node
: size_zero_node
;
3050 if (code
== NON_LVALUE_EXPR
)
3051 return max_size (TREE_OPERAND (exp
, 0), max_p
);
3053 return fold_build1 (code
, type
,
3054 max_size (TREE_OPERAND (exp
, 0),
3055 code
== NEGATE_EXPR
? !max_p
: max_p
));
3059 tree lhs
= max_size (TREE_OPERAND (exp
, 0), max_p
);
3060 tree rhs
= max_size (TREE_OPERAND (exp
, 1),
3061 code
== MINUS_EXPR
? !max_p
: max_p
);
3063 /* Special-case wanting the maximum value of a MIN_EXPR.
3064 In that case, if one side overflows, return the other. */
3065 if (max_p
&& code
== MIN_EXPR
)
3067 if (TREE_CODE (rhs
) == INTEGER_CST
&& TREE_OVERFLOW (rhs
))
3070 if (TREE_CODE (lhs
) == INTEGER_CST
&& TREE_OVERFLOW (lhs
))
3074 /* Likewise, handle a MINUS_EXPR or PLUS_EXPR with the LHS
3075 overflowing and the RHS a variable. */
3076 if ((code
== MINUS_EXPR
|| code
== PLUS_EXPR
)
3077 && TREE_CODE (lhs
) == INTEGER_CST
3078 && TREE_OVERFLOW (lhs
)
3079 && !TREE_CONSTANT (rhs
))
3082 return size_binop (code
, lhs
, rhs
);
3085 case tcc_expression
:
3086 switch (TREE_CODE_LENGTH (code
))
3089 if (code
== SAVE_EXPR
)
3092 return fold_build1 (code
, type
,
3093 max_size (TREE_OPERAND (exp
, 0), max_p
));
3096 if (code
== COMPOUND_EXPR
)
3097 return max_size (TREE_OPERAND (exp
, 1), max_p
);
3099 return fold_build2 (code
, type
,
3100 max_size (TREE_OPERAND (exp
, 0), max_p
),
3101 max_size (TREE_OPERAND (exp
, 1), max_p
));
3104 if (code
== COND_EXPR
)
3105 return fold_build2 (max_p
? MAX_EXPR
: MIN_EXPR
, type
,
3106 max_size (TREE_OPERAND (exp
, 1), max_p
),
3107 max_size (TREE_OPERAND (exp
, 2), max_p
));
3113 /* Other tree classes cannot happen. */
3121 /* Build a template of type TEMPLATE_TYPE from the array bounds of ARRAY_TYPE.
3122 EXPR is an expression that we can use to locate any PLACEHOLDER_EXPRs.
3123 Return a constructor for the template. */
3126 build_template (tree template_type
, tree array_type
, tree expr
)
3128 vec
<constructor_elt
, va_gc
> *template_elts
= NULL
;
3129 tree bound_list
= NULL_TREE
;
3132 while (TREE_CODE (array_type
) == RECORD_TYPE
3133 && (TYPE_PADDING_P (array_type
)
3134 || TYPE_JUSTIFIED_MODULAR_P (array_type
)))
3135 array_type
= TREE_TYPE (TYPE_FIELDS (array_type
));
3137 if (TREE_CODE (array_type
) == ARRAY_TYPE
3138 || (TREE_CODE (array_type
) == INTEGER_TYPE
3139 && TYPE_HAS_ACTUAL_BOUNDS_P (array_type
)))
3140 bound_list
= TYPE_ACTUAL_BOUNDS (array_type
);
3142 /* First make the list for a CONSTRUCTOR for the template. Go down the
3143 field list of the template instead of the type chain because this
3144 array might be an Ada array of arrays and we can't tell where the
3145 nested arrays stop being the underlying object. */
3147 for (field
= TYPE_FIELDS (template_type
); field
;
3149 ? (bound_list
= TREE_CHAIN (bound_list
))
3150 : (array_type
= TREE_TYPE (array_type
))),
3151 field
= DECL_CHAIN (DECL_CHAIN (field
)))
3153 tree bounds
, min
, max
;
3155 /* If we have a bound list, get the bounds from there. Likewise
3156 for an ARRAY_TYPE. Otherwise, if expr is a PARM_DECL with
3157 DECL_BY_COMPONENT_PTR_P, use the bounds of the field in the template.
3158 This will give us a maximum range. */
3160 bounds
= TREE_VALUE (bound_list
);
3161 else if (TREE_CODE (array_type
) == ARRAY_TYPE
)
3162 bounds
= TYPE_INDEX_TYPE (TYPE_DOMAIN (array_type
));
3163 else if (expr
&& TREE_CODE (expr
) == PARM_DECL
3164 && DECL_BY_COMPONENT_PTR_P (expr
))
3165 bounds
= TREE_TYPE (field
);
3169 min
= convert (TREE_TYPE (field
), TYPE_MIN_VALUE (bounds
));
3170 max
= convert (TREE_TYPE (DECL_CHAIN (field
)), TYPE_MAX_VALUE (bounds
));
3172 /* If either MIN or MAX involve a PLACEHOLDER_EXPR, we must
3173 substitute it from OBJECT. */
3174 min
= SUBSTITUTE_PLACEHOLDER_IN_EXPR (min
, expr
);
3175 max
= SUBSTITUTE_PLACEHOLDER_IN_EXPR (max
, expr
);
3177 CONSTRUCTOR_APPEND_ELT (template_elts
, field
, min
);
3178 CONSTRUCTOR_APPEND_ELT (template_elts
, DECL_CHAIN (field
), max
);
3181 return gnat_build_constructor (template_type
, template_elts
);
3184 /* Helper routine to make a descriptor field. FIELD_LIST is the list of decls
3185 being built; the new decl is chained on to the front of the list. */
3188 make_descriptor_field (const char *name
, tree type
, tree rec_type
,
3189 tree initial
, tree field_list
)
3192 = create_field_decl (get_identifier (name
), type
, rec_type
, NULL_TREE
,
3195 DECL_INITIAL (field
) = initial
;
3196 DECL_CHAIN (field
) = field_list
;
3200 /* Build a 32-bit VMS descriptor from a Mechanism_Type, which must specify a
3201 descriptor type, and the GCC type of an object. Each FIELD_DECL in the
3202 type contains in its DECL_INITIAL the expression to use when a constructor
3203 is made for the type. GNAT_ENTITY is an entity used to print out an error
3204 message if the mechanism cannot be applied to an object of that type and
3205 also for the name. */
3208 build_vms_descriptor32 (tree type
, Mechanism_Type mech
, Entity_Id gnat_entity
)
3210 tree record_type
= make_node (RECORD_TYPE
);
3211 tree pointer32_type
, pointer64_type
;
3212 tree field_list
= NULL_TREE
;
3213 int klass
, ndim
, i
, dtype
= 0;
3214 tree inner_type
, tem
;
3217 /* If TYPE is an unconstrained array, use the underlying array type. */
3218 if (TREE_CODE (type
) == UNCONSTRAINED_ARRAY_TYPE
)
3219 type
= TREE_TYPE (TREE_TYPE (TYPE_FIELDS (TREE_TYPE (type
))));
3221 /* If this is an array, compute the number of dimensions in the array,
3222 get the index types, and point to the inner type. */
3223 if (TREE_CODE (type
) != ARRAY_TYPE
)
3226 for (ndim
= 1, inner_type
= type
;
3227 TREE_CODE (TREE_TYPE (inner_type
)) == ARRAY_TYPE
3228 && TYPE_MULTI_ARRAY_P (TREE_TYPE (inner_type
));
3229 ndim
++, inner_type
= TREE_TYPE (inner_type
))
3232 idx_arr
= XALLOCAVEC (tree
, ndim
);
3234 if (mech
!= By_Descriptor_NCA
&& mech
!= By_Short_Descriptor_NCA
3235 && TREE_CODE (type
) == ARRAY_TYPE
&& TYPE_CONVENTION_FORTRAN_P (type
))
3236 for (i
= ndim
- 1, inner_type
= type
;
3238 i
--, inner_type
= TREE_TYPE (inner_type
))
3239 idx_arr
[i
] = TYPE_DOMAIN (inner_type
);
3241 for (i
= 0, inner_type
= type
;
3243 i
++, inner_type
= TREE_TYPE (inner_type
))
3244 idx_arr
[i
] = TYPE_DOMAIN (inner_type
);
3246 /* Now get the DTYPE value. */
3247 switch (TREE_CODE (type
))
3252 if (TYPE_VAX_FLOATING_POINT_P (type
))
3253 switch (tree_low_cst (TYPE_DIGITS_VALUE (type
), 1))
3266 switch (GET_MODE_BITSIZE (TYPE_MODE (type
)))
3269 dtype
= TYPE_UNSIGNED (type
) ? 2 : 6;
3272 dtype
= TYPE_UNSIGNED (type
) ? 3 : 7;
3275 dtype
= TYPE_UNSIGNED (type
) ? 4 : 8;
3278 dtype
= TYPE_UNSIGNED (type
) ? 5 : 9;
3281 dtype
= TYPE_UNSIGNED (type
) ? 25 : 26;
3287 dtype
= GET_MODE_BITSIZE (TYPE_MODE (type
)) == 32 ? 52 : 53;
3291 if (TREE_CODE (TREE_TYPE (type
)) == INTEGER_TYPE
3292 && TYPE_VAX_FLOATING_POINT_P (type
))
3293 switch (tree_low_cst (TYPE_DIGITS_VALUE (type
), 1))
3305 dtype
= GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (type
))) == 32 ? 54: 55;
3316 /* Get the CLASS value. */
3319 case By_Descriptor_A
:
3320 case By_Short_Descriptor_A
:
3323 case By_Descriptor_NCA
:
3324 case By_Short_Descriptor_NCA
:
3327 case By_Descriptor_SB
:
3328 case By_Short_Descriptor_SB
:
3332 case By_Short_Descriptor
:
3333 case By_Descriptor_S
:
3334 case By_Short_Descriptor_S
:
3340 /* Make the type for a descriptor for VMS. The first four fields are the
3341 same for all types. */
3343 = make_descriptor_field ("LENGTH", gnat_type_for_size (16, 1), record_type
,
3344 size_in_bytes ((mech
== By_Descriptor_A
3345 || mech
== By_Short_Descriptor_A
)
3346 ? inner_type
: type
),
3349 = make_descriptor_field ("DTYPE", gnat_type_for_size (8, 1), record_type
,
3350 size_int (dtype
), field_list
);
3352 = make_descriptor_field ("CLASS", gnat_type_for_size (8, 1), record_type
,
3353 size_int (klass
), field_list
);
3355 pointer32_type
= build_pointer_type_for_mode (type
, SImode
, false);
3356 pointer64_type
= build_pointer_type_for_mode (type
, DImode
, false);
3358 /* Ensure that only 32-bit pointers are passed in 32-bit descriptors. Note
3359 that we cannot build a template call to the CE routine as it would get a
3360 wrong source location; instead we use a second placeholder for it. */
3361 tem
= build_unary_op (ADDR_EXPR
, pointer64_type
,
3362 build0 (PLACEHOLDER_EXPR
, type
));
3363 tem
= build3 (COND_EXPR
, pointer32_type
,
3365 ? build_binary_op (GE_EXPR
, boolean_type_node
, tem
,
3366 build_int_cstu (pointer64_type
, 0x80000000))
3367 : boolean_false_node
,
3368 build0 (PLACEHOLDER_EXPR
, void_type_node
),
3369 convert (pointer32_type
, tem
));
3372 = make_descriptor_field ("POINTER", pointer32_type
, record_type
, tem
,
3378 case By_Short_Descriptor
:
3379 case By_Descriptor_S
:
3380 case By_Short_Descriptor_S
:
3383 case By_Descriptor_SB
:
3384 case By_Short_Descriptor_SB
:
3386 = make_descriptor_field ("SB_L1", gnat_type_for_size (32, 1),
3388 (TREE_CODE (type
) == ARRAY_TYPE
3389 ? TYPE_MIN_VALUE (TYPE_DOMAIN (type
))
3393 = make_descriptor_field ("SB_U1", gnat_type_for_size (32, 1),
3395 (TREE_CODE (type
) == ARRAY_TYPE
3396 ? TYPE_MAX_VALUE (TYPE_DOMAIN (type
))
3401 case By_Descriptor_A
:
3402 case By_Short_Descriptor_A
:
3403 case By_Descriptor_NCA
:
3404 case By_Short_Descriptor_NCA
:
3406 = make_descriptor_field ("SCALE", gnat_type_for_size (8, 1),
3407 record_type
, size_zero_node
, field_list
);
3410 = make_descriptor_field ("DIGITS", gnat_type_for_size (8, 1),
3411 record_type
, size_zero_node
, field_list
);
3414 = make_descriptor_field ("AFLAGS", gnat_type_for_size (8, 1),
3416 size_int ((mech
== By_Descriptor_NCA
3417 || mech
== By_Short_Descriptor_NCA
)
3419 /* Set FL_COLUMN, FL_COEFF, and
3421 : (TREE_CODE (type
) == ARRAY_TYPE
3422 && TYPE_CONVENTION_FORTRAN_P
3428 = make_descriptor_field ("DIMCT", gnat_type_for_size (8, 1),
3429 record_type
, size_int (ndim
), field_list
);
3432 = make_descriptor_field ("ARSIZE", gnat_type_for_size (32, 1),
3433 record_type
, size_in_bytes (type
),
3436 /* Now build a pointer to the 0,0,0... element. */
3437 tem
= build0 (PLACEHOLDER_EXPR
, type
);
3438 for (i
= 0, inner_type
= type
; i
< ndim
;
3439 i
++, inner_type
= TREE_TYPE (inner_type
))
3440 tem
= build4 (ARRAY_REF
, TREE_TYPE (inner_type
), tem
,
3441 convert (TYPE_DOMAIN (inner_type
), size_zero_node
),
3442 NULL_TREE
, NULL_TREE
);
3445 = make_descriptor_field ("A0", pointer32_type
, record_type
,
3446 build1 (ADDR_EXPR
, pointer32_type
, tem
),
3449 /* Next come the addressing coefficients. */
3450 tem
= size_one_node
;
3451 for (i
= 0; i
< ndim
; i
++)
3455 = size_binop (MULT_EXPR
, tem
,
3456 size_binop (PLUS_EXPR
,
3457 size_binop (MINUS_EXPR
,
3458 TYPE_MAX_VALUE (idx_arr
[i
]),
3459 TYPE_MIN_VALUE (idx_arr
[i
])),
3462 fname
[0] = ((mech
== By_Descriptor_NCA
||
3463 mech
== By_Short_Descriptor_NCA
) ? 'S' : 'M');
3464 fname
[1] = '0' + i
, fname
[2] = 0;
3466 = make_descriptor_field (fname
, gnat_type_for_size (32, 1),
3467 record_type
, idx_length
, field_list
);
3469 if (mech
== By_Descriptor_NCA
|| mech
== By_Short_Descriptor_NCA
)
3473 /* Finally here are the bounds. */
3474 for (i
= 0; i
< ndim
; i
++)
3478 fname
[0] = 'L', fname
[1] = '0' + i
, fname
[2] = 0;
3480 = make_descriptor_field (fname
, gnat_type_for_size (32, 1),
3481 record_type
, TYPE_MIN_VALUE (idx_arr
[i
]),
3486 = make_descriptor_field (fname
, gnat_type_for_size (32, 1),
3487 record_type
, TYPE_MAX_VALUE (idx_arr
[i
]),
3493 post_error ("unsupported descriptor type for &", gnat_entity
);
3496 TYPE_NAME (record_type
) = create_concat_name (gnat_entity
, "DESC");
3497 finish_record_type (record_type
, nreverse (field_list
), 0, false);
3501 /* Build a 64-bit VMS descriptor from a Mechanism_Type, which must specify a
3502 descriptor type, and the GCC type of an object. Each FIELD_DECL in the
3503 type contains in its DECL_INITIAL the expression to use when a constructor
3504 is made for the type. GNAT_ENTITY is an entity used to print out an error
3505 message if the mechanism cannot be applied to an object of that type and
3506 also for the name. */
3509 build_vms_descriptor (tree type
, Mechanism_Type mech
, Entity_Id gnat_entity
)
3511 tree record_type
= make_node (RECORD_TYPE
);
3512 tree pointer64_type
;
3513 tree field_list
= NULL_TREE
;
3514 int klass
, ndim
, i
, dtype
= 0;
3515 tree inner_type
, tem
;
3518 /* If TYPE is an unconstrained array, use the underlying array type. */
3519 if (TREE_CODE (type
) == UNCONSTRAINED_ARRAY_TYPE
)
3520 type
= TREE_TYPE (TREE_TYPE (TYPE_FIELDS (TREE_TYPE (type
))));
3522 /* If this is an array, compute the number of dimensions in the array,
3523 get the index types, and point to the inner type. */
3524 if (TREE_CODE (type
) != ARRAY_TYPE
)
3527 for (ndim
= 1, inner_type
= type
;
3528 TREE_CODE (TREE_TYPE (inner_type
)) == ARRAY_TYPE
3529 && TYPE_MULTI_ARRAY_P (TREE_TYPE (inner_type
));
3530 ndim
++, inner_type
= TREE_TYPE (inner_type
))
3533 idx_arr
= XALLOCAVEC (tree
, ndim
);
3535 if (mech
!= By_Descriptor_NCA
3536 && TREE_CODE (type
) == ARRAY_TYPE
&& TYPE_CONVENTION_FORTRAN_P (type
))
3537 for (i
= ndim
- 1, inner_type
= type
;
3539 i
--, inner_type
= TREE_TYPE (inner_type
))
3540 idx_arr
[i
] = TYPE_DOMAIN (inner_type
);
3542 for (i
= 0, inner_type
= type
;
3544 i
++, inner_type
= TREE_TYPE (inner_type
))
3545 idx_arr
[i
] = TYPE_DOMAIN (inner_type
);
3547 /* Now get the DTYPE value. */
3548 switch (TREE_CODE (type
))
3553 if (TYPE_VAX_FLOATING_POINT_P (type
))
3554 switch (tree_low_cst (TYPE_DIGITS_VALUE (type
), 1))
3567 switch (GET_MODE_BITSIZE (TYPE_MODE (type
)))
3570 dtype
= TYPE_UNSIGNED (type
) ? 2 : 6;
3573 dtype
= TYPE_UNSIGNED (type
) ? 3 : 7;
3576 dtype
= TYPE_UNSIGNED (type
) ? 4 : 8;
3579 dtype
= TYPE_UNSIGNED (type
) ? 5 : 9;
3582 dtype
= TYPE_UNSIGNED (type
) ? 25 : 26;
3588 dtype
= GET_MODE_BITSIZE (TYPE_MODE (type
)) == 32 ? 52 : 53;
3592 if (TREE_CODE (TREE_TYPE (type
)) == INTEGER_TYPE
3593 && TYPE_VAX_FLOATING_POINT_P (type
))
3594 switch (tree_low_cst (TYPE_DIGITS_VALUE (type
), 1))
3606 dtype
= GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (type
))) == 32 ? 54: 55;
3617 /* Get the CLASS value. */
3620 case By_Descriptor_A
:
3623 case By_Descriptor_NCA
:
3626 case By_Descriptor_SB
:
3630 case By_Descriptor_S
:
3636 /* Make the type for a 64-bit descriptor for VMS. The first six fields
3637 are the same for all types. */
3639 = make_descriptor_field ("MBO", gnat_type_for_size (16, 1),
3640 record_type
, size_int (1), field_list
);
3642 = make_descriptor_field ("DTYPE", gnat_type_for_size (8, 1),
3643 record_type
, size_int (dtype
), field_list
);
3645 = make_descriptor_field ("CLASS", gnat_type_for_size (8, 1),
3646 record_type
, size_int (klass
), field_list
);
3648 = make_descriptor_field ("MBMO", gnat_type_for_size (32, 1),
3649 record_type
, size_int (-1), field_list
);
3651 = make_descriptor_field ("LENGTH", gnat_type_for_size (64, 1),
3653 size_in_bytes (mech
== By_Descriptor_A
3654 ? inner_type
: type
),
3657 pointer64_type
= build_pointer_type_for_mode (type
, DImode
, false);
3660 = make_descriptor_field ("POINTER", pointer64_type
, record_type
,
3661 build_unary_op (ADDR_EXPR
, pointer64_type
,
3662 build0 (PLACEHOLDER_EXPR
, type
)),
3668 case By_Descriptor_S
:
3671 case By_Descriptor_SB
:
3673 = make_descriptor_field ("SB_L1", gnat_type_for_size (64, 1),
3675 (TREE_CODE (type
) == ARRAY_TYPE
3676 ? TYPE_MIN_VALUE (TYPE_DOMAIN (type
))
3680 = make_descriptor_field ("SB_U1", gnat_type_for_size (64, 1),
3682 (TREE_CODE (type
) == ARRAY_TYPE
3683 ? TYPE_MAX_VALUE (TYPE_DOMAIN (type
))
3688 case By_Descriptor_A
:
3689 case By_Descriptor_NCA
:
3691 = make_descriptor_field ("SCALE", gnat_type_for_size (8, 1),
3692 record_type
, size_zero_node
, field_list
);
3695 = make_descriptor_field ("DIGITS", gnat_type_for_size (8, 1),
3696 record_type
, size_zero_node
, field_list
);
3698 dtype
= (mech
== By_Descriptor_NCA
3700 /* Set FL_COLUMN, FL_COEFF, and
3702 : (TREE_CODE (type
) == ARRAY_TYPE
3703 && TYPE_CONVENTION_FORTRAN_P (type
)
3706 = make_descriptor_field ("AFLAGS", gnat_type_for_size (8, 1),
3707 record_type
, size_int (dtype
),
3711 = make_descriptor_field ("DIMCT", gnat_type_for_size (8, 1),
3712 record_type
, size_int (ndim
), field_list
);
3715 = make_descriptor_field ("MBZ", gnat_type_for_size (32, 1),
3716 record_type
, size_int (0), field_list
);
3718 = make_descriptor_field ("ARSIZE", gnat_type_for_size (64, 1),
3719 record_type
, size_in_bytes (type
),
3722 /* Now build a pointer to the 0,0,0... element. */
3723 tem
= build0 (PLACEHOLDER_EXPR
, type
);
3724 for (i
= 0, inner_type
= type
; i
< ndim
;
3725 i
++, inner_type
= TREE_TYPE (inner_type
))
3726 tem
= build4 (ARRAY_REF
, TREE_TYPE (inner_type
), tem
,
3727 convert (TYPE_DOMAIN (inner_type
), size_zero_node
),
3728 NULL_TREE
, NULL_TREE
);
3731 = make_descriptor_field ("A0", pointer64_type
, record_type
,
3732 build1 (ADDR_EXPR
, pointer64_type
, tem
),
3735 /* Next come the addressing coefficients. */
3736 tem
= size_one_node
;
3737 for (i
= 0; i
< ndim
; i
++)
3741 = size_binop (MULT_EXPR
, tem
,
3742 size_binop (PLUS_EXPR
,
3743 size_binop (MINUS_EXPR
,
3744 TYPE_MAX_VALUE (idx_arr
[i
]),
3745 TYPE_MIN_VALUE (idx_arr
[i
])),
3748 fname
[0] = (mech
== By_Descriptor_NCA
? 'S' : 'M');
3749 fname
[1] = '0' + i
, fname
[2] = 0;
3751 = make_descriptor_field (fname
, gnat_type_for_size (64, 1),
3752 record_type
, idx_length
, field_list
);
3754 if (mech
== By_Descriptor_NCA
)
3758 /* Finally here are the bounds. */
3759 for (i
= 0; i
< ndim
; i
++)
3763 fname
[0] = 'L', fname
[1] = '0' + i
, fname
[2] = 0;
3765 = make_descriptor_field (fname
, gnat_type_for_size (64, 1),
3767 TYPE_MIN_VALUE (idx_arr
[i
]), field_list
);
3771 = make_descriptor_field (fname
, gnat_type_for_size (64, 1),
3773 TYPE_MAX_VALUE (idx_arr
[i
]), field_list
);
3778 post_error ("unsupported descriptor type for &", gnat_entity
);
3781 TYPE_NAME (record_type
) = create_concat_name (gnat_entity
, "DESC64");
3782 finish_record_type (record_type
, nreverse (field_list
), 0, false);
3786 /* Fill in a VMS descriptor of GNU_TYPE for GNU_EXPR and return the result.
3787 GNAT_ACTUAL is the actual parameter for which the descriptor is built. */
3790 fill_vms_descriptor (tree gnu_type
, tree gnu_expr
, Node_Id gnat_actual
)
3792 vec
<constructor_elt
, va_gc
> *v
= NULL
;
3795 gnu_expr
= maybe_unconstrained_array (gnu_expr
);
3796 gnu_expr
= gnat_protect_expr (gnu_expr
);
3797 gnat_mark_addressable (gnu_expr
);
3799 /* We may need to substitute both GNU_EXPR and a CALL_EXPR to the raise CE
3800 routine in case we have a 32-bit descriptor. */
3801 gnu_expr
= build2 (COMPOUND_EXPR
, void_type_node
,
3802 build_call_raise (CE_Range_Check_Failed
, gnat_actual
,
3803 N_Raise_Constraint_Error
),
3806 for (field
= TYPE_FIELDS (gnu_type
); field
; field
= DECL_CHAIN (field
))
3809 = convert (TREE_TYPE (field
),
3810 SUBSTITUTE_PLACEHOLDER_IN_EXPR (DECL_INITIAL (field
),
3812 CONSTRUCTOR_APPEND_ELT (v
, field
, value
);
3815 return gnat_build_constructor (gnu_type
, v
);
3818 /* Convert GNU_EXPR, a pointer to a 64bit VMS descriptor, to GNU_TYPE, a
3819 regular pointer or fat pointer type. GNAT_SUBPROG is the subprogram to
3820 which the VMS descriptor is passed. */
3823 convert_vms_descriptor64 (tree gnu_type
, tree gnu_expr
, Entity_Id gnat_subprog
)
3825 tree desc_type
= TREE_TYPE (TREE_TYPE (gnu_expr
));
3826 tree desc
= build1 (INDIRECT_REF
, desc_type
, gnu_expr
);
3827 /* The CLASS field is the 3rd field in the descriptor. */
3828 tree klass
= DECL_CHAIN (DECL_CHAIN (TYPE_FIELDS (desc_type
)));
3829 /* The POINTER field is the 6th field in the descriptor. */
3830 tree pointer
= DECL_CHAIN (DECL_CHAIN (DECL_CHAIN (klass
)));
3832 /* Retrieve the value of the POINTER field. */
3834 = build3 (COMPONENT_REF
, TREE_TYPE (pointer
), desc
, pointer
, NULL_TREE
);
3836 if (POINTER_TYPE_P (gnu_type
))
3837 return convert (gnu_type
, gnu_expr64
);
3839 else if (TYPE_IS_FAT_POINTER_P (gnu_type
))
3841 tree p_array_type
= TREE_TYPE (TYPE_FIELDS (gnu_type
));
3842 tree p_bounds_type
= TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (gnu_type
)));
3843 tree template_type
= TREE_TYPE (p_bounds_type
);
3844 tree min_field
= TYPE_FIELDS (template_type
);
3845 tree max_field
= DECL_CHAIN (TYPE_FIELDS (template_type
));
3846 tree template_tree
, template_addr
, aflags
, dimct
, t
, u
;
3847 /* See the head comment of build_vms_descriptor. */
3848 int iklass
= TREE_INT_CST_LOW (DECL_INITIAL (klass
));
3849 tree lfield
, ufield
;
3850 vec
<constructor_elt
, va_gc
> *v
;
3852 /* Convert POINTER to the pointer-to-array type. */
3853 gnu_expr64
= convert (p_array_type
, gnu_expr64
);
3857 case 1: /* Class S */
3858 case 15: /* Class SB */
3859 /* Build {1, LENGTH} template; LENGTH64 is the 5th field. */
3861 t
= DECL_CHAIN (DECL_CHAIN (klass
));
3862 t
= build3 (COMPONENT_REF
, TREE_TYPE (t
), desc
, t
, NULL_TREE
);
3863 CONSTRUCTOR_APPEND_ELT (v
, min_field
,
3864 convert (TREE_TYPE (min_field
),
3866 CONSTRUCTOR_APPEND_ELT (v
, max_field
,
3867 convert (TREE_TYPE (max_field
), t
));
3868 template_tree
= gnat_build_constructor (template_type
, v
);
3869 template_addr
= build_unary_op (ADDR_EXPR
, NULL_TREE
, template_tree
);
3871 /* For class S, we are done. */
3875 /* Test that we really have a SB descriptor, like DEC Ada. */
3876 t
= build3 (COMPONENT_REF
, TREE_TYPE (klass
), desc
, klass
, NULL
);
3877 u
= convert (TREE_TYPE (klass
), DECL_INITIAL (klass
));
3878 u
= build_binary_op (EQ_EXPR
, boolean_type_node
, t
, u
);
3879 /* If so, there is already a template in the descriptor and
3880 it is located right after the POINTER field. The fields are
3881 64bits so they must be repacked. */
3882 t
= DECL_CHAIN (pointer
);
3883 lfield
= build3 (COMPONENT_REF
, TREE_TYPE (t
), desc
, t
, NULL_TREE
);
3884 lfield
= convert (TREE_TYPE (TYPE_FIELDS (template_type
)), lfield
);
3887 ufield
= build3 (COMPONENT_REF
, TREE_TYPE (t
), desc
, t
, NULL_TREE
);
3889 (TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (template_type
))), ufield
);
3891 /* Build the template in the form of a constructor. */
3893 CONSTRUCTOR_APPEND_ELT (v
, TYPE_FIELDS (template_type
), lfield
);
3894 CONSTRUCTOR_APPEND_ELT (v
, DECL_CHAIN (TYPE_FIELDS (template_type
)),
3896 template_tree
= gnat_build_constructor (template_type
, v
);
3898 /* Otherwise use the {1, LENGTH} template we build above. */
3899 template_addr
= build3 (COND_EXPR
, p_bounds_type
, u
,
3900 build_unary_op (ADDR_EXPR
, p_bounds_type
,
3905 case 4: /* Class A */
3906 /* The AFLAGS field is the 3rd field after the pointer in the
3908 t
= DECL_CHAIN (DECL_CHAIN (DECL_CHAIN (pointer
)));
3909 aflags
= build3 (COMPONENT_REF
, TREE_TYPE (t
), desc
, t
, NULL_TREE
);
3910 /* The DIMCT field is the next field in the descriptor after
3913 dimct
= build3 (COMPONENT_REF
, TREE_TYPE (t
), desc
, t
, NULL_TREE
);
3914 /* Raise CONSTRAINT_ERROR if either more than 1 dimension
3915 or FL_COEFF or FL_BOUNDS not set. */
3916 u
= build_int_cst (TREE_TYPE (aflags
), 192);
3917 u
= build_binary_op (TRUTH_OR_EXPR
, boolean_type_node
,
3918 build_binary_op (NE_EXPR
, boolean_type_node
,
3920 convert (TREE_TYPE (dimct
),
3922 build_binary_op (NE_EXPR
, boolean_type_node
,
3923 build2 (BIT_AND_EXPR
,
3927 /* There is already a template in the descriptor and it is located
3928 in block 3. The fields are 64bits so they must be repacked. */
3929 t
= DECL_CHAIN (DECL_CHAIN (DECL_CHAIN (DECL_CHAIN (DECL_CHAIN
3931 lfield
= build3 (COMPONENT_REF
, TREE_TYPE (t
), desc
, t
, NULL_TREE
);
3932 lfield
= convert (TREE_TYPE (TYPE_FIELDS (template_type
)), lfield
);
3935 ufield
= build3 (COMPONENT_REF
, TREE_TYPE (t
), desc
, t
, NULL_TREE
);
3937 (TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (template_type
))), ufield
);
3939 /* Build the template in the form of a constructor. */
3941 CONSTRUCTOR_APPEND_ELT (v
, TYPE_FIELDS (template_type
), lfield
);
3942 CONSTRUCTOR_APPEND_ELT (v
, DECL_CHAIN (TYPE_FIELDS (template_type
)),
3944 template_tree
= gnat_build_constructor (template_type
, v
);
3945 template_tree
= build3 (COND_EXPR
, template_type
, u
,
3946 build_call_raise (CE_Length_Check_Failed
, Empty
,
3947 N_Raise_Constraint_Error
),
3950 = build_unary_op (ADDR_EXPR
, p_bounds_type
, template_tree
);
3953 case 10: /* Class NCA */
3955 post_error ("unsupported descriptor type for &", gnat_subprog
);
3956 template_addr
= integer_zero_node
;
3960 /* Build the fat pointer in the form of a constructor. */
3962 CONSTRUCTOR_APPEND_ELT (v
, TYPE_FIELDS (gnu_type
), gnu_expr64
);
3963 CONSTRUCTOR_APPEND_ELT (v
, DECL_CHAIN (TYPE_FIELDS (gnu_type
)),
3965 return gnat_build_constructor (gnu_type
, v
);
3972 /* Convert GNU_EXPR, a pointer to a 32bit VMS descriptor, to GNU_TYPE, a
3973 regular pointer or fat pointer type. GNAT_SUBPROG is the subprogram to
3974 which the VMS descriptor is passed. */
3977 convert_vms_descriptor32 (tree gnu_type
, tree gnu_expr
, Entity_Id gnat_subprog
)
3979 tree desc_type
= TREE_TYPE (TREE_TYPE (gnu_expr
));
3980 tree desc
= build1 (INDIRECT_REF
, desc_type
, gnu_expr
);
3981 /* The CLASS field is the 3rd field in the descriptor. */
3982 tree klass
= DECL_CHAIN (DECL_CHAIN (TYPE_FIELDS (desc_type
)));
3983 /* The POINTER field is the 4th field in the descriptor. */
3984 tree pointer
= DECL_CHAIN (klass
);
3986 /* Retrieve the value of the POINTER field. */
3988 = build3 (COMPONENT_REF
, TREE_TYPE (pointer
), desc
, pointer
, NULL_TREE
);
3990 if (POINTER_TYPE_P (gnu_type
))
3991 return convert (gnu_type
, gnu_expr32
);
3993 else if (TYPE_IS_FAT_POINTER_P (gnu_type
))
3995 tree p_array_type
= TREE_TYPE (TYPE_FIELDS (gnu_type
));
3996 tree p_bounds_type
= TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (gnu_type
)));
3997 tree template_type
= TREE_TYPE (p_bounds_type
);
3998 tree min_field
= TYPE_FIELDS (template_type
);
3999 tree max_field
= DECL_CHAIN (TYPE_FIELDS (template_type
));
4000 tree template_tree
, template_addr
, aflags
, dimct
, t
, u
;
4001 /* See the head comment of build_vms_descriptor. */
4002 int iklass
= TREE_INT_CST_LOW (DECL_INITIAL (klass
));
4003 vec
<constructor_elt
, va_gc
> *v
;
4005 /* Convert POINTER to the pointer-to-array type. */
4006 gnu_expr32
= convert (p_array_type
, gnu_expr32
);
4010 case 1: /* Class S */
4011 case 15: /* Class SB */
4012 /* Build {1, LENGTH} template; LENGTH is the 1st field. */
4014 t
= TYPE_FIELDS (desc_type
);
4015 t
= build3 (COMPONENT_REF
, TREE_TYPE (t
), desc
, t
, NULL_TREE
);
4016 CONSTRUCTOR_APPEND_ELT (v
, min_field
,
4017 convert (TREE_TYPE (min_field
),
4019 CONSTRUCTOR_APPEND_ELT (v
, max_field
,
4020 convert (TREE_TYPE (max_field
), t
));
4021 template_tree
= gnat_build_constructor (template_type
, v
);
4022 template_addr
= build_unary_op (ADDR_EXPR
, NULL_TREE
, template_tree
);
4024 /* For class S, we are done. */
4028 /* Test that we really have a SB descriptor, like DEC Ada. */
4029 t
= build3 (COMPONENT_REF
, TREE_TYPE (klass
), desc
, klass
, NULL
);
4030 u
= convert (TREE_TYPE (klass
), DECL_INITIAL (klass
));
4031 u
= build_binary_op (EQ_EXPR
, boolean_type_node
, t
, u
);
4032 /* If so, there is already a template in the descriptor and
4033 it is located right after the POINTER field. */
4034 t
= DECL_CHAIN (pointer
);
4036 = build3 (COMPONENT_REF
, TREE_TYPE (t
), desc
, t
, NULL_TREE
);
4037 /* Otherwise use the {1, LENGTH} template we build above. */
4038 template_addr
= build3 (COND_EXPR
, p_bounds_type
, u
,
4039 build_unary_op (ADDR_EXPR
, p_bounds_type
,
4044 case 4: /* Class A */
4045 /* The AFLAGS field is the 7th field in the descriptor. */
4046 t
= DECL_CHAIN (DECL_CHAIN (DECL_CHAIN (pointer
)));
4047 aflags
= build3 (COMPONENT_REF
, TREE_TYPE (t
), desc
, t
, NULL_TREE
);
4048 /* The DIMCT field is the 8th field in the descriptor. */
4050 dimct
= build3 (COMPONENT_REF
, TREE_TYPE (t
), desc
, t
, NULL_TREE
);
4051 /* Raise CONSTRAINT_ERROR if either more than 1 dimension
4052 or FL_COEFF or FL_BOUNDS not set. */
4053 u
= build_int_cst (TREE_TYPE (aflags
), 192);
4054 u
= build_binary_op (TRUTH_OR_EXPR
, boolean_type_node
,
4055 build_binary_op (NE_EXPR
, boolean_type_node
,
4057 convert (TREE_TYPE (dimct
),
4059 build_binary_op (NE_EXPR
, boolean_type_node
,
4060 build2 (BIT_AND_EXPR
,
4064 /* There is already a template in the descriptor and it is
4065 located at the start of block 3 (12th field). */
4066 t
= DECL_CHAIN (DECL_CHAIN (DECL_CHAIN (DECL_CHAIN (t
))));
4068 = build3 (COMPONENT_REF
, TREE_TYPE (t
), desc
, t
, NULL_TREE
);
4069 template_tree
= build3 (COND_EXPR
, TREE_TYPE (t
), u
,
4070 build_call_raise (CE_Length_Check_Failed
, Empty
,
4071 N_Raise_Constraint_Error
),
4074 = build_unary_op (ADDR_EXPR
, p_bounds_type
, template_tree
);
4077 case 10: /* Class NCA */
4079 post_error ("unsupported descriptor type for &", gnat_subprog
);
4080 template_addr
= integer_zero_node
;
4084 /* Build the fat pointer in the form of a constructor. */
4086 CONSTRUCTOR_APPEND_ELT (v
, TYPE_FIELDS (gnu_type
), gnu_expr32
);
4087 CONSTRUCTOR_APPEND_ELT (v
, DECL_CHAIN (TYPE_FIELDS (gnu_type
)),
4090 return gnat_build_constructor (gnu_type
, v
);
4097 /* Convert GNU_EXPR, a pointer to a VMS descriptor, to GNU_TYPE, a regular
4098 pointer or fat pointer type. GNU_EXPR_ALT_TYPE is the alternate (32-bit)
4099 pointer type of GNU_EXPR. GNAT_SUBPROG is the subprogram to which the
4100 descriptor is passed. */
4103 convert_vms_descriptor (tree gnu_type
, tree gnu_expr
, tree gnu_expr_alt_type
,
4104 Entity_Id gnat_subprog
)
4106 tree desc_type
= TREE_TYPE (TREE_TYPE (gnu_expr
));
4107 tree desc
= build1 (INDIRECT_REF
, desc_type
, gnu_expr
);
4108 tree mbo
= TYPE_FIELDS (desc_type
);
4109 const char *mbostr
= IDENTIFIER_POINTER (DECL_NAME (mbo
));
4110 tree mbmo
= DECL_CHAIN (DECL_CHAIN (DECL_CHAIN (mbo
)));
4111 tree is64bit
, gnu_expr32
, gnu_expr64
;
4113 /* If the field name is not MBO, it must be 32-bit and no alternate.
4114 Otherwise primary must be 64-bit and alternate 32-bit. */
4115 if (strcmp (mbostr
, "MBO") != 0)
4117 tree ret
= convert_vms_descriptor32 (gnu_type
, gnu_expr
, gnat_subprog
);
4121 /* Build the test for 64-bit descriptor. */
4122 mbo
= build3 (COMPONENT_REF
, TREE_TYPE (mbo
), desc
, mbo
, NULL_TREE
);
4123 mbmo
= build3 (COMPONENT_REF
, TREE_TYPE (mbmo
), desc
, mbmo
, NULL_TREE
);
4125 = build_binary_op (TRUTH_ANDIF_EXPR
, boolean_type_node
,
4126 build_binary_op (EQ_EXPR
, boolean_type_node
,
4127 convert (integer_type_node
, mbo
),
4129 build_binary_op (EQ_EXPR
, boolean_type_node
,
4130 convert (integer_type_node
, mbmo
),
4131 integer_minus_one_node
));
4133 /* Build the 2 possible end results. */
4134 gnu_expr64
= convert_vms_descriptor64 (gnu_type
, gnu_expr
, gnat_subprog
);
4135 gnu_expr
= fold_convert (gnu_expr_alt_type
, gnu_expr
);
4136 gnu_expr32
= convert_vms_descriptor32 (gnu_type
, gnu_expr
, gnat_subprog
);
4137 return build3 (COND_EXPR
, gnu_type
, is64bit
, gnu_expr64
, gnu_expr32
);
4140 /* Build a type to be used to represent an aliased object whose nominal type
4141 is an unconstrained array. This consists of a RECORD_TYPE containing a
4142 field of TEMPLATE_TYPE and a field of OBJECT_TYPE, which is an ARRAY_TYPE.
4143 If ARRAY_TYPE is that of an unconstrained array, this is used to represent
4144 an arbitrary unconstrained object. Use NAME as the name of the record.
4145 DEBUG_INFO_P is true if we need to write debug information for the type. */
4148 build_unc_object_type (tree template_type
, tree object_type
, tree name
,
4151 tree type
= make_node (RECORD_TYPE
);
4153 = create_field_decl (get_identifier ("BOUNDS"), template_type
, type
,
4154 NULL_TREE
, NULL_TREE
, 0, 1);
4156 = create_field_decl (get_identifier ("ARRAY"), object_type
, type
,
4157 NULL_TREE
, NULL_TREE
, 0, 1);
4159 TYPE_NAME (type
) = name
;
4160 TYPE_CONTAINS_TEMPLATE_P (type
) = 1;
4161 DECL_CHAIN (template_field
) = array_field
;
4162 finish_record_type (type
, template_field
, 0, true);
4164 /* Declare it now since it will never be declared otherwise. This is
4165 necessary to ensure that its subtrees are properly marked. */
4166 create_type_decl (name
, type
, true, debug_info_p
, Empty
);
4171 /* Same, taking a thin or fat pointer type instead of a template type. */
4174 build_unc_object_type_from_ptr (tree thin_fat_ptr_type
, tree object_type
,
4175 tree name
, bool debug_info_p
)
4179 gcc_assert (TYPE_IS_FAT_OR_THIN_POINTER_P (thin_fat_ptr_type
));
4182 = (TYPE_IS_FAT_POINTER_P (thin_fat_ptr_type
)
4183 ? TREE_TYPE (TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (thin_fat_ptr_type
))))
4184 : TREE_TYPE (TYPE_FIELDS (TREE_TYPE (thin_fat_ptr_type
))));
4187 build_unc_object_type (template_type
, object_type
, name
, debug_info_p
);
4190 /* Update anything previously pointing to OLD_TYPE to point to NEW_TYPE.
4191 In the normal case this is just two adjustments, but we have more to
4192 do if NEW_TYPE is an UNCONSTRAINED_ARRAY_TYPE. */
4195 update_pointer_to (tree old_type
, tree new_type
)
4197 tree ptr
= TYPE_POINTER_TO (old_type
);
4198 tree ref
= TYPE_REFERENCE_TO (old_type
);
4201 /* If this is the main variant, process all the other variants first. */
4202 if (TYPE_MAIN_VARIANT (old_type
) == old_type
)
4203 for (t
= TYPE_NEXT_VARIANT (old_type
); t
; t
= TYPE_NEXT_VARIANT (t
))
4204 update_pointer_to (t
, new_type
);
4206 /* If no pointers and no references, we are done. */
4210 /* Merge the old type qualifiers in the new type.
4212 Each old variant has qualifiers for specific reasons, and the new
4213 designated type as well. Each set of qualifiers represents useful
4214 information grabbed at some point, and merging the two simply unifies
4215 these inputs into the final type description.
4217 Consider for instance a volatile type frozen after an access to constant
4218 type designating it; after the designated type's freeze, we get here with
4219 a volatile NEW_TYPE and a dummy OLD_TYPE with a readonly variant, created
4220 when the access type was processed. We will make a volatile and readonly
4221 designated type, because that's what it really is.
4223 We might also get here for a non-dummy OLD_TYPE variant with different
4224 qualifiers than those of NEW_TYPE, for instance in some cases of pointers
4225 to private record type elaboration (see the comments around the call to
4226 this routine in gnat_to_gnu_entity <E_Access_Type>). We have to merge
4227 the qualifiers in those cases too, to avoid accidentally discarding the
4228 initial set, and will often end up with OLD_TYPE == NEW_TYPE then. */
4230 = build_qualified_type (new_type
,
4231 TYPE_QUALS (old_type
) | TYPE_QUALS (new_type
));
4233 /* If old type and new type are identical, there is nothing to do. */
4234 if (old_type
== new_type
)
4237 /* Otherwise, first handle the simple case. */
4238 if (TREE_CODE (new_type
) != UNCONSTRAINED_ARRAY_TYPE
)
4240 tree new_ptr
, new_ref
;
4242 /* If pointer or reference already points to new type, nothing to do.
4243 This can happen as update_pointer_to can be invoked multiple times
4244 on the same couple of types because of the type variants. */
4245 if ((ptr
&& TREE_TYPE (ptr
) == new_type
)
4246 || (ref
&& TREE_TYPE (ref
) == new_type
))
4249 /* Chain PTR and its variants at the end. */
4250 new_ptr
= TYPE_POINTER_TO (new_type
);
4253 while (TYPE_NEXT_PTR_TO (new_ptr
))
4254 new_ptr
= TYPE_NEXT_PTR_TO (new_ptr
);
4255 TYPE_NEXT_PTR_TO (new_ptr
) = ptr
;
4258 TYPE_POINTER_TO (new_type
) = ptr
;
4260 /* Now adjust them. */
4261 for (; ptr
; ptr
= TYPE_NEXT_PTR_TO (ptr
))
4262 for (t
= TYPE_MAIN_VARIANT (ptr
); t
; t
= TYPE_NEXT_VARIANT (t
))
4264 TREE_TYPE (t
) = new_type
;
4265 if (TYPE_NULL_BOUNDS (t
))
4266 TREE_TYPE (TREE_OPERAND (TYPE_NULL_BOUNDS (t
), 0)) = new_type
;
4269 /* Chain REF and its variants at the end. */
4270 new_ref
= TYPE_REFERENCE_TO (new_type
);
4273 while (TYPE_NEXT_REF_TO (new_ref
))
4274 new_ref
= TYPE_NEXT_REF_TO (new_ref
);
4275 TYPE_NEXT_REF_TO (new_ref
) = ref
;
4278 TYPE_REFERENCE_TO (new_type
) = ref
;
4280 /* Now adjust them. */
4281 for (; ref
; ref
= TYPE_NEXT_REF_TO (ref
))
4282 for (t
= TYPE_MAIN_VARIANT (ref
); t
; t
= TYPE_NEXT_VARIANT (t
))
4283 TREE_TYPE (t
) = new_type
;
4285 TYPE_POINTER_TO (old_type
) = NULL_TREE
;
4286 TYPE_REFERENCE_TO (old_type
) = NULL_TREE
;
4289 /* Now deal with the unconstrained array case. In this case the pointer
4290 is actually a record where both fields are pointers to dummy nodes.
4291 Turn them into pointers to the correct types using update_pointer_to.
4292 Likewise for the pointer to the object record (thin pointer). */
4295 tree new_ptr
= TYPE_POINTER_TO (new_type
);
4297 gcc_assert (TYPE_IS_FAT_POINTER_P (ptr
));
4299 /* If PTR already points to NEW_TYPE, nothing to do. This can happen
4300 since update_pointer_to can be invoked multiple times on the same
4301 couple of types because of the type variants. */
4302 if (TYPE_UNCONSTRAINED_ARRAY (ptr
) == new_type
)
4306 (TREE_TYPE (TREE_TYPE (TYPE_FIELDS (ptr
))),
4307 TREE_TYPE (TREE_TYPE (TYPE_FIELDS (new_ptr
))));
4310 (TREE_TYPE (TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (ptr
)))),
4311 TREE_TYPE (TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (new_ptr
)))));
4313 update_pointer_to (TYPE_OBJECT_RECORD_TYPE (old_type
),
4314 TYPE_OBJECT_RECORD_TYPE (new_type
));
4316 TYPE_POINTER_TO (old_type
) = NULL_TREE
;
4320 /* Convert EXPR, a pointer to a constrained array, into a pointer to an
4321 unconstrained one. This involves making or finding a template. */
4324 convert_to_fat_pointer (tree type
, tree expr
)
4326 tree template_type
= TREE_TYPE (TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (type
))));
4327 tree p_array_type
= TREE_TYPE (TYPE_FIELDS (type
));
4328 tree etype
= TREE_TYPE (expr
);
4330 vec
<constructor_elt
, va_gc
> *v
;
4333 /* If EXPR is null, make a fat pointer that contains a null pointer to the
4334 array (compare_fat_pointers ensures that this is the full discriminant)
4335 and a valid pointer to the bounds. This latter property is necessary
4336 since the compiler can hoist the load of the bounds done through it. */
4337 if (integer_zerop (expr
))
4339 tree ptr_template_type
= TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (type
)));
4340 tree null_bounds
, t
;
4342 if (TYPE_NULL_BOUNDS (ptr_template_type
))
4343 null_bounds
= TYPE_NULL_BOUNDS (ptr_template_type
);
4346 /* The template type can still be dummy at this point so we build an
4347 empty constructor. The middle-end will fill it in with zeros. */
4348 t
= build_constructor (template_type
,
4350 TREE_CONSTANT (t
) = TREE_STATIC (t
) = 1;
4351 null_bounds
= build_unary_op (ADDR_EXPR
, NULL_TREE
, t
);
4352 SET_TYPE_NULL_BOUNDS (ptr_template_type
, null_bounds
);
4355 CONSTRUCTOR_APPEND_ELT (v
, TYPE_FIELDS (type
),
4356 fold_convert (p_array_type
, null_pointer_node
));
4357 CONSTRUCTOR_APPEND_ELT (v
, DECL_CHAIN (TYPE_FIELDS (type
)), null_bounds
);
4358 t
= build_constructor (type
, v
);
4359 /* Do not set TREE_CONSTANT so as to force T to static memory. */
4360 TREE_CONSTANT (t
) = 0;
4361 TREE_STATIC (t
) = 1;
4366 /* If EXPR is a thin pointer, make template and data from the record. */
4367 if (TYPE_IS_THIN_POINTER_P (etype
))
4369 tree field
= TYPE_FIELDS (TREE_TYPE (etype
));
4371 expr
= gnat_protect_expr (expr
);
4372 if (TREE_CODE (expr
) == ADDR_EXPR
)
4373 expr
= TREE_OPERAND (expr
, 0);
4376 /* If we have a TYPE_UNCONSTRAINED_ARRAY attached to the RECORD_TYPE,
4377 the thin pointer value has been shifted so we first need to shift
4378 it back to get the template address. */
4379 if (TYPE_UNCONSTRAINED_ARRAY (TREE_TYPE (etype
)))
4381 = build_binary_op (POINTER_PLUS_EXPR
, etype
, expr
,
4382 fold_build1 (NEGATE_EXPR
, sizetype
,
4384 (DECL_CHAIN (field
))));
4385 expr
= build1 (INDIRECT_REF
, TREE_TYPE (etype
), expr
);
4388 template_tree
= build_component_ref (expr
, NULL_TREE
, field
, false);
4389 expr
= build_unary_op (ADDR_EXPR
, NULL_TREE
,
4390 build_component_ref (expr
, NULL_TREE
,
4391 DECL_CHAIN (field
), false));
4394 /* Otherwise, build the constructor for the template. */
4396 template_tree
= build_template (template_type
, TREE_TYPE (etype
), expr
);
4398 /* The final result is a constructor for the fat pointer.
4400 If EXPR is an argument of a foreign convention subprogram, the type it
4401 points to is directly the component type. In this case, the expression
4402 type may not match the corresponding FIELD_DECL type at this point, so we
4403 call "convert" here to fix that up if necessary. This type consistency is
4404 required, for instance because it ensures that possible later folding of
4405 COMPONENT_REFs against this constructor always yields something of the
4406 same type as the initial reference.
4408 Note that the call to "build_template" above is still fine because it
4409 will only refer to the provided TEMPLATE_TYPE in this case. */
4410 CONSTRUCTOR_APPEND_ELT (v
, TYPE_FIELDS (type
),
4411 convert (p_array_type
, expr
));
4412 CONSTRUCTOR_APPEND_ELT (v
, DECL_CHAIN (TYPE_FIELDS (type
)),
4413 build_unary_op (ADDR_EXPR
, NULL_TREE
,
4415 return gnat_build_constructor (type
, v
);
4418 /* Create an expression whose value is that of EXPR,
4419 converted to type TYPE. The TREE_TYPE of the value
4420 is always TYPE. This function implements all reasonable
4421 conversions; callers should filter out those that are
4422 not permitted by the language being compiled. */
4425 convert (tree type
, tree expr
)
4427 tree etype
= TREE_TYPE (expr
);
4428 enum tree_code ecode
= TREE_CODE (etype
);
4429 enum tree_code code
= TREE_CODE (type
);
4431 /* If the expression is already of the right type, we are done. */
4435 /* If both input and output have padding and are of variable size, do this
4436 as an unchecked conversion. Likewise if one is a mere variant of the
4437 other, so we avoid a pointless unpad/repad sequence. */
4438 else if (code
== RECORD_TYPE
&& ecode
== RECORD_TYPE
4439 && TYPE_PADDING_P (type
) && TYPE_PADDING_P (etype
)
4440 && (!TREE_CONSTANT (TYPE_SIZE (type
))
4441 || !TREE_CONSTANT (TYPE_SIZE (etype
))
4442 || TYPE_MAIN_VARIANT (type
) == TYPE_MAIN_VARIANT (etype
)
4443 || TYPE_NAME (TREE_TYPE (TYPE_FIELDS (type
)))
4444 == TYPE_NAME (TREE_TYPE (TYPE_FIELDS (etype
)))))
4447 /* If the output type has padding, convert to the inner type and make a
4448 constructor to build the record, unless a variable size is involved. */
4449 else if (code
== RECORD_TYPE
&& TYPE_PADDING_P (type
))
4451 vec
<constructor_elt
, va_gc
> *v
;
4453 /* If we previously converted from another type and our type is
4454 of variable size, remove the conversion to avoid the need for
4455 variable-sized temporaries. Likewise for a conversion between
4456 original and packable version. */
4457 if (TREE_CODE (expr
) == VIEW_CONVERT_EXPR
4458 && (!TREE_CONSTANT (TYPE_SIZE (type
))
4459 || (ecode
== RECORD_TYPE
4460 && TYPE_NAME (etype
)
4461 == TYPE_NAME (TREE_TYPE (TREE_OPERAND (expr
, 0))))))
4462 expr
= TREE_OPERAND (expr
, 0);
4464 /* If we are just removing the padding from expr, convert the original
4465 object if we have variable size in order to avoid the need for some
4466 variable-sized temporaries. Likewise if the padding is a variant
4467 of the other, so we avoid a pointless unpad/repad sequence. */
4468 if (TREE_CODE (expr
) == COMPONENT_REF
4469 && TYPE_IS_PADDING_P (TREE_TYPE (TREE_OPERAND (expr
, 0)))
4470 && (!TREE_CONSTANT (TYPE_SIZE (type
))
4471 || TYPE_MAIN_VARIANT (type
)
4472 == TYPE_MAIN_VARIANT (TREE_TYPE (TREE_OPERAND (expr
, 0)))
4473 || (ecode
== RECORD_TYPE
4474 && TYPE_NAME (etype
)
4475 == TYPE_NAME (TREE_TYPE (TYPE_FIELDS (type
))))))
4476 return convert (type
, TREE_OPERAND (expr
, 0));
4478 /* If the inner type is of self-referential size and the expression type
4479 is a record, do this as an unchecked conversion. But first pad the
4480 expression if possible to have the same size on both sides. */
4481 if (ecode
== RECORD_TYPE
4482 && CONTAINS_PLACEHOLDER_P (DECL_SIZE (TYPE_FIELDS (type
))))
4484 if (TREE_CODE (TYPE_SIZE (etype
)) == INTEGER_CST
)
4485 expr
= convert (maybe_pad_type (etype
, TYPE_SIZE (type
), 0, Empty
,
4486 false, false, false, true),
4488 return unchecked_convert (type
, expr
, false);
4491 /* If we are converting between array types with variable size, do the
4492 final conversion as an unchecked conversion, again to avoid the need
4493 for some variable-sized temporaries. If valid, this conversion is
4494 very likely purely technical and without real effects. */
4495 if (ecode
== ARRAY_TYPE
4496 && TREE_CODE (TREE_TYPE (TYPE_FIELDS (type
))) == ARRAY_TYPE
4497 && !TREE_CONSTANT (TYPE_SIZE (etype
))
4498 && !TREE_CONSTANT (TYPE_SIZE (type
)))
4499 return unchecked_convert (type
,
4500 convert (TREE_TYPE (TYPE_FIELDS (type
)),
4505 CONSTRUCTOR_APPEND_ELT (v
, TYPE_FIELDS (type
),
4506 convert (TREE_TYPE (TYPE_FIELDS (type
)), expr
));
4507 return gnat_build_constructor (type
, v
);
4510 /* If the input type has padding, remove it and convert to the output type.
4511 The conditions ordering is arranged to ensure that the output type is not
4512 a padding type here, as it is not clear whether the conversion would
4513 always be correct if this was to happen. */
4514 else if (ecode
== RECORD_TYPE
&& TYPE_PADDING_P (etype
))
4518 /* If we have just converted to this padded type, just get the
4519 inner expression. */
4520 if (TREE_CODE (expr
) == CONSTRUCTOR
4521 && !vec_safe_is_empty (CONSTRUCTOR_ELTS (expr
))
4522 && (*CONSTRUCTOR_ELTS (expr
))[0].index
== TYPE_FIELDS (etype
))
4523 unpadded
= (*CONSTRUCTOR_ELTS (expr
))[0].value
;
4525 /* Otherwise, build an explicit component reference. */
4528 = build_component_ref (expr
, NULL_TREE
, TYPE_FIELDS (etype
), false);
4530 return convert (type
, unpadded
);
4533 /* If the input is a biased type, adjust first. */
4534 if (ecode
== INTEGER_TYPE
&& TYPE_BIASED_REPRESENTATION_P (etype
))
4535 return convert (type
, fold_build2 (PLUS_EXPR
, TREE_TYPE (etype
),
4536 fold_convert (TREE_TYPE (etype
),
4538 TYPE_MIN_VALUE (etype
)));
4540 /* If the input is a justified modular type, we need to extract the actual
4541 object before converting it to any other type with the exceptions of an
4542 unconstrained array or of a mere type variant. It is useful to avoid the
4543 extraction and conversion in the type variant case because it could end
4544 up replacing a VAR_DECL expr by a constructor and we might be about the
4545 take the address of the result. */
4546 if (ecode
== RECORD_TYPE
&& TYPE_JUSTIFIED_MODULAR_P (etype
)
4547 && code
!= UNCONSTRAINED_ARRAY_TYPE
4548 && TYPE_MAIN_VARIANT (type
) != TYPE_MAIN_VARIANT (etype
))
4549 return convert (type
, build_component_ref (expr
, NULL_TREE
,
4550 TYPE_FIELDS (etype
), false));
4552 /* If converting to a type that contains a template, convert to the data
4553 type and then build the template. */
4554 if (code
== RECORD_TYPE
&& TYPE_CONTAINS_TEMPLATE_P (type
))
4556 tree obj_type
= TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (type
)));
4557 vec
<constructor_elt
, va_gc
> *v
;
4560 /* If the source already has a template, get a reference to the
4561 associated array only, as we are going to rebuild a template
4562 for the target type anyway. */
4563 expr
= maybe_unconstrained_array (expr
);
4565 CONSTRUCTOR_APPEND_ELT (v
, TYPE_FIELDS (type
),
4566 build_template (TREE_TYPE (TYPE_FIELDS (type
)),
4567 obj_type
, NULL_TREE
));
4568 CONSTRUCTOR_APPEND_ELT (v
, DECL_CHAIN (TYPE_FIELDS (type
)),
4569 convert (obj_type
, expr
));
4570 return gnat_build_constructor (type
, v
);
4573 /* There are some cases of expressions that we process specially. */
4574 switch (TREE_CODE (expr
))
4580 /* Just set its type here. For TRANSFORM_EXPR, we will do the actual
4581 conversion in gnat_expand_expr. NULL_EXPR does not represent
4582 and actual value, so no conversion is needed. */
4583 expr
= copy_node (expr
);
4584 TREE_TYPE (expr
) = type
;
4588 /* If we are converting a STRING_CST to another constrained array type,
4589 just make a new one in the proper type. */
4590 if (code
== ecode
&& AGGREGATE_TYPE_P (etype
)
4591 && !(TREE_CODE (TYPE_SIZE (etype
)) == INTEGER_CST
4592 && TREE_CODE (TYPE_SIZE (type
)) != INTEGER_CST
))
4594 expr
= copy_node (expr
);
4595 TREE_TYPE (expr
) = type
;
4601 /* If we are converting a VECTOR_CST to a mere variant type, just make
4602 a new one in the proper type. */
4603 if (code
== ecode
&& gnat_types_compatible_p (type
, etype
))
4605 expr
= copy_node (expr
);
4606 TREE_TYPE (expr
) = type
;
4611 /* If we are converting a CONSTRUCTOR to a mere variant type, just make
4612 a new one in the proper type. */
4613 if (code
== ecode
&& gnat_types_compatible_p (type
, etype
))
4615 expr
= copy_node (expr
);
4616 TREE_TYPE (expr
) = type
;
4617 CONSTRUCTOR_ELTS (expr
) = vec_safe_copy (CONSTRUCTOR_ELTS (expr
));
4621 /* Likewise for a conversion between original and packable version, or
4622 conversion between types of the same size and with the same list of
4623 fields, but we have to work harder to preserve type consistency. */
4625 && code
== RECORD_TYPE
4626 && (TYPE_NAME (type
) == TYPE_NAME (etype
)
4627 || tree_int_cst_equal (TYPE_SIZE (type
), TYPE_SIZE (etype
))))
4630 vec
<constructor_elt
, va_gc
> *e
= CONSTRUCTOR_ELTS (expr
);
4631 unsigned HOST_WIDE_INT len
= vec_safe_length (e
);
4632 vec
<constructor_elt
, va_gc
> *v
;
4634 tree efield
= TYPE_FIELDS (etype
), field
= TYPE_FIELDS (type
);
4635 unsigned HOST_WIDE_INT idx
;
4638 /* Whether we need to clear TREE_CONSTANT et al. on the output
4639 constructor when we convert in place. */
4640 bool clear_constant
= false;
4642 FOR_EACH_CONSTRUCTOR_ELT(e
, idx
, index
, value
)
4644 /* We expect only simple constructors. */
4645 if (!SAME_FIELD_P (index
, efield
))
4647 /* The field must be the same. */
4648 if (!SAME_FIELD_P (efield
, field
))
4650 constructor_elt elt
= {field
, convert (TREE_TYPE (field
), value
)};
4651 v
->quick_push (elt
);
4653 /* If packing has made this field a bitfield and the input
4654 value couldn't be emitted statically any more, we need to
4655 clear TREE_CONSTANT on our output. */
4657 && TREE_CONSTANT (expr
)
4658 && !CONSTRUCTOR_BITFIELD_P (efield
)
4659 && CONSTRUCTOR_BITFIELD_P (field
)
4660 && !initializer_constant_valid_for_bitfield_p (value
))
4661 clear_constant
= true;
4663 efield
= DECL_CHAIN (efield
);
4664 field
= DECL_CHAIN (field
);
4667 /* If we have been able to match and convert all the input fields
4668 to their output type, convert in place now. We'll fallback to a
4669 view conversion downstream otherwise. */
4672 expr
= copy_node (expr
);
4673 TREE_TYPE (expr
) = type
;
4674 CONSTRUCTOR_ELTS (expr
) = v
;
4676 TREE_CONSTANT (expr
) = TREE_STATIC (expr
) = 0;
4681 /* Likewise for a conversion between array type and vector type with a
4682 compatible representative array. */
4683 else if (code
== VECTOR_TYPE
4684 && ecode
== ARRAY_TYPE
4685 && gnat_types_compatible_p (TYPE_REPRESENTATIVE_ARRAY (type
),
4688 vec
<constructor_elt
, va_gc
> *e
= CONSTRUCTOR_ELTS (expr
);
4689 unsigned HOST_WIDE_INT len
= vec_safe_length (e
);
4690 vec
<constructor_elt
, va_gc
> *v
;
4691 unsigned HOST_WIDE_INT ix
;
4694 /* Build a VECTOR_CST from a *constant* array constructor. */
4695 if (TREE_CONSTANT (expr
))
4697 bool constant_p
= true;
4699 /* Iterate through elements and check if all constructor
4700 elements are *_CSTs. */
4701 FOR_EACH_CONSTRUCTOR_VALUE (e
, ix
, value
)
4702 if (!CONSTANT_CLASS_P (value
))
4709 return build_vector_from_ctor (type
,
4710 CONSTRUCTOR_ELTS (expr
));
4713 /* Otherwise, build a regular vector constructor. */
4715 FOR_EACH_CONSTRUCTOR_VALUE (e
, ix
, value
)
4717 constructor_elt elt
= {NULL_TREE
, value
};
4718 v
->quick_push (elt
);
4720 expr
= copy_node (expr
);
4721 TREE_TYPE (expr
) = type
;
4722 CONSTRUCTOR_ELTS (expr
) = v
;
4727 case UNCONSTRAINED_ARRAY_REF
:
4728 /* First retrieve the underlying array. */
4729 expr
= maybe_unconstrained_array (expr
);
4730 etype
= TREE_TYPE (expr
);
4731 ecode
= TREE_CODE (etype
);
4734 case VIEW_CONVERT_EXPR
:
4736 /* GCC 4.x is very sensitive to type consistency overall, and view
4737 conversions thus are very frequent. Even though just "convert"ing
4738 the inner operand to the output type is fine in most cases, it
4739 might expose unexpected input/output type mismatches in special
4740 circumstances so we avoid such recursive calls when we can. */
4741 tree op0
= TREE_OPERAND (expr
, 0);
4743 /* If we are converting back to the original type, we can just
4744 lift the input conversion. This is a common occurrence with
4745 switches back-and-forth amongst type variants. */
4746 if (type
== TREE_TYPE (op0
))
4749 /* Otherwise, if we're converting between two aggregate or vector
4750 types, we might be allowed to substitute the VIEW_CONVERT_EXPR
4751 target type in place or to just convert the inner expression. */
4752 if ((AGGREGATE_TYPE_P (type
) && AGGREGATE_TYPE_P (etype
))
4753 || (VECTOR_TYPE_P (type
) && VECTOR_TYPE_P (etype
)))
4755 /* If we are converting between mere variants, we can just
4756 substitute the VIEW_CONVERT_EXPR in place. */
4757 if (gnat_types_compatible_p (type
, etype
))
4758 return build1 (VIEW_CONVERT_EXPR
, type
, op0
);
4760 /* Otherwise, we may just bypass the input view conversion unless
4761 one of the types is a fat pointer, which is handled by
4762 specialized code below which relies on exact type matching. */
4763 else if (!TYPE_IS_FAT_POINTER_P (type
)
4764 && !TYPE_IS_FAT_POINTER_P (etype
))
4765 return convert (type
, op0
);
4775 /* Check for converting to a pointer to an unconstrained array. */
4776 if (TYPE_IS_FAT_POINTER_P (type
) && !TYPE_IS_FAT_POINTER_P (etype
))
4777 return convert_to_fat_pointer (type
, expr
);
4779 /* If we are converting between two aggregate or vector types that are mere
4780 variants, just make a VIEW_CONVERT_EXPR. Likewise when we are converting
4781 to a vector type from its representative array type. */
4782 else if ((code
== ecode
4783 && (AGGREGATE_TYPE_P (type
) || VECTOR_TYPE_P (type
))
4784 && gnat_types_compatible_p (type
, etype
))
4785 || (code
== VECTOR_TYPE
4786 && ecode
== ARRAY_TYPE
4787 && gnat_types_compatible_p (TYPE_REPRESENTATIVE_ARRAY (type
),
4789 return build1 (VIEW_CONVERT_EXPR
, type
, expr
);
4791 /* If we are converting between tagged types, try to upcast properly. */
4792 else if (ecode
== RECORD_TYPE
&& code
== RECORD_TYPE
4793 && TYPE_ALIGN_OK (etype
) && TYPE_ALIGN_OK (type
))
4795 tree child_etype
= etype
;
4797 tree field
= TYPE_FIELDS (child_etype
);
4798 if (DECL_NAME (field
) == parent_name_id
&& TREE_TYPE (field
) == type
)
4799 return build_component_ref (expr
, NULL_TREE
, field
, false);
4800 child_etype
= TREE_TYPE (field
);
4801 } while (TREE_CODE (child_etype
) == RECORD_TYPE
);
4804 /* If we are converting from a smaller form of record type back to it, just
4805 make a VIEW_CONVERT_EXPR. But first pad the expression to have the same
4806 size on both sides. */
4807 else if (ecode
== RECORD_TYPE
&& code
== RECORD_TYPE
4808 && smaller_form_type_p (etype
, type
))
4810 expr
= convert (maybe_pad_type (etype
, TYPE_SIZE (type
), 0, Empty
,
4811 false, false, false, true),
4813 return build1 (VIEW_CONVERT_EXPR
, type
, expr
);
4816 /* In all other cases of related types, make a NOP_EXPR. */
4817 else if (TYPE_MAIN_VARIANT (type
) == TYPE_MAIN_VARIANT (etype
))
4818 return fold_convert (type
, expr
);
4823 return fold_build1 (CONVERT_EXPR
, type
, expr
);
4826 if (TYPE_HAS_ACTUAL_BOUNDS_P (type
)
4827 && (ecode
== ARRAY_TYPE
|| ecode
== UNCONSTRAINED_ARRAY_TYPE
4828 || (ecode
== RECORD_TYPE
&& TYPE_CONTAINS_TEMPLATE_P (etype
))))
4829 return unchecked_convert (type
, expr
, false);
4830 else if (TYPE_BIASED_REPRESENTATION_P (type
))
4831 return fold_convert (type
,
4832 fold_build2 (MINUS_EXPR
, TREE_TYPE (type
),
4833 convert (TREE_TYPE (type
), expr
),
4834 TYPE_MIN_VALUE (type
)));
4836 /* ... fall through ... */
4840 /* If we are converting an additive expression to an integer type
4841 with lower precision, be wary of the optimization that can be
4842 applied by convert_to_integer. There are 2 problematic cases:
4843 - if the first operand was originally of a biased type,
4844 because we could be recursively called to convert it
4845 to an intermediate type and thus rematerialize the
4846 additive operator endlessly,
4847 - if the expression contains a placeholder, because an
4848 intermediate conversion that changes the sign could
4849 be inserted and thus introduce an artificial overflow
4850 at compile time when the placeholder is substituted. */
4851 if (code
== INTEGER_TYPE
4852 && ecode
== INTEGER_TYPE
4853 && TYPE_PRECISION (type
) < TYPE_PRECISION (etype
)
4854 && (TREE_CODE (expr
) == PLUS_EXPR
|| TREE_CODE (expr
) == MINUS_EXPR
))
4856 tree op0
= get_unwidened (TREE_OPERAND (expr
, 0), type
);
4858 if ((TREE_CODE (TREE_TYPE (op0
)) == INTEGER_TYPE
4859 && TYPE_BIASED_REPRESENTATION_P (TREE_TYPE (op0
)))
4860 || CONTAINS_PLACEHOLDER_P (expr
))
4861 return build1 (NOP_EXPR
, type
, expr
);
4864 return fold (convert_to_integer (type
, expr
));
4867 case REFERENCE_TYPE
:
4868 /* If converting between two thin pointers, adjust if needed to account
4869 for differing offsets from the base pointer, depending on whether
4870 there is a TYPE_UNCONSTRAINED_ARRAY attached to the record type. */
4871 if (TYPE_IS_THIN_POINTER_P (etype
) && TYPE_IS_THIN_POINTER_P (type
))
4874 = TYPE_UNCONSTRAINED_ARRAY (TREE_TYPE (etype
)) != NULL_TREE
4875 ? byte_position (DECL_CHAIN (TYPE_FIELDS (TREE_TYPE (etype
))))
4878 = TYPE_UNCONSTRAINED_ARRAY (TREE_TYPE (type
)) != NULL_TREE
4879 ? byte_position (DECL_CHAIN (TYPE_FIELDS (TREE_TYPE (type
))))
4881 tree byte_diff
= size_diffop (type_pos
, etype_pos
);
4883 expr
= build1 (NOP_EXPR
, type
, expr
);
4884 if (integer_zerop (byte_diff
))
4887 return build_binary_op (POINTER_PLUS_EXPR
, type
, expr
,
4888 fold_convert (sizetype
, byte_diff
));
4891 /* If converting fat pointer to normal or thin pointer, get the pointer
4892 to the array and then convert it. */
4893 if (TYPE_IS_FAT_POINTER_P (etype
))
4895 = build_component_ref (expr
, NULL_TREE
, TYPE_FIELDS (etype
), false);
4897 return fold (convert_to_pointer (type
, expr
));
4900 return fold (convert_to_real (type
, expr
));
4903 if (TYPE_JUSTIFIED_MODULAR_P (type
) && !AGGREGATE_TYPE_P (etype
))
4905 vec
<constructor_elt
, va_gc
> *v
;
4908 CONSTRUCTOR_APPEND_ELT (v
, TYPE_FIELDS (type
),
4909 convert (TREE_TYPE (TYPE_FIELDS (type
)),
4911 return gnat_build_constructor (type
, v
);
4914 /* ... fall through ... */
4917 /* In these cases, assume the front-end has validated the conversion.
4918 If the conversion is valid, it will be a bit-wise conversion, so
4919 it can be viewed as an unchecked conversion. */
4920 return unchecked_convert (type
, expr
, false);
4923 /* This is a either a conversion between a tagged type and some
4924 subtype, which we have to mark as a UNION_TYPE because of
4925 overlapping fields or a conversion of an Unchecked_Union. */
4926 return unchecked_convert (type
, expr
, false);
4928 case UNCONSTRAINED_ARRAY_TYPE
:
4929 /* If the input is a VECTOR_TYPE, convert to the representative
4930 array type first. */
4931 if (ecode
== VECTOR_TYPE
)
4933 expr
= convert (TYPE_REPRESENTATIVE_ARRAY (etype
), expr
);
4934 etype
= TREE_TYPE (expr
);
4935 ecode
= TREE_CODE (etype
);
4938 /* If EXPR is a constrained array, take its address, convert it to a
4939 fat pointer, and then dereference it. Likewise if EXPR is a
4940 record containing both a template and a constrained array.
4941 Note that a record representing a justified modular type
4942 always represents a packed constrained array. */
4943 if (ecode
== ARRAY_TYPE
4944 || (ecode
== INTEGER_TYPE
&& TYPE_HAS_ACTUAL_BOUNDS_P (etype
))
4945 || (ecode
== RECORD_TYPE
&& TYPE_CONTAINS_TEMPLATE_P (etype
))
4946 || (ecode
== RECORD_TYPE
&& TYPE_JUSTIFIED_MODULAR_P (etype
)))
4949 (INDIRECT_REF
, NULL_TREE
,
4950 convert_to_fat_pointer (TREE_TYPE (type
),
4951 build_unary_op (ADDR_EXPR
,
4954 /* Do something very similar for converting one unconstrained
4955 array to another. */
4956 else if (ecode
== UNCONSTRAINED_ARRAY_TYPE
)
4958 build_unary_op (INDIRECT_REF
, NULL_TREE
,
4959 convert (TREE_TYPE (type
),
4960 build_unary_op (ADDR_EXPR
,
4966 return fold (convert_to_complex (type
, expr
));
4973 /* Create an expression whose value is that of EXPR converted to the common
4974 index type, which is sizetype. EXPR is supposed to be in the base type
4975 of the GNAT index type. Calling it is equivalent to doing
4977 convert (sizetype, expr)
4979 but we try to distribute the type conversion with the knowledge that EXPR
4980 cannot overflow in its type. This is a best-effort approach and we fall
4981 back to the above expression as soon as difficulties are encountered.
4983 This is necessary to overcome issues that arise when the GNAT base index
4984 type and the GCC common index type (sizetype) don't have the same size,
4985 which is quite frequent on 64-bit architectures. In this case, and if
4986 the GNAT base index type is signed but the iteration type of the loop has
4987 been forced to unsigned, the loop scalar evolution engine cannot compute
4988 a simple evolution for the general induction variables associated with the
4989 array indices, because it will preserve the wrap-around semantics in the
4990 unsigned type of their "inner" part. As a result, many loop optimizations
4993 The solution is to use a special (basic) induction variable that is at
4994 least as large as sizetype, and to express the aforementioned general
4995 induction variables in terms of this induction variable, eliminating
4996 the problematic intermediate truncation to the GNAT base index type.
4997 This is possible as long as the original expression doesn't overflow
4998 and if the middle-end hasn't introduced artificial overflows in the
4999 course of the various simplification it can make to the expression. */
5002 convert_to_index_type (tree expr
)
5004 enum tree_code code
= TREE_CODE (expr
);
5005 tree type
= TREE_TYPE (expr
);
5007 /* If the type is unsigned, overflow is allowed so we cannot be sure that
5008 EXPR doesn't overflow. Keep it simple if optimization is disabled. */
5009 if (TYPE_UNSIGNED (type
) || !optimize
)
5010 return convert (sizetype
, expr
);
5015 /* The main effect of the function: replace a loop parameter with its
5016 associated special induction variable. */
5017 if (DECL_LOOP_PARM_P (expr
) && DECL_INDUCTION_VAR (expr
))
5018 expr
= DECL_INDUCTION_VAR (expr
);
5023 tree otype
= TREE_TYPE (TREE_OPERAND (expr
, 0));
5024 /* Bail out as soon as we suspect some sort of type frobbing. */
5025 if (TYPE_PRECISION (type
) != TYPE_PRECISION (otype
)
5026 || TYPE_UNSIGNED (type
) != TYPE_UNSIGNED (otype
))
5030 /* ... fall through ... */
5032 case NON_LVALUE_EXPR
:
5033 return fold_build1 (code
, sizetype
,
5034 convert_to_index_type (TREE_OPERAND (expr
, 0)));
5039 return fold_build2 (code
, sizetype
,
5040 convert_to_index_type (TREE_OPERAND (expr
, 0)),
5041 convert_to_index_type (TREE_OPERAND (expr
, 1)));
5044 return fold_build2 (code
, sizetype
, TREE_OPERAND (expr
, 0),
5045 convert_to_index_type (TREE_OPERAND (expr
, 1)));
5048 return fold_build3 (code
, sizetype
, TREE_OPERAND (expr
, 0),
5049 convert_to_index_type (TREE_OPERAND (expr
, 1)),
5050 convert_to_index_type (TREE_OPERAND (expr
, 2)));
5056 return convert (sizetype
, expr
);
5059 /* Remove all conversions that are done in EXP. This includes converting
5060 from a padded type or to a justified modular type. If TRUE_ADDRESS
5061 is true, always return the address of the containing object even if
5062 the address is not bit-aligned. */
5065 remove_conversions (tree exp
, bool true_address
)
5067 switch (TREE_CODE (exp
))
5071 && TREE_CODE (TREE_TYPE (exp
)) == RECORD_TYPE
5072 && TYPE_JUSTIFIED_MODULAR_P (TREE_TYPE (exp
)))
5074 remove_conversions ((*CONSTRUCTOR_ELTS (exp
))[0].value
, true);
5078 if (TYPE_IS_PADDING_P (TREE_TYPE (TREE_OPERAND (exp
, 0))))
5079 return remove_conversions (TREE_OPERAND (exp
, 0), true_address
);
5083 case VIEW_CONVERT_EXPR
:
5084 case NON_LVALUE_EXPR
:
5085 return remove_conversions (TREE_OPERAND (exp
, 0), true_address
);
5094 /* If EXP's type is an UNCONSTRAINED_ARRAY_TYPE, return an expression that
5095 refers to the underlying array. If it has TYPE_CONTAINS_TEMPLATE_P,
5096 likewise return an expression pointing to the underlying array. */
5099 maybe_unconstrained_array (tree exp
)
5101 enum tree_code code
= TREE_CODE (exp
);
5102 tree type
= TREE_TYPE (exp
);
5104 switch (TREE_CODE (type
))
5106 case UNCONSTRAINED_ARRAY_TYPE
:
5107 if (code
== UNCONSTRAINED_ARRAY_REF
)
5109 const bool read_only
= TREE_READONLY (exp
);
5110 const bool no_trap
= TREE_THIS_NOTRAP (exp
);
5112 exp
= TREE_OPERAND (exp
, 0);
5113 type
= TREE_TYPE (exp
);
5115 if (TREE_CODE (exp
) == COND_EXPR
)
5118 = build_unary_op (INDIRECT_REF
, NULL_TREE
,
5119 build_component_ref (TREE_OPERAND (exp
, 1),
5124 = build_unary_op (INDIRECT_REF
, NULL_TREE
,
5125 build_component_ref (TREE_OPERAND (exp
, 2),
5130 exp
= build3 (COND_EXPR
,
5131 TREE_TYPE (TREE_TYPE (TYPE_FIELDS (type
))),
5132 TREE_OPERAND (exp
, 0), op1
, op2
);
5136 exp
= build_unary_op (INDIRECT_REF
, NULL_TREE
,
5137 build_component_ref (exp
, NULL_TREE
,
5140 TREE_READONLY (exp
) = read_only
;
5141 TREE_THIS_NOTRAP (exp
) = no_trap
;
5145 else if (code
== NULL_EXPR
)
5146 exp
= build1 (NULL_EXPR
,
5147 TREE_TYPE (TREE_TYPE (TYPE_FIELDS (TREE_TYPE (type
)))),
5148 TREE_OPERAND (exp
, 0));
5152 /* If this is a padded type and it contains a template, convert to the
5153 unpadded type first. */
5154 if (TYPE_PADDING_P (type
)
5155 && TREE_CODE (TREE_TYPE (TYPE_FIELDS (type
))) == RECORD_TYPE
5156 && TYPE_CONTAINS_TEMPLATE_P (TREE_TYPE (TYPE_FIELDS (type
))))
5158 exp
= convert (TREE_TYPE (TYPE_FIELDS (type
)), exp
);
5159 type
= TREE_TYPE (exp
);
5162 if (TYPE_CONTAINS_TEMPLATE_P (type
))
5164 exp
= build_component_ref (exp
, NULL_TREE
,
5165 DECL_CHAIN (TYPE_FIELDS (type
)),
5167 type
= TREE_TYPE (exp
);
5169 /* If the array type is padded, convert to the unpadded type. */
5170 if (TYPE_IS_PADDING_P (type
))
5171 exp
= convert (TREE_TYPE (TYPE_FIELDS (type
)), exp
);
5182 /* Return true if EXPR is an expression that can be folded as an operand
5183 of a VIEW_CONVERT_EXPR. See ada-tree.h for a complete rationale. */
5186 can_fold_for_view_convert_p (tree expr
)
5190 /* The folder will fold NOP_EXPRs between integral types with the same
5191 precision (in the middle-end's sense). We cannot allow it if the
5192 types don't have the same precision in the Ada sense as well. */
5193 if (TREE_CODE (expr
) != NOP_EXPR
)
5196 t1
= TREE_TYPE (expr
);
5197 t2
= TREE_TYPE (TREE_OPERAND (expr
, 0));
5199 /* Defer to the folder for non-integral conversions. */
5200 if (!(INTEGRAL_TYPE_P (t1
) && INTEGRAL_TYPE_P (t2
)))
5203 /* Only fold conversions that preserve both precisions. */
5204 if (TYPE_PRECISION (t1
) == TYPE_PRECISION (t2
)
5205 && operand_equal_p (rm_size (t1
), rm_size (t2
), 0))
5211 /* Return an expression that does an unchecked conversion of EXPR to TYPE.
5212 If NOTRUNC_P is true, truncation operations should be suppressed.
5214 Special care is required with (source or target) integral types whose
5215 precision is not equal to their size, to make sure we fetch or assign
5216 the value bits whose location might depend on the endianness, e.g.
5218 Rmsize : constant := 8;
5219 subtype Int is Integer range 0 .. 2 ** Rmsize - 1;
5221 type Bit_Array is array (1 .. Rmsize) of Boolean;
5222 pragma Pack (Bit_Array);
5224 function To_Bit_Array is new Unchecked_Conversion (Int, Bit_Array);
5226 Value : Int := 2#1000_0001#;
5227 Vbits : Bit_Array := To_Bit_Array (Value);
5229 we expect the 8 bits at Vbits'Address to always contain Value, while
5230 their original location depends on the endianness, at Value'Address
5231 on a little-endian architecture but not on a big-endian one. */
5234 unchecked_convert (tree type
, tree expr
, bool notrunc_p
)
5236 tree etype
= TREE_TYPE (expr
);
5237 enum tree_code ecode
= TREE_CODE (etype
);
5238 enum tree_code code
= TREE_CODE (type
);
5241 /* If the expression is already of the right type, we are done. */
5245 /* If both types types are integral just do a normal conversion.
5246 Likewise for a conversion to an unconstrained array. */
5247 if ((((INTEGRAL_TYPE_P (type
)
5248 && !(code
== INTEGER_TYPE
&& TYPE_VAX_FLOATING_POINT_P (type
)))
5249 || (POINTER_TYPE_P (type
) && !TYPE_IS_THIN_POINTER_P (type
))
5250 || (code
== RECORD_TYPE
&& TYPE_JUSTIFIED_MODULAR_P (type
)))
5251 && ((INTEGRAL_TYPE_P (etype
)
5252 && !(ecode
== INTEGER_TYPE
&& TYPE_VAX_FLOATING_POINT_P (etype
)))
5253 || (POINTER_TYPE_P (etype
) && !TYPE_IS_THIN_POINTER_P (etype
))
5254 || (ecode
== RECORD_TYPE
&& TYPE_JUSTIFIED_MODULAR_P (etype
))))
5255 || code
== UNCONSTRAINED_ARRAY_TYPE
)
5257 if (ecode
== INTEGER_TYPE
&& TYPE_BIASED_REPRESENTATION_P (etype
))
5259 tree ntype
= copy_type (etype
);
5260 TYPE_BIASED_REPRESENTATION_P (ntype
) = 0;
5261 TYPE_MAIN_VARIANT (ntype
) = ntype
;
5262 expr
= build1 (NOP_EXPR
, ntype
, expr
);
5265 if (code
== INTEGER_TYPE
&& TYPE_BIASED_REPRESENTATION_P (type
))
5267 tree rtype
= copy_type (type
);
5268 TYPE_BIASED_REPRESENTATION_P (rtype
) = 0;
5269 TYPE_MAIN_VARIANT (rtype
) = rtype
;
5270 expr
= convert (rtype
, expr
);
5271 expr
= build1 (NOP_EXPR
, type
, expr
);
5274 expr
= convert (type
, expr
);
5277 /* If we are converting to an integral type whose precision is not equal
5278 to its size, first unchecked convert to a record type that contains an
5279 field of the given precision. Then extract the field. */
5280 else if (INTEGRAL_TYPE_P (type
)
5281 && TYPE_RM_SIZE (type
)
5282 && 0 != compare_tree_int (TYPE_RM_SIZE (type
),
5283 GET_MODE_BITSIZE (TYPE_MODE (type
))))
5285 tree rec_type
= make_node (RECORD_TYPE
);
5286 unsigned HOST_WIDE_INT prec
= TREE_INT_CST_LOW (TYPE_RM_SIZE (type
));
5287 tree field_type
, field
;
5289 if (TYPE_UNSIGNED (type
))
5290 field_type
= make_unsigned_type (prec
);
5292 field_type
= make_signed_type (prec
);
5293 SET_TYPE_RM_SIZE (field_type
, TYPE_RM_SIZE (type
));
5295 field
= create_field_decl (get_identifier ("OBJ"), field_type
, rec_type
,
5296 NULL_TREE
, NULL_TREE
, 1, 0);
5298 TYPE_FIELDS (rec_type
) = field
;
5299 layout_type (rec_type
);
5301 expr
= unchecked_convert (rec_type
, expr
, notrunc_p
);
5302 expr
= build_component_ref (expr
, NULL_TREE
, field
, false);
5303 expr
= fold_build1 (NOP_EXPR
, type
, expr
);
5306 /* Similarly if we are converting from an integral type whose precision is
5307 not equal to its size, first copy into a field of the given precision
5308 and unchecked convert the record type. */
5309 else if (INTEGRAL_TYPE_P (etype
)
5310 && TYPE_RM_SIZE (etype
)
5311 && 0 != compare_tree_int (TYPE_RM_SIZE (etype
),
5312 GET_MODE_BITSIZE (TYPE_MODE (etype
))))
5314 tree rec_type
= make_node (RECORD_TYPE
);
5315 unsigned HOST_WIDE_INT prec
= TREE_INT_CST_LOW (TYPE_RM_SIZE (etype
));
5316 vec
<constructor_elt
, va_gc
> *v
;
5318 tree field_type
, field
;
5320 if (TYPE_UNSIGNED (etype
))
5321 field_type
= make_unsigned_type (prec
);
5323 field_type
= make_signed_type (prec
);
5324 SET_TYPE_RM_SIZE (field_type
, TYPE_RM_SIZE (etype
));
5326 field
= create_field_decl (get_identifier ("OBJ"), field_type
, rec_type
,
5327 NULL_TREE
, NULL_TREE
, 1, 0);
5329 TYPE_FIELDS (rec_type
) = field
;
5330 layout_type (rec_type
);
5332 expr
= fold_build1 (NOP_EXPR
, field_type
, expr
);
5333 CONSTRUCTOR_APPEND_ELT (v
, field
, expr
);
5334 expr
= gnat_build_constructor (rec_type
, v
);
5335 expr
= unchecked_convert (type
, expr
, notrunc_p
);
5338 /* If we are converting from a scalar type to a type with a different size,
5339 we need to pad to have the same size on both sides.
5341 ??? We cannot do it unconditionally because unchecked conversions are
5342 used liberally by the front-end to implement polymorphism, e.g. in:
5344 S191s : constant ada__tags__addr_ptr := ada__tags__addr_ptr!(S190s);
5345 return p___size__4 (p__object!(S191s.all));
5347 so we skip all expressions that are references. */
5348 else if (!REFERENCE_CLASS_P (expr
)
5349 && !AGGREGATE_TYPE_P (etype
)
5350 && TREE_CODE (TYPE_SIZE (type
)) == INTEGER_CST
5351 && (c
= tree_int_cst_compare (TYPE_SIZE (etype
), TYPE_SIZE (type
))))
5355 expr
= convert (maybe_pad_type (etype
, TYPE_SIZE (type
), 0, Empty
,
5356 false, false, false, true),
5358 expr
= unchecked_convert (type
, expr
, notrunc_p
);
5362 tree rec_type
= maybe_pad_type (type
, TYPE_SIZE (etype
), 0, Empty
,
5363 false, false, false, true);
5364 expr
= unchecked_convert (rec_type
, expr
, notrunc_p
);
5365 expr
= build_component_ref (expr
, NULL_TREE
, TYPE_FIELDS (rec_type
),
5370 /* We have a special case when we are converting between two unconstrained
5371 array types. In that case, take the address, convert the fat pointer
5372 types, and dereference. */
5373 else if (ecode
== code
&& code
== UNCONSTRAINED_ARRAY_TYPE
)
5374 expr
= build_unary_op (INDIRECT_REF
, NULL_TREE
,
5375 build1 (VIEW_CONVERT_EXPR
, TREE_TYPE (type
),
5376 build_unary_op (ADDR_EXPR
, NULL_TREE
,
5379 /* Another special case is when we are converting to a vector type from its
5380 representative array type; this a regular conversion. */
5381 else if (code
== VECTOR_TYPE
5382 && ecode
== ARRAY_TYPE
5383 && gnat_types_compatible_p (TYPE_REPRESENTATIVE_ARRAY (type
),
5385 expr
= convert (type
, expr
);
5389 expr
= maybe_unconstrained_array (expr
);
5390 etype
= TREE_TYPE (expr
);
5391 ecode
= TREE_CODE (etype
);
5392 if (can_fold_for_view_convert_p (expr
))
5393 expr
= fold_build1 (VIEW_CONVERT_EXPR
, type
, expr
);
5395 expr
= build1 (VIEW_CONVERT_EXPR
, type
, expr
);
5398 /* If the result is an integral type whose precision is not equal to its
5399 size, sign- or zero-extend the result. We need not do this if the input
5400 is an integral type of the same precision and signedness or if the output
5401 is a biased type or if both the input and output are unsigned. */
5403 && INTEGRAL_TYPE_P (type
) && TYPE_RM_SIZE (type
)
5404 && !(code
== INTEGER_TYPE
&& TYPE_BIASED_REPRESENTATION_P (type
))
5405 && 0 != compare_tree_int (TYPE_RM_SIZE (type
),
5406 GET_MODE_BITSIZE (TYPE_MODE (type
)))
5407 && !(INTEGRAL_TYPE_P (etype
)
5408 && TYPE_UNSIGNED (type
) == TYPE_UNSIGNED (etype
)
5409 && operand_equal_p (TYPE_RM_SIZE (type
),
5410 (TYPE_RM_SIZE (etype
) != 0
5411 ? TYPE_RM_SIZE (etype
) : TYPE_SIZE (etype
)),
5413 && !(TYPE_UNSIGNED (type
) && TYPE_UNSIGNED (etype
)))
5416 = gnat_type_for_mode (TYPE_MODE (type
), TYPE_UNSIGNED (type
));
5418 = convert (base_type
,
5419 size_binop (MINUS_EXPR
,
5421 (GET_MODE_BITSIZE (TYPE_MODE (type
))),
5422 TYPE_RM_SIZE (type
)));
5425 build_binary_op (RSHIFT_EXPR
, base_type
,
5426 build_binary_op (LSHIFT_EXPR
, base_type
,
5427 convert (base_type
, expr
),
5432 /* An unchecked conversion should never raise Constraint_Error. The code
5433 below assumes that GCC's conversion routines overflow the same way that
5434 the underlying hardware does. This is probably true. In the rare case
5435 when it is false, we can rely on the fact that such conversions are
5436 erroneous anyway. */
5437 if (TREE_CODE (expr
) == INTEGER_CST
)
5438 TREE_OVERFLOW (expr
) = 0;
5440 /* If the sizes of the types differ and this is an VIEW_CONVERT_EXPR,
5441 show no longer constant. */
5442 if (TREE_CODE (expr
) == VIEW_CONVERT_EXPR
5443 && !operand_equal_p (TYPE_SIZE_UNIT (type
), TYPE_SIZE_UNIT (etype
),
5445 TREE_CONSTANT (expr
) = 0;
5450 /* Return the appropriate GCC tree code for the specified GNAT_TYPE,
5451 the latter being a record type as predicated by Is_Record_Type. */
5454 tree_code_for_record_type (Entity_Id gnat_type
)
5456 Node_Id component_list
, component
;
5458 /* Return UNION_TYPE if it's an Unchecked_Union whose non-discriminant
5459 fields are all in the variant part. Otherwise, return RECORD_TYPE. */
5460 if (!Is_Unchecked_Union (gnat_type
))
5463 gnat_type
= Implementation_Base_Type (gnat_type
);
5465 = Component_List (Type_Definition (Declaration_Node (gnat_type
)));
5467 for (component
= First_Non_Pragma (Component_Items (component_list
));
5468 Present (component
);
5469 component
= Next_Non_Pragma (component
))
5470 if (Ekind (Defining_Entity (component
)) == E_Component
)
5476 /* Return true if GNAT_TYPE is a "double" floating-point type, i.e. whose
5477 size is equal to 64 bits, or an array of such a type. Set ALIGN_CLAUSE
5478 according to the presence of an alignment clause on the type or, if it
5479 is an array, on the component type. */
5482 is_double_float_or_array (Entity_Id gnat_type
, bool *align_clause
)
5484 gnat_type
= Underlying_Type (gnat_type
);
5486 *align_clause
= Present (Alignment_Clause (gnat_type
));
5488 if (Is_Array_Type (gnat_type
))
5490 gnat_type
= Underlying_Type (Component_Type (gnat_type
));
5491 if (Present (Alignment_Clause (gnat_type
)))
5492 *align_clause
= true;
5495 if (!Is_Floating_Point_Type (gnat_type
))
5498 if (UI_To_Int (Esize (gnat_type
)) != 64)
5504 /* Return true if GNAT_TYPE is a "double" or larger scalar type, i.e. whose
5505 size is greater or equal to 64 bits, or an array of such a type. Set
5506 ALIGN_CLAUSE according to the presence of an alignment clause on the
5507 type or, if it is an array, on the component type. */
5510 is_double_scalar_or_array (Entity_Id gnat_type
, bool *align_clause
)
5512 gnat_type
= Underlying_Type (gnat_type
);
5514 *align_clause
= Present (Alignment_Clause (gnat_type
));
5516 if (Is_Array_Type (gnat_type
))
5518 gnat_type
= Underlying_Type (Component_Type (gnat_type
));
5519 if (Present (Alignment_Clause (gnat_type
)))
5520 *align_clause
= true;
5523 if (!Is_Scalar_Type (gnat_type
))
5526 if (UI_To_Int (Esize (gnat_type
)) < 64)
5532 /* Return true if GNU_TYPE is suitable as the type of a non-aliased
5533 component of an aggregate type. */
5536 type_for_nonaliased_component_p (tree gnu_type
)
5538 /* If the type is passed by reference, we may have pointers to the
5539 component so it cannot be made non-aliased. */
5540 if (must_pass_by_ref (gnu_type
) || default_pass_by_ref (gnu_type
))
5543 /* We used to say that any component of aggregate type is aliased
5544 because the front-end may take 'Reference of it. The front-end
5545 has been enhanced in the meantime so as to use a renaming instead
5546 in most cases, but the back-end can probably take the address of
5547 such a component too so we go for the conservative stance.
5549 For instance, we might need the address of any array type, even
5550 if normally passed by copy, to construct a fat pointer if the
5551 component is used as an actual for an unconstrained formal.
5553 Likewise for record types: even if a specific record subtype is
5554 passed by copy, the parent type might be passed by ref (e.g. if
5555 it's of variable size) and we might take the address of a child
5556 component to pass to a parent formal. We have no way to check
5557 for such conditions here. */
5558 if (AGGREGATE_TYPE_P (gnu_type
))
5564 /* Return true if TYPE is a smaller form of ORIG_TYPE. */
5567 smaller_form_type_p (tree type
, tree orig_type
)
5571 /* We're not interested in variants here. */
5572 if (TYPE_MAIN_VARIANT (type
) == TYPE_MAIN_VARIANT (orig_type
))
5575 /* Like a variant, a packable version keeps the original TYPE_NAME. */
5576 if (TYPE_NAME (type
) != TYPE_NAME (orig_type
))
5579 size
= TYPE_SIZE (type
);
5580 osize
= TYPE_SIZE (orig_type
);
5582 if (!(TREE_CODE (size
) == INTEGER_CST
&& TREE_CODE (osize
) == INTEGER_CST
))
5585 return tree_int_cst_lt (size
, osize
) != 0;
5588 /* Perform final processing on global variables. */
5590 static GTY (()) tree dummy_global
;
5593 gnat_write_global_declarations (void)
5598 /* If we have declared types as used at the global level, insert them in
5599 the global hash table. We use a dummy variable for this purpose. */
5600 if (types_used_by_cur_var_decl
&& !types_used_by_cur_var_decl
->is_empty ())
5602 struct varpool_node
*node
;
5605 ASM_FORMAT_PRIVATE_NAME (label
, first_global_object_name
, 0);
5607 = build_decl (BUILTINS_LOCATION
, VAR_DECL
, get_identifier (label
),
5609 TREE_STATIC (dummy_global
) = 1;
5610 TREE_ASM_WRITTEN (dummy_global
) = 1;
5611 node
= varpool_node_for_decl (dummy_global
);
5612 node
->symbol
.force_output
= 1;
5614 while (!types_used_by_cur_var_decl
->is_empty ())
5616 tree t
= types_used_by_cur_var_decl
->pop ();
5617 types_used_by_var_decl_insert (t
, dummy_global
);
5621 /* Output debug information for all global type declarations first. This
5622 ensures that global types whose compilation hasn't been finalized yet,
5623 for example pointers to Taft amendment types, have their compilation
5624 finalized in the right context. */
5625 FOR_EACH_VEC_SAFE_ELT (global_decls
, i
, iter
)
5626 if (TREE_CODE (iter
) == TYPE_DECL
)
5627 debug_hooks
->global_decl (iter
);
5629 /* Proceed to optimize and emit assembly. */
5630 finalize_compilation_unit ();
5632 /* After cgraph has had a chance to emit everything that's going to
5633 be emitted, output debug information for the rest of globals. */
5636 timevar_push (TV_SYMOUT
);
5637 FOR_EACH_VEC_SAFE_ELT (global_decls
, i
, iter
)
5638 if (TREE_CODE (iter
) != TYPE_DECL
)
5639 debug_hooks
->global_decl (iter
);
5640 timevar_pop (TV_SYMOUT
);
5644 /* ************************************************************************
5645 * * GCC builtins support *
5646 * ************************************************************************ */
5648 /* The general scheme is fairly simple:
5650 For each builtin function/type to be declared, gnat_install_builtins calls
5651 internal facilities which eventually get to gnat_push_decl, which in turn
5652 tracks the so declared builtin function decls in the 'builtin_decls' global
5653 datastructure. When an Intrinsic subprogram declaration is processed, we
5654 search this global datastructure to retrieve the associated BUILT_IN DECL
5657 /* Search the chain of currently available builtin declarations for a node
5658 corresponding to function NAME (an IDENTIFIER_NODE). Return the first node
5659 found, if any, or NULL_TREE otherwise. */
5661 builtin_decl_for (tree name
)
5666 FOR_EACH_VEC_SAFE_ELT (builtin_decls
, i
, decl
)
5667 if (DECL_NAME (decl
) == name
)
5673 /* The code below eventually exposes gnat_install_builtins, which declares
5674 the builtin types and functions we might need, either internally or as
5675 user accessible facilities.
5677 ??? This is a first implementation shot, still in rough shape. It is
5678 heavily inspired from the "C" family implementation, with chunks copied
5679 verbatim from there.
5681 Two obvious TODO candidates are
5682 o Use a more efficient name/decl mapping scheme
5683 o Devise a middle-end infrastructure to avoid having to copy
5684 pieces between front-ends. */
5686 /* ----------------------------------------------------------------------- *
5687 * BUILTIN ELEMENTARY TYPES *
5688 * ----------------------------------------------------------------------- */
5690 /* Standard data types to be used in builtin argument declarations. */
5694 CTI_SIGNED_SIZE_TYPE
, /* For format checking only. */
5696 CTI_CONST_STRING_TYPE
,
5701 static tree c_global_trees
[CTI_MAX
];
5703 #define signed_size_type_node c_global_trees[CTI_SIGNED_SIZE_TYPE]
5704 #define string_type_node c_global_trees[CTI_STRING_TYPE]
5705 #define const_string_type_node c_global_trees[CTI_CONST_STRING_TYPE]
5707 /* ??? In addition some attribute handlers, we currently don't support a
5708 (small) number of builtin-types, which in turns inhibits support for a
5709 number of builtin functions. */
5710 #define wint_type_node void_type_node
5711 #define intmax_type_node void_type_node
5712 #define uintmax_type_node void_type_node
5714 /* Build the void_list_node (void_type_node having been created). */
5717 build_void_list_node (void)
5719 tree t
= build_tree_list (NULL_TREE
, void_type_node
);
5723 /* Used to help initialize the builtin-types.def table. When a type of
5724 the correct size doesn't exist, use error_mark_node instead of NULL.
5725 The later results in segfaults even when a decl using the type doesn't
5729 builtin_type_for_size (int size
, bool unsignedp
)
5731 tree type
= gnat_type_for_size (size
, unsignedp
);
5732 return type
? type
: error_mark_node
;
5735 /* Build/push the elementary type decls that builtin functions/types
5739 install_builtin_elementary_types (void)
5741 signed_size_type_node
= gnat_signed_type (size_type_node
);
5742 pid_type_node
= integer_type_node
;
5743 void_list_node
= build_void_list_node ();
5745 string_type_node
= build_pointer_type (char_type_node
);
5746 const_string_type_node
5747 = build_pointer_type (build_qualified_type
5748 (char_type_node
, TYPE_QUAL_CONST
));
5751 /* ----------------------------------------------------------------------- *
5752 * BUILTIN FUNCTION TYPES *
5753 * ----------------------------------------------------------------------- */
5755 /* Now, builtin function types per se. */
5759 #define DEF_PRIMITIVE_TYPE(NAME, VALUE) NAME,
5760 #define DEF_FUNCTION_TYPE_0(NAME, RETURN) NAME,
5761 #define DEF_FUNCTION_TYPE_1(NAME, RETURN, ARG1) NAME,
5762 #define DEF_FUNCTION_TYPE_2(NAME, RETURN, ARG1, ARG2) NAME,
5763 #define DEF_FUNCTION_TYPE_3(NAME, RETURN, ARG1, ARG2, ARG3) NAME,
5764 #define DEF_FUNCTION_TYPE_4(NAME, RETURN, ARG1, ARG2, ARG3, ARG4) NAME,
5765 #define DEF_FUNCTION_TYPE_5(NAME, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5) NAME,
5766 #define DEF_FUNCTION_TYPE_6(NAME, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, ARG6) NAME,
5767 #define DEF_FUNCTION_TYPE_7(NAME, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, ARG6, ARG7) NAME,
5768 #define DEF_FUNCTION_TYPE_VAR_0(NAME, RETURN) NAME,
5769 #define DEF_FUNCTION_TYPE_VAR_1(NAME, RETURN, ARG1) NAME,
5770 #define DEF_FUNCTION_TYPE_VAR_2(NAME, RETURN, ARG1, ARG2) NAME,
5771 #define DEF_FUNCTION_TYPE_VAR_3(NAME, RETURN, ARG1, ARG2, ARG3) NAME,
5772 #define DEF_FUNCTION_TYPE_VAR_4(NAME, RETURN, ARG1, ARG2, ARG3, ARG4) NAME,
5773 #define DEF_FUNCTION_TYPE_VAR_5(NAME, RETURN, ARG1, ARG2, ARG3, ARG4, ARG6) \
5775 #define DEF_POINTER_TYPE(NAME, TYPE) NAME,
5776 #include "builtin-types.def"
5777 #undef DEF_PRIMITIVE_TYPE
5778 #undef DEF_FUNCTION_TYPE_0
5779 #undef DEF_FUNCTION_TYPE_1
5780 #undef DEF_FUNCTION_TYPE_2
5781 #undef DEF_FUNCTION_TYPE_3
5782 #undef DEF_FUNCTION_TYPE_4
5783 #undef DEF_FUNCTION_TYPE_5
5784 #undef DEF_FUNCTION_TYPE_6
5785 #undef DEF_FUNCTION_TYPE_7
5786 #undef DEF_FUNCTION_TYPE_VAR_0
5787 #undef DEF_FUNCTION_TYPE_VAR_1
5788 #undef DEF_FUNCTION_TYPE_VAR_2
5789 #undef DEF_FUNCTION_TYPE_VAR_3
5790 #undef DEF_FUNCTION_TYPE_VAR_4
5791 #undef DEF_FUNCTION_TYPE_VAR_5
5792 #undef DEF_POINTER_TYPE
5796 typedef enum c_builtin_type builtin_type
;
5798 /* A temporary array used in communication with def_fn_type. */
5799 static GTY(()) tree builtin_types
[(int) BT_LAST
+ 1];
5801 /* A helper function for install_builtin_types. Build function type
5802 for DEF with return type RET and N arguments. If VAR is true, then the
5803 function should be variadic after those N arguments.
5805 Takes special care not to ICE if any of the types involved are
5806 error_mark_node, which indicates that said type is not in fact available
5807 (see builtin_type_for_size). In which case the function type as a whole
5808 should be error_mark_node. */
5811 def_fn_type (builtin_type def
, builtin_type ret
, bool var
, int n
, ...)
5814 tree
*args
= XALLOCAVEC (tree
, n
);
5819 for (i
= 0; i
< n
; ++i
)
5821 builtin_type a
= (builtin_type
) va_arg (list
, int);
5822 t
= builtin_types
[a
];
5823 if (t
== error_mark_node
)
5828 t
= builtin_types
[ret
];
5829 if (t
== error_mark_node
)
5832 t
= build_varargs_function_type_array (t
, n
, args
);
5834 t
= build_function_type_array (t
, n
, args
);
5837 builtin_types
[def
] = t
;
5841 /* Build the builtin function types and install them in the builtin_types
5842 array for later use in builtin function decls. */
5845 install_builtin_function_types (void)
5847 tree va_list_ref_type_node
;
5848 tree va_list_arg_type_node
;
5850 if (TREE_CODE (va_list_type_node
) == ARRAY_TYPE
)
5852 va_list_arg_type_node
= va_list_ref_type_node
=
5853 build_pointer_type (TREE_TYPE (va_list_type_node
));
5857 va_list_arg_type_node
= va_list_type_node
;
5858 va_list_ref_type_node
= build_reference_type (va_list_type_node
);
5861 #define DEF_PRIMITIVE_TYPE(ENUM, VALUE) \
5862 builtin_types[ENUM] = VALUE;
5863 #define DEF_FUNCTION_TYPE_0(ENUM, RETURN) \
5864 def_fn_type (ENUM, RETURN, 0, 0);
5865 #define DEF_FUNCTION_TYPE_1(ENUM, RETURN, ARG1) \
5866 def_fn_type (ENUM, RETURN, 0, 1, ARG1);
5867 #define DEF_FUNCTION_TYPE_2(ENUM, RETURN, ARG1, ARG2) \
5868 def_fn_type (ENUM, RETURN, 0, 2, ARG1, ARG2);
5869 #define DEF_FUNCTION_TYPE_3(ENUM, RETURN, ARG1, ARG2, ARG3) \
5870 def_fn_type (ENUM, RETURN, 0, 3, ARG1, ARG2, ARG3);
5871 #define DEF_FUNCTION_TYPE_4(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4) \
5872 def_fn_type (ENUM, RETURN, 0, 4, ARG1, ARG2, ARG3, ARG4);
5873 #define DEF_FUNCTION_TYPE_5(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5) \
5874 def_fn_type (ENUM, RETURN, 0, 5, ARG1, ARG2, ARG3, ARG4, ARG5);
5875 #define DEF_FUNCTION_TYPE_6(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \
5877 def_fn_type (ENUM, RETURN, 0, 6, ARG1, ARG2, ARG3, ARG4, ARG5, ARG6);
5878 #define DEF_FUNCTION_TYPE_7(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \
5880 def_fn_type (ENUM, RETURN, 0, 7, ARG1, ARG2, ARG3, ARG4, ARG5, ARG6, ARG7);
5881 #define DEF_FUNCTION_TYPE_VAR_0(ENUM, RETURN) \
5882 def_fn_type (ENUM, RETURN, 1, 0);
5883 #define DEF_FUNCTION_TYPE_VAR_1(ENUM, RETURN, ARG1) \
5884 def_fn_type (ENUM, RETURN, 1, 1, ARG1);
5885 #define DEF_FUNCTION_TYPE_VAR_2(ENUM, RETURN, ARG1, ARG2) \
5886 def_fn_type (ENUM, RETURN, 1, 2, ARG1, ARG2);
5887 #define DEF_FUNCTION_TYPE_VAR_3(ENUM, RETURN, ARG1, ARG2, ARG3) \
5888 def_fn_type (ENUM, RETURN, 1, 3, ARG1, ARG2, ARG3);
5889 #define DEF_FUNCTION_TYPE_VAR_4(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4) \
5890 def_fn_type (ENUM, RETURN, 1, 4, ARG1, ARG2, ARG3, ARG4);
5891 #define DEF_FUNCTION_TYPE_VAR_5(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5) \
5892 def_fn_type (ENUM, RETURN, 1, 5, ARG1, ARG2, ARG3, ARG4, ARG5);
5893 #define DEF_POINTER_TYPE(ENUM, TYPE) \
5894 builtin_types[(int) ENUM] = build_pointer_type (builtin_types[(int) TYPE]);
5896 #include "builtin-types.def"
5898 #undef DEF_PRIMITIVE_TYPE
5899 #undef DEF_FUNCTION_TYPE_1
5900 #undef DEF_FUNCTION_TYPE_2
5901 #undef DEF_FUNCTION_TYPE_3
5902 #undef DEF_FUNCTION_TYPE_4
5903 #undef DEF_FUNCTION_TYPE_5
5904 #undef DEF_FUNCTION_TYPE_6
5905 #undef DEF_FUNCTION_TYPE_VAR_0
5906 #undef DEF_FUNCTION_TYPE_VAR_1
5907 #undef DEF_FUNCTION_TYPE_VAR_2
5908 #undef DEF_FUNCTION_TYPE_VAR_3
5909 #undef DEF_FUNCTION_TYPE_VAR_4
5910 #undef DEF_FUNCTION_TYPE_VAR_5
5911 #undef DEF_POINTER_TYPE
5912 builtin_types
[(int) BT_LAST
] = NULL_TREE
;
5915 /* ----------------------------------------------------------------------- *
5916 * BUILTIN ATTRIBUTES *
5917 * ----------------------------------------------------------------------- */
5919 enum built_in_attribute
5921 #define DEF_ATTR_NULL_TREE(ENUM) ENUM,
5922 #define DEF_ATTR_INT(ENUM, VALUE) ENUM,
5923 #define DEF_ATTR_STRING(ENUM, VALUE) ENUM,
5924 #define DEF_ATTR_IDENT(ENUM, STRING) ENUM,
5925 #define DEF_ATTR_TREE_LIST(ENUM, PURPOSE, VALUE, CHAIN) ENUM,
5926 #include "builtin-attrs.def"
5927 #undef DEF_ATTR_NULL_TREE
5929 #undef DEF_ATTR_STRING
5930 #undef DEF_ATTR_IDENT
5931 #undef DEF_ATTR_TREE_LIST
5935 static GTY(()) tree built_in_attributes
[(int) ATTR_LAST
];
5938 install_builtin_attributes (void)
5940 /* Fill in the built_in_attributes array. */
5941 #define DEF_ATTR_NULL_TREE(ENUM) \
5942 built_in_attributes[(int) ENUM] = NULL_TREE;
5943 #define DEF_ATTR_INT(ENUM, VALUE) \
5944 built_in_attributes[(int) ENUM] = build_int_cst (NULL_TREE, VALUE);
5945 #define DEF_ATTR_STRING(ENUM, VALUE) \
5946 built_in_attributes[(int) ENUM] = build_string (strlen (VALUE), VALUE);
5947 #define DEF_ATTR_IDENT(ENUM, STRING) \
5948 built_in_attributes[(int) ENUM] = get_identifier (STRING);
5949 #define DEF_ATTR_TREE_LIST(ENUM, PURPOSE, VALUE, CHAIN) \
5950 built_in_attributes[(int) ENUM] \
5951 = tree_cons (built_in_attributes[(int) PURPOSE], \
5952 built_in_attributes[(int) VALUE], \
5953 built_in_attributes[(int) CHAIN]);
5954 #include "builtin-attrs.def"
5955 #undef DEF_ATTR_NULL_TREE
5957 #undef DEF_ATTR_STRING
5958 #undef DEF_ATTR_IDENT
5959 #undef DEF_ATTR_TREE_LIST
5962 /* Handle a "const" attribute; arguments as in
5963 struct attribute_spec.handler. */
5966 handle_const_attribute (tree
*node
, tree
ARG_UNUSED (name
),
5967 tree
ARG_UNUSED (args
), int ARG_UNUSED (flags
),
5970 if (TREE_CODE (*node
) == FUNCTION_DECL
)
5971 TREE_READONLY (*node
) = 1;
5973 *no_add_attrs
= true;
5978 /* Handle a "nothrow" attribute; arguments as in
5979 struct attribute_spec.handler. */
5982 handle_nothrow_attribute (tree
*node
, tree
ARG_UNUSED (name
),
5983 tree
ARG_UNUSED (args
), int ARG_UNUSED (flags
),
5986 if (TREE_CODE (*node
) == FUNCTION_DECL
)
5987 TREE_NOTHROW (*node
) = 1;
5989 *no_add_attrs
= true;
5994 /* Handle a "pure" attribute; arguments as in
5995 struct attribute_spec.handler. */
5998 handle_pure_attribute (tree
*node
, tree name
, tree
ARG_UNUSED (args
),
5999 int ARG_UNUSED (flags
), bool *no_add_attrs
)
6001 if (TREE_CODE (*node
) == FUNCTION_DECL
)
6002 DECL_PURE_P (*node
) = 1;
6003 /* ??? TODO: Support types. */
6006 warning (OPT_Wattributes
, "%qs attribute ignored",
6007 IDENTIFIER_POINTER (name
));
6008 *no_add_attrs
= true;
6014 /* Handle a "no vops" attribute; arguments as in
6015 struct attribute_spec.handler. */
6018 handle_novops_attribute (tree
*node
, tree
ARG_UNUSED (name
),
6019 tree
ARG_UNUSED (args
), int ARG_UNUSED (flags
),
6020 bool *ARG_UNUSED (no_add_attrs
))
6022 gcc_assert (TREE_CODE (*node
) == FUNCTION_DECL
);
6023 DECL_IS_NOVOPS (*node
) = 1;
6027 /* Helper for nonnull attribute handling; fetch the operand number
6028 from the attribute argument list. */
6031 get_nonnull_operand (tree arg_num_expr
, unsigned HOST_WIDE_INT
*valp
)
6033 /* Verify the arg number is a constant. */
6034 if (TREE_CODE (arg_num_expr
) != INTEGER_CST
6035 || TREE_INT_CST_HIGH (arg_num_expr
) != 0)
6038 *valp
= TREE_INT_CST_LOW (arg_num_expr
);
6042 /* Handle the "nonnull" attribute. */
6044 handle_nonnull_attribute (tree
*node
, tree
ARG_UNUSED (name
),
6045 tree args
, int ARG_UNUSED (flags
),
6049 unsigned HOST_WIDE_INT attr_arg_num
;
6051 /* If no arguments are specified, all pointer arguments should be
6052 non-null. Verify a full prototype is given so that the arguments
6053 will have the correct types when we actually check them later. */
6056 if (!prototype_p (type
))
6058 error ("nonnull attribute without arguments on a non-prototype");
6059 *no_add_attrs
= true;
6064 /* Argument list specified. Verify that each argument number references
6065 a pointer argument. */
6066 for (attr_arg_num
= 1; args
; args
= TREE_CHAIN (args
))
6068 unsigned HOST_WIDE_INT arg_num
= 0, ck_num
;
6070 if (!get_nonnull_operand (TREE_VALUE (args
), &arg_num
))
6072 error ("nonnull argument has invalid operand number (argument %lu)",
6073 (unsigned long) attr_arg_num
);
6074 *no_add_attrs
= true;
6078 if (prototype_p (type
))
6080 function_args_iterator iter
;
6083 function_args_iter_init (&iter
, type
);
6084 for (ck_num
= 1; ; ck_num
++, function_args_iter_next (&iter
))
6086 argument
= function_args_iter_cond (&iter
);
6087 if (!argument
|| ck_num
== arg_num
)
6092 || TREE_CODE (argument
) == VOID_TYPE
)
6094 error ("nonnull argument with out-of-range operand number "
6095 "(argument %lu, operand %lu)",
6096 (unsigned long) attr_arg_num
, (unsigned long) arg_num
);
6097 *no_add_attrs
= true;
6101 if (TREE_CODE (argument
) != POINTER_TYPE
)
6103 error ("nonnull argument references non-pointer operand "
6104 "(argument %lu, operand %lu)",
6105 (unsigned long) attr_arg_num
, (unsigned long) arg_num
);
6106 *no_add_attrs
= true;
6115 /* Handle a "sentinel" attribute. */
6118 handle_sentinel_attribute (tree
*node
, tree name
, tree args
,
6119 int ARG_UNUSED (flags
), bool *no_add_attrs
)
6121 if (!prototype_p (*node
))
6123 warning (OPT_Wattributes
,
6124 "%qs attribute requires prototypes with named arguments",
6125 IDENTIFIER_POINTER (name
));
6126 *no_add_attrs
= true;
6130 if (!stdarg_p (*node
))
6132 warning (OPT_Wattributes
,
6133 "%qs attribute only applies to variadic functions",
6134 IDENTIFIER_POINTER (name
));
6135 *no_add_attrs
= true;
6141 tree position
= TREE_VALUE (args
);
6143 if (TREE_CODE (position
) != INTEGER_CST
)
6145 warning (0, "requested position is not an integer constant");
6146 *no_add_attrs
= true;
6150 if (tree_int_cst_lt (position
, integer_zero_node
))
6152 warning (0, "requested position is less than zero");
6153 *no_add_attrs
= true;
6161 /* Handle a "noreturn" attribute; arguments as in
6162 struct attribute_spec.handler. */
6165 handle_noreturn_attribute (tree
*node
, tree name
, tree
ARG_UNUSED (args
),
6166 int ARG_UNUSED (flags
), bool *no_add_attrs
)
6168 tree type
= TREE_TYPE (*node
);
6170 /* See FIXME comment in c_common_attribute_table. */
6171 if (TREE_CODE (*node
) == FUNCTION_DECL
)
6172 TREE_THIS_VOLATILE (*node
) = 1;
6173 else if (TREE_CODE (type
) == POINTER_TYPE
6174 && TREE_CODE (TREE_TYPE (type
)) == FUNCTION_TYPE
)
6176 = build_pointer_type
6177 (build_type_variant (TREE_TYPE (type
),
6178 TYPE_READONLY (TREE_TYPE (type
)), 1));
6181 warning (OPT_Wattributes
, "%qs attribute ignored",
6182 IDENTIFIER_POINTER (name
));
6183 *no_add_attrs
= true;
6189 /* Handle a "leaf" attribute; arguments as in
6190 struct attribute_spec.handler. */
6193 handle_leaf_attribute (tree
*node
, tree name
,
6194 tree
ARG_UNUSED (args
),
6195 int ARG_UNUSED (flags
), bool *no_add_attrs
)
6197 if (TREE_CODE (*node
) != FUNCTION_DECL
)
6199 warning (OPT_Wattributes
, "%qE attribute ignored", name
);
6200 *no_add_attrs
= true;
6202 if (!TREE_PUBLIC (*node
))
6204 warning (OPT_Wattributes
, "%qE attribute has no effect", name
);
6205 *no_add_attrs
= true;
6211 /* Handle a "malloc" attribute; arguments as in
6212 struct attribute_spec.handler. */
6215 handle_malloc_attribute (tree
*node
, tree name
, tree
ARG_UNUSED (args
),
6216 int ARG_UNUSED (flags
), bool *no_add_attrs
)
6218 if (TREE_CODE (*node
) == FUNCTION_DECL
6219 && POINTER_TYPE_P (TREE_TYPE (TREE_TYPE (*node
))))
6220 DECL_IS_MALLOC (*node
) = 1;
6223 warning (OPT_Wattributes
, "%qs attribute ignored",
6224 IDENTIFIER_POINTER (name
));
6225 *no_add_attrs
= true;
6231 /* Fake handler for attributes we don't properly support. */
6234 fake_attribute_handler (tree
* ARG_UNUSED (node
),
6235 tree
ARG_UNUSED (name
),
6236 tree
ARG_UNUSED (args
),
6237 int ARG_UNUSED (flags
),
6238 bool * ARG_UNUSED (no_add_attrs
))
6243 /* Handle a "type_generic" attribute. */
6246 handle_type_generic_attribute (tree
*node
, tree
ARG_UNUSED (name
),
6247 tree
ARG_UNUSED (args
), int ARG_UNUSED (flags
),
6248 bool * ARG_UNUSED (no_add_attrs
))
6250 /* Ensure we have a function type. */
6251 gcc_assert (TREE_CODE (*node
) == FUNCTION_TYPE
);
6253 /* Ensure we have a variadic function. */
6254 gcc_assert (!prototype_p (*node
) || stdarg_p (*node
));
6259 /* Handle a "vector_size" attribute; arguments as in
6260 struct attribute_spec.handler. */
6263 handle_vector_size_attribute (tree
*node
, tree name
, tree args
,
6264 int ARG_UNUSED (flags
),
6267 unsigned HOST_WIDE_INT vecsize
, nunits
;
6268 enum machine_mode orig_mode
;
6269 tree type
= *node
, new_type
, size
;
6271 *no_add_attrs
= true;
6273 size
= TREE_VALUE (args
);
6275 if (!host_integerp (size
, 1))
6277 warning (OPT_Wattributes
, "%qs attribute ignored",
6278 IDENTIFIER_POINTER (name
));
6282 /* Get the vector size (in bytes). */
6283 vecsize
= tree_low_cst (size
, 1);
6285 /* We need to provide for vector pointers, vector arrays, and
6286 functions returning vectors. For example:
6288 __attribute__((vector_size(16))) short *foo;
6290 In this case, the mode is SI, but the type being modified is
6291 HI, so we need to look further. */
6293 while (POINTER_TYPE_P (type
)
6294 || TREE_CODE (type
) == FUNCTION_TYPE
6295 || TREE_CODE (type
) == ARRAY_TYPE
)
6296 type
= TREE_TYPE (type
);
6298 /* Get the mode of the type being modified. */
6299 orig_mode
= TYPE_MODE (type
);
6301 if ((!INTEGRAL_TYPE_P (type
)
6302 && !SCALAR_FLOAT_TYPE_P (type
)
6303 && !FIXED_POINT_TYPE_P (type
))
6304 || (!SCALAR_FLOAT_MODE_P (orig_mode
)
6305 && GET_MODE_CLASS (orig_mode
) != MODE_INT
6306 && !ALL_SCALAR_FIXED_POINT_MODE_P (orig_mode
))
6307 || !host_integerp (TYPE_SIZE_UNIT (type
), 1)
6308 || TREE_CODE (type
) == BOOLEAN_TYPE
)
6310 error ("invalid vector type for attribute %qs",
6311 IDENTIFIER_POINTER (name
));
6315 if (vecsize
% tree_low_cst (TYPE_SIZE_UNIT (type
), 1))
6317 error ("vector size not an integral multiple of component size");
6323 error ("zero vector size");
6327 /* Calculate how many units fit in the vector. */
6328 nunits
= vecsize
/ tree_low_cst (TYPE_SIZE_UNIT (type
), 1);
6329 if (nunits
& (nunits
- 1))
6331 error ("number of components of the vector not a power of two");
6335 new_type
= build_vector_type (type
, nunits
);
6337 /* Build back pointers if needed. */
6338 *node
= reconstruct_complex_type (*node
, new_type
);
6343 /* Handle a "vector_type" attribute; arguments as in
6344 struct attribute_spec.handler. */
6347 handle_vector_type_attribute (tree
*node
, tree name
, tree
ARG_UNUSED (args
),
6348 int ARG_UNUSED (flags
),
6351 /* Vector representative type and size. */
6352 tree rep_type
= *node
;
6353 tree rep_size
= TYPE_SIZE_UNIT (rep_type
);
6355 /* Vector size in bytes and number of units. */
6356 unsigned HOST_WIDE_INT vec_bytes
, vec_units
;
6358 /* Vector element type and mode. */
6360 enum machine_mode elem_mode
;
6362 *no_add_attrs
= true;
6364 if (TREE_CODE (rep_type
) != ARRAY_TYPE
)
6366 error ("attribute %qs applies to array types only",
6367 IDENTIFIER_POINTER (name
));
6371 /* Silently punt on variable sizes. We can't make vector types for them,
6372 need to ignore them on front-end generated subtypes of unconstrained
6373 bases, and this attribute is for binding implementors, not end-users, so
6374 we should never get there from legitimate explicit uses. */
6376 if (!host_integerp (rep_size
, 1))
6379 /* Get the element type/mode and check this is something we know
6380 how to make vectors of. */
6382 elem_type
= TREE_TYPE (rep_type
);
6383 elem_mode
= TYPE_MODE (elem_type
);
6385 if ((!INTEGRAL_TYPE_P (elem_type
)
6386 && !SCALAR_FLOAT_TYPE_P (elem_type
)
6387 && !FIXED_POINT_TYPE_P (elem_type
))
6388 || (!SCALAR_FLOAT_MODE_P (elem_mode
)
6389 && GET_MODE_CLASS (elem_mode
) != MODE_INT
6390 && !ALL_SCALAR_FIXED_POINT_MODE_P (elem_mode
))
6391 || !host_integerp (TYPE_SIZE_UNIT (elem_type
), 1))
6393 error ("invalid element type for attribute %qs",
6394 IDENTIFIER_POINTER (name
));
6398 /* Sanity check the vector size and element type consistency. */
6400 vec_bytes
= tree_low_cst (rep_size
, 1);
6402 if (vec_bytes
% tree_low_cst (TYPE_SIZE_UNIT (elem_type
), 1))
6404 error ("vector size not an integral multiple of component size");
6410 error ("zero vector size");
6414 vec_units
= vec_bytes
/ tree_low_cst (TYPE_SIZE_UNIT (elem_type
), 1);
6415 if (vec_units
& (vec_units
- 1))
6417 error ("number of components of the vector not a power of two");
6421 /* Build the vector type and replace. */
6423 *node
= build_vector_type (elem_type
, vec_units
);
6424 TYPE_REPRESENTATIVE_ARRAY (*node
) = rep_type
;
6429 /* ----------------------------------------------------------------------- *
6430 * BUILTIN FUNCTIONS *
6431 * ----------------------------------------------------------------------- */
6433 /* Worker for DEF_BUILTIN. Possibly define a builtin function with one or two
6434 names. Does not declare a non-__builtin_ function if flag_no_builtin, or
6435 if nonansi_p and flag_no_nonansi_builtin. */
6438 def_builtin_1 (enum built_in_function fncode
,
6440 enum built_in_class fnclass
,
6441 tree fntype
, tree libtype
,
6442 bool both_p
, bool fallback_p
,
6443 bool nonansi_p ATTRIBUTE_UNUSED
,
6444 tree fnattrs
, bool implicit_p
)
6447 const char *libname
;
6449 /* Preserve an already installed decl. It most likely was setup in advance
6450 (e.g. as part of the internal builtins) for specific reasons. */
6451 if (builtin_decl_explicit (fncode
) != NULL_TREE
)
6454 gcc_assert ((!both_p
&& !fallback_p
)
6455 || !strncmp (name
, "__builtin_",
6456 strlen ("__builtin_")));
6458 libname
= name
+ strlen ("__builtin_");
6459 decl
= add_builtin_function (name
, fntype
, fncode
, fnclass
,
6460 (fallback_p
? libname
: NULL
),
6463 /* ??? This is normally further controlled by command-line options
6464 like -fno-builtin, but we don't have them for Ada. */
6465 add_builtin_function (libname
, libtype
, fncode
, fnclass
,
6468 set_builtin_decl (fncode
, decl
, implicit_p
);
6471 static int flag_isoc94
= 0;
6472 static int flag_isoc99
= 0;
6474 /* Install what the common builtins.def offers. */
6477 install_builtin_functions (void)
6479 #define DEF_BUILTIN(ENUM, NAME, CLASS, TYPE, LIBTYPE, BOTH_P, FALLBACK_P, \
6480 NONANSI_P, ATTRS, IMPLICIT, COND) \
6482 def_builtin_1 (ENUM, NAME, CLASS, \
6483 builtin_types[(int) TYPE], \
6484 builtin_types[(int) LIBTYPE], \
6485 BOTH_P, FALLBACK_P, NONANSI_P, \
6486 built_in_attributes[(int) ATTRS], IMPLICIT);
6487 #include "builtins.def"
6491 /* ----------------------------------------------------------------------- *
6492 * BUILTIN FUNCTIONS *
6493 * ----------------------------------------------------------------------- */
6495 /* Install the builtin functions we might need. */
6498 gnat_install_builtins (void)
6500 install_builtin_elementary_types ();
6501 install_builtin_function_types ();
6502 install_builtin_attributes ();
6504 /* Install builtins used by generic middle-end pieces first. Some of these
6505 know about internal specificities and control attributes accordingly, for
6506 instance __builtin_alloca vs no-throw and -fstack-check. We will ignore
6507 the generic definition from builtins.def. */
6508 build_common_builtin_nodes ();
6510 /* Now, install the target specific builtins, such as the AltiVec family on
6511 ppc, and the common set as exposed by builtins.def. */
6512 targetm
.init_builtins ();
6513 install_builtin_functions ();
6516 #include "gt-ada-utils.h"
6517 #include "gtype-ada.h"