1 /****************************************************************************
3 * GNAT COMPILER COMPONENTS *
7 * C Implementation File *
9 * Copyright (C) 1992-2012, Free Software Foundation, Inc. *
11 * GNAT is free software; you can redistribute it and/or modify it under *
12 * terms of the GNU General Public License as published by the Free Soft- *
13 * ware Foundation; either version 3, or (at your option) any later ver- *
14 * sion. GNAT is distributed in the hope that it will be useful, but WITH- *
15 * OUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY *
16 * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License *
17 * for more details. You should have received a copy of the GNU General *
18 * Public License along with GCC; see the file COPYING3. If not see *
19 * <http://www.gnu.org/licenses/>. *
21 * GNAT was originally developed by the GNAT team at New York University. *
22 * Extensive contributions were provided by Ada Core Technologies Inc. *
24 ****************************************************************************/
28 #include "coretypes.h"
33 #include "diagnostic-core.h"
39 #include "common/common-target.h"
40 #include "langhooks.h"
42 #include "diagnostic.h"
43 #include "tree-dump.h"
44 #include "tree-inline.h"
45 #include "tree-iterator.h"
/* Fall back to the target word size when no explicit cap is provided by
   the configuration.  */
#ifndef MAX_BITS_PER_WORD
#define MAX_BITS_PER_WORD  BITS_PER_WORD
#endif
65 /* If nonzero, pretend we are allocating at global level. */
68 /* The default alignment of "double" floating-point types, i.e. floating
69 point types whose size is equal to 64 bits, or 0 if this alignment is
70 not specifically capped. */
71 int double_float_alignment
;
73 /* The default alignment of "double" or larger scalar types, i.e. scalar
74 types whose size is greater or equal to 64 bits, or 0 if this alignment
75 is not specifically capped. */
76 int double_scalar_alignment
;
78 /* Tree nodes for the various types and decls we create. */
79 tree gnat_std_decls
[(int) ADT_LAST
];
81 /* Functions to call for each of the possible raise reasons. */
82 tree gnat_raise_decls
[(int) LAST_REASON_CODE
+ 1];
84 /* Likewise, but with extra info for each of the possible raise reasons. */
85 tree gnat_raise_decls_ext
[(int) LAST_REASON_CODE
+ 1];
87 /* Forward declarations for handlers of attributes. */
88 static tree
handle_const_attribute (tree
*, tree
, tree
, int, bool *);
89 static tree
handle_nothrow_attribute (tree
*, tree
, tree
, int, bool *);
90 static tree
handle_pure_attribute (tree
*, tree
, tree
, int, bool *);
91 static tree
handle_novops_attribute (tree
*, tree
, tree
, int, bool *);
92 static tree
handle_nonnull_attribute (tree
*, tree
, tree
, int, bool *);
93 static tree
handle_sentinel_attribute (tree
*, tree
, tree
, int, bool *);
94 static tree
handle_noreturn_attribute (tree
*, tree
, tree
, int, bool *);
95 static tree
handle_leaf_attribute (tree
*, tree
, tree
, int, bool *);
96 static tree
handle_malloc_attribute (tree
*, tree
, tree
, int, bool *);
97 static tree
handle_type_generic_attribute (tree
*, tree
, tree
, int, bool *);
98 static tree
handle_vector_size_attribute (tree
*, tree
, tree
, int, bool *);
99 static tree
handle_vector_type_attribute (tree
*, tree
, tree
, int, bool *);
101 /* Fake handler for attributes we don't properly support, typically because
102 they'd require dragging a lot of the common-c front-end circuitry. */
103 static tree
fake_attribute_handler (tree
*, tree
, tree
, int, bool *);
105 /* Table of machine-independent internal attributes for Ada. We support
106 this minimal set of attributes to accommodate the needs of builtins. */
107 const struct attribute_spec gnat_internal_attribute_table
[] =
109 /* { name, min_len, max_len, decl_req, type_req, fn_type_req, handler,
110 affects_type_identity } */
111 { "const", 0, 0, true, false, false, handle_const_attribute
,
113 { "nothrow", 0, 0, true, false, false, handle_nothrow_attribute
,
115 { "pure", 0, 0, true, false, false, handle_pure_attribute
,
117 { "no vops", 0, 0, true, false, false, handle_novops_attribute
,
119 { "nonnull", 0, -1, false, true, true, handle_nonnull_attribute
,
121 { "sentinel", 0, 1, false, true, true, handle_sentinel_attribute
,
123 { "noreturn", 0, 0, true, false, false, handle_noreturn_attribute
,
125 { "leaf", 0, 0, true, false, false, handle_leaf_attribute
,
127 { "malloc", 0, 0, true, false, false, handle_malloc_attribute
,
129 { "type generic", 0, 0, false, true, true, handle_type_generic_attribute
,
132 { "vector_size", 1, 1, false, true, false, handle_vector_size_attribute
,
134 { "vector_type", 0, 0, false, true, false, handle_vector_type_attribute
,
136 { "may_alias", 0, 0, false, true, false, NULL
, false },
138 /* ??? format and format_arg are heavy and not supported, which actually
139 prevents support for stdio builtins, which we however declare as part
140 of the common builtins.def contents. */
141 { "format", 3, 3, false, true, true, fake_attribute_handler
, false },
142 { "format_arg", 1, 1, false, true, true, fake_attribute_handler
, false },
144 { NULL
, 0, 0, false, false, false, NULL
, false }
147 /* Associates a GNAT tree node to a GCC tree node. It is used in
148 `save_gnu_tree', `get_gnu_tree' and `present_gnu_tree'. See documentation
149 of `save_gnu_tree' for more info. */
150 static GTY((length ("max_gnat_nodes"))) tree
*associate_gnat_to_gnu
;
152 #define GET_GNU_TREE(GNAT_ENTITY) \
153 associate_gnat_to_gnu[(GNAT_ENTITY) - First_Node_Id]
155 #define SET_GNU_TREE(GNAT_ENTITY,VAL) \
156 associate_gnat_to_gnu[(GNAT_ENTITY) - First_Node_Id] = (VAL)
158 #define PRESENT_GNU_TREE(GNAT_ENTITY) \
159 (associate_gnat_to_gnu[(GNAT_ENTITY) - First_Node_Id] != NULL_TREE)
161 /* Associates a GNAT entity to a GCC tree node used as a dummy, if any. */
162 static GTY((length ("max_gnat_nodes"))) tree
*dummy_node_table
;
164 #define GET_DUMMY_NODE(GNAT_ENTITY) \
165 dummy_node_table[(GNAT_ENTITY) - First_Node_Id]
167 #define SET_DUMMY_NODE(GNAT_ENTITY,VAL) \
168 dummy_node_table[(GNAT_ENTITY) - First_Node_Id] = (VAL)
170 #define PRESENT_DUMMY_NODE(GNAT_ENTITY) \
171 (dummy_node_table[(GNAT_ENTITY) - First_Node_Id] != NULL_TREE)
173 /* This variable keeps a table for types for each precision so that we only
174 allocate each of them once. Signed and unsigned types are kept separate.
176 Note that these types are only used when fold-const requests something
177 special. Perhaps we should NOT share these types; we'll see how it
179 static GTY(()) tree signed_and_unsigned_types
[2 * MAX_BITS_PER_WORD
+ 1][2];
181 /* Likewise for float types, but record these by mode. */
182 static GTY(()) tree float_types
[NUM_MACHINE_MODES
];
184 /* For each binding contour we allocate a binding_level structure to indicate
185 the binding depth. */
187 struct GTY((chain_next ("%h.chain"))) gnat_binding_level
{
188 /* The binding level containing this one (the enclosing binding level). */
189 struct gnat_binding_level
*chain
;
190 /* The BLOCK node for this level. */
192 /* If nonzero, the setjmp buffer that needs to be updated for any
193 variable-sized definition within this context. */
197 /* The binding level currently in effect. */
198 static GTY(()) struct gnat_binding_level
*current_binding_level
;
200 /* A chain of gnat_binding_level structures awaiting reuse. */
201 static GTY((deletable
)) struct gnat_binding_level
*free_binding_level
;
203 /* The context to be used for global declarations. */
204 static GTY(()) tree global_context
;
206 /* An array of global declarations. */
207 static GTY(()) VEC(tree
,gc
) *global_decls
;
209 /* An array of builtin function declarations. */
210 static GTY(()) VEC(tree
,gc
) *builtin_decls
;
212 /* An array of global renaming pointers. */
213 static GTY(()) VEC(tree
,gc
) *global_renaming_pointers
;
215 /* A chain of unused BLOCK nodes. */
216 static GTY((deletable
)) tree free_block_chain
;
218 static tree
merge_sizes (tree
, tree
, tree
, bool, bool);
219 static tree
compute_related_constant (tree
, tree
);
220 static tree
split_plus (tree
, tree
*);
221 static tree
float_type_for_precision (int, enum machine_mode
);
222 static tree
convert_to_fat_pointer (tree
, tree
);
223 static bool potential_alignment_gap (tree
, tree
, tree
);
224 static void process_attributes (tree
, struct attrib
*);
226 /* Initialize the association of GNAT nodes to GCC trees. */
229 init_gnat_to_gnu (void)
231 associate_gnat_to_gnu
= ggc_alloc_cleared_vec_tree (max_gnat_nodes
);
234 /* GNAT_ENTITY is a GNAT tree node for an entity. Associate GNU_DECL, a GCC
235 tree node, with GNAT_ENTITY. If GNU_DECL is not a ..._DECL node, abort.
236 If NO_CHECK is true, the latter check is suppressed.
238 If GNU_DECL is zero, reset a previous association. */
241 save_gnu_tree (Entity_Id gnat_entity
, tree gnu_decl
, bool no_check
)
243 /* Check that GNAT_ENTITY is not already defined and that it is being set
244 to something which is a decl. If that is not the case, this usually
245 means GNAT_ENTITY is defined twice, but occasionally is due to some
247 gcc_assert (!(gnu_decl
248 && (PRESENT_GNU_TREE (gnat_entity
)
249 || (!no_check
&& !DECL_P (gnu_decl
)))));
251 SET_GNU_TREE (gnat_entity
, gnu_decl
);
254 /* GNAT_ENTITY is a GNAT tree node for an entity. Return the GCC tree node
255 that was associated with it. If there is no such tree node, abort.
257 In some cases, such as delayed elaboration or expressions that need to
258 be elaborated only once, GNAT_ENTITY is really not an entity. */
261 get_gnu_tree (Entity_Id gnat_entity
)
263 gcc_assert (PRESENT_GNU_TREE (gnat_entity
));
264 return GET_GNU_TREE (gnat_entity
);
267 /* Return nonzero if a GCC tree has been associated with GNAT_ENTITY. */
270 present_gnu_tree (Entity_Id gnat_entity
)
272 return PRESENT_GNU_TREE (gnat_entity
);
275 /* Initialize the association of GNAT nodes to GCC trees as dummies. */
278 init_dummy_type (void)
280 dummy_node_table
= ggc_alloc_cleared_vec_tree (max_gnat_nodes
);
283 /* Make a dummy type corresponding to GNAT_TYPE. */
286 make_dummy_type (Entity_Id gnat_type
)
288 Entity_Id gnat_underlying
= Gigi_Equivalent_Type (gnat_type
);
291 /* If there is an equivalent type, get its underlying type. */
292 if (Present (gnat_underlying
))
293 gnat_underlying
= Gigi_Equivalent_Type (Underlying_Type (gnat_underlying
));
295 /* If there was no equivalent type (can only happen when just annotating
296 types) or underlying type, go back to the original type. */
297 if (No (gnat_underlying
))
298 gnat_underlying
= gnat_type
;
300 /* If it there already a dummy type, use that one. Else make one. */
301 if (PRESENT_DUMMY_NODE (gnat_underlying
))
302 return GET_DUMMY_NODE (gnat_underlying
);
304 /* If this is a record, make a RECORD_TYPE or UNION_TYPE; else make
306 gnu_type
= make_node (Is_Record_Type (gnat_underlying
)
307 ? tree_code_for_record_type (gnat_underlying
)
309 TYPE_NAME (gnu_type
) = get_entity_name (gnat_type
);
310 TYPE_DUMMY_P (gnu_type
) = 1;
311 TYPE_STUB_DECL (gnu_type
)
312 = create_type_stub_decl (TYPE_NAME (gnu_type
), gnu_type
);
313 if (Is_By_Reference_Type (gnat_underlying
))
314 TYPE_BY_REFERENCE_P (gnu_type
) = 1;
316 SET_DUMMY_NODE (gnat_underlying
, gnu_type
);
321 /* Return the dummy type that was made for GNAT_TYPE, if any. */
324 get_dummy_type (Entity_Id gnat_type
)
326 return GET_DUMMY_NODE (gnat_type
);
329 /* Build dummy fat and thin pointer types whose designated type is specified
330 by GNAT_DESIG_TYPE/GNU_DESIG_TYPE and attach them to the latter. */
333 build_dummy_unc_pointer_types (Entity_Id gnat_desig_type
, tree gnu_desig_type
)
335 tree gnu_template_type
, gnu_ptr_template
, gnu_array_type
, gnu_ptr_array
;
336 tree gnu_fat_type
, fields
, gnu_object_type
;
338 gnu_template_type
= make_node (RECORD_TYPE
);
339 TYPE_NAME (gnu_template_type
) = create_concat_name (gnat_desig_type
, "XUB");
340 TYPE_DUMMY_P (gnu_template_type
) = 1;
341 gnu_ptr_template
= build_pointer_type (gnu_template_type
);
343 gnu_array_type
= make_node (ENUMERAL_TYPE
);
344 TYPE_NAME (gnu_array_type
) = create_concat_name (gnat_desig_type
, "XUA");
345 TYPE_DUMMY_P (gnu_array_type
) = 1;
346 gnu_ptr_array
= build_pointer_type (gnu_array_type
);
348 gnu_fat_type
= make_node (RECORD_TYPE
);
349 /* Build a stub DECL to trigger the special processing for fat pointer types
351 TYPE_NAME (gnu_fat_type
)
352 = create_type_stub_decl (create_concat_name (gnat_desig_type
, "XUP"),
354 fields
= create_field_decl (get_identifier ("P_ARRAY"), gnu_ptr_array
,
355 gnu_fat_type
, NULL_TREE
, NULL_TREE
, 0, 0);
357 = create_field_decl (get_identifier ("P_BOUNDS"), gnu_ptr_template
,
358 gnu_fat_type
, NULL_TREE
, NULL_TREE
, 0, 0);
359 finish_fat_pointer_type (gnu_fat_type
, fields
);
360 SET_TYPE_UNCONSTRAINED_ARRAY (gnu_fat_type
, gnu_desig_type
);
361 /* Suppress debug info until after the type is completed. */
362 TYPE_DECL_SUPPRESS_DEBUG (TYPE_STUB_DECL (gnu_fat_type
)) = 1;
364 gnu_object_type
= make_node (RECORD_TYPE
);
365 TYPE_NAME (gnu_object_type
) = create_concat_name (gnat_desig_type
, "XUT");
366 TYPE_DUMMY_P (gnu_object_type
) = 1;
368 TYPE_POINTER_TO (gnu_desig_type
) = gnu_fat_type
;
369 TYPE_OBJECT_RECORD_TYPE (gnu_desig_type
) = gnu_object_type
;
372 /* Return true if we are in the global binding level. */
375 global_bindings_p (void)
377 return force_global
|| current_function_decl
== NULL_TREE
;
380 /* Enter a new binding level. */
383 gnat_pushlevel (void)
385 struct gnat_binding_level
*newlevel
= NULL
;
387 /* Reuse a struct for this binding level, if there is one. */
388 if (free_binding_level
)
390 newlevel
= free_binding_level
;
391 free_binding_level
= free_binding_level
->chain
;
394 newlevel
= ggc_alloc_gnat_binding_level ();
396 /* Use a free BLOCK, if any; otherwise, allocate one. */
397 if (free_block_chain
)
399 newlevel
->block
= free_block_chain
;
400 free_block_chain
= BLOCK_CHAIN (free_block_chain
);
401 BLOCK_CHAIN (newlevel
->block
) = NULL_TREE
;
404 newlevel
->block
= make_node (BLOCK
);
406 /* Point the BLOCK we just made to its parent. */
407 if (current_binding_level
)
408 BLOCK_SUPERCONTEXT (newlevel
->block
) = current_binding_level
->block
;
410 BLOCK_VARS (newlevel
->block
) = NULL_TREE
;
411 BLOCK_SUBBLOCKS (newlevel
->block
) = NULL_TREE
;
412 TREE_USED (newlevel
->block
) = 1;
414 /* Add this level to the front of the chain (stack) of active levels. */
415 newlevel
->chain
= current_binding_level
;
416 newlevel
->jmpbuf_decl
= NULL_TREE
;
417 current_binding_level
= newlevel
;
420 /* Set SUPERCONTEXT of the BLOCK for the current binding level to FNDECL
421 and point FNDECL to this BLOCK. */
424 set_current_block_context (tree fndecl
)
426 BLOCK_SUPERCONTEXT (current_binding_level
->block
) = fndecl
;
427 DECL_INITIAL (fndecl
) = current_binding_level
->block
;
428 set_block_for_group (current_binding_level
->block
);
431 /* Set the jmpbuf_decl for the current binding level to DECL. */
434 set_block_jmpbuf_decl (tree decl
)
436 current_binding_level
->jmpbuf_decl
= decl
;
439 /* Get the jmpbuf_decl, if any, for the current binding level. */
442 get_block_jmpbuf_decl (void)
444 return current_binding_level
->jmpbuf_decl
;
447 /* Exit a binding level. Set any BLOCK into the current code group. */
452 struct gnat_binding_level
*level
= current_binding_level
;
453 tree block
= level
->block
;
455 BLOCK_VARS (block
) = nreverse (BLOCK_VARS (block
));
456 BLOCK_SUBBLOCKS (block
) = blocks_nreverse (BLOCK_SUBBLOCKS (block
));
458 /* If this is a function-level BLOCK don't do anything. Otherwise, if there
459 are no variables free the block and merge its subblocks into those of its
460 parent block. Otherwise, add it to the list of its parent. */
461 if (TREE_CODE (BLOCK_SUPERCONTEXT (block
)) == FUNCTION_DECL
)
463 else if (BLOCK_VARS (block
) == NULL_TREE
)
465 BLOCK_SUBBLOCKS (level
->chain
->block
)
466 = block_chainon (BLOCK_SUBBLOCKS (block
),
467 BLOCK_SUBBLOCKS (level
->chain
->block
));
468 BLOCK_CHAIN (block
) = free_block_chain
;
469 free_block_chain
= block
;
473 BLOCK_CHAIN (block
) = BLOCK_SUBBLOCKS (level
->chain
->block
);
474 BLOCK_SUBBLOCKS (level
->chain
->block
) = block
;
475 TREE_USED (block
) = 1;
476 set_block_for_group (block
);
479 /* Free this binding structure. */
480 current_binding_level
= level
->chain
;
481 level
->chain
= free_binding_level
;
482 free_binding_level
= level
;
485 /* Exit a binding level and discard the associated BLOCK. */
490 struct gnat_binding_level
*level
= current_binding_level
;
491 tree block
= level
->block
;
493 BLOCK_CHAIN (block
) = free_block_chain
;
494 free_block_chain
= block
;
496 /* Free this binding structure. */
497 current_binding_level
= level
->chain
;
498 level
->chain
= free_binding_level
;
499 free_binding_level
= level
;
502 /* Record DECL as belonging to the current lexical scope and use GNAT_NODE
503 for location information and flag propagation. */
506 gnat_pushdecl (tree decl
, Node_Id gnat_node
)
508 /* If DECL is public external or at top level, it has global context. */
509 if ((TREE_PUBLIC (decl
) && DECL_EXTERNAL (decl
)) || global_bindings_p ())
512 global_context
= build_translation_unit_decl (NULL_TREE
);
513 DECL_CONTEXT (decl
) = global_context
;
517 DECL_CONTEXT (decl
) = current_function_decl
;
519 /* Functions imported in another function are not really nested.
520 For really nested functions mark them initially as needing
521 a static chain for uses of that flag before unnesting;
522 lower_nested_functions will then recompute it. */
523 if (TREE_CODE (decl
) == FUNCTION_DECL
&& !TREE_PUBLIC (decl
))
524 DECL_STATIC_CHAIN (decl
) = 1;
527 TREE_NO_WARNING (decl
) = (No (gnat_node
) || Warnings_Off (gnat_node
));
529 /* Set the location of DECL and emit a declaration for it. */
530 if (Present (gnat_node
))
531 Sloc_to_locus (Sloc (gnat_node
), &DECL_SOURCE_LOCATION (decl
));
533 add_decl_expr (decl
, gnat_node
);
535 /* Put the declaration on the list. The list of declarations is in reverse
536 order. The list will be reversed later. Put global declarations in the
537 globals list and local ones in the current block. But skip TYPE_DECLs
538 for UNCONSTRAINED_ARRAY_TYPE in both cases, as they will cause trouble
539 with the debugger and aren't needed anyway. */
540 if (!(TREE_CODE (decl
) == TYPE_DECL
541 && TREE_CODE (TREE_TYPE (decl
)) == UNCONSTRAINED_ARRAY_TYPE
))
543 if (global_bindings_p ())
545 VEC_safe_push (tree
, gc
, global_decls
, decl
);
547 if (TREE_CODE (decl
) == FUNCTION_DECL
&& DECL_BUILT_IN (decl
))
548 VEC_safe_push (tree
, gc
, builtin_decls
, decl
);
550 else if (!DECL_EXTERNAL (decl
))
552 DECL_CHAIN (decl
) = BLOCK_VARS (current_binding_level
->block
);
553 BLOCK_VARS (current_binding_level
->block
) = decl
;
557 /* For the declaration of a type, set its name if it either is not already
558 set or if the previous type name was not derived from a source name.
559 We'd rather have the type named with a real name and all the pointer
560 types to the same object have the same POINTER_TYPE node. Code in the
561 equivalent function of c-decl.c makes a copy of the type node here, but
562 that may cause us trouble with incomplete types. We make an exception
563 for fat pointer types because the compiler automatically builds them
564 for unconstrained array types and the debugger uses them to represent
565 both these and pointers to these. */
566 if (TREE_CODE (decl
) == TYPE_DECL
&& DECL_NAME (decl
))
568 tree t
= TREE_TYPE (decl
);
570 if (!(TYPE_NAME (t
) && TREE_CODE (TYPE_NAME (t
)) == TYPE_DECL
))
572 /* Array and pointer types aren't "tagged" types so we force the
573 type to be associated with its typedef in the DWARF back-end,
574 in order to make sure that the latter is always preserved. */
575 if (!DECL_ARTIFICIAL (decl
)
576 && (TREE_CODE (t
) == ARRAY_TYPE
577 || TREE_CODE (t
) == POINTER_TYPE
))
579 tree tt
= build_distinct_type_copy (t
);
580 if (TREE_CODE (t
) == POINTER_TYPE
)
581 TYPE_NEXT_PTR_TO (t
) = tt
;
582 TYPE_NAME (tt
) = DECL_NAME (decl
);
583 TYPE_STUB_DECL (tt
) = TYPE_STUB_DECL (t
);
584 DECL_ORIGINAL_TYPE (decl
) = tt
;
587 else if (TYPE_IS_FAT_POINTER_P (t
))
589 /* We need a variant for the placeholder machinery to work. */
590 tree tt
= build_variant_type_copy (t
);
591 TYPE_NAME (tt
) = decl
;
592 TREE_USED (tt
) = TREE_USED (t
);
593 TREE_TYPE (decl
) = tt
;
594 if (DECL_ORIGINAL_TYPE (TYPE_NAME (t
)))
595 DECL_ORIGINAL_TYPE (decl
) = DECL_ORIGINAL_TYPE (TYPE_NAME (t
));
597 DECL_ORIGINAL_TYPE (decl
) = t
;
598 DECL_ARTIFICIAL (decl
) = 0;
601 else if (DECL_ARTIFICIAL (TYPE_NAME (t
)) && !DECL_ARTIFICIAL (decl
))
606 /* Propagate the name to all the anonymous variants. This is needed
607 for the type qualifiers machinery to work properly. */
609 for (t
= TYPE_MAIN_VARIANT (t
); t
; t
= TYPE_NEXT_VARIANT (t
))
610 if (!(TYPE_NAME (t
) && TREE_CODE (TYPE_NAME (t
)) == TYPE_DECL
))
611 TYPE_NAME (t
) = decl
;
615 /* Record TYPE as a builtin type for Ada. NAME is the name of the type.
616 ARTIFICIAL_P is true if it's a type that was generated by the compiler. */
619 record_builtin_type (const char *name
, tree type
, bool artificial_p
)
621 tree type_decl
= build_decl (input_location
,
622 TYPE_DECL
, get_identifier (name
), type
);
623 DECL_ARTIFICIAL (type_decl
) = artificial_p
;
624 TYPE_ARTIFICIAL (type
) = artificial_p
;
625 gnat_pushdecl (type_decl
, Empty
);
627 if (debug_hooks
->type_decl
)
628 debug_hooks
->type_decl (type_decl
, false);
631 /* Given a record type RECORD_TYPE and a list of FIELD_DECL nodes FIELD_LIST,
632 finish constructing the record type as a fat pointer type. */
635 finish_fat_pointer_type (tree record_type
, tree field_list
)
637 /* Make sure we can put it into a register. */
638 TYPE_ALIGN (record_type
) = MIN (BIGGEST_ALIGNMENT
, 2 * POINTER_SIZE
);
640 /* Show what it really is. */
641 TYPE_FAT_POINTER_P (record_type
) = 1;
643 /* Do not emit debug info for it since the types of its fields may still be
644 incomplete at this point. */
645 finish_record_type (record_type
, field_list
, 0, false);
647 /* Force type_contains_placeholder_p to return true on it. Although the
648 PLACEHOLDER_EXPRs are referenced only indirectly, this isn't a pointer
649 type but the representation of the unconstrained array. */
650 TYPE_CONTAINS_PLACEHOLDER_INTERNAL (record_type
) = 2;
653 /* Given a record type RECORD_TYPE and a list of FIELD_DECL nodes FIELD_LIST,
654 finish constructing the record or union type. If REP_LEVEL is zero, this
655 record has no representation clause and so will be entirely laid out here.
656 If REP_LEVEL is one, this record has a representation clause and has been
657 laid out already; only set the sizes and alignment. If REP_LEVEL is two,
658 this record is derived from a parent record and thus inherits its layout;
659 only make a pass on the fields to finalize them. DEBUG_INFO_P is true if
660 we need to write debug information about this type. */
663 finish_record_type (tree record_type
, tree field_list
, int rep_level
,
666 enum tree_code code
= TREE_CODE (record_type
);
667 tree name
= TYPE_NAME (record_type
);
668 tree ada_size
= bitsize_zero_node
;
669 tree size
= bitsize_zero_node
;
670 bool had_size
= TYPE_SIZE (record_type
) != 0;
671 bool had_size_unit
= TYPE_SIZE_UNIT (record_type
) != 0;
672 bool had_align
= TYPE_ALIGN (record_type
) != 0;
675 TYPE_FIELDS (record_type
) = field_list
;
677 /* Always attach the TYPE_STUB_DECL for a record type. It is required to
678 generate debug info and have a parallel type. */
679 if (name
&& TREE_CODE (name
) == TYPE_DECL
)
680 name
= DECL_NAME (name
);
681 TYPE_STUB_DECL (record_type
) = create_type_stub_decl (name
, record_type
);
683 /* Globally initialize the record first. If this is a rep'ed record,
684 that just means some initializations; otherwise, layout the record. */
687 TYPE_ALIGN (record_type
) = MAX (BITS_PER_UNIT
, TYPE_ALIGN (record_type
));
690 TYPE_SIZE_UNIT (record_type
) = size_zero_node
;
693 TYPE_SIZE (record_type
) = bitsize_zero_node
;
695 /* For all-repped records with a size specified, lay the QUAL_UNION_TYPE
696 out just like a UNION_TYPE, since the size will be fixed. */
697 else if (code
== QUAL_UNION_TYPE
)
702 /* Ensure there isn't a size already set. There can be in an error
703 case where there is a rep clause but all fields have errors and
704 no longer have a position. */
705 TYPE_SIZE (record_type
) = 0;
706 layout_type (record_type
);
709 /* At this point, the position and size of each field is known. It was
710 either set before entry by a rep clause, or by laying out the type above.
712 We now run a pass over the fields (in reverse order for QUAL_UNION_TYPEs)
713 to compute the Ada size; the GCC size and alignment (for rep'ed records
714 that are not padding types); and the mode (for rep'ed records). We also
715 clear the DECL_BIT_FIELD indication for the cases we know have not been
716 handled yet, and adjust DECL_NONADDRESSABLE_P accordingly. */
718 if (code
== QUAL_UNION_TYPE
)
719 field_list
= nreverse (field_list
);
721 for (field
= field_list
; field
; field
= DECL_CHAIN (field
))
723 tree type
= TREE_TYPE (field
);
724 tree pos
= bit_position (field
);
725 tree this_size
= DECL_SIZE (field
);
728 if (RECORD_OR_UNION_TYPE_P (type
)
729 && !TYPE_FAT_POINTER_P (type
)
730 && !TYPE_CONTAINS_TEMPLATE_P (type
)
731 && TYPE_ADA_SIZE (type
))
732 this_ada_size
= TYPE_ADA_SIZE (type
);
734 this_ada_size
= this_size
;
736 /* Clear DECL_BIT_FIELD for the cases layout_decl does not handle. */
737 if (DECL_BIT_FIELD (field
)
738 && operand_equal_p (this_size
, TYPE_SIZE (type
), 0))
740 unsigned int align
= TYPE_ALIGN (type
);
742 /* In the general case, type alignment is required. */
743 if (value_factor_p (pos
, align
))
745 /* The enclosing record type must be sufficiently aligned.
746 Otherwise, if no alignment was specified for it and it
747 has been laid out already, bump its alignment to the
748 desired one if this is compatible with its size. */
749 if (TYPE_ALIGN (record_type
) >= align
)
751 DECL_ALIGN (field
) = MAX (DECL_ALIGN (field
), align
);
752 DECL_BIT_FIELD (field
) = 0;
756 && value_factor_p (TYPE_SIZE (record_type
), align
))
758 TYPE_ALIGN (record_type
) = align
;
759 DECL_ALIGN (field
) = MAX (DECL_ALIGN (field
), align
);
760 DECL_BIT_FIELD (field
) = 0;
764 /* In the non-strict alignment case, only byte alignment is. */
765 if (!STRICT_ALIGNMENT
766 && DECL_BIT_FIELD (field
)
767 && value_factor_p (pos
, BITS_PER_UNIT
))
768 DECL_BIT_FIELD (field
) = 0;
771 /* If we still have DECL_BIT_FIELD set at this point, we know that the
772 field is technically not addressable. Except that it can actually
773 be addressed if it is BLKmode and happens to be properly aligned. */
774 if (DECL_BIT_FIELD (field
)
775 && !(DECL_MODE (field
) == BLKmode
776 && value_factor_p (pos
, BITS_PER_UNIT
)))
777 DECL_NONADDRESSABLE_P (field
) = 1;
779 /* A type must be as aligned as its most aligned field that is not
780 a bit-field. But this is already enforced by layout_type. */
781 if (rep_level
> 0 && !DECL_BIT_FIELD (field
))
782 TYPE_ALIGN (record_type
)
783 = MAX (TYPE_ALIGN (record_type
), DECL_ALIGN (field
));
788 ada_size
= size_binop (MAX_EXPR
, ada_size
, this_ada_size
);
789 size
= size_binop (MAX_EXPR
, size
, this_size
);
792 case QUAL_UNION_TYPE
:
794 = fold_build3 (COND_EXPR
, bitsizetype
, DECL_QUALIFIER (field
),
795 this_ada_size
, ada_size
);
796 size
= fold_build3 (COND_EXPR
, bitsizetype
, DECL_QUALIFIER (field
),
801 /* Since we know here that all fields are sorted in order of
802 increasing bit position, the size of the record is one
803 higher than the ending bit of the last field processed
804 unless we have a rep clause, since in that case we might
805 have a field outside a QUAL_UNION_TYPE that has a higher ending
806 position. So use a MAX in that case. Also, if this field is a
807 QUAL_UNION_TYPE, we need to take into account the previous size in
808 the case of empty variants. */
810 = merge_sizes (ada_size
, pos
, this_ada_size
,
811 TREE_CODE (type
) == QUAL_UNION_TYPE
, rep_level
> 0);
813 = merge_sizes (size
, pos
, this_size
,
814 TREE_CODE (type
) == QUAL_UNION_TYPE
, rep_level
> 0);
822 if (code
== QUAL_UNION_TYPE
)
823 nreverse (field_list
);
827 /* If this is a padding record, we never want to make the size smaller
828 than what was specified in it, if any. */
829 if (TYPE_IS_PADDING_P (record_type
) && TYPE_SIZE (record_type
))
830 size
= TYPE_SIZE (record_type
);
832 /* Now set any of the values we've just computed that apply. */
833 if (!TYPE_FAT_POINTER_P (record_type
)
834 && !TYPE_CONTAINS_TEMPLATE_P (record_type
))
835 SET_TYPE_ADA_SIZE (record_type
, ada_size
);
839 tree size_unit
= had_size_unit
840 ? TYPE_SIZE_UNIT (record_type
)
842 size_binop (CEIL_DIV_EXPR
, size
,
844 unsigned int align
= TYPE_ALIGN (record_type
);
846 TYPE_SIZE (record_type
) = variable_size (round_up (size
, align
));
847 TYPE_SIZE_UNIT (record_type
)
848 = variable_size (round_up (size_unit
, align
/ BITS_PER_UNIT
));
850 compute_record_mode (record_type
);
855 rest_of_record_type_compilation (record_type
);
858 /* Wrap up compilation of RECORD_TYPE, i.e. output all the debug information
859 associated with it. It need not be invoked directly in most cases since
860 finish_record_type takes care of doing so, but this can be necessary if
861 a parallel type is to be attached to the record type. */
864 rest_of_record_type_compilation (tree record_type
)
866 bool var_size
= false;
869 /* If this is a padded type, the bulk of the debug info has already been
870 generated for the field's type. */
871 if (TYPE_IS_PADDING_P (record_type
))
874 for (field
= TYPE_FIELDS (record_type
); field
; field
= DECL_CHAIN (field
))
876 /* We need to make an XVE/XVU record if any field has variable size,
877 whether or not the record does. For example, if we have a union,
878 it may be that all fields, rounded up to the alignment, have the
879 same size, in which case we'll use that size. But the debug
880 output routines (except Dwarf2) won't be able to output the fields,
881 so we need to make the special record. */
882 if (TREE_CODE (DECL_SIZE (field
)) != INTEGER_CST
883 /* If a field has a non-constant qualifier, the record will have
884 variable size too. */
885 || (TREE_CODE (record_type
) == QUAL_UNION_TYPE
886 && TREE_CODE (DECL_QUALIFIER (field
)) != INTEGER_CST
))
893 /* If this record type is of variable size, make a parallel record type that
894 will tell the debugger how the former is laid out (see exp_dbug.ads). */
898 = make_node (TREE_CODE (record_type
) == QUAL_UNION_TYPE
899 ? UNION_TYPE
: TREE_CODE (record_type
));
900 tree orig_name
= TYPE_NAME (record_type
), new_name
;
901 tree last_pos
= bitsize_zero_node
;
902 tree old_field
, prev_old_field
= NULL_TREE
;
904 if (TREE_CODE (orig_name
) == TYPE_DECL
)
905 orig_name
= DECL_NAME (orig_name
);
908 = concat_name (orig_name
, TREE_CODE (record_type
) == QUAL_UNION_TYPE
910 TYPE_NAME (new_record_type
) = new_name
;
911 TYPE_ALIGN (new_record_type
) = BIGGEST_ALIGNMENT
;
912 TYPE_STUB_DECL (new_record_type
)
913 = create_type_stub_decl (new_name
, new_record_type
);
914 DECL_IGNORED_P (TYPE_STUB_DECL (new_record_type
))
915 = DECL_IGNORED_P (TYPE_STUB_DECL (record_type
));
916 TYPE_SIZE (new_record_type
) = size_int (TYPE_ALIGN (record_type
));
917 TYPE_SIZE_UNIT (new_record_type
)
918 = size_int (TYPE_ALIGN (record_type
) / BITS_PER_UNIT
);
920 /* Now scan all the fields, replacing each field with a new
921 field corresponding to the new encoding. */
922 for (old_field
= TYPE_FIELDS (record_type
); old_field
;
923 old_field
= DECL_CHAIN (old_field
))
925 tree field_type
= TREE_TYPE (old_field
);
926 tree field_name
= DECL_NAME (old_field
);
928 tree curpos
= bit_position (old_field
);
930 unsigned int align
= 0;
933 /* See how the position was modified from the last position.
935 There are two basic cases we support: a value was added
936 to the last position or the last position was rounded to
937 a boundary and they something was added. Check for the
938 first case first. If not, see if there is any evidence
939 of rounding. If so, round the last position and try
942 If this is a union, the position can be taken as zero. */
944 /* Some computations depend on the shape of the position expression,
945 so strip conversions to make sure it's exposed. */
946 curpos
= remove_conversions (curpos
, true);
948 if (TREE_CODE (new_record_type
) == UNION_TYPE
)
949 pos
= bitsize_zero_node
, align
= 0;
951 pos
= compute_related_constant (curpos
, last_pos
);
953 if (!pos
&& TREE_CODE (curpos
) == MULT_EXPR
954 && host_integerp (TREE_OPERAND (curpos
, 1), 1))
956 tree offset
= TREE_OPERAND (curpos
, 0);
957 align
= tree_low_cst (TREE_OPERAND (curpos
, 1), 1);
959 /* An offset which is a bitwise AND with a negative power of 2
960 means an alignment corresponding to this power of 2. Note
961 that, as sizetype is sign-extended but nonetheless unsigned,
962 we don't directly use tree_int_cst_sgn. */
963 offset
= remove_conversions (offset
, true);
964 if (TREE_CODE (offset
) == BIT_AND_EXPR
965 && host_integerp (TREE_OPERAND (offset
, 1), 0)
966 && TREE_INT_CST_HIGH (TREE_OPERAND (offset
, 1)) < 0)
969 = - tree_low_cst (TREE_OPERAND (offset
, 1), 0);
970 if (exact_log2 (pow
) > 0)
974 pos
= compute_related_constant (curpos
,
975 round_up (last_pos
, align
));
977 else if (!pos
&& TREE_CODE (curpos
) == PLUS_EXPR
978 && TREE_CODE (TREE_OPERAND (curpos
, 1)) == INTEGER_CST
979 && TREE_CODE (TREE_OPERAND (curpos
, 0)) == MULT_EXPR
980 && host_integerp (TREE_OPERAND
981 (TREE_OPERAND (curpos
, 0), 1),
986 (TREE_OPERAND (TREE_OPERAND (curpos
, 0), 1), 1);
987 pos
= compute_related_constant (curpos
,
988 round_up (last_pos
, align
));
990 else if (potential_alignment_gap (prev_old_field
, old_field
,
993 align
= TYPE_ALIGN (field_type
);
994 pos
= compute_related_constant (curpos
,
995 round_up (last_pos
, align
));
998 /* If we can't compute a position, set it to zero.
1000 ??? We really should abort here, but it's too much work
1001 to get this correct for all cases. */
1004 pos
= bitsize_zero_node
;
1006 /* See if this type is variable-sized and make a pointer type
1007 and indicate the indirection if so. Beware that the debug
1008 back-end may adjust the position computed above according
1009 to the alignment of the field type, i.e. the pointer type
1010 in this case, if we don't preventively counter that. */
1011 if (TREE_CODE (DECL_SIZE (old_field
)) != INTEGER_CST
)
1013 field_type
= build_pointer_type (field_type
);
1014 if (align
!= 0 && TYPE_ALIGN (field_type
) > align
)
1016 field_type
= copy_node (field_type
);
1017 TYPE_ALIGN (field_type
) = align
;
1022 /* Make a new field name, if necessary. */
1023 if (var
|| align
!= 0)
1028 sprintf (suffix
, "XV%c%u", var
? 'L' : 'A',
1029 align
/ BITS_PER_UNIT
);
1031 strcpy (suffix
, "XVL");
1033 field_name
= concat_name (field_name
, suffix
);
1037 = create_field_decl (field_name
, field_type
, new_record_type
,
1038 DECL_SIZE (old_field
), pos
, 0, 0);
1039 DECL_CHAIN (new_field
) = TYPE_FIELDS (new_record_type
);
1040 TYPE_FIELDS (new_record_type
) = new_field
;
1042 /* If old_field is a QUAL_UNION_TYPE, take its size as being
1043 zero. The only time it's not the last field of the record
1044 is when there are other components at fixed positions after
1045 it (meaning there was a rep clause for every field) and we
1046 want to be able to encode them. */
1047 last_pos
= size_binop (PLUS_EXPR
, bit_position (old_field
),
1048 (TREE_CODE (TREE_TYPE (old_field
))
1051 : DECL_SIZE (old_field
));
1052 prev_old_field
= old_field
;
1055 TYPE_FIELDS (new_record_type
) = nreverse (TYPE_FIELDS (new_record_type
));
1057 add_parallel_type (TYPE_STUB_DECL (record_type
), new_record_type
);
1061 /* Append PARALLEL_TYPE on the chain of parallel types for decl. */
1064 add_parallel_type (tree decl
, tree parallel_type
)
1068 while (DECL_PARALLEL_TYPE (d
))
1069 d
= TYPE_STUB_DECL (DECL_PARALLEL_TYPE (d
));
1071 SET_DECL_PARALLEL_TYPE (d
, parallel_type
);
1074 /* Utility function of above to merge LAST_SIZE, the previous size of a record
1075 with FIRST_BIT and SIZE that describe a field. SPECIAL is true if this
1076 represents a QUAL_UNION_TYPE in which case we must look for COND_EXPRs and
1077 replace a value of zero with the old size. If HAS_REP is true, we take the
1078 MAX of the end position of this field with LAST_SIZE. In all other cases,
1079 we use FIRST_BIT plus SIZE. Return an expression for the size. */
1082 merge_sizes (tree last_size
, tree first_bit
, tree size
, bool special
,
1085 tree type
= TREE_TYPE (last_size
);
1088 if (!special
|| TREE_CODE (size
) != COND_EXPR
)
1090 new_size
= size_binop (PLUS_EXPR
, first_bit
, size
);
1092 new_size
= size_binop (MAX_EXPR
, last_size
, new_size
);
1096 new_size
= fold_build3 (COND_EXPR
, type
, TREE_OPERAND (size
, 0),
1097 integer_zerop (TREE_OPERAND (size
, 1))
1098 ? last_size
: merge_sizes (last_size
, first_bit
,
1099 TREE_OPERAND (size
, 1),
1101 integer_zerop (TREE_OPERAND (size
, 2))
1102 ? last_size
: merge_sizes (last_size
, first_bit
,
1103 TREE_OPERAND (size
, 2),
1106 /* We don't need any NON_VALUE_EXPRs and they can confuse us (especially
1107 when fed through substitute_in_expr) into thinking that a constant
1108 size is not constant. */
1109 while (TREE_CODE (new_size
) == NON_LVALUE_EXPR
)
1110 new_size
= TREE_OPERAND (new_size
, 0);
1115 /* Utility function of above to see if OP0 and OP1, both of SIZETYPE, are
1116 related by the addition of a constant. Return that constant if so. */
1119 compute_related_constant (tree op0
, tree op1
)
1121 tree op0_var
, op1_var
;
1122 tree op0_con
= split_plus (op0
, &op0_var
);
1123 tree op1_con
= split_plus (op1
, &op1_var
);
1124 tree result
= size_binop (MINUS_EXPR
, op0_con
, op1_con
);
1126 if (operand_equal_p (op0_var
, op1_var
, 0))
1128 else if (operand_equal_p (op0
, size_binop (PLUS_EXPR
, op1_var
, result
), 0))
1134 /* Utility function of above to split a tree OP which may be a sum, into a
1135 constant part, which is returned, and a variable part, which is stored
1136 in *PVAR. *PVAR may be bitsize_zero_node. All operations must be of
1140 split_plus (tree in
, tree
*pvar
)
1142 /* Strip conversions in order to ease the tree traversal and maximize the
1143 potential for constant or plus/minus discovery. We need to be careful
1144 to always return and set *pvar to bitsizetype trees, but it's worth
1146 in
= remove_conversions (in
, false);
1148 *pvar
= convert (bitsizetype
, in
);
1150 if (TREE_CODE (in
) == INTEGER_CST
)
1152 *pvar
= bitsize_zero_node
;
1153 return convert (bitsizetype
, in
);
1155 else if (TREE_CODE (in
) == PLUS_EXPR
|| TREE_CODE (in
) == MINUS_EXPR
)
1157 tree lhs_var
, rhs_var
;
1158 tree lhs_con
= split_plus (TREE_OPERAND (in
, 0), &lhs_var
);
1159 tree rhs_con
= split_plus (TREE_OPERAND (in
, 1), &rhs_var
);
1161 if (lhs_var
== TREE_OPERAND (in
, 0)
1162 && rhs_var
== TREE_OPERAND (in
, 1))
1163 return bitsize_zero_node
;
1165 *pvar
= size_binop (TREE_CODE (in
), lhs_var
, rhs_var
);
1166 return size_binop (TREE_CODE (in
), lhs_con
, rhs_con
);
1169 return bitsize_zero_node
;
1172 /* Return a FUNCTION_TYPE node. RETURN_TYPE is the type returned by the
1173 subprogram. If it is VOID_TYPE, then we are dealing with a procedure,
1174 otherwise we are dealing with a function. PARAM_DECL_LIST is a list of
1175 PARM_DECL nodes that are the subprogram parameters. CICO_LIST is the
1176 copy-in/copy-out list to be stored into the TYPE_CICO_LIST field.
1177 RETURN_UNCONSTRAINED_P is true if the function returns an unconstrained
1178 object. RETURN_BY_DIRECT_REF_P is true if the function returns by direct
1179 reference. RETURN_BY_INVISI_REF_P is true if the function returns by
1180 invisible reference. */
1183 create_subprog_type (tree return_type
, tree param_decl_list
, tree cico_list
,
1184 bool return_unconstrained_p
, bool return_by_direct_ref_p
,
1185 bool return_by_invisi_ref_p
)
1187 /* A list of the data type nodes of the subprogram formal parameters.
1188 This list is generated by traversing the input list of PARM_DECL
1190 VEC(tree
,gc
) *param_type_list
= NULL
;
1193 for (t
= param_decl_list
; t
; t
= DECL_CHAIN (t
))
1194 VEC_safe_push (tree
, gc
, param_type_list
, TREE_TYPE (t
));
1196 type
= build_function_type_vec (return_type
, param_type_list
);
1198 /* TYPE may have been shared since GCC hashes types. If it has a different
1199 CICO_LIST, make a copy. Likewise for the various flags. */
1200 if (!fntype_same_flags_p (type
, cico_list
, return_unconstrained_p
,
1201 return_by_direct_ref_p
, return_by_invisi_ref_p
))
1203 type
= copy_type (type
);
1204 TYPE_CI_CO_LIST (type
) = cico_list
;
1205 TYPE_RETURN_UNCONSTRAINED_P (type
) = return_unconstrained_p
;
1206 TYPE_RETURN_BY_DIRECT_REF_P (type
) = return_by_direct_ref_p
;
1207 TREE_ADDRESSABLE (type
) = return_by_invisi_ref_p
;
1213 /* Return a copy of TYPE but safe to modify in any way. */
1216 copy_type (tree type
)
1218 tree new_type
= copy_node (type
);
1220 /* Unshare the language-specific data. */
1221 if (TYPE_LANG_SPECIFIC (type
))
1223 TYPE_LANG_SPECIFIC (new_type
) = NULL
;
1224 SET_TYPE_LANG_SPECIFIC (new_type
, GET_TYPE_LANG_SPECIFIC (type
));
1227 /* And the contents of the language-specific slot if needed. */
1228 if ((INTEGRAL_TYPE_P (type
) || TREE_CODE (type
) == REAL_TYPE
)
1229 && TYPE_RM_VALUES (type
))
1231 TYPE_RM_VALUES (new_type
) = NULL_TREE
;
1232 SET_TYPE_RM_SIZE (new_type
, TYPE_RM_SIZE (type
));
1233 SET_TYPE_RM_MIN_VALUE (new_type
, TYPE_RM_MIN_VALUE (type
));
1234 SET_TYPE_RM_MAX_VALUE (new_type
, TYPE_RM_MAX_VALUE (type
));
1237 /* copy_node clears this field instead of copying it, because it is
1238 aliased with TREE_CHAIN. */
1239 TYPE_STUB_DECL (new_type
) = TYPE_STUB_DECL (type
);
1241 TYPE_POINTER_TO (new_type
) = 0;
1242 TYPE_REFERENCE_TO (new_type
) = 0;
1243 TYPE_MAIN_VARIANT (new_type
) = new_type
;
1244 TYPE_NEXT_VARIANT (new_type
) = 0;
1249 /* Return a subtype of sizetype with range MIN to MAX and whose
1250 TYPE_INDEX_TYPE is INDEX. GNAT_NODE is used for the position
1251 of the associated TYPE_DECL. */
1254 create_index_type (tree min
, tree max
, tree index
, Node_Id gnat_node
)
1256 /* First build a type for the desired range. */
1257 tree type
= build_nonshared_range_type (sizetype
, min
, max
);
1259 /* Then set the index type. */
1260 SET_TYPE_INDEX_TYPE (type
, index
);
1261 create_type_decl (NULL_TREE
, type
, NULL
, true, false, gnat_node
);
1266 /* Return a subtype of TYPE with range MIN to MAX. If TYPE is NULL,
1267 sizetype is used. */
1270 create_range_type (tree type
, tree min
, tree max
)
1274 if (type
== NULL_TREE
)
1277 /* First build a type with the base range. */
1278 range_type
= build_nonshared_range_type (type
, TYPE_MIN_VALUE (type
),
1279 TYPE_MAX_VALUE (type
));
1281 /* Then set the actual range. */
1282 SET_TYPE_RM_MIN_VALUE (range_type
, convert (type
, min
));
1283 SET_TYPE_RM_MAX_VALUE (range_type
, convert (type
, max
));
1288 /* Return a TYPE_DECL node suitable for the TYPE_STUB_DECL field of a type.
1289 TYPE_NAME gives the name of the type and TYPE is a ..._TYPE node giving
1293 create_type_stub_decl (tree type_name
, tree type
)
1295 /* Using a named TYPE_DECL ensures that a type name marker is emitted in
1296 STABS while setting DECL_ARTIFICIAL ensures that no DW_TAG_typedef is
1297 emitted in DWARF. */
1298 tree type_decl
= build_decl (input_location
,
1299 TYPE_DECL
, type_name
, type
);
1300 DECL_ARTIFICIAL (type_decl
) = 1;
1301 TYPE_ARTIFICIAL (type
) = 1;
1305 /* Return a TYPE_DECL node. TYPE_NAME gives the name of the type and TYPE
1306 is a ..._TYPE node giving its data type. ARTIFICIAL_P is true if this
1307 is a declaration that was generated by the compiler. DEBUG_INFO_P is
1308 true if we need to write debug information about this type. GNAT_NODE
1309 is used for the position of the decl. */
1312 create_type_decl (tree type_name
, tree type
, struct attrib
*attr_list
,
1313 bool artificial_p
, bool debug_info_p
, Node_Id gnat_node
)
1315 enum tree_code code
= TREE_CODE (type
);
1316 bool named
= TYPE_NAME (type
) && TREE_CODE (TYPE_NAME (type
)) == TYPE_DECL
;
1319 /* Only the builtin TYPE_STUB_DECL should be used for dummy types. */
1320 gcc_assert (!TYPE_IS_DUMMY_P (type
));
1322 /* If the type hasn't been named yet, we're naming it; preserve an existing
1323 TYPE_STUB_DECL that has been attached to it for some purpose. */
1324 if (!named
&& TYPE_STUB_DECL (type
))
1326 type_decl
= TYPE_STUB_DECL (type
);
1327 DECL_NAME (type_decl
) = type_name
;
1330 type_decl
= build_decl (input_location
,
1331 TYPE_DECL
, type_name
, type
);
1333 DECL_ARTIFICIAL (type_decl
) = artificial_p
;
1334 TYPE_ARTIFICIAL (type
) = artificial_p
;
1336 /* Add this decl to the current binding level. */
1337 gnat_pushdecl (type_decl
, gnat_node
);
1339 process_attributes (type_decl
, attr_list
);
1341 /* If we're naming the type, equate the TYPE_STUB_DECL to the name.
1342 This causes the name to be also viewed as a "tag" by the debug
1343 back-end, with the advantage that no DW_TAG_typedef is emitted
1344 for artificial "tagged" types in DWARF. */
1346 TYPE_STUB_DECL (type
) = type_decl
;
1348 /* Do not generate debug info for UNCONSTRAINED_ARRAY_TYPE that the
1349 back-end doesn't support, and for others if we don't need to. */
1350 if (code
== UNCONSTRAINED_ARRAY_TYPE
|| !debug_info_p
)
1351 DECL_IGNORED_P (type_decl
) = 1;
1356 /* Return a VAR_DECL or CONST_DECL node.
1358 VAR_NAME gives the name of the variable. ASM_NAME is its assembler name
1359 (if provided). TYPE is its data type (a GCC ..._TYPE node). VAR_INIT is
1360 the GCC tree for an optional initial expression; NULL_TREE if none.
1362 CONST_FLAG is true if this variable is constant, in which case we might
1363 return a CONST_DECL node unless CONST_DECL_ALLOWED_P is false.
1365 PUBLIC_FLAG is true if this is for a reference to a public entity or for a
1366 definition to be made visible outside of the current compilation unit, for
1367 instance variable definitions in a package specification.
1369 EXTERN_FLAG is true when processing an external variable declaration (as
1370 opposed to a definition: no storage is to be allocated for the variable).
1372 STATIC_FLAG is only relevant when not at top level. In that case
1373 it indicates whether to always allocate storage to the variable.
1375 GNAT_NODE is used for the position of the decl. */
1378 create_var_decl_1 (tree var_name
, tree asm_name
, tree type
, tree var_init
,
1379 bool const_flag
, bool public_flag
, bool extern_flag
,
1380 bool static_flag
, bool const_decl_allowed_p
,
1381 struct attrib
*attr_list
, Node_Id gnat_node
)
1383 /* Whether the initializer is a constant initializer. At the global level
1384 or for an external object or an object to be allocated in static memory,
1385 we check that it is a valid constant expression for use in initializing
1386 a static variable; otherwise, we only check that it is constant. */
1389 && gnat_types_compatible_p (type
, TREE_TYPE (var_init
))
1390 && (global_bindings_p () || extern_flag
|| static_flag
1391 ? initializer_constant_valid_p (var_init
, TREE_TYPE (var_init
)) != 0
1392 : TREE_CONSTANT (var_init
)));
1394 /* Whether we will make TREE_CONSTANT the DECL we produce here, in which
1395 case the initializer may be used in-lieu of the DECL node (as done in
1396 Identifier_to_gnu). This is useful to prevent the need of elaboration
1397 code when an identifier for which such a decl is made is in turn used as
1398 an initializer. We used to rely on CONST vs VAR_DECL for this purpose,
1399 but extra constraints apply to this choice (see below) and are not
1400 relevant to the distinction we wish to make. */
1401 bool constant_p
= const_flag
&& init_const
;
1403 /* The actual DECL node. CONST_DECL was initially intended for enumerals
1404 and may be used for scalars in general but not for aggregates. */
1406 = build_decl (input_location
,
1407 (constant_p
&& const_decl_allowed_p
1408 && !AGGREGATE_TYPE_P (type
)) ? CONST_DECL
: VAR_DECL
,
1411 /* If this is external, throw away any initializations (they will be done
1412 elsewhere) unless this is a constant for which we would like to remain
1413 able to get the initializer. If we are defining a global here, leave a
1414 constant initialization and save any variable elaborations for the
1415 elaboration routine. If we are just annotating types, throw away the
1416 initialization if it isn't a constant. */
1417 if ((extern_flag
&& !constant_p
)
1418 || (type_annotate_only
&& var_init
&& !TREE_CONSTANT (var_init
)))
1419 var_init
= NULL_TREE
;
1421 /* At the global level, an initializer requiring code to be generated
1422 produces elaboration statements. Check that such statements are allowed,
1423 that is, not violating a No_Elaboration_Code restriction. */
1424 if (global_bindings_p () && var_init
!= 0 && !init_const
)
1425 Check_Elaboration_Code_Allowed (gnat_node
);
1427 DECL_INITIAL (var_decl
) = var_init
;
1428 TREE_READONLY (var_decl
) = const_flag
;
1429 DECL_EXTERNAL (var_decl
) = extern_flag
;
1430 TREE_PUBLIC (var_decl
) = public_flag
|| extern_flag
;
1431 TREE_CONSTANT (var_decl
) = constant_p
;
1432 TREE_THIS_VOLATILE (var_decl
) = TREE_SIDE_EFFECTS (var_decl
)
1433 = TYPE_VOLATILE (type
);
1435 /* Ada doesn't feature Fortran-like COMMON variables so we shouldn't
1436 try to fiddle with DECL_COMMON. However, on platforms that don't
1437 support global BSS sections, uninitialized global variables would
1438 go in DATA instead, thus increasing the size of the executable. */
1440 && TREE_CODE (var_decl
) == VAR_DECL
1441 && TREE_PUBLIC (var_decl
)
1442 && !have_global_bss_p ())
1443 DECL_COMMON (var_decl
) = 1;
1445 /* At the global binding level, we need to allocate static storage for the
1446 variable if it isn't external. Otherwise, we allocate automatic storage
1447 unless requested not to. */
1448 TREE_STATIC (var_decl
)
1449 = !extern_flag
&& (static_flag
|| global_bindings_p ());
1451 /* For an external constant whose initializer is not absolute, do not emit
1452 debug info. In DWARF this would mean a global relocation in a read-only
1453 section which runs afoul of the PE-COFF run-time relocation mechanism. */
1457 && initializer_constant_valid_p (var_init
, TREE_TYPE (var_init
))
1458 != null_pointer_node
)
1459 DECL_IGNORED_P (var_decl
) = 1;
1461 /* Add this decl to the current binding level. */
1462 gnat_pushdecl (var_decl
, gnat_node
);
1464 if (TREE_SIDE_EFFECTS (var_decl
))
1465 TREE_ADDRESSABLE (var_decl
) = 1;
1467 if (TREE_CODE (var_decl
) == VAR_DECL
)
1470 SET_DECL_ASSEMBLER_NAME (var_decl
, asm_name
);
1471 process_attributes (var_decl
, attr_list
);
1472 if (global_bindings_p ())
1473 rest_of_decl_compilation (var_decl
, true, 0);
1476 expand_decl (var_decl
);
1481 /* Return true if TYPE, an aggregate type, contains (or is) an array. */
1484 aggregate_type_contains_array_p (tree type
)
1486 switch (TREE_CODE (type
))
1490 case QUAL_UNION_TYPE
:
1493 for (field
= TYPE_FIELDS (type
); field
; field
= DECL_CHAIN (field
))
1494 if (AGGREGATE_TYPE_P (TREE_TYPE (field
))
1495 && aggregate_type_contains_array_p (TREE_TYPE (field
)))
1508 /* Return a FIELD_DECL node. FIELD_NAME is the field's name, FIELD_TYPE is
1509 its type and RECORD_TYPE is the type of the enclosing record. If SIZE is
1510 nonzero, it is the specified size of the field. If POS is nonzero, it is
1511 the bit position. PACKED is 1 if the enclosing record is packed, -1 if it
1512 has Component_Alignment of Storage_Unit. If ADDRESSABLE is nonzero, it
1513 means we are allowed to take the address of the field; if it is negative,
1514 we should not make a bitfield, which is used by make_aligning_type. */
1517 create_field_decl (tree field_name
, tree field_type
, tree record_type
,
1518 tree size
, tree pos
, int packed
, int addressable
)
1520 tree field_decl
= build_decl (input_location
,
1521 FIELD_DECL
, field_name
, field_type
);
1523 DECL_CONTEXT (field_decl
) = record_type
;
1524 TREE_READONLY (field_decl
) = TYPE_READONLY (field_type
);
1526 /* If FIELD_TYPE is BLKmode, we must ensure this is aligned to at least a
1527 byte boundary since GCC cannot handle less-aligned BLKmode bitfields.
1528 Likewise for an aggregate without specified position that contains an
1529 array, because in this case slices of variable length of this array
1530 must be handled by GCC and variable-sized objects need to be aligned
1531 to at least a byte boundary. */
1532 if (packed
&& (TYPE_MODE (field_type
) == BLKmode
1534 && AGGREGATE_TYPE_P (field_type
)
1535 && aggregate_type_contains_array_p (field_type
))))
1536 DECL_ALIGN (field_decl
) = BITS_PER_UNIT
;
1538 /* If a size is specified, use it. Otherwise, if the record type is packed
1539 compute a size to use, which may differ from the object's natural size.
1540 We always set a size in this case to trigger the checks for bitfield
1541 creation below, which is typically required when no position has been
1544 size
= convert (bitsizetype
, size
);
1545 else if (packed
== 1)
1547 size
= rm_size (field_type
);
1548 if (TYPE_MODE (field_type
) == BLKmode
)
1549 size
= round_up (size
, BITS_PER_UNIT
);
1552 /* If we may, according to ADDRESSABLE, make a bitfield if a size is
1553 specified for two reasons: first if the size differs from the natural
1554 size. Second, if the alignment is insufficient. There are a number of
1555 ways the latter can be true.
1557 We never make a bitfield if the type of the field has a nonconstant size,
1558 because no such entity requiring bitfield operations should reach here.
1560 We do *preventively* make a bitfield when there might be the need for it
1561 but we don't have all the necessary information to decide, as is the case
1562 of a field with no specified position in a packed record.
1564 We also don't look at STRICT_ALIGNMENT here, and rely on later processing
1565 in layout_decl or finish_record_type to clear the bit_field indication if
1566 it is in fact not needed. */
1567 if (addressable
>= 0
1569 && TREE_CODE (size
) == INTEGER_CST
1570 && TREE_CODE (TYPE_SIZE (field_type
)) == INTEGER_CST
1571 && (!tree_int_cst_equal (size
, TYPE_SIZE (field_type
))
1572 || (pos
&& !value_factor_p (pos
, TYPE_ALIGN (field_type
)))
1574 || (TYPE_ALIGN (record_type
) != 0
1575 && TYPE_ALIGN (record_type
) < TYPE_ALIGN (field_type
))))
1577 DECL_BIT_FIELD (field_decl
) = 1;
1578 DECL_SIZE (field_decl
) = size
;
1579 if (!packed
&& !pos
)
1581 if (TYPE_ALIGN (record_type
) != 0
1582 && TYPE_ALIGN (record_type
) < TYPE_ALIGN (field_type
))
1583 DECL_ALIGN (field_decl
) = TYPE_ALIGN (record_type
);
1585 DECL_ALIGN (field_decl
) = TYPE_ALIGN (field_type
);
1589 DECL_PACKED (field_decl
) = pos
? DECL_BIT_FIELD (field_decl
) : packed
;
1591 /* Bump the alignment if need be, either for bitfield/packing purposes or
1592 to satisfy the type requirements if no such consideration applies. When
1593 we get the alignment from the type, indicate if this is from an explicit
1594 user request, which prevents stor-layout from lowering it later on. */
1596 unsigned int bit_align
1597 = (DECL_BIT_FIELD (field_decl
) ? 1
1598 : packed
&& TYPE_MODE (field_type
) != BLKmode
? BITS_PER_UNIT
: 0);
1600 if (bit_align
> DECL_ALIGN (field_decl
))
1601 DECL_ALIGN (field_decl
) = bit_align
;
1602 else if (!bit_align
&& TYPE_ALIGN (field_type
) > DECL_ALIGN (field_decl
))
1604 DECL_ALIGN (field_decl
) = TYPE_ALIGN (field_type
);
1605 DECL_USER_ALIGN (field_decl
) = TYPE_USER_ALIGN (field_type
);
1611 /* We need to pass in the alignment the DECL is known to have.
1612 This is the lowest-order bit set in POS, but no more than
1613 the alignment of the record, if one is specified. Note
1614 that an alignment of 0 is taken as infinite. */
1615 unsigned int known_align
;
1617 if (host_integerp (pos
, 1))
1618 known_align
= tree_low_cst (pos
, 1) & - tree_low_cst (pos
, 1);
1620 known_align
= BITS_PER_UNIT
;
1622 if (TYPE_ALIGN (record_type
)
1623 && (known_align
== 0 || known_align
> TYPE_ALIGN (record_type
)))
1624 known_align
= TYPE_ALIGN (record_type
);
1626 layout_decl (field_decl
, known_align
);
1627 SET_DECL_OFFSET_ALIGN (field_decl
,
1628 host_integerp (pos
, 1) ? BIGGEST_ALIGNMENT
1630 pos_from_bit (&DECL_FIELD_OFFSET (field_decl
),
1631 &DECL_FIELD_BIT_OFFSET (field_decl
),
1632 DECL_OFFSET_ALIGN (field_decl
), pos
);
1635 /* In addition to what our caller says, claim the field is addressable if we
1636 know that its type is not suitable.
1638 The field may also be "technically" nonaddressable, meaning that even if
1639 we attempt to take the field's address we will actually get the address
1640 of a copy. This is the case for true bitfields, but the DECL_BIT_FIELD
1641 value we have at this point is not accurate enough, so we don't account
1642 for this here and let finish_record_type decide. */
1643 if (!addressable
&& !type_for_nonaliased_component_p (field_type
))
1646 DECL_NONADDRESSABLE_P (field_decl
) = !addressable
;
1651 /* Return a PARM_DECL node. PARAM_NAME is the name of the parameter and
1652 PARAM_TYPE is its type. READONLY is true if the parameter is readonly
1653 (either an In parameter or an address of a pass-by-ref parameter). */
1656 create_param_decl (tree param_name
, tree param_type
, bool readonly
)
1658 tree param_decl
= build_decl (input_location
,
1659 PARM_DECL
, param_name
, param_type
);
1661 /* Honor TARGET_PROMOTE_PROTOTYPES like the C compiler, as not doing so
1662 can lead to various ABI violations. */
1663 if (targetm
.calls
.promote_prototypes (NULL_TREE
)
1664 && INTEGRAL_TYPE_P (param_type
)
1665 && TYPE_PRECISION (param_type
) < TYPE_PRECISION (integer_type_node
))
1667 /* We have to be careful about biased types here. Make a subtype
1668 of integer_type_node with the proper biasing. */
1669 if (TREE_CODE (param_type
) == INTEGER_TYPE
1670 && TYPE_BIASED_REPRESENTATION_P (param_type
))
1673 = make_unsigned_type (TYPE_PRECISION (integer_type_node
));
1674 TREE_TYPE (subtype
) = integer_type_node
;
1675 TYPE_BIASED_REPRESENTATION_P (subtype
) = 1;
1676 SET_TYPE_RM_MIN_VALUE (subtype
, TYPE_MIN_VALUE (param_type
));
1677 SET_TYPE_RM_MAX_VALUE (subtype
, TYPE_MAX_VALUE (param_type
));
1678 param_type
= subtype
;
1681 param_type
= integer_type_node
;
1684 DECL_ARG_TYPE (param_decl
) = param_type
;
1685 TREE_READONLY (param_decl
) = readonly
;
1689 /* Given a DECL and ATTR_LIST, process the listed attributes. */
1692 process_attributes (tree decl
, struct attrib
*attr_list
)
1694 for (; attr_list
; attr_list
= attr_list
->next
)
1695 switch (attr_list
->type
)
1697 case ATTR_MACHINE_ATTRIBUTE
:
1698 input_location
= DECL_SOURCE_LOCATION (decl
);
1699 decl_attributes (&decl
, tree_cons (attr_list
->name
, attr_list
->args
,
1701 ATTR_FLAG_TYPE_IN_PLACE
);
1704 case ATTR_LINK_ALIAS
:
1705 if (! DECL_EXTERNAL (decl
))
1707 TREE_STATIC (decl
) = 1;
1708 assemble_alias (decl
, attr_list
->name
);
1712 case ATTR_WEAK_EXTERNAL
:
1714 declare_weak (decl
);
1716 post_error ("?weak declarations not supported on this target",
1717 attr_list
->error_point
);
1720 case ATTR_LINK_SECTION
:
1721 if (targetm_common
.have_named_sections
)
1723 DECL_SECTION_NAME (decl
)
1724 = build_string (IDENTIFIER_LENGTH (attr_list
->name
),
1725 IDENTIFIER_POINTER (attr_list
->name
));
1726 DECL_COMMON (decl
) = 0;
1729 post_error ("?section attributes are not supported for this target",
1730 attr_list
->error_point
);
1733 case ATTR_LINK_CONSTRUCTOR
:
1734 DECL_STATIC_CONSTRUCTOR (decl
) = 1;
1735 TREE_USED (decl
) = 1;
1738 case ATTR_LINK_DESTRUCTOR
:
1739 DECL_STATIC_DESTRUCTOR (decl
) = 1;
1740 TREE_USED (decl
) = 1;
1743 case ATTR_THREAD_LOCAL_STORAGE
:
1744 DECL_TLS_MODEL (decl
) = decl_default_tls_model (decl
);
1745 DECL_COMMON (decl
) = 0;
1750 /* Record DECL as a global renaming pointer. */
1753 record_global_renaming_pointer (tree decl
)
1755 gcc_assert (!DECL_LOOP_PARM_P (decl
) && DECL_RENAMED_OBJECT (decl
));
1756 VEC_safe_push (tree
, gc
, global_renaming_pointers
, decl
);
1759 /* Invalidate the global renaming pointers. */
1762 invalidate_global_renaming_pointers (void)
1767 FOR_EACH_VEC_ELT (tree
, global_renaming_pointers
, i
, iter
)
1768 SET_DECL_RENAMED_OBJECT (iter
, NULL_TREE
);
1770 VEC_free (tree
, gc
, global_renaming_pointers
);
1773 /* Return true if VALUE is a known to be a multiple of FACTOR, which must be
1777 value_factor_p (tree value
, HOST_WIDE_INT factor
)
1779 if (host_integerp (value
, 1))
1780 return tree_low_cst (value
, 1) % factor
== 0;
1782 if (TREE_CODE (value
) == MULT_EXPR
)
1783 return (value_factor_p (TREE_OPERAND (value
, 0), factor
)
1784 || value_factor_p (TREE_OPERAND (value
, 1), factor
));
1789 /* Given two consecutive field decls PREV_FIELD and CURR_FIELD, return true
1790 unless we can prove these 2 fields are laid out in such a way that no gap
1791 exist between the end of PREV_FIELD and the beginning of CURR_FIELD. OFFSET
1792 is the distance in bits between the end of PREV_FIELD and the starting
1793 position of CURR_FIELD. It is ignored if null. */
1796 potential_alignment_gap (tree prev_field
, tree curr_field
, tree offset
)
1798 /* If this is the first field of the record, there cannot be any gap */
1802 /* If the previous field is a union type, then return False: The only
1803 time when such a field is not the last field of the record is when
1804 there are other components at fixed positions after it (meaning there
1805 was a rep clause for every field), in which case we don't want the
1806 alignment constraint to override them. */
1807 if (TREE_CODE (TREE_TYPE (prev_field
)) == QUAL_UNION_TYPE
)
1810 /* If the distance between the end of prev_field and the beginning of
1811 curr_field is constant, then there is a gap if the value of this
1812 constant is not null. */
1813 if (offset
&& host_integerp (offset
, 1))
1814 return !integer_zerop (offset
);
1816 /* If the size and position of the previous field are constant,
1817 then check the sum of this size and position. There will be a gap
1818 iff it is not multiple of the current field alignment. */
1819 if (host_integerp (DECL_SIZE (prev_field
), 1)
1820 && host_integerp (bit_position (prev_field
), 1))
1821 return ((tree_low_cst (bit_position (prev_field
), 1)
1822 + tree_low_cst (DECL_SIZE (prev_field
), 1))
1823 % DECL_ALIGN (curr_field
) != 0);
1825 /* If both the position and size of the previous field are multiples
1826 of the current field alignment, there cannot be any gap. */
1827 if (value_factor_p (bit_position (prev_field
), DECL_ALIGN (curr_field
))
1828 && value_factor_p (DECL_SIZE (prev_field
), DECL_ALIGN (curr_field
)))
1831 /* Fallback, return that there may be a potential gap */
1835 /* Return a LABEL_DECL with LABEL_NAME. GNAT_NODE is used for the position
1839 create_label_decl (tree label_name
, Node_Id gnat_node
)
1842 = build_decl (input_location
, LABEL_DECL
, label_name
, void_type_node
);
1844 DECL_MODE (label_decl
) = VOIDmode
;
1846 /* Add this decl to the current binding level. */
1847 gnat_pushdecl (label_decl
, gnat_node
);
1852 /* Return a FUNCTION_DECL node. SUBPROG_NAME is the name of the subprogram,
1853 ASM_NAME is its assembler name, SUBPROG_TYPE is its type (a FUNCTION_TYPE
1854 node), PARAM_DECL_LIST is the list of the subprogram arguments (a list of
1855 PARM_DECL nodes chained through the DECL_CHAIN field).
1857 INLINE_FLAG, PUBLIC_FLAG, EXTERN_FLAG, ARTIFICIAL_FLAG and ATTR_LIST are
1858 used to set the appropriate fields in the FUNCTION_DECL. GNAT_NODE is
1859 used for the position of the decl. */
1862 create_subprog_decl (tree subprog_name
, tree asm_name
, tree subprog_type
,
1863 tree param_decl_list
, bool inline_flag
, bool public_flag
,
1864 bool extern_flag
, bool artificial_flag
,
1865 struct attrib
*attr_list
, Node_Id gnat_node
)
1867 tree subprog_decl
= build_decl (input_location
, FUNCTION_DECL
, subprog_name
,
1869 tree result_decl
= build_decl (input_location
, RESULT_DECL
, NULL_TREE
,
1870 TREE_TYPE (subprog_type
));
1871 DECL_ARGUMENTS (subprog_decl
) = param_decl_list
;
1873 /* If this is a non-inline function nested inside an inlined external
1874 function, we cannot honor both requests without cloning the nested
1875 function in the current unit since it is private to the other unit.
1876 We could inline the nested function as well but it's probably better
1877 to err on the side of too little inlining. */
1880 && current_function_decl
1881 && DECL_DECLARED_INLINE_P (current_function_decl
)
1882 && DECL_EXTERNAL (current_function_decl
))
1883 DECL_DECLARED_INLINE_P (current_function_decl
) = 0;
1885 DECL_ARTIFICIAL (subprog_decl
) = artificial_flag
;
1886 DECL_EXTERNAL (subprog_decl
) = extern_flag
;
1887 DECL_DECLARED_INLINE_P (subprog_decl
) = inline_flag
;
1888 DECL_NO_INLINE_WARNING_P (subprog_decl
) = inline_flag
&& artificial_flag
;
1890 TREE_PUBLIC (subprog_decl
) = public_flag
;
1891 TREE_READONLY (subprog_decl
) = TYPE_READONLY (subprog_type
);
1892 TREE_THIS_VOLATILE (subprog_decl
) = TYPE_VOLATILE (subprog_type
);
1893 TREE_SIDE_EFFECTS (subprog_decl
) = TYPE_VOLATILE (subprog_type
);
1895 DECL_ARTIFICIAL (result_decl
) = 1;
1896 DECL_IGNORED_P (result_decl
) = 1;
1897 DECL_BY_REFERENCE (result_decl
) = TREE_ADDRESSABLE (subprog_type
);
1898 DECL_RESULT (subprog_decl
) = result_decl
;
1902 SET_DECL_ASSEMBLER_NAME (subprog_decl
, asm_name
);
1904 /* The expand_main_function circuitry expects "main_identifier_node" to
1905 designate the DECL_NAME of the 'main' entry point, in turn expected
1906 to be declared as the "main" function literally by default. Ada
1907 program entry points are typically declared with a different name
1908 within the binder generated file, exported as 'main' to satisfy the
1909 system expectations. Force main_identifier_node in this case. */
1910 if (asm_name
== main_identifier_node
)
1911 DECL_NAME (subprog_decl
) = main_identifier_node
;
1914 /* Add this decl to the current binding level. */
1915 gnat_pushdecl (subprog_decl
, gnat_node
);
1917 process_attributes (subprog_decl
, attr_list
);
1919 /* Output the assembler code and/or RTL for the declaration. */
1920 rest_of_decl_compilation (subprog_decl
, global_bindings_p (), 0);
1922 return subprog_decl
;
1925 /* Set up the framework for generating code for SUBPROG_DECL, a subprogram
1926 body. This routine needs to be invoked before processing the declarations
1927 appearing in the subprogram. */
1930 begin_subprog_body (tree subprog_decl
)
1934 announce_function (subprog_decl
);
1936 /* This function is being defined. */
1937 TREE_STATIC (subprog_decl
) = 1;
1939 current_function_decl
= subprog_decl
;
1941 /* Enter a new binding level and show that all the parameters belong to
1945 for (param_decl
= DECL_ARGUMENTS (subprog_decl
); param_decl
;
1946 param_decl
= DECL_CHAIN (param_decl
))
1947 DECL_CONTEXT (param_decl
) = subprog_decl
;
1949 make_decl_rtl (subprog_decl
);
1952 /* Finish translating the current subprogram and set its BODY. */
1955 end_subprog_body (tree body
)
1957 tree fndecl
= current_function_decl
;
1959 /* Attach the BLOCK for this level to the function and pop the level. */
1960 BLOCK_SUPERCONTEXT (current_binding_level
->block
) = fndecl
;
1961 DECL_INITIAL (fndecl
) = current_binding_level
->block
;
1964 /* Mark the RESULT_DECL as being in this subprogram. */
1965 DECL_CONTEXT (DECL_RESULT (fndecl
)) = fndecl
;
1967 /* The body should be a BIND_EXPR whose BLOCK is the top-level one. */
1968 if (TREE_CODE (body
) == BIND_EXPR
)
1970 BLOCK_SUPERCONTEXT (BIND_EXPR_BLOCK (body
)) = fndecl
;
1971 DECL_INITIAL (fndecl
) = BIND_EXPR_BLOCK (body
);
1974 DECL_SAVED_TREE (fndecl
) = body
;
1976 current_function_decl
= decl_function_context (fndecl
);
1979 /* Wrap up compilation of SUBPROG_DECL, a subprogram body. */
1982 rest_of_subprog_body_compilation (tree subprog_decl
)
1984 /* We cannot track the location of errors past this point. */
1985 error_gnat_node
= Empty
;
1987 /* If we're only annotating types, don't actually compile this function. */
1988 if (type_annotate_only
)
1991 /* Dump functions before gimplification. */
1992 dump_function (TDI_original
, subprog_decl
);
1994 /* ??? This special handling of nested functions is probably obsolete. */
1995 if (!decl_function_context (subprog_decl
))
1996 cgraph_finalize_function (subprog_decl
, false);
1998 /* Register this function with cgraph just far enough to get it
1999 added to our parent's nested function list. */
2000 (void) cgraph_get_create_node (subprog_decl
);
2004 gnat_builtin_function (tree decl
)
2006 gnat_pushdecl (decl
, Empty
);
2010 /* Return an integer type with the number of bits of precision given by
2011 PRECISION. UNSIGNEDP is nonzero if the type is unsigned; otherwise
2012 it is a signed type. */
2015 gnat_type_for_size (unsigned precision
, int unsignedp
)
2020 if (precision
<= 2 * MAX_BITS_PER_WORD
2021 && signed_and_unsigned_types
[precision
][unsignedp
])
2022 return signed_and_unsigned_types
[precision
][unsignedp
];
2025 t
= make_unsigned_type (precision
);
2027 t
= make_signed_type (precision
);
2029 if (precision
<= 2 * MAX_BITS_PER_WORD
)
2030 signed_and_unsigned_types
[precision
][unsignedp
] = t
;
2034 sprintf (type_name
, "%sSIGNED_%d", unsignedp
? "UN" : "", precision
);
2035 TYPE_NAME (t
) = get_identifier (type_name
);
2041 /* Likewise for floating-point types. */
2044 float_type_for_precision (int precision
, enum machine_mode mode
)
2049 if (float_types
[(int) mode
])
2050 return float_types
[(int) mode
];
2052 float_types
[(int) mode
] = t
= make_node (REAL_TYPE
);
2053 TYPE_PRECISION (t
) = precision
;
2056 gcc_assert (TYPE_MODE (t
) == mode
);
2059 sprintf (type_name
, "FLOAT_%d", precision
);
2060 TYPE_NAME (t
) = get_identifier (type_name
);
2066 /* Return a data type that has machine mode MODE. UNSIGNEDP selects
2067 an unsigned type; otherwise a signed type is returned. */
2070 gnat_type_for_mode (enum machine_mode mode
, int unsignedp
)
2072 if (mode
== BLKmode
)
2075 if (mode
== VOIDmode
)
2076 return void_type_node
;
2078 if (COMPLEX_MODE_P (mode
))
2081 if (SCALAR_FLOAT_MODE_P (mode
))
2082 return float_type_for_precision (GET_MODE_PRECISION (mode
), mode
);
2084 if (SCALAR_INT_MODE_P (mode
))
2085 return gnat_type_for_size (GET_MODE_BITSIZE (mode
), unsignedp
);
2087 if (VECTOR_MODE_P (mode
))
2089 enum machine_mode inner_mode
= GET_MODE_INNER (mode
);
2090 tree inner_type
= gnat_type_for_mode (inner_mode
, unsignedp
);
2092 return build_vector_type_for_mode (inner_type
, mode
);
2098 /* Return the unsigned version of a TYPE_NODE, a scalar type. */
2101 gnat_unsigned_type (tree type_node
)
2103 tree type
= gnat_type_for_size (TYPE_PRECISION (type_node
), 1);
2105 if (TREE_CODE (type_node
) == INTEGER_TYPE
&& TYPE_MODULAR_P (type_node
))
2107 type
= copy_node (type
);
2108 TREE_TYPE (type
) = type_node
;
2110 else if (TREE_TYPE (type_node
)
2111 && TREE_CODE (TREE_TYPE (type_node
)) == INTEGER_TYPE
2112 && TYPE_MODULAR_P (TREE_TYPE (type_node
)))
2114 type
= copy_node (type
);
2115 TREE_TYPE (type
) = TREE_TYPE (type_node
);
2121 /* Return the signed version of a TYPE_NODE, a scalar type. */
2124 gnat_signed_type (tree type_node
)
2126 tree type
= gnat_type_for_size (TYPE_PRECISION (type_node
), 0);
2128 if (TREE_CODE (type_node
) == INTEGER_TYPE
&& TYPE_MODULAR_P (type_node
))
2130 type
= copy_node (type
);
2131 TREE_TYPE (type
) = type_node
;
2133 else if (TREE_TYPE (type_node
)
2134 && TREE_CODE (TREE_TYPE (type_node
)) == INTEGER_TYPE
2135 && TYPE_MODULAR_P (TREE_TYPE (type_node
)))
2137 type
= copy_node (type
);
2138 TREE_TYPE (type
) = TREE_TYPE (type_node
);
2144 /* Return 1 if the types T1 and T2 are compatible, i.e. if they can be
2145 transparently converted to each other. */
2148 gnat_types_compatible_p (tree t1
, tree t2
)
2150 enum tree_code code
;
2152 /* This is the default criterion. */
2153 if (TYPE_MAIN_VARIANT (t1
) == TYPE_MAIN_VARIANT (t2
))
2156 /* We only check structural equivalence here. */
2157 if ((code
= TREE_CODE (t1
)) != TREE_CODE (t2
))
2160 /* Vector types are also compatible if they have the same number of subparts
2161 and the same form of (scalar) element type. */
2162 if (code
== VECTOR_TYPE
2163 && TYPE_VECTOR_SUBPARTS (t1
) == TYPE_VECTOR_SUBPARTS (t2
)
2164 && TREE_CODE (TREE_TYPE (t1
)) == TREE_CODE (TREE_TYPE (t2
))
2165 && TYPE_PRECISION (TREE_TYPE (t1
)) == TYPE_PRECISION (TREE_TYPE (t2
)))
2168 /* Array types are also compatible if they are constrained and have the same
2169 domain(s) and the same component type. */
2170 if (code
== ARRAY_TYPE
2171 && (TYPE_DOMAIN (t1
) == TYPE_DOMAIN (t2
)
2172 || (TYPE_DOMAIN (t1
)
2174 && tree_int_cst_equal (TYPE_MIN_VALUE (TYPE_DOMAIN (t1
)),
2175 TYPE_MIN_VALUE (TYPE_DOMAIN (t2
)))
2176 && tree_int_cst_equal (TYPE_MAX_VALUE (TYPE_DOMAIN (t1
)),
2177 TYPE_MAX_VALUE (TYPE_DOMAIN (t2
)))))
2178 && (TREE_TYPE (t1
) == TREE_TYPE (t2
)
2179 || (TREE_CODE (TREE_TYPE (t1
)) == ARRAY_TYPE
2180 && gnat_types_compatible_p (TREE_TYPE (t1
), TREE_TYPE (t2
)))))
2183 /* Padding record types are also compatible if they pad the same
2184 type and have the same constant size. */
2185 if (code
== RECORD_TYPE
2186 && TYPE_PADDING_P (t1
) && TYPE_PADDING_P (t2
)
2187 && TREE_TYPE (TYPE_FIELDS (t1
)) == TREE_TYPE (TYPE_FIELDS (t2
))
2188 && tree_int_cst_equal (TYPE_SIZE (t1
), TYPE_SIZE (t2
)))
2194 /* Return true if EXPR is a useless type conversion. */
2197 gnat_useless_type_conversion (tree expr
)
2199 if (CONVERT_EXPR_P (expr
)
2200 || TREE_CODE (expr
) == VIEW_CONVERT_EXPR
2201 || TREE_CODE (expr
) == NON_LVALUE_EXPR
)
2202 return gnat_types_compatible_p (TREE_TYPE (expr
),
2203 TREE_TYPE (TREE_OPERAND (expr
, 0)));
2208 /* Return true if T, a FUNCTION_TYPE, has the specified list of flags. */
2211 fntype_same_flags_p (const_tree t
, tree cico_list
, bool return_unconstrained_p
,
2212 bool return_by_direct_ref_p
, bool return_by_invisi_ref_p
)
2214 return TYPE_CI_CO_LIST (t
) == cico_list
2215 && TYPE_RETURN_UNCONSTRAINED_P (t
) == return_unconstrained_p
2216 && TYPE_RETURN_BY_DIRECT_REF_P (t
) == return_by_direct_ref_p
2217 && TREE_ADDRESSABLE (t
) == return_by_invisi_ref_p
;
2220 /* EXP is an expression for the size of an object. If this size contains
2221 discriminant references, replace them with the maximum (if MAX_P) or
2222 minimum (if !MAX_P) possible value of the discriminant. */
2225 max_size (tree exp
, bool max_p
)
2227 enum tree_code code
= TREE_CODE (exp
);
2228 tree type
= TREE_TYPE (exp
);
2230 switch (TREE_CODE_CLASS (code
))
2232 case tcc_declaration
:
2237 if (code
== CALL_EXPR
)
2242 t
= maybe_inline_call_in_expr (exp
);
2244 return max_size (t
, max_p
);
2246 n
= call_expr_nargs (exp
);
2248 argarray
= XALLOCAVEC (tree
, n
);
2249 for (i
= 0; i
< n
; i
++)
2250 argarray
[i
] = max_size (CALL_EXPR_ARG (exp
, i
), max_p
);
2251 return build_call_array (type
, CALL_EXPR_FN (exp
), n
, argarray
);
2256 /* If this contains a PLACEHOLDER_EXPR, it is the thing we want to
2257 modify. Otherwise, we treat it like a variable. */
2258 if (!CONTAINS_PLACEHOLDER_P (exp
))
2261 type
= TREE_TYPE (TREE_OPERAND (exp
, 1));
2263 max_size (max_p
? TYPE_MAX_VALUE (type
) : TYPE_MIN_VALUE (type
), true);
2265 case tcc_comparison
:
2266 return max_p
? size_one_node
: size_zero_node
;
2270 case tcc_expression
:
2271 switch (TREE_CODE_LENGTH (code
))
2274 if (code
== SAVE_EXPR
)
2276 else if (code
== NON_LVALUE_EXPR
)
2277 return max_size (TREE_OPERAND (exp
, 0), max_p
);
2280 fold_build1 (code
, type
,
2281 max_size (TREE_OPERAND (exp
, 0),
2282 code
== NEGATE_EXPR
? !max_p
: max_p
));
2285 if (code
== COMPOUND_EXPR
)
2286 return max_size (TREE_OPERAND (exp
, 1), max_p
);
2289 tree lhs
= max_size (TREE_OPERAND (exp
, 0), max_p
);
2290 tree rhs
= max_size (TREE_OPERAND (exp
, 1),
2291 code
== MINUS_EXPR
? !max_p
: max_p
);
2293 /* Special-case wanting the maximum value of a MIN_EXPR.
2294 In that case, if one side overflows, return the other.
2295 sizetype is signed, but we know sizes are non-negative.
2296 Likewise, handle a MINUS_EXPR or PLUS_EXPR with the LHS
2297 overflowing and the RHS a variable. */
2300 && TREE_CODE (rhs
) == INTEGER_CST
2301 && TREE_OVERFLOW (rhs
))
2305 && TREE_CODE (lhs
) == INTEGER_CST
2306 && TREE_OVERFLOW (lhs
))
2308 else if ((code
== MINUS_EXPR
|| code
== PLUS_EXPR
)
2309 && TREE_CODE (lhs
) == INTEGER_CST
2310 && TREE_OVERFLOW (lhs
)
2311 && !TREE_CONSTANT (rhs
))
2314 return fold_build2 (code
, type
, lhs
, rhs
);
2318 if (code
== COND_EXPR
)
2319 return fold_build2 (max_p
? MAX_EXPR
: MIN_EXPR
, type
,
2320 max_size (TREE_OPERAND (exp
, 1), max_p
),
2321 max_size (TREE_OPERAND (exp
, 2), max_p
));
2324 /* Other tree classes cannot happen. */
2332 /* Build a template of type TEMPLATE_TYPE from the array bounds of ARRAY_TYPE.
2333 EXPR is an expression that we can use to locate any PLACEHOLDER_EXPRs.
2334 Return a constructor for the template. */
2337 build_template (tree template_type
, tree array_type
, tree expr
)
2339 VEC(constructor_elt
,gc
) *template_elts
= NULL
;
2340 tree bound_list
= NULL_TREE
;
2343 while (TREE_CODE (array_type
) == RECORD_TYPE
2344 && (TYPE_PADDING_P (array_type
)
2345 || TYPE_JUSTIFIED_MODULAR_P (array_type
)))
2346 array_type
= TREE_TYPE (TYPE_FIELDS (array_type
));
2348 if (TREE_CODE (array_type
) == ARRAY_TYPE
2349 || (TREE_CODE (array_type
) == INTEGER_TYPE
2350 && TYPE_HAS_ACTUAL_BOUNDS_P (array_type
)))
2351 bound_list
= TYPE_ACTUAL_BOUNDS (array_type
);
2353 /* First make the list for a CONSTRUCTOR for the template. Go down the
2354 field list of the template instead of the type chain because this
2355 array might be an Ada array of arrays and we can't tell where the
2356 nested arrays stop being the underlying object. */
2358 for (field
= TYPE_FIELDS (template_type
); field
;
2360 ? (bound_list
= TREE_CHAIN (bound_list
))
2361 : (array_type
= TREE_TYPE (array_type
))),
2362 field
= DECL_CHAIN (DECL_CHAIN (field
)))
2364 tree bounds
, min
, max
;
2366 /* If we have a bound list, get the bounds from there. Likewise
2367 for an ARRAY_TYPE. Otherwise, if expr is a PARM_DECL with
2368 DECL_BY_COMPONENT_PTR_P, use the bounds of the field in the template.
2369 This will give us a maximum range. */
2371 bounds
= TREE_VALUE (bound_list
);
2372 else if (TREE_CODE (array_type
) == ARRAY_TYPE
)
2373 bounds
= TYPE_INDEX_TYPE (TYPE_DOMAIN (array_type
));
2374 else if (expr
&& TREE_CODE (expr
) == PARM_DECL
2375 && DECL_BY_COMPONENT_PTR_P (expr
))
2376 bounds
= TREE_TYPE (field
);
2380 min
= convert (TREE_TYPE (field
), TYPE_MIN_VALUE (bounds
));
2381 max
= convert (TREE_TYPE (DECL_CHAIN (field
)), TYPE_MAX_VALUE (bounds
));
2383 /* If either MIN or MAX involve a PLACEHOLDER_EXPR, we must
2384 substitute it from OBJECT. */
2385 min
= SUBSTITUTE_PLACEHOLDER_IN_EXPR (min
, expr
);
2386 max
= SUBSTITUTE_PLACEHOLDER_IN_EXPR (max
, expr
);
2388 CONSTRUCTOR_APPEND_ELT (template_elts
, field
, min
);
2389 CONSTRUCTOR_APPEND_ELT (template_elts
, DECL_CHAIN (field
), max
);
2392 return gnat_build_constructor (template_type
, template_elts
);
2395 /* Helper routine to make a descriptor field. FIELD_LIST is the list of decls
2396 being built; the new decl is chained on to the front of the list. */
2399 make_descriptor_field (const char *name
, tree type
, tree rec_type
,
2400 tree initial
, tree field_list
)
2403 = create_field_decl (get_identifier (name
), type
, rec_type
, NULL_TREE
,
2406 DECL_INITIAL (field
) = initial
;
2407 DECL_CHAIN (field
) = field_list
;
2411 /* Build a 32-bit VMS descriptor from a Mechanism_Type, which must specify a
2412 descriptor type, and the GCC type of an object. Each FIELD_DECL in the
2413 type contains in its DECL_INITIAL the expression to use when a constructor
2414 is made for the type. GNAT_ENTITY is an entity used to print out an error
2415 message if the mechanism cannot be applied to an object of that type and
2416 also for the name. */
2419 build_vms_descriptor32 (tree type
, Mechanism_Type mech
, Entity_Id gnat_entity
)
2421 tree record_type
= make_node (RECORD_TYPE
);
2422 tree pointer32_type
, pointer64_type
;
2423 tree field_list
= NULL_TREE
;
2424 int klass
, ndim
, i
, dtype
= 0;
2425 tree inner_type
, tem
;
2428 /* If TYPE is an unconstrained array, use the underlying array type. */
2429 if (TREE_CODE (type
) == UNCONSTRAINED_ARRAY_TYPE
)
2430 type
= TREE_TYPE (TREE_TYPE (TYPE_FIELDS (TREE_TYPE (type
))));
2432 /* If this is an array, compute the number of dimensions in the array,
2433 get the index types, and point to the inner type. */
2434 if (TREE_CODE (type
) != ARRAY_TYPE
)
2437 for (ndim
= 1, inner_type
= type
;
2438 TREE_CODE (TREE_TYPE (inner_type
)) == ARRAY_TYPE
2439 && TYPE_MULTI_ARRAY_P (TREE_TYPE (inner_type
));
2440 ndim
++, inner_type
= TREE_TYPE (inner_type
))
2443 idx_arr
= XALLOCAVEC (tree
, ndim
);
2445 if (mech
!= By_Descriptor_NCA
&& mech
!= By_Short_Descriptor_NCA
2446 && TREE_CODE (type
) == ARRAY_TYPE
&& TYPE_CONVENTION_FORTRAN_P (type
))
2447 for (i
= ndim
- 1, inner_type
= type
;
2449 i
--, inner_type
= TREE_TYPE (inner_type
))
2450 idx_arr
[i
] = TYPE_DOMAIN (inner_type
);
2452 for (i
= 0, inner_type
= type
;
2454 i
++, inner_type
= TREE_TYPE (inner_type
))
2455 idx_arr
[i
] = TYPE_DOMAIN (inner_type
);
2457 /* Now get the DTYPE value. */
2458 switch (TREE_CODE (type
))
2463 if (TYPE_VAX_FLOATING_POINT_P (type
))
2464 switch (tree_low_cst (TYPE_DIGITS_VALUE (type
), 1))
2477 switch (GET_MODE_BITSIZE (TYPE_MODE (type
)))
2480 dtype
= TYPE_UNSIGNED (type
) ? 2 : 6;
2483 dtype
= TYPE_UNSIGNED (type
) ? 3 : 7;
2486 dtype
= TYPE_UNSIGNED (type
) ? 4 : 8;
2489 dtype
= TYPE_UNSIGNED (type
) ? 5 : 9;
2492 dtype
= TYPE_UNSIGNED (type
) ? 25 : 26;
2498 dtype
= GET_MODE_BITSIZE (TYPE_MODE (type
)) == 32 ? 52 : 53;
2502 if (TREE_CODE (TREE_TYPE (type
)) == INTEGER_TYPE
2503 && TYPE_VAX_FLOATING_POINT_P (type
))
2504 switch (tree_low_cst (TYPE_DIGITS_VALUE (type
), 1))
2516 dtype
= GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (type
))) == 32 ? 54: 55;
2527 /* Get the CLASS value. */
2530 case By_Descriptor_A
:
2531 case By_Short_Descriptor_A
:
2534 case By_Descriptor_NCA
:
2535 case By_Short_Descriptor_NCA
:
2538 case By_Descriptor_SB
:
2539 case By_Short_Descriptor_SB
:
2543 case By_Short_Descriptor
:
2544 case By_Descriptor_S
:
2545 case By_Short_Descriptor_S
:
2551 /* Make the type for a descriptor for VMS. The first four fields are the
2552 same for all types. */
2554 = make_descriptor_field ("LENGTH", gnat_type_for_size (16, 1), record_type
,
2555 size_in_bytes ((mech
== By_Descriptor_A
2556 || mech
== By_Short_Descriptor_A
)
2557 ? inner_type
: type
),
2560 = make_descriptor_field ("DTYPE", gnat_type_for_size (8, 1), record_type
,
2561 size_int (dtype
), field_list
);
2563 = make_descriptor_field ("CLASS", gnat_type_for_size (8, 1), record_type
,
2564 size_int (klass
), field_list
);
2566 pointer32_type
= build_pointer_type_for_mode (type
, SImode
, false);
2567 pointer64_type
= build_pointer_type_for_mode (type
, DImode
, false);
2569 /* Ensure that only 32-bit pointers are passed in 32-bit descriptors. Note
2570 that we cannot build a template call to the CE routine as it would get a
2571 wrong source location; instead we use a second placeholder for it. */
2572 tem
= build_unary_op (ADDR_EXPR
, pointer64_type
,
2573 build0 (PLACEHOLDER_EXPR
, type
));
2574 tem
= build3 (COND_EXPR
, pointer32_type
,
2576 ? build_binary_op (GE_EXPR
, boolean_type_node
, tem
,
2577 build_int_cstu (pointer64_type
, 0x80000000))
2578 : boolean_false_node
,
2579 build0 (PLACEHOLDER_EXPR
, void_type_node
),
2580 convert (pointer32_type
, tem
));
2583 = make_descriptor_field ("POINTER", pointer32_type
, record_type
, tem
,
2589 case By_Short_Descriptor
:
2590 case By_Descriptor_S
:
2591 case By_Short_Descriptor_S
:
2594 case By_Descriptor_SB
:
2595 case By_Short_Descriptor_SB
:
2597 = make_descriptor_field ("SB_L1", gnat_type_for_size (32, 1),
2599 (TREE_CODE (type
) == ARRAY_TYPE
2600 ? TYPE_MIN_VALUE (TYPE_DOMAIN (type
))
2604 = make_descriptor_field ("SB_U1", gnat_type_for_size (32, 1),
2606 (TREE_CODE (type
) == ARRAY_TYPE
2607 ? TYPE_MAX_VALUE (TYPE_DOMAIN (type
))
2612 case By_Descriptor_A
:
2613 case By_Short_Descriptor_A
:
2614 case By_Descriptor_NCA
:
2615 case By_Short_Descriptor_NCA
:
2617 = make_descriptor_field ("SCALE", gnat_type_for_size (8, 1),
2618 record_type
, size_zero_node
, field_list
);
2621 = make_descriptor_field ("DIGITS", gnat_type_for_size (8, 1),
2622 record_type
, size_zero_node
, field_list
);
2625 = make_descriptor_field ("AFLAGS", gnat_type_for_size (8, 1),
2627 size_int ((mech
== By_Descriptor_NCA
2628 || mech
== By_Short_Descriptor_NCA
)
2630 /* Set FL_COLUMN, FL_COEFF, and
2632 : (TREE_CODE (type
) == ARRAY_TYPE
2633 && TYPE_CONVENTION_FORTRAN_P
2639 = make_descriptor_field ("DIMCT", gnat_type_for_size (8, 1),
2640 record_type
, size_int (ndim
), field_list
);
2643 = make_descriptor_field ("ARSIZE", gnat_type_for_size (32, 1),
2644 record_type
, size_in_bytes (type
),
2647 /* Now build a pointer to the 0,0,0... element. */
2648 tem
= build0 (PLACEHOLDER_EXPR
, type
);
2649 for (i
= 0, inner_type
= type
; i
< ndim
;
2650 i
++, inner_type
= TREE_TYPE (inner_type
))
2651 tem
= build4 (ARRAY_REF
, TREE_TYPE (inner_type
), tem
,
2652 convert (TYPE_DOMAIN (inner_type
), size_zero_node
),
2653 NULL_TREE
, NULL_TREE
);
2656 = make_descriptor_field ("A0", pointer32_type
, record_type
,
2657 build1 (ADDR_EXPR
, pointer32_type
, tem
),
2660 /* Next come the addressing coefficients. */
2661 tem
= size_one_node
;
2662 for (i
= 0; i
< ndim
; i
++)
2666 = size_binop (MULT_EXPR
, tem
,
2667 size_binop (PLUS_EXPR
,
2668 size_binop (MINUS_EXPR
,
2669 TYPE_MAX_VALUE (idx_arr
[i
]),
2670 TYPE_MIN_VALUE (idx_arr
[i
])),
2673 fname
[0] = ((mech
== By_Descriptor_NCA
||
2674 mech
== By_Short_Descriptor_NCA
) ? 'S' : 'M');
2675 fname
[1] = '0' + i
, fname
[2] = 0;
2677 = make_descriptor_field (fname
, gnat_type_for_size (32, 1),
2678 record_type
, idx_length
, field_list
);
2680 if (mech
== By_Descriptor_NCA
|| mech
== By_Short_Descriptor_NCA
)
2684 /* Finally here are the bounds. */
2685 for (i
= 0; i
< ndim
; i
++)
2689 fname
[0] = 'L', fname
[1] = '0' + i
, fname
[2] = 0;
2691 = make_descriptor_field (fname
, gnat_type_for_size (32, 1),
2692 record_type
, TYPE_MIN_VALUE (idx_arr
[i
]),
2697 = make_descriptor_field (fname
, gnat_type_for_size (32, 1),
2698 record_type
, TYPE_MAX_VALUE (idx_arr
[i
]),
2704 post_error ("unsupported descriptor type for &", gnat_entity
);
2707 TYPE_NAME (record_type
) = create_concat_name (gnat_entity
, "DESC");
2708 finish_record_type (record_type
, nreverse (field_list
), 0, false);
2712 /* Build a 64-bit VMS descriptor from a Mechanism_Type, which must specify a
2713 descriptor type, and the GCC type of an object. Each FIELD_DECL in the
2714 type contains in its DECL_INITIAL the expression to use when a constructor
2715 is made for the type. GNAT_ENTITY is an entity used to print out an error
2716 message if the mechanism cannot be applied to an object of that type and
2717 also for the name. */
2720 build_vms_descriptor (tree type
, Mechanism_Type mech
, Entity_Id gnat_entity
)
2722 tree record_type
= make_node (RECORD_TYPE
);
2723 tree pointer64_type
;
2724 tree field_list
= NULL_TREE
;
2725 int klass
, ndim
, i
, dtype
= 0;
2726 tree inner_type
, tem
;
2729 /* If TYPE is an unconstrained array, use the underlying array type. */
2730 if (TREE_CODE (type
) == UNCONSTRAINED_ARRAY_TYPE
)
2731 type
= TREE_TYPE (TREE_TYPE (TYPE_FIELDS (TREE_TYPE (type
))));
2733 /* If this is an array, compute the number of dimensions in the array,
2734 get the index types, and point to the inner type. */
2735 if (TREE_CODE (type
) != ARRAY_TYPE
)
2738 for (ndim
= 1, inner_type
= type
;
2739 TREE_CODE (TREE_TYPE (inner_type
)) == ARRAY_TYPE
2740 && TYPE_MULTI_ARRAY_P (TREE_TYPE (inner_type
));
2741 ndim
++, inner_type
= TREE_TYPE (inner_type
))
2744 idx_arr
= XALLOCAVEC (tree
, ndim
);
2746 if (mech
!= By_Descriptor_NCA
2747 && TREE_CODE (type
) == ARRAY_TYPE
&& TYPE_CONVENTION_FORTRAN_P (type
))
2748 for (i
= ndim
- 1, inner_type
= type
;
2750 i
--, inner_type
= TREE_TYPE (inner_type
))
2751 idx_arr
[i
] = TYPE_DOMAIN (inner_type
);
2753 for (i
= 0, inner_type
= type
;
2755 i
++, inner_type
= TREE_TYPE (inner_type
))
2756 idx_arr
[i
] = TYPE_DOMAIN (inner_type
);
2758 /* Now get the DTYPE value. */
2759 switch (TREE_CODE (type
))
2764 if (TYPE_VAX_FLOATING_POINT_P (type
))
2765 switch (tree_low_cst (TYPE_DIGITS_VALUE (type
), 1))
2778 switch (GET_MODE_BITSIZE (TYPE_MODE (type
)))
2781 dtype
= TYPE_UNSIGNED (type
) ? 2 : 6;
2784 dtype
= TYPE_UNSIGNED (type
) ? 3 : 7;
2787 dtype
= TYPE_UNSIGNED (type
) ? 4 : 8;
2790 dtype
= TYPE_UNSIGNED (type
) ? 5 : 9;
2793 dtype
= TYPE_UNSIGNED (type
) ? 25 : 26;
2799 dtype
= GET_MODE_BITSIZE (TYPE_MODE (type
)) == 32 ? 52 : 53;
2803 if (TREE_CODE (TREE_TYPE (type
)) == INTEGER_TYPE
2804 && TYPE_VAX_FLOATING_POINT_P (type
))
2805 switch (tree_low_cst (TYPE_DIGITS_VALUE (type
), 1))
2817 dtype
= GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (type
))) == 32 ? 54: 55;
2828 /* Get the CLASS value. */
2831 case By_Descriptor_A
:
2834 case By_Descriptor_NCA
:
2837 case By_Descriptor_SB
:
2841 case By_Descriptor_S
:
2847 /* Make the type for a 64-bit descriptor for VMS. The first six fields
2848 are the same for all types. */
2850 = make_descriptor_field ("MBO", gnat_type_for_size (16, 1),
2851 record_type
, size_int (1), field_list
);
2853 = make_descriptor_field ("DTYPE", gnat_type_for_size (8, 1),
2854 record_type
, size_int (dtype
), field_list
);
2856 = make_descriptor_field ("CLASS", gnat_type_for_size (8, 1),
2857 record_type
, size_int (klass
), field_list
);
2859 = make_descriptor_field ("MBMO", gnat_type_for_size (32, 1),
2860 record_type
, ssize_int (-1), field_list
);
2862 = make_descriptor_field ("LENGTH", gnat_type_for_size (64, 1),
2864 size_in_bytes (mech
== By_Descriptor_A
2865 ? inner_type
: type
),
2868 pointer64_type
= build_pointer_type_for_mode (type
, DImode
, false);
2871 = make_descriptor_field ("POINTER", pointer64_type
, record_type
,
2872 build_unary_op (ADDR_EXPR
, pointer64_type
,
2873 build0 (PLACEHOLDER_EXPR
, type
)),
2879 case By_Descriptor_S
:
2882 case By_Descriptor_SB
:
2884 = make_descriptor_field ("SB_L1", gnat_type_for_size (64, 1),
2886 (TREE_CODE (type
) == ARRAY_TYPE
2887 ? TYPE_MIN_VALUE (TYPE_DOMAIN (type
))
2891 = make_descriptor_field ("SB_U1", gnat_type_for_size (64, 1),
2893 (TREE_CODE (type
) == ARRAY_TYPE
2894 ? TYPE_MAX_VALUE (TYPE_DOMAIN (type
))
2899 case By_Descriptor_A
:
2900 case By_Descriptor_NCA
:
2902 = make_descriptor_field ("SCALE", gnat_type_for_size (8, 1),
2903 record_type
, size_zero_node
, field_list
);
2906 = make_descriptor_field ("DIGITS", gnat_type_for_size (8, 1),
2907 record_type
, size_zero_node
, field_list
);
2909 dtype
= (mech
== By_Descriptor_NCA
2911 /* Set FL_COLUMN, FL_COEFF, and
2913 : (TREE_CODE (type
) == ARRAY_TYPE
2914 && TYPE_CONVENTION_FORTRAN_P (type
)
2917 = make_descriptor_field ("AFLAGS", gnat_type_for_size (8, 1),
2918 record_type
, size_int (dtype
),
2922 = make_descriptor_field ("DIMCT", gnat_type_for_size (8, 1),
2923 record_type
, size_int (ndim
), field_list
);
2926 = make_descriptor_field ("MBZ", gnat_type_for_size (32, 1),
2927 record_type
, size_int (0), field_list
);
2929 = make_descriptor_field ("ARSIZE", gnat_type_for_size (64, 1),
2930 record_type
, size_in_bytes (type
),
2933 /* Now build a pointer to the 0,0,0... element. */
2934 tem
= build0 (PLACEHOLDER_EXPR
, type
);
2935 for (i
= 0, inner_type
= type
; i
< ndim
;
2936 i
++, inner_type
= TREE_TYPE (inner_type
))
2937 tem
= build4 (ARRAY_REF
, TREE_TYPE (inner_type
), tem
,
2938 convert (TYPE_DOMAIN (inner_type
), size_zero_node
),
2939 NULL_TREE
, NULL_TREE
);
2942 = make_descriptor_field ("A0", pointer64_type
, record_type
,
2943 build1 (ADDR_EXPR
, pointer64_type
, tem
),
2946 /* Next come the addressing coefficients. */
2947 tem
= size_one_node
;
2948 for (i
= 0; i
< ndim
; i
++)
2952 = size_binop (MULT_EXPR
, tem
,
2953 size_binop (PLUS_EXPR
,
2954 size_binop (MINUS_EXPR
,
2955 TYPE_MAX_VALUE (idx_arr
[i
]),
2956 TYPE_MIN_VALUE (idx_arr
[i
])),
2959 fname
[0] = (mech
== By_Descriptor_NCA
? 'S' : 'M');
2960 fname
[1] = '0' + i
, fname
[2] = 0;
2962 = make_descriptor_field (fname
, gnat_type_for_size (64, 1),
2963 record_type
, idx_length
, field_list
);
2965 if (mech
== By_Descriptor_NCA
)
2969 /* Finally here are the bounds. */
2970 for (i
= 0; i
< ndim
; i
++)
2974 fname
[0] = 'L', fname
[1] = '0' + i
, fname
[2] = 0;
2976 = make_descriptor_field (fname
, gnat_type_for_size (64, 1),
2978 TYPE_MIN_VALUE (idx_arr
[i
]), field_list
);
2982 = make_descriptor_field (fname
, gnat_type_for_size (64, 1),
2984 TYPE_MAX_VALUE (idx_arr
[i
]), field_list
);
2989 post_error ("unsupported descriptor type for &", gnat_entity
);
2992 TYPE_NAME (record_type
) = create_concat_name (gnat_entity
, "DESC64");
2993 finish_record_type (record_type
, nreverse (field_list
), 0, false);
2997 /* Fill in a VMS descriptor of GNU_TYPE for GNU_EXPR and return the result.
2998 GNAT_ACTUAL is the actual parameter for which the descriptor is built. */
3001 fill_vms_descriptor (tree gnu_type
, tree gnu_expr
, Node_Id gnat_actual
)
3003 VEC(constructor_elt
,gc
) *v
= NULL
;
3006 gnu_expr
= maybe_unconstrained_array (gnu_expr
);
3007 gnu_expr
= gnat_protect_expr (gnu_expr
);
3008 gnat_mark_addressable (gnu_expr
);
3010 /* We may need to substitute both GNU_EXPR and a CALL_EXPR to the raise CE
3011 routine in case we have a 32-bit descriptor. */
3012 gnu_expr
= build2 (COMPOUND_EXPR
, void_type_node
,
3013 build_call_raise (CE_Range_Check_Failed
, gnat_actual
,
3014 N_Raise_Constraint_Error
),
3017 for (field
= TYPE_FIELDS (gnu_type
); field
; field
= DECL_CHAIN (field
))
3020 = convert (TREE_TYPE (field
),
3021 SUBSTITUTE_PLACEHOLDER_IN_EXPR (DECL_INITIAL (field
),
3023 CONSTRUCTOR_APPEND_ELT (v
, field
, value
);
3026 return gnat_build_constructor (gnu_type
, v
);
3029 /* Convert GNU_EXPR, a pointer to a 64bit VMS descriptor, to GNU_TYPE, a
3030 regular pointer or fat pointer type. GNAT_SUBPROG is the subprogram to
3031 which the VMS descriptor is passed. */
/* NOTE(review): this chunk is a line-mangled partial extraction; several
   physical lines of the original function (the return type, braces, the
   switch head on IKLASS, break statements) are not visible here.  Only
   comments are added below; no code token is altered.  */
3034 convert_vms_descriptor64 (tree gnu_type
, tree gnu_expr
, Entity_Id gnat_subprog
)
/* DESC_TYPE is the descriptor record type designated by GNU_EXPR.  */
3036 tree desc_type
= TREE_TYPE (TREE_TYPE (gnu_expr
));
/* Dereference the descriptor pointer to get the record object.  */
3037 tree desc
= build1 (INDIRECT_REF
, desc_type
, gnu_expr
);
3038 /* The CLASS field is the 3rd field in the descriptor. */
3039 tree klass
= DECL_CHAIN (DECL_CHAIN (TYPE_FIELDS (desc_type
)));
3040 /* The POINTER field is the 6th field in the descriptor. */
3041 tree pointer
= DECL_CHAIN (DECL_CHAIN (DECL_CHAIN (klass
)));
3043 /* Retrieve the value of the POINTER field. */
/* NOTE(review): the declaration of the target variable (presumably
   gnu_expr64, per uses below) is on a line missing from this view.  */
3045 = build3 (COMPONENT_REF
, TREE_TYPE (pointer
), desc
, pointer
, NULL_TREE
);
/* For a plain access type, the data pointer converted to GNU_TYPE is
   the whole result.  */
3047 if (POINTER_TYPE_P (gnu_type
))
3048 return convert (gnu_type
, gnu_expr64
);
/* Otherwise build a fat pointer: data pointer plus bounds template,
   dispatching on the descriptor CLASS code.  */
3050 else if (TYPE_IS_FAT_POINTER_P (gnu_type
))
3052 tree p_array_type
= TREE_TYPE (TYPE_FIELDS (gnu_type
));
3053 tree p_bounds_type
= TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (gnu_type
)));
3054 tree template_type
= TREE_TYPE (p_bounds_type
);
3055 tree min_field
= TYPE_FIELDS (template_type
);
3056 tree max_field
= DECL_CHAIN (TYPE_FIELDS (template_type
));
3057 tree template_tree
, template_addr
, aflags
, dimct
, t
, u
;
3058 /* See the head comment of build_vms_descriptor. */
3059 int iklass
= TREE_INT_CST_LOW (DECL_INITIAL (klass
));
3060 tree lfield
, ufield
;
3061 VEC(constructor_elt
,gc
) *v
;
3063 /* Convert POINTER to the pointer-to-array type. */
3064 gnu_expr64
= convert (p_array_type
, gnu_expr64
);
/* NOTE(review): the "switch (iklass)" line itself is missing from this
   view; the case labels below belong to it.  */
3068 case 1: /* Class S */
3069 case 15: /* Class SB */
3070 /* Build {1, LENGTH} template; LENGTH64 is the 5th field. */
3071 v
= VEC_alloc (constructor_elt
, gc
, 2);
3072 t
= DECL_CHAIN (DECL_CHAIN (klass
));
3073 t
= build3 (COMPONENT_REF
, TREE_TYPE (t
), desc
, t
, NULL_TREE
);
3074 CONSTRUCTOR_APPEND_ELT (v
, min_field
,
3075 convert (TREE_TYPE (min_field
),
3077 CONSTRUCTOR_APPEND_ELT (v
, max_field
,
3078 convert (TREE_TYPE (max_field
), t
));
3079 template_tree
= gnat_build_constructor (template_type
, v
);
3080 template_addr
= build_unary_op (ADDR_EXPR
, NULL_TREE
, template_tree
);
3082 /* For class S, we are done. */
3086 /* Test that we really have a SB descriptor, like DEC Ada. */
3087 t
= build3 (COMPONENT_REF
, TREE_TYPE (klass
), desc
, klass
, NULL
);
3088 u
= convert (TREE_TYPE (klass
), DECL_INITIAL (klass
));
3089 u
= build_binary_op (EQ_EXPR
, boolean_type_node
, t
, u
);
3090 /* If so, there is already a template in the descriptor and
3091 it is located right after the POINTER field. The fields are
3092 64bits so they must be repacked. */
3093 t
= DECL_CHAIN (pointer
);
3094 lfield
= build3 (COMPONENT_REF
, TREE_TYPE (t
), desc
, t
, NULL_TREE
);
3095 lfield
= convert (TREE_TYPE (TYPE_FIELDS (template_type
)), lfield
);
/* NOTE(review): the advance of T to the upper-bound field and the
   start of the UFIELD conversion are on lines missing from view.  */
3098 ufield
= build3 (COMPONENT_REF
, TREE_TYPE (t
), desc
, t
, NULL_TREE
);
3100 (TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (template_type
))), ufield
);
3102 /* Build the template in the form of a constructor. */
3103 v
= VEC_alloc (constructor_elt
, gc
, 2);
3104 CONSTRUCTOR_APPEND_ELT (v
, TYPE_FIELDS (template_type
), lfield
);
3105 CONSTRUCTOR_APPEND_ELT (v
, DECL_CHAIN (TYPE_FIELDS (template_type
)),
3107 template_tree
= gnat_build_constructor (template_type
, v
);
3109 /* Otherwise use the {1, LENGTH} template we build above. */
3110 template_addr
= build3 (COND_EXPR
, p_bounds_type
, u
,
3111 build_unary_op (ADDR_EXPR
, p_bounds_type
,
3116 case 4: /* Class A */
3117 /* The AFLAGS field is the 3rd field after the pointer in the
3119 t
= DECL_CHAIN (DECL_CHAIN (DECL_CHAIN (pointer
)));
3120 aflags
= build3 (COMPONENT_REF
, TREE_TYPE (t
), desc
, t
, NULL_TREE
);
3121 /* The DIMCT field is the next field in the descriptor after
3124 dimct
= build3 (COMPONENT_REF
, TREE_TYPE (t
), desc
, t
, NULL_TREE
);
3125 /* Raise CONSTRAINT_ERROR if either more than 1 dimension
3126 or FL_COEFF or FL_BOUNDS not set. */
/* 192 == FL_COEFF | FL_BOUNDS mask — presumably; verify against the
   VMS descriptor AFLAGS definition.  */
3127 u
= build_int_cst (TREE_TYPE (aflags
), 192);
3128 u
= build_binary_op (TRUTH_OR_EXPR
, boolean_type_node
,
3129 build_binary_op (NE_EXPR
, boolean_type_node
,
3131 convert (TREE_TYPE (dimct
),
3133 build_binary_op (NE_EXPR
, boolean_type_node
,
3134 build2 (BIT_AND_EXPR
,
3138 /* There is already a template in the descriptor and it is located
3139 in block 3. The fields are 64bits so they must be repacked. */
3140 t
= DECL_CHAIN (DECL_CHAIN (DECL_CHAIN (DECL_CHAIN (DECL_CHAIN
3142 lfield
= build3 (COMPONENT_REF
, TREE_TYPE (t
), desc
, t
, NULL_TREE
);
3143 lfield
= convert (TREE_TYPE (TYPE_FIELDS (template_type
)), lfield
);
3146 ufield
= build3 (COMPONENT_REF
, TREE_TYPE (t
), desc
, t
, NULL_TREE
);
3148 (TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (template_type
))), ufield
);
3150 /* Build the template in the form of a constructor. */
3151 v
= VEC_alloc (constructor_elt
, gc
, 2);
3152 CONSTRUCTOR_APPEND_ELT (v
, TYPE_FIELDS (template_type
), lfield
);
3153 CONSTRUCTOR_APPEND_ELT (v
, DECL_CHAIN (TYPE_FIELDS (template_type
)),
3155 template_tree
= gnat_build_constructor (template_type
, v
);
/* Guard the template with the CONSTRAINT_ERROR check built in U.  */
3156 template_tree
= build3 (COND_EXPR
, template_type
, u
,
3157 build_call_raise (CE_Length_Check_Failed
, Empty
,
3158 N_Raise_Constraint_Error
),
3161 = build_unary_op (ADDR_EXPR
, p_bounds_type
, template_tree
);
3164 case 10: /* Class NCA */
3166 post_error ("unsupported descriptor type for &", gnat_subprog
);
3167 template_addr
= integer_zero_node
;
3171 /* Build the fat pointer in the form of a constructor. */
3172 v
= VEC_alloc (constructor_elt
, gc
, 2);
3173 CONSTRUCTOR_APPEND_ELT (v
, TYPE_FIELDS (gnu_type
), gnu_expr64
);
3174 CONSTRUCTOR_APPEND_ELT (v
, DECL_CHAIN (TYPE_FIELDS (gnu_type
)),
3176 return gnat_build_constructor (gnu_type
, v
);
3183 /* Convert GNU_EXPR, a pointer to a 32bit VMS descriptor, to GNU_TYPE, a
3184 regular pointer or fat pointer type. GNAT_SUBPROG is the subprogram to
3185 which the VMS descriptor is passed. */
/* NOTE(review): 32-bit counterpart of convert_vms_descriptor64 above;
   the field offsets differ (POINTER is the 4th field here, not the 6th)
   and no 64-bit repacking of the bounds is needed.  This text is a
   line-mangled partial extraction — the switch head, braces, break and
   return lines are missing from view; only comments are added.  */
3188 convert_vms_descriptor32 (tree gnu_type
, tree gnu_expr
, Entity_Id gnat_subprog
)
/* DESC_TYPE is the descriptor record type designated by GNU_EXPR.  */
3190 tree desc_type
= TREE_TYPE (TREE_TYPE (gnu_expr
));
3191 tree desc
= build1 (INDIRECT_REF
, desc_type
, gnu_expr
);
3192 /* The CLASS field is the 3rd field in the descriptor. */
3193 tree klass
= DECL_CHAIN (DECL_CHAIN (TYPE_FIELDS (desc_type
)));
3194 /* The POINTER field is the 4th field in the descriptor. */
3195 tree pointer
= DECL_CHAIN (klass
);
3197 /* Retrieve the value of the POINTER field. */
/* NOTE(review): the declaration of the target variable (presumably
   gnu_expr32, per uses below) is on a line missing from this view.  */
3199 = build3 (COMPONENT_REF
, TREE_TYPE (pointer
), desc
, pointer
, NULL_TREE
);
/* For a plain access type, the converted data pointer is the result.  */
3201 if (POINTER_TYPE_P (gnu_type
))
3202 return convert (gnu_type
, gnu_expr32
);
3204 else if (TYPE_IS_FAT_POINTER_P (gnu_type
))
3206 tree p_array_type
= TREE_TYPE (TYPE_FIELDS (gnu_type
));
3207 tree p_bounds_type
= TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (gnu_type
)));
3208 tree template_type
= TREE_TYPE (p_bounds_type
);
3209 tree min_field
= TYPE_FIELDS (template_type
);
3210 tree max_field
= DECL_CHAIN (TYPE_FIELDS (template_type
));
3211 tree template_tree
, template_addr
, aflags
, dimct
, t
, u
;
3212 /* See the head comment of build_vms_descriptor. */
3213 int iklass
= TREE_INT_CST_LOW (DECL_INITIAL (klass
));
3214 VEC(constructor_elt
,gc
) *v
;
3216 /* Convert POINTER to the pointer-to-array type. */
3217 gnu_expr32
= convert (p_array_type
, gnu_expr32
);
/* NOTE(review): the "switch (iklass)" line is missing from view.  */
3221 case 1: /* Class S */
3222 case 15: /* Class SB */
3223 /* Build {1, LENGTH} template; LENGTH is the 1st field. */
3224 v
= VEC_alloc (constructor_elt
, gc
, 2);
3225 t
= TYPE_FIELDS (desc_type
);
3226 t
= build3 (COMPONENT_REF
, TREE_TYPE (t
), desc
, t
, NULL_TREE
);
3227 CONSTRUCTOR_APPEND_ELT (v
, min_field
,
3228 convert (TREE_TYPE (min_field
),
3230 CONSTRUCTOR_APPEND_ELT (v
, max_field
,
3231 convert (TREE_TYPE (max_field
), t
));
3232 template_tree
= gnat_build_constructor (template_type
, v
);
3233 template_addr
= build_unary_op (ADDR_EXPR
, NULL_TREE
, template_tree
);
3235 /* For class S, we are done. */
3239 /* Test that we really have a SB descriptor, like DEC Ada. */
3240 t
= build3 (COMPONENT_REF
, TREE_TYPE (klass
), desc
, klass
, NULL
);
3241 u
= convert (TREE_TYPE (klass
), DECL_INITIAL (klass
));
3242 u
= build_binary_op (EQ_EXPR
, boolean_type_node
, t
, u
);
3243 /* If so, there is already a template in the descriptor and
3244 it is located right after the POINTER field. */
3245 t
= DECL_CHAIN (pointer
);
3247 = build3 (COMPONENT_REF
, TREE_TYPE (t
), desc
, t
, NULL_TREE
);
3248 /* Otherwise use the {1, LENGTH} template we build above. */
3249 template_addr
= build3 (COND_EXPR
, p_bounds_type
, u
,
3250 build_unary_op (ADDR_EXPR
, p_bounds_type
,
3255 case 4: /* Class A */
3256 /* The AFLAGS field is the 7th field in the descriptor. */
3257 t
= DECL_CHAIN (DECL_CHAIN (DECL_CHAIN (pointer
)));
3258 aflags
= build3 (COMPONENT_REF
, TREE_TYPE (t
), desc
, t
, NULL_TREE
);
3259 /* The DIMCT field is the 8th field in the descriptor. */
3261 dimct
= build3 (COMPONENT_REF
, TREE_TYPE (t
), desc
, t
, NULL_TREE
);
3262 /* Raise CONSTRAINT_ERROR if either more than 1 dimension
3263 or FL_COEFF or FL_BOUNDS not set. */
/* 192 == FL_COEFF | FL_BOUNDS mask — presumably; verify against the
   VMS descriptor AFLAGS definition.  */
3264 u
= build_int_cst (TREE_TYPE (aflags
), 192);
3265 u
= build_binary_op (TRUTH_OR_EXPR
, boolean_type_node
,
3266 build_binary_op (NE_EXPR
, boolean_type_node
,
3268 convert (TREE_TYPE (dimct
),
3270 build_binary_op (NE_EXPR
, boolean_type_node
,
3271 build2 (BIT_AND_EXPR
,
3275 /* There is already a template in the descriptor and it is
3276 located at the start of block 3 (12th field). */
3277 t
= DECL_CHAIN (DECL_CHAIN (DECL_CHAIN (DECL_CHAIN (t
))));
3279 = build3 (COMPONENT_REF
, TREE_TYPE (t
), desc
, t
, NULL_TREE
);
/* Guard the in-descriptor template with the CONSTRAINT_ERROR check.  */
3280 template_tree
= build3 (COND_EXPR
, TREE_TYPE (t
), u
,
3281 build_call_raise (CE_Length_Check_Failed
, Empty
,
3282 N_Raise_Constraint_Error
),
3285 = build_unary_op (ADDR_EXPR
, p_bounds_type
, template_tree
);
3288 case 10: /* Class NCA */
3290 post_error ("unsupported descriptor type for &", gnat_subprog
);
3291 template_addr
= integer_zero_node
;
3295 /* Build the fat pointer in the form of a constructor. */
3296 v
= VEC_alloc (constructor_elt
, gc
, 2);
3297 CONSTRUCTOR_APPEND_ELT (v
, TYPE_FIELDS (gnu_type
), gnu_expr32
);
3298 CONSTRUCTOR_APPEND_ELT (v
, DECL_CHAIN (TYPE_FIELDS (gnu_type
)),
3301 return gnat_build_constructor (gnu_type
, v
);
3308 /* Convert GNU_EXPR, a pointer to a VMS descriptor, to GNU_TYPE, a regular
3309 pointer or fat pointer type. GNU_EXPR_ALT_TYPE is the alternate (32-bit)
3310 pointer type of GNU_EXPR. BY_REF is true if the result is to be used by
3311 reference. GNAT_SUBPROG is the subprogram to which the VMS descriptor is
3315 convert_vms_descriptor (tree gnu_type
, tree gnu_expr
, tree gnu_expr_alt_type
,
3316 bool by_ref
, Entity_Id gnat_subprog
)
/* NOTE(review): dispatcher over the 32-bit and 64-bit converters above.
   It sniffs the first field of the descriptor record: if it is named
   "MBO" the descriptor may be 64-bit and a runtime test is built;
   otherwise the 32-bit form is used unconditionally.  This text is a
   line-mangled partial extraction; only comments are added.  */
3318 tree desc_type
= TREE_TYPE (TREE_TYPE (gnu_expr
));
3319 tree desc
= build1 (INDIRECT_REF
, desc_type
, gnu_expr
);
/* MBO is the 1st field; MBMO is the 4th.  */
3320 tree mbo
= TYPE_FIELDS (desc_type
);
3321 const char *mbostr
= IDENTIFIER_POINTER (DECL_NAME (mbo
));
3322 tree mbmo
= DECL_CHAIN (DECL_CHAIN (DECL_CHAIN (mbo
)));
3323 tree real_type
, is64bit
, gnu_expr32
, gnu_expr64
;
/* When the result is used by reference, the converters produce the
   designated type and the address is taken afterwards (BY_REF tests are
   on lines missing from this view).  */
3326 real_type
= TREE_TYPE (gnu_type
);
3328 real_type
= gnu_type
;
3330 /* If the field name is not MBO, it must be 32-bit and no alternate.
3331 Otherwise primary must be 64-bit and alternate 32-bit. */
3332 if (strcmp (mbostr
, "MBO") != 0)
3334 tree ret
= convert_vms_descriptor32 (real_type
, gnu_expr
, gnat_subprog
);
3336 ret
= build_unary_op (ADDR_EXPR
, gnu_type
, ret
);
3340 /* Build the test for 64-bit descriptor. */
3341 mbo
= build3 (COMPONENT_REF
, TREE_TYPE (mbo
), desc
, mbo
, NULL_TREE
);
3342 mbmo
= build3 (COMPONENT_REF
, TREE_TYPE (mbmo
), desc
, mbmo
, NULL_TREE
);
/* NOTE(review): assignment target (presumably is64bit) is on a line
   missing from this view.  64-bit iff MBO == 1 (line not visible) and
   MBMO == -1.  */
3344 = build_binary_op (TRUTH_ANDIF_EXPR
, boolean_type_node
,
3345 build_binary_op (EQ_EXPR
, boolean_type_node
,
3346 convert (integer_type_node
, mbo
),
3348 build_binary_op (EQ_EXPR
, boolean_type_node
,
3349 convert (integer_type_node
, mbmo
),
3350 integer_minus_one_node
));
3352 /* Build the 2 possible end results. */
3353 gnu_expr64
= convert_vms_descriptor64 (real_type
, gnu_expr
, gnat_subprog
);
3355 gnu_expr64
= build_unary_op (ADDR_EXPR
, gnu_type
, gnu_expr64
);
/* View the same pointer through the alternate 32-bit pointer type
   before handing it to the 32-bit converter.  */
3356 gnu_expr
= fold_convert (gnu_expr_alt_type
, gnu_expr
);
3357 gnu_expr32
= convert_vms_descriptor32 (real_type
, gnu_expr
, gnat_subprog
);
3359 gnu_expr32
= build_unary_op (ADDR_EXPR
, gnu_type
, gnu_expr32
);
/* Select between the two conversions at run time.  */
3361 return build3 (COND_EXPR
, gnu_type
, is64bit
, gnu_expr64
, gnu_expr32
);
3364 /* Build a type to be used to represent an aliased object whose nominal type
3365 is an unconstrained array. This consists of a RECORD_TYPE containing a
3366 field of TEMPLATE_TYPE and a field of OBJECT_TYPE, which is an ARRAY_TYPE.
3367 If ARRAY_TYPE is that of an unconstrained array, this is used to represent
3368 an arbitrary unconstrained object. Use NAME as the name of the record.
3369 DEBUG_INFO_P is true if we need to write debug information for the type. */
/* NOTE(review): line-mangled partial extraction — the return type, the
   DEBUG_INFO_P parameter line, braces, the local field-decl variable
   declarations and the trailing "return type;" are missing from view.
   Only comments are added; no code token is altered.  */
3372 build_unc_object_type (tree template_type
, tree object_type
, tree name
,
3375 tree type
= make_node (RECORD_TYPE
);
/* BOUNDS field holds the template (the array bounds).  The assignment
   target (presumably template_field) is on a line missing from view.  */
3377 = create_field_decl (get_identifier ("BOUNDS"), template_type
, type
,
3378 NULL_TREE
, NULL_TREE
, 0, 1);
/* ARRAY field holds the data; target presumably array_field.  */
3380 = create_field_decl (get_identifier ("ARRAY"), object_type
, type
,
3381 NULL_TREE
, NULL_TREE
, 0, 1);
3383 TYPE_NAME (type
) = name
;
/* Mark the record as template-carrying so converters recognize it.  */
3384 TYPE_CONTAINS_TEMPLATE_P (type
) = 1;
3385 DECL_CHAIN (template_field
) = array_field
;
3386 finish_record_type (type
, template_field
, 0, true);
3388 /* Declare it now since it will never be declared otherwise. This is
3389 necessary to ensure that its subtrees are properly marked. */
3390 create_type_decl (name
, type
, NULL
, true, debug_info_p
, Empty
);
3395 /* Same, taking a thin or fat pointer type instead of a template type. */
/* NOTE(review): wrapper extracting the template type from a thin or fat
   pointer and delegating to build_unc_object_type.  The return type,
   braces, the local template_type declaration and the "return" keyword
   before the delegating call are on lines missing from this mangled
   extraction; only comments are added.  */
3398 build_unc_object_type_from_ptr (tree thin_fat_ptr_type
, tree object_type
,
3399 tree name
, bool debug_info_p
)
3403 gcc_assert (TYPE_IS_FAT_OR_THIN_POINTER_P (thin_fat_ptr_type
));
/* Fat pointer: template type is designated by the 2nd field.
   Thin pointer: template is the 1st field of the designated record.  */
3406 = (TYPE_IS_FAT_POINTER_P (thin_fat_ptr_type
)
3407 ? TREE_TYPE (TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (thin_fat_ptr_type
))))
3408 : TREE_TYPE (TYPE_FIELDS (TREE_TYPE (thin_fat_ptr_type
))));
3411 build_unc_object_type (template_type
, object_type
, name
, debug_info_p
);
3414 /* Update anything previously pointing to OLD_TYPE to point to NEW_TYPE.
3415 In the normal case this is just two adjustments, but we have more to
3416 do if NEW_TYPE is an UNCONSTRAINED_ARRAY_TYPE. */
/* NOTE(review): line-mangled partial extraction — the return type,
   braces, early "return" statements and some if/else scaffolding are
   missing from view.  Only comments are added; no code token changes.  */
3419 update_pointer_to (tree old_type
, tree new_type
)
3421 tree ptr
= TYPE_POINTER_TO (old_type
);
3422 tree ref
= TYPE_REFERENCE_TO (old_type
);
3425 /* If this is the main variant, process all the other variants first. */
3426 if (TYPE_MAIN_VARIANT (old_type
) == old_type
)
3427 for (t
= TYPE_NEXT_VARIANT (old_type
); t
; t
= TYPE_NEXT_VARIANT (t
))
3428 update_pointer_to (t
, new_type
);
3430 /* If no pointers and no references, we are done. */
3434 /* Merge the old type qualifiers in the new type.
3436 Each old variant has qualifiers for specific reasons, and the new
3437 designated type as well. Each set of qualifiers represents useful
3438 information grabbed at some point, and merging the two simply unifies
3439 these inputs into the final type description.
3441 Consider for instance a volatile type frozen after an access to constant
3442 type designating it; after the designated type's freeze, we get here with
3443 a volatile NEW_TYPE and a dummy OLD_TYPE with a readonly variant, created
3444 when the access type was processed. We will make a volatile and readonly
3445 designated type, because that's what it really is.
3447 We might also get here for a non-dummy OLD_TYPE variant with different
3448 qualifiers than those of NEW_TYPE, for instance in some cases of pointers
3449 to private record type elaboration (see the comments around the call to
3450 this routine in gnat_to_gnu_entity <E_Access_Type>). We have to merge
3451 the qualifiers in those cases too, to avoid accidentally discarding the
3452 initial set, and will often end up with OLD_TYPE == NEW_TYPE then. */
/* NOTE(review): assignment target (presumably new_type) is on a line
   missing from this view.  */
3454 = build_qualified_type (new_type
,
3455 TYPE_QUALS (old_type
) | TYPE_QUALS (new_type
));
3457 /* If old type and new type are identical, there is nothing to do. */
3458 if (old_type
== new_type
)
3461 /* Otherwise, first handle the simple case. */
3462 if (TREE_CODE (new_type
) != UNCONSTRAINED_ARRAY_TYPE
)
3464 tree new_ptr
, new_ref
;
3466 /* If pointer or reference already points to new type, nothing to do.
3467 This can happen as update_pointer_to can be invoked multiple times
3468 on the same couple of types because of the type variants. */
3469 if ((ptr
&& TREE_TYPE (ptr
) == new_type
)
3470 || (ref
&& TREE_TYPE (ref
) == new_type
))
3473 /* Chain PTR and its variants at the end. */
3474 new_ptr
= TYPE_POINTER_TO (new_type
);
3477 while (TYPE_NEXT_PTR_TO (new_ptr
))
3478 new_ptr
= TYPE_NEXT_PTR_TO (new_ptr
);
3479 TYPE_NEXT_PTR_TO (new_ptr
) = ptr
;
3482 TYPE_POINTER_TO (new_type
) = ptr
;
3484 /* Now adjust them. */
3485 for (; ptr
; ptr
= TYPE_NEXT_PTR_TO (ptr
))
3486 for (t
= TYPE_MAIN_VARIANT (ptr
); t
; t
= TYPE_NEXT_VARIANT (t
))
3488 TREE_TYPE (t
) = new_type
;
/* Also retarget the cached null-bounds constant, if any, so it
   designates the new type.  */
3489 if (TYPE_NULL_BOUNDS (t
))
3490 TREE_TYPE (TREE_OPERAND (TYPE_NULL_BOUNDS (t
), 0)) = new_type
;
3493 /* Chain REF and its variants at the end. */
3494 new_ref
= TYPE_REFERENCE_TO (new_type
);
3497 while (TYPE_NEXT_REF_TO (new_ref
))
3498 new_ref
= TYPE_NEXT_REF_TO (new_ref
);
3499 TYPE_NEXT_REF_TO (new_ref
) = ref
;
3502 TYPE_REFERENCE_TO (new_type
) = ref
;
3504 /* Now adjust them. */
3505 for (; ref
; ref
= TYPE_NEXT_REF_TO (ref
))
3506 for (t
= TYPE_MAIN_VARIANT (ref
); t
; t
= TYPE_NEXT_VARIANT (t
))
3507 TREE_TYPE (t
) = new_type
;
/* OLD_TYPE no longer owns the chains just transferred.  */
3509 TYPE_POINTER_TO (old_type
) = NULL_TREE
;
3510 TYPE_REFERENCE_TO (old_type
) = NULL_TREE
;
3513 /* Now deal with the unconstrained array case. In this case the pointer
3514 is actually a record where both fields are pointers to dummy nodes.
3515 Turn them into pointers to the correct types using update_pointer_to.
3516 Likewise for the pointer to the object record (thin pointer). */
3519 tree new_ptr
= TYPE_POINTER_TO (new_type
);
3521 gcc_assert (TYPE_IS_FAT_POINTER_P (ptr
));
3523 /* If PTR already points to NEW_TYPE, nothing to do. This can happen
3524 since update_pointer_to can be invoked multiple times on the same
3525 couple of types because of the type variants. */
3526 if (TYPE_UNCONSTRAINED_ARRAY (ptr
) == new_type
)
/* NOTE(review): the recursive update_pointer_to call heads for the two
   fat-pointer fields are on lines missing from this view; the argument
   lists below retarget the array-pointer and bounds-pointer fields.  */
3530 (TREE_TYPE (TREE_TYPE (TYPE_FIELDS (ptr
))),
3531 TREE_TYPE (TREE_TYPE (TYPE_FIELDS (new_ptr
))));
3534 (TREE_TYPE (TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (ptr
)))),
3535 TREE_TYPE (TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (new_ptr
)))));
3537 update_pointer_to (TYPE_OBJECT_RECORD_TYPE (old_type
),
3538 TYPE_OBJECT_RECORD_TYPE (new_type
));
3540 TYPE_POINTER_TO (old_type
) = NULL_TREE
;
3544 /* Convert EXPR, a pointer to a constrained array, into a pointer to an
3545 unconstrained one. This involves making or finding a template. */
/* NOTE(review): line-mangled partial extraction — the static return
   type, braces, else keywords and some "return t;" lines are missing
   from view.  Only comments are added; no code token is altered.  */
3548 convert_to_fat_pointer (tree type
, tree expr
)
/* TEMPLATE_TYPE is the bounds record designated by the fat pointer's
   2nd field; P_ARRAY_TYPE is its 1st (array pointer) field's type.  */
3550 tree template_type
= TREE_TYPE (TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (type
))));
3551 tree p_array_type
= TREE_TYPE (TYPE_FIELDS (type
));
3552 tree etype
= TREE_TYPE (expr
);
3554 VEC(constructor_elt
,gc
) *v
= VEC_alloc (constructor_elt
, gc
, 2);
3556 /* If EXPR is null, make a fat pointer that contains a null pointer to the
3557 array (compare_fat_pointers ensures that this is the full discriminant)
3558 and a valid pointer to the bounds. This latter property is necessary
3559 since the compiler can hoist the load of the bounds done through it. */
3560 if (integer_zerop (expr
))
3562 tree ptr_template_type
= TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (type
)));
3563 tree null_bounds
, t
;
/* Reuse the cached null-bounds object if one was already made.  */
3565 if (TYPE_NULL_BOUNDS (ptr_template_type
))
3566 null_bounds
= TYPE_NULL_BOUNDS (ptr_template_type
);
3569 /* The template type can still be dummy at this point so we build an
3570 empty constructor. The middle-end will fill it in with zeros. */
3571 t
= build_constructor (template_type
, NULL
);
3572 TREE_CONSTANT (t
) = TREE_STATIC (t
) = 1;
3573 null_bounds
= build_unary_op (ADDR_EXPR
, NULL_TREE
, t
);
/* Cache it so subsequent null fat pointers share the object.  */
3574 SET_TYPE_NULL_BOUNDS (ptr_template_type
, null_bounds
);
3577 CONSTRUCTOR_APPEND_ELT (v
, TYPE_FIELDS (type
),
3578 fold_convert (p_array_type
, null_pointer_node
));
3579 CONSTRUCTOR_APPEND_ELT (v
, DECL_CHAIN (TYPE_FIELDS (type
)), null_bounds
);
3580 t
= build_constructor (type
, v
);
3581 /* Do not set TREE_CONSTANT so as to force T to static memory. */
3582 TREE_CONSTANT (t
) = 0;
3583 TREE_STATIC (t
) = 1;
3588 /* If EXPR is a thin pointer, make template and data from the record. */
3589 if (TYPE_IS_THIN_POINTER_P (etype
))
3591 tree field
= TYPE_FIELDS (TREE_TYPE (etype
));
/* Protect EXPR against multiple evaluation below.  */
3593 expr
= gnat_protect_expr (expr
);
3594 if (TREE_CODE (expr
) == ADDR_EXPR
)
3595 expr
= TREE_OPERAND (expr
, 0);
3598 /* If we have a TYPE_UNCONSTRAINED_ARRAY attached to the RECORD_TYPE,
3599 the thin pointer value has been shifted so we first need to shift
3600 it back to get the template address. */
3601 if (TYPE_UNCONSTRAINED_ARRAY (TREE_TYPE (etype
)))
/* NOTE(review): the assignment head (presumably "expr =") and the byte
   offset operand of the shift are on lines missing from this view.  */
3603 = build_binary_op (POINTER_PLUS_EXPR
, etype
, expr
,
3604 fold_build1 (NEGATE_EXPR
, sizetype
,
3606 (DECL_CHAIN (field
))));
3607 expr
= build1 (INDIRECT_REF
, TREE_TYPE (etype
), expr
);
/* Template is the record's 1st field; the data is the 2nd, whose
   address becomes the fat pointer's array component.  */
3610 template_tree
= build_component_ref (expr
, NULL_TREE
, field
, false);
3611 expr
= build_unary_op (ADDR_EXPR
, NULL_TREE
,
3612 build_component_ref (expr
, NULL_TREE
,
3613 DECL_CHAIN (field
), false));
3616 /* Otherwise, build the constructor for the template. */
3618 template_tree
= build_template (template_type
, TREE_TYPE (etype
), expr
);
3620 /* The final result is a constructor for the fat pointer.
3622 If EXPR is an argument of a foreign convention subprogram, the type it
3623 points to is directly the component type. In this case, the expression
3624 type may not match the corresponding FIELD_DECL type at this point, so we
3625 call "convert" here to fix that up if necessary. This type consistency is
3626 required, for instance because it ensures that possible later folding of
3627 COMPONENT_REFs against this constructor always yields something of the
3628 same type as the initial reference.
3630 Note that the call to "build_template" above is still fine because it
3631 will only refer to the provided TEMPLATE_TYPE in this case. */
3632 CONSTRUCTOR_APPEND_ELT (v
, TYPE_FIELDS (type
),
3633 convert (p_array_type
, expr
));
3634 CONSTRUCTOR_APPEND_ELT (v
, DECL_CHAIN (TYPE_FIELDS (type
)),
3635 build_unary_op (ADDR_EXPR
, NULL_TREE
,
3637 return gnat_build_constructor (type
, v
);
3640 /* Create an expression whose value is that of EXPR,
3641 converted to type TYPE. The TREE_TYPE of the value
3642 is always TYPE. This function implements all reasonable
3643 conversions; callers should filter out those that are
3644 not permitted by the language being compiled. */
3647 convert (tree type
, tree expr
)
3649 tree etype
= TREE_TYPE (expr
);
3650 enum tree_code ecode
= TREE_CODE (etype
);
3651 enum tree_code code
= TREE_CODE (type
);
3653 /* If the expression is already of the right type, we are done. */
3657 /* If both input and output have padding and are of variable size, do this
3658 as an unchecked conversion. Likewise if one is a mere variant of the
3659 other, so we avoid a pointless unpad/repad sequence. */
3660 else if (code
== RECORD_TYPE
&& ecode
== RECORD_TYPE
3661 && TYPE_PADDING_P (type
) && TYPE_PADDING_P (etype
)
3662 && (!TREE_CONSTANT (TYPE_SIZE (type
))
3663 || !TREE_CONSTANT (TYPE_SIZE (etype
))
3664 || gnat_types_compatible_p (type
, etype
)
3665 || TYPE_NAME (TREE_TYPE (TYPE_FIELDS (type
)))
3666 == TYPE_NAME (TREE_TYPE (TYPE_FIELDS (etype
)))))
3669 /* If the output type has padding, convert to the inner type and make a
3670 constructor to build the record, unless a variable size is involved. */
3671 else if (code
== RECORD_TYPE
&& TYPE_PADDING_P (type
))
3673 VEC(constructor_elt
,gc
) *v
;
3675 /* If we previously converted from another type and our type is
3676 of variable size, remove the conversion to avoid the need for
3677 variable-sized temporaries. Likewise for a conversion between
3678 original and packable version. */
3679 if (TREE_CODE (expr
) == VIEW_CONVERT_EXPR
3680 && (!TREE_CONSTANT (TYPE_SIZE (type
))
3681 || (ecode
== RECORD_TYPE
3682 && TYPE_NAME (etype
)
3683 == TYPE_NAME (TREE_TYPE (TREE_OPERAND (expr
, 0))))))
3684 expr
= TREE_OPERAND (expr
, 0);
3686 /* If we are just removing the padding from expr, convert the original
3687 object if we have variable size in order to avoid the need for some
3688 variable-sized temporaries. Likewise if the padding is a variant
3689 of the other, so we avoid a pointless unpad/repad sequence. */
3690 if (TREE_CODE (expr
) == COMPONENT_REF
3691 && TYPE_IS_PADDING_P (TREE_TYPE (TREE_OPERAND (expr
, 0)))
3692 && (!TREE_CONSTANT (TYPE_SIZE (type
))
3693 || gnat_types_compatible_p (type
,
3694 TREE_TYPE (TREE_OPERAND (expr
, 0)))
3695 || (ecode
== RECORD_TYPE
3696 && TYPE_NAME (etype
)
3697 == TYPE_NAME (TREE_TYPE (TYPE_FIELDS (type
))))))
3698 return convert (type
, TREE_OPERAND (expr
, 0));
3700 /* If the inner type is of self-referential size and the expression type
3701 is a record, do this as an unchecked conversion. But first pad the
3702 expression if possible to have the same size on both sides. */
3703 if (ecode
== RECORD_TYPE
3704 && CONTAINS_PLACEHOLDER_P (DECL_SIZE (TYPE_FIELDS (type
))))
3706 if (TREE_CODE (TYPE_SIZE (etype
)) == INTEGER_CST
)
3707 expr
= convert (maybe_pad_type (etype
, TYPE_SIZE (type
), 0, Empty
,
3708 false, false, false, true),
3710 return unchecked_convert (type
, expr
, false);
3713 /* If we are converting between array types with variable size, do the
3714 final conversion as an unchecked conversion, again to avoid the need
3715 for some variable-sized temporaries. If valid, this conversion is
3716 very likely purely technical and without real effects. */
3717 if (ecode
== ARRAY_TYPE
3718 && TREE_CODE (TREE_TYPE (TYPE_FIELDS (type
))) == ARRAY_TYPE
3719 && !TREE_CONSTANT (TYPE_SIZE (etype
))
3720 && !TREE_CONSTANT (TYPE_SIZE (type
)))
3721 return unchecked_convert (type
,
3722 convert (TREE_TYPE (TYPE_FIELDS (type
)),
3726 v
= VEC_alloc (constructor_elt
, gc
, 1);
3727 CONSTRUCTOR_APPEND_ELT (v
, TYPE_FIELDS (type
),
3728 convert (TREE_TYPE (TYPE_FIELDS (type
)), expr
));
3729 return gnat_build_constructor (type
, v
);
3732 /* If the input type has padding, remove it and convert to the output type.
3733 The conditions ordering is arranged to ensure that the output type is not
3734 a padding type here, as it is not clear whether the conversion would
3735 always be correct if this was to happen. */
3736 else if (ecode
== RECORD_TYPE
&& TYPE_PADDING_P (etype
))
3740 /* If we have just converted to this padded type, just get the
3741 inner expression. */
3742 if (TREE_CODE (expr
) == CONSTRUCTOR
3743 && !VEC_empty (constructor_elt
, CONSTRUCTOR_ELTS (expr
))
3744 && VEC_index (constructor_elt
, CONSTRUCTOR_ELTS (expr
), 0)->index
3745 == TYPE_FIELDS (etype
))
3747 = VEC_index (constructor_elt
, CONSTRUCTOR_ELTS (expr
), 0)->value
;
3749 /* Otherwise, build an explicit component reference. */
3752 = build_component_ref (expr
, NULL_TREE
, TYPE_FIELDS (etype
), false);
3754 return convert (type
, unpadded
);
3757 /* If the input is a biased type, adjust first. */
3758 if (ecode
== INTEGER_TYPE
&& TYPE_BIASED_REPRESENTATION_P (etype
))
3759 return convert (type
, fold_build2 (PLUS_EXPR
, TREE_TYPE (etype
),
3760 fold_convert (TREE_TYPE (etype
),
3762 TYPE_MIN_VALUE (etype
)));
3764 /* If the input is a justified modular type, we need to extract the actual
3765 object before converting it to any other type with the exceptions of an
3766 unconstrained array or of a mere type variant. It is useful to avoid the
3767 extraction and conversion in the type variant case because it could end
3768 up replacing a VAR_DECL expr by a constructor and we might be about the
3769 take the address of the result. */
3770 if (ecode
== RECORD_TYPE
&& TYPE_JUSTIFIED_MODULAR_P (etype
)
3771 && code
!= UNCONSTRAINED_ARRAY_TYPE
3772 && TYPE_MAIN_VARIANT (type
) != TYPE_MAIN_VARIANT (etype
))
3773 return convert (type
, build_component_ref (expr
, NULL_TREE
,
3774 TYPE_FIELDS (etype
), false));
3776 /* If converting to a type that contains a template, convert to the data
3777 type and then build the template. */
3778 if (code
== RECORD_TYPE
&& TYPE_CONTAINS_TEMPLATE_P (type
))
3780 tree obj_type
= TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (type
)));
3781 VEC(constructor_elt
,gc
) *v
= VEC_alloc (constructor_elt
, gc
, 2);
3783 /* If the source already has a template, get a reference to the
3784 associated array only, as we are going to rebuild a template
3785 for the target type anyway. */
3786 expr
= maybe_unconstrained_array (expr
);
3788 CONSTRUCTOR_APPEND_ELT (v
, TYPE_FIELDS (type
),
3789 build_template (TREE_TYPE (TYPE_FIELDS (type
)),
3790 obj_type
, NULL_TREE
));
3791 CONSTRUCTOR_APPEND_ELT (v
, DECL_CHAIN (TYPE_FIELDS (type
)),
3792 convert (obj_type
, expr
));
3793 return gnat_build_constructor (type
, v
);
3796 /* There are some cases of expressions that we process specially. */
3797 switch (TREE_CODE (expr
))
3803 /* Just set its type here. For TRANSFORM_EXPR, we will do the actual
3804 conversion in gnat_expand_expr. NULL_EXPR does not represent
3805 and actual value, so no conversion is needed. */
3806 expr
= copy_node (expr
);
3807 TREE_TYPE (expr
) = type
;
3811 /* If we are converting a STRING_CST to another constrained array type,
3812 just make a new one in the proper type. */
3813 if (code
== ecode
&& AGGREGATE_TYPE_P (etype
)
3814 && !(TREE_CODE (TYPE_SIZE (etype
)) == INTEGER_CST
3815 && TREE_CODE (TYPE_SIZE (type
)) != INTEGER_CST
))
3817 expr
= copy_node (expr
);
3818 TREE_TYPE (expr
) = type
;
3824 /* If we are converting a VECTOR_CST to a mere variant type, just make
3825 a new one in the proper type. */
3826 if (code
== ecode
&& gnat_types_compatible_p (type
, etype
))
3828 expr
= copy_node (expr
);
3829 TREE_TYPE (expr
) = type
;
3834 /* If we are converting a CONSTRUCTOR to a mere variant type, just make
3835 a new one in the proper type. */
3836 if (code
== ecode
&& gnat_types_compatible_p (type
, etype
))
3838 expr
= copy_node (expr
);
3839 TREE_TYPE (expr
) = type
;
3840 CONSTRUCTOR_ELTS (expr
)
3841 = VEC_copy (constructor_elt
, gc
, CONSTRUCTOR_ELTS (expr
));
3845 /* Likewise for a conversion between original and packable version, or
3846 conversion between types of the same size and with the same list of
3847 fields, but we have to work harder to preserve type consistency. */
3849 && code
== RECORD_TYPE
3850 && (TYPE_NAME (type
) == TYPE_NAME (etype
)
3851 || tree_int_cst_equal (TYPE_SIZE (type
), TYPE_SIZE (etype
))))
3854 VEC(constructor_elt
,gc
) *e
= CONSTRUCTOR_ELTS (expr
);
3855 unsigned HOST_WIDE_INT len
= VEC_length (constructor_elt
, e
);
3856 VEC(constructor_elt
,gc
) *v
= VEC_alloc (constructor_elt
, gc
, len
);
3857 tree efield
= TYPE_FIELDS (etype
), field
= TYPE_FIELDS (type
);
3858 unsigned HOST_WIDE_INT idx
;
3861 /* Whether we need to clear TREE_CONSTANT et al. on the output
3862 constructor when we convert in place. */
3863 bool clear_constant
= false;
3865 FOR_EACH_CONSTRUCTOR_ELT(e
, idx
, index
, value
)
3867 constructor_elt
*elt
;
3868 /* We expect only simple constructors. */
3869 if (!SAME_FIELD_P (index
, efield
))
3871 /* The field must be the same. */
3872 if (!SAME_FIELD_P (efield
, field
))
3874 elt
= VEC_quick_push (constructor_elt
, v
, NULL
);
3876 elt
->value
= convert (TREE_TYPE (field
), value
);
3878 /* If packing has made this field a bitfield and the input
3879 value couldn't be emitted statically any more, we need to
3880 clear TREE_CONSTANT on our output. */
3882 && TREE_CONSTANT (expr
)
3883 && !CONSTRUCTOR_BITFIELD_P (efield
)
3884 && CONSTRUCTOR_BITFIELD_P (field
)
3885 && !initializer_constant_valid_for_bitfield_p (value
))
3886 clear_constant
= true;
3888 efield
= DECL_CHAIN (efield
);
3889 field
= DECL_CHAIN (field
);
3892 /* If we have been able to match and convert all the input fields
3893 to their output type, convert in place now. We'll fallback to a
3894 view conversion downstream otherwise. */
3897 expr
= copy_node (expr
);
3898 TREE_TYPE (expr
) = type
;
3899 CONSTRUCTOR_ELTS (expr
) = v
;
3901 TREE_CONSTANT (expr
) = TREE_STATIC (expr
) = 0;
3906 /* Likewise for a conversion between array type and vector type with a
3907 compatible representative array. */
3908 else if (code
== VECTOR_TYPE
3909 && ecode
== ARRAY_TYPE
3910 && gnat_types_compatible_p (TYPE_REPRESENTATIVE_ARRAY (type
),
3913 VEC(constructor_elt
,gc
) *e
= CONSTRUCTOR_ELTS (expr
);
3914 unsigned HOST_WIDE_INT len
= VEC_length (constructor_elt
, e
);
3915 VEC(constructor_elt
,gc
) *v
;
3916 unsigned HOST_WIDE_INT ix
;
3919 /* Build a VECTOR_CST from a *constant* array constructor. */
3920 if (TREE_CONSTANT (expr
))
3922 bool constant_p
= true;
3924 /* Iterate through elements and check if all constructor
3925 elements are *_CSTs. */
3926 FOR_EACH_CONSTRUCTOR_VALUE (e
, ix
, value
)
3927 if (!CONSTANT_CLASS_P (value
))
3934 return build_vector_from_ctor (type
,
3935 CONSTRUCTOR_ELTS (expr
));
3938 /* Otherwise, build a regular vector constructor. */
3939 v
= VEC_alloc (constructor_elt
, gc
, len
);
3940 FOR_EACH_CONSTRUCTOR_VALUE (e
, ix
, value
)
3942 constructor_elt
*elt
= VEC_quick_push (constructor_elt
, v
, NULL
);
3943 elt
->index
= NULL_TREE
;
3946 expr
= copy_node (expr
);
3947 TREE_TYPE (expr
) = type
;
3948 CONSTRUCTOR_ELTS (expr
) = v
;
3953 case UNCONSTRAINED_ARRAY_REF
:
3954 /* First retrieve the underlying array. */
3955 expr
= maybe_unconstrained_array (expr
);
3956 etype
= TREE_TYPE (expr
);
3957 ecode
= TREE_CODE (etype
);
3960 case VIEW_CONVERT_EXPR
:
3962 /* GCC 4.x is very sensitive to type consistency overall, and view
3963 conversions thus are very frequent. Even though just "convert"ing
3964 the inner operand to the output type is fine in most cases, it
3965 might expose unexpected input/output type mismatches in special
3966 circumstances so we avoid such recursive calls when we can. */
3967 tree op0
= TREE_OPERAND (expr
, 0);
3969 /* If we are converting back to the original type, we can just
3970 lift the input conversion. This is a common occurrence with
3971 switches back-and-forth amongst type variants. */
3972 if (type
== TREE_TYPE (op0
))
3975 /* Otherwise, if we're converting between two aggregate or vector
3976 types, we might be allowed to substitute the VIEW_CONVERT_EXPR
3977 target type in place or to just convert the inner expression. */
3978 if ((AGGREGATE_TYPE_P (type
) && AGGREGATE_TYPE_P (etype
))
3979 || (VECTOR_TYPE_P (type
) && VECTOR_TYPE_P (etype
)))
3981 /* If we are converting between mere variants, we can just
3982 substitute the VIEW_CONVERT_EXPR in place. */
3983 if (gnat_types_compatible_p (type
, etype
))
3984 return build1 (VIEW_CONVERT_EXPR
, type
, op0
);
3986 /* Otherwise, we may just bypass the input view conversion unless
3987 one of the types is a fat pointer, which is handled by
3988 specialized code below which relies on exact type matching. */
3989 else if (!TYPE_IS_FAT_POINTER_P (type
)
3990 && !TYPE_IS_FAT_POINTER_P (etype
))
3991 return convert (type
, op0
);
4001 /* Check for converting to a pointer to an unconstrained array. */
4002 if (TYPE_IS_FAT_POINTER_P (type
) && !TYPE_IS_FAT_POINTER_P (etype
))
4003 return convert_to_fat_pointer (type
, expr
);
4005 /* If we are converting between two aggregate or vector types that are mere
4006 variants, just make a VIEW_CONVERT_EXPR. Likewise when we are converting
4007 to a vector type from its representative array type. */
4008 else if ((code
== ecode
4009 && (AGGREGATE_TYPE_P (type
) || VECTOR_TYPE_P (type
))
4010 && gnat_types_compatible_p (type
, etype
))
4011 || (code
== VECTOR_TYPE
4012 && ecode
== ARRAY_TYPE
4013 && gnat_types_compatible_p (TYPE_REPRESENTATIVE_ARRAY (type
),
4015 return build1 (VIEW_CONVERT_EXPR
, type
, expr
);
4017 /* If we are converting between tagged types, try to upcast properly. */
4018 else if (ecode
== RECORD_TYPE
&& code
== RECORD_TYPE
4019 && TYPE_ALIGN_OK (etype
) && TYPE_ALIGN_OK (type
))
4021 tree child_etype
= etype
;
4023 tree field
= TYPE_FIELDS (child_etype
);
4024 if (DECL_NAME (field
) == parent_name_id
&& TREE_TYPE (field
) == type
)
4025 return build_component_ref (expr
, NULL_TREE
, field
, false);
4026 child_etype
= TREE_TYPE (field
);
4027 } while (TREE_CODE (child_etype
) == RECORD_TYPE
);
4030 /* If we are converting from a smaller form of record type back to it, just
4031 make a VIEW_CONVERT_EXPR. But first pad the expression to have the same
4032 size on both sides. */
4033 else if (ecode
== RECORD_TYPE
&& code
== RECORD_TYPE
4034 && smaller_form_type_p (etype
, type
))
4036 expr
= convert (maybe_pad_type (etype
, TYPE_SIZE (type
), 0, Empty
,
4037 false, false, false, true),
4039 return build1 (VIEW_CONVERT_EXPR
, type
, expr
);
4042 /* In all other cases of related types, make a NOP_EXPR. */
4043 else if (TYPE_MAIN_VARIANT (type
) == TYPE_MAIN_VARIANT (etype
))
4044 return fold_convert (type
, expr
);
4049 return fold_build1 (CONVERT_EXPR
, type
, expr
);
4052 if (TYPE_HAS_ACTUAL_BOUNDS_P (type
)
4053 && (ecode
== ARRAY_TYPE
|| ecode
== UNCONSTRAINED_ARRAY_TYPE
4054 || (ecode
== RECORD_TYPE
&& TYPE_CONTAINS_TEMPLATE_P (etype
))))
4055 return unchecked_convert (type
, expr
, false);
4056 else if (TYPE_BIASED_REPRESENTATION_P (type
))
4057 return fold_convert (type
,
4058 fold_build2 (MINUS_EXPR
, TREE_TYPE (type
),
4059 convert (TREE_TYPE (type
), expr
),
4060 TYPE_MIN_VALUE (type
)));
4062 /* ... fall through ... */
4066 /* If we are converting an additive expression to an integer type
4067 with lower precision, be wary of the optimization that can be
4068 applied by convert_to_integer. There are 2 problematic cases:
4069 - if the first operand was originally of a biased type,
4070 because we could be recursively called to convert it
4071 to an intermediate type and thus rematerialize the
4072 additive operator endlessly,
4073 - if the expression contains a placeholder, because an
4074 intermediate conversion that changes the sign could
4075 be inserted and thus introduce an artificial overflow
4076 at compile time when the placeholder is substituted. */
4077 if (code
== INTEGER_TYPE
4078 && ecode
== INTEGER_TYPE
4079 && TYPE_PRECISION (type
) < TYPE_PRECISION (etype
)
4080 && (TREE_CODE (expr
) == PLUS_EXPR
|| TREE_CODE (expr
) == MINUS_EXPR
))
4082 tree op0
= get_unwidened (TREE_OPERAND (expr
, 0), type
);
4084 if ((TREE_CODE (TREE_TYPE (op0
)) == INTEGER_TYPE
4085 && TYPE_BIASED_REPRESENTATION_P (TREE_TYPE (op0
)))
4086 || CONTAINS_PLACEHOLDER_P (expr
))
4087 return build1 (NOP_EXPR
, type
, expr
);
4090 return fold (convert_to_integer (type
, expr
));
4093 case REFERENCE_TYPE
:
4094 /* If converting between two thin pointers, adjust if needed to account
4095 for differing offsets from the base pointer, depending on whether
4096 there is a TYPE_UNCONSTRAINED_ARRAY attached to the record type. */
4097 if (TYPE_IS_THIN_POINTER_P (etype
) && TYPE_IS_THIN_POINTER_P (type
))
4100 = TYPE_UNCONSTRAINED_ARRAY (TREE_TYPE (etype
)) != NULL_TREE
4101 ? byte_position (DECL_CHAIN (TYPE_FIELDS (TREE_TYPE (etype
))))
4104 = TYPE_UNCONSTRAINED_ARRAY (TREE_TYPE (type
)) != NULL_TREE
4105 ? byte_position (DECL_CHAIN (TYPE_FIELDS (TREE_TYPE (type
))))
4107 tree byte_diff
= size_diffop (type_pos
, etype_pos
);
4109 expr
= build1 (NOP_EXPR
, type
, expr
);
4110 if (integer_zerop (byte_diff
))
4113 return build_binary_op (POINTER_PLUS_EXPR
, type
, expr
,
4114 fold_convert (sizetype
, byte_diff
));
4117 /* If converting fat pointer to normal or thin pointer, get the pointer
4118 to the array and then convert it. */
4119 if (TYPE_IS_FAT_POINTER_P (etype
))
4121 = build_component_ref (expr
, NULL_TREE
, TYPE_FIELDS (etype
), false);
4123 return fold (convert_to_pointer (type
, expr
));
4126 return fold (convert_to_real (type
, expr
));
4129 if (TYPE_JUSTIFIED_MODULAR_P (type
) && !AGGREGATE_TYPE_P (etype
))
4131 VEC(constructor_elt
,gc
) *v
= VEC_alloc (constructor_elt
, gc
, 1);
4133 CONSTRUCTOR_APPEND_ELT (v
, TYPE_FIELDS (type
),
4134 convert (TREE_TYPE (TYPE_FIELDS (type
)),
4136 return gnat_build_constructor (type
, v
);
4139 /* ... fall through ... */
4142 /* In these cases, assume the front-end has validated the conversion.
4143 If the conversion is valid, it will be a bit-wise conversion, so
4144 it can be viewed as an unchecked conversion. */
4145 return unchecked_convert (type
, expr
, false);
4148 /* This is a either a conversion between a tagged type and some
4149 subtype, which we have to mark as a UNION_TYPE because of
4150 overlapping fields or a conversion of an Unchecked_Union. */
4151 return unchecked_convert (type
, expr
, false);
4153 case UNCONSTRAINED_ARRAY_TYPE
:
4154 /* If the input is a VECTOR_TYPE, convert to the representative
4155 array type first. */
4156 if (ecode
== VECTOR_TYPE
)
4158 expr
= convert (TYPE_REPRESENTATIVE_ARRAY (etype
), expr
);
4159 etype
= TREE_TYPE (expr
);
4160 ecode
= TREE_CODE (etype
);
4163 /* If EXPR is a constrained array, take its address, convert it to a
4164 fat pointer, and then dereference it. Likewise if EXPR is a
4165 record containing both a template and a constrained array.
4166 Note that a record representing a justified modular type
4167 always represents a packed constrained array. */
4168 if (ecode
== ARRAY_TYPE
4169 || (ecode
== INTEGER_TYPE
&& TYPE_HAS_ACTUAL_BOUNDS_P (etype
))
4170 || (ecode
== RECORD_TYPE
&& TYPE_CONTAINS_TEMPLATE_P (etype
))
4171 || (ecode
== RECORD_TYPE
&& TYPE_JUSTIFIED_MODULAR_P (etype
)))
4174 (INDIRECT_REF
, NULL_TREE
,
4175 convert_to_fat_pointer (TREE_TYPE (type
),
4176 build_unary_op (ADDR_EXPR
,
4179 /* Do something very similar for converting one unconstrained
4180 array to another. */
4181 else if (ecode
== UNCONSTRAINED_ARRAY_TYPE
)
4183 build_unary_op (INDIRECT_REF
, NULL_TREE
,
4184 convert (TREE_TYPE (type
),
4185 build_unary_op (ADDR_EXPR
,
4191 return fold (convert_to_complex (type
, expr
));
4198 /* Create an expression whose value is that of EXPR converted to the common
4199 index type, which is sizetype. EXPR is supposed to be in the base type
4200 of the GNAT index type. Calling it is equivalent to doing
4202 convert (sizetype, expr)
4204 but we try to distribute the type conversion with the knowledge that EXPR
4205 cannot overflow in its type. This is a best-effort approach and we fall
4206 back to the above expression as soon as difficulties are encountered.
4208 This is necessary to overcome issues that arise when the GNAT base index
4209 type and the GCC common index type (sizetype) don't have the same size,
4210 which is quite frequent on 64-bit architectures. In this case, and if
4211 the GNAT base index type is signed but the iteration type of the loop has
4212 been forced to unsigned, the loop scalar evolution engine cannot compute
4213 a simple evolution for the general induction variables associated with the
4214 array indices, because it will preserve the wrap-around semantics in the
4215 unsigned type of their "inner" part. As a result, many loop optimizations
4218 The solution is to use a special (basic) induction variable that is at
4219 least as large as sizetype, and to express the aforementioned general
4220 induction variables in terms of this induction variable, eliminating
4221 the problematic intermediate truncation to the GNAT base index type.
4222 This is possible as long as the original expression doesn't overflow
4223 and if the middle-end hasn't introduced artificial overflows in the
4224 course of the various simplification it can make to the expression. */
4227 convert_to_index_type (tree expr
)
4229 enum tree_code code
= TREE_CODE (expr
);
4230 tree type
= TREE_TYPE (expr
);
4232 /* If the type is unsigned, overflow is allowed so we cannot be sure that
4233 EXPR doesn't overflow. Keep it simple if optimization is disabled. */
4234 if (TYPE_UNSIGNED (type
) || !optimize
)
4235 return convert (sizetype
, expr
);
4240 /* The main effect of the function: replace a loop parameter with its
4241 associated special induction variable. */
4242 if (DECL_LOOP_PARM_P (expr
) && DECL_INDUCTION_VAR (expr
))
4243 expr
= DECL_INDUCTION_VAR (expr
);
4248 tree otype
= TREE_TYPE (TREE_OPERAND (expr
, 0));
4249 /* Bail out as soon as we suspect some sort of type frobbing. */
4250 if (TYPE_PRECISION (type
) != TYPE_PRECISION (otype
)
4251 || TYPE_UNSIGNED (type
) != TYPE_UNSIGNED (otype
))
4255 /* ... fall through ... */
4257 case NON_LVALUE_EXPR
:
4258 return fold_build1 (code
, sizetype
,
4259 convert_to_index_type (TREE_OPERAND (expr
, 0)));
4264 return fold_build2 (code
, sizetype
,
4265 convert_to_index_type (TREE_OPERAND (expr
, 0)),
4266 convert_to_index_type (TREE_OPERAND (expr
, 1)));
4269 return fold_build2 (code
, sizetype
, TREE_OPERAND (expr
, 0),
4270 convert_to_index_type (TREE_OPERAND (expr
, 1)));
4273 return fold_build3 (code
, sizetype
, TREE_OPERAND (expr
, 0),
4274 convert_to_index_type (TREE_OPERAND (expr
, 1)),
4275 convert_to_index_type (TREE_OPERAND (expr
, 2)));
4281 return convert (sizetype
, expr
);
4284 /* Remove all conversions that are done in EXP. This includes converting
4285 from a padded type or to a justified modular type. If TRUE_ADDRESS
4286 is true, always return the address of the containing object even if
4287 the address is not bit-aligned. */
4290 remove_conversions (tree exp
, bool true_address
)
4292 switch (TREE_CODE (exp
))
4296 && TREE_CODE (TREE_TYPE (exp
)) == RECORD_TYPE
4297 && TYPE_JUSTIFIED_MODULAR_P (TREE_TYPE (exp
)))
4299 remove_conversions (VEC_index (constructor_elt
,
4300 CONSTRUCTOR_ELTS (exp
), 0)->value
,
4305 if (TYPE_IS_PADDING_P (TREE_TYPE (TREE_OPERAND (exp
, 0))))
4306 return remove_conversions (TREE_OPERAND (exp
, 0), true_address
);
4310 case VIEW_CONVERT_EXPR
:
4311 case NON_LVALUE_EXPR
:
4312 return remove_conversions (TREE_OPERAND (exp
, 0), true_address
);
4321 /* If EXP's type is an UNCONSTRAINED_ARRAY_TYPE, return an expression that
4322 refers to the underlying array. If it has TYPE_CONTAINS_TEMPLATE_P,
4323 likewise return an expression pointing to the underlying array. */
4326 maybe_unconstrained_array (tree exp
)
4328 enum tree_code code
= TREE_CODE (exp
);
4329 tree type
= TREE_TYPE (exp
);
4331 switch (TREE_CODE (type
))
4333 case UNCONSTRAINED_ARRAY_TYPE
:
4334 if (code
== UNCONSTRAINED_ARRAY_REF
)
4336 const bool read_only
= TREE_READONLY (exp
);
4337 const bool no_trap
= TREE_THIS_NOTRAP (exp
);
4339 exp
= TREE_OPERAND (exp
, 0);
4340 type
= TREE_TYPE (exp
);
4342 if (TREE_CODE (exp
) == COND_EXPR
)
4345 = build_unary_op (INDIRECT_REF
, NULL_TREE
,
4346 build_component_ref (TREE_OPERAND (exp
, 1),
4351 = build_unary_op (INDIRECT_REF
, NULL_TREE
,
4352 build_component_ref (TREE_OPERAND (exp
, 2),
4357 exp
= build3 (COND_EXPR
,
4358 TREE_TYPE (TREE_TYPE (TYPE_FIELDS (type
))),
4359 TREE_OPERAND (exp
, 0), op1
, op2
);
4363 exp
= build_unary_op (INDIRECT_REF
, NULL_TREE
,
4364 build_component_ref (exp
, NULL_TREE
,
4367 TREE_READONLY (exp
) = read_only
;
4368 TREE_THIS_NOTRAP (exp
) = no_trap
;
4372 else if (code
== NULL_EXPR
)
4373 exp
= build1 (NULL_EXPR
,
4374 TREE_TYPE (TREE_TYPE (TYPE_FIELDS (TREE_TYPE (type
)))),
4375 TREE_OPERAND (exp
, 0));
4379 /* If this is a padded type and it contains a template, convert to the
4380 unpadded type first. */
4381 if (TYPE_PADDING_P (type
)
4382 && TREE_CODE (TREE_TYPE (TYPE_FIELDS (type
))) == RECORD_TYPE
4383 && TYPE_CONTAINS_TEMPLATE_P (TREE_TYPE (TYPE_FIELDS (type
))))
4385 exp
= convert (TREE_TYPE (TYPE_FIELDS (type
)), exp
);
4386 type
= TREE_TYPE (exp
);
4389 if (TYPE_CONTAINS_TEMPLATE_P (type
))
4391 exp
= build_component_ref (exp
, NULL_TREE
,
4392 DECL_CHAIN (TYPE_FIELDS (type
)),
4394 type
= TREE_TYPE (exp
);
4396 /* If the array type is padded, convert to the unpadded type. */
4397 if (TYPE_IS_PADDING_P (type
))
4398 exp
= convert (TREE_TYPE (TYPE_FIELDS (type
)), exp
);
4409 /* If EXP's type is a VECTOR_TYPE, return EXP converted to the associated
4410 TYPE_REPRESENTATIVE_ARRAY. */
4413 maybe_vector_array (tree exp
)
4415 tree etype
= TREE_TYPE (exp
);
4417 if (VECTOR_TYPE_P (etype
))
4418 exp
= convert (TYPE_REPRESENTATIVE_ARRAY (etype
), exp
);
4423 /* Return true if EXPR is an expression that can be folded as an operand
4424 of a VIEW_CONVERT_EXPR. See ada-tree.h for a complete rationale. */
4427 can_fold_for_view_convert_p (tree expr
)
4431 /* The folder will fold NOP_EXPRs between integral types with the same
4432 precision (in the middle-end's sense). We cannot allow it if the
4433 types don't have the same precision in the Ada sense as well. */
4434 if (TREE_CODE (expr
) != NOP_EXPR
)
4437 t1
= TREE_TYPE (expr
);
4438 t2
= TREE_TYPE (TREE_OPERAND (expr
, 0));
4440 /* Defer to the folder for non-integral conversions. */
4441 if (!(INTEGRAL_TYPE_P (t1
) && INTEGRAL_TYPE_P (t2
)))
4444 /* Only fold conversions that preserve both precisions. */
4445 if (TYPE_PRECISION (t1
) == TYPE_PRECISION (t2
)
4446 && operand_equal_p (rm_size (t1
), rm_size (t2
), 0))
4452 /* Return an expression that does an unchecked conversion of EXPR to TYPE.
4453 If NOTRUNC_P is true, truncation operations should be suppressed.
4455 Special care is required with (source or target) integral types whose
4456 precision is not equal to their size, to make sure we fetch or assign
4457 the value bits whose location might depend on the endianness, e.g.
4459 Rmsize : constant := 8;
4460 subtype Int is Integer range 0 .. 2 ** Rmsize - 1;
4462 type Bit_Array is array (1 .. Rmsize) of Boolean;
4463 pragma Pack (Bit_Array);
4465 function To_Bit_Array is new Unchecked_Conversion (Int, Bit_Array);
4467 Value : Int := 2#1000_0001#;
4468 Vbits : Bit_Array := To_Bit_Array (Value);
4470 we expect the 8 bits at Vbits'Address to always contain Value, while
4471 their original location depends on the endianness, at Value'Address
4472 on a little-endian architecture but not on a big-endian one. */
4475 unchecked_convert (tree type
, tree expr
, bool notrunc_p
)
4477 tree etype
= TREE_TYPE (expr
);
4478 enum tree_code ecode
= TREE_CODE (etype
);
4479 enum tree_code code
= TREE_CODE (type
);
4482 /* If the expression is already of the right type, we are done. */
4486 /* If both types types are integral just do a normal conversion.
4487 Likewise for a conversion to an unconstrained array. */
4488 if ((((INTEGRAL_TYPE_P (type
)
4489 && !(code
== INTEGER_TYPE
&& TYPE_VAX_FLOATING_POINT_P (type
)))
4490 || (POINTER_TYPE_P (type
) && !TYPE_IS_THIN_POINTER_P (type
))
4491 || (code
== RECORD_TYPE
&& TYPE_JUSTIFIED_MODULAR_P (type
)))
4492 && ((INTEGRAL_TYPE_P (etype
)
4493 && !(ecode
== INTEGER_TYPE
&& TYPE_VAX_FLOATING_POINT_P (etype
)))
4494 || (POINTER_TYPE_P (etype
) && !TYPE_IS_THIN_POINTER_P (etype
))
4495 || (ecode
== RECORD_TYPE
&& TYPE_JUSTIFIED_MODULAR_P (etype
))))
4496 || code
== UNCONSTRAINED_ARRAY_TYPE
)
4498 if (ecode
== INTEGER_TYPE
&& TYPE_BIASED_REPRESENTATION_P (etype
))
4500 tree ntype
= copy_type (etype
);
4501 TYPE_BIASED_REPRESENTATION_P (ntype
) = 0;
4502 TYPE_MAIN_VARIANT (ntype
) = ntype
;
4503 expr
= build1 (NOP_EXPR
, ntype
, expr
);
4506 if (code
== INTEGER_TYPE
&& TYPE_BIASED_REPRESENTATION_P (type
))
4508 tree rtype
= copy_type (type
);
4509 TYPE_BIASED_REPRESENTATION_P (rtype
) = 0;
4510 TYPE_MAIN_VARIANT (rtype
) = rtype
;
4511 expr
= convert (rtype
, expr
);
4512 expr
= build1 (NOP_EXPR
, type
, expr
);
4515 expr
= convert (type
, expr
);
4518 /* If we are converting to an integral type whose precision is not equal
4519 to its size, first unchecked convert to a record type that contains an
4520 field of the given precision. Then extract the field. */
4521 else if (INTEGRAL_TYPE_P (type
)
4522 && TYPE_RM_SIZE (type
)
4523 && 0 != compare_tree_int (TYPE_RM_SIZE (type
),
4524 GET_MODE_BITSIZE (TYPE_MODE (type
))))
4526 tree rec_type
= make_node (RECORD_TYPE
);
4527 unsigned HOST_WIDE_INT prec
= TREE_INT_CST_LOW (TYPE_RM_SIZE (type
));
4528 tree field_type
, field
;
4530 if (TYPE_UNSIGNED (type
))
4531 field_type
= make_unsigned_type (prec
);
4533 field_type
= make_signed_type (prec
);
4534 SET_TYPE_RM_SIZE (field_type
, TYPE_RM_SIZE (type
));
4536 field
= create_field_decl (get_identifier ("OBJ"), field_type
, rec_type
,
4537 NULL_TREE
, NULL_TREE
, 1, 0);
4539 TYPE_FIELDS (rec_type
) = field
;
4540 layout_type (rec_type
);
4542 expr
= unchecked_convert (rec_type
, expr
, notrunc_p
);
4543 expr
= build_component_ref (expr
, NULL_TREE
, field
, false);
4544 expr
= fold_build1 (NOP_EXPR
, type
, expr
);
4547 /* Similarly if we are converting from an integral type whose precision is
4548 not equal to its size, first copy into a field of the given precision
4549 and unchecked convert the record type. */
4550 else if (INTEGRAL_TYPE_P (etype
)
4551 && TYPE_RM_SIZE (etype
)
4552 && 0 != compare_tree_int (TYPE_RM_SIZE (etype
),
4553 GET_MODE_BITSIZE (TYPE_MODE (etype
))))
4555 tree rec_type
= make_node (RECORD_TYPE
);
4556 unsigned HOST_WIDE_INT prec
= TREE_INT_CST_LOW (TYPE_RM_SIZE (etype
));
4557 VEC(constructor_elt
,gc
) *v
= VEC_alloc (constructor_elt
, gc
, 1);
4558 tree field_type
, field
;
4560 if (TYPE_UNSIGNED (etype
))
4561 field_type
= make_unsigned_type (prec
);
4563 field_type
= make_signed_type (prec
);
4564 SET_TYPE_RM_SIZE (field_type
, TYPE_RM_SIZE (etype
));
4566 field
= create_field_decl (get_identifier ("OBJ"), field_type
, rec_type
,
4567 NULL_TREE
, NULL_TREE
, 1, 0);
4569 TYPE_FIELDS (rec_type
) = field
;
4570 layout_type (rec_type
);
4572 expr
= fold_build1 (NOP_EXPR
, field_type
, expr
);
4573 CONSTRUCTOR_APPEND_ELT (v
, field
, expr
);
4574 expr
= gnat_build_constructor (rec_type
, v
);
4575 expr
= unchecked_convert (type
, expr
, notrunc_p
);
4578 /* If we are converting from a scalar type to a type with a different size,
4579 we need to pad to have the same size on both sides.
4581 ??? We cannot do it unconditionally because unchecked conversions are
4582 used liberally by the front-end to implement polymorphism, e.g. in:
4584 S191s : constant ada__tags__addr_ptr := ada__tags__addr_ptr!(S190s);
4585 return p___size__4 (p__object!(S191s.all));
4587 so we skip all expressions that are references. */
4588 else if (!REFERENCE_CLASS_P (expr
)
4589 && !AGGREGATE_TYPE_P (etype
)
4590 && TREE_CODE (TYPE_SIZE (type
)) == INTEGER_CST
4591 && (c
= tree_int_cst_compare (TYPE_SIZE (etype
), TYPE_SIZE (type
))))
4595 expr
= convert (maybe_pad_type (etype
, TYPE_SIZE (type
), 0, Empty
,
4596 false, false, false, true),
4598 expr
= unchecked_convert (type
, expr
, notrunc_p
);
4602 tree rec_type
= maybe_pad_type (type
, TYPE_SIZE (etype
), 0, Empty
,
4603 false, false, false, true);
4604 expr
= unchecked_convert (rec_type
, expr
, notrunc_p
);
4605 expr
= build_component_ref (expr
, NULL_TREE
, TYPE_FIELDS (rec_type
),
4610 /* We have a special case when we are converting between two unconstrained
4611 array types. In that case, take the address, convert the fat pointer
4612 types, and dereference. */
4613 else if (ecode
== code
&& code
== UNCONSTRAINED_ARRAY_TYPE
)
4614 expr
= build_unary_op (INDIRECT_REF
, NULL_TREE
,
4615 build1 (VIEW_CONVERT_EXPR
, TREE_TYPE (type
),
4616 build_unary_op (ADDR_EXPR
, NULL_TREE
,
4619 /* Another special case is when we are converting to a vector type from its
4620 representative array type; this a regular conversion. */
4621 else if (code
== VECTOR_TYPE
4622 && ecode
== ARRAY_TYPE
4623 && gnat_types_compatible_p (TYPE_REPRESENTATIVE_ARRAY (type
),
4625 expr
= convert (type
, expr
);
4629 expr
= maybe_unconstrained_array (expr
);
4630 etype
= TREE_TYPE (expr
);
4631 ecode
= TREE_CODE (etype
);
4632 if (can_fold_for_view_convert_p (expr
))
4633 expr
= fold_build1 (VIEW_CONVERT_EXPR
, type
, expr
);
4635 expr
= build1 (VIEW_CONVERT_EXPR
, type
, expr
);
4638 /* If the result is an integral type whose precision is not equal to its
4639 size, sign- or zero-extend the result. We need not do this if the input
4640 is an integral type of the same precision and signedness or if the output
4641 is a biased type or if both the input and output are unsigned. */
4643 && INTEGRAL_TYPE_P (type
) && TYPE_RM_SIZE (type
)
4644 && !(code
== INTEGER_TYPE
&& TYPE_BIASED_REPRESENTATION_P (type
))
4645 && 0 != compare_tree_int (TYPE_RM_SIZE (type
),
4646 GET_MODE_BITSIZE (TYPE_MODE (type
)))
4647 && !(INTEGRAL_TYPE_P (etype
)
4648 && TYPE_UNSIGNED (type
) == TYPE_UNSIGNED (etype
)
4649 && operand_equal_p (TYPE_RM_SIZE (type
),
4650 (TYPE_RM_SIZE (etype
) != 0
4651 ? TYPE_RM_SIZE (etype
) : TYPE_SIZE (etype
)),
4653 && !(TYPE_UNSIGNED (type
) && TYPE_UNSIGNED (etype
)))
4656 = gnat_type_for_mode (TYPE_MODE (type
), TYPE_UNSIGNED (type
));
4658 = convert (base_type
,
4659 size_binop (MINUS_EXPR
,
4661 (GET_MODE_BITSIZE (TYPE_MODE (type
))),
4662 TYPE_RM_SIZE (type
)));
4665 build_binary_op (RSHIFT_EXPR
, base_type
,
4666 build_binary_op (LSHIFT_EXPR
, base_type
,
4667 convert (base_type
, expr
),
4672 /* An unchecked conversion should never raise Constraint_Error. The code
4673 below assumes that GCC's conversion routines overflow the same way that
4674 the underlying hardware does. This is probably true. In the rare case
4675 when it is false, we can rely on the fact that such conversions are
4676 erroneous anyway. */
4677 if (TREE_CODE (expr
) == INTEGER_CST
)
4678 TREE_OVERFLOW (expr
) = 0;
4680 /* If the sizes of the types differ and this is an VIEW_CONVERT_EXPR,
4681 show no longer constant. */
4682 if (TREE_CODE (expr
) == VIEW_CONVERT_EXPR
4683 && !operand_equal_p (TYPE_SIZE_UNIT (type
), TYPE_SIZE_UNIT (etype
),
4685 TREE_CONSTANT (expr
) = 0;
4690 /* Return the appropriate GCC tree code for the specified GNAT_TYPE,
4691 the latter being a record type as predicated by Is_Record_Type. */
4694 tree_code_for_record_type (Entity_Id gnat_type
)
4696 Node_Id component_list
, component
;
4698 /* Return UNION_TYPE if it's an Unchecked_Union whose non-discriminant
4699 fields are all in the variant part. Otherwise, return RECORD_TYPE. */
4700 if (!Is_Unchecked_Union (gnat_type
))
4703 gnat_type
= Implementation_Base_Type (gnat_type
);
4705 = Component_List (Type_Definition (Declaration_Node (gnat_type
)));
4707 for (component
= First_Non_Pragma (Component_Items (component_list
));
4708 Present (component
);
4709 component
= Next_Non_Pragma (component
))
4710 if (Ekind (Defining_Entity (component
)) == E_Component
)
4716 /* Return true if GNAT_TYPE is a "double" floating-point type, i.e. whose
4717 size is equal to 64 bits, or an array of such a type. Set ALIGN_CLAUSE
4718 according to the presence of an alignment clause on the type or, if it
4719 is an array, on the component type. */
4722 is_double_float_or_array (Entity_Id gnat_type
, bool *align_clause
)
4724 gnat_type
= Underlying_Type (gnat_type
);
4726 *align_clause
= Present (Alignment_Clause (gnat_type
));
4728 if (Is_Array_Type (gnat_type
))
4730 gnat_type
= Underlying_Type (Component_Type (gnat_type
));
4731 if (Present (Alignment_Clause (gnat_type
)))
4732 *align_clause
= true;
4735 if (!Is_Floating_Point_Type (gnat_type
))
4738 if (UI_To_Int (Esize (gnat_type
)) != 64)
4744 /* Return true if GNAT_TYPE is a "double" or larger scalar type, i.e. whose
4745 size is greater or equal to 64 bits, or an array of such a type. Set
4746 ALIGN_CLAUSE according to the presence of an alignment clause on the
4747 type or, if it is an array, on the component type. */
4750 is_double_scalar_or_array (Entity_Id gnat_type
, bool *align_clause
)
4752 gnat_type
= Underlying_Type (gnat_type
);
4754 *align_clause
= Present (Alignment_Clause (gnat_type
));
4756 if (Is_Array_Type (gnat_type
))
4758 gnat_type
= Underlying_Type (Component_Type (gnat_type
));
4759 if (Present (Alignment_Clause (gnat_type
)))
4760 *align_clause
= true;
4763 if (!Is_Scalar_Type (gnat_type
))
4766 if (UI_To_Int (Esize (gnat_type
)) < 64)
4772 /* Return true if GNU_TYPE is suitable as the type of a non-aliased
4773 component of an aggregate type. */
4776 type_for_nonaliased_component_p (tree gnu_type
)
4778 /* If the type is passed by reference, we may have pointers to the
4779 component so it cannot be made non-aliased. */
4780 if (must_pass_by_ref (gnu_type
) || default_pass_by_ref (gnu_type
))
4783 /* We used to say that any component of aggregate type is aliased
4784 because the front-end may take 'Reference of it. The front-end
4785 has been enhanced in the meantime so as to use a renaming instead
4786 in most cases, but the back-end can probably take the address of
4787 such a component too so we go for the conservative stance.
4789 For instance, we might need the address of any array type, even
4790 if normally passed by copy, to construct a fat pointer if the
4791 component is used as an actual for an unconstrained formal.
4793 Likewise for record types: even if a specific record subtype is
4794 passed by copy, the parent type might be passed by ref (e.g. if
4795 it's of variable size) and we might take the address of a child
4796 component to pass to a parent formal. We have no way to check
4797 for such conditions here. */
4798 if (AGGREGATE_TYPE_P (gnu_type
))
4804 /* Return true if TYPE is a smaller form of ORIG_TYPE. */
4807 smaller_form_type_p (tree type
, tree orig_type
)
4811 /* We're not interested in variants here. */
4812 if (TYPE_MAIN_VARIANT (type
) == TYPE_MAIN_VARIANT (orig_type
))
4815 /* Like a variant, a packable version keeps the original TYPE_NAME. */
4816 if (TYPE_NAME (type
) != TYPE_NAME (orig_type
))
4819 size
= TYPE_SIZE (type
);
4820 osize
= TYPE_SIZE (orig_type
);
4822 if (!(TREE_CODE (size
) == INTEGER_CST
&& TREE_CODE (osize
) == INTEGER_CST
))
4825 return tree_int_cst_lt (size
, osize
) != 0;
4828 /* Perform final processing on global variables. */
4830 static GTY (()) tree dummy_global
;
4833 gnat_write_global_declarations (void)
4838 /* If we have declared types as used at the global level, insert them in
4839 the global hash table. We use a dummy variable for this purpose. */
4840 if (!VEC_empty (tree
, types_used_by_cur_var_decl
))
4842 struct varpool_node
*node
;
4844 = build_decl (BUILTINS_LOCATION
, VAR_DECL
, NULL_TREE
, void_type_node
);
4845 TREE_STATIC (dummy_global
) = 1;
4846 TREE_ASM_WRITTEN (dummy_global
) = 1;
4847 node
= varpool_node (dummy_global
);
4848 node
->force_output
= 1;
4849 varpool_mark_needed_node (node
);
4851 while (!VEC_empty (tree
, types_used_by_cur_var_decl
))
4853 tree t
= VEC_pop (tree
, types_used_by_cur_var_decl
);
4854 types_used_by_var_decl_insert (t
, dummy_global
);
4858 /* Output debug information for all global type declarations first. This
4859 ensures that global types whose compilation hasn't been finalized yet,
4860 for example pointers to Taft amendment types, have their compilation
4861 finalized in the right context. */
4862 FOR_EACH_VEC_ELT (tree
, global_decls
, i
, iter
)
4863 if (TREE_CODE (iter
) == TYPE_DECL
)
4864 debug_hooks
->global_decl (iter
);
4866 /* Proceed to optimize and emit assembly.
4867 FIXME: shouldn't be the front end's responsibility to call this. */
4868 cgraph_finalize_compilation_unit ();
4870 /* After cgraph has had a chance to emit everything that's going to
4871 be emitted, output debug information for the rest of globals. */
4874 timevar_push (TV_SYMOUT
);
4875 FOR_EACH_VEC_ELT (tree
, global_decls
, i
, iter
)
4876 if (TREE_CODE (iter
) != TYPE_DECL
)
4877 debug_hooks
->global_decl (iter
);
4878 timevar_pop (TV_SYMOUT
);
4882 /* ************************************************************************
4883 * * GCC builtins support *
4884 * ************************************************************************ */
4886 /* The general scheme is fairly simple:
4888 For each builtin function/type to be declared, gnat_install_builtins calls
4889 internal facilities which eventually get to gnat_push_decl, which in turn
4890 tracks the so declared builtin function decls in the 'builtin_decls' global
4891 datastructure. When an Intrinsic subprogram declaration is processed, we
4892 search this global datastructure to retrieve the associated BUILT_IN DECL
4895 /* Search the chain of currently available builtin declarations for a node
4896 corresponding to function NAME (an IDENTIFIER_NODE). Return the first node
4897 found, if any, or NULL_TREE otherwise. */
4899 builtin_decl_for (tree name
)
4904 FOR_EACH_VEC_ELT (tree
, builtin_decls
, i
, decl
)
4905 if (DECL_NAME (decl
) == name
)
4911 /* The code below eventually exposes gnat_install_builtins, which declares
4912 the builtin types and functions we might need, either internally or as
4913 user accessible facilities.
4915 ??? This is a first implementation shot, still in rough shape. It is
4916 heavily inspired from the "C" family implementation, with chunks copied
4917 verbatim from there.
4919 Two obvious TODO candidates are
4920 o Use a more efficient name/decl mapping scheme
4921 o Devise a middle-end infrastructure to avoid having to copy
4922 pieces between front-ends. */
4924 /* ----------------------------------------------------------------------- *
4925 * BUILTIN ELEMENTARY TYPES *
4926 * ----------------------------------------------------------------------- */
4928 /* Standard data types to be used in builtin argument declarations. */
4932 CTI_SIGNED_SIZE_TYPE
, /* For format checking only. */
4934 CTI_CONST_STRING_TYPE
,
4939 static tree c_global_trees
[CTI_MAX
];
4941 #define signed_size_type_node c_global_trees[CTI_SIGNED_SIZE_TYPE]
4942 #define string_type_node c_global_trees[CTI_STRING_TYPE]
4943 #define const_string_type_node c_global_trees[CTI_CONST_STRING_TYPE]
/* ??? In addition to some attribute handlers, we currently don't support a
   (small) number of builtin-types, which in turn inhibits support for a
   number of builtin functions.  */
4948 #define wint_type_node void_type_node
4949 #define intmax_type_node void_type_node
4950 #define uintmax_type_node void_type_node
4952 /* Build the void_list_node (void_type_node having been created). */
4955 build_void_list_node (void)
4957 tree t
= build_tree_list (NULL_TREE
, void_type_node
);
4961 /* Used to help initialize the builtin-types.def table. When a type of
4962 the correct size doesn't exist, use error_mark_node instead of NULL.
4963 The later results in segfaults even when a decl using the type doesn't
4967 builtin_type_for_size (int size
, bool unsignedp
)
4969 tree type
= gnat_type_for_size (size
, unsignedp
);
4970 return type
? type
: error_mark_node
;
4973 /* Build/push the elementary type decls that builtin functions/types
4977 install_builtin_elementary_types (void)
4979 signed_size_type_node
= gnat_signed_type (size_type_node
);
4980 pid_type_node
= integer_type_node
;
4981 void_list_node
= build_void_list_node ();
4983 string_type_node
= build_pointer_type (char_type_node
);
4984 const_string_type_node
4985 = build_pointer_type (build_qualified_type
4986 (char_type_node
, TYPE_QUAL_CONST
));
4989 /* ----------------------------------------------------------------------- *
4990 * BUILTIN FUNCTION TYPES *
4991 * ----------------------------------------------------------------------- */
4993 /* Now, builtin function types per se. */
4997 #define DEF_PRIMITIVE_TYPE(NAME, VALUE) NAME,
4998 #define DEF_FUNCTION_TYPE_0(NAME, RETURN) NAME,
4999 #define DEF_FUNCTION_TYPE_1(NAME, RETURN, ARG1) NAME,
5000 #define DEF_FUNCTION_TYPE_2(NAME, RETURN, ARG1, ARG2) NAME,
5001 #define DEF_FUNCTION_TYPE_3(NAME, RETURN, ARG1, ARG2, ARG3) NAME,
5002 #define DEF_FUNCTION_TYPE_4(NAME, RETURN, ARG1, ARG2, ARG3, ARG4) NAME,
5003 #define DEF_FUNCTION_TYPE_5(NAME, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5) NAME,
5004 #define DEF_FUNCTION_TYPE_6(NAME, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, ARG6) NAME,
5005 #define DEF_FUNCTION_TYPE_7(NAME, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, ARG6, ARG7) NAME,
5006 #define DEF_FUNCTION_TYPE_VAR_0(NAME, RETURN) NAME,
5007 #define DEF_FUNCTION_TYPE_VAR_1(NAME, RETURN, ARG1) NAME,
5008 #define DEF_FUNCTION_TYPE_VAR_2(NAME, RETURN, ARG1, ARG2) NAME,
5009 #define DEF_FUNCTION_TYPE_VAR_3(NAME, RETURN, ARG1, ARG2, ARG3) NAME,
5010 #define DEF_FUNCTION_TYPE_VAR_4(NAME, RETURN, ARG1, ARG2, ARG3, ARG4) NAME,
5011 #define DEF_FUNCTION_TYPE_VAR_5(NAME, RETURN, ARG1, ARG2, ARG3, ARG4, ARG6) \
5013 #define DEF_POINTER_TYPE(NAME, TYPE) NAME,
5014 #include "builtin-types.def"
5015 #undef DEF_PRIMITIVE_TYPE
5016 #undef DEF_FUNCTION_TYPE_0
5017 #undef DEF_FUNCTION_TYPE_1
5018 #undef DEF_FUNCTION_TYPE_2
5019 #undef DEF_FUNCTION_TYPE_3
5020 #undef DEF_FUNCTION_TYPE_4
5021 #undef DEF_FUNCTION_TYPE_5
5022 #undef DEF_FUNCTION_TYPE_6
5023 #undef DEF_FUNCTION_TYPE_7
5024 #undef DEF_FUNCTION_TYPE_VAR_0
5025 #undef DEF_FUNCTION_TYPE_VAR_1
5026 #undef DEF_FUNCTION_TYPE_VAR_2
5027 #undef DEF_FUNCTION_TYPE_VAR_3
5028 #undef DEF_FUNCTION_TYPE_VAR_4
5029 #undef DEF_FUNCTION_TYPE_VAR_5
5030 #undef DEF_POINTER_TYPE
5034 typedef enum c_builtin_type builtin_type
;
5036 /* A temporary array used in communication with def_fn_type. */
5037 static GTY(()) tree builtin_types
[(int) BT_LAST
+ 1];
5039 /* A helper function for install_builtin_types. Build function type
5040 for DEF with return type RET and N arguments. If VAR is true, then the
5041 function should be variadic after those N arguments.
5043 Takes special care not to ICE if any of the types involved are
5044 error_mark_node, which indicates that said type is not in fact available
5045 (see builtin_type_for_size). In which case the function type as a whole
5046 should be error_mark_node. */
5049 def_fn_type (builtin_type def
, builtin_type ret
, bool var
, int n
, ...)
5052 tree
*args
= XALLOCAVEC (tree
, n
);
5057 for (i
= 0; i
< n
; ++i
)
5059 builtin_type a
= (builtin_type
) va_arg (list
, int);
5060 t
= builtin_types
[a
];
5061 if (t
== error_mark_node
)
5066 t
= builtin_types
[ret
];
5067 if (t
== error_mark_node
)
5070 t
= build_varargs_function_type_array (t
, n
, args
);
5072 t
= build_function_type_array (t
, n
, args
);
5075 builtin_types
[def
] = t
;
5079 /* Build the builtin function types and install them in the builtin_types
5080 array for later use in builtin function decls. */
5083 install_builtin_function_types (void)
5085 tree va_list_ref_type_node
;
5086 tree va_list_arg_type_node
;
5088 if (TREE_CODE (va_list_type_node
) == ARRAY_TYPE
)
5090 va_list_arg_type_node
= va_list_ref_type_node
=
5091 build_pointer_type (TREE_TYPE (va_list_type_node
));
5095 va_list_arg_type_node
= va_list_type_node
;
5096 va_list_ref_type_node
= build_reference_type (va_list_type_node
);
5099 #define DEF_PRIMITIVE_TYPE(ENUM, VALUE) \
5100 builtin_types[ENUM] = VALUE;
5101 #define DEF_FUNCTION_TYPE_0(ENUM, RETURN) \
5102 def_fn_type (ENUM, RETURN, 0, 0);
5103 #define DEF_FUNCTION_TYPE_1(ENUM, RETURN, ARG1) \
5104 def_fn_type (ENUM, RETURN, 0, 1, ARG1);
5105 #define DEF_FUNCTION_TYPE_2(ENUM, RETURN, ARG1, ARG2) \
5106 def_fn_type (ENUM, RETURN, 0, 2, ARG1, ARG2);
5107 #define DEF_FUNCTION_TYPE_3(ENUM, RETURN, ARG1, ARG2, ARG3) \
5108 def_fn_type (ENUM, RETURN, 0, 3, ARG1, ARG2, ARG3);
5109 #define DEF_FUNCTION_TYPE_4(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4) \
5110 def_fn_type (ENUM, RETURN, 0, 4, ARG1, ARG2, ARG3, ARG4);
5111 #define DEF_FUNCTION_TYPE_5(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5) \
5112 def_fn_type (ENUM, RETURN, 0, 5, ARG1, ARG2, ARG3, ARG4, ARG5);
5113 #define DEF_FUNCTION_TYPE_6(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \
5115 def_fn_type (ENUM, RETURN, 0, 6, ARG1, ARG2, ARG3, ARG4, ARG5, ARG6);
5116 #define DEF_FUNCTION_TYPE_7(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \
5118 def_fn_type (ENUM, RETURN, 0, 7, ARG1, ARG2, ARG3, ARG4, ARG5, ARG6, ARG7);
5119 #define DEF_FUNCTION_TYPE_VAR_0(ENUM, RETURN) \
5120 def_fn_type (ENUM, RETURN, 1, 0);
5121 #define DEF_FUNCTION_TYPE_VAR_1(ENUM, RETURN, ARG1) \
5122 def_fn_type (ENUM, RETURN, 1, 1, ARG1);
5123 #define DEF_FUNCTION_TYPE_VAR_2(ENUM, RETURN, ARG1, ARG2) \
5124 def_fn_type (ENUM, RETURN, 1, 2, ARG1, ARG2);
5125 #define DEF_FUNCTION_TYPE_VAR_3(ENUM, RETURN, ARG1, ARG2, ARG3) \
5126 def_fn_type (ENUM, RETURN, 1, 3, ARG1, ARG2, ARG3);
5127 #define DEF_FUNCTION_TYPE_VAR_4(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4) \
5128 def_fn_type (ENUM, RETURN, 1, 4, ARG1, ARG2, ARG3, ARG4);
5129 #define DEF_FUNCTION_TYPE_VAR_5(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5) \
5130 def_fn_type (ENUM, RETURN, 1, 5, ARG1, ARG2, ARG3, ARG4, ARG5);
5131 #define DEF_POINTER_TYPE(ENUM, TYPE) \
5132 builtin_types[(int) ENUM] = build_pointer_type (builtin_types[(int) TYPE]);
5134 #include "builtin-types.def"
5136 #undef DEF_PRIMITIVE_TYPE
5137 #undef DEF_FUNCTION_TYPE_1
5138 #undef DEF_FUNCTION_TYPE_2
5139 #undef DEF_FUNCTION_TYPE_3
5140 #undef DEF_FUNCTION_TYPE_4
5141 #undef DEF_FUNCTION_TYPE_5
5142 #undef DEF_FUNCTION_TYPE_6
5143 #undef DEF_FUNCTION_TYPE_VAR_0
5144 #undef DEF_FUNCTION_TYPE_VAR_1
5145 #undef DEF_FUNCTION_TYPE_VAR_2
5146 #undef DEF_FUNCTION_TYPE_VAR_3
5147 #undef DEF_FUNCTION_TYPE_VAR_4
5148 #undef DEF_FUNCTION_TYPE_VAR_5
5149 #undef DEF_POINTER_TYPE
5150 builtin_types
[(int) BT_LAST
] = NULL_TREE
;
5153 /* ----------------------------------------------------------------------- *
5154 * BUILTIN ATTRIBUTES *
5155 * ----------------------------------------------------------------------- */
5157 enum built_in_attribute
5159 #define DEF_ATTR_NULL_TREE(ENUM) ENUM,
5160 #define DEF_ATTR_INT(ENUM, VALUE) ENUM,
5161 #define DEF_ATTR_IDENT(ENUM, STRING) ENUM,
5162 #define DEF_ATTR_TREE_LIST(ENUM, PURPOSE, VALUE, CHAIN) ENUM,
5163 #include "builtin-attrs.def"
5164 #undef DEF_ATTR_NULL_TREE
5166 #undef DEF_ATTR_IDENT
5167 #undef DEF_ATTR_TREE_LIST
5171 static GTY(()) tree built_in_attributes
[(int) ATTR_LAST
];
5174 install_builtin_attributes (void)
5176 /* Fill in the built_in_attributes array. */
5177 #define DEF_ATTR_NULL_TREE(ENUM) \
5178 built_in_attributes[(int) ENUM] = NULL_TREE;
5179 #define DEF_ATTR_INT(ENUM, VALUE) \
5180 built_in_attributes[(int) ENUM] = build_int_cst (NULL_TREE, VALUE);
5181 #define DEF_ATTR_IDENT(ENUM, STRING) \
5182 built_in_attributes[(int) ENUM] = get_identifier (STRING);
5183 #define DEF_ATTR_TREE_LIST(ENUM, PURPOSE, VALUE, CHAIN) \
5184 built_in_attributes[(int) ENUM] \
5185 = tree_cons (built_in_attributes[(int) PURPOSE], \
5186 built_in_attributes[(int) VALUE], \
5187 built_in_attributes[(int) CHAIN]);
5188 #include "builtin-attrs.def"
5189 #undef DEF_ATTR_NULL_TREE
5191 #undef DEF_ATTR_IDENT
5192 #undef DEF_ATTR_TREE_LIST
5195 /* Handle a "const" attribute; arguments as in
5196 struct attribute_spec.handler. */
5199 handle_const_attribute (tree
*node
, tree
ARG_UNUSED (name
),
5200 tree
ARG_UNUSED (args
), int ARG_UNUSED (flags
),
5203 if (TREE_CODE (*node
) == FUNCTION_DECL
)
5204 TREE_READONLY (*node
) = 1;
5206 *no_add_attrs
= true;
5211 /* Handle a "nothrow" attribute; arguments as in
5212 struct attribute_spec.handler. */
5215 handle_nothrow_attribute (tree
*node
, tree
ARG_UNUSED (name
),
5216 tree
ARG_UNUSED (args
), int ARG_UNUSED (flags
),
5219 if (TREE_CODE (*node
) == FUNCTION_DECL
)
5220 TREE_NOTHROW (*node
) = 1;
5222 *no_add_attrs
= true;
5227 /* Handle a "pure" attribute; arguments as in
5228 struct attribute_spec.handler. */
5231 handle_pure_attribute (tree
*node
, tree name
, tree
ARG_UNUSED (args
),
5232 int ARG_UNUSED (flags
), bool *no_add_attrs
)
5234 if (TREE_CODE (*node
) == FUNCTION_DECL
)
5235 DECL_PURE_P (*node
) = 1;
5236 /* ??? TODO: Support types. */
5239 warning (OPT_Wattributes
, "%qs attribute ignored",
5240 IDENTIFIER_POINTER (name
));
5241 *no_add_attrs
= true;
5247 /* Handle a "no vops" attribute; arguments as in
5248 struct attribute_spec.handler. */
5251 handle_novops_attribute (tree
*node
, tree
ARG_UNUSED (name
),
5252 tree
ARG_UNUSED (args
), int ARG_UNUSED (flags
),
5253 bool *ARG_UNUSED (no_add_attrs
))
5255 gcc_assert (TREE_CODE (*node
) == FUNCTION_DECL
);
5256 DECL_IS_NOVOPS (*node
) = 1;
5260 /* Helper for nonnull attribute handling; fetch the operand number
5261 from the attribute argument list. */
5264 get_nonnull_operand (tree arg_num_expr
, unsigned HOST_WIDE_INT
*valp
)
5266 /* Verify the arg number is a constant. */
5267 if (TREE_CODE (arg_num_expr
) != INTEGER_CST
5268 || TREE_INT_CST_HIGH (arg_num_expr
) != 0)
5271 *valp
= TREE_INT_CST_LOW (arg_num_expr
);
5275 /* Handle the "nonnull" attribute. */
5277 handle_nonnull_attribute (tree
*node
, tree
ARG_UNUSED (name
),
5278 tree args
, int ARG_UNUSED (flags
),
5282 unsigned HOST_WIDE_INT attr_arg_num
;
5284 /* If no arguments are specified, all pointer arguments should be
5285 non-null. Verify a full prototype is given so that the arguments
5286 will have the correct types when we actually check them later. */
5289 if (!prototype_p (type
))
5291 error ("nonnull attribute without arguments on a non-prototype");
5292 *no_add_attrs
= true;
5297 /* Argument list specified. Verify that each argument number references
5298 a pointer argument. */
5299 for (attr_arg_num
= 1; args
; args
= TREE_CHAIN (args
))
5301 unsigned HOST_WIDE_INT arg_num
= 0, ck_num
;
5303 if (!get_nonnull_operand (TREE_VALUE (args
), &arg_num
))
5305 error ("nonnull argument has invalid operand number (argument %lu)",
5306 (unsigned long) attr_arg_num
);
5307 *no_add_attrs
= true;
5311 if (prototype_p (type
))
5313 function_args_iterator iter
;
5316 function_args_iter_init (&iter
, type
);
5317 for (ck_num
= 1; ; ck_num
++, function_args_iter_next (&iter
))
5319 argument
= function_args_iter_cond (&iter
);
5320 if (!argument
|| ck_num
== arg_num
)
5325 || TREE_CODE (argument
) == VOID_TYPE
)
5327 error ("nonnull argument with out-of-range operand number "
5328 "(argument %lu, operand %lu)",
5329 (unsigned long) attr_arg_num
, (unsigned long) arg_num
);
5330 *no_add_attrs
= true;
5334 if (TREE_CODE (argument
) != POINTER_TYPE
)
5336 error ("nonnull argument references non-pointer operand "
5337 "(argument %lu, operand %lu)",
5338 (unsigned long) attr_arg_num
, (unsigned long) arg_num
);
5339 *no_add_attrs
= true;
5348 /* Handle a "sentinel" attribute. */
5351 handle_sentinel_attribute (tree
*node
, tree name
, tree args
,
5352 int ARG_UNUSED (flags
), bool *no_add_attrs
)
5354 if (!prototype_p (*node
))
5356 warning (OPT_Wattributes
,
5357 "%qs attribute requires prototypes with named arguments",
5358 IDENTIFIER_POINTER (name
));
5359 *no_add_attrs
= true;
5363 if (!stdarg_p (*node
))
5365 warning (OPT_Wattributes
,
5366 "%qs attribute only applies to variadic functions",
5367 IDENTIFIER_POINTER (name
));
5368 *no_add_attrs
= true;
5374 tree position
= TREE_VALUE (args
);
5376 if (TREE_CODE (position
) != INTEGER_CST
)
5378 warning (0, "requested position is not an integer constant");
5379 *no_add_attrs
= true;
5383 if (tree_int_cst_lt (position
, integer_zero_node
))
5385 warning (0, "requested position is less than zero");
5386 *no_add_attrs
= true;
5394 /* Handle a "noreturn" attribute; arguments as in
5395 struct attribute_spec.handler. */
5398 handle_noreturn_attribute (tree
*node
, tree name
, tree
ARG_UNUSED (args
),
5399 int ARG_UNUSED (flags
), bool *no_add_attrs
)
5401 tree type
= TREE_TYPE (*node
);
5403 /* See FIXME comment in c_common_attribute_table. */
5404 if (TREE_CODE (*node
) == FUNCTION_DECL
)
5405 TREE_THIS_VOLATILE (*node
) = 1;
5406 else if (TREE_CODE (type
) == POINTER_TYPE
5407 && TREE_CODE (TREE_TYPE (type
)) == FUNCTION_TYPE
)
5409 = build_pointer_type
5410 (build_type_variant (TREE_TYPE (type
),
5411 TYPE_READONLY (TREE_TYPE (type
)), 1));
5414 warning (OPT_Wattributes
, "%qs attribute ignored",
5415 IDENTIFIER_POINTER (name
));
5416 *no_add_attrs
= true;
5422 /* Handle a "leaf" attribute; arguments as in
5423 struct attribute_spec.handler. */
5426 handle_leaf_attribute (tree
*node
, tree name
,
5427 tree
ARG_UNUSED (args
),
5428 int ARG_UNUSED (flags
), bool *no_add_attrs
)
5430 if (TREE_CODE (*node
) != FUNCTION_DECL
)
5432 warning (OPT_Wattributes
, "%qE attribute ignored", name
);
5433 *no_add_attrs
= true;
5435 if (!TREE_PUBLIC (*node
))
5437 warning (OPT_Wattributes
, "%qE attribute has no effect", name
);
5438 *no_add_attrs
= true;
5444 /* Handle a "malloc" attribute; arguments as in
5445 struct attribute_spec.handler. */
5448 handle_malloc_attribute (tree
*node
, tree name
, tree
ARG_UNUSED (args
),
5449 int ARG_UNUSED (flags
), bool *no_add_attrs
)
5451 if (TREE_CODE (*node
) == FUNCTION_DECL
5452 && POINTER_TYPE_P (TREE_TYPE (TREE_TYPE (*node
))))
5453 DECL_IS_MALLOC (*node
) = 1;
5456 warning (OPT_Wattributes
, "%qs attribute ignored",
5457 IDENTIFIER_POINTER (name
));
5458 *no_add_attrs
= true;
5464 /* Fake handler for attributes we don't properly support. */
5467 fake_attribute_handler (tree
* ARG_UNUSED (node
),
5468 tree
ARG_UNUSED (name
),
5469 tree
ARG_UNUSED (args
),
5470 int ARG_UNUSED (flags
),
5471 bool * ARG_UNUSED (no_add_attrs
))
5476 /* Handle a "type_generic" attribute. */
5479 handle_type_generic_attribute (tree
*node
, tree
ARG_UNUSED (name
),
5480 tree
ARG_UNUSED (args
), int ARG_UNUSED (flags
),
5481 bool * ARG_UNUSED (no_add_attrs
))
5483 /* Ensure we have a function type. */
5484 gcc_assert (TREE_CODE (*node
) == FUNCTION_TYPE
);
5486 /* Ensure we have a variadic function. */
5487 gcc_assert (!prototype_p (*node
) || stdarg_p (*node
));
5492 /* Handle a "vector_size" attribute; arguments as in
5493 struct attribute_spec.handler. */
5496 handle_vector_size_attribute (tree
*node
, tree name
, tree args
,
5497 int ARG_UNUSED (flags
),
5500 unsigned HOST_WIDE_INT vecsize
, nunits
;
5501 enum machine_mode orig_mode
;
5502 tree type
= *node
, new_type
, size
;
5504 *no_add_attrs
= true;
5506 size
= TREE_VALUE (args
);
5508 if (!host_integerp (size
, 1))
5510 warning (OPT_Wattributes
, "%qs attribute ignored",
5511 IDENTIFIER_POINTER (name
));
5515 /* Get the vector size (in bytes). */
5516 vecsize
= tree_low_cst (size
, 1);
5518 /* We need to provide for vector pointers, vector arrays, and
5519 functions returning vectors. For example:
5521 __attribute__((vector_size(16))) short *foo;
5523 In this case, the mode is SI, but the type being modified is
5524 HI, so we need to look further. */
5526 while (POINTER_TYPE_P (type
)
5527 || TREE_CODE (type
) == FUNCTION_TYPE
5528 || TREE_CODE (type
) == ARRAY_TYPE
)
5529 type
= TREE_TYPE (type
);
5531 /* Get the mode of the type being modified. */
5532 orig_mode
= TYPE_MODE (type
);
5534 if ((!INTEGRAL_TYPE_P (type
)
5535 && !SCALAR_FLOAT_TYPE_P (type
)
5536 && !FIXED_POINT_TYPE_P (type
))
5537 || (!SCALAR_FLOAT_MODE_P (orig_mode
)
5538 && GET_MODE_CLASS (orig_mode
) != MODE_INT
5539 && !ALL_SCALAR_FIXED_POINT_MODE_P (orig_mode
))
5540 || !host_integerp (TYPE_SIZE_UNIT (type
), 1)
5541 || TREE_CODE (type
) == BOOLEAN_TYPE
)
5543 error ("invalid vector type for attribute %qs",
5544 IDENTIFIER_POINTER (name
));
5548 if (vecsize
% tree_low_cst (TYPE_SIZE_UNIT (type
), 1))
5550 error ("vector size not an integral multiple of component size");
5556 error ("zero vector size");
5560 /* Calculate how many units fit in the vector. */
5561 nunits
= vecsize
/ tree_low_cst (TYPE_SIZE_UNIT (type
), 1);
5562 if (nunits
& (nunits
- 1))
5564 error ("number of components of the vector not a power of two");
5568 new_type
= build_vector_type (type
, nunits
);
5570 /* Build back pointers if needed. */
5571 *node
= reconstruct_complex_type (*node
, new_type
);
5576 /* Handle a "vector_type" attribute; arguments as in
5577 struct attribute_spec.handler. */
5580 handle_vector_type_attribute (tree
*node
, tree name
, tree
ARG_UNUSED (args
),
5581 int ARG_UNUSED (flags
),
5584 /* Vector representative type and size. */
5585 tree rep_type
= *node
;
5586 tree rep_size
= TYPE_SIZE_UNIT (rep_type
);
5589 /* Vector size in bytes and number of units. */
5590 unsigned HOST_WIDE_INT vec_bytes
, vec_units
;
5592 /* Vector element type and mode. */
5594 enum machine_mode elem_mode
;
5596 *no_add_attrs
= true;
5598 /* Get the representative array type, possibly nested within a
5599 padding record e.g. for alignment purposes. */
5601 if (TYPE_IS_PADDING_P (rep_type
))
5602 rep_type
= TREE_TYPE (TYPE_FIELDS (rep_type
));
5604 if (TREE_CODE (rep_type
) != ARRAY_TYPE
)
5606 error ("attribute %qs applies to array types only",
5607 IDENTIFIER_POINTER (name
));
5611 /* Silently punt on variable sizes. We can't make vector types for them,
5612 need to ignore them on front-end generated subtypes of unconstrained
5613 bases, and this attribute is for binding implementors, not end-users, so
5614 we should never get there from legitimate explicit uses. */
5616 if (!host_integerp (rep_size
, 1))
5619 /* Get the element type/mode and check this is something we know
5620 how to make vectors of. */
5622 elem_type
= TREE_TYPE (rep_type
);
5623 elem_mode
= TYPE_MODE (elem_type
);
5625 if ((!INTEGRAL_TYPE_P (elem_type
)
5626 && !SCALAR_FLOAT_TYPE_P (elem_type
)
5627 && !FIXED_POINT_TYPE_P (elem_type
))
5628 || (!SCALAR_FLOAT_MODE_P (elem_mode
)
5629 && GET_MODE_CLASS (elem_mode
) != MODE_INT
5630 && !ALL_SCALAR_FIXED_POINT_MODE_P (elem_mode
))
5631 || !host_integerp (TYPE_SIZE_UNIT (elem_type
), 1))
5633 error ("invalid element type for attribute %qs",
5634 IDENTIFIER_POINTER (name
));
5638 /* Sanity check the vector size and element type consistency. */
5640 vec_bytes
= tree_low_cst (rep_size
, 1);
5642 if (vec_bytes
% tree_low_cst (TYPE_SIZE_UNIT (elem_type
), 1))
5644 error ("vector size not an integral multiple of component size");
5650 error ("zero vector size");
5654 vec_units
= vec_bytes
/ tree_low_cst (TYPE_SIZE_UNIT (elem_type
), 1);
5655 if (vec_units
& (vec_units
- 1))
5657 error ("number of components of the vector not a power of two");
5661 /* Build the vector type and replace. */
5663 *node
= build_vector_type (elem_type
, vec_units
);
5664 rep_name
= TYPE_NAME (rep_type
);
5665 if (TREE_CODE (rep_name
) == TYPE_DECL
)
5666 rep_name
= DECL_NAME (rep_name
);
5667 TYPE_NAME (*node
) = rep_name
;
5668 TYPE_REPRESENTATIVE_ARRAY (*node
) = rep_type
;
5673 /* ----------------------------------------------------------------------- *
5674 * BUILTIN FUNCTIONS *
5675 * ----------------------------------------------------------------------- */
5677 /* Worker for DEF_BUILTIN. Possibly define a builtin function with one or two
5678 names. Does not declare a non-__builtin_ function if flag_no_builtin, or
5679 if nonansi_p and flag_no_nonansi_builtin. */
5682 def_builtin_1 (enum built_in_function fncode
,
5684 enum built_in_class fnclass
,
5685 tree fntype
, tree libtype
,
5686 bool both_p
, bool fallback_p
,
5687 bool nonansi_p ATTRIBUTE_UNUSED
,
5688 tree fnattrs
, bool implicit_p
)
5691 const char *libname
;
5693 /* Preserve an already installed decl. It most likely was setup in advance
5694 (e.g. as part of the internal builtins) for specific reasons. */
5695 if (builtin_decl_explicit (fncode
) != NULL_TREE
)
5698 gcc_assert ((!both_p
&& !fallback_p
)
5699 || !strncmp (name
, "__builtin_",
5700 strlen ("__builtin_")));
5702 libname
= name
+ strlen ("__builtin_");
5703 decl
= add_builtin_function (name
, fntype
, fncode
, fnclass
,
5704 (fallback_p
? libname
: NULL
),
5707 /* ??? This is normally further controlled by command-line options
5708 like -fno-builtin, but we don't have them for Ada. */
5709 add_builtin_function (libname
, libtype
, fncode
, fnclass
,
5712 set_builtin_decl (fncode
, decl
, implicit_p
);
/* Flags referenced by builtins.def conditions.  Ada has no notion of the
   C standard revisions, so both stay pinned to 0.  */
static int flag_isoc94 = 0;
static int flag_isoc99 = 0;
5718 /* Install what the common builtins.def offers. */
5721 install_builtin_functions (void)
5723 #define DEF_BUILTIN(ENUM, NAME, CLASS, TYPE, LIBTYPE, BOTH_P, FALLBACK_P, \
5724 NONANSI_P, ATTRS, IMPLICIT, COND) \
5726 def_builtin_1 (ENUM, NAME, CLASS, \
5727 builtin_types[(int) TYPE], \
5728 builtin_types[(int) LIBTYPE], \
5729 BOTH_P, FALLBACK_P, NONANSI_P, \
5730 built_in_attributes[(int) ATTRS], IMPLICIT);
5731 #include "builtins.def"
5735 /* ----------------------------------------------------------------------- *
5736 * BUILTIN FUNCTIONS *
5737 * ----------------------------------------------------------------------- */
5739 /* Install the builtin functions we might need. */
5742 gnat_install_builtins (void)
5744 install_builtin_elementary_types ();
5745 install_builtin_function_types ();
5746 install_builtin_attributes ();
5748 /* Install builtins used by generic middle-end pieces first. Some of these
5749 know about internal specificities and control attributes accordingly, for
5750 instance __builtin_alloca vs no-throw and -fstack-check. We will ignore
5751 the generic definition from builtins.def. */
5752 build_common_builtin_nodes ();
5754 /* Now, install the target specific builtins, such as the AltiVec family on
5755 ppc, and the common set as exposed by builtins.def. */
5756 targetm
.init_builtins ();
5757 install_builtin_functions ();
5760 #include "gt-ada-utils.h"
5761 #include "gtype-ada.h"