1 /****************************************************************************
3 * GNAT COMPILER COMPONENTS *
7 * C Implementation File *
9 * Copyright (C) 1992-2012, Free Software Foundation, Inc. *
11 * GNAT is free software; you can redistribute it and/or modify it under *
12 * terms of the GNU General Public License as published by the Free Soft- *
13 * ware Foundation; either version 3, or (at your option) any later ver- *
14 * sion. GNAT is distributed in the hope that it will be useful, but WITH- *
15 * OUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY *
16 * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License *
17 * for more details. You should have received a copy of the GNU General *
18 * Public License along with GCC; see the file COPYING3. If not see *
19 * <http://www.gnu.org/licenses/>. *
21 * GNAT was originally developed by the GNAT team at New York University. *
22 * Extensive contributions were provided by Ada Core Technologies Inc. *
24 ****************************************************************************/
28 #include "coretypes.h"
33 #include "diagnostic-core.h"
39 #include "common/common-target.h"
40 #include "langhooks.h"
42 #include "diagnostic.h"
43 #include "tree-dump.h"
44 #include "tree-inline.h"
45 #include "tree-iterator.h"
61 /* If nonzero, pretend we are allocating at global level. */
64 /* The default alignment of "double" floating-point types, i.e. floating
65 point types whose size is equal to 64 bits, or 0 if this alignment is
66 not specifically capped. */
67 int double_float_alignment
;
69 /* The default alignment of "double" or larger scalar types, i.e. scalar
70 types whose size is greater or equal to 64 bits, or 0 if this alignment
71 is not specifically capped. */
72 int double_scalar_alignment
;
74 /* Tree nodes for the various types and decls we create. */
75 tree gnat_std_decls
[(int) ADT_LAST
];
77 /* Functions to call for each of the possible raise reasons. */
78 tree gnat_raise_decls
[(int) LAST_REASON_CODE
+ 1];
80 /* Likewise, but with extra info for each of the possible raise reasons. */
81 tree gnat_raise_decls_ext
[(int) LAST_REASON_CODE
+ 1];
83 /* Forward declarations for handlers of attributes. */
84 static tree
handle_const_attribute (tree
*, tree
, tree
, int, bool *);
85 static tree
handle_nothrow_attribute (tree
*, tree
, tree
, int, bool *);
86 static tree
handle_pure_attribute (tree
*, tree
, tree
, int, bool *);
87 static tree
handle_novops_attribute (tree
*, tree
, tree
, int, bool *);
88 static tree
handle_nonnull_attribute (tree
*, tree
, tree
, int, bool *);
89 static tree
handle_sentinel_attribute (tree
*, tree
, tree
, int, bool *);
90 static tree
handle_noreturn_attribute (tree
*, tree
, tree
, int, bool *);
91 static tree
handle_leaf_attribute (tree
*, tree
, tree
, int, bool *);
92 static tree
handle_malloc_attribute (tree
*, tree
, tree
, int, bool *);
93 static tree
handle_type_generic_attribute (tree
*, tree
, tree
, int, bool *);
94 static tree
handle_vector_size_attribute (tree
*, tree
, tree
, int, bool *);
95 static tree
handle_vector_type_attribute (tree
*, tree
, tree
, int, bool *);
97 /* Fake handler for attributes we don't properly support, typically because
98 they'd require dragging a lot of the common-c front-end circuitry. */
99 static tree
fake_attribute_handler (tree
*, tree
, tree
, int, bool *);
101 /* Table of machine-independent internal attributes for Ada. We support
102 this minimal set of attributes to accommodate the needs of builtins. */
103 const struct attribute_spec gnat_internal_attribute_table
[] =
105 /* { name, min_len, max_len, decl_req, type_req, fn_type_req, handler,
106 affects_type_identity } */
107 { "const", 0, 0, true, false, false, handle_const_attribute
,
109 { "nothrow", 0, 0, true, false, false, handle_nothrow_attribute
,
111 { "pure", 0, 0, true, false, false, handle_pure_attribute
,
113 { "no vops", 0, 0, true, false, false, handle_novops_attribute
,
115 { "nonnull", 0, -1, false, true, true, handle_nonnull_attribute
,
117 { "sentinel", 0, 1, false, true, true, handle_sentinel_attribute
,
119 { "noreturn", 0, 0, true, false, false, handle_noreturn_attribute
,
121 { "leaf", 0, 0, true, false, false, handle_leaf_attribute
,
123 { "malloc", 0, 0, true, false, false, handle_malloc_attribute
,
125 { "type generic", 0, 0, false, true, true, handle_type_generic_attribute
,
128 { "vector_size", 1, 1, false, true, false, handle_vector_size_attribute
,
130 { "vector_type", 0, 0, false, true, false, handle_vector_type_attribute
,
132 { "may_alias", 0, 0, false, true, false, NULL
, false },
134 /* ??? format and format_arg are heavy and not supported, which actually
135 prevents support for stdio builtins, which we however declare as part
136 of the common builtins.def contents. */
137 { "format", 3, 3, false, true, true, fake_attribute_handler
, false },
138 { "format_arg", 1, 1, false, true, true, fake_attribute_handler
, false },
140 { NULL
, 0, 0, false, false, false, NULL
, false }
143 /* Associates a GNAT tree node to a GCC tree node. It is used in
144 `save_gnu_tree', `get_gnu_tree' and `present_gnu_tree'. See documentation
145 of `save_gnu_tree' for more info. */
146 static GTY((length ("max_gnat_nodes"))) tree
*associate_gnat_to_gnu
;
148 #define GET_GNU_TREE(GNAT_ENTITY) \
149 associate_gnat_to_gnu[(GNAT_ENTITY) - First_Node_Id]
151 #define SET_GNU_TREE(GNAT_ENTITY,VAL) \
152 associate_gnat_to_gnu[(GNAT_ENTITY) - First_Node_Id] = (VAL)
154 #define PRESENT_GNU_TREE(GNAT_ENTITY) \
155 (associate_gnat_to_gnu[(GNAT_ENTITY) - First_Node_Id] != NULL_TREE)
157 /* Associates a GNAT entity to a GCC tree node used as a dummy, if any. */
158 static GTY((length ("max_gnat_nodes"))) tree
*dummy_node_table
;
160 #define GET_DUMMY_NODE(GNAT_ENTITY) \
161 dummy_node_table[(GNAT_ENTITY) - First_Node_Id]
163 #define SET_DUMMY_NODE(GNAT_ENTITY,VAL) \
164 dummy_node_table[(GNAT_ENTITY) - First_Node_Id] = (VAL)
166 #define PRESENT_DUMMY_NODE(GNAT_ENTITY) \
167 (dummy_node_table[(GNAT_ENTITY) - First_Node_Id] != NULL_TREE)
169 /* This variable keeps a table for types for each precision so that we only
170 allocate each of them once. Signed and unsigned types are kept separate.
172 Note that these types are only used when fold-const requests something
173 special. Perhaps we should NOT share these types; we'll see how it
175 static GTY(()) tree signed_and_unsigned_types
[2 * MAX_BITS_PER_WORD
+ 1][2];
177 /* Likewise for float types, but record these by mode. */
178 static GTY(()) tree float_types
[NUM_MACHINE_MODES
];
180 /* For each binding contour we allocate a binding_level structure to indicate
181 the binding depth. */
183 struct GTY((chain_next ("%h.chain"))) gnat_binding_level
{
184 /* The binding level containing this one (the enclosing binding level). */
185 struct gnat_binding_level
*chain
;
186 /* The BLOCK node for this level. */
188 /* If nonzero, the setjmp buffer that needs to be updated for any
189 variable-sized definition within this context. */
193 /* The binding level currently in effect. */
194 static GTY(()) struct gnat_binding_level
*current_binding_level
;
196 /* A chain of gnat_binding_level structures awaiting reuse. */
197 static GTY((deletable
)) struct gnat_binding_level
*free_binding_level
;
199 /* The context to be used for global declarations. */
200 static GTY(()) tree global_context
;
202 /* An array of global declarations. */
203 static GTY(()) VEC(tree
,gc
) *global_decls
;
205 /* An array of builtin function declarations. */
206 static GTY(()) VEC(tree
,gc
) *builtin_decls
;
208 /* An array of global renaming pointers. */
209 static GTY(()) VEC(tree
,gc
) *global_renaming_pointers
;
211 /* A chain of unused BLOCK nodes. */
212 static GTY((deletable
)) tree free_block_chain
;
214 static int pad_type_hash_marked_p (const void *p
);
215 static hashval_t
pad_type_hash_hash (const void *p
);
216 static int pad_type_hash_eq (const void *p1
, const void *p2
);
218 /* A hash table of padded types. It is modelled on the generic type
219 hash table in tree.c, which must thus be used as a reference. */
220 struct GTY(()) pad_type_hash
{
225 static GTY ((if_marked ("pad_type_hash_marked_p"),
226 param_is (struct pad_type_hash
)))
227 htab_t pad_type_hash_table
;
229 static tree
merge_sizes (tree
, tree
, tree
, bool, bool);
230 static tree
compute_related_constant (tree
, tree
);
231 static tree
split_plus (tree
, tree
*);
232 static tree
float_type_for_precision (int, enum machine_mode
);
233 static tree
convert_to_fat_pointer (tree
, tree
);
234 static bool potential_alignment_gap (tree
, tree
, tree
);
235 static void process_attributes (tree
, struct attrib
*);
237 /* Initialize data structures of the utils.c module. */
240 init_gnat_utils (void)
242 /* Initialize the association of GNAT nodes to GCC trees. */
243 associate_gnat_to_gnu
= ggc_alloc_cleared_vec_tree (max_gnat_nodes
);
245 /* Initialize the association of GNAT nodes to GCC trees as dummies. */
246 dummy_node_table
= ggc_alloc_cleared_vec_tree (max_gnat_nodes
);
248 /* Initialize the hash table of padded types. */
249 pad_type_hash_table
= htab_create_ggc (512, pad_type_hash_hash
,
250 pad_type_hash_eq
, 0);
253 /* Destroy data structures of the utils.c module. */
256 destroy_gnat_utils (void)
258 /* Destroy the association of GNAT nodes to GCC trees. */
259 ggc_free (associate_gnat_to_gnu
);
260 associate_gnat_to_gnu
= NULL
;
262 /* Destroy the association of GNAT nodes to GCC trees as dummies. */
263 ggc_free (dummy_node_table
);
264 dummy_node_table
= NULL
;
266 /* Destroy the hash table of padded types. */
267 htab_delete (pad_type_hash_table
);
268 pad_type_hash_table
= NULL
;
270 /* Invalidate the global renaming pointers. */
271 invalidate_global_renaming_pointers ();
274 /* GNAT_ENTITY is a GNAT tree node for an entity. Associate GNU_DECL, a GCC
275 tree node, with GNAT_ENTITY. If GNU_DECL is not a ..._DECL node, abort.
276 If NO_CHECK is true, the latter check is suppressed.
278 If GNU_DECL is zero, reset a previous association. */
281 save_gnu_tree (Entity_Id gnat_entity
, tree gnu_decl
, bool no_check
)
283 /* Check that GNAT_ENTITY is not already defined and that it is being set
284 to something which is a decl. If that is not the case, this usually
285 means GNAT_ENTITY is defined twice, but occasionally is due to some
287 gcc_assert (!(gnu_decl
288 && (PRESENT_GNU_TREE (gnat_entity
)
289 || (!no_check
&& !DECL_P (gnu_decl
)))));
291 SET_GNU_TREE (gnat_entity
, gnu_decl
);
294 /* GNAT_ENTITY is a GNAT tree node for an entity. Return the GCC tree node
295 that was associated with it. If there is no such tree node, abort.
297 In some cases, such as delayed elaboration or expressions that need to
298 be elaborated only once, GNAT_ENTITY is really not an entity. */
301 get_gnu_tree (Entity_Id gnat_entity
)
303 gcc_assert (PRESENT_GNU_TREE (gnat_entity
));
304 return GET_GNU_TREE (gnat_entity
);
307 /* Return nonzero if a GCC tree has been associated with GNAT_ENTITY. */
310 present_gnu_tree (Entity_Id gnat_entity
)
312 return PRESENT_GNU_TREE (gnat_entity
);
315 /* Make a dummy type corresponding to GNAT_TYPE. */
318 make_dummy_type (Entity_Id gnat_type
)
320 Entity_Id gnat_underlying
= Gigi_Equivalent_Type (gnat_type
);
323 /* If there is an equivalent type, get its underlying type. */
324 if (Present (gnat_underlying
))
325 gnat_underlying
= Gigi_Equivalent_Type (Underlying_Type (gnat_underlying
));
327 /* If there was no equivalent type (can only happen when just annotating
328 types) or underlying type, go back to the original type. */
329 if (No (gnat_underlying
))
330 gnat_underlying
= gnat_type
;
332 /* If it there already a dummy type, use that one. Else make one. */
333 if (PRESENT_DUMMY_NODE (gnat_underlying
))
334 return GET_DUMMY_NODE (gnat_underlying
);
336 /* If this is a record, make a RECORD_TYPE or UNION_TYPE; else make
338 gnu_type
= make_node (Is_Record_Type (gnat_underlying
)
339 ? tree_code_for_record_type (gnat_underlying
)
341 TYPE_NAME (gnu_type
) = get_entity_name (gnat_type
);
342 TYPE_DUMMY_P (gnu_type
) = 1;
343 TYPE_STUB_DECL (gnu_type
)
344 = create_type_stub_decl (TYPE_NAME (gnu_type
), gnu_type
);
345 if (Is_By_Reference_Type (gnat_underlying
))
346 TYPE_BY_REFERENCE_P (gnu_type
) = 1;
348 SET_DUMMY_NODE (gnat_underlying
, gnu_type
);
353 /* Return the dummy type that was made for GNAT_TYPE, if any. */
356 get_dummy_type (Entity_Id gnat_type
)
358 return GET_DUMMY_NODE (gnat_type
);
361 /* Build dummy fat and thin pointer types whose designated type is specified
362 by GNAT_DESIG_TYPE/GNU_DESIG_TYPE and attach them to the latter. */
365 build_dummy_unc_pointer_types (Entity_Id gnat_desig_type
, tree gnu_desig_type
)
367 tree gnu_template_type
, gnu_ptr_template
, gnu_array_type
, gnu_ptr_array
;
368 tree gnu_fat_type
, fields
, gnu_object_type
;
370 gnu_template_type
= make_node (RECORD_TYPE
);
371 TYPE_NAME (gnu_template_type
) = create_concat_name (gnat_desig_type
, "XUB");
372 TYPE_DUMMY_P (gnu_template_type
) = 1;
373 gnu_ptr_template
= build_pointer_type (gnu_template_type
);
375 gnu_array_type
= make_node (ENUMERAL_TYPE
);
376 TYPE_NAME (gnu_array_type
) = create_concat_name (gnat_desig_type
, "XUA");
377 TYPE_DUMMY_P (gnu_array_type
) = 1;
378 gnu_ptr_array
= build_pointer_type (gnu_array_type
);
380 gnu_fat_type
= make_node (RECORD_TYPE
);
381 /* Build a stub DECL to trigger the special processing for fat pointer types
383 TYPE_NAME (gnu_fat_type
)
384 = create_type_stub_decl (create_concat_name (gnat_desig_type
, "XUP"),
386 fields
= create_field_decl (get_identifier ("P_ARRAY"), gnu_ptr_array
,
387 gnu_fat_type
, NULL_TREE
, NULL_TREE
, 0, 0);
389 = create_field_decl (get_identifier ("P_BOUNDS"), gnu_ptr_template
,
390 gnu_fat_type
, NULL_TREE
, NULL_TREE
, 0, 0);
391 finish_fat_pointer_type (gnu_fat_type
, fields
);
392 SET_TYPE_UNCONSTRAINED_ARRAY (gnu_fat_type
, gnu_desig_type
);
393 /* Suppress debug info until after the type is completed. */
394 TYPE_DECL_SUPPRESS_DEBUG (TYPE_STUB_DECL (gnu_fat_type
)) = 1;
396 gnu_object_type
= make_node (RECORD_TYPE
);
397 TYPE_NAME (gnu_object_type
) = create_concat_name (gnat_desig_type
, "XUT");
398 TYPE_DUMMY_P (gnu_object_type
) = 1;
400 TYPE_POINTER_TO (gnu_desig_type
) = gnu_fat_type
;
401 TYPE_OBJECT_RECORD_TYPE (gnu_desig_type
) = gnu_object_type
;
404 /* Return true if we are in the global binding level. */
407 global_bindings_p (void)
409 return force_global
|| current_function_decl
== NULL_TREE
;
412 /* Enter a new binding level. */
415 gnat_pushlevel (void)
417 struct gnat_binding_level
*newlevel
= NULL
;
419 /* Reuse a struct for this binding level, if there is one. */
420 if (free_binding_level
)
422 newlevel
= free_binding_level
;
423 free_binding_level
= free_binding_level
->chain
;
426 newlevel
= ggc_alloc_gnat_binding_level ();
428 /* Use a free BLOCK, if any; otherwise, allocate one. */
429 if (free_block_chain
)
431 newlevel
->block
= free_block_chain
;
432 free_block_chain
= BLOCK_CHAIN (free_block_chain
);
433 BLOCK_CHAIN (newlevel
->block
) = NULL_TREE
;
436 newlevel
->block
= make_node (BLOCK
);
438 /* Point the BLOCK we just made to its parent. */
439 if (current_binding_level
)
440 BLOCK_SUPERCONTEXT (newlevel
->block
) = current_binding_level
->block
;
442 BLOCK_VARS (newlevel
->block
) = NULL_TREE
;
443 BLOCK_SUBBLOCKS (newlevel
->block
) = NULL_TREE
;
444 TREE_USED (newlevel
->block
) = 1;
446 /* Add this level to the front of the chain (stack) of active levels. */
447 newlevel
->chain
= current_binding_level
;
448 newlevel
->jmpbuf_decl
= NULL_TREE
;
449 current_binding_level
= newlevel
;
452 /* Set SUPERCONTEXT of the BLOCK for the current binding level to FNDECL
453 and point FNDECL to this BLOCK. */
456 set_current_block_context (tree fndecl
)
458 BLOCK_SUPERCONTEXT (current_binding_level
->block
) = fndecl
;
459 DECL_INITIAL (fndecl
) = current_binding_level
->block
;
460 set_block_for_group (current_binding_level
->block
);
463 /* Set the jmpbuf_decl for the current binding level to DECL. */
466 set_block_jmpbuf_decl (tree decl
)
468 current_binding_level
->jmpbuf_decl
= decl
;
471 /* Get the jmpbuf_decl, if any, for the current binding level. */
474 get_block_jmpbuf_decl (void)
476 return current_binding_level
->jmpbuf_decl
;
479 /* Exit a binding level. Set any BLOCK into the current code group. */
484 struct gnat_binding_level
*level
= current_binding_level
;
485 tree block
= level
->block
;
487 BLOCK_VARS (block
) = nreverse (BLOCK_VARS (block
));
488 BLOCK_SUBBLOCKS (block
) = blocks_nreverse (BLOCK_SUBBLOCKS (block
));
490 /* If this is a function-level BLOCK don't do anything. Otherwise, if there
491 are no variables free the block and merge its subblocks into those of its
492 parent block. Otherwise, add it to the list of its parent. */
493 if (TREE_CODE (BLOCK_SUPERCONTEXT (block
)) == FUNCTION_DECL
)
495 else if (BLOCK_VARS (block
) == NULL_TREE
)
497 BLOCK_SUBBLOCKS (level
->chain
->block
)
498 = block_chainon (BLOCK_SUBBLOCKS (block
),
499 BLOCK_SUBBLOCKS (level
->chain
->block
));
500 BLOCK_CHAIN (block
) = free_block_chain
;
501 free_block_chain
= block
;
505 BLOCK_CHAIN (block
) = BLOCK_SUBBLOCKS (level
->chain
->block
);
506 BLOCK_SUBBLOCKS (level
->chain
->block
) = block
;
507 TREE_USED (block
) = 1;
508 set_block_for_group (block
);
511 /* Free this binding structure. */
512 current_binding_level
= level
->chain
;
513 level
->chain
= free_binding_level
;
514 free_binding_level
= level
;
517 /* Exit a binding level and discard the associated BLOCK. */
522 struct gnat_binding_level
*level
= current_binding_level
;
523 tree block
= level
->block
;
525 BLOCK_CHAIN (block
) = free_block_chain
;
526 free_block_chain
= block
;
528 /* Free this binding structure. */
529 current_binding_level
= level
->chain
;
530 level
->chain
= free_binding_level
;
531 free_binding_level
= level
;
534 /* Record DECL as belonging to the current lexical scope and use GNAT_NODE
535 for location information and flag propagation. */
538 gnat_pushdecl (tree decl
, Node_Id gnat_node
)
540 /* If DECL is public external or at top level, it has global context. */
541 if ((TREE_PUBLIC (decl
) && DECL_EXTERNAL (decl
)) || global_bindings_p ())
544 global_context
= build_translation_unit_decl (NULL_TREE
);
545 DECL_CONTEXT (decl
) = global_context
;
549 DECL_CONTEXT (decl
) = current_function_decl
;
551 /* Functions imported in another function are not really nested.
552 For really nested functions mark them initially as needing
553 a static chain for uses of that flag before unnesting;
554 lower_nested_functions will then recompute it. */
555 if (TREE_CODE (decl
) == FUNCTION_DECL
&& !TREE_PUBLIC (decl
))
556 DECL_STATIC_CHAIN (decl
) = 1;
559 TREE_NO_WARNING (decl
) = (No (gnat_node
) || Warnings_Off (gnat_node
));
561 /* Set the location of DECL and emit a declaration for it. */
562 if (Present (gnat_node
))
563 Sloc_to_locus (Sloc (gnat_node
), &DECL_SOURCE_LOCATION (decl
));
565 add_decl_expr (decl
, gnat_node
);
567 /* Put the declaration on the list. The list of declarations is in reverse
568 order. The list will be reversed later. Put global declarations in the
569 globals list and local ones in the current block. But skip TYPE_DECLs
570 for UNCONSTRAINED_ARRAY_TYPE in both cases, as they will cause trouble
571 with the debugger and aren't needed anyway. */
572 if (!(TREE_CODE (decl
) == TYPE_DECL
573 && TREE_CODE (TREE_TYPE (decl
)) == UNCONSTRAINED_ARRAY_TYPE
))
575 if (DECL_EXTERNAL (decl
))
577 if (TREE_CODE (decl
) == FUNCTION_DECL
&& DECL_BUILT_IN (decl
))
578 VEC_safe_push (tree
, gc
, builtin_decls
, decl
);
580 else if (global_bindings_p ())
581 VEC_safe_push (tree
, gc
, global_decls
, decl
);
584 DECL_CHAIN (decl
) = BLOCK_VARS (current_binding_level
->block
);
585 BLOCK_VARS (current_binding_level
->block
) = decl
;
589 /* For the declaration of a type, set its name if it either is not already
590 set or if the previous type name was not derived from a source name.
591 We'd rather have the type named with a real name and all the pointer
592 types to the same object have the same POINTER_TYPE node. Code in the
593 equivalent function of c-decl.c makes a copy of the type node here, but
594 that may cause us trouble with incomplete types. We make an exception
595 for fat pointer types because the compiler automatically builds them
596 for unconstrained array types and the debugger uses them to represent
597 both these and pointers to these. */
598 if (TREE_CODE (decl
) == TYPE_DECL
&& DECL_NAME (decl
))
600 tree t
= TREE_TYPE (decl
);
602 if (!(TYPE_NAME (t
) && TREE_CODE (TYPE_NAME (t
)) == TYPE_DECL
))
604 /* Array and pointer types aren't "tagged" types so we force the
605 type to be associated with its typedef in the DWARF back-end,
606 in order to make sure that the latter is always preserved. */
607 if (!DECL_ARTIFICIAL (decl
)
608 && (TREE_CODE (t
) == ARRAY_TYPE
609 || TREE_CODE (t
) == POINTER_TYPE
))
611 tree tt
= build_distinct_type_copy (t
);
612 if (TREE_CODE (t
) == POINTER_TYPE
)
613 TYPE_NEXT_PTR_TO (t
) = tt
;
614 TYPE_NAME (tt
) = DECL_NAME (decl
);
615 TYPE_STUB_DECL (tt
) = TYPE_STUB_DECL (t
);
616 DECL_ORIGINAL_TYPE (decl
) = tt
;
619 else if (TYPE_IS_FAT_POINTER_P (t
))
621 /* We need a variant for the placeholder machinery to work. */
622 tree tt
= build_variant_type_copy (t
);
623 TYPE_NAME (tt
) = decl
;
624 TREE_USED (tt
) = TREE_USED (t
);
625 TREE_TYPE (decl
) = tt
;
626 if (DECL_ORIGINAL_TYPE (TYPE_NAME (t
)))
627 DECL_ORIGINAL_TYPE (decl
) = DECL_ORIGINAL_TYPE (TYPE_NAME (t
));
629 DECL_ORIGINAL_TYPE (decl
) = t
;
630 DECL_ARTIFICIAL (decl
) = 0;
633 else if (DECL_ARTIFICIAL (TYPE_NAME (t
)) && !DECL_ARTIFICIAL (decl
))
638 /* Propagate the name to all the anonymous variants. This is needed
639 for the type qualifiers machinery to work properly. */
641 for (t
= TYPE_MAIN_VARIANT (t
); t
; t
= TYPE_NEXT_VARIANT (t
))
642 if (!(TYPE_NAME (t
) && TREE_CODE (TYPE_NAME (t
)) == TYPE_DECL
))
643 TYPE_NAME (t
) = decl
;
647 /* Create a record type that contains a SIZE bytes long field of TYPE with a
648 starting bit position so that it is aligned to ALIGN bits, and leaving at
649 least ROOM bytes free before the field. BASE_ALIGN is the alignment the
650 record is guaranteed to get. */
653 make_aligning_type (tree type
, unsigned int align
, tree size
,
654 unsigned int base_align
, int room
)
656 /* We will be crafting a record type with one field at a position set to be
657 the next multiple of ALIGN past record'address + room bytes. We use a
658 record placeholder to express record'address. */
659 tree record_type
= make_node (RECORD_TYPE
);
660 tree record
= build0 (PLACEHOLDER_EXPR
, record_type
);
663 = convert (sizetype
, build_unary_op (ADDR_EXPR
, NULL_TREE
, record
));
665 /* The diagram below summarizes the shape of what we manipulate:
667 <--------- pos ---------->
668 { +------------+-------------+-----------------+
669 record =>{ |############| ... | field (type) |
670 { +------------+-------------+-----------------+
671 |<-- room -->|<- voffset ->|<---- size ----->|
674 record_addr vblock_addr
676 Every length is in sizetype bytes there, except "pos" which has to be
677 set as a bit position in the GCC tree for the record. */
678 tree room_st
= size_int (room
);
679 tree vblock_addr_st
= size_binop (PLUS_EXPR
, record_addr_st
, room_st
);
680 tree voffset_st
, pos
, field
;
682 tree name
= TYPE_NAME (type
);
684 if (TREE_CODE (name
) == TYPE_DECL
)
685 name
= DECL_NAME (name
);
686 name
= concat_name (name
, "ALIGN");
687 TYPE_NAME (record_type
) = name
;
689 /* Compute VOFFSET and then POS. The next byte position multiple of some
690 alignment after some address is obtained by "and"ing the alignment minus
691 1 with the two's complement of the address. */
692 voffset_st
= size_binop (BIT_AND_EXPR
,
693 fold_build1 (NEGATE_EXPR
, sizetype
, vblock_addr_st
),
694 size_int ((align
/ BITS_PER_UNIT
) - 1));
696 /* POS = (ROOM + VOFFSET) * BIT_PER_UNIT, in bitsizetype. */
697 pos
= size_binop (MULT_EXPR
,
698 convert (bitsizetype
,
699 size_binop (PLUS_EXPR
, room_st
, voffset_st
)),
702 /* Craft the GCC record representation. We exceptionally do everything
703 manually here because 1) our generic circuitry is not quite ready to
704 handle the complex position/size expressions we are setting up, 2) we
705 have a strong simplifying factor at hand: we know the maximum possible
706 value of voffset, and 3) we have to set/reset at least the sizes in
707 accordance with this maximum value anyway, as we need them to convey
708 what should be "alloc"ated for this type.
710 Use -1 as the 'addressable' indication for the field to prevent the
711 creation of a bitfield. We don't need one, it would have damaging
712 consequences on the alignment computation, and create_field_decl would
713 make one without this special argument, for instance because of the
714 complex position expression. */
715 field
= create_field_decl (get_identifier ("F"), type
, record_type
, size
,
717 TYPE_FIELDS (record_type
) = field
;
719 TYPE_ALIGN (record_type
) = base_align
;
720 TYPE_USER_ALIGN (record_type
) = 1;
722 TYPE_SIZE (record_type
)
723 = size_binop (PLUS_EXPR
,
724 size_binop (MULT_EXPR
, convert (bitsizetype
, size
),
726 bitsize_int (align
+ room
* BITS_PER_UNIT
));
727 TYPE_SIZE_UNIT (record_type
)
728 = size_binop (PLUS_EXPR
, size
,
729 size_int (room
+ align
/ BITS_PER_UNIT
));
731 SET_TYPE_MODE (record_type
, BLKmode
);
732 relate_alias_sets (record_type
, type
, ALIAS_SET_COPY
);
734 /* Declare it now since it will never be declared otherwise. This is
735 necessary to ensure that its subtrees are properly marked. */
736 create_type_decl (name
, record_type
, NULL
, true, false, Empty
);
741 /* TYPE is a RECORD_TYPE, UNION_TYPE or QUAL_UNION_TYPE that is being used
742 as the field type of a packed record if IN_RECORD is true, or as the
743 component type of a packed array if IN_RECORD is false. See if we can
744 rewrite it either as a type that has a non-BLKmode, which we can pack
745 tighter in the packed record case, or as a smaller type. If so, return
746 the new type. If not, return the original type. */
749 make_packable_type (tree type
, bool in_record
)
751 unsigned HOST_WIDE_INT size
= tree_low_cst (TYPE_SIZE (type
), 1);
752 unsigned HOST_WIDE_INT new_size
;
753 tree new_type
, old_field
, field_list
= NULL_TREE
;
756 /* No point in doing anything if the size is zero. */
760 new_type
= make_node (TREE_CODE (type
));
762 /* Copy the name and flags from the old type to that of the new.
763 Note that we rely on the pointer equality created here for
764 TYPE_NAME to look through conversions in various places. */
765 TYPE_NAME (new_type
) = TYPE_NAME (type
);
766 TYPE_JUSTIFIED_MODULAR_P (new_type
) = TYPE_JUSTIFIED_MODULAR_P (type
);
767 TYPE_CONTAINS_TEMPLATE_P (new_type
) = TYPE_CONTAINS_TEMPLATE_P (type
);
768 if (TREE_CODE (type
) == RECORD_TYPE
)
769 TYPE_PADDING_P (new_type
) = TYPE_PADDING_P (type
);
771 /* If we are in a record and have a small size, set the alignment to
772 try for an integral mode. Otherwise set it to try for a smaller
773 type with BLKmode. */
774 if (in_record
&& size
<= MAX_FIXED_MODE_SIZE
)
776 align
= ceil_pow2 (size
);
777 TYPE_ALIGN (new_type
) = align
;
778 new_size
= (size
+ align
- 1) & -align
;
782 unsigned HOST_WIDE_INT align
;
784 /* Do not try to shrink the size if the RM size is not constant. */
785 if (TYPE_CONTAINS_TEMPLATE_P (type
)
786 || !host_integerp (TYPE_ADA_SIZE (type
), 1))
789 /* Round the RM size up to a unit boundary to get the minimal size
790 for a BLKmode record. Give up if it's already the size. */
791 new_size
= TREE_INT_CST_LOW (TYPE_ADA_SIZE (type
));
792 new_size
= (new_size
+ BITS_PER_UNIT
- 1) & -BITS_PER_UNIT
;
793 if (new_size
== size
)
796 align
= new_size
& -new_size
;
797 TYPE_ALIGN (new_type
) = MIN (TYPE_ALIGN (type
), align
);
800 TYPE_USER_ALIGN (new_type
) = 1;
802 /* Now copy the fields, keeping the position and size as we don't want
803 to change the layout by propagating the packedness downwards. */
804 for (old_field
= TYPE_FIELDS (type
); old_field
;
805 old_field
= DECL_CHAIN (old_field
))
807 tree new_field_type
= TREE_TYPE (old_field
);
808 tree new_field
, new_size
;
810 if (RECORD_OR_UNION_TYPE_P (new_field_type
)
811 && !TYPE_FAT_POINTER_P (new_field_type
)
812 && host_integerp (TYPE_SIZE (new_field_type
), 1))
813 new_field_type
= make_packable_type (new_field_type
, true);
815 /* However, for the last field in a not already packed record type
816 that is of an aggregate type, we need to use the RM size in the
817 packable version of the record type, see finish_record_type. */
818 if (!DECL_CHAIN (old_field
)
819 && !TYPE_PACKED (type
)
820 && RECORD_OR_UNION_TYPE_P (new_field_type
)
821 && !TYPE_FAT_POINTER_P (new_field_type
)
822 && !TYPE_CONTAINS_TEMPLATE_P (new_field_type
)
823 && TYPE_ADA_SIZE (new_field_type
))
824 new_size
= TYPE_ADA_SIZE (new_field_type
);
826 new_size
= DECL_SIZE (old_field
);
829 = create_field_decl (DECL_NAME (old_field
), new_field_type
, new_type
,
830 new_size
, bit_position (old_field
),
832 !DECL_NONADDRESSABLE_P (old_field
));
834 DECL_INTERNAL_P (new_field
) = DECL_INTERNAL_P (old_field
);
835 SET_DECL_ORIGINAL_FIELD_TO_FIELD (new_field
, old_field
);
836 if (TREE_CODE (new_type
) == QUAL_UNION_TYPE
)
837 DECL_QUALIFIER (new_field
) = DECL_QUALIFIER (old_field
);
839 DECL_CHAIN (new_field
) = field_list
;
840 field_list
= new_field
;
843 finish_record_type (new_type
, nreverse (field_list
), 2, false);
844 relate_alias_sets (new_type
, type
, ALIAS_SET_COPY
);
845 SET_DECL_PARALLEL_TYPE (TYPE_STUB_DECL (new_type
),
846 DECL_PARALLEL_TYPE (TYPE_STUB_DECL (type
)));
848 /* If this is a padding record, we never want to make the size smaller
849 than what was specified. For QUAL_UNION_TYPE, also copy the size. */
850 if (TYPE_IS_PADDING_P (type
) || TREE_CODE (type
) == QUAL_UNION_TYPE
)
852 TYPE_SIZE (new_type
) = TYPE_SIZE (type
);
853 TYPE_SIZE_UNIT (new_type
) = TYPE_SIZE_UNIT (type
);
858 TYPE_SIZE (new_type
) = bitsize_int (new_size
);
859 TYPE_SIZE_UNIT (new_type
)
860 = size_int ((new_size
+ BITS_PER_UNIT
- 1) / BITS_PER_UNIT
);
863 if (!TYPE_CONTAINS_TEMPLATE_P (type
))
864 SET_TYPE_ADA_SIZE (new_type
, TYPE_ADA_SIZE (type
));
866 compute_record_mode (new_type
);
868 /* Try harder to get a packable type if necessary, for example
869 in case the record itself contains a BLKmode field. */
870 if (in_record
&& TYPE_MODE (new_type
) == BLKmode
)
871 SET_TYPE_MODE (new_type
,
872 mode_for_size_tree (TYPE_SIZE (new_type
), MODE_INT
, 1));
874 /* If neither the mode nor the size has shrunk, return the old type. */
875 if (TYPE_MODE (new_type
) == BLKmode
&& new_size
>= size
)
881 /* Given a type TYPE, return a new type whose size is appropriate for SIZE.
882 If TYPE is the best type, return it. Otherwise, make a new type. We
883 only support new integral and pointer types. FOR_BIASED is true if
884 we are making a biased type. */
887 make_type_from_size (tree type
, tree size_tree
, bool for_biased
)
889 unsigned HOST_WIDE_INT size
;
893 /* If size indicates an error, just return TYPE to avoid propagating
894 the error. Likewise if it's too large to represent. */
895 if (!size_tree
|| !host_integerp (size_tree
, 1))
898 size
= tree_low_cst (size_tree
, 1);
900 switch (TREE_CODE (type
))
905 biased_p
= (TREE_CODE (type
) == INTEGER_TYPE
906 && TYPE_BIASED_REPRESENTATION_P (type
));
908 /* Integer types with precision 0 are forbidden. */
912 /* Only do something if the type isn't a packed array type and doesn't
913 already have the proper size and the size isn't too large. */
914 if (TYPE_IS_PACKED_ARRAY_TYPE_P (type
)
915 || (TYPE_PRECISION (type
) == size
&& biased_p
== for_biased
)
916 || size
> LONG_LONG_TYPE_SIZE
)
919 biased_p
|= for_biased
;
920 if (TYPE_UNSIGNED (type
) || biased_p
)
921 new_type
= make_unsigned_type (size
);
923 new_type
= make_signed_type (size
);
924 TREE_TYPE (new_type
) = TREE_TYPE (type
) ? TREE_TYPE (type
) : type
;
925 SET_TYPE_RM_MIN_VALUE (new_type
,
926 convert (TREE_TYPE (new_type
),
927 TYPE_MIN_VALUE (type
)));
928 SET_TYPE_RM_MAX_VALUE (new_type
,
929 convert (TREE_TYPE (new_type
),
930 TYPE_MAX_VALUE (type
)));
931 /* Copy the name to show that it's essentially the same type and
932 not a subrange type. */
933 TYPE_NAME (new_type
) = TYPE_NAME (type
);
934 TYPE_BIASED_REPRESENTATION_P (new_type
) = biased_p
;
935 SET_TYPE_RM_SIZE (new_type
, bitsize_int (size
));
939 /* Do something if this is a fat pointer, in which case we
940 may need to return the thin pointer. */
941 if (TYPE_FAT_POINTER_P (type
) && size
< POINTER_SIZE
* 2)
943 enum machine_mode p_mode
= mode_for_size (size
, MODE_INT
, 0);
944 if (!targetm
.valid_pointer_mode (p_mode
))
947 build_pointer_type_for_mode
948 (TYPE_OBJECT_RECORD_TYPE (TYPE_UNCONSTRAINED_ARRAY (type
)),
954 /* Only do something if this is a thin pointer, in which case we
955 may need to return the fat pointer. */
956 if (TYPE_IS_THIN_POINTER_P (type
) && size
>= POINTER_SIZE
* 2)
958 build_pointer_type (TYPE_UNCONSTRAINED_ARRAY (TREE_TYPE (type
)));
968 /* See if the data pointed to by the hash table slot is marked. */
971 pad_type_hash_marked_p (const void *p
)
973 const_tree
const type
= ((const struct pad_type_hash
*) p
)->type
;
975 return ggc_marked_p (type
);
978 /* Return the cached hash value. */
981 pad_type_hash_hash (const void *p
)
983 return ((const struct pad_type_hash
*) p
)->hash
;
986 /* Return 1 iff the padded types are equivalent. */
989 pad_type_hash_eq (const void *p1
, const void *p2
)
991 const struct pad_type_hash
*const t1
= (const struct pad_type_hash
*) p1
;
992 const struct pad_type_hash
*const t2
= (const struct pad_type_hash
*) p2
;
995 if (t1
->hash
!= t2
->hash
)
1001 /* We consider that the padded types are equivalent if they pad the same
1002 type and have the same size, alignment and RM size. Taking the mode
1003 into account is redundant since it is determined by the others. */
1005 TREE_TYPE (TYPE_FIELDS (type1
)) == TREE_TYPE (TYPE_FIELDS (type2
))
1006 && TYPE_SIZE (type1
) == TYPE_SIZE (type2
)
1007 && TYPE_ALIGN (type1
) == TYPE_ALIGN (type2
)
1008 && TYPE_ADA_SIZE (type1
) == TYPE_ADA_SIZE (type2
);
/* NOTE(review): this function body was recovered from a line-mangled
   extraction; several original lines (braces, "else" arms, the final
   "return record;") appear to be missing.  Verify against the pristine
   gcc/ada/gcc-interface/utils.c before rebuilding.  */
1011 /* Ensure that TYPE has SIZE and ALIGN. Make and return a new padded type
1012 if needed. We have already verified that SIZE and TYPE are large enough.
1013 GNAT_ENTITY is used to name the resulting record and to issue a warning.
1014 IS_COMPONENT_TYPE is true if this is being done for the component type of
1015 an array. IS_USER_TYPE is true if the original type needs to be completed.
1016 DEFINITION is true if this type is being defined. SET_RM_SIZE is true if
1017 the RM size of the resulting type is to be set to SIZE too. */
1020 maybe_pad_type (tree type
, tree size
, unsigned int align
,
1021 Entity_Id gnat_entity
, bool is_component_type
,
1022 bool is_user_type
, bool definition
, bool set_rm_size
)
1024 tree orig_size
= TYPE_SIZE (type
);
1027 /* If TYPE is a padded type, see if it agrees with any size and alignment
1028 we were given. If so, return the original type. Otherwise, strip
1029 off the padding, since we will either be returning the inner type
1030 or repadding it. If no size or alignment is specified, use that of
1031 the original padded type. */
1032 if (TYPE_IS_PADDING_P (type
))
1035 || operand_equal_p (round_up (size
,
1036 MAX (align
, TYPE_ALIGN (type
))),
1037 round_up (TYPE_SIZE (type
),
1038 MAX (align
, TYPE_ALIGN (type
))),
1040 && (align
== 0 || align
== TYPE_ALIGN (type
)))
1044 size
= TYPE_SIZE (type
);
1046 align
= TYPE_ALIGN (type
);
1048 type
= TREE_TYPE (TYPE_FIELDS (type
));
1049 orig_size
= TYPE_SIZE (type
);
1052 /* If the size is either not being changed or is being made smaller (which
1053 is not done here and is only valid for bitfields anyway), show the size
1054 isn't changing. Likewise, clear the alignment if it isn't being
1055 changed. Then return if we aren't doing anything. */
1057 && (operand_equal_p (size
, orig_size
, 0)
1058 || (TREE_CODE (orig_size
) == INTEGER_CST
1059 && tree_int_cst_lt (size
, orig_size
))))
1062 if (align
== TYPE_ALIGN (type
))
1065 if (align
== 0 && !size
)
1068 /* If requested, complete the original type and give it a name. */
1070 create_type_decl (get_entity_name (gnat_entity
), type
,
1071 NULL
, !Comes_From_Source (gnat_entity
),
1073 && TREE_CODE (TYPE_NAME (type
)) == TYPE_DECL
1074 && DECL_IGNORED_P (TYPE_NAME (type
))),
1077 /* We used to modify the record in place in some cases, but that could
1078 generate incorrect debugging information. So make a new record
1080 record
= make_node (RECORD_TYPE
);
1081 TYPE_PADDING_P (record
) = 1;
1083 if (Present (gnat_entity
))
1084 TYPE_NAME (record
) = create_concat_name (gnat_entity
, "PAD");
1086 TYPE_ALIGN (record
) = align
;
1087 TYPE_SIZE (record
) = size
? size
: orig_size
;
1088 TYPE_SIZE_UNIT (record
)
1089 = convert (sizetype
,
1090 size_binop (CEIL_DIV_EXPR
, TYPE_SIZE (record
),
1091 bitsize_unit_node
));
1093 /* If we are changing the alignment and the input type is a record with
1094 BLKmode and a small constant size, try to make a form that has an
1095 integral mode. This might allow the padding record to also have an
1096 integral mode, which will be much more efficient. There is no point
1097 in doing so if a size is specified unless it is also a small constant
1098 size and it is incorrect to do so if we cannot guarantee that the mode
1099 will be naturally aligned since the field must always be addressable.
1101 ??? This might not always be a win when done for a stand-alone object:
1102 since the nominal and the effective type of the object will now have
1103 different modes, a VIEW_CONVERT_EXPR will be required for converting
1104 between them and it might be hard to overcome afterwards, including
1105 at the RTL level when the stand-alone object is accessed as a whole. */
1107 && RECORD_OR_UNION_TYPE_P (type
)
1108 && TYPE_MODE (type
) == BLKmode
1109 && !TYPE_BY_REFERENCE_P (type
)
1110 && TREE_CODE (orig_size
) == INTEGER_CST
1111 && !TREE_OVERFLOW (orig_size
)
1112 && compare_tree_int (orig_size
, MAX_FIXED_MODE_SIZE
) <= 0
1114 || (TREE_CODE (size
) == INTEGER_CST
1115 && compare_tree_int (size
, MAX_FIXED_MODE_SIZE
) <= 0)))
1117 tree packable_type
= make_packable_type (type
, true);
1118 if (TYPE_MODE (packable_type
) != BLKmode
1119 && align
>= TYPE_ALIGN (packable_type
))
1120 type
= packable_type
;
1123 /* Now create the field with the original size. */
1124 field
= create_field_decl (get_identifier ("F"), type
, record
, orig_size
,
1125 bitsize_zero_node
, 0, 1);
1126 DECL_INTERNAL_P (field
) = 1;
1128 /* Do not emit debug info until after the auxiliary record is built. */
1129 finish_record_type (record
, field
, 1, false);
1131 /* Set the RM size if requested. */
1134 SET_TYPE_ADA_SIZE (record
, size
? size
: orig_size
);
1136 /* If the padded type is complete and has constant size, we canonicalize
1137 it by means of the hash table. This is consistent with the language
1138 semantics and ensures that gigi and the middle-end have a common view
1139 of these padded types. */
1140 if (TREE_CONSTANT (TYPE_SIZE (record
)))
1143 struct pad_type_hash in
, *h
;
/* The hash folds in the inner type, the size, the alignment and the
   RM size -- the same quadruple that pad_type_hash_eq compares.  */
1146 hashcode
= iterative_hash_object (TYPE_HASH (type
), 0);
1147 hashcode
= iterative_hash_expr (TYPE_SIZE (record
), hashcode
);
1148 hashcode
= iterative_hash_hashval_t (TYPE_ALIGN (record
), hashcode
);
1149 hashcode
= iterative_hash_expr (TYPE_ADA_SIZE (record
), hashcode
);
1153 h
= (struct pad_type_hash
*)
1154 htab_find_with_hash (pad_type_hash_table
, &in
, hashcode
);
1161 h
= ggc_alloc_pad_type_hash ();
1164 loc
= htab_find_slot_with_hash (pad_type_hash_table
, h
, hashcode
,
1170 /* Unless debugging information isn't being written for the input type,
1171 write a record that shows what we are a subtype of and also make a
1172 variable that indicates our size, if still variable. */
1173 if (TREE_CODE (orig_size
) != INTEGER_CST
1174 && TYPE_NAME (record
)
1176 && !(TREE_CODE (TYPE_NAME (type
)) == TYPE_DECL
1177 && DECL_IGNORED_P (TYPE_NAME (type
))))
1179 tree marker
= make_node (RECORD_TYPE
);
1180 tree name
= TYPE_NAME (record
);
1181 tree orig_name
= TYPE_NAME (type
);
1183 if (TREE_CODE (name
) == TYPE_DECL
)
1184 name
= DECL_NAME (name
);
1186 if (TREE_CODE (orig_name
) == TYPE_DECL
)
1187 orig_name
= DECL_NAME (orig_name
);
1189 TYPE_NAME (marker
) = concat_name (name
, "XVS");
1190 finish_record_type (marker
,
1191 create_field_decl (orig_name
,
1192 build_reference_type (type
),
1193 marker
, NULL_TREE
, NULL_TREE
,
1197 add_parallel_type (record
, marker
);
/* For a size known only at run time, also create an XVZ variable
   holding the byte size for the debugger.  */
1199 if (definition
&& size
&& TREE_CODE (size
) != INTEGER_CST
)
1200 TYPE_SIZE_UNIT (marker
)
1201 = create_var_decl (concat_name (name
, "XVZ"), NULL_TREE
, sizetype
,
1202 TYPE_SIZE_UNIT (record
), false, false, false,
1203 false, NULL
, gnat_entity
);
1206 rest_of_record_type_compilation (record
);
1209 /* If the size was widened explicitly, maybe give a warning. Take the
1210 original size as the maximum size of the input if there was an
1211 unconstrained record involved and round it up to the specified alignment,
1212 if one was specified. But don't do it if we are just annotating types
1213 and the type is tagged, since tagged types aren't fully laid out in this
1216 || TREE_CODE (size
) == COND_EXPR
1217 || TREE_CODE (size
) == MAX_EXPR
1219 || (type_annotate_only
&& Is_Tagged_Type (Etype (gnat_entity
))))
1222 if (CONTAINS_PLACEHOLDER_P (orig_size
))
1223 orig_size
= max_size (orig_size
, true);
1226 orig_size
= round_up (orig_size
, align
);
1228 if (!operand_equal_p (size
, orig_size
, 0)
1229 && !(TREE_CODE (size
) == INTEGER_CST
1230 && TREE_CODE (orig_size
) == INTEGER_CST
1231 && (TREE_OVERFLOW (size
)
1232 || TREE_OVERFLOW (orig_size
)
1233 || tree_int_cst_lt (size
, orig_size
))))
1235 Node_Id gnat_error_node
= Empty
;
1237 if (Is_Packed_Array_Type (gnat_entity
))
1238 gnat_entity
= Original_Array_Type (gnat_entity
);
1240 if ((Ekind (gnat_entity
) == E_Component
1241 || Ekind (gnat_entity
) == E_Discriminant
)
1242 && Present (Component_Clause (gnat_entity
)))
1243 gnat_error_node
= Last_Bit (Component_Clause (gnat_entity
));
1244 else if (Present (Size_Clause (gnat_entity
)))
1245 gnat_error_node
= Expression (Size_Clause (gnat_entity
));
1247 /* Generate message only for entities that come from source, since
1248 if we have an entity created by expansion, the message will be
1249 generated for some other corresponding source entity. */
1250 if (Comes_From_Source (gnat_entity
))
1252 if (Present (gnat_error_node
))
1253 post_error_ne_tree ("{^ }bits of & unused?",
1254 gnat_error_node
, gnat_entity
,
1255 size_diffop (size
, orig_size
));
1256 else if (is_component_type
)
1257 post_error_ne_tree ("component of& padded{ by ^ bits}?",
1258 gnat_entity
, gnat_entity
,
1259 size_diffop (size
, orig_size
));
/* NOTE(review): this function body was recovered from a line-mangled
   extraction; the "switch (op)" header, the break statements and the
   default case appear to be missing.  Verify against the pristine
   source before rebuilding.  */
1266 /* Relate the alias sets of GNU_NEW_TYPE and GNU_OLD_TYPE according to OP.
1267 If this is a multi-dimensional array type, do this recursively.
1270 - ALIAS_SET_COPY: the new set is made a copy of the old one.
1271 - ALIAS_SET_SUPERSET: the new set is made a superset of the old one.
1272 - ALIAS_SET_SUBSET: the new set is made a subset of the old one. */
1275 relate_alias_sets (tree gnu_new_type
, tree gnu_old_type
, enum alias_set_op op
)
1277 /* Remove any padding from GNU_OLD_TYPE. It doesn't matter in the case
1278 of a one-dimensional array, since the padding has the same alias set
1279 as the field type, but if it's a multi-dimensional array, we need to
1280 see the inner types. */
1281 while (TREE_CODE (gnu_old_type
) == RECORD_TYPE
1282 && (TYPE_JUSTIFIED_MODULAR_P (gnu_old_type
)
1283 || TYPE_PADDING_P (gnu_old_type
)))
1284 gnu_old_type
= TREE_TYPE (TYPE_FIELDS (gnu_old_type
));
1286 /* Unconstrained array types are deemed incomplete and would thus be given
1287 alias set 0. Retrieve the underlying array type. */
1288 if (TREE_CODE (gnu_old_type
) == UNCONSTRAINED_ARRAY_TYPE
)
1290 = TREE_TYPE (TREE_TYPE (TYPE_FIELDS (TREE_TYPE (gnu_old_type
))));
1291 if (TREE_CODE (gnu_new_type
) == UNCONSTRAINED_ARRAY_TYPE
)
1293 = TREE_TYPE (TREE_TYPE (TYPE_FIELDS (TREE_TYPE (gnu_new_type
))));
/* Recurse on the inner dimension of a multi-dimensional array so that
   every level of the array gets related.  */
1295 if (TREE_CODE (gnu_new_type
) == ARRAY_TYPE
1296 && TREE_CODE (TREE_TYPE (gnu_new_type
)) == ARRAY_TYPE
1297 && TYPE_MULTI_ARRAY_P (TREE_TYPE (gnu_new_type
)))
1298 relate_alias_sets (TREE_TYPE (gnu_new_type
), TREE_TYPE (gnu_old_type
), op
);
/* NOTE(review): the enclosing "switch (op)" header was lost in the
   extraction; each case below matches one of the ops documented above.  */
1302 case ALIAS_SET_COPY
:
1303 /* The alias set shouldn't be copied between array types with different
1304 aliasing settings because this can break the aliasing relationship
1305 between the array type and its element type. */
1306 #ifndef ENABLE_CHECKING
1307 if (flag_strict_aliasing
)
1309 gcc_assert (!(TREE_CODE (gnu_new_type
) == ARRAY_TYPE
1310 && TREE_CODE (gnu_old_type
) == ARRAY_TYPE
1311 && TYPE_NONALIASED_COMPONENT (gnu_new_type
)
1312 != TYPE_NONALIASED_COMPONENT (gnu_old_type
)));
1314 TYPE_ALIAS_SET (gnu_new_type
) = get_alias_set (gnu_old_type
);
1317 case ALIAS_SET_SUBSET
:
1318 case ALIAS_SET_SUPERSET
:
1320 alias_set_type old_set
= get_alias_set (gnu_old_type
);
1321 alias_set_type new_set
= get_alias_set (gnu_new_type
);
1323 /* Do nothing if the alias sets conflict. This ensures that we
1324 never call record_alias_subset several times for the same pair
1325 or at all for alias set 0. */
1326 if (!alias_sets_conflict_p (old_set
, new_set
))
1328 if (op
== ALIAS_SET_SUBSET
)
1329 record_alias_subset (old_set
, new_set
);
1331 record_alias_subset (new_set
, old_set
);
1340 record_component_aliases (gnu_new_type
);
1343 /* Record TYPE as a builtin type for Ada. NAME is the name of the type.
1344 ARTIFICIAL_P is true if it's a type that was generated by the compiler. */
1347 record_builtin_type (const char *name
, tree type
, bool artificial_p
)
1349 tree type_decl
= build_decl (input_location
,
1350 TYPE_DECL
, get_identifier (name
), type
);
1351 DECL_ARTIFICIAL (type_decl
) = artificial_p
;
1352 TYPE_ARTIFICIAL (type
) = artificial_p
;
1353 gnat_pushdecl (type_decl
, Empty
);
1355 if (debug_hooks
->type_decl
)
1356 debug_hooks
->type_decl (type_decl
, false);
1359 /* Given a record type RECORD_TYPE and a list of FIELD_DECL nodes FIELD_LIST,
1360 finish constructing the record type as a fat pointer type. */
1363 finish_fat_pointer_type (tree record_type
, tree field_list
)
1365 /* Make sure we can put it into a register. */
1366 TYPE_ALIGN (record_type
) = MIN (BIGGEST_ALIGNMENT
, 2 * POINTER_SIZE
);
1368 /* Show what it really is. */
1369 TYPE_FAT_POINTER_P (record_type
) = 1;
1371 /* Do not emit debug info for it since the types of its fields may still be
1372 incomplete at this point. */
1373 finish_record_type (record_type
, field_list
, 0, false);
1375 /* Force type_contains_placeholder_p to return true on it. Although the
1376 PLACEHOLDER_EXPRs are referenced only indirectly, this isn't a pointer
1377 type but the representation of the unconstrained array. */
1378 TYPE_CONTAINS_PLACEHOLDER_INTERNAL (record_type
) = 2;
/* NOTE(review): this function body was recovered from a line-mangled
   extraction; declarations (e.g. "tree field;"), the "switch (code)"
   header, several case labels and break statements appear to be
   missing.  Verify against the pristine source before rebuilding.  */
1381 /* Given a record type RECORD_TYPE and a list of FIELD_DECL nodes FIELD_LIST,
1382 finish constructing the record or union type. If REP_LEVEL is zero, this
1383 record has no representation clause and so will be entirely laid out here.
1384 If REP_LEVEL is one, this record has a representation clause and has been
1385 laid out already; only set the sizes and alignment. If REP_LEVEL is two,
1386 this record is derived from a parent record and thus inherits its layout;
1387 only make a pass on the fields to finalize them. DEBUG_INFO_P is true if
1388 we need to write debug information about this type. */
1391 finish_record_type (tree record_type
, tree field_list
, int rep_level
,
1394 enum tree_code code
= TREE_CODE (record_type
);
1395 tree name
= TYPE_NAME (record_type
);
1396 tree ada_size
= bitsize_zero_node
;
1397 tree size
= bitsize_zero_node
;
1398 bool had_size
= TYPE_SIZE (record_type
) != 0;
1399 bool had_size_unit
= TYPE_SIZE_UNIT (record_type
) != 0;
1400 bool had_align
= TYPE_ALIGN (record_type
) != 0;
1403 TYPE_FIELDS (record_type
) = field_list
;
1405 /* Always attach the TYPE_STUB_DECL for a record type. It is required to
1406 generate debug info and have a parallel type. */
1407 if (name
&& TREE_CODE (name
) == TYPE_DECL
)
1408 name
= DECL_NAME (name
);
1409 TYPE_STUB_DECL (record_type
) = create_type_stub_decl (name
, record_type
);
1411 /* Globally initialize the record first. If this is a rep'ed record,
1412 that just means some initializations; otherwise, layout the record. */
1415 TYPE_ALIGN (record_type
) = MAX (BITS_PER_UNIT
, TYPE_ALIGN (record_type
));
1418 TYPE_SIZE_UNIT (record_type
) = size_zero_node
;
1421 TYPE_SIZE (record_type
) = bitsize_zero_node
;
1423 /* For all-repped records with a size specified, lay the QUAL_UNION_TYPE
1424 out just like a UNION_TYPE, since the size will be fixed. */
1425 else if (code
== QUAL_UNION_TYPE
)
1430 /* Ensure there isn't a size already set. There can be in an error
1431 case where there is a rep clause but all fields have errors and
1432 no longer have a position. */
1433 TYPE_SIZE (record_type
) = 0;
1435 /* Ensure we use the traditional GCC layout for bitfields when we need
1436 to pack the record type or have a representation clause. The other
1437 possible layout (Microsoft C compiler), if available, would prevent
1438 efficient packing in almost all cases. */
1439 #ifdef TARGET_MS_BITFIELD_LAYOUT
1440 if (TARGET_MS_BITFIELD_LAYOUT
&& TYPE_PACKED (record_type
))
1441 decl_attributes (&record_type
,
1442 tree_cons (get_identifier ("gcc_struct"),
1443 NULL_TREE
, NULL_TREE
),
1444 ATTR_FLAG_TYPE_IN_PLACE
);
1447 layout_type (record_type
);
1450 /* At this point, the position and size of each field is known. It was
1451 either set before entry by a rep clause, or by laying out the type above.
1453 We now run a pass over the fields (in reverse order for QUAL_UNION_TYPEs)
1454 to compute the Ada size; the GCC size and alignment (for rep'ed records
1455 that are not padding types); and the mode (for rep'ed records). We also
1456 clear the DECL_BIT_FIELD indication for the cases we know have not been
1457 handled yet, and adjust DECL_NONADDRESSABLE_P accordingly. */
1459 if (code
== QUAL_UNION_TYPE
)
1460 field_list
= nreverse (field_list
);
1462 for (field
= field_list
; field
; field
= DECL_CHAIN (field
))
1464 tree type
= TREE_TYPE (field
);
1465 tree pos
= bit_position (field
);
1466 tree this_size
= DECL_SIZE (field
);
/* Prefer the field type's Ada (RM) size over its GCC size where one
   has been recorded, except for fat pointers and template records.  */
1469 if (RECORD_OR_UNION_TYPE_P (type
)
1470 && !TYPE_FAT_POINTER_P (type
)
1471 && !TYPE_CONTAINS_TEMPLATE_P (type
)
1472 && TYPE_ADA_SIZE (type
))
1473 this_ada_size
= TYPE_ADA_SIZE (type
);
1475 this_ada_size
= this_size
;
1477 /* Clear DECL_BIT_FIELD for the cases layout_decl does not handle. */
1478 if (DECL_BIT_FIELD (field
)
1479 && operand_equal_p (this_size
, TYPE_SIZE (type
), 0))
1481 unsigned int align
= TYPE_ALIGN (type
);
1483 /* In the general case, type alignment is required. */
1484 if (value_factor_p (pos
, align
))
1486 /* The enclosing record type must be sufficiently aligned.
1487 Otherwise, if no alignment was specified for it and it
1488 has been laid out already, bump its alignment to the
1489 desired one if this is compatible with its size. */
1490 if (TYPE_ALIGN (record_type
) >= align
)
1492 DECL_ALIGN (field
) = MAX (DECL_ALIGN (field
), align
);
1493 DECL_BIT_FIELD (field
) = 0;
1497 && value_factor_p (TYPE_SIZE (record_type
), align
))
1499 TYPE_ALIGN (record_type
) = align
;
1500 DECL_ALIGN (field
) = MAX (DECL_ALIGN (field
), align
);
1501 DECL_BIT_FIELD (field
) = 0;
1505 /* In the non-strict alignment case, only byte alignment is. */
1506 if (!STRICT_ALIGNMENT
1507 && DECL_BIT_FIELD (field
)
1508 && value_factor_p (pos
, BITS_PER_UNIT
))
1509 DECL_BIT_FIELD (field
) = 0;
1512 /* If we still have DECL_BIT_FIELD set at this point, we know that the
1513 field is technically not addressable. Except that it can actually
1514 be addressed if it is BLKmode and happens to be properly aligned. */
1515 if (DECL_BIT_FIELD (field
)
1516 && !(DECL_MODE (field
) == BLKmode
1517 && value_factor_p (pos
, BITS_PER_UNIT
)))
1518 DECL_NONADDRESSABLE_P (field
) = 1;
1520 /* A type must be as aligned as its most aligned field that is not
1521 a bit-field. But this is already enforced by layout_type. */
1522 if (rep_level
> 0 && !DECL_BIT_FIELD (field
))
1523 TYPE_ALIGN (record_type
)
1524 = MAX (TYPE_ALIGN (record_type
), DECL_ALIGN (field
));
/* NOTE(review): the "switch (code)" header and the other case labels
   (UNION_TYPE / RECORD_TYPE) with their break statements were lost in
   the extraction.  */
1529 ada_size
= size_binop (MAX_EXPR
, ada_size
, this_ada_size
);
1530 size
= size_binop (MAX_EXPR
, size
, this_size
);
1533 case QUAL_UNION_TYPE
:
1535 = fold_build3 (COND_EXPR
, bitsizetype
, DECL_QUALIFIER (field
),
1536 this_ada_size
, ada_size
);
1537 size
= fold_build3 (COND_EXPR
, bitsizetype
, DECL_QUALIFIER (field
),
1542 /* Since we know here that all fields are sorted in order of
1543 increasing bit position, the size of the record is one
1544 higher than the ending bit of the last field processed
1545 unless we have a rep clause, since in that case we might
1546 have a field outside a QUAL_UNION_TYPE that has a higher ending
1547 position. So use a MAX in that case. Also, if this field is a
1548 QUAL_UNION_TYPE, we need to take into account the previous size in
1549 the case of empty variants. */
1551 = merge_sizes (ada_size
, pos
, this_ada_size
,
1552 TREE_CODE (type
) == QUAL_UNION_TYPE
, rep_level
> 0);
1554 = merge_sizes (size
, pos
, this_size
,
1555 TREE_CODE (type
) == QUAL_UNION_TYPE
, rep_level
> 0);
1563 if (code
== QUAL_UNION_TYPE
)
1564 nreverse (field_list
);
1568 /* If this is a padding record, we never want to make the size smaller
1569 than what was specified in it, if any. */
1570 if (TYPE_IS_PADDING_P (record_type
) && TYPE_SIZE (record_type
))
1571 size
= TYPE_SIZE (record_type
);
1573 /* Now set any of the values we've just computed that apply. */
1574 if (!TYPE_FAT_POINTER_P (record_type
)
1575 && !TYPE_CONTAINS_TEMPLATE_P (record_type
))
1576 SET_TYPE_ADA_SIZE (record_type
, ada_size
);
/* Reuse a pre-existing byte size if one was supplied on entry;
   otherwise derive it from the bit size rounded up to whole bytes.  */
1580 tree size_unit
= had_size_unit
1581 ? TYPE_SIZE_UNIT (record_type
)
1582 : convert (sizetype
,
1583 size_binop (CEIL_DIV_EXPR
, size
,
1584 bitsize_unit_node
));
1585 unsigned int align
= TYPE_ALIGN (record_type
);
1587 TYPE_SIZE (record_type
) = variable_size (round_up (size
, align
));
1588 TYPE_SIZE_UNIT (record_type
)
1589 = variable_size (round_up (size_unit
, align
/ BITS_PER_UNIT
));
1591 compute_record_mode (record_type
);
1596 rest_of_record_type_compilation (record_type
);
1599 /* Append PARALLEL_TYPE on the chain of parallel types of TYPE. */
1602 add_parallel_type (tree type
, tree parallel_type
)
1604 tree decl
= TYPE_STUB_DECL (type
);
1606 while (DECL_PARALLEL_TYPE (decl
))
1607 decl
= TYPE_STUB_DECL (DECL_PARALLEL_TYPE (decl
));
1609 SET_DECL_PARALLEL_TYPE (decl
, parallel_type
);
1612 /* Return true if TYPE has a parallel type. */
1615 has_parallel_type (tree type
)
1617 tree decl
= TYPE_STUB_DECL (type
);
1619 return DECL_PARALLEL_TYPE (decl
) != NULL_TREE
;
/* NOTE(review): this function body was recovered from a line-mangled
   extraction; declarations (e.g. "tree field;", "tree pos;", the
   "var"/"suffix" locals), early returns and several closing braces
   appear to be missing.  Verify against the pristine source before
   rebuilding.  */
1622 /* Wrap up compilation of RECORD_TYPE, i.e. output all the debug information
1623 associated with it. It need not be invoked directly in most cases since
1624 finish_record_type takes care of doing so, but this can be necessary if
1625 a parallel type is to be attached to the record type. */
1628 rest_of_record_type_compilation (tree record_type
)
1630 bool var_size
= false;
1633 /* If this is a padded type, the bulk of the debug info has already been
1634 generated for the field's type. */
1635 if (TYPE_IS_PADDING_P (record_type
))
1638 /* If the type already has a parallel type (XVS type), then we're done. */
1639 if (has_parallel_type (record_type
))
1642 for (field
= TYPE_FIELDS (record_type
); field
; field
= DECL_CHAIN (field
))
1644 /* We need to make an XVE/XVU record if any field has variable size,
1645 whether or not the record does. For example, if we have a union,
1646 it may be that all fields, rounded up to the alignment, have the
1647 same size, in which case we'll use that size. But the debug
1648 output routines (except Dwarf2) won't be able to output the fields,
1649 so we need to make the special record. */
1650 if (TREE_CODE (DECL_SIZE (field
)) != INTEGER_CST
1651 /* If a field has a non-constant qualifier, the record will have
1652 variable size too. */
1653 || (TREE_CODE (record_type
) == QUAL_UNION_TYPE
1654 && TREE_CODE (DECL_QUALIFIER (field
)) != INTEGER_CST
))
1661 /* If this record type is of variable size, make a parallel record type that
1662 will tell the debugger how the former is laid out (see exp_dbug.ads). */
1665 tree new_record_type
1666 = make_node (TREE_CODE (record_type
) == QUAL_UNION_TYPE
1667 ? UNION_TYPE
: TREE_CODE (record_type
));
1668 tree orig_name
= TYPE_NAME (record_type
), new_name
;
1669 tree last_pos
= bitsize_zero_node
;
1670 tree old_field
, prev_old_field
= NULL_TREE
;
1672 if (TREE_CODE (orig_name
) == TYPE_DECL
)
1673 orig_name
= DECL_NAME (orig_name
);
1676 = concat_name (orig_name
, TREE_CODE (record_type
) == QUAL_UNION_TYPE
1678 TYPE_NAME (new_record_type
) = new_name
;
1679 TYPE_ALIGN (new_record_type
) = BIGGEST_ALIGNMENT
;
1680 TYPE_STUB_DECL (new_record_type
)
1681 = create_type_stub_decl (new_name
, new_record_type
);
1682 DECL_IGNORED_P (TYPE_STUB_DECL (new_record_type
))
1683 = DECL_IGNORED_P (TYPE_STUB_DECL (record_type
));
1684 TYPE_SIZE (new_record_type
) = size_int (TYPE_ALIGN (record_type
));
1685 TYPE_SIZE_UNIT (new_record_type
)
1686 = size_int (TYPE_ALIGN (record_type
) / BITS_PER_UNIT
);
1688 /* Now scan all the fields, replacing each field with a new
1689 field corresponding to the new encoding. */
1690 for (old_field
= TYPE_FIELDS (record_type
); old_field
;
1691 old_field
= DECL_CHAIN (old_field
))
1693 tree field_type
= TREE_TYPE (old_field
);
1694 tree field_name
= DECL_NAME (old_field
);
1696 tree curpos
= bit_position (old_field
);
1698 unsigned int align
= 0;
1701 /* See how the position was modified from the last position.
1703 There are two basic cases we support: a value was added
1704 to the last position or the last position was rounded to
1705 a boundary and they something was added. Check for the
1706 first case first. If not, see if there is any evidence
1707 of rounding. If so, round the last position and try
1710 If this is a union, the position can be taken as zero. */
1712 /* Some computations depend on the shape of the position expression,
1713 so strip conversions to make sure it's exposed. */
1714 curpos
= remove_conversions (curpos
, true);
1716 if (TREE_CODE (new_record_type
) == UNION_TYPE
)
1717 pos
= bitsize_zero_node
, align
= 0;
1719 pos
= compute_related_constant (curpos
, last_pos
);
1721 if (!pos
&& TREE_CODE (curpos
) == MULT_EXPR
1722 && host_integerp (TREE_OPERAND (curpos
, 1), 1))
1724 tree offset
= TREE_OPERAND (curpos
, 0);
1725 align
= tree_low_cst (TREE_OPERAND (curpos
, 1), 1);
1727 /* An offset which is a bitwise AND with a negative power of 2
1728 means an alignment corresponding to this power of 2. Note
1729 that, as sizetype is sign-extended but nonetheless unsigned,
1730 we don't directly use tree_int_cst_sgn. */
1731 offset
= remove_conversions (offset
, true);
1732 if (TREE_CODE (offset
) == BIT_AND_EXPR
1733 && host_integerp (TREE_OPERAND (offset
, 1), 0)
1734 && TREE_INT_CST_HIGH (TREE_OPERAND (offset
, 1)) < 0)
1737 = - tree_low_cst (TREE_OPERAND (offset
, 1), 0);
1738 if (exact_log2 (pow
) > 0)
1742 pos
= compute_related_constant (curpos
,
1743 round_up (last_pos
, align
));
1745 else if (!pos
&& TREE_CODE (curpos
) == PLUS_EXPR
1746 && TREE_CODE (TREE_OPERAND (curpos
, 1)) == INTEGER_CST
1747 && TREE_CODE (TREE_OPERAND (curpos
, 0)) == MULT_EXPR
1748 && host_integerp (TREE_OPERAND
1749 (TREE_OPERAND (curpos
, 0), 1),
1754 (TREE_OPERAND (TREE_OPERAND (curpos
, 0), 1), 1);
1755 pos
= compute_related_constant (curpos
,
1756 round_up (last_pos
, align
));
1758 else if (potential_alignment_gap (prev_old_field
, old_field
,
1761 align
= TYPE_ALIGN (field_type
);
1762 pos
= compute_related_constant (curpos
,
1763 round_up (last_pos
, align
));
1766 /* If we can't compute a position, set it to zero.
1768 ??? We really should abort here, but it's too much work
1769 to get this correct for all cases. */
1772 pos
= bitsize_zero_node
;
1774 /* See if this type is variable-sized and make a pointer type
1775 and indicate the indirection if so. Beware that the debug
1776 back-end may adjust the position computed above according
1777 to the alignment of the field type, i.e. the pointer type
1778 in this case, if we don't preventively counter that. */
1779 if (TREE_CODE (DECL_SIZE (old_field
)) != INTEGER_CST
)
1781 field_type
= build_pointer_type (field_type
);
1782 if (align
!= 0 && TYPE_ALIGN (field_type
) > align
)
1784 field_type
= copy_node (field_type
);
1785 TYPE_ALIGN (field_type
) = align
;
1790 /* Make a new field name, if necessary. */
1791 if (var
|| align
!= 0)
/* Encode variability ('L') or extra alignment ('A') in the debug
   field name, following the conventions of exp_dbug.ads.  */
1796 sprintf (suffix
, "XV%c%u", var
? 'L' : 'A',
1797 align
/ BITS_PER_UNIT
);
1799 strcpy (suffix
, "XVL");
1801 field_name
= concat_name (field_name
, suffix
);
1805 = create_field_decl (field_name
, field_type
, new_record_type
,
1806 DECL_SIZE (old_field
), pos
, 0, 0);
1807 DECL_CHAIN (new_field
) = TYPE_FIELDS (new_record_type
);
1808 TYPE_FIELDS (new_record_type
) = new_field
;
1810 /* If old_field is a QUAL_UNION_TYPE, take its size as being
1811 zero. The only time it's not the last field of the record
1812 is when there are other components at fixed positions after
1813 it (meaning there was a rep clause for every field) and we
1814 want to be able to encode them. */
1815 last_pos
= size_binop (PLUS_EXPR
, bit_position (old_field
),
1816 (TREE_CODE (TREE_TYPE (old_field
))
1819 : DECL_SIZE (old_field
));
1820 prev_old_field
= old_field
;
1823 TYPE_FIELDS (new_record_type
) = nreverse (TYPE_FIELDS (new_record_type
));
1825 add_parallel_type (record_type
, new_record_type
);
1829 /* Utility function of above to merge LAST_SIZE, the previous size of a record
1830 with FIRST_BIT and SIZE that describe a field. SPECIAL is true if this
1831 represents a QUAL_UNION_TYPE in which case we must look for COND_EXPRs and
1832 replace a value of zero with the old size. If HAS_REP is true, we take the
1833 MAX of the end position of this field with LAST_SIZE. In all other cases,
1834 we use FIRST_BIT plus SIZE. Return an expression for the size. */
1837 merge_sizes (tree last_size
, tree first_bit
, tree size
, bool special
,
1840 tree type
= TREE_TYPE (last_size
);
1843 if (!special
|| TREE_CODE (size
) != COND_EXPR
)
1845 new_size
= size_binop (PLUS_EXPR
, first_bit
, size
);
1847 new_size
= size_binop (MAX_EXPR
, last_size
, new_size
);
1851 new_size
= fold_build3 (COND_EXPR
, type
, TREE_OPERAND (size
, 0),
1852 integer_zerop (TREE_OPERAND (size
, 1))
1853 ? last_size
: merge_sizes (last_size
, first_bit
,
1854 TREE_OPERAND (size
, 1),
1856 integer_zerop (TREE_OPERAND (size
, 2))
1857 ? last_size
: merge_sizes (last_size
, first_bit
,
1858 TREE_OPERAND (size
, 2),
1861 /* We don't need any NON_VALUE_EXPRs and they can confuse us (especially
1862 when fed through substitute_in_expr) into thinking that a constant
1863 size is not constant. */
1864 while (TREE_CODE (new_size
) == NON_LVALUE_EXPR
)
1865 new_size
= TREE_OPERAND (new_size
, 0);
1870 /* Utility function of above to see if OP0 and OP1, both of SIZETYPE, are
1871 related by the addition of a constant. Return that constant if so. */
1874 compute_related_constant (tree op0
, tree op1
)
1876 tree op0_var
, op1_var
;
1877 tree op0_con
= split_plus (op0
, &op0_var
);
1878 tree op1_con
= split_plus (op1
, &op1_var
);
1879 tree result
= size_binop (MINUS_EXPR
, op0_con
, op1_con
);
1881 if (operand_equal_p (op0_var
, op1_var
, 0))
1883 else if (operand_equal_p (op0
, size_binop (PLUS_EXPR
, op1_var
, result
), 0))
1889 /* Utility function of above to split a tree OP which may be a sum, into a
1890 constant part, which is returned, and a variable part, which is stored
1891 in *PVAR. *PVAR may be bitsize_zero_node. All operations must be of
1895 split_plus (tree in
, tree
*pvar
)
1897 /* Strip conversions in order to ease the tree traversal and maximize the
1898 potential for constant or plus/minus discovery. We need to be careful
1899 to always return and set *pvar to bitsizetype trees, but it's worth
1901 in
= remove_conversions (in
, false);
1903 *pvar
= convert (bitsizetype
, in
);
1905 if (TREE_CODE (in
) == INTEGER_CST
)
1907 *pvar
= bitsize_zero_node
;
1908 return convert (bitsizetype
, in
);
1910 else if (TREE_CODE (in
) == PLUS_EXPR
|| TREE_CODE (in
) == MINUS_EXPR
)
1912 tree lhs_var
, rhs_var
;
1913 tree lhs_con
= split_plus (TREE_OPERAND (in
, 0), &lhs_var
);
1914 tree rhs_con
= split_plus (TREE_OPERAND (in
, 1), &rhs_var
);
1916 if (lhs_var
== TREE_OPERAND (in
, 0)
1917 && rhs_var
== TREE_OPERAND (in
, 1))
1918 return bitsize_zero_node
;
1920 *pvar
= size_binop (TREE_CODE (in
), lhs_var
, rhs_var
);
1921 return size_binop (TREE_CODE (in
), lhs_con
, rhs_con
);
1924 return bitsize_zero_node
;
1927 /* Return a FUNCTION_TYPE node. RETURN_TYPE is the type returned by the
1928 subprogram. If it is VOID_TYPE, then we are dealing with a procedure,
1929 otherwise we are dealing with a function. PARAM_DECL_LIST is a list of
1930 PARM_DECL nodes that are the subprogram parameters. CICO_LIST is the
1931 copy-in/copy-out list to be stored into the TYPE_CICO_LIST field.
1932 RETURN_UNCONSTRAINED_P is true if the function returns an unconstrained
1933 object. RETURN_BY_DIRECT_REF_P is true if the function returns by direct
1934 reference. RETURN_BY_INVISI_REF_P is true if the function returns by
1935 invisible reference. */
1938 create_subprog_type (tree return_type
, tree param_decl_list
, tree cico_list
,
1939 bool return_unconstrained_p
, bool return_by_direct_ref_p
,
1940 bool return_by_invisi_ref_p
)
1942 /* A list of the data type nodes of the subprogram formal parameters.
1943 This list is generated by traversing the input list of PARM_DECL
1945 VEC(tree
,gc
) *param_type_list
= NULL
;
1948 for (t
= param_decl_list
; t
; t
= DECL_CHAIN (t
))
1949 VEC_safe_push (tree
, gc
, param_type_list
, TREE_TYPE (t
));
1951 type
= build_function_type_vec (return_type
, param_type_list
);
1953 /* TYPE may have been shared since GCC hashes types. If it has a different
1954 CICO_LIST, make a copy. Likewise for the various flags. */
1955 if (!fntype_same_flags_p (type
, cico_list
, return_unconstrained_p
,
1956 return_by_direct_ref_p
, return_by_invisi_ref_p
))
1958 type
= copy_type (type
);
1959 TYPE_CI_CO_LIST (type
) = cico_list
;
1960 TYPE_RETURN_UNCONSTRAINED_P (type
) = return_unconstrained_p
;
1961 TYPE_RETURN_BY_DIRECT_REF_P (type
) = return_by_direct_ref_p
;
1962 TREE_ADDRESSABLE (type
) = return_by_invisi_ref_p
;
1968 /* Return a copy of TYPE but safe to modify in any way. */
1971 copy_type (tree type
)
1973 tree new_type
= copy_node (type
);
1975 /* Unshare the language-specific data. */
1976 if (TYPE_LANG_SPECIFIC (type
))
1978 TYPE_LANG_SPECIFIC (new_type
) = NULL
;
1979 SET_TYPE_LANG_SPECIFIC (new_type
, GET_TYPE_LANG_SPECIFIC (type
));
1982 /* And the contents of the language-specific slot if needed. */
1983 if ((INTEGRAL_TYPE_P (type
) || TREE_CODE (type
) == REAL_TYPE
)
1984 && TYPE_RM_VALUES (type
))
1986 TYPE_RM_VALUES (new_type
) = NULL_TREE
;
1987 SET_TYPE_RM_SIZE (new_type
, TYPE_RM_SIZE (type
));
1988 SET_TYPE_RM_MIN_VALUE (new_type
, TYPE_RM_MIN_VALUE (type
));
1989 SET_TYPE_RM_MAX_VALUE (new_type
, TYPE_RM_MAX_VALUE (type
));
1992 /* copy_node clears this field instead of copying it, because it is
1993 aliased with TREE_CHAIN. */
1994 TYPE_STUB_DECL (new_type
) = TYPE_STUB_DECL (type
);
1996 TYPE_POINTER_TO (new_type
) = 0;
1997 TYPE_REFERENCE_TO (new_type
) = 0;
1998 TYPE_MAIN_VARIANT (new_type
) = new_type
;
1999 TYPE_NEXT_VARIANT (new_type
) = 0;
2004 /* Return a subtype of sizetype with range MIN to MAX and whose
2005 TYPE_INDEX_TYPE is INDEX. GNAT_NODE is used for the position
2006 of the associated TYPE_DECL. */
2009 create_index_type (tree min
, tree max
, tree index
, Node_Id gnat_node
)
2011 /* First build a type for the desired range. */
2012 tree type
= build_nonshared_range_type (sizetype
, min
, max
);
2014 /* Then set the index type. */
2015 SET_TYPE_INDEX_TYPE (type
, index
);
2016 create_type_decl (NULL_TREE
, type
, NULL
, true, false, gnat_node
);
2021 /* Return a subtype of TYPE with range MIN to MAX. If TYPE is NULL,
2022 sizetype is used. */
2025 create_range_type (tree type
, tree min
, tree max
)
2029 if (type
== NULL_TREE
)
2032 /* First build a type with the base range. */
2033 range_type
= build_nonshared_range_type (type
, TYPE_MIN_VALUE (type
),
2034 TYPE_MAX_VALUE (type
));
2036 /* Then set the actual range. */
2037 SET_TYPE_RM_MIN_VALUE (range_type
, convert (type
, min
));
2038 SET_TYPE_RM_MAX_VALUE (range_type
, convert (type
, max
));
2043 /* Return a TYPE_DECL node suitable for the TYPE_STUB_DECL field of a type.
2044 TYPE_NAME gives the name of the type and TYPE is a ..._TYPE node giving
2048 create_type_stub_decl (tree type_name
, tree type
)
2050 /* Using a named TYPE_DECL ensures that a type name marker is emitted in
2051 STABS while setting DECL_ARTIFICIAL ensures that no DW_TAG_typedef is
2052 emitted in DWARF. */
2053 tree type_decl
= build_decl (input_location
,
2054 TYPE_DECL
, type_name
, type
);
2055 DECL_ARTIFICIAL (type_decl
) = 1;
2056 TYPE_ARTIFICIAL (type
) = 1;
2060 /* Return a TYPE_DECL node. TYPE_NAME gives the name of the type and TYPE
2061 is a ..._TYPE node giving its data type. ARTIFICIAL_P is true if this
2062 is a declaration that was generated by the compiler. DEBUG_INFO_P is
2063 true if we need to write debug information about this type. GNAT_NODE
2064 is used for the position of the decl. */
2067 create_type_decl (tree type_name
, tree type
, struct attrib
*attr_list
,
2068 bool artificial_p
, bool debug_info_p
, Node_Id gnat_node
)
2070 enum tree_code code
= TREE_CODE (type
);
2071 bool named
= TYPE_NAME (type
) && TREE_CODE (TYPE_NAME (type
)) == TYPE_DECL
;
2074 /* Only the builtin TYPE_STUB_DECL should be used for dummy types. */
2075 gcc_assert (!TYPE_IS_DUMMY_P (type
));
2077 /* If the type hasn't been named yet, we're naming it; preserve an existing
2078 TYPE_STUB_DECL that has been attached to it for some purpose. */
2079 if (!named
&& TYPE_STUB_DECL (type
))
2081 type_decl
= TYPE_STUB_DECL (type
);
2082 DECL_NAME (type_decl
) = type_name
;
2085 type_decl
= build_decl (input_location
,
2086 TYPE_DECL
, type_name
, type
);
2088 DECL_ARTIFICIAL (type_decl
) = artificial_p
;
2089 TYPE_ARTIFICIAL (type
) = artificial_p
;
2091 /* Add this decl to the current binding level. */
2092 gnat_pushdecl (type_decl
, gnat_node
);
2094 process_attributes (type_decl
, attr_list
);
2096 /* If we're naming the type, equate the TYPE_STUB_DECL to the name.
2097 This causes the name to be also viewed as a "tag" by the debug
2098 back-end, with the advantage that no DW_TAG_typedef is emitted
2099 for artificial "tagged" types in DWARF. */
2101 TYPE_STUB_DECL (type
) = type_decl
;
2103 /* Do not generate debug info for UNCONSTRAINED_ARRAY_TYPE that the
2104 back-end doesn't support, and for others if we don't need to. */
2105 if (code
== UNCONSTRAINED_ARRAY_TYPE
|| !debug_info_p
)
2106 DECL_IGNORED_P (type_decl
) = 1;
2111 /* Return a VAR_DECL or CONST_DECL node.
2113 VAR_NAME gives the name of the variable. ASM_NAME is its assembler name
2114 (if provided). TYPE is its data type (a GCC ..._TYPE node). VAR_INIT is
2115 the GCC tree for an optional initial expression; NULL_TREE if none.
2117 CONST_FLAG is true if this variable is constant, in which case we might
2118 return a CONST_DECL node unless CONST_DECL_ALLOWED_P is false.
2120 PUBLIC_FLAG is true if this is for a reference to a public entity or for a
2121 definition to be made visible outside of the current compilation unit, for
2122 instance variable definitions in a package specification.
2124 EXTERN_FLAG is true when processing an external variable declaration (as
2125 opposed to a definition: no storage is to be allocated for the variable).
2127 STATIC_FLAG is only relevant when not at top level. In that case
2128 it indicates whether to always allocate storage to the variable.
2130 GNAT_NODE is used for the position of the decl. */
2133 create_var_decl_1 (tree var_name
, tree asm_name
, tree type
, tree var_init
,
2134 bool const_flag
, bool public_flag
, bool extern_flag
,
2135 bool static_flag
, bool const_decl_allowed_p
,
2136 struct attrib
*attr_list
, Node_Id gnat_node
)
2138 /* Whether the initializer is a constant initializer. At the global level
2139 or for an external object or an object to be allocated in static memory,
2140 we check that it is a valid constant expression for use in initializing
2141 a static variable; otherwise, we only check that it is constant. */
2144 && gnat_types_compatible_p (type
, TREE_TYPE (var_init
))
2145 && (global_bindings_p () || extern_flag
|| static_flag
2146 ? initializer_constant_valid_p (var_init
, TREE_TYPE (var_init
)) != 0
2147 : TREE_CONSTANT (var_init
)));
2149 /* Whether we will make TREE_CONSTANT the DECL we produce here, in which
2150 case the initializer may be used in-lieu of the DECL node (as done in
2151 Identifier_to_gnu). This is useful to prevent the need of elaboration
2152 code when an identifier for which such a decl is made is in turn used as
2153 an initializer. We used to rely on CONST vs VAR_DECL for this purpose,
2154 but extra constraints apply to this choice (see below) and are not
2155 relevant to the distinction we wish to make. */
2156 bool constant_p
= const_flag
&& init_const
;
2158 /* The actual DECL node. CONST_DECL was initially intended for enumerals
2159 and may be used for scalars in general but not for aggregates. */
2161 = build_decl (input_location
,
2162 (constant_p
&& const_decl_allowed_p
2163 && !AGGREGATE_TYPE_P (type
)) ? CONST_DECL
: VAR_DECL
,
2166 /* If this is external, throw away any initializations (they will be done
2167 elsewhere) unless this is a constant for which we would like to remain
2168 able to get the initializer. If we are defining a global here, leave a
2169 constant initialization and save any variable elaborations for the
2170 elaboration routine. If we are just annotating types, throw away the
2171 initialization if it isn't a constant. */
2172 if ((extern_flag
&& !constant_p
)
2173 || (type_annotate_only
&& var_init
&& !TREE_CONSTANT (var_init
)))
2174 var_init
= NULL_TREE
;
2176 /* At the global level, an initializer requiring code to be generated
2177 produces elaboration statements. Check that such statements are allowed,
2178 that is, not violating a No_Elaboration_Code restriction. */
2179 if (global_bindings_p () && var_init
!= 0 && !init_const
)
2180 Check_Elaboration_Code_Allowed (gnat_node
);
2182 DECL_INITIAL (var_decl
) = var_init
;
2183 TREE_READONLY (var_decl
) = const_flag
;
2184 DECL_EXTERNAL (var_decl
) = extern_flag
;
2185 TREE_PUBLIC (var_decl
) = public_flag
|| extern_flag
;
2186 TREE_CONSTANT (var_decl
) = constant_p
;
2187 TREE_THIS_VOLATILE (var_decl
) = TREE_SIDE_EFFECTS (var_decl
)
2188 = TYPE_VOLATILE (type
);
2190 /* Ada doesn't feature Fortran-like COMMON variables so we shouldn't
2191 try to fiddle with DECL_COMMON. However, on platforms that don't
2192 support global BSS sections, uninitialized global variables would
2193 go in DATA instead, thus increasing the size of the executable. */
2195 && TREE_CODE (var_decl
) == VAR_DECL
2196 && TREE_PUBLIC (var_decl
)
2197 && !have_global_bss_p ())
2198 DECL_COMMON (var_decl
) = 1;
2200 /* At the global binding level, we need to allocate static storage for the
2201 variable if it isn't external. Otherwise, we allocate automatic storage
2202 unless requested not to. */
2203 TREE_STATIC (var_decl
)
2204 = !extern_flag
&& (static_flag
|| global_bindings_p ());
2206 /* For an external constant whose initializer is not absolute, do not emit
2207 debug info. In DWARF this would mean a global relocation in a read-only
2208 section which runs afoul of the PE-COFF run-time relocation mechanism. */
2212 && initializer_constant_valid_p (var_init
, TREE_TYPE (var_init
))
2213 != null_pointer_node
)
2214 DECL_IGNORED_P (var_decl
) = 1;
2216 /* Add this decl to the current binding level. */
2217 gnat_pushdecl (var_decl
, gnat_node
);
2219 if (TREE_SIDE_EFFECTS (var_decl
))
2220 TREE_ADDRESSABLE (var_decl
) = 1;
2222 if (TREE_CODE (var_decl
) == VAR_DECL
)
2225 SET_DECL_ASSEMBLER_NAME (var_decl
, asm_name
);
2226 process_attributes (var_decl
, attr_list
);
2227 if (global_bindings_p ())
2228 rest_of_decl_compilation (var_decl
, true, 0);
2234 /* Return true if TYPE, an aggregate type, contains (or is) an array. */
2237 aggregate_type_contains_array_p (tree type
)
2239 switch (TREE_CODE (type
))
2243 case QUAL_UNION_TYPE
:
2246 for (field
= TYPE_FIELDS (type
); field
; field
= DECL_CHAIN (field
))
2247 if (AGGREGATE_TYPE_P (TREE_TYPE (field
))
2248 && aggregate_type_contains_array_p (TREE_TYPE (field
)))
2261 /* Return a FIELD_DECL node. FIELD_NAME is the field's name, FIELD_TYPE is
2262 its type and RECORD_TYPE is the type of the enclosing record. If SIZE is
2263 nonzero, it is the specified size of the field. If POS is nonzero, it is
2264 the bit position. PACKED is 1 if the enclosing record is packed, -1 if it
2265 has Component_Alignment of Storage_Unit. If ADDRESSABLE is nonzero, it
2266 means we are allowed to take the address of the field; if it is negative,
2267 we should not make a bitfield, which is used by make_aligning_type. */
2270 create_field_decl (tree field_name
, tree field_type
, tree record_type
,
2271 tree size
, tree pos
, int packed
, int addressable
)
2273 tree field_decl
= build_decl (input_location
,
2274 FIELD_DECL
, field_name
, field_type
);
2276 DECL_CONTEXT (field_decl
) = record_type
;
2277 TREE_READONLY (field_decl
) = TYPE_READONLY (field_type
);
2279 /* If FIELD_TYPE is BLKmode, we must ensure this is aligned to at least a
2280 byte boundary since GCC cannot handle less-aligned BLKmode bitfields.
2281 Likewise for an aggregate without specified position that contains an
2282 array, because in this case slices of variable length of this array
2283 must be handled by GCC and variable-sized objects need to be aligned
2284 to at least a byte boundary. */
2285 if (packed
&& (TYPE_MODE (field_type
) == BLKmode
2287 && AGGREGATE_TYPE_P (field_type
)
2288 && aggregate_type_contains_array_p (field_type
))))
2289 DECL_ALIGN (field_decl
) = BITS_PER_UNIT
;
2291 /* If a size is specified, use it. Otherwise, if the record type is packed
2292 compute a size to use, which may differ from the object's natural size.
2293 We always set a size in this case to trigger the checks for bitfield
2294 creation below, which is typically required when no position has been
2297 size
= convert (bitsizetype
, size
);
2298 else if (packed
== 1)
2300 size
= rm_size (field_type
);
2301 if (TYPE_MODE (field_type
) == BLKmode
)
2302 size
= round_up (size
, BITS_PER_UNIT
);
2305 /* If we may, according to ADDRESSABLE, make a bitfield if a size is
2306 specified for two reasons: first if the size differs from the natural
2307 size. Second, if the alignment is insufficient. There are a number of
2308 ways the latter can be true.
2310 We never make a bitfield if the type of the field has a nonconstant size,
2311 because no such entity requiring bitfield operations should reach here.
2313 We do *preventively* make a bitfield when there might be the need for it
2314 but we don't have all the necessary information to decide, as is the case
2315 of a field with no specified position in a packed record.
2317 We also don't look at STRICT_ALIGNMENT here, and rely on later processing
2318 in layout_decl or finish_record_type to clear the bit_field indication if
2319 it is in fact not needed. */
2320 if (addressable
>= 0
2322 && TREE_CODE (size
) == INTEGER_CST
2323 && TREE_CODE (TYPE_SIZE (field_type
)) == INTEGER_CST
2324 && (!tree_int_cst_equal (size
, TYPE_SIZE (field_type
))
2325 || (pos
&& !value_factor_p (pos
, TYPE_ALIGN (field_type
)))
2327 || (TYPE_ALIGN (record_type
) != 0
2328 && TYPE_ALIGN (record_type
) < TYPE_ALIGN (field_type
))))
2330 DECL_BIT_FIELD (field_decl
) = 1;
2331 DECL_SIZE (field_decl
) = size
;
2332 if (!packed
&& !pos
)
2334 if (TYPE_ALIGN (record_type
) != 0
2335 && TYPE_ALIGN (record_type
) < TYPE_ALIGN (field_type
))
2336 DECL_ALIGN (field_decl
) = TYPE_ALIGN (record_type
);
2338 DECL_ALIGN (field_decl
) = TYPE_ALIGN (field_type
);
2342 DECL_PACKED (field_decl
) = pos
? DECL_BIT_FIELD (field_decl
) : packed
;
2344 /* Bump the alignment if need be, either for bitfield/packing purposes or
2345 to satisfy the type requirements if no such consideration applies. When
2346 we get the alignment from the type, indicate if this is from an explicit
2347 user request, which prevents stor-layout from lowering it later on. */
2349 unsigned int bit_align
2350 = (DECL_BIT_FIELD (field_decl
) ? 1
2351 : packed
&& TYPE_MODE (field_type
) != BLKmode
? BITS_PER_UNIT
: 0);
2353 if (bit_align
> DECL_ALIGN (field_decl
))
2354 DECL_ALIGN (field_decl
) = bit_align
;
2355 else if (!bit_align
&& TYPE_ALIGN (field_type
) > DECL_ALIGN (field_decl
))
2357 DECL_ALIGN (field_decl
) = TYPE_ALIGN (field_type
);
2358 DECL_USER_ALIGN (field_decl
) = TYPE_USER_ALIGN (field_type
);
2364 /* We need to pass in the alignment the DECL is known to have.
2365 This is the lowest-order bit set in POS, but no more than
2366 the alignment of the record, if one is specified. Note
2367 that an alignment of 0 is taken as infinite. */
2368 unsigned int known_align
;
2370 if (host_integerp (pos
, 1))
2371 known_align
= tree_low_cst (pos
, 1) & - tree_low_cst (pos
, 1);
2373 known_align
= BITS_PER_UNIT
;
2375 if (TYPE_ALIGN (record_type
)
2376 && (known_align
== 0 || known_align
> TYPE_ALIGN (record_type
)))
2377 known_align
= TYPE_ALIGN (record_type
);
2379 layout_decl (field_decl
, known_align
);
2380 SET_DECL_OFFSET_ALIGN (field_decl
,
2381 host_integerp (pos
, 1) ? BIGGEST_ALIGNMENT
2383 pos_from_bit (&DECL_FIELD_OFFSET (field_decl
),
2384 &DECL_FIELD_BIT_OFFSET (field_decl
),
2385 DECL_OFFSET_ALIGN (field_decl
), pos
);
2388 /* In addition to what our caller says, claim the field is addressable if we
2389 know that its type is not suitable.
2391 The field may also be "technically" nonaddressable, meaning that even if
2392 we attempt to take the field's address we will actually get the address
2393 of a copy. This is the case for true bitfields, but the DECL_BIT_FIELD
2394 value we have at this point is not accurate enough, so we don't account
2395 for this here and let finish_record_type decide. */
2396 if (!addressable
&& !type_for_nonaliased_component_p (field_type
))
2399 DECL_NONADDRESSABLE_P (field_decl
) = !addressable
;
2404 /* Return a PARM_DECL node. PARAM_NAME is the name of the parameter and
2405 PARAM_TYPE is its type. READONLY is true if the parameter is readonly
2406 (either an In parameter or an address of a pass-by-ref parameter). */
2409 create_param_decl (tree param_name
, tree param_type
, bool readonly
)
2411 tree param_decl
= build_decl (input_location
,
2412 PARM_DECL
, param_name
, param_type
);
2414 /* Honor TARGET_PROMOTE_PROTOTYPES like the C compiler, as not doing so
2415 can lead to various ABI violations. */
2416 if (targetm
.calls
.promote_prototypes (NULL_TREE
)
2417 && INTEGRAL_TYPE_P (param_type
)
2418 && TYPE_PRECISION (param_type
) < TYPE_PRECISION (integer_type_node
))
2420 /* We have to be careful about biased types here. Make a subtype
2421 of integer_type_node with the proper biasing. */
2422 if (TREE_CODE (param_type
) == INTEGER_TYPE
2423 && TYPE_BIASED_REPRESENTATION_P (param_type
))
2426 = make_unsigned_type (TYPE_PRECISION (integer_type_node
));
2427 TREE_TYPE (subtype
) = integer_type_node
;
2428 TYPE_BIASED_REPRESENTATION_P (subtype
) = 1;
2429 SET_TYPE_RM_MIN_VALUE (subtype
, TYPE_MIN_VALUE (param_type
));
2430 SET_TYPE_RM_MAX_VALUE (subtype
, TYPE_MAX_VALUE (param_type
));
2431 param_type
= subtype
;
2434 param_type
= integer_type_node
;
2437 DECL_ARG_TYPE (param_decl
) = param_type
;
2438 TREE_READONLY (param_decl
) = readonly
;
2442 /* Given a DECL and ATTR_LIST, process the listed attributes. */
2445 process_attributes (tree decl
, struct attrib
*attr_list
)
2447 for (; attr_list
; attr_list
= attr_list
->next
)
2448 switch (attr_list
->type
)
2450 case ATTR_MACHINE_ATTRIBUTE
:
2451 input_location
= DECL_SOURCE_LOCATION (decl
);
2452 decl_attributes (&decl
, tree_cons (attr_list
->name
, attr_list
->args
,
2454 ATTR_FLAG_TYPE_IN_PLACE
);
2457 case ATTR_LINK_ALIAS
:
2458 if (! DECL_EXTERNAL (decl
))
2460 TREE_STATIC (decl
) = 1;
2461 assemble_alias (decl
, attr_list
->name
);
2465 case ATTR_WEAK_EXTERNAL
:
2467 declare_weak (decl
);
2469 post_error ("?weak declarations not supported on this target",
2470 attr_list
->error_point
);
2473 case ATTR_LINK_SECTION
:
2474 if (targetm_common
.have_named_sections
)
2476 DECL_SECTION_NAME (decl
)
2477 = build_string (IDENTIFIER_LENGTH (attr_list
->name
),
2478 IDENTIFIER_POINTER (attr_list
->name
));
2479 DECL_COMMON (decl
) = 0;
2482 post_error ("?section attributes are not supported for this target",
2483 attr_list
->error_point
);
2486 case ATTR_LINK_CONSTRUCTOR
:
2487 DECL_STATIC_CONSTRUCTOR (decl
) = 1;
2488 TREE_USED (decl
) = 1;
2491 case ATTR_LINK_DESTRUCTOR
:
2492 DECL_STATIC_DESTRUCTOR (decl
) = 1;
2493 TREE_USED (decl
) = 1;
2496 case ATTR_THREAD_LOCAL_STORAGE
:
2497 DECL_TLS_MODEL (decl
) = decl_default_tls_model (decl
);
2498 DECL_COMMON (decl
) = 0;
2503 /* Record DECL as a global renaming pointer. */
2506 record_global_renaming_pointer (tree decl
)
2508 gcc_assert (!DECL_LOOP_PARM_P (decl
) && DECL_RENAMED_OBJECT (decl
));
2509 VEC_safe_push (tree
, gc
, global_renaming_pointers
, decl
);
2512 /* Invalidate the global renaming pointers. */
2515 invalidate_global_renaming_pointers (void)
2520 FOR_EACH_VEC_ELT (tree
, global_renaming_pointers
, i
, iter
)
2521 SET_DECL_RENAMED_OBJECT (iter
, NULL_TREE
);
2523 VEC_free (tree
, gc
, global_renaming_pointers
);
2526 /* Return true if VALUE is a known to be a multiple of FACTOR, which must be
2530 value_factor_p (tree value
, HOST_WIDE_INT factor
)
2532 if (host_integerp (value
, 1))
2533 return tree_low_cst (value
, 1) % factor
== 0;
2535 if (TREE_CODE (value
) == MULT_EXPR
)
2536 return (value_factor_p (TREE_OPERAND (value
, 0), factor
)
2537 || value_factor_p (TREE_OPERAND (value
, 1), factor
));
2542 /* Given two consecutive field decls PREV_FIELD and CURR_FIELD, return true
2543 unless we can prove these 2 fields are laid out in such a way that no gap
2544 exist between the end of PREV_FIELD and the beginning of CURR_FIELD. OFFSET
2545 is the distance in bits between the end of PREV_FIELD and the starting
2546 position of CURR_FIELD. It is ignored if null. */
2549 potential_alignment_gap (tree prev_field
, tree curr_field
, tree offset
)
2551 /* If this is the first field of the record, there cannot be any gap */
2555 /* If the previous field is a union type, then return False: The only
2556 time when such a field is not the last field of the record is when
2557 there are other components at fixed positions after it (meaning there
2558 was a rep clause for every field), in which case we don't want the
2559 alignment constraint to override them. */
2560 if (TREE_CODE (TREE_TYPE (prev_field
)) == QUAL_UNION_TYPE
)
2563 /* If the distance between the end of prev_field and the beginning of
2564 curr_field is constant, then there is a gap if the value of this
2565 constant is not null. */
2566 if (offset
&& host_integerp (offset
, 1))
2567 return !integer_zerop (offset
);
2569 /* If the size and position of the previous field are constant,
2570 then check the sum of this size and position. There will be a gap
2571 iff it is not multiple of the current field alignment. */
2572 if (host_integerp (DECL_SIZE (prev_field
), 1)
2573 && host_integerp (bit_position (prev_field
), 1))
2574 return ((tree_low_cst (bit_position (prev_field
), 1)
2575 + tree_low_cst (DECL_SIZE (prev_field
), 1))
2576 % DECL_ALIGN (curr_field
) != 0);
2578 /* If both the position and size of the previous field are multiples
2579 of the current field alignment, there cannot be any gap. */
2580 if (value_factor_p (bit_position (prev_field
), DECL_ALIGN (curr_field
))
2581 && value_factor_p (DECL_SIZE (prev_field
), DECL_ALIGN (curr_field
)))
2584 /* Fallback, return that there may be a potential gap */
2588 /* Return a LABEL_DECL with LABEL_NAME. GNAT_NODE is used for the position
2592 create_label_decl (tree label_name
, Node_Id gnat_node
)
2595 = build_decl (input_location
, LABEL_DECL
, label_name
, void_type_node
);
2597 DECL_MODE (label_decl
) = VOIDmode
;
2599 /* Add this decl to the current binding level. */
2600 gnat_pushdecl (label_decl
, gnat_node
);
2605 /* Return a FUNCTION_DECL node. SUBPROG_NAME is the name of the subprogram,
2606 ASM_NAME is its assembler name, SUBPROG_TYPE is its type (a FUNCTION_TYPE
2607 node), PARAM_DECL_LIST is the list of the subprogram arguments (a list of
2608 PARM_DECL nodes chained through the DECL_CHAIN field).
2610 INLINE_FLAG, PUBLIC_FLAG, EXTERN_FLAG, ARTIFICIAL_FLAG and ATTR_LIST are
2611 used to set the appropriate fields in the FUNCTION_DECL. GNAT_NODE is
2612 used for the position of the decl. */
2615 create_subprog_decl (tree subprog_name
, tree asm_name
, tree subprog_type
,
2616 tree param_decl_list
, bool inline_flag
, bool public_flag
,
2617 bool extern_flag
, bool artificial_flag
,
2618 struct attrib
*attr_list
, Node_Id gnat_node
)
2620 tree subprog_decl
= build_decl (input_location
, FUNCTION_DECL
, subprog_name
,
2622 tree result_decl
= build_decl (input_location
, RESULT_DECL
, NULL_TREE
,
2623 TREE_TYPE (subprog_type
));
2624 DECL_ARGUMENTS (subprog_decl
) = param_decl_list
;
2626 /* If this is a non-inline function nested inside an inlined external
2627 function, we cannot honor both requests without cloning the nested
2628 function in the current unit since it is private to the other unit.
2629 We could inline the nested function as well but it's probably better
2630 to err on the side of too little inlining. */
2633 && current_function_decl
2634 && DECL_DECLARED_INLINE_P (current_function_decl
)
2635 && DECL_EXTERNAL (current_function_decl
))
2636 DECL_DECLARED_INLINE_P (current_function_decl
) = 0;
2638 DECL_ARTIFICIAL (subprog_decl
) = artificial_flag
;
2639 DECL_EXTERNAL (subprog_decl
) = extern_flag
;
2640 DECL_DECLARED_INLINE_P (subprog_decl
) = inline_flag
;
2641 DECL_NO_INLINE_WARNING_P (subprog_decl
) = inline_flag
&& artificial_flag
;
2643 TREE_PUBLIC (subprog_decl
) = public_flag
;
2644 TREE_READONLY (subprog_decl
) = TYPE_READONLY (subprog_type
);
2645 TREE_THIS_VOLATILE (subprog_decl
) = TYPE_VOLATILE (subprog_type
);
2646 TREE_SIDE_EFFECTS (subprog_decl
) = TYPE_VOLATILE (subprog_type
);
2648 DECL_ARTIFICIAL (result_decl
) = 1;
2649 DECL_IGNORED_P (result_decl
) = 1;
2650 DECL_BY_REFERENCE (result_decl
) = TREE_ADDRESSABLE (subprog_type
);
2651 DECL_RESULT (subprog_decl
) = result_decl
;
2655 SET_DECL_ASSEMBLER_NAME (subprog_decl
, asm_name
);
2657 /* The expand_main_function circuitry expects "main_identifier_node" to
2658 designate the DECL_NAME of the 'main' entry point, in turn expected
2659 to be declared as the "main" function literally by default. Ada
2660 program entry points are typically declared with a different name
2661 within the binder generated file, exported as 'main' to satisfy the
2662 system expectations. Force main_identifier_node in this case. */
2663 if (asm_name
== main_identifier_node
)
2664 DECL_NAME (subprog_decl
) = main_identifier_node
;
2667 /* Add this decl to the current binding level. */
2668 gnat_pushdecl (subprog_decl
, gnat_node
);
2670 process_attributes (subprog_decl
, attr_list
);
2672 /* Output the assembler code and/or RTL for the declaration. */
2673 rest_of_decl_compilation (subprog_decl
, global_bindings_p (), 0);
2675 return subprog_decl
;
2678 /* Set up the framework for generating code for SUBPROG_DECL, a subprogram
2679 body. This routine needs to be invoked before processing the declarations
2680 appearing in the subprogram. */
2683 begin_subprog_body (tree subprog_decl
)
2687 announce_function (subprog_decl
);
2689 /* This function is being defined. */
2690 TREE_STATIC (subprog_decl
) = 1;
2692 current_function_decl
= subprog_decl
;
2694 /* Enter a new binding level and show that all the parameters belong to
2698 for (param_decl
= DECL_ARGUMENTS (subprog_decl
); param_decl
;
2699 param_decl
= DECL_CHAIN (param_decl
))
2700 DECL_CONTEXT (param_decl
) = subprog_decl
;
2702 make_decl_rtl (subprog_decl
);
2705 /* Finish translating the current subprogram and set its BODY. */
2708 end_subprog_body (tree body
)
2710 tree fndecl
= current_function_decl
;
2712 /* Attach the BLOCK for this level to the function and pop the level. */
2713 BLOCK_SUPERCONTEXT (current_binding_level
->block
) = fndecl
;
2714 DECL_INITIAL (fndecl
) = current_binding_level
->block
;
2717 /* Mark the RESULT_DECL as being in this subprogram. */
2718 DECL_CONTEXT (DECL_RESULT (fndecl
)) = fndecl
;
2720 /* The body should be a BIND_EXPR whose BLOCK is the top-level one. */
2721 if (TREE_CODE (body
) == BIND_EXPR
)
2723 BLOCK_SUPERCONTEXT (BIND_EXPR_BLOCK (body
)) = fndecl
;
2724 DECL_INITIAL (fndecl
) = BIND_EXPR_BLOCK (body
);
2727 DECL_SAVED_TREE (fndecl
) = body
;
2729 current_function_decl
= decl_function_context (fndecl
);
2732 /* Wrap up compilation of SUBPROG_DECL, a subprogram body. */
2735 rest_of_subprog_body_compilation (tree subprog_decl
)
2737 /* We cannot track the location of errors past this point. */
2738 error_gnat_node
= Empty
;
2740 /* If we're only annotating types, don't actually compile this function. */
2741 if (type_annotate_only
)
2744 /* Dump functions before gimplification. */
2745 dump_function (TDI_original
, subprog_decl
);
2747 if (!decl_function_context (subprog_decl
))
2748 cgraph_finalize_function (subprog_decl
, false);
2750 /* Register this function with cgraph just far enough to get it
2751 added to our parent's nested function list. */
2752 (void) cgraph_get_create_node (subprog_decl
);
2756 gnat_builtin_function (tree decl
)
2758 gnat_pushdecl (decl
, Empty
);
2762 /* Return an integer type with the number of bits of precision given by
2763 PRECISION. UNSIGNEDP is nonzero if the type is unsigned; otherwise
2764 it is a signed type. */
2767 gnat_type_for_size (unsigned precision
, int unsignedp
)
2772 if (precision
<= 2 * MAX_BITS_PER_WORD
2773 && signed_and_unsigned_types
[precision
][unsignedp
])
2774 return signed_and_unsigned_types
[precision
][unsignedp
];
2777 t
= make_unsigned_type (precision
);
2779 t
= make_signed_type (precision
);
2781 if (precision
<= 2 * MAX_BITS_PER_WORD
)
2782 signed_and_unsigned_types
[precision
][unsignedp
] = t
;
2786 sprintf (type_name
, "%sSIGNED_%d", unsignedp
? "UN" : "", precision
);
2787 TYPE_NAME (t
) = get_identifier (type_name
);
2793 /* Likewise for floating-point types. */
2796 float_type_for_precision (int precision
, enum machine_mode mode
)
2801 if (float_types
[(int) mode
])
2802 return float_types
[(int) mode
];
2804 float_types
[(int) mode
] = t
= make_node (REAL_TYPE
);
2805 TYPE_PRECISION (t
) = precision
;
2808 gcc_assert (TYPE_MODE (t
) == mode
);
2811 sprintf (type_name
, "FLOAT_%d", precision
);
2812 TYPE_NAME (t
) = get_identifier (type_name
);
2818 /* Return a data type that has machine mode MODE. UNSIGNEDP selects
2819 an unsigned type; otherwise a signed type is returned. */
2822 gnat_type_for_mode (enum machine_mode mode
, int unsignedp
)
2824 if (mode
== BLKmode
)
2827 if (mode
== VOIDmode
)
2828 return void_type_node
;
2830 if (COMPLEX_MODE_P (mode
))
2833 if (SCALAR_FLOAT_MODE_P (mode
))
2834 return float_type_for_precision (GET_MODE_PRECISION (mode
), mode
);
2836 if (SCALAR_INT_MODE_P (mode
))
2837 return gnat_type_for_size (GET_MODE_BITSIZE (mode
), unsignedp
);
2839 if (VECTOR_MODE_P (mode
))
2841 enum machine_mode inner_mode
= GET_MODE_INNER (mode
);
2842 tree inner_type
= gnat_type_for_mode (inner_mode
, unsignedp
);
2844 return build_vector_type_for_mode (inner_type
, mode
);
2850 /* Return the unsigned version of a TYPE_NODE, a scalar type. */
2853 gnat_unsigned_type (tree type_node
)
2855 tree type
= gnat_type_for_size (TYPE_PRECISION (type_node
), 1);
2857 if (TREE_CODE (type_node
) == INTEGER_TYPE
&& TYPE_MODULAR_P (type_node
))
2859 type
= copy_node (type
);
2860 TREE_TYPE (type
) = type_node
;
2862 else if (TREE_TYPE (type_node
)
2863 && TREE_CODE (TREE_TYPE (type_node
)) == INTEGER_TYPE
2864 && TYPE_MODULAR_P (TREE_TYPE (type_node
)))
2866 type
= copy_node (type
);
2867 TREE_TYPE (type
) = TREE_TYPE (type_node
);
2873 /* Return the signed version of a TYPE_NODE, a scalar type. */
2876 gnat_signed_type (tree type_node
)
2878 tree type
= gnat_type_for_size (TYPE_PRECISION (type_node
), 0);
2880 if (TREE_CODE (type_node
) == INTEGER_TYPE
&& TYPE_MODULAR_P (type_node
))
2882 type
= copy_node (type
);
2883 TREE_TYPE (type
) = type_node
;
2885 else if (TREE_TYPE (type_node
)
2886 && TREE_CODE (TREE_TYPE (type_node
)) == INTEGER_TYPE
2887 && TYPE_MODULAR_P (TREE_TYPE (type_node
)))
2889 type
= copy_node (type
);
2890 TREE_TYPE (type
) = TREE_TYPE (type_node
);
2896 /* Return 1 if the types T1 and T2 are compatible, i.e. if they can be
2897 transparently converted to each other. */
2900 gnat_types_compatible_p (tree t1
, tree t2
)
2902 enum tree_code code
;
2904 /* This is the default criterion. */
2905 if (TYPE_MAIN_VARIANT (t1
) == TYPE_MAIN_VARIANT (t2
))
2908 /* We only check structural equivalence here. */
2909 if ((code
= TREE_CODE (t1
)) != TREE_CODE (t2
))
2912 /* Vector types are also compatible if they have the same number of subparts
2913 and the same form of (scalar) element type. */
2914 if (code
== VECTOR_TYPE
2915 && TYPE_VECTOR_SUBPARTS (t1
) == TYPE_VECTOR_SUBPARTS (t2
)
2916 && TREE_CODE (TREE_TYPE (t1
)) == TREE_CODE (TREE_TYPE (t2
))
2917 && TYPE_PRECISION (TREE_TYPE (t1
)) == TYPE_PRECISION (TREE_TYPE (t2
)))
2920 /* Array types are also compatible if they are constrained and have the same
2921 domain(s) and the same component type. */
2922 if (code
== ARRAY_TYPE
2923 && (TYPE_DOMAIN (t1
) == TYPE_DOMAIN (t2
)
2924 || (TYPE_DOMAIN (t1
)
2926 && tree_int_cst_equal (TYPE_MIN_VALUE (TYPE_DOMAIN (t1
)),
2927 TYPE_MIN_VALUE (TYPE_DOMAIN (t2
)))
2928 && tree_int_cst_equal (TYPE_MAX_VALUE (TYPE_DOMAIN (t1
)),
2929 TYPE_MAX_VALUE (TYPE_DOMAIN (t2
)))))
2930 && (TREE_TYPE (t1
) == TREE_TYPE (t2
)
2931 || (TREE_CODE (TREE_TYPE (t1
)) == ARRAY_TYPE
2932 && gnat_types_compatible_p (TREE_TYPE (t1
), TREE_TYPE (t2
)))))
2938 /* Return true if EXPR is a useless type conversion. */
2941 gnat_useless_type_conversion (tree expr
)
2943 if (CONVERT_EXPR_P (expr
)
2944 || TREE_CODE (expr
) == VIEW_CONVERT_EXPR
2945 || TREE_CODE (expr
) == NON_LVALUE_EXPR
)
2946 return gnat_types_compatible_p (TREE_TYPE (expr
),
2947 TREE_TYPE (TREE_OPERAND (expr
, 0)));
2952 /* Return true if T, a FUNCTION_TYPE, has the specified list of flags. */
2955 fntype_same_flags_p (const_tree t
, tree cico_list
, bool return_unconstrained_p
,
2956 bool return_by_direct_ref_p
, bool return_by_invisi_ref_p
)
2958 return TYPE_CI_CO_LIST (t
) == cico_list
2959 && TYPE_RETURN_UNCONSTRAINED_P (t
) == return_unconstrained_p
2960 && TYPE_RETURN_BY_DIRECT_REF_P (t
) == return_by_direct_ref_p
2961 && TREE_ADDRESSABLE (t
) == return_by_invisi_ref_p
;
2964 /* EXP is an expression for the size of an object. If this size contains
2965 discriminant references, replace them with the maximum (if MAX_P) or
2966 minimum (if !MAX_P) possible value of the discriminant. */
2969 max_size (tree exp
, bool max_p
)
2971 enum tree_code code
= TREE_CODE (exp
);
2972 tree type
= TREE_TYPE (exp
);
2974 switch (TREE_CODE_CLASS (code
))
2976 case tcc_declaration
:
2981 if (code
== CALL_EXPR
)
2986 t
= maybe_inline_call_in_expr (exp
);
2988 return max_size (t
, max_p
);
2990 n
= call_expr_nargs (exp
);
2992 argarray
= XALLOCAVEC (tree
, n
);
2993 for (i
= 0; i
< n
; i
++)
2994 argarray
[i
] = max_size (CALL_EXPR_ARG (exp
, i
), max_p
);
2995 return build_call_array (type
, CALL_EXPR_FN (exp
), n
, argarray
);
3000 /* If this contains a PLACEHOLDER_EXPR, it is the thing we want to
3001 modify. Otherwise, we treat it like a variable. */
3002 if (!CONTAINS_PLACEHOLDER_P (exp
))
3005 type
= TREE_TYPE (TREE_OPERAND (exp
, 1));
3007 max_size (max_p
? TYPE_MAX_VALUE (type
) : TYPE_MIN_VALUE (type
), true);
3009 case tcc_comparison
:
3010 return max_p
? size_one_node
: size_zero_node
;
3014 case tcc_expression
:
3015 switch (TREE_CODE_LENGTH (code
))
3018 if (code
== SAVE_EXPR
)
3020 else if (code
== NON_LVALUE_EXPR
)
3021 return max_size (TREE_OPERAND (exp
, 0), max_p
);
3024 fold_build1 (code
, type
,
3025 max_size (TREE_OPERAND (exp
, 0),
3026 code
== NEGATE_EXPR
? !max_p
: max_p
));
3029 if (code
== COMPOUND_EXPR
)
3030 return max_size (TREE_OPERAND (exp
, 1), max_p
);
3033 tree lhs
= max_size (TREE_OPERAND (exp
, 0), max_p
);
3034 tree rhs
= max_size (TREE_OPERAND (exp
, 1),
3035 code
== MINUS_EXPR
? !max_p
: max_p
);
3037 /* Special-case wanting the maximum value of a MIN_EXPR.
3038 In that case, if one side overflows, return the other.
3039 sizetype is signed, but we know sizes are non-negative.
3040 Likewise, handle a MINUS_EXPR or PLUS_EXPR with the LHS
3041 overflowing and the RHS a variable. */
3044 && TREE_CODE (rhs
) == INTEGER_CST
3045 && TREE_OVERFLOW (rhs
))
3049 && TREE_CODE (lhs
) == INTEGER_CST
3050 && TREE_OVERFLOW (lhs
))
3052 else if ((code
== MINUS_EXPR
|| code
== PLUS_EXPR
)
3053 && TREE_CODE (lhs
) == INTEGER_CST
3054 && TREE_OVERFLOW (lhs
)
3055 && !TREE_CONSTANT (rhs
))
3058 return fold_build2 (code
, type
, lhs
, rhs
);
3062 if (code
== COND_EXPR
)
3063 return fold_build2 (max_p
? MAX_EXPR
: MIN_EXPR
, type
,
3064 max_size (TREE_OPERAND (exp
, 1), max_p
),
3065 max_size (TREE_OPERAND (exp
, 2), max_p
));
3068 /* Other tree classes cannot happen. */
3076 /* Build a template of type TEMPLATE_TYPE from the array bounds of ARRAY_TYPE.
3077 EXPR is an expression that we can use to locate any PLACEHOLDER_EXPRs.
3078 Return a constructor for the template. */
3081 build_template (tree template_type
, tree array_type
, tree expr
)
3083 VEC(constructor_elt
,gc
) *template_elts
= NULL
;
3084 tree bound_list
= NULL_TREE
;
3087 while (TREE_CODE (array_type
) == RECORD_TYPE
3088 && (TYPE_PADDING_P (array_type
)
3089 || TYPE_JUSTIFIED_MODULAR_P (array_type
)))
3090 array_type
= TREE_TYPE (TYPE_FIELDS (array_type
));
3092 if (TREE_CODE (array_type
) == ARRAY_TYPE
3093 || (TREE_CODE (array_type
) == INTEGER_TYPE
3094 && TYPE_HAS_ACTUAL_BOUNDS_P (array_type
)))
3095 bound_list
= TYPE_ACTUAL_BOUNDS (array_type
);
3097 /* First make the list for a CONSTRUCTOR for the template. Go down the
3098 field list of the template instead of the type chain because this
3099 array might be an Ada array of arrays and we can't tell where the
3100 nested arrays stop being the underlying object. */
3102 for (field
= TYPE_FIELDS (template_type
); field
;
3104 ? (bound_list
= TREE_CHAIN (bound_list
))
3105 : (array_type
= TREE_TYPE (array_type
))),
3106 field
= DECL_CHAIN (DECL_CHAIN (field
)))
3108 tree bounds
, min
, max
;
3110 /* If we have a bound list, get the bounds from there. Likewise
3111 for an ARRAY_TYPE. Otherwise, if expr is a PARM_DECL with
3112 DECL_BY_COMPONENT_PTR_P, use the bounds of the field in the template.
3113 This will give us a maximum range. */
3115 bounds
= TREE_VALUE (bound_list
);
3116 else if (TREE_CODE (array_type
) == ARRAY_TYPE
)
3117 bounds
= TYPE_INDEX_TYPE (TYPE_DOMAIN (array_type
));
3118 else if (expr
&& TREE_CODE (expr
) == PARM_DECL
3119 && DECL_BY_COMPONENT_PTR_P (expr
))
3120 bounds
= TREE_TYPE (field
);
3124 min
= convert (TREE_TYPE (field
), TYPE_MIN_VALUE (bounds
));
3125 max
= convert (TREE_TYPE (DECL_CHAIN (field
)), TYPE_MAX_VALUE (bounds
));
3127 /* If either MIN or MAX involve a PLACEHOLDER_EXPR, we must
3128 substitute it from OBJECT. */
3129 min
= SUBSTITUTE_PLACEHOLDER_IN_EXPR (min
, expr
);
3130 max
= SUBSTITUTE_PLACEHOLDER_IN_EXPR (max
, expr
);
3132 CONSTRUCTOR_APPEND_ELT (template_elts
, field
, min
);
3133 CONSTRUCTOR_APPEND_ELT (template_elts
, DECL_CHAIN (field
), max
);
3136 return gnat_build_constructor (template_type
, template_elts
);
3139 /* Helper routine to make a descriptor field. FIELD_LIST is the list of decls
3140 being built; the new decl is chained on to the front of the list. */
3143 make_descriptor_field (const char *name
, tree type
, tree rec_type
,
3144 tree initial
, tree field_list
)
3147 = create_field_decl (get_identifier (name
), type
, rec_type
, NULL_TREE
,
3150 DECL_INITIAL (field
) = initial
;
3151 DECL_CHAIN (field
) = field_list
;
3155 /* Build a 32-bit VMS descriptor from a Mechanism_Type, which must specify a
3156 descriptor type, and the GCC type of an object. Each FIELD_DECL in the
3157 type contains in its DECL_INITIAL the expression to use when a constructor
3158 is made for the type. GNAT_ENTITY is an entity used to print out an error
3159 message if the mechanism cannot be applied to an object of that type and
3160 also for the name. */
3163 build_vms_descriptor32 (tree type
, Mechanism_Type mech
, Entity_Id gnat_entity
)
3165 tree record_type
= make_node (RECORD_TYPE
);
3166 tree pointer32_type
, pointer64_type
;
3167 tree field_list
= NULL_TREE
;
3168 int klass
, ndim
, i
, dtype
= 0;
3169 tree inner_type
, tem
;
3172 /* If TYPE is an unconstrained array, use the underlying array type. */
3173 if (TREE_CODE (type
) == UNCONSTRAINED_ARRAY_TYPE
)
3174 type
= TREE_TYPE (TREE_TYPE (TYPE_FIELDS (TREE_TYPE (type
))));
3176 /* If this is an array, compute the number of dimensions in the array,
3177 get the index types, and point to the inner type. */
3178 if (TREE_CODE (type
) != ARRAY_TYPE
)
3181 for (ndim
= 1, inner_type
= type
;
3182 TREE_CODE (TREE_TYPE (inner_type
)) == ARRAY_TYPE
3183 && TYPE_MULTI_ARRAY_P (TREE_TYPE (inner_type
));
3184 ndim
++, inner_type
= TREE_TYPE (inner_type
))
3187 idx_arr
= XALLOCAVEC (tree
, ndim
);
3189 if (mech
!= By_Descriptor_NCA
&& mech
!= By_Short_Descriptor_NCA
3190 && TREE_CODE (type
) == ARRAY_TYPE
&& TYPE_CONVENTION_FORTRAN_P (type
))
3191 for (i
= ndim
- 1, inner_type
= type
;
3193 i
--, inner_type
= TREE_TYPE (inner_type
))
3194 idx_arr
[i
] = TYPE_DOMAIN (inner_type
);
3196 for (i
= 0, inner_type
= type
;
3198 i
++, inner_type
= TREE_TYPE (inner_type
))
3199 idx_arr
[i
] = TYPE_DOMAIN (inner_type
);
3201 /* Now get the DTYPE value. */
3202 switch (TREE_CODE (type
))
3207 if (TYPE_VAX_FLOATING_POINT_P (type
))
3208 switch (tree_low_cst (TYPE_DIGITS_VALUE (type
), 1))
3221 switch (GET_MODE_BITSIZE (TYPE_MODE (type
)))
3224 dtype
= TYPE_UNSIGNED (type
) ? 2 : 6;
3227 dtype
= TYPE_UNSIGNED (type
) ? 3 : 7;
3230 dtype
= TYPE_UNSIGNED (type
) ? 4 : 8;
3233 dtype
= TYPE_UNSIGNED (type
) ? 5 : 9;
3236 dtype
= TYPE_UNSIGNED (type
) ? 25 : 26;
3242 dtype
= GET_MODE_BITSIZE (TYPE_MODE (type
)) == 32 ? 52 : 53;
3246 if (TREE_CODE (TREE_TYPE (type
)) == INTEGER_TYPE
3247 && TYPE_VAX_FLOATING_POINT_P (type
))
3248 switch (tree_low_cst (TYPE_DIGITS_VALUE (type
), 1))
3260 dtype
= GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (type
))) == 32 ? 54: 55;
3271 /* Get the CLASS value. */
3274 case By_Descriptor_A
:
3275 case By_Short_Descriptor_A
:
3278 case By_Descriptor_NCA
:
3279 case By_Short_Descriptor_NCA
:
3282 case By_Descriptor_SB
:
3283 case By_Short_Descriptor_SB
:
3287 case By_Short_Descriptor
:
3288 case By_Descriptor_S
:
3289 case By_Short_Descriptor_S
:
3295 /* Make the type for a descriptor for VMS. The first four fields are the
3296 same for all types. */
3298 = make_descriptor_field ("LENGTH", gnat_type_for_size (16, 1), record_type
,
3299 size_in_bytes ((mech
== By_Descriptor_A
3300 || mech
== By_Short_Descriptor_A
)
3301 ? inner_type
: type
),
3304 = make_descriptor_field ("DTYPE", gnat_type_for_size (8, 1), record_type
,
3305 size_int (dtype
), field_list
);
3307 = make_descriptor_field ("CLASS", gnat_type_for_size (8, 1), record_type
,
3308 size_int (klass
), field_list
);
3310 pointer32_type
= build_pointer_type_for_mode (type
, SImode
, false);
3311 pointer64_type
= build_pointer_type_for_mode (type
, DImode
, false);
3313 /* Ensure that only 32-bit pointers are passed in 32-bit descriptors. Note
3314 that we cannot build a template call to the CE routine as it would get a
3315 wrong source location; instead we use a second placeholder for it. */
3316 tem
= build_unary_op (ADDR_EXPR
, pointer64_type
,
3317 build0 (PLACEHOLDER_EXPR
, type
));
3318 tem
= build3 (COND_EXPR
, pointer32_type
,
3320 ? build_binary_op (GE_EXPR
, boolean_type_node
, tem
,
3321 build_int_cstu (pointer64_type
, 0x80000000))
3322 : boolean_false_node
,
3323 build0 (PLACEHOLDER_EXPR
, void_type_node
),
3324 convert (pointer32_type
, tem
));
3327 = make_descriptor_field ("POINTER", pointer32_type
, record_type
, tem
,
3333 case By_Short_Descriptor
:
3334 case By_Descriptor_S
:
3335 case By_Short_Descriptor_S
:
3338 case By_Descriptor_SB
:
3339 case By_Short_Descriptor_SB
:
3341 = make_descriptor_field ("SB_L1", gnat_type_for_size (32, 1),
3343 (TREE_CODE (type
) == ARRAY_TYPE
3344 ? TYPE_MIN_VALUE (TYPE_DOMAIN (type
))
3348 = make_descriptor_field ("SB_U1", gnat_type_for_size (32, 1),
3350 (TREE_CODE (type
) == ARRAY_TYPE
3351 ? TYPE_MAX_VALUE (TYPE_DOMAIN (type
))
3356 case By_Descriptor_A
:
3357 case By_Short_Descriptor_A
:
3358 case By_Descriptor_NCA
:
3359 case By_Short_Descriptor_NCA
:
3361 = make_descriptor_field ("SCALE", gnat_type_for_size (8, 1),
3362 record_type
, size_zero_node
, field_list
);
3365 = make_descriptor_field ("DIGITS", gnat_type_for_size (8, 1),
3366 record_type
, size_zero_node
, field_list
);
3369 = make_descriptor_field ("AFLAGS", gnat_type_for_size (8, 1),
3371 size_int ((mech
== By_Descriptor_NCA
3372 || mech
== By_Short_Descriptor_NCA
)
3374 /* Set FL_COLUMN, FL_COEFF, and
3376 : (TREE_CODE (type
) == ARRAY_TYPE
3377 && TYPE_CONVENTION_FORTRAN_P
3383 = make_descriptor_field ("DIMCT", gnat_type_for_size (8, 1),
3384 record_type
, size_int (ndim
), field_list
);
3387 = make_descriptor_field ("ARSIZE", gnat_type_for_size (32, 1),
3388 record_type
, size_in_bytes (type
),
3391 /* Now build a pointer to the 0,0,0... element. */
3392 tem
= build0 (PLACEHOLDER_EXPR
, type
);
3393 for (i
= 0, inner_type
= type
; i
< ndim
;
3394 i
++, inner_type
= TREE_TYPE (inner_type
))
3395 tem
= build4 (ARRAY_REF
, TREE_TYPE (inner_type
), tem
,
3396 convert (TYPE_DOMAIN (inner_type
), size_zero_node
),
3397 NULL_TREE
, NULL_TREE
);
3400 = make_descriptor_field ("A0", pointer32_type
, record_type
,
3401 build1 (ADDR_EXPR
, pointer32_type
, tem
),
3404 /* Next come the addressing coefficients. */
3405 tem
= size_one_node
;
3406 for (i
= 0; i
< ndim
; i
++)
3410 = size_binop (MULT_EXPR
, tem
,
3411 size_binop (PLUS_EXPR
,
3412 size_binop (MINUS_EXPR
,
3413 TYPE_MAX_VALUE (idx_arr
[i
]),
3414 TYPE_MIN_VALUE (idx_arr
[i
])),
3417 fname
[0] = ((mech
== By_Descriptor_NCA
||
3418 mech
== By_Short_Descriptor_NCA
) ? 'S' : 'M');
3419 fname
[1] = '0' + i
, fname
[2] = 0;
3421 = make_descriptor_field (fname
, gnat_type_for_size (32, 1),
3422 record_type
, idx_length
, field_list
);
3424 if (mech
== By_Descriptor_NCA
|| mech
== By_Short_Descriptor_NCA
)
3428 /* Finally here are the bounds. */
3429 for (i
= 0; i
< ndim
; i
++)
3433 fname
[0] = 'L', fname
[1] = '0' + i
, fname
[2] = 0;
3435 = make_descriptor_field (fname
, gnat_type_for_size (32, 1),
3436 record_type
, TYPE_MIN_VALUE (idx_arr
[i
]),
3441 = make_descriptor_field (fname
, gnat_type_for_size (32, 1),
3442 record_type
, TYPE_MAX_VALUE (idx_arr
[i
]),
3448 post_error ("unsupported descriptor type for &", gnat_entity
);
3451 TYPE_NAME (record_type
) = create_concat_name (gnat_entity
, "DESC");
3452 finish_record_type (record_type
, nreverse (field_list
), 0, false);
3456 /* Build a 64-bit VMS descriptor from a Mechanism_Type, which must specify a
3457 descriptor type, and the GCC type of an object. Each FIELD_DECL in the
3458 type contains in its DECL_INITIAL the expression to use when a constructor
3459 is made for the type. GNAT_ENTITY is an entity used to print out an error
3460 message if the mechanism cannot be applied to an object of that type and
3461 also for the name. */
3464 build_vms_descriptor (tree type
, Mechanism_Type mech
, Entity_Id gnat_entity
)
3466 tree record_type
= make_node (RECORD_TYPE
);
3467 tree pointer64_type
;
3468 tree field_list
= NULL_TREE
;
3469 int klass
, ndim
, i
, dtype
= 0;
3470 tree inner_type
, tem
;
3473 /* If TYPE is an unconstrained array, use the underlying array type. */
3474 if (TREE_CODE (type
) == UNCONSTRAINED_ARRAY_TYPE
)
3475 type
= TREE_TYPE (TREE_TYPE (TYPE_FIELDS (TREE_TYPE (type
))));
3477 /* If this is an array, compute the number of dimensions in the array,
3478 get the index types, and point to the inner type. */
3479 if (TREE_CODE (type
) != ARRAY_TYPE
)
3482 for (ndim
= 1, inner_type
= type
;
3483 TREE_CODE (TREE_TYPE (inner_type
)) == ARRAY_TYPE
3484 && TYPE_MULTI_ARRAY_P (TREE_TYPE (inner_type
));
3485 ndim
++, inner_type
= TREE_TYPE (inner_type
))
3488 idx_arr
= XALLOCAVEC (tree
, ndim
);
3490 if (mech
!= By_Descriptor_NCA
3491 && TREE_CODE (type
) == ARRAY_TYPE
&& TYPE_CONVENTION_FORTRAN_P (type
))
3492 for (i
= ndim
- 1, inner_type
= type
;
3494 i
--, inner_type
= TREE_TYPE (inner_type
))
3495 idx_arr
[i
] = TYPE_DOMAIN (inner_type
);
3497 for (i
= 0, inner_type
= type
;
3499 i
++, inner_type
= TREE_TYPE (inner_type
))
3500 idx_arr
[i
] = TYPE_DOMAIN (inner_type
);
3502 /* Now get the DTYPE value. */
3503 switch (TREE_CODE (type
))
3508 if (TYPE_VAX_FLOATING_POINT_P (type
))
3509 switch (tree_low_cst (TYPE_DIGITS_VALUE (type
), 1))
3522 switch (GET_MODE_BITSIZE (TYPE_MODE (type
)))
3525 dtype
= TYPE_UNSIGNED (type
) ? 2 : 6;
3528 dtype
= TYPE_UNSIGNED (type
) ? 3 : 7;
3531 dtype
= TYPE_UNSIGNED (type
) ? 4 : 8;
3534 dtype
= TYPE_UNSIGNED (type
) ? 5 : 9;
3537 dtype
= TYPE_UNSIGNED (type
) ? 25 : 26;
3543 dtype
= GET_MODE_BITSIZE (TYPE_MODE (type
)) == 32 ? 52 : 53;
3547 if (TREE_CODE (TREE_TYPE (type
)) == INTEGER_TYPE
3548 && TYPE_VAX_FLOATING_POINT_P (type
))
3549 switch (tree_low_cst (TYPE_DIGITS_VALUE (type
), 1))
3561 dtype
= GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (type
))) == 32 ? 54: 55;
3572 /* Get the CLASS value. */
3575 case By_Descriptor_A
:
3578 case By_Descriptor_NCA
:
3581 case By_Descriptor_SB
:
3585 case By_Descriptor_S
:
3591 /* Make the type for a 64-bit descriptor for VMS. The first six fields
3592 are the same for all types. */
3594 = make_descriptor_field ("MBO", gnat_type_for_size (16, 1),
3595 record_type
, size_int (1), field_list
);
3597 = make_descriptor_field ("DTYPE", gnat_type_for_size (8, 1),
3598 record_type
, size_int (dtype
), field_list
);
3600 = make_descriptor_field ("CLASS", gnat_type_for_size (8, 1),
3601 record_type
, size_int (klass
), field_list
);
3603 = make_descriptor_field ("MBMO", gnat_type_for_size (32, 1),
3604 record_type
, size_int (-1), field_list
);
3606 = make_descriptor_field ("LENGTH", gnat_type_for_size (64, 1),
3608 size_in_bytes (mech
== By_Descriptor_A
3609 ? inner_type
: type
),
3612 pointer64_type
= build_pointer_type_for_mode (type
, DImode
, false);
3615 = make_descriptor_field ("POINTER", pointer64_type
, record_type
,
3616 build_unary_op (ADDR_EXPR
, pointer64_type
,
3617 build0 (PLACEHOLDER_EXPR
, type
)),
3623 case By_Descriptor_S
:
3626 case By_Descriptor_SB
:
3628 = make_descriptor_field ("SB_L1", gnat_type_for_size (64, 1),
3630 (TREE_CODE (type
) == ARRAY_TYPE
3631 ? TYPE_MIN_VALUE (TYPE_DOMAIN (type
))
3635 = make_descriptor_field ("SB_U1", gnat_type_for_size (64, 1),
3637 (TREE_CODE (type
) == ARRAY_TYPE
3638 ? TYPE_MAX_VALUE (TYPE_DOMAIN (type
))
3643 case By_Descriptor_A
:
3644 case By_Descriptor_NCA
:
3646 = make_descriptor_field ("SCALE", gnat_type_for_size (8, 1),
3647 record_type
, size_zero_node
, field_list
);
3650 = make_descriptor_field ("DIGITS", gnat_type_for_size (8, 1),
3651 record_type
, size_zero_node
, field_list
);
3653 dtype
= (mech
== By_Descriptor_NCA
3655 /* Set FL_COLUMN, FL_COEFF, and
3657 : (TREE_CODE (type
) == ARRAY_TYPE
3658 && TYPE_CONVENTION_FORTRAN_P (type
)
3661 = make_descriptor_field ("AFLAGS", gnat_type_for_size (8, 1),
3662 record_type
, size_int (dtype
),
3666 = make_descriptor_field ("DIMCT", gnat_type_for_size (8, 1),
3667 record_type
, size_int (ndim
), field_list
);
3670 = make_descriptor_field ("MBZ", gnat_type_for_size (32, 1),
3671 record_type
, size_int (0), field_list
);
3673 = make_descriptor_field ("ARSIZE", gnat_type_for_size (64, 1),
3674 record_type
, size_in_bytes (type
),
3677 /* Now build a pointer to the 0,0,0... element. */
3678 tem
= build0 (PLACEHOLDER_EXPR
, type
);
3679 for (i
= 0, inner_type
= type
; i
< ndim
;
3680 i
++, inner_type
= TREE_TYPE (inner_type
))
3681 tem
= build4 (ARRAY_REF
, TREE_TYPE (inner_type
), tem
,
3682 convert (TYPE_DOMAIN (inner_type
), size_zero_node
),
3683 NULL_TREE
, NULL_TREE
);
3686 = make_descriptor_field ("A0", pointer64_type
, record_type
,
3687 build1 (ADDR_EXPR
, pointer64_type
, tem
),
3690 /* Next come the addressing coefficients. */
3691 tem
= size_one_node
;
3692 for (i
= 0; i
< ndim
; i
++)
3696 = size_binop (MULT_EXPR
, tem
,
3697 size_binop (PLUS_EXPR
,
3698 size_binop (MINUS_EXPR
,
3699 TYPE_MAX_VALUE (idx_arr
[i
]),
3700 TYPE_MIN_VALUE (idx_arr
[i
])),
3703 fname
[0] = (mech
== By_Descriptor_NCA
? 'S' : 'M');
3704 fname
[1] = '0' + i
, fname
[2] = 0;
3706 = make_descriptor_field (fname
, gnat_type_for_size (64, 1),
3707 record_type
, idx_length
, field_list
);
3709 if (mech
== By_Descriptor_NCA
)
3713 /* Finally here are the bounds. */
3714 for (i
= 0; i
< ndim
; i
++)
3718 fname
[0] = 'L', fname
[1] = '0' + i
, fname
[2] = 0;
3720 = make_descriptor_field (fname
, gnat_type_for_size (64, 1),
3722 TYPE_MIN_VALUE (idx_arr
[i
]), field_list
);
3726 = make_descriptor_field (fname
, gnat_type_for_size (64, 1),
3728 TYPE_MAX_VALUE (idx_arr
[i
]), field_list
);
3733 post_error ("unsupported descriptor type for &", gnat_entity
);
3736 TYPE_NAME (record_type
) = create_concat_name (gnat_entity
, "DESC64");
3737 finish_record_type (record_type
, nreverse (field_list
), 0, false);
3741 /* Fill in a VMS descriptor of GNU_TYPE for GNU_EXPR and return the result.
3742 GNAT_ACTUAL is the actual parameter for which the descriptor is built. */
3745 fill_vms_descriptor (tree gnu_type
, tree gnu_expr
, Node_Id gnat_actual
)
3747 VEC(constructor_elt
,gc
) *v
= NULL
;
3750 gnu_expr
= maybe_unconstrained_array (gnu_expr
);
3751 gnu_expr
= gnat_protect_expr (gnu_expr
);
3752 gnat_mark_addressable (gnu_expr
);
3754 /* We may need to substitute both GNU_EXPR and a CALL_EXPR to the raise CE
3755 routine in case we have a 32-bit descriptor. */
3756 gnu_expr
= build2 (COMPOUND_EXPR
, void_type_node
,
3757 build_call_raise (CE_Range_Check_Failed
, gnat_actual
,
3758 N_Raise_Constraint_Error
),
3761 for (field
= TYPE_FIELDS (gnu_type
); field
; field
= DECL_CHAIN (field
))
3764 = convert (TREE_TYPE (field
),
3765 SUBSTITUTE_PLACEHOLDER_IN_EXPR (DECL_INITIAL (field
),
3767 CONSTRUCTOR_APPEND_ELT (v
, field
, value
);
3770 return gnat_build_constructor (gnu_type
, v
);
3773 /* Convert GNU_EXPR, a pointer to a 64bit VMS descriptor, to GNU_TYPE, a
3774 regular pointer or fat pointer type. GNAT_SUBPROG is the subprogram to
3775 which the VMS descriptor is passed. */
3778 convert_vms_descriptor64 (tree gnu_type
, tree gnu_expr
, Entity_Id gnat_subprog
)
3780 tree desc_type
= TREE_TYPE (TREE_TYPE (gnu_expr
));
3781 tree desc
= build1 (INDIRECT_REF
, desc_type
, gnu_expr
);
3782 /* The CLASS field is the 3rd field in the descriptor. */
3783 tree klass
= DECL_CHAIN (DECL_CHAIN (TYPE_FIELDS (desc_type
)));
3784 /* The POINTER field is the 6th field in the descriptor. */
3785 tree pointer
= DECL_CHAIN (DECL_CHAIN (DECL_CHAIN (klass
)));
3787 /* Retrieve the value of the POINTER field. */
3789 = build3 (COMPONENT_REF
, TREE_TYPE (pointer
), desc
, pointer
, NULL_TREE
);
3791 if (POINTER_TYPE_P (gnu_type
))
3792 return convert (gnu_type
, gnu_expr64
);
3794 else if (TYPE_IS_FAT_POINTER_P (gnu_type
))
3796 tree p_array_type
= TREE_TYPE (TYPE_FIELDS (gnu_type
));
3797 tree p_bounds_type
= TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (gnu_type
)));
3798 tree template_type
= TREE_TYPE (p_bounds_type
);
3799 tree min_field
= TYPE_FIELDS (template_type
);
3800 tree max_field
= DECL_CHAIN (TYPE_FIELDS (template_type
));
3801 tree template_tree
, template_addr
, aflags
, dimct
, t
, u
;
3802 /* See the head comment of build_vms_descriptor. */
3803 int iklass
= TREE_INT_CST_LOW (DECL_INITIAL (klass
));
3804 tree lfield
, ufield
;
3805 VEC(constructor_elt
,gc
) *v
;
3807 /* Convert POINTER to the pointer-to-array type. */
3808 gnu_expr64
= convert (p_array_type
, gnu_expr64
);
3812 case 1: /* Class S */
3813 case 15: /* Class SB */
3814 /* Build {1, LENGTH} template; LENGTH64 is the 5th field. */
3815 v
= VEC_alloc (constructor_elt
, gc
, 2);
3816 t
= DECL_CHAIN (DECL_CHAIN (klass
));
3817 t
= build3 (COMPONENT_REF
, TREE_TYPE (t
), desc
, t
, NULL_TREE
);
3818 CONSTRUCTOR_APPEND_ELT (v
, min_field
,
3819 convert (TREE_TYPE (min_field
),
3821 CONSTRUCTOR_APPEND_ELT (v
, max_field
,
3822 convert (TREE_TYPE (max_field
), t
));
3823 template_tree
= gnat_build_constructor (template_type
, v
);
3824 template_addr
= build_unary_op (ADDR_EXPR
, NULL_TREE
, template_tree
);
3826 /* For class S, we are done. */
3830 /* Test that we really have a SB descriptor, like DEC Ada. */
3831 t
= build3 (COMPONENT_REF
, TREE_TYPE (klass
), desc
, klass
, NULL
);
3832 u
= convert (TREE_TYPE (klass
), DECL_INITIAL (klass
));
3833 u
= build_binary_op (EQ_EXPR
, boolean_type_node
, t
, u
);
3834 /* If so, there is already a template in the descriptor and
3835 it is located right after the POINTER field. The fields are
3836 64bits so they must be repacked. */
3837 t
= DECL_CHAIN (pointer
);
3838 lfield
= build3 (COMPONENT_REF
, TREE_TYPE (t
), desc
, t
, NULL_TREE
);
3839 lfield
= convert (TREE_TYPE (TYPE_FIELDS (template_type
)), lfield
);
3842 ufield
= build3 (COMPONENT_REF
, TREE_TYPE (t
), desc
, t
, NULL_TREE
);
3844 (TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (template_type
))), ufield
);
3846 /* Build the template in the form of a constructor. */
3847 v
= VEC_alloc (constructor_elt
, gc
, 2);
3848 CONSTRUCTOR_APPEND_ELT (v
, TYPE_FIELDS (template_type
), lfield
);
3849 CONSTRUCTOR_APPEND_ELT (v
, DECL_CHAIN (TYPE_FIELDS (template_type
)),
3851 template_tree
= gnat_build_constructor (template_type
, v
);
3853 /* Otherwise use the {1, LENGTH} template we build above. */
3854 template_addr
= build3 (COND_EXPR
, p_bounds_type
, u
,
3855 build_unary_op (ADDR_EXPR
, p_bounds_type
,
3860 case 4: /* Class A */
3861 /* The AFLAGS field is the 3rd field after the pointer in the
3863 t
= DECL_CHAIN (DECL_CHAIN (DECL_CHAIN (pointer
)));
3864 aflags
= build3 (COMPONENT_REF
, TREE_TYPE (t
), desc
, t
, NULL_TREE
);
3865 /* The DIMCT field is the next field in the descriptor after
3868 dimct
= build3 (COMPONENT_REF
, TREE_TYPE (t
), desc
, t
, NULL_TREE
);
3869 /* Raise CONSTRAINT_ERROR if either more than 1 dimension
3870 or FL_COEFF or FL_BOUNDS not set. */
3871 u
= build_int_cst (TREE_TYPE (aflags
), 192);
3872 u
= build_binary_op (TRUTH_OR_EXPR
, boolean_type_node
,
3873 build_binary_op (NE_EXPR
, boolean_type_node
,
3875 convert (TREE_TYPE (dimct
),
3877 build_binary_op (NE_EXPR
, boolean_type_node
,
3878 build2 (BIT_AND_EXPR
,
3882 /* There is already a template in the descriptor and it is located
3883 in block 3. The fields are 64bits so they must be repacked. */
3884 t
= DECL_CHAIN (DECL_CHAIN (DECL_CHAIN (DECL_CHAIN (DECL_CHAIN
3886 lfield
= build3 (COMPONENT_REF
, TREE_TYPE (t
), desc
, t
, NULL_TREE
);
3887 lfield
= convert (TREE_TYPE (TYPE_FIELDS (template_type
)), lfield
);
3890 ufield
= build3 (COMPONENT_REF
, TREE_TYPE (t
), desc
, t
, NULL_TREE
);
3892 (TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (template_type
))), ufield
);
3894 /* Build the template in the form of a constructor. */
3895 v
= VEC_alloc (constructor_elt
, gc
, 2);
3896 CONSTRUCTOR_APPEND_ELT (v
, TYPE_FIELDS (template_type
), lfield
);
3897 CONSTRUCTOR_APPEND_ELT (v
, DECL_CHAIN (TYPE_FIELDS (template_type
)),
3899 template_tree
= gnat_build_constructor (template_type
, v
);
3900 template_tree
= build3 (COND_EXPR
, template_type
, u
,
3901 build_call_raise (CE_Length_Check_Failed
, Empty
,
3902 N_Raise_Constraint_Error
),
3905 = build_unary_op (ADDR_EXPR
, p_bounds_type
, template_tree
);
3908 case 10: /* Class NCA */
3910 post_error ("unsupported descriptor type for &", gnat_subprog
);
3911 template_addr
= integer_zero_node
;
3915 /* Build the fat pointer in the form of a constructor. */
3916 v
= VEC_alloc (constructor_elt
, gc
, 2);
3917 CONSTRUCTOR_APPEND_ELT (v
, TYPE_FIELDS (gnu_type
), gnu_expr64
);
3918 CONSTRUCTOR_APPEND_ELT (v
, DECL_CHAIN (TYPE_FIELDS (gnu_type
)),
3920 return gnat_build_constructor (gnu_type
, v
);
3927 /* Convert GNU_EXPR, a pointer to a 32bit VMS descriptor, to GNU_TYPE, a
3928 regular pointer or fat pointer type. GNAT_SUBPROG is the subprogram to
3929 which the VMS descriptor is passed. */
3932 convert_vms_descriptor32 (tree gnu_type
, tree gnu_expr
, Entity_Id gnat_subprog
)
3934 tree desc_type
= TREE_TYPE (TREE_TYPE (gnu_expr
));
3935 tree desc
= build1 (INDIRECT_REF
, desc_type
, gnu_expr
);
3936 /* The CLASS field is the 3rd field in the descriptor. */
3937 tree klass
= DECL_CHAIN (DECL_CHAIN (TYPE_FIELDS (desc_type
)));
3938 /* The POINTER field is the 4th field in the descriptor. */
3939 tree pointer
= DECL_CHAIN (klass
);
3941 /* Retrieve the value of the POINTER field. */
3943 = build3 (COMPONENT_REF
, TREE_TYPE (pointer
), desc
, pointer
, NULL_TREE
);
3945 if (POINTER_TYPE_P (gnu_type
))
3946 return convert (gnu_type
, gnu_expr32
);
3948 else if (TYPE_IS_FAT_POINTER_P (gnu_type
))
3950 tree p_array_type
= TREE_TYPE (TYPE_FIELDS (gnu_type
));
3951 tree p_bounds_type
= TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (gnu_type
)));
3952 tree template_type
= TREE_TYPE (p_bounds_type
);
3953 tree min_field
= TYPE_FIELDS (template_type
);
3954 tree max_field
= DECL_CHAIN (TYPE_FIELDS (template_type
));
3955 tree template_tree
, template_addr
, aflags
, dimct
, t
, u
;
3956 /* See the head comment of build_vms_descriptor. */
3957 int iklass
= TREE_INT_CST_LOW (DECL_INITIAL (klass
));
3958 VEC(constructor_elt
,gc
) *v
;
3960 /* Convert POINTER to the pointer-to-array type. */
3961 gnu_expr32
= convert (p_array_type
, gnu_expr32
);
3965 case 1: /* Class S */
3966 case 15: /* Class SB */
3967 /* Build {1, LENGTH} template; LENGTH is the 1st field. */
3968 v
= VEC_alloc (constructor_elt
, gc
, 2);
3969 t
= TYPE_FIELDS (desc_type
);
3970 t
= build3 (COMPONENT_REF
, TREE_TYPE (t
), desc
, t
, NULL_TREE
);
3971 CONSTRUCTOR_APPEND_ELT (v
, min_field
,
3972 convert (TREE_TYPE (min_field
),
3974 CONSTRUCTOR_APPEND_ELT (v
, max_field
,
3975 convert (TREE_TYPE (max_field
), t
));
3976 template_tree
= gnat_build_constructor (template_type
, v
);
3977 template_addr
= build_unary_op (ADDR_EXPR
, NULL_TREE
, template_tree
);
3979 /* For class S, we are done. */
3983 /* Test that we really have a SB descriptor, like DEC Ada. */
3984 t
= build3 (COMPONENT_REF
, TREE_TYPE (klass
), desc
, klass
, NULL
);
3985 u
= convert (TREE_TYPE (klass
), DECL_INITIAL (klass
));
3986 u
= build_binary_op (EQ_EXPR
, boolean_type_node
, t
, u
);
3987 /* If so, there is already a template in the descriptor and
3988 it is located right after the POINTER field. */
3989 t
= DECL_CHAIN (pointer
);
3991 = build3 (COMPONENT_REF
, TREE_TYPE (t
), desc
, t
, NULL_TREE
);
3992 /* Otherwise use the {1, LENGTH} template we build above. */
3993 template_addr
= build3 (COND_EXPR
, p_bounds_type
, u
,
3994 build_unary_op (ADDR_EXPR
, p_bounds_type
,
3999 case 4: /* Class A */
4000 /* The AFLAGS field is the 7th field in the descriptor. */
4001 t
= DECL_CHAIN (DECL_CHAIN (DECL_CHAIN (pointer
)));
4002 aflags
= build3 (COMPONENT_REF
, TREE_TYPE (t
), desc
, t
, NULL_TREE
);
4003 /* The DIMCT field is the 8th field in the descriptor. */
4005 dimct
= build3 (COMPONENT_REF
, TREE_TYPE (t
), desc
, t
, NULL_TREE
);
4006 /* Raise CONSTRAINT_ERROR if either more than 1 dimension
4007 or FL_COEFF or FL_BOUNDS not set. */
4008 u
= build_int_cst (TREE_TYPE (aflags
), 192);
4009 u
= build_binary_op (TRUTH_OR_EXPR
, boolean_type_node
,
4010 build_binary_op (NE_EXPR
, boolean_type_node
,
4012 convert (TREE_TYPE (dimct
),
4014 build_binary_op (NE_EXPR
, boolean_type_node
,
4015 build2 (BIT_AND_EXPR
,
4019 /* There is already a template in the descriptor and it is
4020 located at the start of block 3 (12th field). */
4021 t
= DECL_CHAIN (DECL_CHAIN (DECL_CHAIN (DECL_CHAIN (t
))));
4023 = build3 (COMPONENT_REF
, TREE_TYPE (t
), desc
, t
, NULL_TREE
);
4024 template_tree
= build3 (COND_EXPR
, TREE_TYPE (t
), u
,
4025 build_call_raise (CE_Length_Check_Failed
, Empty
,
4026 N_Raise_Constraint_Error
),
4029 = build_unary_op (ADDR_EXPR
, p_bounds_type
, template_tree
);
4032 case 10: /* Class NCA */
4034 post_error ("unsupported descriptor type for &", gnat_subprog
);
4035 template_addr
= integer_zero_node
;
4039 /* Build the fat pointer in the form of a constructor. */
4040 v
= VEC_alloc (constructor_elt
, gc
, 2);
4041 CONSTRUCTOR_APPEND_ELT (v
, TYPE_FIELDS (gnu_type
), gnu_expr32
);
4042 CONSTRUCTOR_APPEND_ELT (v
, DECL_CHAIN (TYPE_FIELDS (gnu_type
)),
4045 return gnat_build_constructor (gnu_type
, v
);
4052 /* Convert GNU_EXPR, a pointer to a VMS descriptor, to GNU_TYPE, a regular
4053 pointer or fat pointer type. GNU_EXPR_ALT_TYPE is the alternate (32-bit)
4054 pointer type of GNU_EXPR. BY_REF is true if the result is to be used by
4055 reference. GNAT_SUBPROG is the subprogram to which the VMS descriptor is
4059 convert_vms_descriptor (tree gnu_type
, tree gnu_expr
, tree gnu_expr_alt_type
,
4060 bool by_ref
, Entity_Id gnat_subprog
)
4062 tree desc_type
= TREE_TYPE (TREE_TYPE (gnu_expr
));
4063 tree desc
= build1 (INDIRECT_REF
, desc_type
, gnu_expr
);
4064 tree mbo
= TYPE_FIELDS (desc_type
);
4065 const char *mbostr
= IDENTIFIER_POINTER (DECL_NAME (mbo
));
4066 tree mbmo
= DECL_CHAIN (DECL_CHAIN (DECL_CHAIN (mbo
)));
4067 tree real_type
, is64bit
, gnu_expr32
, gnu_expr64
;
4070 real_type
= TREE_TYPE (gnu_type
);
4072 real_type
= gnu_type
;
4074 /* If the field name is not MBO, it must be 32-bit and no alternate.
4075 Otherwise primary must be 64-bit and alternate 32-bit. */
4076 if (strcmp (mbostr
, "MBO") != 0)
4078 tree ret
= convert_vms_descriptor32 (real_type
, gnu_expr
, gnat_subprog
);
4080 ret
= build_unary_op (ADDR_EXPR
, gnu_type
, ret
);
4084 /* Build the test for 64-bit descriptor. */
4085 mbo
= build3 (COMPONENT_REF
, TREE_TYPE (mbo
), desc
, mbo
, NULL_TREE
);
4086 mbmo
= build3 (COMPONENT_REF
, TREE_TYPE (mbmo
), desc
, mbmo
, NULL_TREE
);
4088 = build_binary_op (TRUTH_ANDIF_EXPR
, boolean_type_node
,
4089 build_binary_op (EQ_EXPR
, boolean_type_node
,
4090 convert (integer_type_node
, mbo
),
4092 build_binary_op (EQ_EXPR
, boolean_type_node
,
4093 convert (integer_type_node
, mbmo
),
4094 integer_minus_one_node
));
4096 /* Build the 2 possible end results. */
4097 gnu_expr64
= convert_vms_descriptor64 (real_type
, gnu_expr
, gnat_subprog
);
4099 gnu_expr64
= build_unary_op (ADDR_EXPR
, gnu_type
, gnu_expr64
);
4100 gnu_expr
= fold_convert (gnu_expr_alt_type
, gnu_expr
);
4101 gnu_expr32
= convert_vms_descriptor32 (real_type
, gnu_expr
, gnat_subprog
);
4103 gnu_expr32
= build_unary_op (ADDR_EXPR
, gnu_type
, gnu_expr32
);
4105 return build3 (COND_EXPR
, gnu_type
, is64bit
, gnu_expr64
, gnu_expr32
);
4108 /* Build a type to be used to represent an aliased object whose nominal type
4109 is an unconstrained array. This consists of a RECORD_TYPE containing a
4110 field of TEMPLATE_TYPE and a field of OBJECT_TYPE, which is an ARRAY_TYPE.
4111 If ARRAY_TYPE is that of an unconstrained array, this is used to represent
4112 an arbitrary unconstrained object. Use NAME as the name of the record.
4113 DEBUG_INFO_P is true if we need to write debug information for the type. */
4116 build_unc_object_type (tree template_type
, tree object_type
, tree name
,
4119 tree type
= make_node (RECORD_TYPE
);
4121 = create_field_decl (get_identifier ("BOUNDS"), template_type
, type
,
4122 NULL_TREE
, NULL_TREE
, 0, 1);
4124 = create_field_decl (get_identifier ("ARRAY"), object_type
, type
,
4125 NULL_TREE
, NULL_TREE
, 0, 1);
4127 TYPE_NAME (type
) = name
;
4128 TYPE_CONTAINS_TEMPLATE_P (type
) = 1;
4129 DECL_CHAIN (template_field
) = array_field
;
4130 finish_record_type (type
, template_field
, 0, true);
4132 /* Declare it now since it will never be declared otherwise. This is
4133 necessary to ensure that its subtrees are properly marked. */
4134 create_type_decl (name
, type
, NULL
, true, debug_info_p
, Empty
);
4139 /* Same, taking a thin or fat pointer type instead of a template type. */
4142 build_unc_object_type_from_ptr (tree thin_fat_ptr_type
, tree object_type
,
4143 tree name
, bool debug_info_p
)
4147 gcc_assert (TYPE_IS_FAT_OR_THIN_POINTER_P (thin_fat_ptr_type
));
4150 = (TYPE_IS_FAT_POINTER_P (thin_fat_ptr_type
)
4151 ? TREE_TYPE (TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (thin_fat_ptr_type
))))
4152 : TREE_TYPE (TYPE_FIELDS (TREE_TYPE (thin_fat_ptr_type
))));
4155 build_unc_object_type (template_type
, object_type
, name
, debug_info_p
);
4158 /* Update anything previously pointing to OLD_TYPE to point to NEW_TYPE.
4159 In the normal case this is just two adjustments, but we have more to
4160 do if NEW_TYPE is an UNCONSTRAINED_ARRAY_TYPE. */
4163 update_pointer_to (tree old_type
, tree new_type
)
4165 tree ptr
= TYPE_POINTER_TO (old_type
);
4166 tree ref
= TYPE_REFERENCE_TO (old_type
);
4169 /* If this is the main variant, process all the other variants first. */
4170 if (TYPE_MAIN_VARIANT (old_type
) == old_type
)
4171 for (t
= TYPE_NEXT_VARIANT (old_type
); t
; t
= TYPE_NEXT_VARIANT (t
))
4172 update_pointer_to (t
, new_type
);
4174 /* If no pointers and no references, we are done. */
4178 /* Merge the old type qualifiers in the new type.
4180 Each old variant has qualifiers for specific reasons, and the new
4181 designated type as well. Each set of qualifiers represents useful
4182 information grabbed at some point, and merging the two simply unifies
4183 these inputs into the final type description.
4185 Consider for instance a volatile type frozen after an access to constant
4186 type designating it; after the designated type's freeze, we get here with
4187 a volatile NEW_TYPE and a dummy OLD_TYPE with a readonly variant, created
4188 when the access type was processed. We will make a volatile and readonly
4189 designated type, because that's what it really is.
4191 We might also get here for a non-dummy OLD_TYPE variant with different
4192 qualifiers than those of NEW_TYPE, for instance in some cases of pointers
4193 to private record type elaboration (see the comments around the call to
4194 this routine in gnat_to_gnu_entity <E_Access_Type>). We have to merge
4195 the qualifiers in those cases too, to avoid accidentally discarding the
4196 initial set, and will often end up with OLD_TYPE == NEW_TYPE then. */
4198 = build_qualified_type (new_type
,
4199 TYPE_QUALS (old_type
) | TYPE_QUALS (new_type
));
4201 /* If old type and new type are identical, there is nothing to do. */
4202 if (old_type
== new_type
)
4205 /* Otherwise, first handle the simple case. */
4206 if (TREE_CODE (new_type
) != UNCONSTRAINED_ARRAY_TYPE
)
4208 tree new_ptr
, new_ref
;
4210 /* If pointer or reference already points to new type, nothing to do.
4211 This can happen as update_pointer_to can be invoked multiple times
4212 on the same couple of types because of the type variants. */
4213 if ((ptr
&& TREE_TYPE (ptr
) == new_type
)
4214 || (ref
&& TREE_TYPE (ref
) == new_type
))
4217 /* Chain PTR and its variants at the end. */
4218 new_ptr
= TYPE_POINTER_TO (new_type
);
4221 while (TYPE_NEXT_PTR_TO (new_ptr
))
4222 new_ptr
= TYPE_NEXT_PTR_TO (new_ptr
);
4223 TYPE_NEXT_PTR_TO (new_ptr
) = ptr
;
4226 TYPE_POINTER_TO (new_type
) = ptr
;
4228 /* Now adjust them. */
4229 for (; ptr
; ptr
= TYPE_NEXT_PTR_TO (ptr
))
4230 for (t
= TYPE_MAIN_VARIANT (ptr
); t
; t
= TYPE_NEXT_VARIANT (t
))
4232 TREE_TYPE (t
) = new_type
;
4233 if (TYPE_NULL_BOUNDS (t
))
4234 TREE_TYPE (TREE_OPERAND (TYPE_NULL_BOUNDS (t
), 0)) = new_type
;
4237 /* Chain REF and its variants at the end. */
4238 new_ref
= TYPE_REFERENCE_TO (new_type
);
4241 while (TYPE_NEXT_REF_TO (new_ref
))
4242 new_ref
= TYPE_NEXT_REF_TO (new_ref
);
4243 TYPE_NEXT_REF_TO (new_ref
) = ref
;
4246 TYPE_REFERENCE_TO (new_type
) = ref
;
4248 /* Now adjust them. */
4249 for (; ref
; ref
= TYPE_NEXT_REF_TO (ref
))
4250 for (t
= TYPE_MAIN_VARIANT (ref
); t
; t
= TYPE_NEXT_VARIANT (t
))
4251 TREE_TYPE (t
) = new_type
;
4253 TYPE_POINTER_TO (old_type
) = NULL_TREE
;
4254 TYPE_REFERENCE_TO (old_type
) = NULL_TREE
;
4257 /* Now deal with the unconstrained array case. In this case the pointer
4258 is actually a record where both fields are pointers to dummy nodes.
4259 Turn them into pointers to the correct types using update_pointer_to.
4260 Likewise for the pointer to the object record (thin pointer). */
4263 tree new_ptr
= TYPE_POINTER_TO (new_type
);
4265 gcc_assert (TYPE_IS_FAT_POINTER_P (ptr
));
4267 /* If PTR already points to NEW_TYPE, nothing to do. This can happen
4268 since update_pointer_to can be invoked multiple times on the same
4269 couple of types because of the type variants. */
4270 if (TYPE_UNCONSTRAINED_ARRAY (ptr
) == new_type
)
4274 (TREE_TYPE (TREE_TYPE (TYPE_FIELDS (ptr
))),
4275 TREE_TYPE (TREE_TYPE (TYPE_FIELDS (new_ptr
))));
4278 (TREE_TYPE (TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (ptr
)))),
4279 TREE_TYPE (TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (new_ptr
)))));
4281 update_pointer_to (TYPE_OBJECT_RECORD_TYPE (old_type
),
4282 TYPE_OBJECT_RECORD_TYPE (new_type
));
4284 TYPE_POINTER_TO (old_type
) = NULL_TREE
;
4288 /* Convert EXPR, a pointer to a constrained array, into a pointer to an
4289 unconstrained one. This involves making or finding a template. */
4292 convert_to_fat_pointer (tree type
, tree expr
)
4294 tree template_type
= TREE_TYPE (TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (type
))));
4295 tree p_array_type
= TREE_TYPE (TYPE_FIELDS (type
));
4296 tree etype
= TREE_TYPE (expr
);
4298 VEC(constructor_elt
,gc
) *v
= VEC_alloc (constructor_elt
, gc
, 2);
4300 /* If EXPR is null, make a fat pointer that contains a null pointer to the
4301 array (compare_fat_pointers ensures that this is the full discriminant)
4302 and a valid pointer to the bounds. This latter property is necessary
4303 since the compiler can hoist the load of the bounds done through it. */
4304 if (integer_zerop (expr
))
4306 tree ptr_template_type
= TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (type
)));
4307 tree null_bounds
, t
;
4309 if (TYPE_NULL_BOUNDS (ptr_template_type
))
4310 null_bounds
= TYPE_NULL_BOUNDS (ptr_template_type
);
4313 /* The template type can still be dummy at this point so we build an
4314 empty constructor. The middle-end will fill it in with zeros. */
4315 t
= build_constructor (template_type
, NULL
);
4316 TREE_CONSTANT (t
) = TREE_STATIC (t
) = 1;
4317 null_bounds
= build_unary_op (ADDR_EXPR
, NULL_TREE
, t
);
4318 SET_TYPE_NULL_BOUNDS (ptr_template_type
, null_bounds
);
4321 CONSTRUCTOR_APPEND_ELT (v
, TYPE_FIELDS (type
),
4322 fold_convert (p_array_type
, null_pointer_node
));
4323 CONSTRUCTOR_APPEND_ELT (v
, DECL_CHAIN (TYPE_FIELDS (type
)), null_bounds
);
4324 t
= build_constructor (type
, v
);
4325 /* Do not set TREE_CONSTANT so as to force T to static memory. */
4326 TREE_CONSTANT (t
) = 0;
4327 TREE_STATIC (t
) = 1;
4332 /* If EXPR is a thin pointer, make template and data from the record. */
4333 if (TYPE_IS_THIN_POINTER_P (etype
))
4335 tree field
= TYPE_FIELDS (TREE_TYPE (etype
));
4337 expr
= gnat_protect_expr (expr
);
4338 if (TREE_CODE (expr
) == ADDR_EXPR
)
4339 expr
= TREE_OPERAND (expr
, 0);
4342 /* If we have a TYPE_UNCONSTRAINED_ARRAY attached to the RECORD_TYPE,
4343 the thin pointer value has been shifted so we first need to shift
4344 it back to get the template address. */
4345 if (TYPE_UNCONSTRAINED_ARRAY (TREE_TYPE (etype
)))
4347 = build_binary_op (POINTER_PLUS_EXPR
, etype
, expr
,
4348 fold_build1 (NEGATE_EXPR
, sizetype
,
4350 (DECL_CHAIN (field
))));
4351 expr
= build1 (INDIRECT_REF
, TREE_TYPE (etype
), expr
);
4354 template_tree
= build_component_ref (expr
, NULL_TREE
, field
, false);
4355 expr
= build_unary_op (ADDR_EXPR
, NULL_TREE
,
4356 build_component_ref (expr
, NULL_TREE
,
4357 DECL_CHAIN (field
), false));
4360 /* Otherwise, build the constructor for the template. */
4362 template_tree
= build_template (template_type
, TREE_TYPE (etype
), expr
);
4364 /* The final result is a constructor for the fat pointer.
4366 If EXPR is an argument of a foreign convention subprogram, the type it
4367 points to is directly the component type. In this case, the expression
4368 type may not match the corresponding FIELD_DECL type at this point, so we
4369 call "convert" here to fix that up if necessary. This type consistency is
4370 required, for instance because it ensures that possible later folding of
4371 COMPONENT_REFs against this constructor always yields something of the
4372 same type as the initial reference.
4374 Note that the call to "build_template" above is still fine because it
4375 will only refer to the provided TEMPLATE_TYPE in this case. */
4376 CONSTRUCTOR_APPEND_ELT (v
, TYPE_FIELDS (type
),
4377 convert (p_array_type
, expr
));
4378 CONSTRUCTOR_APPEND_ELT (v
, DECL_CHAIN (TYPE_FIELDS (type
)),
4379 build_unary_op (ADDR_EXPR
, NULL_TREE
,
4381 return gnat_build_constructor (type
, v
);
4384 /* Create an expression whose value is that of EXPR,
4385 converted to type TYPE. The TREE_TYPE of the value
4386 is always TYPE. This function implements all reasonable
4387 conversions; callers should filter out those that are
4388 not permitted by the language being compiled. */
4391 convert (tree type
, tree expr
)
4393 tree etype
= TREE_TYPE (expr
);
4394 enum tree_code ecode
= TREE_CODE (etype
);
4395 enum tree_code code
= TREE_CODE (type
);
4397 /* If the expression is already of the right type, we are done. */
4401 /* If both input and output have padding and are of variable size, do this
4402 as an unchecked conversion. Likewise if one is a mere variant of the
4403 other, so we avoid a pointless unpad/repad sequence. */
4404 else if (code
== RECORD_TYPE
&& ecode
== RECORD_TYPE
4405 && TYPE_PADDING_P (type
) && TYPE_PADDING_P (etype
)
4406 && (!TREE_CONSTANT (TYPE_SIZE (type
))
4407 || !TREE_CONSTANT (TYPE_SIZE (etype
))
4408 || TYPE_MAIN_VARIANT (type
) == TYPE_MAIN_VARIANT (etype
)
4409 || TYPE_NAME (TREE_TYPE (TYPE_FIELDS (type
)))
4410 == TYPE_NAME (TREE_TYPE (TYPE_FIELDS (etype
)))))
4413 /* If the output type has padding, convert to the inner type and make a
4414 constructor to build the record, unless a variable size is involved. */
4415 else if (code
== RECORD_TYPE
&& TYPE_PADDING_P (type
))
4417 VEC(constructor_elt
,gc
) *v
;
4419 /* If we previously converted from another type and our type is
4420 of variable size, remove the conversion to avoid the need for
4421 variable-sized temporaries. Likewise for a conversion between
4422 original and packable version. */
4423 if (TREE_CODE (expr
) == VIEW_CONVERT_EXPR
4424 && (!TREE_CONSTANT (TYPE_SIZE (type
))
4425 || (ecode
== RECORD_TYPE
4426 && TYPE_NAME (etype
)
4427 == TYPE_NAME (TREE_TYPE (TREE_OPERAND (expr
, 0))))))
4428 expr
= TREE_OPERAND (expr
, 0);
4430 /* If we are just removing the padding from expr, convert the original
4431 object if we have variable size in order to avoid the need for some
4432 variable-sized temporaries. Likewise if the padding is a variant
4433 of the other, so we avoid a pointless unpad/repad sequence. */
4434 if (TREE_CODE (expr
) == COMPONENT_REF
4435 && TYPE_IS_PADDING_P (TREE_TYPE (TREE_OPERAND (expr
, 0)))
4436 && (!TREE_CONSTANT (TYPE_SIZE (type
))
4437 || TYPE_MAIN_VARIANT (type
)
4438 == TYPE_MAIN_VARIANT (TREE_TYPE (TREE_OPERAND (expr
, 0)))
4439 || (ecode
== RECORD_TYPE
4440 && TYPE_NAME (etype
)
4441 == TYPE_NAME (TREE_TYPE (TYPE_FIELDS (type
))))))
4442 return convert (type
, TREE_OPERAND (expr
, 0));
4444 /* If the inner type is of self-referential size and the expression type
4445 is a record, do this as an unchecked conversion. But first pad the
4446 expression if possible to have the same size on both sides. */
4447 if (ecode
== RECORD_TYPE
4448 && CONTAINS_PLACEHOLDER_P (DECL_SIZE (TYPE_FIELDS (type
))))
4450 if (TREE_CODE (TYPE_SIZE (etype
)) == INTEGER_CST
)
4451 expr
= convert (maybe_pad_type (etype
, TYPE_SIZE (type
), 0, Empty
,
4452 false, false, false, true),
4454 return unchecked_convert (type
, expr
, false);
4457 /* If we are converting between array types with variable size, do the
4458 final conversion as an unchecked conversion, again to avoid the need
4459 for some variable-sized temporaries. If valid, this conversion is
4460 very likely purely technical and without real effects. */
4461 if (ecode
== ARRAY_TYPE
4462 && TREE_CODE (TREE_TYPE (TYPE_FIELDS (type
))) == ARRAY_TYPE
4463 && !TREE_CONSTANT (TYPE_SIZE (etype
))
4464 && !TREE_CONSTANT (TYPE_SIZE (type
)))
4465 return unchecked_convert (type
,
4466 convert (TREE_TYPE (TYPE_FIELDS (type
)),
4470 v
= VEC_alloc (constructor_elt
, gc
, 1);
4471 CONSTRUCTOR_APPEND_ELT (v
, TYPE_FIELDS (type
),
4472 convert (TREE_TYPE (TYPE_FIELDS (type
)), expr
));
4473 return gnat_build_constructor (type
, v
);
4476 /* If the input type has padding, remove it and convert to the output type.
4477 The conditions ordering is arranged to ensure that the output type is not
4478 a padding type here, as it is not clear whether the conversion would
4479 always be correct if this was to happen. */
4480 else if (ecode
== RECORD_TYPE
&& TYPE_PADDING_P (etype
))
4484 /* If we have just converted to this padded type, just get the
4485 inner expression. */
4486 if (TREE_CODE (expr
) == CONSTRUCTOR
4487 && !VEC_empty (constructor_elt
, CONSTRUCTOR_ELTS (expr
))
4488 && VEC_index (constructor_elt
, CONSTRUCTOR_ELTS (expr
), 0)->index
4489 == TYPE_FIELDS (etype
))
4491 = VEC_index (constructor_elt
, CONSTRUCTOR_ELTS (expr
), 0)->value
;
4493 /* Otherwise, build an explicit component reference. */
4496 = build_component_ref (expr
, NULL_TREE
, TYPE_FIELDS (etype
), false);
4498 return convert (type
, unpadded
);
4501 /* If the input is a biased type, adjust first. */
4502 if (ecode
== INTEGER_TYPE
&& TYPE_BIASED_REPRESENTATION_P (etype
))
4503 return convert (type
, fold_build2 (PLUS_EXPR
, TREE_TYPE (etype
),
4504 fold_convert (TREE_TYPE (etype
),
4506 TYPE_MIN_VALUE (etype
)));
4508 /* If the input is a justified modular type, we need to extract the actual
4509 object before converting it to any other type with the exceptions of an
4510 unconstrained array or of a mere type variant. It is useful to avoid the
4511 extraction and conversion in the type variant case because it could end
4512 up replacing a VAR_DECL expr by a constructor and we might be about the
4513 take the address of the result. */
4514 if (ecode
== RECORD_TYPE
&& TYPE_JUSTIFIED_MODULAR_P (etype
)
4515 && code
!= UNCONSTRAINED_ARRAY_TYPE
4516 && TYPE_MAIN_VARIANT (type
) != TYPE_MAIN_VARIANT (etype
))
4517 return convert (type
, build_component_ref (expr
, NULL_TREE
,
4518 TYPE_FIELDS (etype
), false));
4520 /* If converting to a type that contains a template, convert to the data
4521 type and then build the template. */
4522 if (code
== RECORD_TYPE
&& TYPE_CONTAINS_TEMPLATE_P (type
))
4524 tree obj_type
= TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (type
)));
4525 VEC(constructor_elt
,gc
) *v
= VEC_alloc (constructor_elt
, gc
, 2);
4527 /* If the source already has a template, get a reference to the
4528 associated array only, as we are going to rebuild a template
4529 for the target type anyway. */
4530 expr
= maybe_unconstrained_array (expr
);
4532 CONSTRUCTOR_APPEND_ELT (v
, TYPE_FIELDS (type
),
4533 build_template (TREE_TYPE (TYPE_FIELDS (type
)),
4534 obj_type
, NULL_TREE
));
4535 CONSTRUCTOR_APPEND_ELT (v
, DECL_CHAIN (TYPE_FIELDS (type
)),
4536 convert (obj_type
, expr
));
4537 return gnat_build_constructor (type
, v
);
4540 /* There are some cases of expressions that we process specially. */
4541 switch (TREE_CODE (expr
))
4547 /* Just set its type here. For TRANSFORM_EXPR, we will do the actual
4548 conversion in gnat_expand_expr. NULL_EXPR does not represent
4549 and actual value, so no conversion is needed. */
4550 expr
= copy_node (expr
);
4551 TREE_TYPE (expr
) = type
;
4555 /* If we are converting a STRING_CST to another constrained array type,
4556 just make a new one in the proper type. */
4557 if (code
== ecode
&& AGGREGATE_TYPE_P (etype
)
4558 && !(TREE_CODE (TYPE_SIZE (etype
)) == INTEGER_CST
4559 && TREE_CODE (TYPE_SIZE (type
)) != INTEGER_CST
))
4561 expr
= copy_node (expr
);
4562 TREE_TYPE (expr
) = type
;
4568 /* If we are converting a VECTOR_CST to a mere variant type, just make
4569 a new one in the proper type. */
4570 if (code
== ecode
&& gnat_types_compatible_p (type
, etype
))
4572 expr
= copy_node (expr
);
4573 TREE_TYPE (expr
) = type
;
4578 /* If we are converting a CONSTRUCTOR to a mere variant type, just make
4579 a new one in the proper type. */
4580 if (code
== ecode
&& gnat_types_compatible_p (type
, etype
))
4582 expr
= copy_node (expr
);
4583 TREE_TYPE (expr
) = type
;
4584 CONSTRUCTOR_ELTS (expr
)
4585 = VEC_copy (constructor_elt
, gc
, CONSTRUCTOR_ELTS (expr
));
4589 /* Likewise for a conversion between original and packable version, or
4590 conversion between types of the same size and with the same list of
4591 fields, but we have to work harder to preserve type consistency. */
4593 && code
== RECORD_TYPE
4594 && (TYPE_NAME (type
) == TYPE_NAME (etype
)
4595 || tree_int_cst_equal (TYPE_SIZE (type
), TYPE_SIZE (etype
))))
4598 VEC(constructor_elt
,gc
) *e
= CONSTRUCTOR_ELTS (expr
);
4599 unsigned HOST_WIDE_INT len
= VEC_length (constructor_elt
, e
);
4600 VEC(constructor_elt
,gc
) *v
= VEC_alloc (constructor_elt
, gc
, len
);
4601 tree efield
= TYPE_FIELDS (etype
), field
= TYPE_FIELDS (type
);
4602 unsigned HOST_WIDE_INT idx
;
4605 /* Whether we need to clear TREE_CONSTANT et al. on the output
4606 constructor when we convert in place. */
4607 bool clear_constant
= false;
4609 FOR_EACH_CONSTRUCTOR_ELT(e
, idx
, index
, value
)
4611 constructor_elt
*elt
;
4612 /* We expect only simple constructors. */
4613 if (!SAME_FIELD_P (index
, efield
))
4615 /* The field must be the same. */
4616 if (!SAME_FIELD_P (efield
, field
))
4618 elt
= VEC_quick_push (constructor_elt
, v
, NULL
);
4620 elt
->value
= convert (TREE_TYPE (field
), value
);
4622 /* If packing has made this field a bitfield and the input
4623 value couldn't be emitted statically any more, we need to
4624 clear TREE_CONSTANT on our output. */
4626 && TREE_CONSTANT (expr
)
4627 && !CONSTRUCTOR_BITFIELD_P (efield
)
4628 && CONSTRUCTOR_BITFIELD_P (field
)
4629 && !initializer_constant_valid_for_bitfield_p (value
))
4630 clear_constant
= true;
4632 efield
= DECL_CHAIN (efield
);
4633 field
= DECL_CHAIN (field
);
4636 /* If we have been able to match and convert all the input fields
4637 to their output type, convert in place now. We'll fallback to a
4638 view conversion downstream otherwise. */
4641 expr
= copy_node (expr
);
4642 TREE_TYPE (expr
) = type
;
4643 CONSTRUCTOR_ELTS (expr
) = v
;
4645 TREE_CONSTANT (expr
) = TREE_STATIC (expr
) = 0;
4650 /* Likewise for a conversion between array type and vector type with a
4651 compatible representative array. */
4652 else if (code
== VECTOR_TYPE
4653 && ecode
== ARRAY_TYPE
4654 && gnat_types_compatible_p (TYPE_REPRESENTATIVE_ARRAY (type
),
4657 VEC(constructor_elt
,gc
) *e
= CONSTRUCTOR_ELTS (expr
);
4658 unsigned HOST_WIDE_INT len
= VEC_length (constructor_elt
, e
);
4659 VEC(constructor_elt
,gc
) *v
;
4660 unsigned HOST_WIDE_INT ix
;
4663 /* Build a VECTOR_CST from a *constant* array constructor. */
4664 if (TREE_CONSTANT (expr
))
4666 bool constant_p
= true;
4668 /* Iterate through elements and check if all constructor
4669 elements are *_CSTs. */
4670 FOR_EACH_CONSTRUCTOR_VALUE (e
, ix
, value
)
4671 if (!CONSTANT_CLASS_P (value
))
4678 return build_vector_from_ctor (type
,
4679 CONSTRUCTOR_ELTS (expr
));
4682 /* Otherwise, build a regular vector constructor. */
4683 v
= VEC_alloc (constructor_elt
, gc
, len
);
4684 FOR_EACH_CONSTRUCTOR_VALUE (e
, ix
, value
)
4686 constructor_elt
*elt
= VEC_quick_push (constructor_elt
, v
, NULL
);
4687 elt
->index
= NULL_TREE
;
4690 expr
= copy_node (expr
);
4691 TREE_TYPE (expr
) = type
;
4692 CONSTRUCTOR_ELTS (expr
) = v
;
4697 case UNCONSTRAINED_ARRAY_REF
:
4698 /* First retrieve the underlying array. */
4699 expr
= maybe_unconstrained_array (expr
);
4700 etype
= TREE_TYPE (expr
);
4701 ecode
= TREE_CODE (etype
);
4704 case VIEW_CONVERT_EXPR
:
4706 /* GCC 4.x is very sensitive to type consistency overall, and view
4707 conversions thus are very frequent. Even though just "convert"ing
4708 the inner operand to the output type is fine in most cases, it
4709 might expose unexpected input/output type mismatches in special
4710 circumstances so we avoid such recursive calls when we can. */
4711 tree op0
= TREE_OPERAND (expr
, 0);
4713 /* If we are converting back to the original type, we can just
4714 lift the input conversion. This is a common occurrence with
4715 switches back-and-forth amongst type variants. */
4716 if (type
== TREE_TYPE (op0
))
4719 /* Otherwise, if we're converting between two aggregate or vector
4720 types, we might be allowed to substitute the VIEW_CONVERT_EXPR
4721 target type in place or to just convert the inner expression. */
4722 if ((AGGREGATE_TYPE_P (type
) && AGGREGATE_TYPE_P (etype
))
4723 || (VECTOR_TYPE_P (type
) && VECTOR_TYPE_P (etype
)))
4725 /* If we are converting between mere variants, we can just
4726 substitute the VIEW_CONVERT_EXPR in place. */
4727 if (gnat_types_compatible_p (type
, etype
))
4728 return build1 (VIEW_CONVERT_EXPR
, type
, op0
);
4730 /* Otherwise, we may just bypass the input view conversion unless
4731 one of the types is a fat pointer, which is handled by
4732 specialized code below which relies on exact type matching. */
4733 else if (!TYPE_IS_FAT_POINTER_P (type
)
4734 && !TYPE_IS_FAT_POINTER_P (etype
))
4735 return convert (type
, op0
);
4745 /* Check for converting to a pointer to an unconstrained array. */
4746 if (TYPE_IS_FAT_POINTER_P (type
) && !TYPE_IS_FAT_POINTER_P (etype
))
4747 return convert_to_fat_pointer (type
, expr
);
4749 /* If we are converting between two aggregate or vector types that are mere
4750 variants, just make a VIEW_CONVERT_EXPR. Likewise when we are converting
4751 to a vector type from its representative array type. */
4752 else if ((code
== ecode
4753 && (AGGREGATE_TYPE_P (type
) || VECTOR_TYPE_P (type
))
4754 && gnat_types_compatible_p (type
, etype
))
4755 || (code
== VECTOR_TYPE
4756 && ecode
== ARRAY_TYPE
4757 && gnat_types_compatible_p (TYPE_REPRESENTATIVE_ARRAY (type
),
4759 return build1 (VIEW_CONVERT_EXPR
, type
, expr
);
4761 /* If we are converting between tagged types, try to upcast properly. */
4762 else if (ecode
== RECORD_TYPE
&& code
== RECORD_TYPE
4763 && TYPE_ALIGN_OK (etype
) && TYPE_ALIGN_OK (type
))
4765 tree child_etype
= etype
;
4767 tree field
= TYPE_FIELDS (child_etype
);
4768 if (DECL_NAME (field
) == parent_name_id
&& TREE_TYPE (field
) == type
)
4769 return build_component_ref (expr
, NULL_TREE
, field
, false);
4770 child_etype
= TREE_TYPE (field
);
4771 } while (TREE_CODE (child_etype
) == RECORD_TYPE
);
4774 /* If we are converting from a smaller form of record type back to it, just
4775 make a VIEW_CONVERT_EXPR. But first pad the expression to have the same
4776 size on both sides. */
4777 else if (ecode
== RECORD_TYPE
&& code
== RECORD_TYPE
4778 && smaller_form_type_p (etype
, type
))
4780 expr
= convert (maybe_pad_type (etype
, TYPE_SIZE (type
), 0, Empty
,
4781 false, false, false, true),
4783 return build1 (VIEW_CONVERT_EXPR
, type
, expr
);
4786 /* In all other cases of related types, make a NOP_EXPR. */
4787 else if (TYPE_MAIN_VARIANT (type
) == TYPE_MAIN_VARIANT (etype
))
4788 return fold_convert (type
, expr
);
4793 return fold_build1 (CONVERT_EXPR
, type
, expr
);
4796 if (TYPE_HAS_ACTUAL_BOUNDS_P (type
)
4797 && (ecode
== ARRAY_TYPE
|| ecode
== UNCONSTRAINED_ARRAY_TYPE
4798 || (ecode
== RECORD_TYPE
&& TYPE_CONTAINS_TEMPLATE_P (etype
))))
4799 return unchecked_convert (type
, expr
, false);
4800 else if (TYPE_BIASED_REPRESENTATION_P (type
))
4801 return fold_convert (type
,
4802 fold_build2 (MINUS_EXPR
, TREE_TYPE (type
),
4803 convert (TREE_TYPE (type
), expr
),
4804 TYPE_MIN_VALUE (type
)));
4806 /* ... fall through ... */
4810 /* If we are converting an additive expression to an integer type
4811 with lower precision, be wary of the optimization that can be
4812 applied by convert_to_integer. There are 2 problematic cases:
4813 - if the first operand was originally of a biased type,
4814 because we could be recursively called to convert it
4815 to an intermediate type and thus rematerialize the
4816 additive operator endlessly,
4817 - if the expression contains a placeholder, because an
4818 intermediate conversion that changes the sign could
4819 be inserted and thus introduce an artificial overflow
4820 at compile time when the placeholder is substituted. */
4821 if (code
== INTEGER_TYPE
4822 && ecode
== INTEGER_TYPE
4823 && TYPE_PRECISION (type
) < TYPE_PRECISION (etype
)
4824 && (TREE_CODE (expr
) == PLUS_EXPR
|| TREE_CODE (expr
) == MINUS_EXPR
))
4826 tree op0
= get_unwidened (TREE_OPERAND (expr
, 0), type
);
4828 if ((TREE_CODE (TREE_TYPE (op0
)) == INTEGER_TYPE
4829 && TYPE_BIASED_REPRESENTATION_P (TREE_TYPE (op0
)))
4830 || CONTAINS_PLACEHOLDER_P (expr
))
4831 return build1 (NOP_EXPR
, type
, expr
);
4834 return fold (convert_to_integer (type
, expr
));
4837 case REFERENCE_TYPE
:
4838 /* If converting between two thin pointers, adjust if needed to account
4839 for differing offsets from the base pointer, depending on whether
4840 there is a TYPE_UNCONSTRAINED_ARRAY attached to the record type. */
4841 if (TYPE_IS_THIN_POINTER_P (etype
) && TYPE_IS_THIN_POINTER_P (type
))
4844 = TYPE_UNCONSTRAINED_ARRAY (TREE_TYPE (etype
)) != NULL_TREE
4845 ? byte_position (DECL_CHAIN (TYPE_FIELDS (TREE_TYPE (etype
))))
4848 = TYPE_UNCONSTRAINED_ARRAY (TREE_TYPE (type
)) != NULL_TREE
4849 ? byte_position (DECL_CHAIN (TYPE_FIELDS (TREE_TYPE (type
))))
4851 tree byte_diff
= size_diffop (type_pos
, etype_pos
);
4853 expr
= build1 (NOP_EXPR
, type
, expr
);
4854 if (integer_zerop (byte_diff
))
4857 return build_binary_op (POINTER_PLUS_EXPR
, type
, expr
,
4858 fold_convert (sizetype
, byte_diff
));
4861 /* If converting fat pointer to normal or thin pointer, get the pointer
4862 to the array and then convert it. */
4863 if (TYPE_IS_FAT_POINTER_P (etype
))
4865 = build_component_ref (expr
, NULL_TREE
, TYPE_FIELDS (etype
), false);
4867 return fold (convert_to_pointer (type
, expr
));
4870 return fold (convert_to_real (type
, expr
));
4873 if (TYPE_JUSTIFIED_MODULAR_P (type
) && !AGGREGATE_TYPE_P (etype
))
4875 VEC(constructor_elt
,gc
) *v
= VEC_alloc (constructor_elt
, gc
, 1);
4877 CONSTRUCTOR_APPEND_ELT (v
, TYPE_FIELDS (type
),
4878 convert (TREE_TYPE (TYPE_FIELDS (type
)),
4880 return gnat_build_constructor (type
, v
);
4883 /* ... fall through ... */
4886 /* In these cases, assume the front-end has validated the conversion.
4887 If the conversion is valid, it will be a bit-wise conversion, so
4888 it can be viewed as an unchecked conversion. */
4889 return unchecked_convert (type
, expr
, false);
4892 /* This is a either a conversion between a tagged type and some
4893 subtype, which we have to mark as a UNION_TYPE because of
4894 overlapping fields or a conversion of an Unchecked_Union. */
4895 return unchecked_convert (type
, expr
, false);
4897 case UNCONSTRAINED_ARRAY_TYPE
:
4898 /* If the input is a VECTOR_TYPE, convert to the representative
4899 array type first. */
4900 if (ecode
== VECTOR_TYPE
)
4902 expr
= convert (TYPE_REPRESENTATIVE_ARRAY (etype
), expr
);
4903 etype
= TREE_TYPE (expr
);
4904 ecode
= TREE_CODE (etype
);
4907 /* If EXPR is a constrained array, take its address, convert it to a
4908 fat pointer, and then dereference it. Likewise if EXPR is a
4909 record containing both a template and a constrained array.
4910 Note that a record representing a justified modular type
4911 always represents a packed constrained array. */
4912 if (ecode
== ARRAY_TYPE
4913 || (ecode
== INTEGER_TYPE
&& TYPE_HAS_ACTUAL_BOUNDS_P (etype
))
4914 || (ecode
== RECORD_TYPE
&& TYPE_CONTAINS_TEMPLATE_P (etype
))
4915 || (ecode
== RECORD_TYPE
&& TYPE_JUSTIFIED_MODULAR_P (etype
)))
4918 (INDIRECT_REF
, NULL_TREE
,
4919 convert_to_fat_pointer (TREE_TYPE (type
),
4920 build_unary_op (ADDR_EXPR
,
4923 /* Do something very similar for converting one unconstrained
4924 array to another. */
4925 else if (ecode
== UNCONSTRAINED_ARRAY_TYPE
)
4927 build_unary_op (INDIRECT_REF
, NULL_TREE
,
4928 convert (TREE_TYPE (type
),
4929 build_unary_op (ADDR_EXPR
,
4935 return fold (convert_to_complex (type
, expr
));
4942 /* Create an expression whose value is that of EXPR converted to the common
4943 index type, which is sizetype. EXPR is supposed to be in the base type
4944 of the GNAT index type. Calling it is equivalent to doing
4946 convert (sizetype, expr)
4948 but we try to distribute the type conversion with the knowledge that EXPR
4949 cannot overflow in its type. This is a best-effort approach and we fall
4950 back to the above expression as soon as difficulties are encountered.
4952 This is necessary to overcome issues that arise when the GNAT base index
4953 type and the GCC common index type (sizetype) don't have the same size,
4954 which is quite frequent on 64-bit architectures. In this case, and if
4955 the GNAT base index type is signed but the iteration type of the loop has
4956 been forced to unsigned, the loop scalar evolution engine cannot compute
4957 a simple evolution for the general induction variables associated with the
4958 array indices, because it will preserve the wrap-around semantics in the
4959 unsigned type of their "inner" part. As a result, many loop optimizations are disabled.
4962 The solution is to use a special (basic) induction variable that is at
4963 least as large as sizetype, and to express the aforementioned general
4964 induction variables in terms of this induction variable, eliminating
4965 the problematic intermediate truncation to the GNAT base index type.
4966 This is possible as long as the original expression doesn't overflow
4967 and if the middle-end hasn't introduced artificial overflows in the
4968 course of the various simplification it can make to the expression. */
4971 convert_to_index_type (tree expr
)
4973 enum tree_code code
= TREE_CODE (expr
);
4974 tree type
= TREE_TYPE (expr
);
4976 /* If the type is unsigned, overflow is allowed so we cannot be sure that
4977 EXPR doesn't overflow. Keep it simple if optimization is disabled. */
4978 if (TYPE_UNSIGNED (type
) || !optimize
)
4979 return convert (sizetype
, expr
);
4984 /* The main effect of the function: replace a loop parameter with its
4985 associated special induction variable. */
4986 if (DECL_LOOP_PARM_P (expr
) && DECL_INDUCTION_VAR (expr
))
4987 expr
= DECL_INDUCTION_VAR (expr
);
4992 tree otype
= TREE_TYPE (TREE_OPERAND (expr
, 0));
4993 /* Bail out as soon as we suspect some sort of type frobbing. */
4994 if (TYPE_PRECISION (type
) != TYPE_PRECISION (otype
)
4995 || TYPE_UNSIGNED (type
) != TYPE_UNSIGNED (otype
))
4999 /* ... fall through ... */
5001 case NON_LVALUE_EXPR
:
5002 return fold_build1 (code
, sizetype
,
5003 convert_to_index_type (TREE_OPERAND (expr
, 0)));
5008 return fold_build2 (code
, sizetype
,
5009 convert_to_index_type (TREE_OPERAND (expr
, 0)),
5010 convert_to_index_type (TREE_OPERAND (expr
, 1)));
5013 return fold_build2 (code
, sizetype
, TREE_OPERAND (expr
, 0),
5014 convert_to_index_type (TREE_OPERAND (expr
, 1)));
5017 return fold_build3 (code
, sizetype
, TREE_OPERAND (expr
, 0),
5018 convert_to_index_type (TREE_OPERAND (expr
, 1)),
5019 convert_to_index_type (TREE_OPERAND (expr
, 2)));
5025 return convert (sizetype
, expr
);
5028 /* Remove all conversions that are done in EXP. This includes converting
5029 from a padded type or to a justified modular type. If TRUE_ADDRESS
5030 is true, always return the address of the containing object even if
5031 the address is not bit-aligned. */
5034 remove_conversions (tree exp
, bool true_address
)
5036 switch (TREE_CODE (exp
))
5040 && TREE_CODE (TREE_TYPE (exp
)) == RECORD_TYPE
5041 && TYPE_JUSTIFIED_MODULAR_P (TREE_TYPE (exp
)))
5043 remove_conversions (VEC_index (constructor_elt
,
5044 CONSTRUCTOR_ELTS (exp
), 0)->value
,
5049 if (TYPE_IS_PADDING_P (TREE_TYPE (TREE_OPERAND (exp
, 0))))
5050 return remove_conversions (TREE_OPERAND (exp
, 0), true_address
);
5054 case VIEW_CONVERT_EXPR
:
5055 case NON_LVALUE_EXPR
:
5056 return remove_conversions (TREE_OPERAND (exp
, 0), true_address
);
5065 /* If EXP's type is an UNCONSTRAINED_ARRAY_TYPE, return an expression that
5066 refers to the underlying array. If it has TYPE_CONTAINS_TEMPLATE_P,
5067 likewise return an expression pointing to the underlying array. */
5070 maybe_unconstrained_array (tree exp
)
5072 enum tree_code code
= TREE_CODE (exp
);
5073 tree type
= TREE_TYPE (exp
);
5075 switch (TREE_CODE (type
))
5077 case UNCONSTRAINED_ARRAY_TYPE
:
5078 if (code
== UNCONSTRAINED_ARRAY_REF
)
5080 const bool read_only
= TREE_READONLY (exp
);
5081 const bool no_trap
= TREE_THIS_NOTRAP (exp
);
5083 exp
= TREE_OPERAND (exp
, 0);
5084 type
= TREE_TYPE (exp
);
5086 if (TREE_CODE (exp
) == COND_EXPR
)
5089 = build_unary_op (INDIRECT_REF
, NULL_TREE
,
5090 build_component_ref (TREE_OPERAND (exp
, 1),
5095 = build_unary_op (INDIRECT_REF
, NULL_TREE
,
5096 build_component_ref (TREE_OPERAND (exp
, 2),
5101 exp
= build3 (COND_EXPR
,
5102 TREE_TYPE (TREE_TYPE (TYPE_FIELDS (type
))),
5103 TREE_OPERAND (exp
, 0), op1
, op2
);
5107 exp
= build_unary_op (INDIRECT_REF
, NULL_TREE
,
5108 build_component_ref (exp
, NULL_TREE
,
5111 TREE_READONLY (exp
) = read_only
;
5112 TREE_THIS_NOTRAP (exp
) = no_trap
;
5116 else if (code
== NULL_EXPR
)
5117 exp
= build1 (NULL_EXPR
,
5118 TREE_TYPE (TREE_TYPE (TYPE_FIELDS (TREE_TYPE (type
)))),
5119 TREE_OPERAND (exp
, 0));
5123 /* If this is a padded type and it contains a template, convert to the
5124 unpadded type first. */
5125 if (TYPE_PADDING_P (type
)
5126 && TREE_CODE (TREE_TYPE (TYPE_FIELDS (type
))) == RECORD_TYPE
5127 && TYPE_CONTAINS_TEMPLATE_P (TREE_TYPE (TYPE_FIELDS (type
))))
5129 exp
= convert (TREE_TYPE (TYPE_FIELDS (type
)), exp
);
5130 type
= TREE_TYPE (exp
);
5133 if (TYPE_CONTAINS_TEMPLATE_P (type
))
5135 exp
= build_component_ref (exp
, NULL_TREE
,
5136 DECL_CHAIN (TYPE_FIELDS (type
)),
5138 type
= TREE_TYPE (exp
);
5140 /* If the array type is padded, convert to the unpadded type. */
5141 if (TYPE_IS_PADDING_P (type
))
5142 exp
= convert (TREE_TYPE (TYPE_FIELDS (type
)), exp
);
5153 /* Return true if EXPR is an expression that can be folded as an operand
5154 of a VIEW_CONVERT_EXPR. See ada-tree.h for a complete rationale. */
5157 can_fold_for_view_convert_p (tree expr
)
5161 /* The folder will fold NOP_EXPRs between integral types with the same
5162 precision (in the middle-end's sense). We cannot allow it if the
5163 types don't have the same precision in the Ada sense as well. */
5164 if (TREE_CODE (expr
) != NOP_EXPR
)
5167 t1
= TREE_TYPE (expr
);
5168 t2
= TREE_TYPE (TREE_OPERAND (expr
, 0));
5170 /* Defer to the folder for non-integral conversions. */
5171 if (!(INTEGRAL_TYPE_P (t1
) && INTEGRAL_TYPE_P (t2
)))
5174 /* Only fold conversions that preserve both precisions. */
5175 if (TYPE_PRECISION (t1
) == TYPE_PRECISION (t2
)
5176 && operand_equal_p (rm_size (t1
), rm_size (t2
), 0))
5182 /* Return an expression that does an unchecked conversion of EXPR to TYPE.
5183 If NOTRUNC_P is true, truncation operations should be suppressed.
5185 Special care is required with (source or target) integral types whose
5186 precision is not equal to their size, to make sure we fetch or assign
5187 the value bits whose location might depend on the endianness, e.g.
5189 Rmsize : constant := 8;
5190 subtype Int is Integer range 0 .. 2 ** Rmsize - 1;
5192 type Bit_Array is array (1 .. Rmsize) of Boolean;
5193 pragma Pack (Bit_Array);
5195 function To_Bit_Array is new Unchecked_Conversion (Int, Bit_Array);
5197 Value : Int := 2#1000_0001#;
5198 Vbits : Bit_Array := To_Bit_Array (Value);
5200 we expect the 8 bits at Vbits'Address to always contain Value, while
5201 their original location depends on the endianness, at Value'Address
5202 on a little-endian architecture but not on a big-endian one. */
5205 unchecked_convert (tree type
, tree expr
, bool notrunc_p
)
5207 tree etype
= TREE_TYPE (expr
);
5208 enum tree_code ecode
= TREE_CODE (etype
);
5209 enum tree_code code
= TREE_CODE (type
);
5212 /* If the expression is already of the right type, we are done. */
5216 /* If both types are integral just do a normal conversion.
5217 Likewise for a conversion to an unconstrained array. */
5218 if ((((INTEGRAL_TYPE_P (type
)
5219 && !(code
== INTEGER_TYPE
&& TYPE_VAX_FLOATING_POINT_P (type
)))
5220 || (POINTER_TYPE_P (type
) && !TYPE_IS_THIN_POINTER_P (type
))
5221 || (code
== RECORD_TYPE
&& TYPE_JUSTIFIED_MODULAR_P (type
)))
5222 && ((INTEGRAL_TYPE_P (etype
)
5223 && !(ecode
== INTEGER_TYPE
&& TYPE_VAX_FLOATING_POINT_P (etype
)))
5224 || (POINTER_TYPE_P (etype
) && !TYPE_IS_THIN_POINTER_P (etype
))
5225 || (ecode
== RECORD_TYPE
&& TYPE_JUSTIFIED_MODULAR_P (etype
))))
5226 || code
== UNCONSTRAINED_ARRAY_TYPE
)
5228 if (ecode
== INTEGER_TYPE
&& TYPE_BIASED_REPRESENTATION_P (etype
))
5230 tree ntype
= copy_type (etype
);
5231 TYPE_BIASED_REPRESENTATION_P (ntype
) = 0;
5232 TYPE_MAIN_VARIANT (ntype
) = ntype
;
5233 expr
= build1 (NOP_EXPR
, ntype
, expr
);
5236 if (code
== INTEGER_TYPE
&& TYPE_BIASED_REPRESENTATION_P (type
))
5238 tree rtype
= copy_type (type
);
5239 TYPE_BIASED_REPRESENTATION_P (rtype
) = 0;
5240 TYPE_MAIN_VARIANT (rtype
) = rtype
;
5241 expr
= convert (rtype
, expr
);
5242 expr
= build1 (NOP_EXPR
, type
, expr
);
5245 expr
= convert (type
, expr
);
5248 /* If we are converting to an integral type whose precision is not equal
5249 to its size, first unchecked convert to a record type that contains an
5250 field of the given precision. Then extract the field. */
5251 else if (INTEGRAL_TYPE_P (type
)
5252 && TYPE_RM_SIZE (type
)
5253 && 0 != compare_tree_int (TYPE_RM_SIZE (type
),
5254 GET_MODE_BITSIZE (TYPE_MODE (type
))))
5256 tree rec_type
= make_node (RECORD_TYPE
);
5257 unsigned HOST_WIDE_INT prec
= TREE_INT_CST_LOW (TYPE_RM_SIZE (type
));
5258 tree field_type
, field
;
5260 if (TYPE_UNSIGNED (type
))
5261 field_type
= make_unsigned_type (prec
);
5263 field_type
= make_signed_type (prec
);
5264 SET_TYPE_RM_SIZE (field_type
, TYPE_RM_SIZE (type
));
5266 field
= create_field_decl (get_identifier ("OBJ"), field_type
, rec_type
,
5267 NULL_TREE
, NULL_TREE
, 1, 0);
5269 TYPE_FIELDS (rec_type
) = field
;
5270 layout_type (rec_type
);
5272 expr
= unchecked_convert (rec_type
, expr
, notrunc_p
);
5273 expr
= build_component_ref (expr
, NULL_TREE
, field
, false);
5274 expr
= fold_build1 (NOP_EXPR
, type
, expr
);
5277 /* Similarly if we are converting from an integral type whose precision is
5278 not equal to its size, first copy into a field of the given precision
5279 and unchecked convert the record type. */
5280 else if (INTEGRAL_TYPE_P (etype
)
5281 && TYPE_RM_SIZE (etype
)
5282 && 0 != compare_tree_int (TYPE_RM_SIZE (etype
),
5283 GET_MODE_BITSIZE (TYPE_MODE (etype
))))
5285 tree rec_type
= make_node (RECORD_TYPE
);
5286 unsigned HOST_WIDE_INT prec
= TREE_INT_CST_LOW (TYPE_RM_SIZE (etype
));
5287 VEC(constructor_elt
,gc
) *v
= VEC_alloc (constructor_elt
, gc
, 1);
5288 tree field_type
, field
;
5290 if (TYPE_UNSIGNED (etype
))
5291 field_type
= make_unsigned_type (prec
);
5293 field_type
= make_signed_type (prec
);
5294 SET_TYPE_RM_SIZE (field_type
, TYPE_RM_SIZE (etype
));
5296 field
= create_field_decl (get_identifier ("OBJ"), field_type
, rec_type
,
5297 NULL_TREE
, NULL_TREE
, 1, 0);
5299 TYPE_FIELDS (rec_type
) = field
;
5300 layout_type (rec_type
);
5302 expr
= fold_build1 (NOP_EXPR
, field_type
, expr
);
5303 CONSTRUCTOR_APPEND_ELT (v
, field
, expr
);
5304 expr
= gnat_build_constructor (rec_type
, v
);
5305 expr
= unchecked_convert (type
, expr
, notrunc_p
);
5308 /* If we are converting from a scalar type to a type with a different size,
5309 we need to pad to have the same size on both sides.
5311 ??? We cannot do it unconditionally because unchecked conversions are
5312 used liberally by the front-end to implement polymorphism, e.g. in:
5314 S191s : constant ada__tags__addr_ptr := ada__tags__addr_ptr!(S190s);
5315 return p___size__4 (p__object!(S191s.all));
5317 so we skip all expressions that are references. */
5318 else if (!REFERENCE_CLASS_P (expr
)
5319 && !AGGREGATE_TYPE_P (etype
)
5320 && TREE_CODE (TYPE_SIZE (type
)) == INTEGER_CST
5321 && (c
= tree_int_cst_compare (TYPE_SIZE (etype
), TYPE_SIZE (type
))))
5325 expr
= convert (maybe_pad_type (etype
, TYPE_SIZE (type
), 0, Empty
,
5326 false, false, false, true),
5328 expr
= unchecked_convert (type
, expr
, notrunc_p
);
5332 tree rec_type
= maybe_pad_type (type
, TYPE_SIZE (etype
), 0, Empty
,
5333 false, false, false, true);
5334 expr
= unchecked_convert (rec_type
, expr
, notrunc_p
);
5335 expr
= build_component_ref (expr
, NULL_TREE
, TYPE_FIELDS (rec_type
),
5340 /* We have a special case when we are converting between two unconstrained
5341 array types. In that case, take the address, convert the fat pointer
5342 types, and dereference. */
5343 else if (ecode
== code
&& code
== UNCONSTRAINED_ARRAY_TYPE
)
5344 expr
= build_unary_op (INDIRECT_REF
, NULL_TREE
,
5345 build1 (VIEW_CONVERT_EXPR
, TREE_TYPE (type
),
5346 build_unary_op (ADDR_EXPR
, NULL_TREE
,
5349 /* Another special case is when we are converting to a vector type from its
5350 representative array type; this is a regular conversion. */
5351 else if (code
== VECTOR_TYPE
5352 && ecode
== ARRAY_TYPE
5353 && gnat_types_compatible_p (TYPE_REPRESENTATIVE_ARRAY (type
),
5355 expr
= convert (type
, expr
);
5359 expr
= maybe_unconstrained_array (expr
);
5360 etype
= TREE_TYPE (expr
);
5361 ecode
= TREE_CODE (etype
);
5362 if (can_fold_for_view_convert_p (expr
))
5363 expr
= fold_build1 (VIEW_CONVERT_EXPR
, type
, expr
);
5365 expr
= build1 (VIEW_CONVERT_EXPR
, type
, expr
);
5368 /* If the result is an integral type whose precision is not equal to its
5369 size, sign- or zero-extend the result. We need not do this if the input
5370 is an integral type of the same precision and signedness or if the output
5371 is a biased type or if both the input and output are unsigned. */
5373 && INTEGRAL_TYPE_P (type
) && TYPE_RM_SIZE (type
)
5374 && !(code
== INTEGER_TYPE
&& TYPE_BIASED_REPRESENTATION_P (type
))
5375 && 0 != compare_tree_int (TYPE_RM_SIZE (type
),
5376 GET_MODE_BITSIZE (TYPE_MODE (type
)))
5377 && !(INTEGRAL_TYPE_P (etype
)
5378 && TYPE_UNSIGNED (type
) == TYPE_UNSIGNED (etype
)
5379 && operand_equal_p (TYPE_RM_SIZE (type
),
5380 (TYPE_RM_SIZE (etype
) != 0
5381 ? TYPE_RM_SIZE (etype
) : TYPE_SIZE (etype
)),
5383 && !(TYPE_UNSIGNED (type
) && TYPE_UNSIGNED (etype
)))
5386 = gnat_type_for_mode (TYPE_MODE (type
), TYPE_UNSIGNED (type
));
5388 = convert (base_type
,
5389 size_binop (MINUS_EXPR
,
5391 (GET_MODE_BITSIZE (TYPE_MODE (type
))),
5392 TYPE_RM_SIZE (type
)));
5395 build_binary_op (RSHIFT_EXPR
, base_type
,
5396 build_binary_op (LSHIFT_EXPR
, base_type
,
5397 convert (base_type
, expr
),
5402 /* An unchecked conversion should never raise Constraint_Error. The code
5403 below assumes that GCC's conversion routines overflow the same way that
5404 the underlying hardware does. This is probably true. In the rare case
5405 when it is false, we can rely on the fact that such conversions are
5406 erroneous anyway. */
5407 if (TREE_CODE (expr
) == INTEGER_CST
)
5408 TREE_OVERFLOW (expr
) = 0;
5410 /* If the sizes of the types differ and this is a VIEW_CONVERT_EXPR,
5411 show no longer constant. */
5412 if (TREE_CODE (expr
) == VIEW_CONVERT_EXPR
5413 && !operand_equal_p (TYPE_SIZE_UNIT (type
), TYPE_SIZE_UNIT (etype
),
5415 TREE_CONSTANT (expr
) = 0;
5420 /* Return the appropriate GCC tree code for the specified GNAT_TYPE,
5421 the latter being a record type as predicated by Is_Record_Type. */
5424 tree_code_for_record_type (Entity_Id gnat_type
)
5426 Node_Id component_list
, component
;
5428 /* Return UNION_TYPE if it's an Unchecked_Union whose non-discriminant
5429 fields are all in the variant part. Otherwise, return RECORD_TYPE. */
5430 if (!Is_Unchecked_Union (gnat_type
))
5433 gnat_type
= Implementation_Base_Type (gnat_type
);
5435 = Component_List (Type_Definition (Declaration_Node (gnat_type
)));
5437 for (component
= First_Non_Pragma (Component_Items (component_list
));
5438 Present (component
);
5439 component
= Next_Non_Pragma (component
))
5440 if (Ekind (Defining_Entity (component
)) == E_Component
)
5446 /* Return true if GNAT_TYPE is a "double" floating-point type, i.e. whose
5447 size is equal to 64 bits, or an array of such a type. Set ALIGN_CLAUSE
5448 according to the presence of an alignment clause on the type or, if it
5449 is an array, on the component type. */
5452 is_double_float_or_array (Entity_Id gnat_type
, bool *align_clause
)
5454 gnat_type
= Underlying_Type (gnat_type
);
5456 *align_clause
= Present (Alignment_Clause (gnat_type
));
5458 if (Is_Array_Type (gnat_type
))
5460 gnat_type
= Underlying_Type (Component_Type (gnat_type
));
5461 if (Present (Alignment_Clause (gnat_type
)))
5462 *align_clause
= true;
5465 if (!Is_Floating_Point_Type (gnat_type
))
5468 if (UI_To_Int (Esize (gnat_type
)) != 64)
5474 /* Return true if GNAT_TYPE is a "double" or larger scalar type, i.e. whose
5475 size is greater or equal to 64 bits, or an array of such a type. Set
5476 ALIGN_CLAUSE according to the presence of an alignment clause on the
5477 type or, if it is an array, on the component type. */
5480 is_double_scalar_or_array (Entity_Id gnat_type
, bool *align_clause
)
5482 gnat_type
= Underlying_Type (gnat_type
);
5484 *align_clause
= Present (Alignment_Clause (gnat_type
));
5486 if (Is_Array_Type (gnat_type
))
5488 gnat_type
= Underlying_Type (Component_Type (gnat_type
));
5489 if (Present (Alignment_Clause (gnat_type
)))
5490 *align_clause
= true;
5493 if (!Is_Scalar_Type (gnat_type
))
5496 if (UI_To_Int (Esize (gnat_type
)) < 64)
5502 /* Return true if GNU_TYPE is suitable as the type of a non-aliased
5503 component of an aggregate type. */
5506 type_for_nonaliased_component_p (tree gnu_type
)
5508 /* If the type is passed by reference, we may have pointers to the
5509 component so it cannot be made non-aliased. */
5510 if (must_pass_by_ref (gnu_type
) || default_pass_by_ref (gnu_type
))
5513 /* We used to say that any component of aggregate type is aliased
5514 because the front-end may take 'Reference of it. The front-end
5515 has been enhanced in the meantime so as to use a renaming instead
5516 in most cases, but the back-end can probably take the address of
5517 such a component too so we go for the conservative stance.
5519 For instance, we might need the address of any array type, even
5520 if normally passed by copy, to construct a fat pointer if the
5521 component is used as an actual for an unconstrained formal.
5523 Likewise for record types: even if a specific record subtype is
5524 passed by copy, the parent type might be passed by ref (e.g. if
5525 it's of variable size) and we might take the address of a child
5526 component to pass to a parent formal. We have no way to check
5527 for such conditions here. */
5528 if (AGGREGATE_TYPE_P (gnu_type
))
5534 /* Return true if TYPE is a smaller form of ORIG_TYPE. */
5537 smaller_form_type_p (tree type
, tree orig_type
)
5541 /* We're not interested in variants here. */
5542 if (TYPE_MAIN_VARIANT (type
) == TYPE_MAIN_VARIANT (orig_type
))
5545 /* Like a variant, a packable version keeps the original TYPE_NAME. */
5546 if (TYPE_NAME (type
) != TYPE_NAME (orig_type
))
5549 size
= TYPE_SIZE (type
);
5550 osize
= TYPE_SIZE (orig_type
);
5552 if (!(TREE_CODE (size
) == INTEGER_CST
&& TREE_CODE (osize
) == INTEGER_CST
))
5555 return tree_int_cst_lt (size
, osize
) != 0;
5558 /* Perform final processing on global variables. */
5560 static GTY (()) tree dummy_global
;
5563 gnat_write_global_declarations (void)
5568 /* If we have declared types as used at the global level, insert them in
5569 the global hash table. We use a dummy variable for this purpose. */
5570 if (!VEC_empty (tree
, types_used_by_cur_var_decl
))
5572 struct varpool_node
*node
;
5575 ASM_FORMAT_PRIVATE_NAME (label
, first_global_object_name
, 0);
5577 = build_decl (BUILTINS_LOCATION
, VAR_DECL
, get_identifier (label
),
5579 TREE_STATIC (dummy_global
) = 1;
5580 TREE_ASM_WRITTEN (dummy_global
) = 1;
5581 node
= varpool_node (dummy_global
);
5582 node
->symbol
.force_output
= 1;
5584 while (!VEC_empty (tree
, types_used_by_cur_var_decl
))
5586 tree t
= VEC_pop (tree
, types_used_by_cur_var_decl
);
5587 types_used_by_var_decl_insert (t
, dummy_global
);
5591 /* Output debug information for all global type declarations first. This
5592 ensures that global types whose compilation hasn't been finalized yet,
5593 for example pointers to Taft amendment types, have their compilation
5594 finalized in the right context. */
5595 FOR_EACH_VEC_ELT (tree
, global_decls
, i
, iter
)
5596 if (TREE_CODE (iter
) == TYPE_DECL
)
5597 debug_hooks
->global_decl (iter
);
5599 /* Proceed to optimize and emit assembly. */
5600 finalize_compilation_unit ();
5602 /* After cgraph has had a chance to emit everything that's going to
5603 be emitted, output debug information for the rest of globals. */
5606 timevar_push (TV_SYMOUT
);
5607 FOR_EACH_VEC_ELT (tree
, global_decls
, i
, iter
)
5608 if (TREE_CODE (iter
) != TYPE_DECL
)
5609 debug_hooks
->global_decl (iter
);
5610 timevar_pop (TV_SYMOUT
);
5614 /* ************************************************************************
5615 * * GCC builtins support *
5616 * ************************************************************************ */
5618 /* The general scheme is fairly simple:
5620 For each builtin function/type to be declared, gnat_install_builtins calls
5621 internal facilities which eventually get to gnat_push_decl, which in turn
5622 tracks the so declared builtin function decls in the 'builtin_decls' global
5623 datastructure. When an Intrinsic subprogram declaration is processed, we
5624 search this global datastructure to retrieve the associated BUILT_IN DECL
5627 /* Search the chain of currently available builtin declarations for a node
5628 corresponding to function NAME (an IDENTIFIER_NODE). Return the first node
5629 found, if any, or NULL_TREE otherwise. */
5631 builtin_decl_for (tree name
)
5636 FOR_EACH_VEC_ELT (tree
, builtin_decls
, i
, decl
)
5637 if (DECL_NAME (decl
) == name
)
5643 /* The code below eventually exposes gnat_install_builtins, which declares
5644 the builtin types and functions we might need, either internally or as
5645 user accessible facilities.
5647 ??? This is a first implementation shot, still in rough shape. It is
5648 heavily inspired from the "C" family implementation, with chunks copied
5649 verbatim from there.
5651 Two obvious TODO candidates are
5652 o Use a more efficient name/decl mapping scheme
5653 o Devise a middle-end infrastructure to avoid having to copy
5654 pieces between front-ends. */
5656 /* ----------------------------------------------------------------------- *
5657 * BUILTIN ELEMENTARY TYPES *
5658 * ----------------------------------------------------------------------- */
5660 /* Standard data types to be used in builtin argument declarations. */
5664 CTI_SIGNED_SIZE_TYPE
, /* For format checking only. */
5666 CTI_CONST_STRING_TYPE
,
5671 static tree c_global_trees
[CTI_MAX
];
5673 #define signed_size_type_node c_global_trees[CTI_SIGNED_SIZE_TYPE]
5674 #define string_type_node c_global_trees[CTI_STRING_TYPE]
5675 #define const_string_type_node c_global_trees[CTI_CONST_STRING_TYPE]
5677 /* ??? In addition to some attribute handlers, we currently don't support a
5678 (small) number of builtin-types, which in turn inhibits support for a
5679 number of builtin functions. */
5680 #define wint_type_node void_type_node
5681 #define intmax_type_node void_type_node
5682 #define uintmax_type_node void_type_node
5684 /* Build the void_list_node (void_type_node having been created). */
5687 build_void_list_node (void)
5689 tree t
= build_tree_list (NULL_TREE
, void_type_node
);
5693 /* Used to help initialize the builtin-types.def table. When a type of
5694 the correct size doesn't exist, use error_mark_node instead of NULL.
5695 The latter results in segfaults even when a decl using the type doesn't
5699 builtin_type_for_size (int size
, bool unsignedp
)
5701 tree type
= gnat_type_for_size (size
, unsignedp
);
5702 return type
? type
: error_mark_node
;
5705 /* Build/push the elementary type decls that builtin functions/types
5709 install_builtin_elementary_types (void)
5711 signed_size_type_node
= gnat_signed_type (size_type_node
);
5712 pid_type_node
= integer_type_node
;
5713 void_list_node
= build_void_list_node ();
5715 string_type_node
= build_pointer_type (char_type_node
);
5716 const_string_type_node
5717 = build_pointer_type (build_qualified_type
5718 (char_type_node
, TYPE_QUAL_CONST
));
5721 /* ----------------------------------------------------------------------- *
5722 * BUILTIN FUNCTION TYPES *
5723 * ----------------------------------------------------------------------- */
5725 /* Now, builtin function types per se. */
5729 #define DEF_PRIMITIVE_TYPE(NAME, VALUE) NAME,
5730 #define DEF_FUNCTION_TYPE_0(NAME, RETURN) NAME,
5731 #define DEF_FUNCTION_TYPE_1(NAME, RETURN, ARG1) NAME,
5732 #define DEF_FUNCTION_TYPE_2(NAME, RETURN, ARG1, ARG2) NAME,
5733 #define DEF_FUNCTION_TYPE_3(NAME, RETURN, ARG1, ARG2, ARG3) NAME,
5734 #define DEF_FUNCTION_TYPE_4(NAME, RETURN, ARG1, ARG2, ARG3, ARG4) NAME,
5735 #define DEF_FUNCTION_TYPE_5(NAME, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5) NAME,
5736 #define DEF_FUNCTION_TYPE_6(NAME, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, ARG6) NAME,
5737 #define DEF_FUNCTION_TYPE_7(NAME, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, ARG6, ARG7) NAME,
5738 #define DEF_FUNCTION_TYPE_VAR_0(NAME, RETURN) NAME,
5739 #define DEF_FUNCTION_TYPE_VAR_1(NAME, RETURN, ARG1) NAME,
5740 #define DEF_FUNCTION_TYPE_VAR_2(NAME, RETURN, ARG1, ARG2) NAME,
5741 #define DEF_FUNCTION_TYPE_VAR_3(NAME, RETURN, ARG1, ARG2, ARG3) NAME,
5742 #define DEF_FUNCTION_TYPE_VAR_4(NAME, RETURN, ARG1, ARG2, ARG3, ARG4) NAME,
5743 #define DEF_FUNCTION_TYPE_VAR_5(NAME, RETURN, ARG1, ARG2, ARG3, ARG4, ARG6) \
5745 #define DEF_POINTER_TYPE(NAME, TYPE) NAME,
5746 #include "builtin-types.def"
5747 #undef DEF_PRIMITIVE_TYPE
5748 #undef DEF_FUNCTION_TYPE_0
5749 #undef DEF_FUNCTION_TYPE_1
5750 #undef DEF_FUNCTION_TYPE_2
5751 #undef DEF_FUNCTION_TYPE_3
5752 #undef DEF_FUNCTION_TYPE_4
5753 #undef DEF_FUNCTION_TYPE_5
5754 #undef DEF_FUNCTION_TYPE_6
5755 #undef DEF_FUNCTION_TYPE_7
5756 #undef DEF_FUNCTION_TYPE_VAR_0
5757 #undef DEF_FUNCTION_TYPE_VAR_1
5758 #undef DEF_FUNCTION_TYPE_VAR_2
5759 #undef DEF_FUNCTION_TYPE_VAR_3
5760 #undef DEF_FUNCTION_TYPE_VAR_4
5761 #undef DEF_FUNCTION_TYPE_VAR_5
5762 #undef DEF_POINTER_TYPE
5766 typedef enum c_builtin_type builtin_type
;
5768 /* A temporary array used in communication with def_fn_type. */
5769 static GTY(()) tree builtin_types
[(int) BT_LAST
+ 1];
5771 /* A helper function for install_builtin_types. Build function type
5772 for DEF with return type RET and N arguments. If VAR is true, then the
5773 function should be variadic after those N arguments.
5775 Takes special care not to ICE if any of the types involved are
5776 error_mark_node, which indicates that said type is not in fact available
5777 (see builtin_type_for_size). In which case the function type as a whole
5778 should be error_mark_node. */
5781 def_fn_type (builtin_type def
, builtin_type ret
, bool var
, int n
, ...)
5784 tree
*args
= XALLOCAVEC (tree
, n
);
5789 for (i
= 0; i
< n
; ++i
)
5791 builtin_type a
= (builtin_type
) va_arg (list
, int);
5792 t
= builtin_types
[a
];
5793 if (t
== error_mark_node
)
5798 t
= builtin_types
[ret
];
5799 if (t
== error_mark_node
)
5802 t
= build_varargs_function_type_array (t
, n
, args
);
5804 t
= build_function_type_array (t
, n
, args
);
5807 builtin_types
[def
] = t
;
5811 /* Build the builtin function types and install them in the builtin_types
5812 array for later use in builtin function decls. */
5815 install_builtin_function_types (void)
5817 tree va_list_ref_type_node
;
5818 tree va_list_arg_type_node
;
5820 if (TREE_CODE (va_list_type_node
) == ARRAY_TYPE
)
5822 va_list_arg_type_node
= va_list_ref_type_node
=
5823 build_pointer_type (TREE_TYPE (va_list_type_node
));
5827 va_list_arg_type_node
= va_list_type_node
;
5828 va_list_ref_type_node
= build_reference_type (va_list_type_node
);
5831 #define DEF_PRIMITIVE_TYPE(ENUM, VALUE) \
5832 builtin_types[ENUM] = VALUE;
5833 #define DEF_FUNCTION_TYPE_0(ENUM, RETURN) \
5834 def_fn_type (ENUM, RETURN, 0, 0);
5835 #define DEF_FUNCTION_TYPE_1(ENUM, RETURN, ARG1) \
5836 def_fn_type (ENUM, RETURN, 0, 1, ARG1);
5837 #define DEF_FUNCTION_TYPE_2(ENUM, RETURN, ARG1, ARG2) \
5838 def_fn_type (ENUM, RETURN, 0, 2, ARG1, ARG2);
5839 #define DEF_FUNCTION_TYPE_3(ENUM, RETURN, ARG1, ARG2, ARG3) \
5840 def_fn_type (ENUM, RETURN, 0, 3, ARG1, ARG2, ARG3);
5841 #define DEF_FUNCTION_TYPE_4(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4) \
5842 def_fn_type (ENUM, RETURN, 0, 4, ARG1, ARG2, ARG3, ARG4);
5843 #define DEF_FUNCTION_TYPE_5(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5) \
5844 def_fn_type (ENUM, RETURN, 0, 5, ARG1, ARG2, ARG3, ARG4, ARG5);
5845 #define DEF_FUNCTION_TYPE_6(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \
5847 def_fn_type (ENUM, RETURN, 0, 6, ARG1, ARG2, ARG3, ARG4, ARG5, ARG6);
5848 #define DEF_FUNCTION_TYPE_7(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \
5850 def_fn_type (ENUM, RETURN, 0, 7, ARG1, ARG2, ARG3, ARG4, ARG5, ARG6, ARG7);
5851 #define DEF_FUNCTION_TYPE_VAR_0(ENUM, RETURN) \
5852 def_fn_type (ENUM, RETURN, 1, 0);
5853 #define DEF_FUNCTION_TYPE_VAR_1(ENUM, RETURN, ARG1) \
5854 def_fn_type (ENUM, RETURN, 1, 1, ARG1);
5855 #define DEF_FUNCTION_TYPE_VAR_2(ENUM, RETURN, ARG1, ARG2) \
5856 def_fn_type (ENUM, RETURN, 1, 2, ARG1, ARG2);
5857 #define DEF_FUNCTION_TYPE_VAR_3(ENUM, RETURN, ARG1, ARG2, ARG3) \
5858 def_fn_type (ENUM, RETURN, 1, 3, ARG1, ARG2, ARG3);
5859 #define DEF_FUNCTION_TYPE_VAR_4(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4) \
5860 def_fn_type (ENUM, RETURN, 1, 4, ARG1, ARG2, ARG3, ARG4);
5861 #define DEF_FUNCTION_TYPE_VAR_5(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5) \
5862 def_fn_type (ENUM, RETURN, 1, 5, ARG1, ARG2, ARG3, ARG4, ARG5);
5863 #define DEF_POINTER_TYPE(ENUM, TYPE) \
5864 builtin_types[(int) ENUM] = build_pointer_type (builtin_types[(int) TYPE]);
5866 #include "builtin-types.def"
5868 #undef DEF_PRIMITIVE_TYPE
5869 #undef DEF_FUNCTION_TYPE_1
5870 #undef DEF_FUNCTION_TYPE_2
5871 #undef DEF_FUNCTION_TYPE_3
5872 #undef DEF_FUNCTION_TYPE_4
5873 #undef DEF_FUNCTION_TYPE_5
5874 #undef DEF_FUNCTION_TYPE_6
5875 #undef DEF_FUNCTION_TYPE_VAR_0
5876 #undef DEF_FUNCTION_TYPE_VAR_1
5877 #undef DEF_FUNCTION_TYPE_VAR_2
5878 #undef DEF_FUNCTION_TYPE_VAR_3
5879 #undef DEF_FUNCTION_TYPE_VAR_4
5880 #undef DEF_FUNCTION_TYPE_VAR_5
5881 #undef DEF_POINTER_TYPE
5882 builtin_types
[(int) BT_LAST
] = NULL_TREE
;
5885 /* ----------------------------------------------------------------------- *
5886 * BUILTIN ATTRIBUTES *
5887 * ----------------------------------------------------------------------- */
5889 enum built_in_attribute
5891 #define DEF_ATTR_NULL_TREE(ENUM) ENUM,
5892 #define DEF_ATTR_INT(ENUM, VALUE) ENUM,
5893 #define DEF_ATTR_STRING(ENUM, VALUE) ENUM,
5894 #define DEF_ATTR_IDENT(ENUM, STRING) ENUM,
5895 #define DEF_ATTR_TREE_LIST(ENUM, PURPOSE, VALUE, CHAIN) ENUM,
5896 #include "builtin-attrs.def"
5897 #undef DEF_ATTR_NULL_TREE
5899 #undef DEF_ATTR_STRING
5900 #undef DEF_ATTR_IDENT
5901 #undef DEF_ATTR_TREE_LIST
5905 static GTY(()) tree built_in_attributes
[(int) ATTR_LAST
];
5908 install_builtin_attributes (void)
5910 /* Fill in the built_in_attributes array. */
5911 #define DEF_ATTR_NULL_TREE(ENUM) \
5912 built_in_attributes[(int) ENUM] = NULL_TREE;
5913 #define DEF_ATTR_INT(ENUM, VALUE) \
5914 built_in_attributes[(int) ENUM] = build_int_cst (NULL_TREE, VALUE);
5915 #define DEF_ATTR_STRING(ENUM, VALUE) \
5916 built_in_attributes[(int) ENUM] = build_string (strlen (VALUE), VALUE);
5917 #define DEF_ATTR_IDENT(ENUM, STRING) \
5918 built_in_attributes[(int) ENUM] = get_identifier (STRING);
5919 #define DEF_ATTR_TREE_LIST(ENUM, PURPOSE, VALUE, CHAIN) \
5920 built_in_attributes[(int) ENUM] \
5921 = tree_cons (built_in_attributes[(int) PURPOSE], \
5922 built_in_attributes[(int) VALUE], \
5923 built_in_attributes[(int) CHAIN]);
5924 #include "builtin-attrs.def"
5925 #undef DEF_ATTR_NULL_TREE
5927 #undef DEF_ATTR_STRING
5928 #undef DEF_ATTR_IDENT
5929 #undef DEF_ATTR_TREE_LIST
5932 /* Handle a "const" attribute; arguments as in
5933 struct attribute_spec.handler. */
5936 handle_const_attribute (tree
*node
, tree
ARG_UNUSED (name
),
5937 tree
ARG_UNUSED (args
), int ARG_UNUSED (flags
),
5940 if (TREE_CODE (*node
) == FUNCTION_DECL
)
5941 TREE_READONLY (*node
) = 1;
5943 *no_add_attrs
= true;
5948 /* Handle a "nothrow" attribute; arguments as in
5949 struct attribute_spec.handler. */
5952 handle_nothrow_attribute (tree
*node
, tree
ARG_UNUSED (name
),
5953 tree
ARG_UNUSED (args
), int ARG_UNUSED (flags
),
5956 if (TREE_CODE (*node
) == FUNCTION_DECL
)
5957 TREE_NOTHROW (*node
) = 1;
5959 *no_add_attrs
= true;
5964 /* Handle a "pure" attribute; arguments as in
5965 struct attribute_spec.handler. */
5968 handle_pure_attribute (tree
*node
, tree name
, tree
ARG_UNUSED (args
),
5969 int ARG_UNUSED (flags
), bool *no_add_attrs
)
5971 if (TREE_CODE (*node
) == FUNCTION_DECL
)
5972 DECL_PURE_P (*node
) = 1;
5973 /* ??? TODO: Support types. */
5976 warning (OPT_Wattributes
, "%qs attribute ignored",
5977 IDENTIFIER_POINTER (name
));
5978 *no_add_attrs
= true;
5984 /* Handle a "no vops" attribute; arguments as in
5985 struct attribute_spec.handler. */
5988 handle_novops_attribute (tree
*node
, tree
ARG_UNUSED (name
),
5989 tree
ARG_UNUSED (args
), int ARG_UNUSED (flags
),
5990 bool *ARG_UNUSED (no_add_attrs
))
5992 gcc_assert (TREE_CODE (*node
) == FUNCTION_DECL
);
5993 DECL_IS_NOVOPS (*node
) = 1;
5997 /* Helper for nonnull attribute handling; fetch the operand number
5998 from the attribute argument list. */
6001 get_nonnull_operand (tree arg_num_expr
, unsigned HOST_WIDE_INT
*valp
)
6003 /* Verify the arg number is a constant. */
6004 if (TREE_CODE (arg_num_expr
) != INTEGER_CST
6005 || TREE_INT_CST_HIGH (arg_num_expr
) != 0)
6008 *valp
= TREE_INT_CST_LOW (arg_num_expr
);
6012 /* Handle the "nonnull" attribute. */
6014 handle_nonnull_attribute (tree
*node
, tree
ARG_UNUSED (name
),
6015 tree args
, int ARG_UNUSED (flags
),
6019 unsigned HOST_WIDE_INT attr_arg_num
;
6021 /* If no arguments are specified, all pointer arguments should be
6022 non-null. Verify a full prototype is given so that the arguments
6023 will have the correct types when we actually check them later. */
6026 if (!prototype_p (type
))
6028 error ("nonnull attribute without arguments on a non-prototype");
6029 *no_add_attrs
= true;
6034 /* Argument list specified. Verify that each argument number references
6035 a pointer argument. */
6036 for (attr_arg_num
= 1; args
; args
= TREE_CHAIN (args
))
6038 unsigned HOST_WIDE_INT arg_num
= 0, ck_num
;
6040 if (!get_nonnull_operand (TREE_VALUE (args
), &arg_num
))
6042 error ("nonnull argument has invalid operand number (argument %lu)",
6043 (unsigned long) attr_arg_num
);
6044 *no_add_attrs
= true;
6048 if (prototype_p (type
))
6050 function_args_iterator iter
;
6053 function_args_iter_init (&iter
, type
);
6054 for (ck_num
= 1; ; ck_num
++, function_args_iter_next (&iter
))
6056 argument
= function_args_iter_cond (&iter
);
6057 if (!argument
|| ck_num
== arg_num
)
6062 || TREE_CODE (argument
) == VOID_TYPE
)
6064 error ("nonnull argument with out-of-range operand number "
6065 "(argument %lu, operand %lu)",
6066 (unsigned long) attr_arg_num
, (unsigned long) arg_num
);
6067 *no_add_attrs
= true;
6071 if (TREE_CODE (argument
) != POINTER_TYPE
)
6073 error ("nonnull argument references non-pointer operand "
6074 "(argument %lu, operand %lu)",
6075 (unsigned long) attr_arg_num
, (unsigned long) arg_num
);
6076 *no_add_attrs
= true;
6085 /* Handle a "sentinel" attribute. */
6088 handle_sentinel_attribute (tree
*node
, tree name
, tree args
,
6089 int ARG_UNUSED (flags
), bool *no_add_attrs
)
6091 if (!prototype_p (*node
))
6093 warning (OPT_Wattributes
,
6094 "%qs attribute requires prototypes with named arguments",
6095 IDENTIFIER_POINTER (name
));
6096 *no_add_attrs
= true;
6100 if (!stdarg_p (*node
))
6102 warning (OPT_Wattributes
,
6103 "%qs attribute only applies to variadic functions",
6104 IDENTIFIER_POINTER (name
));
6105 *no_add_attrs
= true;
6111 tree position
= TREE_VALUE (args
);
6113 if (TREE_CODE (position
) != INTEGER_CST
)
6115 warning (0, "requested position is not an integer constant");
6116 *no_add_attrs
= true;
6120 if (tree_int_cst_lt (position
, integer_zero_node
))
6122 warning (0, "requested position is less than zero");
6123 *no_add_attrs
= true;
6131 /* Handle a "noreturn" attribute; arguments as in
6132 struct attribute_spec.handler. */
6135 handle_noreturn_attribute (tree
*node
, tree name
, tree
ARG_UNUSED (args
),
6136 int ARG_UNUSED (flags
), bool *no_add_attrs
)
6138 tree type
= TREE_TYPE (*node
);
6140 /* See FIXME comment in c_common_attribute_table. */
6141 if (TREE_CODE (*node
) == FUNCTION_DECL
)
6142 TREE_THIS_VOLATILE (*node
) = 1;
6143 else if (TREE_CODE (type
) == POINTER_TYPE
6144 && TREE_CODE (TREE_TYPE (type
)) == FUNCTION_TYPE
)
6146 = build_pointer_type
6147 (build_type_variant (TREE_TYPE (type
),
6148 TYPE_READONLY (TREE_TYPE (type
)), 1));
6151 warning (OPT_Wattributes
, "%qs attribute ignored",
6152 IDENTIFIER_POINTER (name
));
6153 *no_add_attrs
= true;
6159 /* Handle a "leaf" attribute; arguments as in
6160 struct attribute_spec.handler. */
6163 handle_leaf_attribute (tree
*node
, tree name
,
6164 tree
ARG_UNUSED (args
),
6165 int ARG_UNUSED (flags
), bool *no_add_attrs
)
6167 if (TREE_CODE (*node
) != FUNCTION_DECL
)
6169 warning (OPT_Wattributes
, "%qE attribute ignored", name
);
6170 *no_add_attrs
= true;
6172 if (!TREE_PUBLIC (*node
))
6174 warning (OPT_Wattributes
, "%qE attribute has no effect", name
);
6175 *no_add_attrs
= true;
6181 /* Handle a "malloc" attribute; arguments as in
6182 struct attribute_spec.handler. */
6185 handle_malloc_attribute (tree
*node
, tree name
, tree
ARG_UNUSED (args
),
6186 int ARG_UNUSED (flags
), bool *no_add_attrs
)
6188 if (TREE_CODE (*node
) == FUNCTION_DECL
6189 && POINTER_TYPE_P (TREE_TYPE (TREE_TYPE (*node
))))
6190 DECL_IS_MALLOC (*node
) = 1;
6193 warning (OPT_Wattributes
, "%qs attribute ignored",
6194 IDENTIFIER_POINTER (name
));
6195 *no_add_attrs
= true;
6201 /* Fake handler for attributes we don't properly support. */
6204 fake_attribute_handler (tree
* ARG_UNUSED (node
),
6205 tree
ARG_UNUSED (name
),
6206 tree
ARG_UNUSED (args
),
6207 int ARG_UNUSED (flags
),
6208 bool * ARG_UNUSED (no_add_attrs
))
6213 /* Handle a "type_generic" attribute. */
6216 handle_type_generic_attribute (tree
*node
, tree
ARG_UNUSED (name
),
6217 tree
ARG_UNUSED (args
), int ARG_UNUSED (flags
),
6218 bool * ARG_UNUSED (no_add_attrs
))
6220 /* Ensure we have a function type. */
6221 gcc_assert (TREE_CODE (*node
) == FUNCTION_TYPE
);
6223 /* Ensure we have a variadic function. */
6224 gcc_assert (!prototype_p (*node
) || stdarg_p (*node
));
6229 /* Handle a "vector_size" attribute; arguments as in
6230 struct attribute_spec.handler. */
6233 handle_vector_size_attribute (tree
*node
, tree name
, tree args
,
6234 int ARG_UNUSED (flags
),
6237 unsigned HOST_WIDE_INT vecsize
, nunits
;
6238 enum machine_mode orig_mode
;
6239 tree type
= *node
, new_type
, size
;
6241 *no_add_attrs
= true;
6243 size
= TREE_VALUE (args
);
6245 if (!host_integerp (size
, 1))
6247 warning (OPT_Wattributes
, "%qs attribute ignored",
6248 IDENTIFIER_POINTER (name
));
6252 /* Get the vector size (in bytes). */
6253 vecsize
= tree_low_cst (size
, 1);
6255 /* We need to provide for vector pointers, vector arrays, and
6256 functions returning vectors. For example:
6258 __attribute__((vector_size(16))) short *foo;
6260 In this case, the mode is SI, but the type being modified is
6261 HI, so we need to look further. */
6263 while (POINTER_TYPE_P (type
)
6264 || TREE_CODE (type
) == FUNCTION_TYPE
6265 || TREE_CODE (type
) == ARRAY_TYPE
)
6266 type
= TREE_TYPE (type
);
6268 /* Get the mode of the type being modified. */
6269 orig_mode
= TYPE_MODE (type
);
6271 if ((!INTEGRAL_TYPE_P (type
)
6272 && !SCALAR_FLOAT_TYPE_P (type
)
6273 && !FIXED_POINT_TYPE_P (type
))
6274 || (!SCALAR_FLOAT_MODE_P (orig_mode
)
6275 && GET_MODE_CLASS (orig_mode
) != MODE_INT
6276 && !ALL_SCALAR_FIXED_POINT_MODE_P (orig_mode
))
6277 || !host_integerp (TYPE_SIZE_UNIT (type
), 1)
6278 || TREE_CODE (type
) == BOOLEAN_TYPE
)
6280 error ("invalid vector type for attribute %qs",
6281 IDENTIFIER_POINTER (name
));
6285 if (vecsize
% tree_low_cst (TYPE_SIZE_UNIT (type
), 1))
6287 error ("vector size not an integral multiple of component size");
6293 error ("zero vector size");
6297 /* Calculate how many units fit in the vector. */
6298 nunits
= vecsize
/ tree_low_cst (TYPE_SIZE_UNIT (type
), 1);
6299 if (nunits
& (nunits
- 1))
6301 error ("number of components of the vector not a power of two");
6305 new_type
= build_vector_type (type
, nunits
);
6307 /* Build back pointers if needed. */
6308 *node
= reconstruct_complex_type (*node
, new_type
);
6313 /* Handle a "vector_type" attribute; arguments as in
6314 struct attribute_spec.handler. */
6317 handle_vector_type_attribute (tree
*node
, tree name
, tree
ARG_UNUSED (args
),
6318 int ARG_UNUSED (flags
),
6321 /* Vector representative type and size. */
6322 tree rep_type
= *node
;
6323 tree rep_size
= TYPE_SIZE_UNIT (rep_type
);
6326 /* Vector size in bytes and number of units. */
6327 unsigned HOST_WIDE_INT vec_bytes
, vec_units
;
6329 /* Vector element type and mode. */
6331 enum machine_mode elem_mode
;
6333 *no_add_attrs
= true;
6335 /* Get the representative array type, possibly nested within a
6336 padding record e.g. for alignment purposes. */
6338 if (TYPE_IS_PADDING_P (rep_type
))
6339 rep_type
= TREE_TYPE (TYPE_FIELDS (rep_type
));
6341 if (TREE_CODE (rep_type
) != ARRAY_TYPE
)
6343 error ("attribute %qs applies to array types only",
6344 IDENTIFIER_POINTER (name
));
6348 /* Silently punt on variable sizes. We can't make vector types for them,
6349 need to ignore them on front-end generated subtypes of unconstrained
6350 bases, and this attribute is for binding implementors, not end-users, so
6351 we should never get there from legitimate explicit uses. */
6353 if (!host_integerp (rep_size
, 1))
6356 /* Get the element type/mode and check this is something we know
6357 how to make vectors of. */
6359 elem_type
= TREE_TYPE (rep_type
);
6360 elem_mode
= TYPE_MODE (elem_type
);
6362 if ((!INTEGRAL_TYPE_P (elem_type
)
6363 && !SCALAR_FLOAT_TYPE_P (elem_type
)
6364 && !FIXED_POINT_TYPE_P (elem_type
))
6365 || (!SCALAR_FLOAT_MODE_P (elem_mode
)
6366 && GET_MODE_CLASS (elem_mode
) != MODE_INT
6367 && !ALL_SCALAR_FIXED_POINT_MODE_P (elem_mode
))
6368 || !host_integerp (TYPE_SIZE_UNIT (elem_type
), 1))
6370 error ("invalid element type for attribute %qs",
6371 IDENTIFIER_POINTER (name
));
6375 /* Sanity check the vector size and element type consistency. */
6377 vec_bytes
= tree_low_cst (rep_size
, 1);
6379 if (vec_bytes
% tree_low_cst (TYPE_SIZE_UNIT (elem_type
), 1))
6381 error ("vector size not an integral multiple of component size");
6387 error ("zero vector size");
6391 vec_units
= vec_bytes
/ tree_low_cst (TYPE_SIZE_UNIT (elem_type
), 1);
6392 if (vec_units
& (vec_units
- 1))
6394 error ("number of components of the vector not a power of two");
6398 /* Build the vector type and replace. */
6400 *node
= build_vector_type (elem_type
, vec_units
);
6401 rep_name
= TYPE_NAME (rep_type
);
6402 if (TREE_CODE (rep_name
) == TYPE_DECL
)
6403 rep_name
= DECL_NAME (rep_name
);
6404 TYPE_NAME (*node
) = rep_name
;
6405 TYPE_REPRESENTATIVE_ARRAY (*node
) = rep_type
;
6410 /* ----------------------------------------------------------------------- *
6411 * BUILTIN FUNCTIONS *
6412 * ----------------------------------------------------------------------- */
6414 /* Worker for DEF_BUILTIN. Possibly define a builtin function with one or two
6415 names. Does not declare a non-__builtin_ function if flag_no_builtin, or
6416 if nonansi_p and flag_no_nonansi_builtin. */
6419 def_builtin_1 (enum built_in_function fncode
,
6421 enum built_in_class fnclass
,
6422 tree fntype
, tree libtype
,
6423 bool both_p
, bool fallback_p
,
6424 bool nonansi_p ATTRIBUTE_UNUSED
,
6425 tree fnattrs
, bool implicit_p
)
6428 const char *libname
;
6430 /* Preserve an already installed decl. It most likely was setup in advance
6431 (e.g. as part of the internal builtins) for specific reasons. */
6432 if (builtin_decl_explicit (fncode
) != NULL_TREE
)
6435 gcc_assert ((!both_p
&& !fallback_p
)
6436 || !strncmp (name
, "__builtin_",
6437 strlen ("__builtin_")));
6439 libname
= name
+ strlen ("__builtin_");
6440 decl
= add_builtin_function (name
, fntype
, fncode
, fnclass
,
6441 (fallback_p
? libname
: NULL
),
6444 /* ??? This is normally further controlled by command-line options
6445 like -fno-builtin, but we don't have them for Ada. */
6446 add_builtin_function (libname
, libtype
, fncode
, fnclass
,
6449 set_builtin_decl (fncode
, decl
, implicit_p
);
/* C language-version flags referenced by the COND field of builtins.def
   entries; Ada never enables either dialect, so both stay zero.  */
static int flag_isoc94 = 0;
static int flag_isoc99 = 0;
6455 /* Install what the common builtins.def offers. */
6458 install_builtin_functions (void)
6460 #define DEF_BUILTIN(ENUM, NAME, CLASS, TYPE, LIBTYPE, BOTH_P, FALLBACK_P, \
6461 NONANSI_P, ATTRS, IMPLICIT, COND) \
6463 def_builtin_1 (ENUM, NAME, CLASS, \
6464 builtin_types[(int) TYPE], \
6465 builtin_types[(int) LIBTYPE], \
6466 BOTH_P, FALLBACK_P, NONANSI_P, \
6467 built_in_attributes[(int) ATTRS], IMPLICIT);
6468 #include "builtins.def"
6472 /* ----------------------------------------------------------------------- *
6473 * BUILTIN FUNCTIONS *
6474 * ----------------------------------------------------------------------- */
6476 /* Install the builtin functions we might need. */
6479 gnat_install_builtins (void)
6481 install_builtin_elementary_types ();
6482 install_builtin_function_types ();
6483 install_builtin_attributes ();
6485 /* Install builtins used by generic middle-end pieces first. Some of these
6486 know about internal specificities and control attributes accordingly, for
6487 instance __builtin_alloca vs no-throw and -fstack-check. We will ignore
6488 the generic definition from builtins.def. */
6489 build_common_builtin_nodes ();
6491 /* Now, install the target specific builtins, such as the AltiVec family on
6492 ppc, and the common set as exposed by builtins.def. */
6493 targetm
.init_builtins ();
6494 install_builtin_functions ();
6497 #include "gt-ada-utils.h"
6498 #include "gtype-ada.h"