1 /****************************************************************************
3 * GNAT COMPILER COMPONENTS *
7 * C Implementation File *
9 * Copyright (C) 1992-2012, Free Software Foundation, Inc. *
11 * GNAT is free software; you can redistribute it and/or modify it under *
12 * terms of the GNU General Public License as published by the Free Soft- *
13 * ware Foundation; either version 3, or (at your option) any later ver- *
14 * sion. GNAT is distributed in the hope that it will be useful, but WITH- *
15 * OUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY *
16 * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License *
17 * for more details. You should have received a copy of the GNU General *
18 * Public License along with GCC; see the file COPYING3. If not see *
19 * <http://www.gnu.org/licenses/>. *
21 * GNAT was originally developed by the GNAT team at New York University. *
22 * Extensive contributions were provided by Ada Core Technologies Inc. *
24 ****************************************************************************/
28 #include "coretypes.h"
33 #include "diagnostic-core.h"
39 #include "common/common-target.h"
40 #include "langhooks.h"
42 #include "diagnostic.h"
43 #include "tree-dump.h"
44 #include "tree-inline.h"
45 #include "tree-iterator.h"
61 /* If nonzero, pretend we are allocating at global level. */
64 /* The default alignment of "double" floating-point types, i.e. floating
65 point types whose size is equal to 64 bits, or 0 if this alignment is
66 not specifically capped. */
67 int double_float_alignment
;
69 /* The default alignment of "double" or larger scalar types, i.e. scalar
70 types whose size is greater or equal to 64 bits, or 0 if this alignment
71 is not specifically capped. */
72 int double_scalar_alignment
;
74 /* Tree nodes for the various types and decls we create. */
75 tree gnat_std_decls
[(int) ADT_LAST
];
77 /* Functions to call for each of the possible raise reasons. */
78 tree gnat_raise_decls
[(int) LAST_REASON_CODE
+ 1];
80 /* Likewise, but with extra info for each of the possible raise reasons. */
81 tree gnat_raise_decls_ext
[(int) LAST_REASON_CODE
+ 1];
83 /* Forward declarations for handlers of attributes. */
84 static tree
handle_const_attribute (tree
*, tree
, tree
, int, bool *);
85 static tree
handle_nothrow_attribute (tree
*, tree
, tree
, int, bool *);
86 static tree
handle_pure_attribute (tree
*, tree
, tree
, int, bool *);
87 static tree
handle_novops_attribute (tree
*, tree
, tree
, int, bool *);
88 static tree
handle_nonnull_attribute (tree
*, tree
, tree
, int, bool *);
89 static tree
handle_sentinel_attribute (tree
*, tree
, tree
, int, bool *);
90 static tree
handle_noreturn_attribute (tree
*, tree
, tree
, int, bool *);
91 static tree
handle_leaf_attribute (tree
*, tree
, tree
, int, bool *);
92 static tree
handle_malloc_attribute (tree
*, tree
, tree
, int, bool *);
93 static tree
handle_type_generic_attribute (tree
*, tree
, tree
, int, bool *);
94 static tree
handle_vector_size_attribute (tree
*, tree
, tree
, int, bool *);
95 static tree
handle_vector_type_attribute (tree
*, tree
, tree
, int, bool *);
97 /* Fake handler for attributes we don't properly support, typically because
98 they'd require dragging a lot of the common-c front-end circuitry. */
99 static tree
fake_attribute_handler (tree
*, tree
, tree
, int, bool *);
101 /* Table of machine-independent internal attributes for Ada. We support
102 this minimal set of attributes to accommodate the needs of builtins. */
103 const struct attribute_spec gnat_internal_attribute_table
[] =
105 /* { name, min_len, max_len, decl_req, type_req, fn_type_req, handler,
106 affects_type_identity } */
107 { "const", 0, 0, true, false, false, handle_const_attribute
,
109 { "nothrow", 0, 0, true, false, false, handle_nothrow_attribute
,
111 { "pure", 0, 0, true, false, false, handle_pure_attribute
,
113 { "no vops", 0, 0, true, false, false, handle_novops_attribute
,
115 { "nonnull", 0, -1, false, true, true, handle_nonnull_attribute
,
117 { "sentinel", 0, 1, false, true, true, handle_sentinel_attribute
,
119 { "noreturn", 0, 0, true, false, false, handle_noreturn_attribute
,
121 { "leaf", 0, 0, true, false, false, handle_leaf_attribute
,
123 { "malloc", 0, 0, true, false, false, handle_malloc_attribute
,
125 { "type generic", 0, 0, false, true, true, handle_type_generic_attribute
,
128 { "vector_size", 1, 1, false, true, false, handle_vector_size_attribute
,
130 { "vector_type", 0, 0, false, true, false, handle_vector_type_attribute
,
132 { "may_alias", 0, 0, false, true, false, NULL
, false },
134 /* ??? format and format_arg are heavy and not supported, which actually
135 prevents support for stdio builtins, which we however declare as part
136 of the common builtins.def contents. */
137 { "format", 3, 3, false, true, true, fake_attribute_handler
, false },
138 { "format_arg", 1, 1, false, true, true, fake_attribute_handler
, false },
140 { NULL
, 0, 0, false, false, false, NULL
, false }
143 /* Associates a GNAT tree node to a GCC tree node. It is used in
144 `save_gnu_tree', `get_gnu_tree' and `present_gnu_tree'. See documentation
145 of `save_gnu_tree' for more info. */
146 static GTY((length ("max_gnat_nodes"))) tree
*associate_gnat_to_gnu
;
148 #define GET_GNU_TREE(GNAT_ENTITY) \
149 associate_gnat_to_gnu[(GNAT_ENTITY) - First_Node_Id]
151 #define SET_GNU_TREE(GNAT_ENTITY,VAL) \
152 associate_gnat_to_gnu[(GNAT_ENTITY) - First_Node_Id] = (VAL)
154 #define PRESENT_GNU_TREE(GNAT_ENTITY) \
155 (associate_gnat_to_gnu[(GNAT_ENTITY) - First_Node_Id] != NULL_TREE)
157 /* Associates a GNAT entity to a GCC tree node used as a dummy, if any. */
158 static GTY((length ("max_gnat_nodes"))) tree
*dummy_node_table
;
160 #define GET_DUMMY_NODE(GNAT_ENTITY) \
161 dummy_node_table[(GNAT_ENTITY) - First_Node_Id]
163 #define SET_DUMMY_NODE(GNAT_ENTITY,VAL) \
164 dummy_node_table[(GNAT_ENTITY) - First_Node_Id] = (VAL)
166 #define PRESENT_DUMMY_NODE(GNAT_ENTITY) \
167 (dummy_node_table[(GNAT_ENTITY) - First_Node_Id] != NULL_TREE)
169 /* This variable keeps a table for types for each precision so that we only
170 allocate each of them once. Signed and unsigned types are kept separate.
172 Note that these types are only used when fold-const requests something
173 special. Perhaps we should NOT share these types; we'll see how it
175 static GTY(()) tree signed_and_unsigned_types
[2 * MAX_BITS_PER_WORD
+ 1][2];
177 /* Likewise for float types, but record these by mode. */
178 static GTY(()) tree float_types
[NUM_MACHINE_MODES
];
180 /* For each binding contour we allocate a binding_level structure to indicate
181 the binding depth. */
183 struct GTY((chain_next ("%h.chain"))) gnat_binding_level
{
184 /* The binding level containing this one (the enclosing binding level). */
185 struct gnat_binding_level
*chain
;
186 /* The BLOCK node for this level. */
188 /* If nonzero, the setjmp buffer that needs to be updated for any
189 variable-sized definition within this context. */
193 /* The binding level currently in effect. */
194 static GTY(()) struct gnat_binding_level
*current_binding_level
;
196 /* A chain of gnat_binding_level structures awaiting reuse. */
197 static GTY((deletable
)) struct gnat_binding_level
*free_binding_level
;
199 /* The context to be used for global declarations. */
200 static GTY(()) tree global_context
;
202 /* An array of global declarations. */
203 static GTY(()) VEC(tree
,gc
) *global_decls
;
205 /* An array of builtin function declarations. */
206 static GTY(()) VEC(tree
,gc
) *builtin_decls
;
208 /* An array of global renaming pointers. */
209 static GTY(()) VEC(tree
,gc
) *global_renaming_pointers
;
211 /* A chain of unused BLOCK nodes. */
212 static GTY((deletable
)) tree free_block_chain
;
214 static int pad_type_hash_marked_p (const void *p
);
215 static hashval_t
pad_type_hash_hash (const void *p
);
216 static int pad_type_hash_eq (const void *p1
, const void *p2
);
218 /* A hash table of padded types. It is modelled on the generic type
219 hash table in tree.c, which must thus be used as a reference. */
220 struct GTY(()) pad_type_hash
{
225 static GTY ((if_marked ("pad_type_hash_marked_p"),
226 param_is (struct pad_type_hash
)))
227 htab_t pad_type_hash_table
;
229 static tree
merge_sizes (tree
, tree
, tree
, bool, bool);
230 static tree
compute_related_constant (tree
, tree
);
231 static tree
split_plus (tree
, tree
*);
232 static tree
float_type_for_precision (int, enum machine_mode
);
233 static tree
convert_to_fat_pointer (tree
, tree
);
234 static bool potential_alignment_gap (tree
, tree
, tree
);
235 static void process_attributes (tree
, struct attrib
*);
237 /* Initialize data structures of the utils.c module. */
240 init_gnat_utils (void)
242 /* Initialize the association of GNAT nodes to GCC trees. */
243 associate_gnat_to_gnu
= ggc_alloc_cleared_vec_tree (max_gnat_nodes
);
245 /* Initialize the association of GNAT nodes to GCC trees as dummies. */
246 dummy_node_table
= ggc_alloc_cleared_vec_tree (max_gnat_nodes
);
248 /* Initialize the hash table of padded types. */
249 pad_type_hash_table
= htab_create_ggc (512, pad_type_hash_hash
,
250 pad_type_hash_eq
, 0);
253 /* Destroy data structures of the utils.c module. */
256 destroy_gnat_utils (void)
258 /* Destroy the association of GNAT nodes to GCC trees. */
259 ggc_free (associate_gnat_to_gnu
);
260 associate_gnat_to_gnu
= NULL
;
262 /* Destroy the association of GNAT nodes to GCC trees as dummies. */
263 ggc_free (dummy_node_table
);
264 dummy_node_table
= NULL
;
266 /* Destroy the hash table of padded types. */
267 htab_delete (pad_type_hash_table
);
268 pad_type_hash_table
= NULL
;
270 /* Invalidate the global renaming pointers. */
271 invalidate_global_renaming_pointers ();
274 /* GNAT_ENTITY is a GNAT tree node for an entity. Associate GNU_DECL, a GCC
275 tree node, with GNAT_ENTITY. If GNU_DECL is not a ..._DECL node, abort.
276 If NO_CHECK is true, the latter check is suppressed.
278 If GNU_DECL is zero, reset a previous association. */
281 save_gnu_tree (Entity_Id gnat_entity
, tree gnu_decl
, bool no_check
)
283 /* Check that GNAT_ENTITY is not already defined and that it is being set
284 to something which is a decl. If that is not the case, this usually
285 means GNAT_ENTITY is defined twice, but occasionally is due to some
287 gcc_assert (!(gnu_decl
288 && (PRESENT_GNU_TREE (gnat_entity
)
289 || (!no_check
&& !DECL_P (gnu_decl
)))));
291 SET_GNU_TREE (gnat_entity
, gnu_decl
);
294 /* GNAT_ENTITY is a GNAT tree node for an entity. Return the GCC tree node
295 that was associated with it. If there is no such tree node, abort.
297 In some cases, such as delayed elaboration or expressions that need to
298 be elaborated only once, GNAT_ENTITY is really not an entity. */
301 get_gnu_tree (Entity_Id gnat_entity
)
303 gcc_assert (PRESENT_GNU_TREE (gnat_entity
));
304 return GET_GNU_TREE (gnat_entity
);
307 /* Return nonzero if a GCC tree has been associated with GNAT_ENTITY. */
310 present_gnu_tree (Entity_Id gnat_entity
)
312 return PRESENT_GNU_TREE (gnat_entity
);
315 /* Make a dummy type corresponding to GNAT_TYPE. */
318 make_dummy_type (Entity_Id gnat_type
)
320 Entity_Id gnat_underlying
= Gigi_Equivalent_Type (gnat_type
);
323 /* If there is an equivalent type, get its underlying type. */
324 if (Present (gnat_underlying
))
325 gnat_underlying
= Gigi_Equivalent_Type (Underlying_Type (gnat_underlying
));
327 /* If there was no equivalent type (can only happen when just annotating
328 types) or underlying type, go back to the original type. */
329 if (No (gnat_underlying
))
330 gnat_underlying
= gnat_type
;
332 /* If it there already a dummy type, use that one. Else make one. */
333 if (PRESENT_DUMMY_NODE (gnat_underlying
))
334 return GET_DUMMY_NODE (gnat_underlying
);
336 /* If this is a record, make a RECORD_TYPE or UNION_TYPE; else make
338 gnu_type
= make_node (Is_Record_Type (gnat_underlying
)
339 ? tree_code_for_record_type (gnat_underlying
)
341 TYPE_NAME (gnu_type
) = get_entity_name (gnat_type
);
342 TYPE_DUMMY_P (gnu_type
) = 1;
343 TYPE_STUB_DECL (gnu_type
)
344 = create_type_stub_decl (TYPE_NAME (gnu_type
), gnu_type
);
345 if (Is_By_Reference_Type (gnat_underlying
))
346 TYPE_BY_REFERENCE_P (gnu_type
) = 1;
348 SET_DUMMY_NODE (gnat_underlying
, gnu_type
);
353 /* Return the dummy type that was made for GNAT_TYPE, if any. */
356 get_dummy_type (Entity_Id gnat_type
)
358 return GET_DUMMY_NODE (gnat_type
);
361 /* Build dummy fat and thin pointer types whose designated type is specified
362 by GNAT_DESIG_TYPE/GNU_DESIG_TYPE and attach them to the latter. */
365 build_dummy_unc_pointer_types (Entity_Id gnat_desig_type
, tree gnu_desig_type
)
367 tree gnu_template_type
, gnu_ptr_template
, gnu_array_type
, gnu_ptr_array
;
368 tree gnu_fat_type
, fields
, gnu_object_type
;
370 gnu_template_type
= make_node (RECORD_TYPE
);
371 TYPE_NAME (gnu_template_type
) = create_concat_name (gnat_desig_type
, "XUB");
372 TYPE_DUMMY_P (gnu_template_type
) = 1;
373 gnu_ptr_template
= build_pointer_type (gnu_template_type
);
375 gnu_array_type
= make_node (ENUMERAL_TYPE
);
376 TYPE_NAME (gnu_array_type
) = create_concat_name (gnat_desig_type
, "XUA");
377 TYPE_DUMMY_P (gnu_array_type
) = 1;
378 gnu_ptr_array
= build_pointer_type (gnu_array_type
);
380 gnu_fat_type
= make_node (RECORD_TYPE
);
381 /* Build a stub DECL to trigger the special processing for fat pointer types
383 TYPE_NAME (gnu_fat_type
)
384 = create_type_stub_decl (create_concat_name (gnat_desig_type
, "XUP"),
386 fields
= create_field_decl (get_identifier ("P_ARRAY"), gnu_ptr_array
,
387 gnu_fat_type
, NULL_TREE
, NULL_TREE
, 0, 0);
389 = create_field_decl (get_identifier ("P_BOUNDS"), gnu_ptr_template
,
390 gnu_fat_type
, NULL_TREE
, NULL_TREE
, 0, 0);
391 finish_fat_pointer_type (gnu_fat_type
, fields
);
392 SET_TYPE_UNCONSTRAINED_ARRAY (gnu_fat_type
, gnu_desig_type
);
393 /* Suppress debug info until after the type is completed. */
394 TYPE_DECL_SUPPRESS_DEBUG (TYPE_STUB_DECL (gnu_fat_type
)) = 1;
396 gnu_object_type
= make_node (RECORD_TYPE
);
397 TYPE_NAME (gnu_object_type
) = create_concat_name (gnat_desig_type
, "XUT");
398 TYPE_DUMMY_P (gnu_object_type
) = 1;
400 TYPE_POINTER_TO (gnu_desig_type
) = gnu_fat_type
;
401 TYPE_OBJECT_RECORD_TYPE (gnu_desig_type
) = gnu_object_type
;
404 /* Return true if we are in the global binding level. */
407 global_bindings_p (void)
409 return force_global
|| current_function_decl
== NULL_TREE
;
412 /* Enter a new binding level. */
415 gnat_pushlevel (void)
417 struct gnat_binding_level
*newlevel
= NULL
;
419 /* Reuse a struct for this binding level, if there is one. */
420 if (free_binding_level
)
422 newlevel
= free_binding_level
;
423 free_binding_level
= free_binding_level
->chain
;
426 newlevel
= ggc_alloc_gnat_binding_level ();
428 /* Use a free BLOCK, if any; otherwise, allocate one. */
429 if (free_block_chain
)
431 newlevel
->block
= free_block_chain
;
432 free_block_chain
= BLOCK_CHAIN (free_block_chain
);
433 BLOCK_CHAIN (newlevel
->block
) = NULL_TREE
;
436 newlevel
->block
= make_node (BLOCK
);
438 /* Point the BLOCK we just made to its parent. */
439 if (current_binding_level
)
440 BLOCK_SUPERCONTEXT (newlevel
->block
) = current_binding_level
->block
;
442 BLOCK_VARS (newlevel
->block
) = NULL_TREE
;
443 BLOCK_SUBBLOCKS (newlevel
->block
) = NULL_TREE
;
444 TREE_USED (newlevel
->block
) = 1;
446 /* Add this level to the front of the chain (stack) of active levels. */
447 newlevel
->chain
= current_binding_level
;
448 newlevel
->jmpbuf_decl
= NULL_TREE
;
449 current_binding_level
= newlevel
;
452 /* Set SUPERCONTEXT of the BLOCK for the current binding level to FNDECL
453 and point FNDECL to this BLOCK. */
456 set_current_block_context (tree fndecl
)
458 BLOCK_SUPERCONTEXT (current_binding_level
->block
) = fndecl
;
459 DECL_INITIAL (fndecl
) = current_binding_level
->block
;
460 set_block_for_group (current_binding_level
->block
);
463 /* Set the jmpbuf_decl for the current binding level to DECL. */
466 set_block_jmpbuf_decl (tree decl
)
468 current_binding_level
->jmpbuf_decl
= decl
;
471 /* Get the jmpbuf_decl, if any, for the current binding level. */
474 get_block_jmpbuf_decl (void)
476 return current_binding_level
->jmpbuf_decl
;
479 /* Exit a binding level. Set any BLOCK into the current code group. */
484 struct gnat_binding_level
*level
= current_binding_level
;
485 tree block
= level
->block
;
487 BLOCK_VARS (block
) = nreverse (BLOCK_VARS (block
));
488 BLOCK_SUBBLOCKS (block
) = blocks_nreverse (BLOCK_SUBBLOCKS (block
));
490 /* If this is a function-level BLOCK don't do anything. Otherwise, if there
491 are no variables free the block and merge its subblocks into those of its
492 parent block. Otherwise, add it to the list of its parent. */
493 if (TREE_CODE (BLOCK_SUPERCONTEXT (block
)) == FUNCTION_DECL
)
495 else if (BLOCK_VARS (block
) == NULL_TREE
)
497 BLOCK_SUBBLOCKS (level
->chain
->block
)
498 = block_chainon (BLOCK_SUBBLOCKS (block
),
499 BLOCK_SUBBLOCKS (level
->chain
->block
));
500 BLOCK_CHAIN (block
) = free_block_chain
;
501 free_block_chain
= block
;
505 BLOCK_CHAIN (block
) = BLOCK_SUBBLOCKS (level
->chain
->block
);
506 BLOCK_SUBBLOCKS (level
->chain
->block
) = block
;
507 TREE_USED (block
) = 1;
508 set_block_for_group (block
);
511 /* Free this binding structure. */
512 current_binding_level
= level
->chain
;
513 level
->chain
= free_binding_level
;
514 free_binding_level
= level
;
517 /* Exit a binding level and discard the associated BLOCK. */
522 struct gnat_binding_level
*level
= current_binding_level
;
523 tree block
= level
->block
;
525 BLOCK_CHAIN (block
) = free_block_chain
;
526 free_block_chain
= block
;
528 /* Free this binding structure. */
529 current_binding_level
= level
->chain
;
530 level
->chain
= free_binding_level
;
531 free_binding_level
= level
;
534 /* Record DECL as belonging to the current lexical scope and use GNAT_NODE
535 for location information and flag propagation. */
538 gnat_pushdecl (tree decl
, Node_Id gnat_node
)
540 /* If DECL is public external or at top level, it has global context. */
541 if ((TREE_PUBLIC (decl
) && DECL_EXTERNAL (decl
)) || global_bindings_p ())
544 global_context
= build_translation_unit_decl (NULL_TREE
);
545 DECL_CONTEXT (decl
) = global_context
;
549 DECL_CONTEXT (decl
) = current_function_decl
;
551 /* Functions imported in another function are not really nested.
552 For really nested functions mark them initially as needing
553 a static chain for uses of that flag before unnesting;
554 lower_nested_functions will then recompute it. */
555 if (TREE_CODE (decl
) == FUNCTION_DECL
&& !TREE_PUBLIC (decl
))
556 DECL_STATIC_CHAIN (decl
) = 1;
559 TREE_NO_WARNING (decl
) = (No (gnat_node
) || Warnings_Off (gnat_node
));
561 /* Set the location of DECL and emit a declaration for it. */
562 if (Present (gnat_node
))
563 Sloc_to_locus (Sloc (gnat_node
), &DECL_SOURCE_LOCATION (decl
));
565 add_decl_expr (decl
, gnat_node
);
567 /* Put the declaration on the list. The list of declarations is in reverse
568 order. The list will be reversed later. Put global declarations in the
569 globals list and local ones in the current block. But skip TYPE_DECLs
570 for UNCONSTRAINED_ARRAY_TYPE in both cases, as they will cause trouble
571 with the debugger and aren't needed anyway. */
572 if (!(TREE_CODE (decl
) == TYPE_DECL
573 && TREE_CODE (TREE_TYPE (decl
)) == UNCONSTRAINED_ARRAY_TYPE
))
575 if (DECL_EXTERNAL (decl
))
577 if (TREE_CODE (decl
) == FUNCTION_DECL
&& DECL_BUILT_IN (decl
))
578 VEC_safe_push (tree
, gc
, builtin_decls
, decl
);
580 else if (global_bindings_p ())
581 VEC_safe_push (tree
, gc
, global_decls
, decl
);
584 DECL_CHAIN (decl
) = BLOCK_VARS (current_binding_level
->block
);
585 BLOCK_VARS (current_binding_level
->block
) = decl
;
589 /* For the declaration of a type, set its name if it either is not already
590 set or if the previous type name was not derived from a source name.
591 We'd rather have the type named with a real name and all the pointer
592 types to the same object have the same POINTER_TYPE node. Code in the
593 equivalent function of c-decl.c makes a copy of the type node here, but
594 that may cause us trouble with incomplete types. We make an exception
595 for fat pointer types because the compiler automatically builds them
596 for unconstrained array types and the debugger uses them to represent
597 both these and pointers to these. */
598 if (TREE_CODE (decl
) == TYPE_DECL
&& DECL_NAME (decl
))
600 tree t
= TREE_TYPE (decl
);
602 if (!(TYPE_NAME (t
) && TREE_CODE (TYPE_NAME (t
)) == TYPE_DECL
))
604 /* Array and pointer types aren't "tagged" types so we force the
605 type to be associated with its typedef in the DWARF back-end,
606 in order to make sure that the latter is always preserved. */
607 if (!DECL_ARTIFICIAL (decl
)
608 && (TREE_CODE (t
) == ARRAY_TYPE
609 || TREE_CODE (t
) == POINTER_TYPE
))
611 tree tt
= build_distinct_type_copy (t
);
612 if (TREE_CODE (t
) == POINTER_TYPE
)
613 TYPE_NEXT_PTR_TO (t
) = tt
;
614 TYPE_NAME (tt
) = DECL_NAME (decl
);
615 TYPE_CONTEXT (tt
) = DECL_CONTEXT (decl
);
616 TYPE_STUB_DECL (tt
) = TYPE_STUB_DECL (t
);
617 DECL_ORIGINAL_TYPE (decl
) = tt
;
620 else if (TYPE_IS_FAT_POINTER_P (t
))
622 /* We need a variant for the placeholder machinery to work. */
623 tree tt
= build_variant_type_copy (t
);
624 TYPE_NAME (tt
) = decl
;
625 TYPE_CONTEXT (tt
) = DECL_CONTEXT (decl
);
626 TREE_USED (tt
) = TREE_USED (t
);
627 TREE_TYPE (decl
) = tt
;
628 if (DECL_ORIGINAL_TYPE (TYPE_NAME (t
)))
629 DECL_ORIGINAL_TYPE (decl
) = DECL_ORIGINAL_TYPE (TYPE_NAME (t
));
631 DECL_ORIGINAL_TYPE (decl
) = t
;
632 DECL_ARTIFICIAL (decl
) = 0;
635 else if (DECL_ARTIFICIAL (TYPE_NAME (t
)) && !DECL_ARTIFICIAL (decl
))
640 /* Propagate the name to all the anonymous variants. This is needed
641 for the type qualifiers machinery to work properly. */
643 for (t
= TYPE_MAIN_VARIANT (t
); t
; t
= TYPE_NEXT_VARIANT (t
))
644 if (!(TYPE_NAME (t
) && TREE_CODE (TYPE_NAME (t
)) == TYPE_DECL
))
646 TYPE_NAME (t
) = decl
;
647 TYPE_CONTEXT (t
) = DECL_CONTEXT (decl
);
652 /* Create a record type that contains a SIZE bytes long field of TYPE with a
653 starting bit position so that it is aligned to ALIGN bits, and leaving at
654 least ROOM bytes free before the field. BASE_ALIGN is the alignment the
655 record is guaranteed to get. */
658 make_aligning_type (tree type
, unsigned int align
, tree size
,
659 unsigned int base_align
, int room
)
661 /* We will be crafting a record type with one field at a position set to be
662 the next multiple of ALIGN past record'address + room bytes. We use a
663 record placeholder to express record'address. */
664 tree record_type
= make_node (RECORD_TYPE
);
665 tree record
= build0 (PLACEHOLDER_EXPR
, record_type
);
668 = convert (sizetype
, build_unary_op (ADDR_EXPR
, NULL_TREE
, record
));
670 /* The diagram below summarizes the shape of what we manipulate:
672 <--------- pos ---------->
673 { +------------+-------------+-----------------+
674 record =>{ |############| ... | field (type) |
675 { +------------+-------------+-----------------+
676 |<-- room -->|<- voffset ->|<---- size ----->|
679 record_addr vblock_addr
681 Every length is in sizetype bytes there, except "pos" which has to be
682 set as a bit position in the GCC tree for the record. */
683 tree room_st
= size_int (room
);
684 tree vblock_addr_st
= size_binop (PLUS_EXPR
, record_addr_st
, room_st
);
685 tree voffset_st
, pos
, field
;
687 tree name
= TYPE_NAME (type
);
689 if (TREE_CODE (name
) == TYPE_DECL
)
690 name
= DECL_NAME (name
);
691 name
= concat_name (name
, "ALIGN");
692 TYPE_NAME (record_type
) = name
;
694 /* Compute VOFFSET and then POS. The next byte position multiple of some
695 alignment after some address is obtained by "and"ing the alignment minus
696 1 with the two's complement of the address. */
697 voffset_st
= size_binop (BIT_AND_EXPR
,
698 fold_build1 (NEGATE_EXPR
, sizetype
, vblock_addr_st
),
699 size_int ((align
/ BITS_PER_UNIT
) - 1));
701 /* POS = (ROOM + VOFFSET) * BIT_PER_UNIT, in bitsizetype. */
702 pos
= size_binop (MULT_EXPR
,
703 convert (bitsizetype
,
704 size_binop (PLUS_EXPR
, room_st
, voffset_st
)),
707 /* Craft the GCC record representation. We exceptionally do everything
708 manually here because 1) our generic circuitry is not quite ready to
709 handle the complex position/size expressions we are setting up, 2) we
710 have a strong simplifying factor at hand: we know the maximum possible
711 value of voffset, and 3) we have to set/reset at least the sizes in
712 accordance with this maximum value anyway, as we need them to convey
713 what should be "alloc"ated for this type.
715 Use -1 as the 'addressable' indication for the field to prevent the
716 creation of a bitfield. We don't need one, it would have damaging
717 consequences on the alignment computation, and create_field_decl would
718 make one without this special argument, for instance because of the
719 complex position expression. */
720 field
= create_field_decl (get_identifier ("F"), type
, record_type
, size
,
722 TYPE_FIELDS (record_type
) = field
;
724 TYPE_ALIGN (record_type
) = base_align
;
725 TYPE_USER_ALIGN (record_type
) = 1;
727 TYPE_SIZE (record_type
)
728 = size_binop (PLUS_EXPR
,
729 size_binop (MULT_EXPR
, convert (bitsizetype
, size
),
731 bitsize_int (align
+ room
* BITS_PER_UNIT
));
732 TYPE_SIZE_UNIT (record_type
)
733 = size_binop (PLUS_EXPR
, size
,
734 size_int (room
+ align
/ BITS_PER_UNIT
));
736 SET_TYPE_MODE (record_type
, BLKmode
);
737 relate_alias_sets (record_type
, type
, ALIAS_SET_COPY
);
739 /* Declare it now since it will never be declared otherwise. This is
740 necessary to ensure that its subtrees are properly marked. */
741 create_type_decl (name
, record_type
, NULL
, true, false, Empty
);
746 /* TYPE is a RECORD_TYPE, UNION_TYPE or QUAL_UNION_TYPE that is being used
747 as the field type of a packed record if IN_RECORD is true, or as the
748 component type of a packed array if IN_RECORD is false. See if we can
749 rewrite it either as a type that has a non-BLKmode, which we can pack
750 tighter in the packed record case, or as a smaller type. If so, return
751 the new type. If not, return the original type. */
754 make_packable_type (tree type
, bool in_record
)
756 unsigned HOST_WIDE_INT size
= tree_low_cst (TYPE_SIZE (type
), 1);
757 unsigned HOST_WIDE_INT new_size
;
758 tree new_type
, old_field
, field_list
= NULL_TREE
;
761 /* No point in doing anything if the size is zero. */
765 new_type
= make_node (TREE_CODE (type
));
767 /* Copy the name and flags from the old type to that of the new.
768 Note that we rely on the pointer equality created here for
769 TYPE_NAME to look through conversions in various places. */
770 TYPE_NAME (new_type
) = TYPE_NAME (type
);
771 TYPE_JUSTIFIED_MODULAR_P (new_type
) = TYPE_JUSTIFIED_MODULAR_P (type
);
772 TYPE_CONTAINS_TEMPLATE_P (new_type
) = TYPE_CONTAINS_TEMPLATE_P (type
);
773 if (TREE_CODE (type
) == RECORD_TYPE
)
774 TYPE_PADDING_P (new_type
) = TYPE_PADDING_P (type
);
776 /* If we are in a record and have a small size, set the alignment to
777 try for an integral mode. Otherwise set it to try for a smaller
778 type with BLKmode. */
779 if (in_record
&& size
<= MAX_FIXED_MODE_SIZE
)
781 align
= ceil_pow2 (size
);
782 TYPE_ALIGN (new_type
) = align
;
783 new_size
= (size
+ align
- 1) & -align
;
787 unsigned HOST_WIDE_INT align
;
789 /* Do not try to shrink the size if the RM size is not constant. */
790 if (TYPE_CONTAINS_TEMPLATE_P (type
)
791 || !host_integerp (TYPE_ADA_SIZE (type
), 1))
794 /* Round the RM size up to a unit boundary to get the minimal size
795 for a BLKmode record. Give up if it's already the size. */
796 new_size
= TREE_INT_CST_LOW (TYPE_ADA_SIZE (type
));
797 new_size
= (new_size
+ BITS_PER_UNIT
- 1) & -BITS_PER_UNIT
;
798 if (new_size
== size
)
801 align
= new_size
& -new_size
;
802 TYPE_ALIGN (new_type
) = MIN (TYPE_ALIGN (type
), align
);
805 TYPE_USER_ALIGN (new_type
) = 1;
807 /* Now copy the fields, keeping the position and size as we don't want
808 to change the layout by propagating the packedness downwards. */
809 for (old_field
= TYPE_FIELDS (type
); old_field
;
810 old_field
= DECL_CHAIN (old_field
))
812 tree new_field_type
= TREE_TYPE (old_field
);
813 tree new_field
, new_size
;
815 if (RECORD_OR_UNION_TYPE_P (new_field_type
)
816 && !TYPE_FAT_POINTER_P (new_field_type
)
817 && host_integerp (TYPE_SIZE (new_field_type
), 1))
818 new_field_type
= make_packable_type (new_field_type
, true);
820 /* However, for the last field in a not already packed record type
821 that is of an aggregate type, we need to use the RM size in the
822 packable version of the record type, see finish_record_type. */
823 if (!DECL_CHAIN (old_field
)
824 && !TYPE_PACKED (type
)
825 && RECORD_OR_UNION_TYPE_P (new_field_type
)
826 && !TYPE_FAT_POINTER_P (new_field_type
)
827 && !TYPE_CONTAINS_TEMPLATE_P (new_field_type
)
828 && TYPE_ADA_SIZE (new_field_type
))
829 new_size
= TYPE_ADA_SIZE (new_field_type
);
831 new_size
= DECL_SIZE (old_field
);
834 = create_field_decl (DECL_NAME (old_field
), new_field_type
, new_type
,
835 new_size
, bit_position (old_field
),
837 !DECL_NONADDRESSABLE_P (old_field
));
839 DECL_INTERNAL_P (new_field
) = DECL_INTERNAL_P (old_field
);
840 SET_DECL_ORIGINAL_FIELD_TO_FIELD (new_field
, old_field
);
841 if (TREE_CODE (new_type
) == QUAL_UNION_TYPE
)
842 DECL_QUALIFIER (new_field
) = DECL_QUALIFIER (old_field
);
844 DECL_CHAIN (new_field
) = field_list
;
845 field_list
= new_field
;
848 finish_record_type (new_type
, nreverse (field_list
), 2, false);
849 relate_alias_sets (new_type
, type
, ALIAS_SET_COPY
);
850 SET_DECL_PARALLEL_TYPE (TYPE_STUB_DECL (new_type
),
851 DECL_PARALLEL_TYPE (TYPE_STUB_DECL (type
)));
853 /* If this is a padding record, we never want to make the size smaller
854 than what was specified. For QUAL_UNION_TYPE, also copy the size. */
855 if (TYPE_IS_PADDING_P (type
) || TREE_CODE (type
) == QUAL_UNION_TYPE
)
857 TYPE_SIZE (new_type
) = TYPE_SIZE (type
);
858 TYPE_SIZE_UNIT (new_type
) = TYPE_SIZE_UNIT (type
);
863 TYPE_SIZE (new_type
) = bitsize_int (new_size
);
864 TYPE_SIZE_UNIT (new_type
)
865 = size_int ((new_size
+ BITS_PER_UNIT
- 1) / BITS_PER_UNIT
);
868 if (!TYPE_CONTAINS_TEMPLATE_P (type
))
869 SET_TYPE_ADA_SIZE (new_type
, TYPE_ADA_SIZE (type
));
871 compute_record_mode (new_type
);
873 /* Try harder to get a packable type if necessary, for example
874 in case the record itself contains a BLKmode field. */
875 if (in_record
&& TYPE_MODE (new_type
) == BLKmode
)
876 SET_TYPE_MODE (new_type
,
877 mode_for_size_tree (TYPE_SIZE (new_type
), MODE_INT
, 1));
879 /* If neither the mode nor the size has shrunk, return the old type. */
880 if (TYPE_MODE (new_type
) == BLKmode
&& new_size
>= size
)
886 /* Given a type TYPE, return a new type whose size is appropriate for SIZE.
887 If TYPE is the best type, return it. Otherwise, make a new type. We
888 only support new integral and pointer types. FOR_BIASED is true if
889 we are making a biased type. */
892 make_type_from_size (tree type
, tree size_tree
, bool for_biased
)
894 unsigned HOST_WIDE_INT size
;
898 /* If size indicates an error, just return TYPE to avoid propagating
899 the error. Likewise if it's too large to represent. */
900 if (!size_tree
|| !host_integerp (size_tree
, 1))
903 size
= tree_low_cst (size_tree
, 1);
905 switch (TREE_CODE (type
))
910 biased_p
= (TREE_CODE (type
) == INTEGER_TYPE
911 && TYPE_BIASED_REPRESENTATION_P (type
));
913 /* Integer types with precision 0 are forbidden. */
917 /* Only do something if the type isn't a packed array type and doesn't
918 already have the proper size and the size isn't too large. */
919 if (TYPE_IS_PACKED_ARRAY_TYPE_P (type
)
920 || (TYPE_PRECISION (type
) == size
&& biased_p
== for_biased
)
921 || size
> LONG_LONG_TYPE_SIZE
)
924 biased_p
|= for_biased
;
925 if (TYPE_UNSIGNED (type
) || biased_p
)
926 new_type
= make_unsigned_type (size
);
928 new_type
= make_signed_type (size
);
929 TREE_TYPE (new_type
) = TREE_TYPE (type
) ? TREE_TYPE (type
) : type
;
930 SET_TYPE_RM_MIN_VALUE (new_type
,
931 convert (TREE_TYPE (new_type
),
932 TYPE_MIN_VALUE (type
)));
933 SET_TYPE_RM_MAX_VALUE (new_type
,
934 convert (TREE_TYPE (new_type
),
935 TYPE_MAX_VALUE (type
)));
936 /* Copy the name to show that it's essentially the same type and
937 not a subrange type. */
938 TYPE_NAME (new_type
) = TYPE_NAME (type
);
939 TYPE_BIASED_REPRESENTATION_P (new_type
) = biased_p
;
940 SET_TYPE_RM_SIZE (new_type
, bitsize_int (size
));
944 /* Do something if this is a fat pointer, in which case we
945 may need to return the thin pointer. */
946 if (TYPE_FAT_POINTER_P (type
) && size
< POINTER_SIZE
* 2)
948 enum machine_mode p_mode
= mode_for_size (size
, MODE_INT
, 0);
949 if (!targetm
.valid_pointer_mode (p_mode
))
952 build_pointer_type_for_mode
953 (TYPE_OBJECT_RECORD_TYPE (TYPE_UNCONSTRAINED_ARRAY (type
)),
959 /* Only do something if this is a thin pointer, in which case we
960 may need to return the fat pointer. */
961 if (TYPE_IS_THIN_POINTER_P (type
) && size
>= POINTER_SIZE
* 2)
963 build_pointer_type (TYPE_UNCONSTRAINED_ARRAY (TREE_TYPE (type
)));
973 /* See if the data pointed to by the hash table slot is marked. */
976 pad_type_hash_marked_p (const void *p
)
978 const_tree
const type
= ((const struct pad_type_hash
*) p
)->type
;
980 return ggc_marked_p (type
);
983 /* Return the cached hash value. */
986 pad_type_hash_hash (const void *p
)
988 return ((const struct pad_type_hash
*) p
)->hash
;
991 /* Return 1 iff the padded types are equivalent. */
994 pad_type_hash_eq (const void *p1
, const void *p2
)
996 const struct pad_type_hash
*const t1
= (const struct pad_type_hash
*) p1
;
997 const struct pad_type_hash
*const t2
= (const struct pad_type_hash
*) p2
;
1000 if (t1
->hash
!= t2
->hash
)
1006 /* We consider that the padded types are equivalent if they pad the same
1007 type and have the same size, alignment and RM size. Taking the mode
1008 into account is redundant since it is determined by the others. */
1010 TREE_TYPE (TYPE_FIELDS (type1
)) == TREE_TYPE (TYPE_FIELDS (type2
))
1011 && TYPE_SIZE (type1
) == TYPE_SIZE (type2
)
1012 && TYPE_ALIGN (type1
) == TYPE_ALIGN (type2
)
1013 && TYPE_ADA_SIZE (type1
) == TYPE_ADA_SIZE (type2
);
1016 /* Ensure that TYPE has SIZE and ALIGN. Make and return a new padded type
1017 if needed. We have already verified that SIZE and TYPE are large enough.
1018 GNAT_ENTITY is used to name the resulting record and to issue a warning.
1019 IS_COMPONENT_TYPE is true if this is being done for the component type of
1020 an array. IS_USER_TYPE is true if the original type needs to be completed.
1021 DEFINITION is true if this type is being defined. SET_RM_SIZE is true if
1022 the RM size of the resulting type is to be set to SIZE too. */
1025 maybe_pad_type (tree type
, tree size
, unsigned int align
,
1026 Entity_Id gnat_entity
, bool is_component_type
,
1027 bool is_user_type
, bool definition
, bool set_rm_size
)
1029 tree orig_size
= TYPE_SIZE (type
);
1032 /* If TYPE is a padded type, see if it agrees with any size and alignment
1033 we were given. If so, return the original type. Otherwise, strip
1034 off the padding, since we will either be returning the inner type
1035 or repadding it. If no size or alignment is specified, use that of
1036 the original padded type. */
1037 if (TYPE_IS_PADDING_P (type
))
1040 || operand_equal_p (round_up (size
,
1041 MAX (align
, TYPE_ALIGN (type
))),
1042 round_up (TYPE_SIZE (type
),
1043 MAX (align
, TYPE_ALIGN (type
))),
1045 && (align
== 0 || align
== TYPE_ALIGN (type
)))
1049 size
= TYPE_SIZE (type
);
1051 align
= TYPE_ALIGN (type
);
1053 type
= TREE_TYPE (TYPE_FIELDS (type
));
1054 orig_size
= TYPE_SIZE (type
);
1057 /* If the size is either not being changed or is being made smaller (which
1058 is not done here and is only valid for bitfields anyway), show the size
1059 isn't changing. Likewise, clear the alignment if it isn't being
1060 changed. Then return if we aren't doing anything. */
1062 && (operand_equal_p (size
, orig_size
, 0)
1063 || (TREE_CODE (orig_size
) == INTEGER_CST
1064 && tree_int_cst_lt (size
, orig_size
))))
1067 if (align
== TYPE_ALIGN (type
))
1070 if (align
== 0 && !size
)
1073 /* If requested, complete the original type and give it a name. */
1075 create_type_decl (get_entity_name (gnat_entity
), type
,
1076 NULL
, !Comes_From_Source (gnat_entity
),
1078 && TREE_CODE (TYPE_NAME (type
)) == TYPE_DECL
1079 && DECL_IGNORED_P (TYPE_NAME (type
))),
1082 /* We used to modify the record in place in some cases, but that could
1083 generate incorrect debugging information. So make a new record
1085 record
= make_node (RECORD_TYPE
);
1086 TYPE_PADDING_P (record
) = 1;
1088 if (Present (gnat_entity
))
1089 TYPE_NAME (record
) = create_concat_name (gnat_entity
, "PAD");
1091 TYPE_ALIGN (record
) = align
;
1092 TYPE_SIZE (record
) = size
? size
: orig_size
;
1093 TYPE_SIZE_UNIT (record
)
1094 = convert (sizetype
,
1095 size_binop (CEIL_DIV_EXPR
, TYPE_SIZE (record
),
1096 bitsize_unit_node
));
1098 /* If we are changing the alignment and the input type is a record with
1099 BLKmode and a small constant size, try to make a form that has an
1100 integral mode. This might allow the padding record to also have an
1101 integral mode, which will be much more efficient. There is no point
1102 in doing so if a size is specified unless it is also a small constant
1103 size and it is incorrect to do so if we cannot guarantee that the mode
1104 will be naturally aligned since the field must always be addressable.
1106 ??? This might not always be a win when done for a stand-alone object:
1107 since the nominal and the effective type of the object will now have
1108 different modes, a VIEW_CONVERT_EXPR will be required for converting
1109 between them and it might be hard to overcome afterwards, including
1110 at the RTL level when the stand-alone object is accessed as a whole. */
1112 && RECORD_OR_UNION_TYPE_P (type
)
1113 && TYPE_MODE (type
) == BLKmode
1114 && !TYPE_BY_REFERENCE_P (type
)
1115 && TREE_CODE (orig_size
) == INTEGER_CST
1116 && !TREE_OVERFLOW (orig_size
)
1117 && compare_tree_int (orig_size
, MAX_FIXED_MODE_SIZE
) <= 0
1119 || (TREE_CODE (size
) == INTEGER_CST
1120 && compare_tree_int (size
, MAX_FIXED_MODE_SIZE
) <= 0)))
1122 tree packable_type
= make_packable_type (type
, true);
1123 if (TYPE_MODE (packable_type
) != BLKmode
1124 && align
>= TYPE_ALIGN (packable_type
))
1125 type
= packable_type
;
1128 /* Now create the field with the original size. */
1129 field
= create_field_decl (get_identifier ("F"), type
, record
, orig_size
,
1130 bitsize_zero_node
, 0, 1);
1131 DECL_INTERNAL_P (field
) = 1;
1133 /* Do not emit debug info until after the auxiliary record is built. */
1134 finish_record_type (record
, field
, 1, false);
1136 /* Set the RM size if requested. */
1139 SET_TYPE_ADA_SIZE (record
, size
? size
: orig_size
);
1141 /* If the padded type is complete and has constant size, we canonicalize
1142 it by means of the hash table. This is consistent with the language
1143 semantics and ensures that gigi and the middle-end have a common view
1144 of these padded types. */
1145 if (TREE_CONSTANT (TYPE_SIZE (record
)))
1148 struct pad_type_hash in
, *h
;
1151 hashcode
= iterative_hash_object (TYPE_HASH (type
), 0);
1152 hashcode
= iterative_hash_expr (TYPE_SIZE (record
), hashcode
);
1153 hashcode
= iterative_hash_hashval_t (TYPE_ALIGN (record
), hashcode
);
1154 hashcode
= iterative_hash_expr (TYPE_ADA_SIZE (record
), hashcode
);
1158 h
= (struct pad_type_hash
*)
1159 htab_find_with_hash (pad_type_hash_table
, &in
, hashcode
);
1166 h
= ggc_alloc_pad_type_hash ();
1169 loc
= htab_find_slot_with_hash (pad_type_hash_table
, h
, hashcode
,
1175 /* Unless debugging information isn't being written for the input type,
1176 write a record that shows what we are a subtype of and also make a
1177 variable that indicates our size, if still variable. */
1178 if (TREE_CODE (orig_size
) != INTEGER_CST
1179 && TYPE_NAME (record
)
1181 && !(TREE_CODE (TYPE_NAME (type
)) == TYPE_DECL
1182 && DECL_IGNORED_P (TYPE_NAME (type
))))
1184 tree marker
= make_node (RECORD_TYPE
);
1185 tree name
= TYPE_NAME (record
);
1186 tree orig_name
= TYPE_NAME (type
);
1188 if (TREE_CODE (name
) == TYPE_DECL
)
1189 name
= DECL_NAME (name
);
1191 if (TREE_CODE (orig_name
) == TYPE_DECL
)
1192 orig_name
= DECL_NAME (orig_name
);
1194 TYPE_NAME (marker
) = concat_name (name
, "XVS");
1195 finish_record_type (marker
,
1196 create_field_decl (orig_name
,
1197 build_reference_type (type
),
1198 marker
, NULL_TREE
, NULL_TREE
,
1202 add_parallel_type (record
, marker
);
1204 if (definition
&& size
&& TREE_CODE (size
) != INTEGER_CST
)
1205 TYPE_SIZE_UNIT (marker
)
1206 = create_var_decl (concat_name (name
, "XVZ"), NULL_TREE
, sizetype
,
1207 TYPE_SIZE_UNIT (record
), false, false, false,
1208 false, NULL
, gnat_entity
);
1211 rest_of_record_type_compilation (record
);
1214 /* If the size was widened explicitly, maybe give a warning. Take the
1215 original size as the maximum size of the input if there was an
1216 unconstrained record involved and round it up to the specified alignment,
1217 if one was specified. But don't do it if we are just annotating types
1218 and the type is tagged, since tagged types aren't fully laid out in this
1221 || TREE_CODE (size
) == COND_EXPR
1222 || TREE_CODE (size
) == MAX_EXPR
1224 || (type_annotate_only
&& Is_Tagged_Type (Etype (gnat_entity
))))
1227 if (CONTAINS_PLACEHOLDER_P (orig_size
))
1228 orig_size
= max_size (orig_size
, true);
1231 orig_size
= round_up (orig_size
, align
);
1233 if (!operand_equal_p (size
, orig_size
, 0)
1234 && !(TREE_CODE (size
) == INTEGER_CST
1235 && TREE_CODE (orig_size
) == INTEGER_CST
1236 && (TREE_OVERFLOW (size
)
1237 || TREE_OVERFLOW (orig_size
)
1238 || tree_int_cst_lt (size
, orig_size
))))
1240 Node_Id gnat_error_node
= Empty
;
1242 if (Is_Packed_Array_Type (gnat_entity
))
1243 gnat_entity
= Original_Array_Type (gnat_entity
);
1245 if ((Ekind (gnat_entity
) == E_Component
1246 || Ekind (gnat_entity
) == E_Discriminant
)
1247 && Present (Component_Clause (gnat_entity
)))
1248 gnat_error_node
= Last_Bit (Component_Clause (gnat_entity
));
1249 else if (Present (Size_Clause (gnat_entity
)))
1250 gnat_error_node
= Expression (Size_Clause (gnat_entity
));
1252 /* Generate message only for entities that come from source, since
1253 if we have an entity created by expansion, the message will be
1254 generated for some other corresponding source entity. */
1255 if (Comes_From_Source (gnat_entity
))
1257 if (Present (gnat_error_node
))
1258 post_error_ne_tree ("{^ }bits of & unused?",
1259 gnat_error_node
, gnat_entity
,
1260 size_diffop (size
, orig_size
));
1261 else if (is_component_type
)
1262 post_error_ne_tree ("component of& padded{ by ^ bits}?",
1263 gnat_entity
, gnat_entity
,
1264 size_diffop (size
, orig_size
));
1271 /* Relate the alias sets of GNU_NEW_TYPE and GNU_OLD_TYPE according to OP.
1272 If this is a multi-dimensional array type, do this recursively.
1275 - ALIAS_SET_COPY: the new set is made a copy of the old one.
1276 - ALIAS_SET_SUPERSET: the new set is made a superset of the old one.
1277 - ALIAS_SET_SUBSET: the new set is made a subset of the old one. */
1280 relate_alias_sets (tree gnu_new_type
, tree gnu_old_type
, enum alias_set_op op
)
1282 /* Remove any padding from GNU_OLD_TYPE. It doesn't matter in the case
1283 of a one-dimensional array, since the padding has the same alias set
1284 as the field type, but if it's a multi-dimensional array, we need to
1285 see the inner types. */
1286 while (TREE_CODE (gnu_old_type
) == RECORD_TYPE
1287 && (TYPE_JUSTIFIED_MODULAR_P (gnu_old_type
)
1288 || TYPE_PADDING_P (gnu_old_type
)))
1289 gnu_old_type
= TREE_TYPE (TYPE_FIELDS (gnu_old_type
));
1291 /* Unconstrained array types are deemed incomplete and would thus be given
1292 alias set 0. Retrieve the underlying array type. */
1293 if (TREE_CODE (gnu_old_type
) == UNCONSTRAINED_ARRAY_TYPE
)
1295 = TREE_TYPE (TREE_TYPE (TYPE_FIELDS (TREE_TYPE (gnu_old_type
))));
1296 if (TREE_CODE (gnu_new_type
) == UNCONSTRAINED_ARRAY_TYPE
)
1298 = TREE_TYPE (TREE_TYPE (TYPE_FIELDS (TREE_TYPE (gnu_new_type
))));
1300 if (TREE_CODE (gnu_new_type
) == ARRAY_TYPE
1301 && TREE_CODE (TREE_TYPE (gnu_new_type
)) == ARRAY_TYPE
1302 && TYPE_MULTI_ARRAY_P (TREE_TYPE (gnu_new_type
)))
1303 relate_alias_sets (TREE_TYPE (gnu_new_type
), TREE_TYPE (gnu_old_type
), op
);
1307 case ALIAS_SET_COPY
:
1308 /* The alias set shouldn't be copied between array types with different
1309 aliasing settings because this can break the aliasing relationship
1310 between the array type and its element type. */
1311 #ifndef ENABLE_CHECKING
1312 if (flag_strict_aliasing
)
1314 gcc_assert (!(TREE_CODE (gnu_new_type
) == ARRAY_TYPE
1315 && TREE_CODE (gnu_old_type
) == ARRAY_TYPE
1316 && TYPE_NONALIASED_COMPONENT (gnu_new_type
)
1317 != TYPE_NONALIASED_COMPONENT (gnu_old_type
)));
1319 TYPE_ALIAS_SET (gnu_new_type
) = get_alias_set (gnu_old_type
);
1322 case ALIAS_SET_SUBSET
:
1323 case ALIAS_SET_SUPERSET
:
1325 alias_set_type old_set
= get_alias_set (gnu_old_type
);
1326 alias_set_type new_set
= get_alias_set (gnu_new_type
);
1328 /* Do nothing if the alias sets conflict. This ensures that we
1329 never call record_alias_subset several times for the same pair
1330 or at all for alias set 0. */
1331 if (!alias_sets_conflict_p (old_set
, new_set
))
1333 if (op
== ALIAS_SET_SUBSET
)
1334 record_alias_subset (old_set
, new_set
);
1336 record_alias_subset (new_set
, old_set
);
1345 record_component_aliases (gnu_new_type
);
1348 /* Record TYPE as a builtin type for Ada. NAME is the name of the type.
1349 ARTIFICIAL_P is true if it's a type that was generated by the compiler. */
1352 record_builtin_type (const char *name
, tree type
, bool artificial_p
)
1354 tree type_decl
= build_decl (input_location
,
1355 TYPE_DECL
, get_identifier (name
), type
);
1356 DECL_ARTIFICIAL (type_decl
) = artificial_p
;
1357 TYPE_ARTIFICIAL (type
) = artificial_p
;
1358 gnat_pushdecl (type_decl
, Empty
);
1360 if (debug_hooks
->type_decl
)
1361 debug_hooks
->type_decl (type_decl
, false);
1364 /* Given a record type RECORD_TYPE and a list of FIELD_DECL nodes FIELD_LIST,
1365 finish constructing the record type as a fat pointer type. */
1368 finish_fat_pointer_type (tree record_type
, tree field_list
)
1370 /* Make sure we can put it into a register. */
1371 TYPE_ALIGN (record_type
) = MIN (BIGGEST_ALIGNMENT
, 2 * POINTER_SIZE
);
1373 /* Show what it really is. */
1374 TYPE_FAT_POINTER_P (record_type
) = 1;
1376 /* Do not emit debug info for it since the types of its fields may still be
1377 incomplete at this point. */
1378 finish_record_type (record_type
, field_list
, 0, false);
1380 /* Force type_contains_placeholder_p to return true on it. Although the
1381 PLACEHOLDER_EXPRs are referenced only indirectly, this isn't a pointer
1382 type but the representation of the unconstrained array. */
1383 TYPE_CONTAINS_PLACEHOLDER_INTERNAL (record_type
) = 2;
1386 /* Given a record type RECORD_TYPE and a list of FIELD_DECL nodes FIELD_LIST,
1387 finish constructing the record or union type. If REP_LEVEL is zero, this
1388 record has no representation clause and so will be entirely laid out here.
1389 If REP_LEVEL is one, this record has a representation clause and has been
1390 laid out already; only set the sizes and alignment. If REP_LEVEL is two,
1391 this record is derived from a parent record and thus inherits its layout;
1392 only make a pass on the fields to finalize them. DEBUG_INFO_P is true if
1393 we need to write debug information about this type. */
1396 finish_record_type (tree record_type
, tree field_list
, int rep_level
,
1399 enum tree_code code
= TREE_CODE (record_type
);
1400 tree name
= TYPE_NAME (record_type
);
1401 tree ada_size
= bitsize_zero_node
;
1402 tree size
= bitsize_zero_node
;
1403 bool had_size
= TYPE_SIZE (record_type
) != 0;
1404 bool had_size_unit
= TYPE_SIZE_UNIT (record_type
) != 0;
1405 bool had_align
= TYPE_ALIGN (record_type
) != 0;
1408 TYPE_FIELDS (record_type
) = field_list
;
1410 /* Always attach the TYPE_STUB_DECL for a record type. It is required to
1411 generate debug info and have a parallel type. */
1412 if (name
&& TREE_CODE (name
) == TYPE_DECL
)
1413 name
= DECL_NAME (name
);
1414 TYPE_STUB_DECL (record_type
) = create_type_stub_decl (name
, record_type
);
1416 /* Globally initialize the record first. If this is a rep'ed record,
1417 that just means some initializations; otherwise, layout the record. */
1420 TYPE_ALIGN (record_type
) = MAX (BITS_PER_UNIT
, TYPE_ALIGN (record_type
));
1423 TYPE_SIZE_UNIT (record_type
) = size_zero_node
;
1426 TYPE_SIZE (record_type
) = bitsize_zero_node
;
1428 /* For all-repped records with a size specified, lay the QUAL_UNION_TYPE
1429 out just like a UNION_TYPE, since the size will be fixed. */
1430 else if (code
== QUAL_UNION_TYPE
)
1435 /* Ensure there isn't a size already set. There can be in an error
1436 case where there is a rep clause but all fields have errors and
1437 no longer have a position. */
1438 TYPE_SIZE (record_type
) = 0;
1440 /* Ensure we use the traditional GCC layout for bitfields when we need
1441 to pack the record type or have a representation clause. The other
1442 possible layout (Microsoft C compiler), if available, would prevent
1443 efficient packing in almost all cases. */
1444 #ifdef TARGET_MS_BITFIELD_LAYOUT
1445 if (TARGET_MS_BITFIELD_LAYOUT
&& TYPE_PACKED (record_type
))
1446 decl_attributes (&record_type
,
1447 tree_cons (get_identifier ("gcc_struct"),
1448 NULL_TREE
, NULL_TREE
),
1449 ATTR_FLAG_TYPE_IN_PLACE
);
1452 layout_type (record_type
);
1455 /* At this point, the position and size of each field is known. It was
1456 either set before entry by a rep clause, or by laying out the type above.
1458 We now run a pass over the fields (in reverse order for QUAL_UNION_TYPEs)
1459 to compute the Ada size; the GCC size and alignment (for rep'ed records
1460 that are not padding types); and the mode (for rep'ed records). We also
1461 clear the DECL_BIT_FIELD indication for the cases we know have not been
1462 handled yet, and adjust DECL_NONADDRESSABLE_P accordingly. */
1464 if (code
== QUAL_UNION_TYPE
)
1465 field_list
= nreverse (field_list
);
1467 for (field
= field_list
; field
; field
= DECL_CHAIN (field
))
1469 tree type
= TREE_TYPE (field
);
1470 tree pos
= bit_position (field
);
1471 tree this_size
= DECL_SIZE (field
);
1474 if (RECORD_OR_UNION_TYPE_P (type
)
1475 && !TYPE_FAT_POINTER_P (type
)
1476 && !TYPE_CONTAINS_TEMPLATE_P (type
)
1477 && TYPE_ADA_SIZE (type
))
1478 this_ada_size
= TYPE_ADA_SIZE (type
);
1480 this_ada_size
= this_size
;
1482 /* Clear DECL_BIT_FIELD for the cases layout_decl does not handle. */
1483 if (DECL_BIT_FIELD (field
)
1484 && operand_equal_p (this_size
, TYPE_SIZE (type
), 0))
1486 unsigned int align
= TYPE_ALIGN (type
);
1488 /* In the general case, type alignment is required. */
1489 if (value_factor_p (pos
, align
))
1491 /* The enclosing record type must be sufficiently aligned.
1492 Otherwise, if no alignment was specified for it and it
1493 has been laid out already, bump its alignment to the
1494 desired one if this is compatible with its size. */
1495 if (TYPE_ALIGN (record_type
) >= align
)
1497 DECL_ALIGN (field
) = MAX (DECL_ALIGN (field
), align
);
1498 DECL_BIT_FIELD (field
) = 0;
1502 && value_factor_p (TYPE_SIZE (record_type
), align
))
1504 TYPE_ALIGN (record_type
) = align
;
1505 DECL_ALIGN (field
) = MAX (DECL_ALIGN (field
), align
);
1506 DECL_BIT_FIELD (field
) = 0;
1510 /* In the non-strict alignment case, only byte alignment is. */
1511 if (!STRICT_ALIGNMENT
1512 && DECL_BIT_FIELD (field
)
1513 && value_factor_p (pos
, BITS_PER_UNIT
))
1514 DECL_BIT_FIELD (field
) = 0;
1517 /* If we still have DECL_BIT_FIELD set at this point, we know that the
1518 field is technically not addressable. Except that it can actually
1519 be addressed if it is BLKmode and happens to be properly aligned. */
1520 if (DECL_BIT_FIELD (field
)
1521 && !(DECL_MODE (field
) == BLKmode
1522 && value_factor_p (pos
, BITS_PER_UNIT
)))
1523 DECL_NONADDRESSABLE_P (field
) = 1;
1525 /* A type must be as aligned as its most aligned field that is not
1526 a bit-field. But this is already enforced by layout_type. */
1527 if (rep_level
> 0 && !DECL_BIT_FIELD (field
))
1528 TYPE_ALIGN (record_type
)
1529 = MAX (TYPE_ALIGN (record_type
), DECL_ALIGN (field
));
1534 ada_size
= size_binop (MAX_EXPR
, ada_size
, this_ada_size
);
1535 size
= size_binop (MAX_EXPR
, size
, this_size
);
1538 case QUAL_UNION_TYPE
:
1540 = fold_build3 (COND_EXPR
, bitsizetype
, DECL_QUALIFIER (field
),
1541 this_ada_size
, ada_size
);
1542 size
= fold_build3 (COND_EXPR
, bitsizetype
, DECL_QUALIFIER (field
),
1547 /* Since we know here that all fields are sorted in order of
1548 increasing bit position, the size of the record is one
1549 higher than the ending bit of the last field processed
1550 unless we have a rep clause, since in that case we might
1551 have a field outside a QUAL_UNION_TYPE that has a higher ending
1552 position. So use a MAX in that case. Also, if this field is a
1553 QUAL_UNION_TYPE, we need to take into account the previous size in
1554 the case of empty variants. */
1556 = merge_sizes (ada_size
, pos
, this_ada_size
,
1557 TREE_CODE (type
) == QUAL_UNION_TYPE
, rep_level
> 0);
1559 = merge_sizes (size
, pos
, this_size
,
1560 TREE_CODE (type
) == QUAL_UNION_TYPE
, rep_level
> 0);
1568 if (code
== QUAL_UNION_TYPE
)
1569 nreverse (field_list
);
1573 /* If this is a padding record, we never want to make the size smaller
1574 than what was specified in it, if any. */
1575 if (TYPE_IS_PADDING_P (record_type
) && TYPE_SIZE (record_type
))
1576 size
= TYPE_SIZE (record_type
);
1578 /* Now set any of the values we've just computed that apply. */
1579 if (!TYPE_FAT_POINTER_P (record_type
)
1580 && !TYPE_CONTAINS_TEMPLATE_P (record_type
))
1581 SET_TYPE_ADA_SIZE (record_type
, ada_size
);
1585 tree size_unit
= had_size_unit
1586 ? TYPE_SIZE_UNIT (record_type
)
1587 : convert (sizetype
,
1588 size_binop (CEIL_DIV_EXPR
, size
,
1589 bitsize_unit_node
));
1590 unsigned int align
= TYPE_ALIGN (record_type
);
1592 TYPE_SIZE (record_type
) = variable_size (round_up (size
, align
));
1593 TYPE_SIZE_UNIT (record_type
)
1594 = variable_size (round_up (size_unit
, align
/ BITS_PER_UNIT
));
1596 compute_record_mode (record_type
);
1601 rest_of_record_type_compilation (record_type
);
1604 /* Append PARALLEL_TYPE on the chain of parallel types of TYPE. */
1607 add_parallel_type (tree type
, tree parallel_type
)
1609 tree decl
= TYPE_STUB_DECL (type
);
1611 while (DECL_PARALLEL_TYPE (decl
))
1612 decl
= TYPE_STUB_DECL (DECL_PARALLEL_TYPE (decl
));
1614 SET_DECL_PARALLEL_TYPE (decl
, parallel_type
);
1617 /* Return true if TYPE has a parallel type. */
1620 has_parallel_type (tree type
)
1622 tree decl
= TYPE_STUB_DECL (type
);
1624 return DECL_PARALLEL_TYPE (decl
) != NULL_TREE
;
1627 /* Wrap up compilation of RECORD_TYPE, i.e. output all the debug information
1628 associated with it. It need not be invoked directly in most cases since
1629 finish_record_type takes care of doing so, but this can be necessary if
1630 a parallel type is to be attached to the record type. */
1633 rest_of_record_type_compilation (tree record_type
)
1635 bool var_size
= false;
1638 /* If this is a padded type, the bulk of the debug info has already been
1639 generated for the field's type. */
1640 if (TYPE_IS_PADDING_P (record_type
))
1643 /* If the type already has a parallel type (XVS type), then we're done. */
1644 if (has_parallel_type (record_type
))
1647 for (field
= TYPE_FIELDS (record_type
); field
; field
= DECL_CHAIN (field
))
1649 /* We need to make an XVE/XVU record if any field has variable size,
1650 whether or not the record does. For example, if we have a union,
1651 it may be that all fields, rounded up to the alignment, have the
1652 same size, in which case we'll use that size. But the debug
1653 output routines (except Dwarf2) won't be able to output the fields,
1654 so we need to make the special record. */
1655 if (TREE_CODE (DECL_SIZE (field
)) != INTEGER_CST
1656 /* If a field has a non-constant qualifier, the record will have
1657 variable size too. */
1658 || (TREE_CODE (record_type
) == QUAL_UNION_TYPE
1659 && TREE_CODE (DECL_QUALIFIER (field
)) != INTEGER_CST
))
1666 /* If this record type is of variable size, make a parallel record type that
1667 will tell the debugger how the former is laid out (see exp_dbug.ads). */
1670 tree new_record_type
1671 = make_node (TREE_CODE (record_type
) == QUAL_UNION_TYPE
1672 ? UNION_TYPE
: TREE_CODE (record_type
));
1673 tree orig_name
= TYPE_NAME (record_type
), new_name
;
1674 tree last_pos
= bitsize_zero_node
;
1675 tree old_field
, prev_old_field
= NULL_TREE
;
1677 if (TREE_CODE (orig_name
) == TYPE_DECL
)
1678 orig_name
= DECL_NAME (orig_name
);
1681 = concat_name (orig_name
, TREE_CODE (record_type
) == QUAL_UNION_TYPE
1683 TYPE_NAME (new_record_type
) = new_name
;
1684 TYPE_ALIGN (new_record_type
) = BIGGEST_ALIGNMENT
;
1685 TYPE_STUB_DECL (new_record_type
)
1686 = create_type_stub_decl (new_name
, new_record_type
);
1687 DECL_IGNORED_P (TYPE_STUB_DECL (new_record_type
))
1688 = DECL_IGNORED_P (TYPE_STUB_DECL (record_type
));
1689 TYPE_SIZE (new_record_type
) = size_int (TYPE_ALIGN (record_type
));
1690 TYPE_SIZE_UNIT (new_record_type
)
1691 = size_int (TYPE_ALIGN (record_type
) / BITS_PER_UNIT
);
1693 /* Now scan all the fields, replacing each field with a new
1694 field corresponding to the new encoding. */
1695 for (old_field
= TYPE_FIELDS (record_type
); old_field
;
1696 old_field
= DECL_CHAIN (old_field
))
1698 tree field_type
= TREE_TYPE (old_field
);
1699 tree field_name
= DECL_NAME (old_field
);
1701 tree curpos
= bit_position (old_field
);
1703 unsigned int align
= 0;
1706 /* See how the position was modified from the last position.
1708 There are two basic cases we support: a value was added
1709 to the last position or the last position was rounded to
1710 a boundary and they something was added. Check for the
1711 first case first. If not, see if there is any evidence
1712 of rounding. If so, round the last position and try
1715 If this is a union, the position can be taken as zero. */
1717 /* Some computations depend on the shape of the position expression,
1718 so strip conversions to make sure it's exposed. */
1719 curpos
= remove_conversions (curpos
, true);
1721 if (TREE_CODE (new_record_type
) == UNION_TYPE
)
1722 pos
= bitsize_zero_node
, align
= 0;
1724 pos
= compute_related_constant (curpos
, last_pos
);
1726 if (!pos
&& TREE_CODE (curpos
) == MULT_EXPR
1727 && host_integerp (TREE_OPERAND (curpos
, 1), 1))
1729 tree offset
= TREE_OPERAND (curpos
, 0);
1730 align
= tree_low_cst (TREE_OPERAND (curpos
, 1), 1);
1732 /* An offset which is a bitwise AND with a negative power of 2
1733 means an alignment corresponding to this power of 2. Note
1734 that, as sizetype is sign-extended but nonetheless unsigned,
1735 we don't directly use tree_int_cst_sgn. */
1736 offset
= remove_conversions (offset
, true);
1737 if (TREE_CODE (offset
) == BIT_AND_EXPR
1738 && host_integerp (TREE_OPERAND (offset
, 1), 0)
1739 && TREE_INT_CST_HIGH (TREE_OPERAND (offset
, 1)) < 0)
1742 = - tree_low_cst (TREE_OPERAND (offset
, 1), 0);
1743 if (exact_log2 (pow
) > 0)
1747 pos
= compute_related_constant (curpos
,
1748 round_up (last_pos
, align
));
1750 else if (!pos
&& TREE_CODE (curpos
) == PLUS_EXPR
1751 && TREE_CODE (TREE_OPERAND (curpos
, 1)) == INTEGER_CST
1752 && TREE_CODE (TREE_OPERAND (curpos
, 0)) == MULT_EXPR
1753 && host_integerp (TREE_OPERAND
1754 (TREE_OPERAND (curpos
, 0), 1),
1759 (TREE_OPERAND (TREE_OPERAND (curpos
, 0), 1), 1);
1760 pos
= compute_related_constant (curpos
,
1761 round_up (last_pos
, align
));
1763 else if (potential_alignment_gap (prev_old_field
, old_field
,
1766 align
= TYPE_ALIGN (field_type
);
1767 pos
= compute_related_constant (curpos
,
1768 round_up (last_pos
, align
));
1771 /* If we can't compute a position, set it to zero.
1773 ??? We really should abort here, but it's too much work
1774 to get this correct for all cases. */
1777 pos
= bitsize_zero_node
;
1779 /* See if this type is variable-sized and make a pointer type
1780 and indicate the indirection if so. Beware that the debug
1781 back-end may adjust the position computed above according
1782 to the alignment of the field type, i.e. the pointer type
1783 in this case, if we don't preventively counter that. */
1784 if (TREE_CODE (DECL_SIZE (old_field
)) != INTEGER_CST
)
1786 field_type
= build_pointer_type (field_type
);
1787 if (align
!= 0 && TYPE_ALIGN (field_type
) > align
)
1789 field_type
= copy_node (field_type
);
1790 TYPE_ALIGN (field_type
) = align
;
1795 /* Make a new field name, if necessary. */
1796 if (var
|| align
!= 0)
1801 sprintf (suffix
, "XV%c%u", var
? 'L' : 'A',
1802 align
/ BITS_PER_UNIT
);
1804 strcpy (suffix
, "XVL");
1806 field_name
= concat_name (field_name
, suffix
);
1810 = create_field_decl (field_name
, field_type
, new_record_type
,
1811 DECL_SIZE (old_field
), pos
, 0, 0);
1812 DECL_CHAIN (new_field
) = TYPE_FIELDS (new_record_type
);
1813 TYPE_FIELDS (new_record_type
) = new_field
;
1815 /* If old_field is a QUAL_UNION_TYPE, take its size as being
1816 zero. The only time it's not the last field of the record
1817 is when there are other components at fixed positions after
1818 it (meaning there was a rep clause for every field) and we
1819 want to be able to encode them. */
1820 last_pos
= size_binop (PLUS_EXPR
, bit_position (old_field
),
1821 (TREE_CODE (TREE_TYPE (old_field
))
1824 : DECL_SIZE (old_field
));
1825 prev_old_field
= old_field
;
1828 TYPE_FIELDS (new_record_type
) = nreverse (TYPE_FIELDS (new_record_type
));
1830 add_parallel_type (record_type
, new_record_type
);
1834 /* Utility function of above to merge LAST_SIZE, the previous size of a record
1835 with FIRST_BIT and SIZE that describe a field. SPECIAL is true if this
1836 represents a QUAL_UNION_TYPE in which case we must look for COND_EXPRs and
1837 replace a value of zero with the old size. If HAS_REP is true, we take the
1838 MAX of the end position of this field with LAST_SIZE. In all other cases,
1839 we use FIRST_BIT plus SIZE. Return an expression for the size. */
1842 merge_sizes (tree last_size
, tree first_bit
, tree size
, bool special
,
1845 tree type
= TREE_TYPE (last_size
);
1848 if (!special
|| TREE_CODE (size
) != COND_EXPR
)
1850 new_size
= size_binop (PLUS_EXPR
, first_bit
, size
);
1852 new_size
= size_binop (MAX_EXPR
, last_size
, new_size
);
1856 new_size
= fold_build3 (COND_EXPR
, type
, TREE_OPERAND (size
, 0),
1857 integer_zerop (TREE_OPERAND (size
, 1))
1858 ? last_size
: merge_sizes (last_size
, first_bit
,
1859 TREE_OPERAND (size
, 1),
1861 integer_zerop (TREE_OPERAND (size
, 2))
1862 ? last_size
: merge_sizes (last_size
, first_bit
,
1863 TREE_OPERAND (size
, 2),
1866 /* We don't need any NON_VALUE_EXPRs and they can confuse us (especially
1867 when fed through substitute_in_expr) into thinking that a constant
1868 size is not constant. */
1869 while (TREE_CODE (new_size
) == NON_LVALUE_EXPR
)
1870 new_size
= TREE_OPERAND (new_size
, 0);
1875 /* Utility function of above to see if OP0 and OP1, both of SIZETYPE, are
1876 related by the addition of a constant. Return that constant if so. */
1879 compute_related_constant (tree op0
, tree op1
)
1881 tree op0_var
, op1_var
;
1882 tree op0_con
= split_plus (op0
, &op0_var
);
1883 tree op1_con
= split_plus (op1
, &op1_var
);
1884 tree result
= size_binop (MINUS_EXPR
, op0_con
, op1_con
);
1886 if (operand_equal_p (op0_var
, op1_var
, 0))
1888 else if (operand_equal_p (op0
, size_binop (PLUS_EXPR
, op1_var
, result
), 0))
1894 /* Utility function of above to split a tree OP which may be a sum, into a
1895 constant part, which is returned, and a variable part, which is stored
1896 in *PVAR. *PVAR may be bitsize_zero_node. All operations must be of
1900 split_plus (tree in
, tree
*pvar
)
1902 /* Strip conversions in order to ease the tree traversal and maximize the
1903 potential for constant or plus/minus discovery. We need to be careful
1904 to always return and set *pvar to bitsizetype trees, but it's worth
1906 in
= remove_conversions (in
, false);
1908 *pvar
= convert (bitsizetype
, in
);
1910 if (TREE_CODE (in
) == INTEGER_CST
)
1912 *pvar
= bitsize_zero_node
;
1913 return convert (bitsizetype
, in
);
1915 else if (TREE_CODE (in
) == PLUS_EXPR
|| TREE_CODE (in
) == MINUS_EXPR
)
1917 tree lhs_var
, rhs_var
;
1918 tree lhs_con
= split_plus (TREE_OPERAND (in
, 0), &lhs_var
);
1919 tree rhs_con
= split_plus (TREE_OPERAND (in
, 1), &rhs_var
);
1921 if (lhs_var
== TREE_OPERAND (in
, 0)
1922 && rhs_var
== TREE_OPERAND (in
, 1))
1923 return bitsize_zero_node
;
1925 *pvar
= size_binop (TREE_CODE (in
), lhs_var
, rhs_var
);
1926 return size_binop (TREE_CODE (in
), lhs_con
, rhs_con
);
1929 return bitsize_zero_node
;
1932 /* Return a FUNCTION_TYPE node. RETURN_TYPE is the type returned by the
1933 subprogram. If it is VOID_TYPE, then we are dealing with a procedure,
1934 otherwise we are dealing with a function. PARAM_DECL_LIST is a list of
1935 PARM_DECL nodes that are the subprogram parameters. CICO_LIST is the
1936 copy-in/copy-out list to be stored into the TYPE_CICO_LIST field.
1937 RETURN_UNCONSTRAINED_P is true if the function returns an unconstrained
1938 object. RETURN_BY_DIRECT_REF_P is true if the function returns by direct
1939 reference. RETURN_BY_INVISI_REF_P is true if the function returns by
1940 invisible reference. */
1943 create_subprog_type (tree return_type
, tree param_decl_list
, tree cico_list
,
1944 bool return_unconstrained_p
, bool return_by_direct_ref_p
,
1945 bool return_by_invisi_ref_p
)
1947 /* A list of the data type nodes of the subprogram formal parameters.
1948 This list is generated by traversing the input list of PARM_DECL
1950 VEC(tree
,gc
) *param_type_list
= NULL
;
1953 for (t
= param_decl_list
; t
; t
= DECL_CHAIN (t
))
1954 VEC_safe_push (tree
, gc
, param_type_list
, TREE_TYPE (t
));
1956 type
= build_function_type_vec (return_type
, param_type_list
);
1958 /* TYPE may have been shared since GCC hashes types. If it has a different
1959 CICO_LIST, make a copy. Likewise for the various flags. */
1960 if (!fntype_same_flags_p (type
, cico_list
, return_unconstrained_p
,
1961 return_by_direct_ref_p
, return_by_invisi_ref_p
))
1963 type
= copy_type (type
);
1964 TYPE_CI_CO_LIST (type
) = cico_list
;
1965 TYPE_RETURN_UNCONSTRAINED_P (type
) = return_unconstrained_p
;
1966 TYPE_RETURN_BY_DIRECT_REF_P (type
) = return_by_direct_ref_p
;
1967 TREE_ADDRESSABLE (type
) = return_by_invisi_ref_p
;
1973 /* Return a copy of TYPE but safe to modify in any way. */
1976 copy_type (tree type
)
1978 tree new_type
= copy_node (type
);
1980 /* Unshare the language-specific data. */
1981 if (TYPE_LANG_SPECIFIC (type
))
1983 TYPE_LANG_SPECIFIC (new_type
) = NULL
;
1984 SET_TYPE_LANG_SPECIFIC (new_type
, GET_TYPE_LANG_SPECIFIC (type
));
1987 /* And the contents of the language-specific slot if needed. */
1988 if ((INTEGRAL_TYPE_P (type
) || TREE_CODE (type
) == REAL_TYPE
)
1989 && TYPE_RM_VALUES (type
))
1991 TYPE_RM_VALUES (new_type
) = NULL_TREE
;
1992 SET_TYPE_RM_SIZE (new_type
, TYPE_RM_SIZE (type
));
1993 SET_TYPE_RM_MIN_VALUE (new_type
, TYPE_RM_MIN_VALUE (type
));
1994 SET_TYPE_RM_MAX_VALUE (new_type
, TYPE_RM_MAX_VALUE (type
));
1997 /* copy_node clears this field instead of copying it, because it is
1998 aliased with TREE_CHAIN. */
1999 TYPE_STUB_DECL (new_type
) = TYPE_STUB_DECL (type
);
2001 TYPE_POINTER_TO (new_type
) = 0;
2002 TYPE_REFERENCE_TO (new_type
) = 0;
2003 TYPE_MAIN_VARIANT (new_type
) = new_type
;
2004 TYPE_NEXT_VARIANT (new_type
) = 0;
2009 /* Return a subtype of sizetype with range MIN to MAX and whose
2010 TYPE_INDEX_TYPE is INDEX. GNAT_NODE is used for the position
2011 of the associated TYPE_DECL. */
2014 create_index_type (tree min
, tree max
, tree index
, Node_Id gnat_node
)
2016 /* First build a type for the desired range. */
2017 tree type
= build_nonshared_range_type (sizetype
, min
, max
);
2019 /* Then set the index type. */
2020 SET_TYPE_INDEX_TYPE (type
, index
);
2021 create_type_decl (NULL_TREE
, type
, NULL
, true, false, gnat_node
);
2026 /* Return a subtype of TYPE with range MIN to MAX. If TYPE is NULL,
2027 sizetype is used. */
2030 create_range_type (tree type
, tree min
, tree max
)
2034 if (type
== NULL_TREE
)
2037 /* First build a type with the base range. */
2038 range_type
= build_nonshared_range_type (type
, TYPE_MIN_VALUE (type
),
2039 TYPE_MAX_VALUE (type
));
2041 /* Then set the actual range. */
2042 SET_TYPE_RM_MIN_VALUE (range_type
, convert (type
, min
));
2043 SET_TYPE_RM_MAX_VALUE (range_type
, convert (type
, max
));
2048 /* Return a TYPE_DECL node suitable for the TYPE_STUB_DECL field of a type.
2049 TYPE_NAME gives the name of the type and TYPE is a ..._TYPE node giving
2053 create_type_stub_decl (tree type_name
, tree type
)
2055 /* Using a named TYPE_DECL ensures that a type name marker is emitted in
2056 STABS while setting DECL_ARTIFICIAL ensures that no DW_TAG_typedef is
2057 emitted in DWARF. */
2058 tree type_decl
= build_decl (input_location
,
2059 TYPE_DECL
, type_name
, type
);
2060 DECL_ARTIFICIAL (type_decl
) = 1;
2061 TYPE_ARTIFICIAL (type
) = 1;
2065 /* Return a TYPE_DECL node. TYPE_NAME gives the name of the type and TYPE
2066 is a ..._TYPE node giving its data type. ARTIFICIAL_P is true if this
2067 is a declaration that was generated by the compiler. DEBUG_INFO_P is
2068 true if we need to write debug information about this type. GNAT_NODE
2069 is used for the position of the decl. */
2072 create_type_decl (tree type_name
, tree type
, struct attrib
*attr_list
,
2073 bool artificial_p
, bool debug_info_p
, Node_Id gnat_node
)
2075 enum tree_code code
= TREE_CODE (type
);
2076 bool named
= TYPE_NAME (type
) && TREE_CODE (TYPE_NAME (type
)) == TYPE_DECL
;
2079 /* Only the builtin TYPE_STUB_DECL should be used for dummy types. */
2080 gcc_assert (!TYPE_IS_DUMMY_P (type
));
2082 /* If the type hasn't been named yet, we're naming it; preserve an existing
2083 TYPE_STUB_DECL that has been attached to it for some purpose. */
2084 if (!named
&& TYPE_STUB_DECL (type
))
2086 type_decl
= TYPE_STUB_DECL (type
);
2087 DECL_NAME (type_decl
) = type_name
;
2090 type_decl
= build_decl (input_location
,
2091 TYPE_DECL
, type_name
, type
);
2093 DECL_ARTIFICIAL (type_decl
) = artificial_p
;
2094 TYPE_ARTIFICIAL (type
) = artificial_p
;
2096 /* Add this decl to the current binding level. */
2097 gnat_pushdecl (type_decl
, gnat_node
);
2099 process_attributes (type_decl
, attr_list
);
2101 /* If we're naming the type, equate the TYPE_STUB_DECL to the name.
2102 This causes the name to be also viewed as a "tag" by the debug
2103 back-end, with the advantage that no DW_TAG_typedef is emitted
2104 for artificial "tagged" types in DWARF. */
2106 TYPE_STUB_DECL (type
) = type_decl
;
2108 /* Do not generate debug info for UNCONSTRAINED_ARRAY_TYPE that the
2109 back-end doesn't support, and for others if we don't need to. */
2110 if (code
== UNCONSTRAINED_ARRAY_TYPE
|| !debug_info_p
)
2111 DECL_IGNORED_P (type_decl
) = 1;
2116 /* Return a VAR_DECL or CONST_DECL node.
2118 VAR_NAME gives the name of the variable. ASM_NAME is its assembler name
2119 (if provided). TYPE is its data type (a GCC ..._TYPE node). VAR_INIT is
2120 the GCC tree for an optional initial expression; NULL_TREE if none.
2122 CONST_FLAG is true if this variable is constant, in which case we might
2123 return a CONST_DECL node unless CONST_DECL_ALLOWED_P is false.
2125 PUBLIC_FLAG is true if this is for a reference to a public entity or for a
2126 definition to be made visible outside of the current compilation unit, for
2127 instance variable definitions in a package specification.
2129 EXTERN_FLAG is true when processing an external variable declaration (as
2130 opposed to a definition: no storage is to be allocated for the variable).
2132 STATIC_FLAG is only relevant when not at top level. In that case
2133 it indicates whether to always allocate storage to the variable.
2135 GNAT_NODE is used for the position of the decl. */
2138 create_var_decl_1 (tree var_name
, tree asm_name
, tree type
, tree var_init
,
2139 bool const_flag
, bool public_flag
, bool extern_flag
,
2140 bool static_flag
, bool const_decl_allowed_p
,
2141 struct attrib
*attr_list
, Node_Id gnat_node
)
2143 /* Whether the initializer is a constant initializer. At the global level
2144 or for an external object or an object to be allocated in static memory,
2145 we check that it is a valid constant expression for use in initializing
2146 a static variable; otherwise, we only check that it is constant. */
2149 && gnat_types_compatible_p (type
, TREE_TYPE (var_init
))
2150 && (global_bindings_p () || extern_flag
|| static_flag
2151 ? initializer_constant_valid_p (var_init
, TREE_TYPE (var_init
)) != 0
2152 : TREE_CONSTANT (var_init
)));
2154 /* Whether we will make TREE_CONSTANT the DECL we produce here, in which
2155 case the initializer may be used in-lieu of the DECL node (as done in
2156 Identifier_to_gnu). This is useful to prevent the need of elaboration
2157 code when an identifier for which such a decl is made is in turn used as
2158 an initializer. We used to rely on CONST vs VAR_DECL for this purpose,
2159 but extra constraints apply to this choice (see below) and are not
2160 relevant to the distinction we wish to make. */
2161 bool constant_p
= const_flag
&& init_const
;
2163 /* The actual DECL node. CONST_DECL was initially intended for enumerals
2164 and may be used for scalars in general but not for aggregates. */
2166 = build_decl (input_location
,
2167 (constant_p
&& const_decl_allowed_p
2168 && !AGGREGATE_TYPE_P (type
)) ? CONST_DECL
: VAR_DECL
,
2171 /* If this is external, throw away any initializations (they will be done
2172 elsewhere) unless this is a constant for which we would like to remain
2173 able to get the initializer. If we are defining a global here, leave a
2174 constant initialization and save any variable elaborations for the
2175 elaboration routine. If we are just annotating types, throw away the
2176 initialization if it isn't a constant. */
2177 if ((extern_flag
&& !constant_p
)
2178 || (type_annotate_only
&& var_init
&& !TREE_CONSTANT (var_init
)))
2179 var_init
= NULL_TREE
;
2181 /* At the global level, an initializer requiring code to be generated
2182 produces elaboration statements. Check that such statements are allowed,
2183 that is, not violating a No_Elaboration_Code restriction. */
2184 if (global_bindings_p () && var_init
!= 0 && !init_const
)
2185 Check_Elaboration_Code_Allowed (gnat_node
);
2187 DECL_INITIAL (var_decl
) = var_init
;
2188 TREE_READONLY (var_decl
) = const_flag
;
2189 DECL_EXTERNAL (var_decl
) = extern_flag
;
2190 TREE_PUBLIC (var_decl
) = public_flag
|| extern_flag
;
2191 TREE_CONSTANT (var_decl
) = constant_p
;
2192 TREE_THIS_VOLATILE (var_decl
) = TREE_SIDE_EFFECTS (var_decl
)
2193 = TYPE_VOLATILE (type
);
2195 /* Ada doesn't feature Fortran-like COMMON variables so we shouldn't
2196 try to fiddle with DECL_COMMON. However, on platforms that don't
2197 support global BSS sections, uninitialized global variables would
2198 go in DATA instead, thus increasing the size of the executable. */
2200 && TREE_CODE (var_decl
) == VAR_DECL
2201 && TREE_PUBLIC (var_decl
)
2202 && !have_global_bss_p ())
2203 DECL_COMMON (var_decl
) = 1;
2205 /* At the global binding level, we need to allocate static storage for the
2206 variable if it isn't external. Otherwise, we allocate automatic storage
2207 unless requested not to. */
2208 TREE_STATIC (var_decl
)
2209 = !extern_flag
&& (static_flag
|| global_bindings_p ());
2211 /* For an external constant whose initializer is not absolute, do not emit
2212 debug info. In DWARF this would mean a global relocation in a read-only
2213 section which runs afoul of the PE-COFF run-time relocation mechanism. */
2217 && initializer_constant_valid_p (var_init
, TREE_TYPE (var_init
))
2218 != null_pointer_node
)
2219 DECL_IGNORED_P (var_decl
) = 1;
2221 /* Add this decl to the current binding level. */
2222 gnat_pushdecl (var_decl
, gnat_node
);
2224 if (TREE_SIDE_EFFECTS (var_decl
))
2225 TREE_ADDRESSABLE (var_decl
) = 1;
2227 if (TREE_CODE (var_decl
) == VAR_DECL
)
2230 SET_DECL_ASSEMBLER_NAME (var_decl
, asm_name
);
2231 process_attributes (var_decl
, attr_list
);
2232 if (global_bindings_p ())
2233 rest_of_decl_compilation (var_decl
, true, 0);
2239 /* Return true if TYPE, an aggregate type, contains (or is) an array. */
2242 aggregate_type_contains_array_p (tree type
)
2244 switch (TREE_CODE (type
))
2248 case QUAL_UNION_TYPE
:
2251 for (field
= TYPE_FIELDS (type
); field
; field
= DECL_CHAIN (field
))
2252 if (AGGREGATE_TYPE_P (TREE_TYPE (field
))
2253 && aggregate_type_contains_array_p (TREE_TYPE (field
)))
2266 /* Return a FIELD_DECL node. FIELD_NAME is the field's name, FIELD_TYPE is
2267 its type and RECORD_TYPE is the type of the enclosing record. If SIZE is
2268 nonzero, it is the specified size of the field. If POS is nonzero, it is
2269 the bit position. PACKED is 1 if the enclosing record is packed, -1 if it
2270 has Component_Alignment of Storage_Unit. If ADDRESSABLE is nonzero, it
2271 means we are allowed to take the address of the field; if it is negative,
2272 we should not make a bitfield, which is used by make_aligning_type. */
2275 create_field_decl (tree field_name
, tree field_type
, tree record_type
,
2276 tree size
, tree pos
, int packed
, int addressable
)
2278 tree field_decl
= build_decl (input_location
,
2279 FIELD_DECL
, field_name
, field_type
);
2281 DECL_CONTEXT (field_decl
) = record_type
;
2282 TREE_READONLY (field_decl
) = TYPE_READONLY (field_type
);
2284 /* If FIELD_TYPE is BLKmode, we must ensure this is aligned to at least a
2285 byte boundary since GCC cannot handle less-aligned BLKmode bitfields.
2286 Likewise for an aggregate without specified position that contains an
2287 array, because in this case slices of variable length of this array
2288 must be handled by GCC and variable-sized objects need to be aligned
2289 to at least a byte boundary. */
2290 if (packed
&& (TYPE_MODE (field_type
) == BLKmode
2292 && AGGREGATE_TYPE_P (field_type
)
2293 && aggregate_type_contains_array_p (field_type
))))
2294 DECL_ALIGN (field_decl
) = BITS_PER_UNIT
;
2296 /* If a size is specified, use it. Otherwise, if the record type is packed
2297 compute a size to use, which may differ from the object's natural size.
2298 We always set a size in this case to trigger the checks for bitfield
2299 creation below, which is typically required when no position has been
2302 size
= convert (bitsizetype
, size
);
2303 else if (packed
== 1)
2305 size
= rm_size (field_type
);
2306 if (TYPE_MODE (field_type
) == BLKmode
)
2307 size
= round_up (size
, BITS_PER_UNIT
);
2310 /* If we may, according to ADDRESSABLE, make a bitfield if a size is
2311 specified for two reasons: first if the size differs from the natural
2312 size. Second, if the alignment is insufficient. There are a number of
2313 ways the latter can be true.
2315 We never make a bitfield if the type of the field has a nonconstant size,
2316 because no such entity requiring bitfield operations should reach here.
2318 We do *preventively* make a bitfield when there might be the need for it
2319 but we don't have all the necessary information to decide, as is the case
2320 of a field with no specified position in a packed record.
2322 We also don't look at STRICT_ALIGNMENT here, and rely on later processing
2323 in layout_decl or finish_record_type to clear the bit_field indication if
2324 it is in fact not needed. */
2325 if (addressable
>= 0
2327 && TREE_CODE (size
) == INTEGER_CST
2328 && TREE_CODE (TYPE_SIZE (field_type
)) == INTEGER_CST
2329 && (!tree_int_cst_equal (size
, TYPE_SIZE (field_type
))
2330 || (pos
&& !value_factor_p (pos
, TYPE_ALIGN (field_type
)))
2332 || (TYPE_ALIGN (record_type
) != 0
2333 && TYPE_ALIGN (record_type
) < TYPE_ALIGN (field_type
))))
2335 DECL_BIT_FIELD (field_decl
) = 1;
2336 DECL_SIZE (field_decl
) = size
;
2337 if (!packed
&& !pos
)
2339 if (TYPE_ALIGN (record_type
) != 0
2340 && TYPE_ALIGN (record_type
) < TYPE_ALIGN (field_type
))
2341 DECL_ALIGN (field_decl
) = TYPE_ALIGN (record_type
);
2343 DECL_ALIGN (field_decl
) = TYPE_ALIGN (field_type
);
2347 DECL_PACKED (field_decl
) = pos
? DECL_BIT_FIELD (field_decl
) : packed
;
2349 /* Bump the alignment if need be, either for bitfield/packing purposes or
2350 to satisfy the type requirements if no such consideration applies. When
2351 we get the alignment from the type, indicate if this is from an explicit
2352 user request, which prevents stor-layout from lowering it later on. */
2354 unsigned int bit_align
2355 = (DECL_BIT_FIELD (field_decl
) ? 1
2356 : packed
&& TYPE_MODE (field_type
) != BLKmode
? BITS_PER_UNIT
: 0);
2358 if (bit_align
> DECL_ALIGN (field_decl
))
2359 DECL_ALIGN (field_decl
) = bit_align
;
2360 else if (!bit_align
&& TYPE_ALIGN (field_type
) > DECL_ALIGN (field_decl
))
2362 DECL_ALIGN (field_decl
) = TYPE_ALIGN (field_type
);
2363 DECL_USER_ALIGN (field_decl
) = TYPE_USER_ALIGN (field_type
);
2369 /* We need to pass in the alignment the DECL is known to have.
2370 This is the lowest-order bit set in POS, but no more than
2371 the alignment of the record, if one is specified. Note
2372 that an alignment of 0 is taken as infinite. */
2373 unsigned int known_align
;
2375 if (host_integerp (pos
, 1))
2376 known_align
= tree_low_cst (pos
, 1) & - tree_low_cst (pos
, 1);
2378 known_align
= BITS_PER_UNIT
;
2380 if (TYPE_ALIGN (record_type
)
2381 && (known_align
== 0 || known_align
> TYPE_ALIGN (record_type
)))
2382 known_align
= TYPE_ALIGN (record_type
);
2384 layout_decl (field_decl
, known_align
);
2385 SET_DECL_OFFSET_ALIGN (field_decl
,
2386 host_integerp (pos
, 1) ? BIGGEST_ALIGNMENT
2388 pos_from_bit (&DECL_FIELD_OFFSET (field_decl
),
2389 &DECL_FIELD_BIT_OFFSET (field_decl
),
2390 DECL_OFFSET_ALIGN (field_decl
), pos
);
2393 /* In addition to what our caller says, claim the field is addressable if we
2394 know that its type is not suitable.
2396 The field may also be "technically" nonaddressable, meaning that even if
2397 we attempt to take the field's address we will actually get the address
2398 of a copy. This is the case for true bitfields, but the DECL_BIT_FIELD
2399 value we have at this point is not accurate enough, so we don't account
2400 for this here and let finish_record_type decide. */
2401 if (!addressable
&& !type_for_nonaliased_component_p (field_type
))
2404 DECL_NONADDRESSABLE_P (field_decl
) = !addressable
;
2409 /* Return a PARM_DECL node. PARAM_NAME is the name of the parameter and
2410 PARAM_TYPE is its type. READONLY is true if the parameter is readonly
2411 (either an In parameter or an address of a pass-by-ref parameter). */
2414 create_param_decl (tree param_name
, tree param_type
, bool readonly
)
2416 tree param_decl
= build_decl (input_location
,
2417 PARM_DECL
, param_name
, param_type
);
2419 /* Honor TARGET_PROMOTE_PROTOTYPES like the C compiler, as not doing so
2420 can lead to various ABI violations. */
2421 if (targetm
.calls
.promote_prototypes (NULL_TREE
)
2422 && INTEGRAL_TYPE_P (param_type
)
2423 && TYPE_PRECISION (param_type
) < TYPE_PRECISION (integer_type_node
))
2425 /* We have to be careful about biased types here. Make a subtype
2426 of integer_type_node with the proper biasing. */
2427 if (TREE_CODE (param_type
) == INTEGER_TYPE
2428 && TYPE_BIASED_REPRESENTATION_P (param_type
))
2431 = make_unsigned_type (TYPE_PRECISION (integer_type_node
));
2432 TREE_TYPE (subtype
) = integer_type_node
;
2433 TYPE_BIASED_REPRESENTATION_P (subtype
) = 1;
2434 SET_TYPE_RM_MIN_VALUE (subtype
, TYPE_MIN_VALUE (param_type
));
2435 SET_TYPE_RM_MAX_VALUE (subtype
, TYPE_MAX_VALUE (param_type
));
2436 param_type
= subtype
;
2439 param_type
= integer_type_node
;
2442 DECL_ARG_TYPE (param_decl
) = param_type
;
2443 TREE_READONLY (param_decl
) = readonly
;
2447 /* Given a DECL and ATTR_LIST, process the listed attributes. */
2450 process_attributes (tree decl
, struct attrib
*attr_list
)
2452 for (; attr_list
; attr_list
= attr_list
->next
)
2453 switch (attr_list
->type
)
2455 case ATTR_MACHINE_ATTRIBUTE
:
2456 input_location
= DECL_SOURCE_LOCATION (decl
);
2457 decl_attributes (&decl
, tree_cons (attr_list
->name
, attr_list
->args
,
2459 ATTR_FLAG_TYPE_IN_PLACE
);
2462 case ATTR_LINK_ALIAS
:
2463 if (! DECL_EXTERNAL (decl
))
2465 TREE_STATIC (decl
) = 1;
2466 assemble_alias (decl
, attr_list
->name
);
2470 case ATTR_WEAK_EXTERNAL
:
2472 declare_weak (decl
);
2474 post_error ("?weak declarations not supported on this target",
2475 attr_list
->error_point
);
2478 case ATTR_LINK_SECTION
:
2479 if (targetm_common
.have_named_sections
)
2481 DECL_SECTION_NAME (decl
)
2482 = build_string (IDENTIFIER_LENGTH (attr_list
->name
),
2483 IDENTIFIER_POINTER (attr_list
->name
));
2484 DECL_COMMON (decl
) = 0;
2487 post_error ("?section attributes are not supported for this target",
2488 attr_list
->error_point
);
2491 case ATTR_LINK_CONSTRUCTOR
:
2492 DECL_STATIC_CONSTRUCTOR (decl
) = 1;
2493 TREE_USED (decl
) = 1;
2496 case ATTR_LINK_DESTRUCTOR
:
2497 DECL_STATIC_DESTRUCTOR (decl
) = 1;
2498 TREE_USED (decl
) = 1;
2501 case ATTR_THREAD_LOCAL_STORAGE
:
2502 DECL_TLS_MODEL (decl
) = decl_default_tls_model (decl
);
2503 DECL_COMMON (decl
) = 0;
2508 /* Record DECL as a global renaming pointer. */
2511 record_global_renaming_pointer (tree decl
)
2513 gcc_assert (!DECL_LOOP_PARM_P (decl
) && DECL_RENAMED_OBJECT (decl
));
2514 VEC_safe_push (tree
, gc
, global_renaming_pointers
, decl
);
2517 /* Invalidate the global renaming pointers. */
2520 invalidate_global_renaming_pointers (void)
2525 FOR_EACH_VEC_ELT (tree
, global_renaming_pointers
, i
, iter
)
2526 SET_DECL_RENAMED_OBJECT (iter
, NULL_TREE
);
2528 VEC_free (tree
, gc
, global_renaming_pointers
);
2531 /* Return true if VALUE is a known to be a multiple of FACTOR, which must be
2535 value_factor_p (tree value
, HOST_WIDE_INT factor
)
2537 if (host_integerp (value
, 1))
2538 return tree_low_cst (value
, 1) % factor
== 0;
2540 if (TREE_CODE (value
) == MULT_EXPR
)
2541 return (value_factor_p (TREE_OPERAND (value
, 0), factor
)
2542 || value_factor_p (TREE_OPERAND (value
, 1), factor
));
2547 /* Given two consecutive field decls PREV_FIELD and CURR_FIELD, return true
2548 unless we can prove these 2 fields are laid out in such a way that no gap
2549 exist between the end of PREV_FIELD and the beginning of CURR_FIELD. OFFSET
2550 is the distance in bits between the end of PREV_FIELD and the starting
2551 position of CURR_FIELD. It is ignored if null. */
2554 potential_alignment_gap (tree prev_field
, tree curr_field
, tree offset
)
2556 /* If this is the first field of the record, there cannot be any gap */
2560 /* If the previous field is a union type, then return False: The only
2561 time when such a field is not the last field of the record is when
2562 there are other components at fixed positions after it (meaning there
2563 was a rep clause for every field), in which case we don't want the
2564 alignment constraint to override them. */
2565 if (TREE_CODE (TREE_TYPE (prev_field
)) == QUAL_UNION_TYPE
)
2568 /* If the distance between the end of prev_field and the beginning of
2569 curr_field is constant, then there is a gap if the value of this
2570 constant is not null. */
2571 if (offset
&& host_integerp (offset
, 1))
2572 return !integer_zerop (offset
);
2574 /* If the size and position of the previous field are constant,
2575 then check the sum of this size and position. There will be a gap
2576 iff it is not multiple of the current field alignment. */
2577 if (host_integerp (DECL_SIZE (prev_field
), 1)
2578 && host_integerp (bit_position (prev_field
), 1))
2579 return ((tree_low_cst (bit_position (prev_field
), 1)
2580 + tree_low_cst (DECL_SIZE (prev_field
), 1))
2581 % DECL_ALIGN (curr_field
) != 0);
2583 /* If both the position and size of the previous field are multiples
2584 of the current field alignment, there cannot be any gap. */
2585 if (value_factor_p (bit_position (prev_field
), DECL_ALIGN (curr_field
))
2586 && value_factor_p (DECL_SIZE (prev_field
), DECL_ALIGN (curr_field
)))
2589 /* Fallback, return that there may be a potential gap */
2593 /* Return a LABEL_DECL with LABEL_NAME. GNAT_NODE is used for the position
2597 create_label_decl (tree label_name
, Node_Id gnat_node
)
2600 = build_decl (input_location
, LABEL_DECL
, label_name
, void_type_node
);
2602 DECL_MODE (label_decl
) = VOIDmode
;
2604 /* Add this decl to the current binding level. */
2605 gnat_pushdecl (label_decl
, gnat_node
);
2610 /* Return a FUNCTION_DECL node. SUBPROG_NAME is the name of the subprogram,
2611 ASM_NAME is its assembler name, SUBPROG_TYPE is its type (a FUNCTION_TYPE
2612 node), PARAM_DECL_LIST is the list of the subprogram arguments (a list of
2613 PARM_DECL nodes chained through the DECL_CHAIN field).
2615 INLINE_FLAG, PUBLIC_FLAG, EXTERN_FLAG, ARTIFICIAL_FLAG and ATTR_LIST are
2616 used to set the appropriate fields in the FUNCTION_DECL. GNAT_NODE is
2617 used for the position of the decl. */
2620 create_subprog_decl (tree subprog_name
, tree asm_name
, tree subprog_type
,
2621 tree param_decl_list
, bool inline_flag
, bool public_flag
,
2622 bool extern_flag
, bool artificial_flag
,
2623 struct attrib
*attr_list
, Node_Id gnat_node
)
2625 tree subprog_decl
= build_decl (input_location
, FUNCTION_DECL
, subprog_name
,
2627 tree result_decl
= build_decl (input_location
, RESULT_DECL
, NULL_TREE
,
2628 TREE_TYPE (subprog_type
));
2629 DECL_ARGUMENTS (subprog_decl
) = param_decl_list
;
2631 /* If this is a non-inline function nested inside an inlined external
2632 function, we cannot honor both requests without cloning the nested
2633 function in the current unit since it is private to the other unit.
2634 We could inline the nested function as well but it's probably better
2635 to err on the side of too little inlining. */
2638 && current_function_decl
2639 && DECL_DECLARED_INLINE_P (current_function_decl
)
2640 && DECL_EXTERNAL (current_function_decl
))
2641 DECL_DECLARED_INLINE_P (current_function_decl
) = 0;
2643 DECL_ARTIFICIAL (subprog_decl
) = artificial_flag
;
2644 DECL_EXTERNAL (subprog_decl
) = extern_flag
;
2645 DECL_DECLARED_INLINE_P (subprog_decl
) = inline_flag
;
2646 DECL_NO_INLINE_WARNING_P (subprog_decl
) = inline_flag
&& artificial_flag
;
2648 TREE_PUBLIC (subprog_decl
) = public_flag
;
2649 TREE_READONLY (subprog_decl
) = TYPE_READONLY (subprog_type
);
2650 TREE_THIS_VOLATILE (subprog_decl
) = TYPE_VOLATILE (subprog_type
);
2651 TREE_SIDE_EFFECTS (subprog_decl
) = TYPE_VOLATILE (subprog_type
);
2653 DECL_ARTIFICIAL (result_decl
) = 1;
2654 DECL_IGNORED_P (result_decl
) = 1;
2655 DECL_BY_REFERENCE (result_decl
) = TREE_ADDRESSABLE (subprog_type
);
2656 DECL_RESULT (subprog_decl
) = result_decl
;
2660 SET_DECL_ASSEMBLER_NAME (subprog_decl
, asm_name
);
2662 /* The expand_main_function circuitry expects "main_identifier_node" to
2663 designate the DECL_NAME of the 'main' entry point, in turn expected
2664 to be declared as the "main" function literally by default. Ada
2665 program entry points are typically declared with a different name
2666 within the binder generated file, exported as 'main' to satisfy the
2667 system expectations. Force main_identifier_node in this case. */
2668 if (asm_name
== main_identifier_node
)
2669 DECL_NAME (subprog_decl
) = main_identifier_node
;
2672 /* Add this decl to the current binding level. */
2673 gnat_pushdecl (subprog_decl
, gnat_node
);
2675 process_attributes (subprog_decl
, attr_list
);
2677 /* Output the assembler code and/or RTL for the declaration. */
2678 rest_of_decl_compilation (subprog_decl
, global_bindings_p (), 0);
2680 return subprog_decl
;
2683 /* Set up the framework for generating code for SUBPROG_DECL, a subprogram
2684 body. This routine needs to be invoked before processing the declarations
2685 appearing in the subprogram. */
2688 begin_subprog_body (tree subprog_decl
)
2692 announce_function (subprog_decl
);
2694 /* This function is being defined. */
2695 TREE_STATIC (subprog_decl
) = 1;
2697 current_function_decl
= subprog_decl
;
2699 /* Enter a new binding level and show that all the parameters belong to
2703 for (param_decl
= DECL_ARGUMENTS (subprog_decl
); param_decl
;
2704 param_decl
= DECL_CHAIN (param_decl
))
2705 DECL_CONTEXT (param_decl
) = subprog_decl
;
2707 make_decl_rtl (subprog_decl
);
2710 /* Finish translating the current subprogram and set its BODY. */
2713 end_subprog_body (tree body
)
2715 tree fndecl
= current_function_decl
;
2717 /* Attach the BLOCK for this level to the function and pop the level. */
2718 BLOCK_SUPERCONTEXT (current_binding_level
->block
) = fndecl
;
2719 DECL_INITIAL (fndecl
) = current_binding_level
->block
;
2722 /* Mark the RESULT_DECL as being in this subprogram. */
2723 DECL_CONTEXT (DECL_RESULT (fndecl
)) = fndecl
;
2725 /* The body should be a BIND_EXPR whose BLOCK is the top-level one. */
2726 if (TREE_CODE (body
) == BIND_EXPR
)
2728 BLOCK_SUPERCONTEXT (BIND_EXPR_BLOCK (body
)) = fndecl
;
2729 DECL_INITIAL (fndecl
) = BIND_EXPR_BLOCK (body
);
2732 DECL_SAVED_TREE (fndecl
) = body
;
2734 current_function_decl
= decl_function_context (fndecl
);
2737 /* Wrap up compilation of SUBPROG_DECL, a subprogram body. */
2740 rest_of_subprog_body_compilation (tree subprog_decl
)
2742 /* We cannot track the location of errors past this point. */
2743 error_gnat_node
= Empty
;
2745 /* If we're only annotating types, don't actually compile this function. */
2746 if (type_annotate_only
)
2749 /* Dump functions before gimplification. */
2750 dump_function (TDI_original
, subprog_decl
);
2752 if (!decl_function_context (subprog_decl
))
2753 cgraph_finalize_function (subprog_decl
, false);
2755 /* Register this function with cgraph just far enough to get it
2756 added to our parent's nested function list. */
2757 (void) cgraph_get_create_node (subprog_decl
);
2761 gnat_builtin_function (tree decl
)
2763 gnat_pushdecl (decl
, Empty
);
2767 /* Return an integer type with the number of bits of precision given by
2768 PRECISION. UNSIGNEDP is nonzero if the type is unsigned; otherwise
2769 it is a signed type. */
2772 gnat_type_for_size (unsigned precision
, int unsignedp
)
2777 if (precision
<= 2 * MAX_BITS_PER_WORD
2778 && signed_and_unsigned_types
[precision
][unsignedp
])
2779 return signed_and_unsigned_types
[precision
][unsignedp
];
2782 t
= make_unsigned_type (precision
);
2784 t
= make_signed_type (precision
);
2786 if (precision
<= 2 * MAX_BITS_PER_WORD
)
2787 signed_and_unsigned_types
[precision
][unsignedp
] = t
;
2791 sprintf (type_name
, "%sSIGNED_%d", unsignedp
? "UN" : "", precision
);
2792 TYPE_NAME (t
) = get_identifier (type_name
);
2798 /* Likewise for floating-point types. */
2801 float_type_for_precision (int precision
, enum machine_mode mode
)
2806 if (float_types
[(int) mode
])
2807 return float_types
[(int) mode
];
2809 float_types
[(int) mode
] = t
= make_node (REAL_TYPE
);
2810 TYPE_PRECISION (t
) = precision
;
2813 gcc_assert (TYPE_MODE (t
) == mode
);
2816 sprintf (type_name
, "FLOAT_%d", precision
);
2817 TYPE_NAME (t
) = get_identifier (type_name
);
2823 /* Return a data type that has machine mode MODE. UNSIGNEDP selects
2824 an unsigned type; otherwise a signed type is returned. */
2827 gnat_type_for_mode (enum machine_mode mode
, int unsignedp
)
2829 if (mode
== BLKmode
)
2832 if (mode
== VOIDmode
)
2833 return void_type_node
;
2835 if (COMPLEX_MODE_P (mode
))
2838 if (SCALAR_FLOAT_MODE_P (mode
))
2839 return float_type_for_precision (GET_MODE_PRECISION (mode
), mode
);
2841 if (SCALAR_INT_MODE_P (mode
))
2842 return gnat_type_for_size (GET_MODE_BITSIZE (mode
), unsignedp
);
2844 if (VECTOR_MODE_P (mode
))
2846 enum machine_mode inner_mode
= GET_MODE_INNER (mode
);
2847 tree inner_type
= gnat_type_for_mode (inner_mode
, unsignedp
);
2849 return build_vector_type_for_mode (inner_type
, mode
);
2855 /* Return the unsigned version of a TYPE_NODE, a scalar type. */
2858 gnat_unsigned_type (tree type_node
)
2860 tree type
= gnat_type_for_size (TYPE_PRECISION (type_node
), 1);
2862 if (TREE_CODE (type_node
) == INTEGER_TYPE
&& TYPE_MODULAR_P (type_node
))
2864 type
= copy_node (type
);
2865 TREE_TYPE (type
) = type_node
;
2867 else if (TREE_TYPE (type_node
)
2868 && TREE_CODE (TREE_TYPE (type_node
)) == INTEGER_TYPE
2869 && TYPE_MODULAR_P (TREE_TYPE (type_node
)))
2871 type
= copy_node (type
);
2872 TREE_TYPE (type
) = TREE_TYPE (type_node
);
2878 /* Return the signed version of a TYPE_NODE, a scalar type. */
2881 gnat_signed_type (tree type_node
)
2883 tree type
= gnat_type_for_size (TYPE_PRECISION (type_node
), 0);
2885 if (TREE_CODE (type_node
) == INTEGER_TYPE
&& TYPE_MODULAR_P (type_node
))
2887 type
= copy_node (type
);
2888 TREE_TYPE (type
) = type_node
;
2890 else if (TREE_TYPE (type_node
)
2891 && TREE_CODE (TREE_TYPE (type_node
)) == INTEGER_TYPE
2892 && TYPE_MODULAR_P (TREE_TYPE (type_node
)))
2894 type
= copy_node (type
);
2895 TREE_TYPE (type
) = TREE_TYPE (type_node
);
2901 /* Return 1 if the types T1 and T2 are compatible, i.e. if they can be
2902 transparently converted to each other. */
2905 gnat_types_compatible_p (tree t1
, tree t2
)
2907 enum tree_code code
;
2909 /* This is the default criterion. */
2910 if (TYPE_MAIN_VARIANT (t1
) == TYPE_MAIN_VARIANT (t2
))
2913 /* We only check structural equivalence here. */
2914 if ((code
= TREE_CODE (t1
)) != TREE_CODE (t2
))
2917 /* Vector types are also compatible if they have the same number of subparts
2918 and the same form of (scalar) element type. */
2919 if (code
== VECTOR_TYPE
2920 && TYPE_VECTOR_SUBPARTS (t1
) == TYPE_VECTOR_SUBPARTS (t2
)
2921 && TREE_CODE (TREE_TYPE (t1
)) == TREE_CODE (TREE_TYPE (t2
))
2922 && TYPE_PRECISION (TREE_TYPE (t1
)) == TYPE_PRECISION (TREE_TYPE (t2
)))
2925 /* Array types are also compatible if they are constrained and have the same
2926 domain(s) and the same component type. */
2927 if (code
== ARRAY_TYPE
2928 && (TYPE_DOMAIN (t1
) == TYPE_DOMAIN (t2
)
2929 || (TYPE_DOMAIN (t1
)
2931 && tree_int_cst_equal (TYPE_MIN_VALUE (TYPE_DOMAIN (t1
)),
2932 TYPE_MIN_VALUE (TYPE_DOMAIN (t2
)))
2933 && tree_int_cst_equal (TYPE_MAX_VALUE (TYPE_DOMAIN (t1
)),
2934 TYPE_MAX_VALUE (TYPE_DOMAIN (t2
)))))
2935 && (TREE_TYPE (t1
) == TREE_TYPE (t2
)
2936 || (TREE_CODE (TREE_TYPE (t1
)) == ARRAY_TYPE
2937 && gnat_types_compatible_p (TREE_TYPE (t1
), TREE_TYPE (t2
)))))
2943 /* Return true if EXPR is a useless type conversion. */
2946 gnat_useless_type_conversion (tree expr
)
2948 if (CONVERT_EXPR_P (expr
)
2949 || TREE_CODE (expr
) == VIEW_CONVERT_EXPR
2950 || TREE_CODE (expr
) == NON_LVALUE_EXPR
)
2951 return gnat_types_compatible_p (TREE_TYPE (expr
),
2952 TREE_TYPE (TREE_OPERAND (expr
, 0)));
2957 /* Return true if T, a FUNCTION_TYPE, has the specified list of flags. */
2960 fntype_same_flags_p (const_tree t
, tree cico_list
, bool return_unconstrained_p
,
2961 bool return_by_direct_ref_p
, bool return_by_invisi_ref_p
)
2963 return TYPE_CI_CO_LIST (t
) == cico_list
2964 && TYPE_RETURN_UNCONSTRAINED_P (t
) == return_unconstrained_p
2965 && TYPE_RETURN_BY_DIRECT_REF_P (t
) == return_by_direct_ref_p
2966 && TREE_ADDRESSABLE (t
) == return_by_invisi_ref_p
;
2969 /* EXP is an expression for the size of an object. If this size contains
2970 discriminant references, replace them with the maximum (if MAX_P) or
2971 minimum (if !MAX_P) possible value of the discriminant. */
2974 max_size (tree exp
, bool max_p
)
2976 enum tree_code code
= TREE_CODE (exp
);
2977 tree type
= TREE_TYPE (exp
);
2979 switch (TREE_CODE_CLASS (code
))
2981 case tcc_declaration
:
2986 if (code
== CALL_EXPR
)
2991 t
= maybe_inline_call_in_expr (exp
);
2993 return max_size (t
, max_p
);
2995 n
= call_expr_nargs (exp
);
2997 argarray
= XALLOCAVEC (tree
, n
);
2998 for (i
= 0; i
< n
; i
++)
2999 argarray
[i
] = max_size (CALL_EXPR_ARG (exp
, i
), max_p
);
3000 return build_call_array (type
, CALL_EXPR_FN (exp
), n
, argarray
);
3005 /* If this contains a PLACEHOLDER_EXPR, it is the thing we want to
3006 modify. Otherwise, we treat it like a variable. */
3007 if (!CONTAINS_PLACEHOLDER_P (exp
))
3010 type
= TREE_TYPE (TREE_OPERAND (exp
, 1));
3012 max_size (max_p
? TYPE_MAX_VALUE (type
) : TYPE_MIN_VALUE (type
), true);
3014 case tcc_comparison
:
3015 return max_p
? size_one_node
: size_zero_node
;
3019 case tcc_expression
:
3020 switch (TREE_CODE_LENGTH (code
))
3023 if (code
== SAVE_EXPR
)
3025 else if (code
== NON_LVALUE_EXPR
)
3026 return max_size (TREE_OPERAND (exp
, 0), max_p
);
3029 fold_build1 (code
, type
,
3030 max_size (TREE_OPERAND (exp
, 0),
3031 code
== NEGATE_EXPR
? !max_p
: max_p
));
3034 if (code
== COMPOUND_EXPR
)
3035 return max_size (TREE_OPERAND (exp
, 1), max_p
);
3038 tree lhs
= max_size (TREE_OPERAND (exp
, 0), max_p
);
3039 tree rhs
= max_size (TREE_OPERAND (exp
, 1),
3040 code
== MINUS_EXPR
? !max_p
: max_p
);
3042 /* Special-case wanting the maximum value of a MIN_EXPR.
3043 In that case, if one side overflows, return the other.
3044 sizetype is signed, but we know sizes are non-negative.
3045 Likewise, handle a MINUS_EXPR or PLUS_EXPR with the LHS
3046 overflowing and the RHS a variable. */
3049 && TREE_CODE (rhs
) == INTEGER_CST
3050 && TREE_OVERFLOW (rhs
))
3054 && TREE_CODE (lhs
) == INTEGER_CST
3055 && TREE_OVERFLOW (lhs
))
3057 else if ((code
== MINUS_EXPR
|| code
== PLUS_EXPR
)
3058 && TREE_CODE (lhs
) == INTEGER_CST
3059 && TREE_OVERFLOW (lhs
)
3060 && !TREE_CONSTANT (rhs
))
3063 return fold_build2 (code
, type
, lhs
, rhs
);
3067 if (code
== COND_EXPR
)
3068 return fold_build2 (max_p
? MAX_EXPR
: MIN_EXPR
, type
,
3069 max_size (TREE_OPERAND (exp
, 1), max_p
),
3070 max_size (TREE_OPERAND (exp
, 2), max_p
));
3073 /* Other tree classes cannot happen. */
3081 /* Build a template of type TEMPLATE_TYPE from the array bounds of ARRAY_TYPE.
3082 EXPR is an expression that we can use to locate any PLACEHOLDER_EXPRs.
3083 Return a constructor for the template. */
3086 build_template (tree template_type
, tree array_type
, tree expr
)
3088 VEC(constructor_elt
,gc
) *template_elts
= NULL
;
3089 tree bound_list
= NULL_TREE
;
3092 while (TREE_CODE (array_type
) == RECORD_TYPE
3093 && (TYPE_PADDING_P (array_type
)
3094 || TYPE_JUSTIFIED_MODULAR_P (array_type
)))
3095 array_type
= TREE_TYPE (TYPE_FIELDS (array_type
));
3097 if (TREE_CODE (array_type
) == ARRAY_TYPE
3098 || (TREE_CODE (array_type
) == INTEGER_TYPE
3099 && TYPE_HAS_ACTUAL_BOUNDS_P (array_type
)))
3100 bound_list
= TYPE_ACTUAL_BOUNDS (array_type
);
3102 /* First make the list for a CONSTRUCTOR for the template. Go down the
3103 field list of the template instead of the type chain because this
3104 array might be an Ada array of arrays and we can't tell where the
3105 nested arrays stop being the underlying object. */
3107 for (field
= TYPE_FIELDS (template_type
); field
;
3109 ? (bound_list
= TREE_CHAIN (bound_list
))
3110 : (array_type
= TREE_TYPE (array_type
))),
3111 field
= DECL_CHAIN (DECL_CHAIN (field
)))
3113 tree bounds
, min
, max
;
3115 /* If we have a bound list, get the bounds from there. Likewise
3116 for an ARRAY_TYPE. Otherwise, if expr is a PARM_DECL with
3117 DECL_BY_COMPONENT_PTR_P, use the bounds of the field in the template.
3118 This will give us a maximum range. */
3120 bounds
= TREE_VALUE (bound_list
);
3121 else if (TREE_CODE (array_type
) == ARRAY_TYPE
)
3122 bounds
= TYPE_INDEX_TYPE (TYPE_DOMAIN (array_type
));
3123 else if (expr
&& TREE_CODE (expr
) == PARM_DECL
3124 && DECL_BY_COMPONENT_PTR_P (expr
))
3125 bounds
= TREE_TYPE (field
);
3129 min
= convert (TREE_TYPE (field
), TYPE_MIN_VALUE (bounds
));
3130 max
= convert (TREE_TYPE (DECL_CHAIN (field
)), TYPE_MAX_VALUE (bounds
));
3132 /* If either MIN or MAX involve a PLACEHOLDER_EXPR, we must
3133 substitute it from OBJECT. */
3134 min
= SUBSTITUTE_PLACEHOLDER_IN_EXPR (min
, expr
);
3135 max
= SUBSTITUTE_PLACEHOLDER_IN_EXPR (max
, expr
);
3137 CONSTRUCTOR_APPEND_ELT (template_elts
, field
, min
);
3138 CONSTRUCTOR_APPEND_ELT (template_elts
, DECL_CHAIN (field
), max
);
3141 return gnat_build_constructor (template_type
, template_elts
);
3144 /* Helper routine to make a descriptor field. FIELD_LIST is the list of decls
3145 being built; the new decl is chained on to the front of the list. */
3148 make_descriptor_field (const char *name
, tree type
, tree rec_type
,
3149 tree initial
, tree field_list
)
3152 = create_field_decl (get_identifier (name
), type
, rec_type
, NULL_TREE
,
3155 DECL_INITIAL (field
) = initial
;
3156 DECL_CHAIN (field
) = field_list
;
3160 /* Build a 32-bit VMS descriptor from a Mechanism_Type, which must specify a
3161 descriptor type, and the GCC type of an object. Each FIELD_DECL in the
3162 type contains in its DECL_INITIAL the expression to use when a constructor
3163 is made for the type. GNAT_ENTITY is an entity used to print out an error
3164 message if the mechanism cannot be applied to an object of that type and
3165 also for the name. */
3168 build_vms_descriptor32 (tree type
, Mechanism_Type mech
, Entity_Id gnat_entity
)
3170 tree record_type
= make_node (RECORD_TYPE
);
3171 tree pointer32_type
, pointer64_type
;
3172 tree field_list
= NULL_TREE
;
3173 int klass
, ndim
, i
, dtype
= 0;
3174 tree inner_type
, tem
;
3177 /* If TYPE is an unconstrained array, use the underlying array type. */
3178 if (TREE_CODE (type
) == UNCONSTRAINED_ARRAY_TYPE
)
3179 type
= TREE_TYPE (TREE_TYPE (TYPE_FIELDS (TREE_TYPE (type
))));
3181 /* If this is an array, compute the number of dimensions in the array,
3182 get the index types, and point to the inner type. */
3183 if (TREE_CODE (type
) != ARRAY_TYPE
)
3186 for (ndim
= 1, inner_type
= type
;
3187 TREE_CODE (TREE_TYPE (inner_type
)) == ARRAY_TYPE
3188 && TYPE_MULTI_ARRAY_P (TREE_TYPE (inner_type
));
3189 ndim
++, inner_type
= TREE_TYPE (inner_type
))
3192 idx_arr
= XALLOCAVEC (tree
, ndim
);
3194 if (mech
!= By_Descriptor_NCA
&& mech
!= By_Short_Descriptor_NCA
3195 && TREE_CODE (type
) == ARRAY_TYPE
&& TYPE_CONVENTION_FORTRAN_P (type
))
3196 for (i
= ndim
- 1, inner_type
= type
;
3198 i
--, inner_type
= TREE_TYPE (inner_type
))
3199 idx_arr
[i
] = TYPE_DOMAIN (inner_type
);
3201 for (i
= 0, inner_type
= type
;
3203 i
++, inner_type
= TREE_TYPE (inner_type
))
3204 idx_arr
[i
] = TYPE_DOMAIN (inner_type
);
3206 /* Now get the DTYPE value. */
3207 switch (TREE_CODE (type
))
3212 if (TYPE_VAX_FLOATING_POINT_P (type
))
3213 switch (tree_low_cst (TYPE_DIGITS_VALUE (type
), 1))
3226 switch (GET_MODE_BITSIZE (TYPE_MODE (type
)))
3229 dtype
= TYPE_UNSIGNED (type
) ? 2 : 6;
3232 dtype
= TYPE_UNSIGNED (type
) ? 3 : 7;
3235 dtype
= TYPE_UNSIGNED (type
) ? 4 : 8;
3238 dtype
= TYPE_UNSIGNED (type
) ? 5 : 9;
3241 dtype
= TYPE_UNSIGNED (type
) ? 25 : 26;
3247 dtype
= GET_MODE_BITSIZE (TYPE_MODE (type
)) == 32 ? 52 : 53;
3251 if (TREE_CODE (TREE_TYPE (type
)) == INTEGER_TYPE
3252 && TYPE_VAX_FLOATING_POINT_P (type
))
3253 switch (tree_low_cst (TYPE_DIGITS_VALUE (type
), 1))
3265 dtype
= GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (type
))) == 32 ? 54: 55;
3276 /* Get the CLASS value. */
3279 case By_Descriptor_A
:
3280 case By_Short_Descriptor_A
:
3283 case By_Descriptor_NCA
:
3284 case By_Short_Descriptor_NCA
:
3287 case By_Descriptor_SB
:
3288 case By_Short_Descriptor_SB
:
3292 case By_Short_Descriptor
:
3293 case By_Descriptor_S
:
3294 case By_Short_Descriptor_S
:
3300 /* Make the type for a descriptor for VMS. The first four fields are the
3301 same for all types. */
3303 = make_descriptor_field ("LENGTH", gnat_type_for_size (16, 1), record_type
,
3304 size_in_bytes ((mech
== By_Descriptor_A
3305 || mech
== By_Short_Descriptor_A
)
3306 ? inner_type
: type
),
3309 = make_descriptor_field ("DTYPE", gnat_type_for_size (8, 1), record_type
,
3310 size_int (dtype
), field_list
);
3312 = make_descriptor_field ("CLASS", gnat_type_for_size (8, 1), record_type
,
3313 size_int (klass
), field_list
);
3315 pointer32_type
= build_pointer_type_for_mode (type
, SImode
, false);
3316 pointer64_type
= build_pointer_type_for_mode (type
, DImode
, false);
3318 /* Ensure that only 32-bit pointers are passed in 32-bit descriptors. Note
3319 that we cannot build a template call to the CE routine as it would get a
3320 wrong source location; instead we use a second placeholder for it. */
3321 tem
= build_unary_op (ADDR_EXPR
, pointer64_type
,
3322 build0 (PLACEHOLDER_EXPR
, type
));
3323 tem
= build3 (COND_EXPR
, pointer32_type
,
3325 ? build_binary_op (GE_EXPR
, boolean_type_node
, tem
,
3326 build_int_cstu (pointer64_type
, 0x80000000))
3327 : boolean_false_node
,
3328 build0 (PLACEHOLDER_EXPR
, void_type_node
),
3329 convert (pointer32_type
, tem
));
3332 = make_descriptor_field ("POINTER", pointer32_type
, record_type
, tem
,
3338 case By_Short_Descriptor
:
3339 case By_Descriptor_S
:
3340 case By_Short_Descriptor_S
:
3343 case By_Descriptor_SB
:
3344 case By_Short_Descriptor_SB
:
3346 = make_descriptor_field ("SB_L1", gnat_type_for_size (32, 1),
3348 (TREE_CODE (type
) == ARRAY_TYPE
3349 ? TYPE_MIN_VALUE (TYPE_DOMAIN (type
))
3353 = make_descriptor_field ("SB_U1", gnat_type_for_size (32, 1),
3355 (TREE_CODE (type
) == ARRAY_TYPE
3356 ? TYPE_MAX_VALUE (TYPE_DOMAIN (type
))
3361 case By_Descriptor_A
:
3362 case By_Short_Descriptor_A
:
3363 case By_Descriptor_NCA
:
3364 case By_Short_Descriptor_NCA
:
3366 = make_descriptor_field ("SCALE", gnat_type_for_size (8, 1),
3367 record_type
, size_zero_node
, field_list
);
3370 = make_descriptor_field ("DIGITS", gnat_type_for_size (8, 1),
3371 record_type
, size_zero_node
, field_list
);
3374 = make_descriptor_field ("AFLAGS", gnat_type_for_size (8, 1),
3376 size_int ((mech
== By_Descriptor_NCA
3377 || mech
== By_Short_Descriptor_NCA
)
3379 /* Set FL_COLUMN, FL_COEFF, and
3381 : (TREE_CODE (type
) == ARRAY_TYPE
3382 && TYPE_CONVENTION_FORTRAN_P
3388 = make_descriptor_field ("DIMCT", gnat_type_for_size (8, 1),
3389 record_type
, size_int (ndim
), field_list
);
3392 = make_descriptor_field ("ARSIZE", gnat_type_for_size (32, 1),
3393 record_type
, size_in_bytes (type
),
3396 /* Now build a pointer to the 0,0,0... element. */
3397 tem
= build0 (PLACEHOLDER_EXPR
, type
);
3398 for (i
= 0, inner_type
= type
; i
< ndim
;
3399 i
++, inner_type
= TREE_TYPE (inner_type
))
3400 tem
= build4 (ARRAY_REF
, TREE_TYPE (inner_type
), tem
,
3401 convert (TYPE_DOMAIN (inner_type
), size_zero_node
),
3402 NULL_TREE
, NULL_TREE
);
3405 = make_descriptor_field ("A0", pointer32_type
, record_type
,
3406 build1 (ADDR_EXPR
, pointer32_type
, tem
),
3409 /* Next come the addressing coefficients. */
3410 tem
= size_one_node
;
3411 for (i
= 0; i
< ndim
; i
++)
3415 = size_binop (MULT_EXPR
, tem
,
3416 size_binop (PLUS_EXPR
,
3417 size_binop (MINUS_EXPR
,
3418 TYPE_MAX_VALUE (idx_arr
[i
]),
3419 TYPE_MIN_VALUE (idx_arr
[i
])),
3422 fname
[0] = ((mech
== By_Descriptor_NCA
||
3423 mech
== By_Short_Descriptor_NCA
) ? 'S' : 'M');
3424 fname
[1] = '0' + i
, fname
[2] = 0;
3426 = make_descriptor_field (fname
, gnat_type_for_size (32, 1),
3427 record_type
, idx_length
, field_list
);
3429 if (mech
== By_Descriptor_NCA
|| mech
== By_Short_Descriptor_NCA
)
3433 /* Finally here are the bounds. */
3434 for (i
= 0; i
< ndim
; i
++)
3438 fname
[0] = 'L', fname
[1] = '0' + i
, fname
[2] = 0;
3440 = make_descriptor_field (fname
, gnat_type_for_size (32, 1),
3441 record_type
, TYPE_MIN_VALUE (idx_arr
[i
]),
3446 = make_descriptor_field (fname
, gnat_type_for_size (32, 1),
3447 record_type
, TYPE_MAX_VALUE (idx_arr
[i
]),
3453 post_error ("unsupported descriptor type for &", gnat_entity
);
3456 TYPE_NAME (record_type
) = create_concat_name (gnat_entity
, "DESC");
3457 finish_record_type (record_type
, nreverse (field_list
), 0, false);
3461 /* Build a 64-bit VMS descriptor from a Mechanism_Type, which must specify a
3462 descriptor type, and the GCC type of an object. Each FIELD_DECL in the
3463 type contains in its DECL_INITIAL the expression to use when a constructor
3464 is made for the type. GNAT_ENTITY is an entity used to print out an error
3465 message if the mechanism cannot be applied to an object of that type and
3466 also for the name. */
3469 build_vms_descriptor (tree type
, Mechanism_Type mech
, Entity_Id gnat_entity
)
3471 tree record_type
= make_node (RECORD_TYPE
);
3472 tree pointer64_type
;
3473 tree field_list
= NULL_TREE
;
3474 int klass
, ndim
, i
, dtype
= 0;
3475 tree inner_type
, tem
;
3478 /* If TYPE is an unconstrained array, use the underlying array type. */
3479 if (TREE_CODE (type
) == UNCONSTRAINED_ARRAY_TYPE
)
3480 type
= TREE_TYPE (TREE_TYPE (TYPE_FIELDS (TREE_TYPE (type
))));
3482 /* If this is an array, compute the number of dimensions in the array,
3483 get the index types, and point to the inner type. */
3484 if (TREE_CODE (type
) != ARRAY_TYPE
)
3487 for (ndim
= 1, inner_type
= type
;
3488 TREE_CODE (TREE_TYPE (inner_type
)) == ARRAY_TYPE
3489 && TYPE_MULTI_ARRAY_P (TREE_TYPE (inner_type
));
3490 ndim
++, inner_type
= TREE_TYPE (inner_type
))
3493 idx_arr
= XALLOCAVEC (tree
, ndim
);
3495 if (mech
!= By_Descriptor_NCA
3496 && TREE_CODE (type
) == ARRAY_TYPE
&& TYPE_CONVENTION_FORTRAN_P (type
))
3497 for (i
= ndim
- 1, inner_type
= type
;
3499 i
--, inner_type
= TREE_TYPE (inner_type
))
3500 idx_arr
[i
] = TYPE_DOMAIN (inner_type
);
3502 for (i
= 0, inner_type
= type
;
3504 i
++, inner_type
= TREE_TYPE (inner_type
))
3505 idx_arr
[i
] = TYPE_DOMAIN (inner_type
);
3507 /* Now get the DTYPE value. */
3508 switch (TREE_CODE (type
))
3513 if (TYPE_VAX_FLOATING_POINT_P (type
))
3514 switch (tree_low_cst (TYPE_DIGITS_VALUE (type
), 1))
3527 switch (GET_MODE_BITSIZE (TYPE_MODE (type
)))
3530 dtype
= TYPE_UNSIGNED (type
) ? 2 : 6;
3533 dtype
= TYPE_UNSIGNED (type
) ? 3 : 7;
3536 dtype
= TYPE_UNSIGNED (type
) ? 4 : 8;
3539 dtype
= TYPE_UNSIGNED (type
) ? 5 : 9;
3542 dtype
= TYPE_UNSIGNED (type
) ? 25 : 26;
3548 dtype
= GET_MODE_BITSIZE (TYPE_MODE (type
)) == 32 ? 52 : 53;
3552 if (TREE_CODE (TREE_TYPE (type
)) == INTEGER_TYPE
3553 && TYPE_VAX_FLOATING_POINT_P (type
))
3554 switch (tree_low_cst (TYPE_DIGITS_VALUE (type
), 1))
3566 dtype
= GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (type
))) == 32 ? 54: 55;
3577 /* Get the CLASS value. */
3580 case By_Descriptor_A
:
3583 case By_Descriptor_NCA
:
3586 case By_Descriptor_SB
:
3590 case By_Descriptor_S
:
3596 /* Make the type for a 64-bit descriptor for VMS. The first six fields
3597 are the same for all types. */
3599 = make_descriptor_field ("MBO", gnat_type_for_size (16, 1),
3600 record_type
, size_int (1), field_list
);
3602 = make_descriptor_field ("DTYPE", gnat_type_for_size (8, 1),
3603 record_type
, size_int (dtype
), field_list
);
3605 = make_descriptor_field ("CLASS", gnat_type_for_size (8, 1),
3606 record_type
, size_int (klass
), field_list
);
3608 = make_descriptor_field ("MBMO", gnat_type_for_size (32, 1),
3609 record_type
, size_int (-1), field_list
);
3611 = make_descriptor_field ("LENGTH", gnat_type_for_size (64, 1),
3613 size_in_bytes (mech
== By_Descriptor_A
3614 ? inner_type
: type
),
3617 pointer64_type
= build_pointer_type_for_mode (type
, DImode
, false);
3620 = make_descriptor_field ("POINTER", pointer64_type
, record_type
,
3621 build_unary_op (ADDR_EXPR
, pointer64_type
,
3622 build0 (PLACEHOLDER_EXPR
, type
)),
3628 case By_Descriptor_S
:
3631 case By_Descriptor_SB
:
3633 = make_descriptor_field ("SB_L1", gnat_type_for_size (64, 1),
3635 (TREE_CODE (type
) == ARRAY_TYPE
3636 ? TYPE_MIN_VALUE (TYPE_DOMAIN (type
))
3640 = make_descriptor_field ("SB_U1", gnat_type_for_size (64, 1),
3642 (TREE_CODE (type
) == ARRAY_TYPE
3643 ? TYPE_MAX_VALUE (TYPE_DOMAIN (type
))
3648 case By_Descriptor_A
:
3649 case By_Descriptor_NCA
:
3651 = make_descriptor_field ("SCALE", gnat_type_for_size (8, 1),
3652 record_type
, size_zero_node
, field_list
);
3655 = make_descriptor_field ("DIGITS", gnat_type_for_size (8, 1),
3656 record_type
, size_zero_node
, field_list
);
3658 dtype
= (mech
== By_Descriptor_NCA
3660 /* Set FL_COLUMN, FL_COEFF, and
3662 : (TREE_CODE (type
) == ARRAY_TYPE
3663 && TYPE_CONVENTION_FORTRAN_P (type
)
3666 = make_descriptor_field ("AFLAGS", gnat_type_for_size (8, 1),
3667 record_type
, size_int (dtype
),
3671 = make_descriptor_field ("DIMCT", gnat_type_for_size (8, 1),
3672 record_type
, size_int (ndim
), field_list
);
3675 = make_descriptor_field ("MBZ", gnat_type_for_size (32, 1),
3676 record_type
, size_int (0), field_list
);
3678 = make_descriptor_field ("ARSIZE", gnat_type_for_size (64, 1),
3679 record_type
, size_in_bytes (type
),
3682 /* Now build a pointer to the 0,0,0... element. */
3683 tem
= build0 (PLACEHOLDER_EXPR
, type
);
3684 for (i
= 0, inner_type
= type
; i
< ndim
;
3685 i
++, inner_type
= TREE_TYPE (inner_type
))
3686 tem
= build4 (ARRAY_REF
, TREE_TYPE (inner_type
), tem
,
3687 convert (TYPE_DOMAIN (inner_type
), size_zero_node
),
3688 NULL_TREE
, NULL_TREE
);
3691 = make_descriptor_field ("A0", pointer64_type
, record_type
,
3692 build1 (ADDR_EXPR
, pointer64_type
, tem
),
3695 /* Next come the addressing coefficients. */
3696 tem
= size_one_node
;
3697 for (i
= 0; i
< ndim
; i
++)
3701 = size_binop (MULT_EXPR
, tem
,
3702 size_binop (PLUS_EXPR
,
3703 size_binop (MINUS_EXPR
,
3704 TYPE_MAX_VALUE (idx_arr
[i
]),
3705 TYPE_MIN_VALUE (idx_arr
[i
])),
3708 fname
[0] = (mech
== By_Descriptor_NCA
? 'S' : 'M');
3709 fname
[1] = '0' + i
, fname
[2] = 0;
3711 = make_descriptor_field (fname
, gnat_type_for_size (64, 1),
3712 record_type
, idx_length
, field_list
);
3714 if (mech
== By_Descriptor_NCA
)
3718 /* Finally here are the bounds. */
3719 for (i
= 0; i
< ndim
; i
++)
3723 fname
[0] = 'L', fname
[1] = '0' + i
, fname
[2] = 0;
3725 = make_descriptor_field (fname
, gnat_type_for_size (64, 1),
3727 TYPE_MIN_VALUE (idx_arr
[i
]), field_list
);
3731 = make_descriptor_field (fname
, gnat_type_for_size (64, 1),
3733 TYPE_MAX_VALUE (idx_arr
[i
]), field_list
);
3738 post_error ("unsupported descriptor type for &", gnat_entity
);
3741 TYPE_NAME (record_type
) = create_concat_name (gnat_entity
, "DESC64");
3742 finish_record_type (record_type
, nreverse (field_list
), 0, false);
3746 /* Fill in a VMS descriptor of GNU_TYPE for GNU_EXPR and return the result.
3747 GNAT_ACTUAL is the actual parameter for which the descriptor is built. */
3750 fill_vms_descriptor (tree gnu_type
, tree gnu_expr
, Node_Id gnat_actual
)
3752 VEC(constructor_elt
,gc
) *v
= NULL
;
3755 gnu_expr
= maybe_unconstrained_array (gnu_expr
);
3756 gnu_expr
= gnat_protect_expr (gnu_expr
);
3757 gnat_mark_addressable (gnu_expr
);
3759 /* We may need to substitute both GNU_EXPR and a CALL_EXPR to the raise CE
3760 routine in case we have a 32-bit descriptor. */
3761 gnu_expr
= build2 (COMPOUND_EXPR
, void_type_node
,
3762 build_call_raise (CE_Range_Check_Failed
, gnat_actual
,
3763 N_Raise_Constraint_Error
),
3766 for (field
= TYPE_FIELDS (gnu_type
); field
; field
= DECL_CHAIN (field
))
3769 = convert (TREE_TYPE (field
),
3770 SUBSTITUTE_PLACEHOLDER_IN_EXPR (DECL_INITIAL (field
),
3772 CONSTRUCTOR_APPEND_ELT (v
, field
, value
);
3775 return gnat_build_constructor (gnu_type
, v
);
3778 /* Convert GNU_EXPR, a pointer to a 64bit VMS descriptor, to GNU_TYPE, a
3779 regular pointer or fat pointer type. GNAT_SUBPROG is the subprogram to
3780 which the VMS descriptor is passed. */
3783 convert_vms_descriptor64 (tree gnu_type
, tree gnu_expr
, Entity_Id gnat_subprog
)
3785 tree desc_type
= TREE_TYPE (TREE_TYPE (gnu_expr
));
3786 tree desc
= build1 (INDIRECT_REF
, desc_type
, gnu_expr
);
3787 /* The CLASS field is the 3rd field in the descriptor. */
3788 tree klass
= DECL_CHAIN (DECL_CHAIN (TYPE_FIELDS (desc_type
)));
3789 /* The POINTER field is the 6th field in the descriptor. */
3790 tree pointer
= DECL_CHAIN (DECL_CHAIN (DECL_CHAIN (klass
)));
3792 /* Retrieve the value of the POINTER field. */
3794 = build3 (COMPONENT_REF
, TREE_TYPE (pointer
), desc
, pointer
, NULL_TREE
);
3796 if (POINTER_TYPE_P (gnu_type
))
3797 return convert (gnu_type
, gnu_expr64
);
3799 else if (TYPE_IS_FAT_POINTER_P (gnu_type
))
3801 tree p_array_type
= TREE_TYPE (TYPE_FIELDS (gnu_type
));
3802 tree p_bounds_type
= TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (gnu_type
)));
3803 tree template_type
= TREE_TYPE (p_bounds_type
);
3804 tree min_field
= TYPE_FIELDS (template_type
);
3805 tree max_field
= DECL_CHAIN (TYPE_FIELDS (template_type
));
3806 tree template_tree
, template_addr
, aflags
, dimct
, t
, u
;
3807 /* See the head comment of build_vms_descriptor. */
3808 int iklass
= TREE_INT_CST_LOW (DECL_INITIAL (klass
));
3809 tree lfield
, ufield
;
3810 VEC(constructor_elt
,gc
) *v
;
3812 /* Convert POINTER to the pointer-to-array type. */
3813 gnu_expr64
= convert (p_array_type
, gnu_expr64
);
3817 case 1: /* Class S */
3818 case 15: /* Class SB */
3819 /* Build {1, LENGTH} template; LENGTH64 is the 5th field. */
3820 v
= VEC_alloc (constructor_elt
, gc
, 2);
3821 t
= DECL_CHAIN (DECL_CHAIN (klass
));
3822 t
= build3 (COMPONENT_REF
, TREE_TYPE (t
), desc
, t
, NULL_TREE
);
3823 CONSTRUCTOR_APPEND_ELT (v
, min_field
,
3824 convert (TREE_TYPE (min_field
),
3826 CONSTRUCTOR_APPEND_ELT (v
, max_field
,
3827 convert (TREE_TYPE (max_field
), t
));
3828 template_tree
= gnat_build_constructor (template_type
, v
);
3829 template_addr
= build_unary_op (ADDR_EXPR
, NULL_TREE
, template_tree
);
3831 /* For class S, we are done. */
3835 /* Test that we really have a SB descriptor, like DEC Ada. */
3836 t
= build3 (COMPONENT_REF
, TREE_TYPE (klass
), desc
, klass
, NULL
);
3837 u
= convert (TREE_TYPE (klass
), DECL_INITIAL (klass
));
3838 u
= build_binary_op (EQ_EXPR
, boolean_type_node
, t
, u
);
3839 /* If so, there is already a template in the descriptor and
3840 it is located right after the POINTER field. The fields are
3841 64bits so they must be repacked. */
3842 t
= DECL_CHAIN (pointer
);
3843 lfield
= build3 (COMPONENT_REF
, TREE_TYPE (t
), desc
, t
, NULL_TREE
);
3844 lfield
= convert (TREE_TYPE (TYPE_FIELDS (template_type
)), lfield
);
3847 ufield
= build3 (COMPONENT_REF
, TREE_TYPE (t
), desc
, t
, NULL_TREE
);
3849 (TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (template_type
))), ufield
);
3851 /* Build the template in the form of a constructor. */
3852 v
= VEC_alloc (constructor_elt
, gc
, 2);
3853 CONSTRUCTOR_APPEND_ELT (v
, TYPE_FIELDS (template_type
), lfield
);
3854 CONSTRUCTOR_APPEND_ELT (v
, DECL_CHAIN (TYPE_FIELDS (template_type
)),
3856 template_tree
= gnat_build_constructor (template_type
, v
);
3858 /* Otherwise use the {1, LENGTH} template we build above. */
3859 template_addr
= build3 (COND_EXPR
, p_bounds_type
, u
,
3860 build_unary_op (ADDR_EXPR
, p_bounds_type
,
3865 case 4: /* Class A */
3866 /* The AFLAGS field is the 3rd field after the pointer in the
3868 t
= DECL_CHAIN (DECL_CHAIN (DECL_CHAIN (pointer
)));
3869 aflags
= build3 (COMPONENT_REF
, TREE_TYPE (t
), desc
, t
, NULL_TREE
);
3870 /* The DIMCT field is the next field in the descriptor after
3873 dimct
= build3 (COMPONENT_REF
, TREE_TYPE (t
), desc
, t
, NULL_TREE
);
3874 /* Raise CONSTRAINT_ERROR if either more than 1 dimension
3875 or FL_COEFF or FL_BOUNDS not set. */
3876 u
= build_int_cst (TREE_TYPE (aflags
), 192);
3877 u
= build_binary_op (TRUTH_OR_EXPR
, boolean_type_node
,
3878 build_binary_op (NE_EXPR
, boolean_type_node
,
3880 convert (TREE_TYPE (dimct
),
3882 build_binary_op (NE_EXPR
, boolean_type_node
,
3883 build2 (BIT_AND_EXPR
,
3887 /* There is already a template in the descriptor and it is located
3888 in block 3. The fields are 64bits so they must be repacked. */
3889 t
= DECL_CHAIN (DECL_CHAIN (DECL_CHAIN (DECL_CHAIN (DECL_CHAIN
3891 lfield
= build3 (COMPONENT_REF
, TREE_TYPE (t
), desc
, t
, NULL_TREE
);
3892 lfield
= convert (TREE_TYPE (TYPE_FIELDS (template_type
)), lfield
);
3895 ufield
= build3 (COMPONENT_REF
, TREE_TYPE (t
), desc
, t
, NULL_TREE
);
3897 (TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (template_type
))), ufield
);
3899 /* Build the template in the form of a constructor. */
3900 v
= VEC_alloc (constructor_elt
, gc
, 2);
3901 CONSTRUCTOR_APPEND_ELT (v
, TYPE_FIELDS (template_type
), lfield
);
3902 CONSTRUCTOR_APPEND_ELT (v
, DECL_CHAIN (TYPE_FIELDS (template_type
)),
3904 template_tree
= gnat_build_constructor (template_type
, v
);
3905 template_tree
= build3 (COND_EXPR
, template_type
, u
,
3906 build_call_raise (CE_Length_Check_Failed
, Empty
,
3907 N_Raise_Constraint_Error
),
3910 = build_unary_op (ADDR_EXPR
, p_bounds_type
, template_tree
);
3913 case 10: /* Class NCA */
3915 post_error ("unsupported descriptor type for &", gnat_subprog
);
3916 template_addr
= integer_zero_node
;
3920 /* Build the fat pointer in the form of a constructor. */
3921 v
= VEC_alloc (constructor_elt
, gc
, 2);
3922 CONSTRUCTOR_APPEND_ELT (v
, TYPE_FIELDS (gnu_type
), gnu_expr64
);
3923 CONSTRUCTOR_APPEND_ELT (v
, DECL_CHAIN (TYPE_FIELDS (gnu_type
)),
3925 return gnat_build_constructor (gnu_type
, v
);
3932 /* Convert GNU_EXPR, a pointer to a 32bit VMS descriptor, to GNU_TYPE, a
3933 regular pointer or fat pointer type. GNAT_SUBPROG is the subprogram to
3934 which the VMS descriptor is passed. */
3937 convert_vms_descriptor32 (tree gnu_type
, tree gnu_expr
, Entity_Id gnat_subprog
)
3939 tree desc_type
= TREE_TYPE (TREE_TYPE (gnu_expr
));
3940 tree desc
= build1 (INDIRECT_REF
, desc_type
, gnu_expr
);
3941 /* The CLASS field is the 3rd field in the descriptor. */
3942 tree klass
= DECL_CHAIN (DECL_CHAIN (TYPE_FIELDS (desc_type
)));
3943 /* The POINTER field is the 4th field in the descriptor. */
3944 tree pointer
= DECL_CHAIN (klass
);
3946 /* Retrieve the value of the POINTER field. */
3948 = build3 (COMPONENT_REF
, TREE_TYPE (pointer
), desc
, pointer
, NULL_TREE
);
3950 if (POINTER_TYPE_P (gnu_type
))
3951 return convert (gnu_type
, gnu_expr32
);
3953 else if (TYPE_IS_FAT_POINTER_P (gnu_type
))
3955 tree p_array_type
= TREE_TYPE (TYPE_FIELDS (gnu_type
));
3956 tree p_bounds_type
= TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (gnu_type
)));
3957 tree template_type
= TREE_TYPE (p_bounds_type
);
3958 tree min_field
= TYPE_FIELDS (template_type
);
3959 tree max_field
= DECL_CHAIN (TYPE_FIELDS (template_type
));
3960 tree template_tree
, template_addr
, aflags
, dimct
, t
, u
;
3961 /* See the head comment of build_vms_descriptor. */
3962 int iklass
= TREE_INT_CST_LOW (DECL_INITIAL (klass
));
3963 VEC(constructor_elt
,gc
) *v
;
3965 /* Convert POINTER to the pointer-to-array type. */
3966 gnu_expr32
= convert (p_array_type
, gnu_expr32
);
3970 case 1: /* Class S */
3971 case 15: /* Class SB */
3972 /* Build {1, LENGTH} template; LENGTH is the 1st field. */
3973 v
= VEC_alloc (constructor_elt
, gc
, 2);
3974 t
= TYPE_FIELDS (desc_type
);
3975 t
= build3 (COMPONENT_REF
, TREE_TYPE (t
), desc
, t
, NULL_TREE
);
3976 CONSTRUCTOR_APPEND_ELT (v
, min_field
,
3977 convert (TREE_TYPE (min_field
),
3979 CONSTRUCTOR_APPEND_ELT (v
, max_field
,
3980 convert (TREE_TYPE (max_field
), t
));
3981 template_tree
= gnat_build_constructor (template_type
, v
);
3982 template_addr
= build_unary_op (ADDR_EXPR
, NULL_TREE
, template_tree
);
3984 /* For class S, we are done. */
3988 /* Test that we really have a SB descriptor, like DEC Ada. */
3989 t
= build3 (COMPONENT_REF
, TREE_TYPE (klass
), desc
, klass
, NULL
);
3990 u
= convert (TREE_TYPE (klass
), DECL_INITIAL (klass
));
3991 u
= build_binary_op (EQ_EXPR
, boolean_type_node
, t
, u
);
3992 /* If so, there is already a template in the descriptor and
3993 it is located right after the POINTER field. */
3994 t
= DECL_CHAIN (pointer
);
3996 = build3 (COMPONENT_REF
, TREE_TYPE (t
), desc
, t
, NULL_TREE
);
3997 /* Otherwise use the {1, LENGTH} template we build above. */
3998 template_addr
= build3 (COND_EXPR
, p_bounds_type
, u
,
3999 build_unary_op (ADDR_EXPR
, p_bounds_type
,
4004 case 4: /* Class A */
4005 /* The AFLAGS field is the 7th field in the descriptor. */
4006 t
= DECL_CHAIN (DECL_CHAIN (DECL_CHAIN (pointer
)));
4007 aflags
= build3 (COMPONENT_REF
, TREE_TYPE (t
), desc
, t
, NULL_TREE
);
4008 /* The DIMCT field is the 8th field in the descriptor. */
4010 dimct
= build3 (COMPONENT_REF
, TREE_TYPE (t
), desc
, t
, NULL_TREE
);
4011 /* Raise CONSTRAINT_ERROR if either more than 1 dimension
4012 or FL_COEFF or FL_BOUNDS not set. */
4013 u
= build_int_cst (TREE_TYPE (aflags
), 192);
4014 u
= build_binary_op (TRUTH_OR_EXPR
, boolean_type_node
,
4015 build_binary_op (NE_EXPR
, boolean_type_node
,
4017 convert (TREE_TYPE (dimct
),
4019 build_binary_op (NE_EXPR
, boolean_type_node
,
4020 build2 (BIT_AND_EXPR
,
4024 /* There is already a template in the descriptor and it is
4025 located at the start of block 3 (12th field). */
4026 t
= DECL_CHAIN (DECL_CHAIN (DECL_CHAIN (DECL_CHAIN (t
))));
4028 = build3 (COMPONENT_REF
, TREE_TYPE (t
), desc
, t
, NULL_TREE
);
4029 template_tree
= build3 (COND_EXPR
, TREE_TYPE (t
), u
,
4030 build_call_raise (CE_Length_Check_Failed
, Empty
,
4031 N_Raise_Constraint_Error
),
4034 = build_unary_op (ADDR_EXPR
, p_bounds_type
, template_tree
);
4037 case 10: /* Class NCA */
4039 post_error ("unsupported descriptor type for &", gnat_subprog
);
4040 template_addr
= integer_zero_node
;
4044 /* Build the fat pointer in the form of a constructor. */
4045 v
= VEC_alloc (constructor_elt
, gc
, 2);
4046 CONSTRUCTOR_APPEND_ELT (v
, TYPE_FIELDS (gnu_type
), gnu_expr32
);
4047 CONSTRUCTOR_APPEND_ELT (v
, DECL_CHAIN (TYPE_FIELDS (gnu_type
)),
4050 return gnat_build_constructor (gnu_type
, v
);
4057 /* Convert GNU_EXPR, a pointer to a VMS descriptor, to GNU_TYPE, a regular
4058 pointer or fat pointer type. GNU_EXPR_ALT_TYPE is the alternate (32-bit)
4059 pointer type of GNU_EXPR. BY_REF is true if the result is to be used by
4060 reference. GNAT_SUBPROG is the subprogram to which the VMS descriptor is
4064 convert_vms_descriptor (tree gnu_type
, tree gnu_expr
, tree gnu_expr_alt_type
,
4065 bool by_ref
, Entity_Id gnat_subprog
)
4067 tree desc_type
= TREE_TYPE (TREE_TYPE (gnu_expr
));
4068 tree desc
= build1 (INDIRECT_REF
, desc_type
, gnu_expr
);
4069 tree mbo
= TYPE_FIELDS (desc_type
);
4070 const char *mbostr
= IDENTIFIER_POINTER (DECL_NAME (mbo
));
4071 tree mbmo
= DECL_CHAIN (DECL_CHAIN (DECL_CHAIN (mbo
)));
4072 tree real_type
, is64bit
, gnu_expr32
, gnu_expr64
;
4075 real_type
= TREE_TYPE (gnu_type
);
4077 real_type
= gnu_type
;
4079 /* If the field name is not MBO, it must be 32-bit and no alternate.
4080 Otherwise primary must be 64-bit and alternate 32-bit. */
4081 if (strcmp (mbostr
, "MBO") != 0)
4083 tree ret
= convert_vms_descriptor32 (real_type
, gnu_expr
, gnat_subprog
);
4085 ret
= build_unary_op (ADDR_EXPR
, gnu_type
, ret
);
4089 /* Build the test for 64-bit descriptor. */
4090 mbo
= build3 (COMPONENT_REF
, TREE_TYPE (mbo
), desc
, mbo
, NULL_TREE
);
4091 mbmo
= build3 (COMPONENT_REF
, TREE_TYPE (mbmo
), desc
, mbmo
, NULL_TREE
);
4093 = build_binary_op (TRUTH_ANDIF_EXPR
, boolean_type_node
,
4094 build_binary_op (EQ_EXPR
, boolean_type_node
,
4095 convert (integer_type_node
, mbo
),
4097 build_binary_op (EQ_EXPR
, boolean_type_node
,
4098 convert (integer_type_node
, mbmo
),
4099 integer_minus_one_node
));
4101 /* Build the 2 possible end results. */
4102 gnu_expr64
= convert_vms_descriptor64 (real_type
, gnu_expr
, gnat_subprog
);
4104 gnu_expr64
= build_unary_op (ADDR_EXPR
, gnu_type
, gnu_expr64
);
4105 gnu_expr
= fold_convert (gnu_expr_alt_type
, gnu_expr
);
4106 gnu_expr32
= convert_vms_descriptor32 (real_type
, gnu_expr
, gnat_subprog
);
4108 gnu_expr32
= build_unary_op (ADDR_EXPR
, gnu_type
, gnu_expr32
);
4110 return build3 (COND_EXPR
, gnu_type
, is64bit
, gnu_expr64
, gnu_expr32
);
4113 /* Build a type to be used to represent an aliased object whose nominal type
4114 is an unconstrained array. This consists of a RECORD_TYPE containing a
4115 field of TEMPLATE_TYPE and a field of OBJECT_TYPE, which is an ARRAY_TYPE.
4116 If ARRAY_TYPE is that of an unconstrained array, this is used to represent
4117 an arbitrary unconstrained object. Use NAME as the name of the record.
4118 DEBUG_INFO_P is true if we need to write debug information for the type. */
4121 build_unc_object_type (tree template_type
, tree object_type
, tree name
,
4124 tree type
= make_node (RECORD_TYPE
);
4126 = create_field_decl (get_identifier ("BOUNDS"), template_type
, type
,
4127 NULL_TREE
, NULL_TREE
, 0, 1);
4129 = create_field_decl (get_identifier ("ARRAY"), object_type
, type
,
4130 NULL_TREE
, NULL_TREE
, 0, 1);
4132 TYPE_NAME (type
) = name
;
4133 TYPE_CONTAINS_TEMPLATE_P (type
) = 1;
4134 DECL_CHAIN (template_field
) = array_field
;
4135 finish_record_type (type
, template_field
, 0, true);
4137 /* Declare it now since it will never be declared otherwise. This is
4138 necessary to ensure that its subtrees are properly marked. */
4139 create_type_decl (name
, type
, NULL
, true, debug_info_p
, Empty
);
4144 /* Same, taking a thin or fat pointer type instead of a template type. */
4147 build_unc_object_type_from_ptr (tree thin_fat_ptr_type
, tree object_type
,
4148 tree name
, bool debug_info_p
)
4152 gcc_assert (TYPE_IS_FAT_OR_THIN_POINTER_P (thin_fat_ptr_type
));
4155 = (TYPE_IS_FAT_POINTER_P (thin_fat_ptr_type
)
4156 ? TREE_TYPE (TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (thin_fat_ptr_type
))))
4157 : TREE_TYPE (TYPE_FIELDS (TREE_TYPE (thin_fat_ptr_type
))));
4160 build_unc_object_type (template_type
, object_type
, name
, debug_info_p
);
4163 /* Update anything previously pointing to OLD_TYPE to point to NEW_TYPE.
4164 In the normal case this is just two adjustments, but we have more to
4165 do if NEW_TYPE is an UNCONSTRAINED_ARRAY_TYPE. */
4168 update_pointer_to (tree old_type
, tree new_type
)
4170 tree ptr
= TYPE_POINTER_TO (old_type
);
4171 tree ref
= TYPE_REFERENCE_TO (old_type
);
4174 /* If this is the main variant, process all the other variants first. */
4175 if (TYPE_MAIN_VARIANT (old_type
) == old_type
)
4176 for (t
= TYPE_NEXT_VARIANT (old_type
); t
; t
= TYPE_NEXT_VARIANT (t
))
4177 update_pointer_to (t
, new_type
);
4179 /* If no pointers and no references, we are done. */
4183 /* Merge the old type qualifiers in the new type.
4185 Each old variant has qualifiers for specific reasons, and the new
4186 designated type as well. Each set of qualifiers represents useful
4187 information grabbed at some point, and merging the two simply unifies
4188 these inputs into the final type description.
4190 Consider for instance a volatile type frozen after an access to constant
4191 type designating it; after the designated type's freeze, we get here with
4192 a volatile NEW_TYPE and a dummy OLD_TYPE with a readonly variant, created
4193 when the access type was processed. We will make a volatile and readonly
4194 designated type, because that's what it really is.
4196 We might also get here for a non-dummy OLD_TYPE variant with different
4197 qualifiers than those of NEW_TYPE, for instance in some cases of pointers
4198 to private record type elaboration (see the comments around the call to
4199 this routine in gnat_to_gnu_entity <E_Access_Type>). We have to merge
4200 the qualifiers in those cases too, to avoid accidentally discarding the
4201 initial set, and will often end up with OLD_TYPE == NEW_TYPE then. */
4203 = build_qualified_type (new_type
,
4204 TYPE_QUALS (old_type
) | TYPE_QUALS (new_type
));
4206 /* If old type and new type are identical, there is nothing to do. */
4207 if (old_type
== new_type
)
4210 /* Otherwise, first handle the simple case. */
4211 if (TREE_CODE (new_type
) != UNCONSTRAINED_ARRAY_TYPE
)
4213 tree new_ptr
, new_ref
;
4215 /* If pointer or reference already points to new type, nothing to do.
4216 This can happen as update_pointer_to can be invoked multiple times
4217 on the same couple of types because of the type variants. */
4218 if ((ptr
&& TREE_TYPE (ptr
) == new_type
)
4219 || (ref
&& TREE_TYPE (ref
) == new_type
))
4222 /* Chain PTR and its variants at the end. */
4223 new_ptr
= TYPE_POINTER_TO (new_type
);
4226 while (TYPE_NEXT_PTR_TO (new_ptr
))
4227 new_ptr
= TYPE_NEXT_PTR_TO (new_ptr
);
4228 TYPE_NEXT_PTR_TO (new_ptr
) = ptr
;
4231 TYPE_POINTER_TO (new_type
) = ptr
;
4233 /* Now adjust them. */
4234 for (; ptr
; ptr
= TYPE_NEXT_PTR_TO (ptr
))
4235 for (t
= TYPE_MAIN_VARIANT (ptr
); t
; t
= TYPE_NEXT_VARIANT (t
))
4237 TREE_TYPE (t
) = new_type
;
4238 if (TYPE_NULL_BOUNDS (t
))
4239 TREE_TYPE (TREE_OPERAND (TYPE_NULL_BOUNDS (t
), 0)) = new_type
;
4242 /* Chain REF and its variants at the end. */
4243 new_ref
= TYPE_REFERENCE_TO (new_type
);
4246 while (TYPE_NEXT_REF_TO (new_ref
))
4247 new_ref
= TYPE_NEXT_REF_TO (new_ref
);
4248 TYPE_NEXT_REF_TO (new_ref
) = ref
;
4251 TYPE_REFERENCE_TO (new_type
) = ref
;
4253 /* Now adjust them. */
4254 for (; ref
; ref
= TYPE_NEXT_REF_TO (ref
))
4255 for (t
= TYPE_MAIN_VARIANT (ref
); t
; t
= TYPE_NEXT_VARIANT (t
))
4256 TREE_TYPE (t
) = new_type
;
4258 TYPE_POINTER_TO (old_type
) = NULL_TREE
;
4259 TYPE_REFERENCE_TO (old_type
) = NULL_TREE
;
4262 /* Now deal with the unconstrained array case. In this case the pointer
4263 is actually a record where both fields are pointers to dummy nodes.
4264 Turn them into pointers to the correct types using update_pointer_to.
4265 Likewise for the pointer to the object record (thin pointer). */
4268 tree new_ptr
= TYPE_POINTER_TO (new_type
);
4270 gcc_assert (TYPE_IS_FAT_POINTER_P (ptr
));
4272 /* If PTR already points to NEW_TYPE, nothing to do. This can happen
4273 since update_pointer_to can be invoked multiple times on the same
4274 couple of types because of the type variants. */
4275 if (TYPE_UNCONSTRAINED_ARRAY (ptr
) == new_type
)
4279 (TREE_TYPE (TREE_TYPE (TYPE_FIELDS (ptr
))),
4280 TREE_TYPE (TREE_TYPE (TYPE_FIELDS (new_ptr
))));
4283 (TREE_TYPE (TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (ptr
)))),
4284 TREE_TYPE (TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (new_ptr
)))));
4286 update_pointer_to (TYPE_OBJECT_RECORD_TYPE (old_type
),
4287 TYPE_OBJECT_RECORD_TYPE (new_type
));
4289 TYPE_POINTER_TO (old_type
) = NULL_TREE
;
4293 /* Convert EXPR, a pointer to a constrained array, into a pointer to an
4294 unconstrained one. This involves making or finding a template. */
4297 convert_to_fat_pointer (tree type
, tree expr
)
4299 tree template_type
= TREE_TYPE (TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (type
))));
4300 tree p_array_type
= TREE_TYPE (TYPE_FIELDS (type
));
4301 tree etype
= TREE_TYPE (expr
);
4303 VEC(constructor_elt
,gc
) *v
= VEC_alloc (constructor_elt
, gc
, 2);
4305 /* If EXPR is null, make a fat pointer that contains a null pointer to the
4306 array (compare_fat_pointers ensures that this is the full discriminant)
4307 and a valid pointer to the bounds. This latter property is necessary
4308 since the compiler can hoist the load of the bounds done through it. */
4309 if (integer_zerop (expr
))
4311 tree ptr_template_type
= TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (type
)));
4312 tree null_bounds
, t
;
4314 if (TYPE_NULL_BOUNDS (ptr_template_type
))
4315 null_bounds
= TYPE_NULL_BOUNDS (ptr_template_type
);
4318 /* The template type can still be dummy at this point so we build an
4319 empty constructor. The middle-end will fill it in with zeros. */
4320 t
= build_constructor (template_type
, NULL
);
4321 TREE_CONSTANT (t
) = TREE_STATIC (t
) = 1;
4322 null_bounds
= build_unary_op (ADDR_EXPR
, NULL_TREE
, t
);
4323 SET_TYPE_NULL_BOUNDS (ptr_template_type
, null_bounds
);
4326 CONSTRUCTOR_APPEND_ELT (v
, TYPE_FIELDS (type
),
4327 fold_convert (p_array_type
, null_pointer_node
));
4328 CONSTRUCTOR_APPEND_ELT (v
, DECL_CHAIN (TYPE_FIELDS (type
)), null_bounds
);
4329 t
= build_constructor (type
, v
);
4330 /* Do not set TREE_CONSTANT so as to force T to static memory. */
4331 TREE_CONSTANT (t
) = 0;
4332 TREE_STATIC (t
) = 1;
4337 /* If EXPR is a thin pointer, make template and data from the record. */
4338 if (TYPE_IS_THIN_POINTER_P (etype
))
4340 tree field
= TYPE_FIELDS (TREE_TYPE (etype
));
4342 expr
= gnat_protect_expr (expr
);
4343 if (TREE_CODE (expr
) == ADDR_EXPR
)
4344 expr
= TREE_OPERAND (expr
, 0);
4347 /* If we have a TYPE_UNCONSTRAINED_ARRAY attached to the RECORD_TYPE,
4348 the thin pointer value has been shifted so we first need to shift
4349 it back to get the template address. */
4350 if (TYPE_UNCONSTRAINED_ARRAY (TREE_TYPE (etype
)))
4352 = build_binary_op (POINTER_PLUS_EXPR
, etype
, expr
,
4353 fold_build1 (NEGATE_EXPR
, sizetype
,
4355 (DECL_CHAIN (field
))));
4356 expr
= build1 (INDIRECT_REF
, TREE_TYPE (etype
), expr
);
4359 template_tree
= build_component_ref (expr
, NULL_TREE
, field
, false);
4360 expr
= build_unary_op (ADDR_EXPR
, NULL_TREE
,
4361 build_component_ref (expr
, NULL_TREE
,
4362 DECL_CHAIN (field
), false));
4365 /* Otherwise, build the constructor for the template. */
4367 template_tree
= build_template (template_type
, TREE_TYPE (etype
), expr
);
4369 /* The final result is a constructor for the fat pointer.
4371 If EXPR is an argument of a foreign convention subprogram, the type it
4372 points to is directly the component type. In this case, the expression
4373 type may not match the corresponding FIELD_DECL type at this point, so we
4374 call "convert" here to fix that up if necessary. This type consistency is
4375 required, for instance because it ensures that possible later folding of
4376 COMPONENT_REFs against this constructor always yields something of the
4377 same type as the initial reference.
4379 Note that the call to "build_template" above is still fine because it
4380 will only refer to the provided TEMPLATE_TYPE in this case. */
4381 CONSTRUCTOR_APPEND_ELT (v
, TYPE_FIELDS (type
),
4382 convert (p_array_type
, expr
));
4383 CONSTRUCTOR_APPEND_ELT (v
, DECL_CHAIN (TYPE_FIELDS (type
)),
4384 build_unary_op (ADDR_EXPR
, NULL_TREE
,
4386 return gnat_build_constructor (type
, v
);
4389 /* Create an expression whose value is that of EXPR,
4390 converted to type TYPE. The TREE_TYPE of the value
4391 is always TYPE. This function implements all reasonable
4392 conversions; callers should filter out those that are
4393 not permitted by the language being compiled. */
4396 convert (tree type
, tree expr
)
4398 tree etype
= TREE_TYPE (expr
);
4399 enum tree_code ecode
= TREE_CODE (etype
);
4400 enum tree_code code
= TREE_CODE (type
);
4402 /* If the expression is already of the right type, we are done. */
4406 /* If both input and output have padding and are of variable size, do this
4407 as an unchecked conversion. Likewise if one is a mere variant of the
4408 other, so we avoid a pointless unpad/repad sequence. */
4409 else if (code
== RECORD_TYPE
&& ecode
== RECORD_TYPE
4410 && TYPE_PADDING_P (type
) && TYPE_PADDING_P (etype
)
4411 && (!TREE_CONSTANT (TYPE_SIZE (type
))
4412 || !TREE_CONSTANT (TYPE_SIZE (etype
))
4413 || TYPE_MAIN_VARIANT (type
) == TYPE_MAIN_VARIANT (etype
)
4414 || TYPE_NAME (TREE_TYPE (TYPE_FIELDS (type
)))
4415 == TYPE_NAME (TREE_TYPE (TYPE_FIELDS (etype
)))))
4418 /* If the output type has padding, convert to the inner type and make a
4419 constructor to build the record, unless a variable size is involved. */
4420 else if (code
== RECORD_TYPE
&& TYPE_PADDING_P (type
))
4422 VEC(constructor_elt
,gc
) *v
;
4424 /* If we previously converted from another type and our type is
4425 of variable size, remove the conversion to avoid the need for
4426 variable-sized temporaries. Likewise for a conversion between
4427 original and packable version. */
4428 if (TREE_CODE (expr
) == VIEW_CONVERT_EXPR
4429 && (!TREE_CONSTANT (TYPE_SIZE (type
))
4430 || (ecode
== RECORD_TYPE
4431 && TYPE_NAME (etype
)
4432 == TYPE_NAME (TREE_TYPE (TREE_OPERAND (expr
, 0))))))
4433 expr
= TREE_OPERAND (expr
, 0);
4435 /* If we are just removing the padding from expr, convert the original
4436 object if we have variable size in order to avoid the need for some
4437 variable-sized temporaries. Likewise if the padding is a variant
4438 of the other, so we avoid a pointless unpad/repad sequence. */
4439 if (TREE_CODE (expr
) == COMPONENT_REF
4440 && TYPE_IS_PADDING_P (TREE_TYPE (TREE_OPERAND (expr
, 0)))
4441 && (!TREE_CONSTANT (TYPE_SIZE (type
))
4442 || TYPE_MAIN_VARIANT (type
)
4443 == TYPE_MAIN_VARIANT (TREE_TYPE (TREE_OPERAND (expr
, 0)))
4444 || (ecode
== RECORD_TYPE
4445 && TYPE_NAME (etype
)
4446 == TYPE_NAME (TREE_TYPE (TYPE_FIELDS (type
))))))
4447 return convert (type
, TREE_OPERAND (expr
, 0));
4449 /* If the inner type is of self-referential size and the expression type
4450 is a record, do this as an unchecked conversion. But first pad the
4451 expression if possible to have the same size on both sides. */
4452 if (ecode
== RECORD_TYPE
4453 && CONTAINS_PLACEHOLDER_P (DECL_SIZE (TYPE_FIELDS (type
))))
4455 if (TREE_CODE (TYPE_SIZE (etype
)) == INTEGER_CST
)
4456 expr
= convert (maybe_pad_type (etype
, TYPE_SIZE (type
), 0, Empty
,
4457 false, false, false, true),
4459 return unchecked_convert (type
, expr
, false);
4462 /* If we are converting between array types with variable size, do the
4463 final conversion as an unchecked conversion, again to avoid the need
4464 for some variable-sized temporaries. If valid, this conversion is
4465 very likely purely technical and without real effects. */
4466 if (ecode
== ARRAY_TYPE
4467 && TREE_CODE (TREE_TYPE (TYPE_FIELDS (type
))) == ARRAY_TYPE
4468 && !TREE_CONSTANT (TYPE_SIZE (etype
))
4469 && !TREE_CONSTANT (TYPE_SIZE (type
)))
4470 return unchecked_convert (type
,
4471 convert (TREE_TYPE (TYPE_FIELDS (type
)),
4475 v
= VEC_alloc (constructor_elt
, gc
, 1);
4476 CONSTRUCTOR_APPEND_ELT (v
, TYPE_FIELDS (type
),
4477 convert (TREE_TYPE (TYPE_FIELDS (type
)), expr
));
4478 return gnat_build_constructor (type
, v
);
4481 /* If the input type has padding, remove it and convert to the output type.
4482 The conditions ordering is arranged to ensure that the output type is not
4483 a padding type here, as it is not clear whether the conversion would
4484 always be correct if this was to happen. */
4485 else if (ecode
== RECORD_TYPE
&& TYPE_PADDING_P (etype
))
4489 /* If we have just converted to this padded type, just get the
4490 inner expression. */
4491 if (TREE_CODE (expr
) == CONSTRUCTOR
4492 && !VEC_empty (constructor_elt
, CONSTRUCTOR_ELTS (expr
))
4493 && VEC_index (constructor_elt
, CONSTRUCTOR_ELTS (expr
), 0)->index
4494 == TYPE_FIELDS (etype
))
4496 = VEC_index (constructor_elt
, CONSTRUCTOR_ELTS (expr
), 0)->value
;
4498 /* Otherwise, build an explicit component reference. */
4501 = build_component_ref (expr
, NULL_TREE
, TYPE_FIELDS (etype
), false);
4503 return convert (type
, unpadded
);
4506 /* If the input is a biased type, adjust first. */
4507 if (ecode
== INTEGER_TYPE
&& TYPE_BIASED_REPRESENTATION_P (etype
))
4508 return convert (type
, fold_build2 (PLUS_EXPR
, TREE_TYPE (etype
),
4509 fold_convert (TREE_TYPE (etype
),
4511 TYPE_MIN_VALUE (etype
)));
4513 /* If the input is a justified modular type, we need to extract the actual
4514 object before converting it to any other type with the exceptions of an
4515 unconstrained array or of a mere type variant. It is useful to avoid the
4516 extraction and conversion in the type variant case because it could end
4517 up replacing a VAR_DECL expr by a constructor and we might be about the
4518 take the address of the result. */
4519 if (ecode
== RECORD_TYPE
&& TYPE_JUSTIFIED_MODULAR_P (etype
)
4520 && code
!= UNCONSTRAINED_ARRAY_TYPE
4521 && TYPE_MAIN_VARIANT (type
) != TYPE_MAIN_VARIANT (etype
))
4522 return convert (type
, build_component_ref (expr
, NULL_TREE
,
4523 TYPE_FIELDS (etype
), false));
4525 /* If converting to a type that contains a template, convert to the data
4526 type and then build the template. */
4527 if (code
== RECORD_TYPE
&& TYPE_CONTAINS_TEMPLATE_P (type
))
4529 tree obj_type
= TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (type
)));
4530 VEC(constructor_elt
,gc
) *v
= VEC_alloc (constructor_elt
, gc
, 2);
4532 /* If the source already has a template, get a reference to the
4533 associated array only, as we are going to rebuild a template
4534 for the target type anyway. */
4535 expr
= maybe_unconstrained_array (expr
);
4537 CONSTRUCTOR_APPEND_ELT (v
, TYPE_FIELDS (type
),
4538 build_template (TREE_TYPE (TYPE_FIELDS (type
)),
4539 obj_type
, NULL_TREE
));
4540 CONSTRUCTOR_APPEND_ELT (v
, DECL_CHAIN (TYPE_FIELDS (type
)),
4541 convert (obj_type
, expr
));
4542 return gnat_build_constructor (type
, v
);
4545 /* There are some cases of expressions that we process specially. */
4546 switch (TREE_CODE (expr
))
4552 /* Just set its type here. For TRANSFORM_EXPR, we will do the actual
4553 conversion in gnat_expand_expr. NULL_EXPR does not represent
4554 and actual value, so no conversion is needed. */
4555 expr
= copy_node (expr
);
4556 TREE_TYPE (expr
) = type
;
4560 /* If we are converting a STRING_CST to another constrained array type,
4561 just make a new one in the proper type. */
4562 if (code
== ecode
&& AGGREGATE_TYPE_P (etype
)
4563 && !(TREE_CODE (TYPE_SIZE (etype
)) == INTEGER_CST
4564 && TREE_CODE (TYPE_SIZE (type
)) != INTEGER_CST
))
4566 expr
= copy_node (expr
);
4567 TREE_TYPE (expr
) = type
;
4573 /* If we are converting a VECTOR_CST to a mere variant type, just make
4574 a new one in the proper type. */
4575 if (code
== ecode
&& gnat_types_compatible_p (type
, etype
))
4577 expr
= copy_node (expr
);
4578 TREE_TYPE (expr
) = type
;
4583 /* If we are converting a CONSTRUCTOR to a mere variant type, just make
4584 a new one in the proper type. */
4585 if (code
== ecode
&& gnat_types_compatible_p (type
, etype
))
4587 expr
= copy_node (expr
);
4588 TREE_TYPE (expr
) = type
;
4589 CONSTRUCTOR_ELTS (expr
)
4590 = VEC_copy (constructor_elt
, gc
, CONSTRUCTOR_ELTS (expr
));
4594 /* Likewise for a conversion between original and packable version, or
4595 conversion between types of the same size and with the same list of
4596 fields, but we have to work harder to preserve type consistency. */
4598 && code
== RECORD_TYPE
4599 && (TYPE_NAME (type
) == TYPE_NAME (etype
)
4600 || tree_int_cst_equal (TYPE_SIZE (type
), TYPE_SIZE (etype
))))
4603 VEC(constructor_elt
,gc
) *e
= CONSTRUCTOR_ELTS (expr
);
4604 unsigned HOST_WIDE_INT len
= VEC_length (constructor_elt
, e
);
4605 VEC(constructor_elt
,gc
) *v
= VEC_alloc (constructor_elt
, gc
, len
);
4606 tree efield
= TYPE_FIELDS (etype
), field
= TYPE_FIELDS (type
);
4607 unsigned HOST_WIDE_INT idx
;
4610 /* Whether we need to clear TREE_CONSTANT et al. on the output
4611 constructor when we convert in place. */
4612 bool clear_constant
= false;
4614 FOR_EACH_CONSTRUCTOR_ELT(e
, idx
, index
, value
)
4616 constructor_elt
*elt
;
4617 /* We expect only simple constructors. */
4618 if (!SAME_FIELD_P (index
, efield
))
4620 /* The field must be the same. */
4621 if (!SAME_FIELD_P (efield
, field
))
4623 elt
= VEC_quick_push (constructor_elt
, v
, NULL
);
4625 elt
->value
= convert (TREE_TYPE (field
), value
);
4627 /* If packing has made this field a bitfield and the input
4628 value couldn't be emitted statically any more, we need to
4629 clear TREE_CONSTANT on our output. */
4631 && TREE_CONSTANT (expr
)
4632 && !CONSTRUCTOR_BITFIELD_P (efield
)
4633 && CONSTRUCTOR_BITFIELD_P (field
)
4634 && !initializer_constant_valid_for_bitfield_p (value
))
4635 clear_constant
= true;
4637 efield
= DECL_CHAIN (efield
);
4638 field
= DECL_CHAIN (field
);
4641 /* If we have been able to match and convert all the input fields
4642 to their output type, convert in place now. We'll fallback to a
4643 view conversion downstream otherwise. */
4646 expr
= copy_node (expr
);
4647 TREE_TYPE (expr
) = type
;
4648 CONSTRUCTOR_ELTS (expr
) = v
;
4650 TREE_CONSTANT (expr
) = TREE_STATIC (expr
) = 0;
4655 /* Likewise for a conversion between array type and vector type with a
4656 compatible representative array. */
4657 else if (code
== VECTOR_TYPE
4658 && ecode
== ARRAY_TYPE
4659 && gnat_types_compatible_p (TYPE_REPRESENTATIVE_ARRAY (type
),
4662 VEC(constructor_elt
,gc
) *e
= CONSTRUCTOR_ELTS (expr
);
4663 unsigned HOST_WIDE_INT len
= VEC_length (constructor_elt
, e
);
4664 VEC(constructor_elt
,gc
) *v
;
4665 unsigned HOST_WIDE_INT ix
;
4668 /* Build a VECTOR_CST from a *constant* array constructor. */
4669 if (TREE_CONSTANT (expr
))
4671 bool constant_p
= true;
4673 /* Iterate through elements and check if all constructor
4674 elements are *_CSTs. */
4675 FOR_EACH_CONSTRUCTOR_VALUE (e
, ix
, value
)
4676 if (!CONSTANT_CLASS_P (value
))
4683 return build_vector_from_ctor (type
,
4684 CONSTRUCTOR_ELTS (expr
));
4687 /* Otherwise, build a regular vector constructor. */
4688 v
= VEC_alloc (constructor_elt
, gc
, len
);
4689 FOR_EACH_CONSTRUCTOR_VALUE (e
, ix
, value
)
4691 constructor_elt
*elt
= VEC_quick_push (constructor_elt
, v
, NULL
);
4692 elt
->index
= NULL_TREE
;
4695 expr
= copy_node (expr
);
4696 TREE_TYPE (expr
) = type
;
4697 CONSTRUCTOR_ELTS (expr
) = v
;
4702 case UNCONSTRAINED_ARRAY_REF
:
4703 /* First retrieve the underlying array. */
4704 expr
= maybe_unconstrained_array (expr
);
4705 etype
= TREE_TYPE (expr
);
4706 ecode
= TREE_CODE (etype
);
4709 case VIEW_CONVERT_EXPR
:
4711 /* GCC 4.x is very sensitive to type consistency overall, and view
4712 conversions thus are very frequent. Even though just "convert"ing
4713 the inner operand to the output type is fine in most cases, it
4714 might expose unexpected input/output type mismatches in special
4715 circumstances so we avoid such recursive calls when we can. */
4716 tree op0
= TREE_OPERAND (expr
, 0);
4718 /* If we are converting back to the original type, we can just
4719 lift the input conversion. This is a common occurrence with
4720 switches back-and-forth amongst type variants. */
4721 if (type
== TREE_TYPE (op0
))
4724 /* Otherwise, if we're converting between two aggregate or vector
4725 types, we might be allowed to substitute the VIEW_CONVERT_EXPR
4726 target type in place or to just convert the inner expression. */
4727 if ((AGGREGATE_TYPE_P (type
) && AGGREGATE_TYPE_P (etype
))
4728 || (VECTOR_TYPE_P (type
) && VECTOR_TYPE_P (etype
)))
4730 /* If we are converting between mere variants, we can just
4731 substitute the VIEW_CONVERT_EXPR in place. */
4732 if (gnat_types_compatible_p (type
, etype
))
4733 return build1 (VIEW_CONVERT_EXPR
, type
, op0
);
4735 /* Otherwise, we may just bypass the input view conversion unless
4736 one of the types is a fat pointer, which is handled by
4737 specialized code below which relies on exact type matching. */
4738 else if (!TYPE_IS_FAT_POINTER_P (type
)
4739 && !TYPE_IS_FAT_POINTER_P (etype
))
4740 return convert (type
, op0
);
4750 /* Check for converting to a pointer to an unconstrained array. */
4751 if (TYPE_IS_FAT_POINTER_P (type
) && !TYPE_IS_FAT_POINTER_P (etype
))
4752 return convert_to_fat_pointer (type
, expr
);
4754 /* If we are converting between two aggregate or vector types that are mere
4755 variants, just make a VIEW_CONVERT_EXPR. Likewise when we are converting
4756 to a vector type from its representative array type. */
4757 else if ((code
== ecode
4758 && (AGGREGATE_TYPE_P (type
) || VECTOR_TYPE_P (type
))
4759 && gnat_types_compatible_p (type
, etype
))
4760 || (code
== VECTOR_TYPE
4761 && ecode
== ARRAY_TYPE
4762 && gnat_types_compatible_p (TYPE_REPRESENTATIVE_ARRAY (type
),
4764 return build1 (VIEW_CONVERT_EXPR
, type
, expr
);
4766 /* If we are converting between tagged types, try to upcast properly. */
4767 else if (ecode
== RECORD_TYPE
&& code
== RECORD_TYPE
4768 && TYPE_ALIGN_OK (etype
) && TYPE_ALIGN_OK (type
))
4770 tree child_etype
= etype
;
4772 tree field
= TYPE_FIELDS (child_etype
);
4773 if (DECL_NAME (field
) == parent_name_id
&& TREE_TYPE (field
) == type
)
4774 return build_component_ref (expr
, NULL_TREE
, field
, false);
4775 child_etype
= TREE_TYPE (field
);
4776 } while (TREE_CODE (child_etype
) == RECORD_TYPE
);
4779 /* If we are converting from a smaller form of record type back to it, just
4780 make a VIEW_CONVERT_EXPR. But first pad the expression to have the same
4781 size on both sides. */
4782 else if (ecode
== RECORD_TYPE
&& code
== RECORD_TYPE
4783 && smaller_form_type_p (etype
, type
))
4785 expr
= convert (maybe_pad_type (etype
, TYPE_SIZE (type
), 0, Empty
,
4786 false, false, false, true),
4788 return build1 (VIEW_CONVERT_EXPR
, type
, expr
);
4791 /* In all other cases of related types, make a NOP_EXPR. */
4792 else if (TYPE_MAIN_VARIANT (type
) == TYPE_MAIN_VARIANT (etype
))
4793 return fold_convert (type
, expr
);
4798 return fold_build1 (CONVERT_EXPR
, type
, expr
);
4801 if (TYPE_HAS_ACTUAL_BOUNDS_P (type
)
4802 && (ecode
== ARRAY_TYPE
|| ecode
== UNCONSTRAINED_ARRAY_TYPE
4803 || (ecode
== RECORD_TYPE
&& TYPE_CONTAINS_TEMPLATE_P (etype
))))
4804 return unchecked_convert (type
, expr
, false);
4805 else if (TYPE_BIASED_REPRESENTATION_P (type
))
4806 return fold_convert (type
,
4807 fold_build2 (MINUS_EXPR
, TREE_TYPE (type
),
4808 convert (TREE_TYPE (type
), expr
),
4809 TYPE_MIN_VALUE (type
)));
4811 /* ... fall through ... */
4815 /* If we are converting an additive expression to an integer type
4816 with lower precision, be wary of the optimization that can be
4817 applied by convert_to_integer. There are 2 problematic cases:
4818 - if the first operand was originally of a biased type,
4819 because we could be recursively called to convert it
4820 to an intermediate type and thus rematerialize the
4821 additive operator endlessly,
4822 - if the expression contains a placeholder, because an
4823 intermediate conversion that changes the sign could
4824 be inserted and thus introduce an artificial overflow
4825 at compile time when the placeholder is substituted. */
4826 if (code
== INTEGER_TYPE
4827 && ecode
== INTEGER_TYPE
4828 && TYPE_PRECISION (type
) < TYPE_PRECISION (etype
)
4829 && (TREE_CODE (expr
) == PLUS_EXPR
|| TREE_CODE (expr
) == MINUS_EXPR
))
4831 tree op0
= get_unwidened (TREE_OPERAND (expr
, 0), type
);
4833 if ((TREE_CODE (TREE_TYPE (op0
)) == INTEGER_TYPE
4834 && TYPE_BIASED_REPRESENTATION_P (TREE_TYPE (op0
)))
4835 || CONTAINS_PLACEHOLDER_P (expr
))
4836 return build1 (NOP_EXPR
, type
, expr
);
4839 return fold (convert_to_integer (type
, expr
));
4842 case REFERENCE_TYPE
:
4843 /* If converting between two thin pointers, adjust if needed to account
4844 for differing offsets from the base pointer, depending on whether
4845 there is a TYPE_UNCONSTRAINED_ARRAY attached to the record type. */
4846 if (TYPE_IS_THIN_POINTER_P (etype
) && TYPE_IS_THIN_POINTER_P (type
))
4849 = TYPE_UNCONSTRAINED_ARRAY (TREE_TYPE (etype
)) != NULL_TREE
4850 ? byte_position (DECL_CHAIN (TYPE_FIELDS (TREE_TYPE (etype
))))
4853 = TYPE_UNCONSTRAINED_ARRAY (TREE_TYPE (type
)) != NULL_TREE
4854 ? byte_position (DECL_CHAIN (TYPE_FIELDS (TREE_TYPE (type
))))
4856 tree byte_diff
= size_diffop (type_pos
, etype_pos
);
4858 expr
= build1 (NOP_EXPR
, type
, expr
);
4859 if (integer_zerop (byte_diff
))
4862 return build_binary_op (POINTER_PLUS_EXPR
, type
, expr
,
4863 fold_convert (sizetype
, byte_diff
));
4866 /* If converting fat pointer to normal or thin pointer, get the pointer
4867 to the array and then convert it. */
4868 if (TYPE_IS_FAT_POINTER_P (etype
))
4870 = build_component_ref (expr
, NULL_TREE
, TYPE_FIELDS (etype
), false);
4872 return fold (convert_to_pointer (type
, expr
));
4875 return fold (convert_to_real (type
, expr
));
4878 if (TYPE_JUSTIFIED_MODULAR_P (type
) && !AGGREGATE_TYPE_P (etype
))
4880 VEC(constructor_elt
,gc
) *v
= VEC_alloc (constructor_elt
, gc
, 1);
4882 CONSTRUCTOR_APPEND_ELT (v
, TYPE_FIELDS (type
),
4883 convert (TREE_TYPE (TYPE_FIELDS (type
)),
4885 return gnat_build_constructor (type
, v
);
4888 /* ... fall through ... */
4891 /* In these cases, assume the front-end has validated the conversion.
4892 If the conversion is valid, it will be a bit-wise conversion, so
4893 it can be viewed as an unchecked conversion. */
4894 return unchecked_convert (type
, expr
, false);
4897 /* This is a either a conversion between a tagged type and some
4898 subtype, which we have to mark as a UNION_TYPE because of
4899 overlapping fields or a conversion of an Unchecked_Union. */
4900 return unchecked_convert (type
, expr
, false);
4902 case UNCONSTRAINED_ARRAY_TYPE
:
4903 /* If the input is a VECTOR_TYPE, convert to the representative
4904 array type first. */
4905 if (ecode
== VECTOR_TYPE
)
4907 expr
= convert (TYPE_REPRESENTATIVE_ARRAY (etype
), expr
);
4908 etype
= TREE_TYPE (expr
);
4909 ecode
= TREE_CODE (etype
);
4912 /* If EXPR is a constrained array, take its address, convert it to a
4913 fat pointer, and then dereference it. Likewise if EXPR is a
4914 record containing both a template and a constrained array.
4915 Note that a record representing a justified modular type
4916 always represents a packed constrained array. */
4917 if (ecode
== ARRAY_TYPE
4918 || (ecode
== INTEGER_TYPE
&& TYPE_HAS_ACTUAL_BOUNDS_P (etype
))
4919 || (ecode
== RECORD_TYPE
&& TYPE_CONTAINS_TEMPLATE_P (etype
))
4920 || (ecode
== RECORD_TYPE
&& TYPE_JUSTIFIED_MODULAR_P (etype
)))
4923 (INDIRECT_REF
, NULL_TREE
,
4924 convert_to_fat_pointer (TREE_TYPE (type
),
4925 build_unary_op (ADDR_EXPR
,
4928 /* Do something very similar for converting one unconstrained
4929 array to another. */
4930 else if (ecode
== UNCONSTRAINED_ARRAY_TYPE
)
4932 build_unary_op (INDIRECT_REF
, NULL_TREE
,
4933 convert (TREE_TYPE (type
),
4934 build_unary_op (ADDR_EXPR
,
4940 return fold (convert_to_complex (type
, expr
));
4947 /* Create an expression whose value is that of EXPR converted to the common
4948 index type, which is sizetype. EXPR is supposed to be in the base type
4949 of the GNAT index type. Calling it is equivalent to doing
4951 convert (sizetype, expr)
4953 but we try to distribute the type conversion with the knowledge that EXPR
4954 cannot overflow in its type. This is a best-effort approach and we fall
4955 back to the above expression as soon as difficulties are encountered.
4957 This is necessary to overcome issues that arise when the GNAT base index
4958 type and the GCC common index type (sizetype) don't have the same size,
4959 which is quite frequent on 64-bit architectures. In this case, and if
4960 the GNAT base index type is signed but the iteration type of the loop has
4961 been forced to unsigned, the loop scalar evolution engine cannot compute
4962 a simple evolution for the general induction variables associated with the
4963 array indices, because it will preserve the wrap-around semantics in the
4964 unsigned type of their "inner" part. As a result, many loop optimizations
4967 The solution is to use a special (basic) induction variable that is at
4968 least as large as sizetype, and to express the aforementioned general
4969 induction variables in terms of this induction variable, eliminating
4970 the problematic intermediate truncation to the GNAT base index type.
4971 This is possible as long as the original expression doesn't overflow
4972 and if the middle-end hasn't introduced artificial overflows in the
4973 course of the various simplification it can make to the expression. */
4976 convert_to_index_type (tree expr
)
4978 enum tree_code code
= TREE_CODE (expr
);
4979 tree type
= TREE_TYPE (expr
);
4981 /* If the type is unsigned, overflow is allowed so we cannot be sure that
4982 EXPR doesn't overflow. Keep it simple if optimization is disabled. */
4983 if (TYPE_UNSIGNED (type
) || !optimize
)
4984 return convert (sizetype
, expr
);
4989 /* The main effect of the function: replace a loop parameter with its
4990 associated special induction variable. */
4991 if (DECL_LOOP_PARM_P (expr
) && DECL_INDUCTION_VAR (expr
))
4992 expr
= DECL_INDUCTION_VAR (expr
);
4997 tree otype
= TREE_TYPE (TREE_OPERAND (expr
, 0));
4998 /* Bail out as soon as we suspect some sort of type frobbing. */
4999 if (TYPE_PRECISION (type
) != TYPE_PRECISION (otype
)
5000 || TYPE_UNSIGNED (type
) != TYPE_UNSIGNED (otype
))
5004 /* ... fall through ... */
5006 case NON_LVALUE_EXPR
:
5007 return fold_build1 (code
, sizetype
,
5008 convert_to_index_type (TREE_OPERAND (expr
, 0)));
5013 return fold_build2 (code
, sizetype
,
5014 convert_to_index_type (TREE_OPERAND (expr
, 0)),
5015 convert_to_index_type (TREE_OPERAND (expr
, 1)));
5018 return fold_build2 (code
, sizetype
, TREE_OPERAND (expr
, 0),
5019 convert_to_index_type (TREE_OPERAND (expr
, 1)));
5022 return fold_build3 (code
, sizetype
, TREE_OPERAND (expr
, 0),
5023 convert_to_index_type (TREE_OPERAND (expr
, 1)),
5024 convert_to_index_type (TREE_OPERAND (expr
, 2)));
5030 return convert (sizetype
, expr
);
5033 /* Remove all conversions that are done in EXP. This includes converting
5034 from a padded type or to a justified modular type. If TRUE_ADDRESS
5035 is true, always return the address of the containing object even if
5036 the address is not bit-aligned. */
5039 remove_conversions (tree exp
, bool true_address
)
5041 switch (TREE_CODE (exp
))
5045 && TREE_CODE (TREE_TYPE (exp
)) == RECORD_TYPE
5046 && TYPE_JUSTIFIED_MODULAR_P (TREE_TYPE (exp
)))
5048 remove_conversions (VEC_index (constructor_elt
,
5049 CONSTRUCTOR_ELTS (exp
), 0)->value
,
5054 if (TYPE_IS_PADDING_P (TREE_TYPE (TREE_OPERAND (exp
, 0))))
5055 return remove_conversions (TREE_OPERAND (exp
, 0), true_address
);
5059 case VIEW_CONVERT_EXPR
:
5060 case NON_LVALUE_EXPR
:
5061 return remove_conversions (TREE_OPERAND (exp
, 0), true_address
);
5070 /* If EXP's type is an UNCONSTRAINED_ARRAY_TYPE, return an expression that
5071 refers to the underlying array. If it has TYPE_CONTAINS_TEMPLATE_P,
5072 likewise return an expression pointing to the underlying array. */
5075 maybe_unconstrained_array (tree exp
)
5077 enum tree_code code
= TREE_CODE (exp
);
5078 tree type
= TREE_TYPE (exp
);
5080 switch (TREE_CODE (type
))
5082 case UNCONSTRAINED_ARRAY_TYPE
:
5083 if (code
== UNCONSTRAINED_ARRAY_REF
)
5085 const bool read_only
= TREE_READONLY (exp
);
5086 const bool no_trap
= TREE_THIS_NOTRAP (exp
);
5088 exp
= TREE_OPERAND (exp
, 0);
5089 type
= TREE_TYPE (exp
);
5091 if (TREE_CODE (exp
) == COND_EXPR
)
5094 = build_unary_op (INDIRECT_REF
, NULL_TREE
,
5095 build_component_ref (TREE_OPERAND (exp
, 1),
5100 = build_unary_op (INDIRECT_REF
, NULL_TREE
,
5101 build_component_ref (TREE_OPERAND (exp
, 2),
5106 exp
= build3 (COND_EXPR
,
5107 TREE_TYPE (TREE_TYPE (TYPE_FIELDS (type
))),
5108 TREE_OPERAND (exp
, 0), op1
, op2
);
5112 exp
= build_unary_op (INDIRECT_REF
, NULL_TREE
,
5113 build_component_ref (exp
, NULL_TREE
,
5116 TREE_READONLY (exp
) = read_only
;
5117 TREE_THIS_NOTRAP (exp
) = no_trap
;
5121 else if (code
== NULL_EXPR
)
5122 exp
= build1 (NULL_EXPR
,
5123 TREE_TYPE (TREE_TYPE (TYPE_FIELDS (TREE_TYPE (type
)))),
5124 TREE_OPERAND (exp
, 0));
5128 /* If this is a padded type and it contains a template, convert to the
5129 unpadded type first. */
5130 if (TYPE_PADDING_P (type
)
5131 && TREE_CODE (TREE_TYPE (TYPE_FIELDS (type
))) == RECORD_TYPE
5132 && TYPE_CONTAINS_TEMPLATE_P (TREE_TYPE (TYPE_FIELDS (type
))))
5134 exp
= convert (TREE_TYPE (TYPE_FIELDS (type
)), exp
);
5135 type
= TREE_TYPE (exp
);
5138 if (TYPE_CONTAINS_TEMPLATE_P (type
))
5140 exp
= build_component_ref (exp
, NULL_TREE
,
5141 DECL_CHAIN (TYPE_FIELDS (type
)),
5143 type
= TREE_TYPE (exp
);
5145 /* If the array type is padded, convert to the unpadded type. */
5146 if (TYPE_IS_PADDING_P (type
))
5147 exp
= convert (TREE_TYPE (TYPE_FIELDS (type
)), exp
);
5158 /* Return true if EXPR is an expression that can be folded as an operand
5159 of a VIEW_CONVERT_EXPR. See ada-tree.h for a complete rationale. */
5162 can_fold_for_view_convert_p (tree expr
)
5166 /* The folder will fold NOP_EXPRs between integral types with the same
5167 precision (in the middle-end's sense). We cannot allow it if the
5168 types don't have the same precision in the Ada sense as well. */
5169 if (TREE_CODE (expr
) != NOP_EXPR
)
5172 t1
= TREE_TYPE (expr
);
5173 t2
= TREE_TYPE (TREE_OPERAND (expr
, 0));
5175 /* Defer to the folder for non-integral conversions. */
5176 if (!(INTEGRAL_TYPE_P (t1
) && INTEGRAL_TYPE_P (t2
)))
5179 /* Only fold conversions that preserve both precisions. */
5180 if (TYPE_PRECISION (t1
) == TYPE_PRECISION (t2
)
5181 && operand_equal_p (rm_size (t1
), rm_size (t2
), 0))
5187 /* Return an expression that does an unchecked conversion of EXPR to TYPE.
5188 If NOTRUNC_P is true, truncation operations should be suppressed.
5190 Special care is required with (source or target) integral types whose
5191 precision is not equal to their size, to make sure we fetch or assign
5192 the value bits whose location might depend on the endianness, e.g.
5194 Rmsize : constant := 8;
5195 subtype Int is Integer range 0 .. 2 ** Rmsize - 1;
5197 type Bit_Array is array (1 .. Rmsize) of Boolean;
5198 pragma Pack (Bit_Array);
5200 function To_Bit_Array is new Unchecked_Conversion (Int, Bit_Array);
5202 Value : Int := 2#1000_0001#;
5203 Vbits : Bit_Array := To_Bit_Array (Value);
5205 we expect the 8 bits at Vbits'Address to always contain Value, while
5206 their original location depends on the endianness, at Value'Address
5207 on a little-endian architecture but not on a big-endian one. */
5210 unchecked_convert (tree type
, tree expr
, bool notrunc_p
)
5212 tree etype
= TREE_TYPE (expr
);
5213 enum tree_code ecode
= TREE_CODE (etype
);
5214 enum tree_code code
= TREE_CODE (type
);
5217 /* If the expression is already of the right type, we are done. */
5221 /* If both types types are integral just do a normal conversion.
5222 Likewise for a conversion to an unconstrained array. */
5223 if ((((INTEGRAL_TYPE_P (type
)
5224 && !(code
== INTEGER_TYPE
&& TYPE_VAX_FLOATING_POINT_P (type
)))
5225 || (POINTER_TYPE_P (type
) && !TYPE_IS_THIN_POINTER_P (type
))
5226 || (code
== RECORD_TYPE
&& TYPE_JUSTIFIED_MODULAR_P (type
)))
5227 && ((INTEGRAL_TYPE_P (etype
)
5228 && !(ecode
== INTEGER_TYPE
&& TYPE_VAX_FLOATING_POINT_P (etype
)))
5229 || (POINTER_TYPE_P (etype
) && !TYPE_IS_THIN_POINTER_P (etype
))
5230 || (ecode
== RECORD_TYPE
&& TYPE_JUSTIFIED_MODULAR_P (etype
))))
5231 || code
== UNCONSTRAINED_ARRAY_TYPE
)
5233 if (ecode
== INTEGER_TYPE
&& TYPE_BIASED_REPRESENTATION_P (etype
))
5235 tree ntype
= copy_type (etype
);
5236 TYPE_BIASED_REPRESENTATION_P (ntype
) = 0;
5237 TYPE_MAIN_VARIANT (ntype
) = ntype
;
5238 expr
= build1 (NOP_EXPR
, ntype
, expr
);
5241 if (code
== INTEGER_TYPE
&& TYPE_BIASED_REPRESENTATION_P (type
))
5243 tree rtype
= copy_type (type
);
5244 TYPE_BIASED_REPRESENTATION_P (rtype
) = 0;
5245 TYPE_MAIN_VARIANT (rtype
) = rtype
;
5246 expr
= convert (rtype
, expr
);
5247 expr
= build1 (NOP_EXPR
, type
, expr
);
5250 expr
= convert (type
, expr
);
5253 /* If we are converting to an integral type whose precision is not equal
5254 to its size, first unchecked convert to a record type that contains an
5255 field of the given precision. Then extract the field. */
5256 else if (INTEGRAL_TYPE_P (type
)
5257 && TYPE_RM_SIZE (type
)
5258 && 0 != compare_tree_int (TYPE_RM_SIZE (type
),
5259 GET_MODE_BITSIZE (TYPE_MODE (type
))))
5261 tree rec_type
= make_node (RECORD_TYPE
);
5262 unsigned HOST_WIDE_INT prec
= TREE_INT_CST_LOW (TYPE_RM_SIZE (type
));
5263 tree field_type
, field
;
5265 if (TYPE_UNSIGNED (type
))
5266 field_type
= make_unsigned_type (prec
);
5268 field_type
= make_signed_type (prec
);
5269 SET_TYPE_RM_SIZE (field_type
, TYPE_RM_SIZE (type
));
5271 field
= create_field_decl (get_identifier ("OBJ"), field_type
, rec_type
,
5272 NULL_TREE
, NULL_TREE
, 1, 0);
5274 TYPE_FIELDS (rec_type
) = field
;
5275 layout_type (rec_type
);
5277 expr
= unchecked_convert (rec_type
, expr
, notrunc_p
);
5278 expr
= build_component_ref (expr
, NULL_TREE
, field
, false);
5279 expr
= fold_build1 (NOP_EXPR
, type
, expr
);
5282 /* Similarly if we are converting from an integral type whose precision is
5283 not equal to its size, first copy into a field of the given precision
5284 and unchecked convert the record type. */
5285 else if (INTEGRAL_TYPE_P (etype
)
5286 && TYPE_RM_SIZE (etype
)
5287 && 0 != compare_tree_int (TYPE_RM_SIZE (etype
),
5288 GET_MODE_BITSIZE (TYPE_MODE (etype
))))
5290 tree rec_type
= make_node (RECORD_TYPE
);
5291 unsigned HOST_WIDE_INT prec
= TREE_INT_CST_LOW (TYPE_RM_SIZE (etype
));
5292 VEC(constructor_elt
,gc
) *v
= VEC_alloc (constructor_elt
, gc
, 1);
5293 tree field_type
, field
;
5295 if (TYPE_UNSIGNED (etype
))
5296 field_type
= make_unsigned_type (prec
);
5298 field_type
= make_signed_type (prec
);
5299 SET_TYPE_RM_SIZE (field_type
, TYPE_RM_SIZE (etype
));
5301 field
= create_field_decl (get_identifier ("OBJ"), field_type
, rec_type
,
5302 NULL_TREE
, NULL_TREE
, 1, 0);
5304 TYPE_FIELDS (rec_type
) = field
;
5305 layout_type (rec_type
);
5307 expr
= fold_build1 (NOP_EXPR
, field_type
, expr
);
5308 CONSTRUCTOR_APPEND_ELT (v
, field
, expr
);
5309 expr
= gnat_build_constructor (rec_type
, v
);
5310 expr
= unchecked_convert (type
, expr
, notrunc_p
);
5313 /* If we are converting from a scalar type to a type with a different size,
5314 we need to pad to have the same size on both sides.
5316 ??? We cannot do it unconditionally because unchecked conversions are
5317 used liberally by the front-end to implement polymorphism, e.g. in:
5319 S191s : constant ada__tags__addr_ptr := ada__tags__addr_ptr!(S190s);
5320 return p___size__4 (p__object!(S191s.all));
5322 so we skip all expressions that are references. */
5323 else if (!REFERENCE_CLASS_P (expr
)
5324 && !AGGREGATE_TYPE_P (etype
)
5325 && TREE_CODE (TYPE_SIZE (type
)) == INTEGER_CST
5326 && (c
= tree_int_cst_compare (TYPE_SIZE (etype
), TYPE_SIZE (type
))))
5330 expr
= convert (maybe_pad_type (etype
, TYPE_SIZE (type
), 0, Empty
,
5331 false, false, false, true),
5333 expr
= unchecked_convert (type
, expr
, notrunc_p
);
5337 tree rec_type
= maybe_pad_type (type
, TYPE_SIZE (etype
), 0, Empty
,
5338 false, false, false, true);
5339 expr
= unchecked_convert (rec_type
, expr
, notrunc_p
);
5340 expr
= build_component_ref (expr
, NULL_TREE
, TYPE_FIELDS (rec_type
),
5345 /* We have a special case when we are converting between two unconstrained
5346 array types. In that case, take the address, convert the fat pointer
5347 types, and dereference. */
5348 else if (ecode
== code
&& code
== UNCONSTRAINED_ARRAY_TYPE
)
5349 expr
= build_unary_op (INDIRECT_REF
, NULL_TREE
,
5350 build1 (VIEW_CONVERT_EXPR
, TREE_TYPE (type
),
5351 build_unary_op (ADDR_EXPR
, NULL_TREE
,
5354 /* Another special case is when we are converting to a vector type from its
5355 representative array type; this a regular conversion. */
5356 else if (code
== VECTOR_TYPE
5357 && ecode
== ARRAY_TYPE
5358 && gnat_types_compatible_p (TYPE_REPRESENTATIVE_ARRAY (type
),
5360 expr
= convert (type
, expr
);
5364 expr
= maybe_unconstrained_array (expr
);
5365 etype
= TREE_TYPE (expr
);
5366 ecode
= TREE_CODE (etype
);
5367 if (can_fold_for_view_convert_p (expr
))
5368 expr
= fold_build1 (VIEW_CONVERT_EXPR
, type
, expr
);
5370 expr
= build1 (VIEW_CONVERT_EXPR
, type
, expr
);
5373 /* If the result is an integral type whose precision is not equal to its
5374 size, sign- or zero-extend the result. We need not do this if the input
5375 is an integral type of the same precision and signedness or if the output
5376 is a biased type or if both the input and output are unsigned. */
5378 && INTEGRAL_TYPE_P (type
) && TYPE_RM_SIZE (type
)
5379 && !(code
== INTEGER_TYPE
&& TYPE_BIASED_REPRESENTATION_P (type
))
5380 && 0 != compare_tree_int (TYPE_RM_SIZE (type
),
5381 GET_MODE_BITSIZE (TYPE_MODE (type
)))
5382 && !(INTEGRAL_TYPE_P (etype
)
5383 && TYPE_UNSIGNED (type
) == TYPE_UNSIGNED (etype
)
5384 && operand_equal_p (TYPE_RM_SIZE (type
),
5385 (TYPE_RM_SIZE (etype
) != 0
5386 ? TYPE_RM_SIZE (etype
) : TYPE_SIZE (etype
)),
5388 && !(TYPE_UNSIGNED (type
) && TYPE_UNSIGNED (etype
)))
5391 = gnat_type_for_mode (TYPE_MODE (type
), TYPE_UNSIGNED (type
));
5393 = convert (base_type
,
5394 size_binop (MINUS_EXPR
,
5396 (GET_MODE_BITSIZE (TYPE_MODE (type
))),
5397 TYPE_RM_SIZE (type
)));
5400 build_binary_op (RSHIFT_EXPR
, base_type
,
5401 build_binary_op (LSHIFT_EXPR
, base_type
,
5402 convert (base_type
, expr
),
5407 /* An unchecked conversion should never raise Constraint_Error. The code
5408 below assumes that GCC's conversion routines overflow the same way that
5409 the underlying hardware does. This is probably true. In the rare case
5410 when it is false, we can rely on the fact that such conversions are
5411 erroneous anyway. */
5412 if (TREE_CODE (expr
) == INTEGER_CST
)
5413 TREE_OVERFLOW (expr
) = 0;
5415 /* If the sizes of the types differ and this is an VIEW_CONVERT_EXPR,
5416 show no longer constant. */
5417 if (TREE_CODE (expr
) == VIEW_CONVERT_EXPR
5418 && !operand_equal_p (TYPE_SIZE_UNIT (type
), TYPE_SIZE_UNIT (etype
),
5420 TREE_CONSTANT (expr
) = 0;
5425 /* Return the appropriate GCC tree code for the specified GNAT_TYPE,
5426 the latter being a record type as predicated by Is_Record_Type. */
5429 tree_code_for_record_type (Entity_Id gnat_type
)
5431 Node_Id component_list
, component
;
5433 /* Return UNION_TYPE if it's an Unchecked_Union whose non-discriminant
5434 fields are all in the variant part. Otherwise, return RECORD_TYPE. */
5435 if (!Is_Unchecked_Union (gnat_type
))
5438 gnat_type
= Implementation_Base_Type (gnat_type
);
5440 = Component_List (Type_Definition (Declaration_Node (gnat_type
)));
5442 for (component
= First_Non_Pragma (Component_Items (component_list
));
5443 Present (component
);
5444 component
= Next_Non_Pragma (component
))
5445 if (Ekind (Defining_Entity (component
)) == E_Component
)
5451 /* Return true if GNAT_TYPE is a "double" floating-point type, i.e. whose
5452 size is equal to 64 bits, or an array of such a type. Set ALIGN_CLAUSE
5453 according to the presence of an alignment clause on the type or, if it
5454 is an array, on the component type. */
5457 is_double_float_or_array (Entity_Id gnat_type
, bool *align_clause
)
5459 gnat_type
= Underlying_Type (gnat_type
);
5461 *align_clause
= Present (Alignment_Clause (gnat_type
));
5463 if (Is_Array_Type (gnat_type
))
5465 gnat_type
= Underlying_Type (Component_Type (gnat_type
));
5466 if (Present (Alignment_Clause (gnat_type
)))
5467 *align_clause
= true;
5470 if (!Is_Floating_Point_Type (gnat_type
))
5473 if (UI_To_Int (Esize (gnat_type
)) != 64)
5479 /* Return true if GNAT_TYPE is a "double" or larger scalar type, i.e. whose
5480 size is greater or equal to 64 bits, or an array of such a type. Set
5481 ALIGN_CLAUSE according to the presence of an alignment clause on the
5482 type or, if it is an array, on the component type. */
5485 is_double_scalar_or_array (Entity_Id gnat_type
, bool *align_clause
)
5487 gnat_type
= Underlying_Type (gnat_type
);
5489 *align_clause
= Present (Alignment_Clause (gnat_type
));
5491 if (Is_Array_Type (gnat_type
))
5493 gnat_type
= Underlying_Type (Component_Type (gnat_type
));
5494 if (Present (Alignment_Clause (gnat_type
)))
5495 *align_clause
= true;
5498 if (!Is_Scalar_Type (gnat_type
))
5501 if (UI_To_Int (Esize (gnat_type
)) < 64)
5507 /* Return true if GNU_TYPE is suitable as the type of a non-aliased
5508 component of an aggregate type. */
5511 type_for_nonaliased_component_p (tree gnu_type
)
5513 /* If the type is passed by reference, we may have pointers to the
5514 component so it cannot be made non-aliased. */
5515 if (must_pass_by_ref (gnu_type
) || default_pass_by_ref (gnu_type
))
5518 /* We used to say that any component of aggregate type is aliased
5519 because the front-end may take 'Reference of it. The front-end
5520 has been enhanced in the meantime so as to use a renaming instead
5521 in most cases, but the back-end can probably take the address of
5522 such a component too so we go for the conservative stance.
5524 For instance, we might need the address of any array type, even
5525 if normally passed by copy, to construct a fat pointer if the
5526 component is used as an actual for an unconstrained formal.
5528 Likewise for record types: even if a specific record subtype is
5529 passed by copy, the parent type might be passed by ref (e.g. if
5530 it's of variable size) and we might take the address of a child
5531 component to pass to a parent formal. We have no way to check
5532 for such conditions here. */
5533 if (AGGREGATE_TYPE_P (gnu_type
))
5539 /* Return true if TYPE is a smaller form of ORIG_TYPE. */
5542 smaller_form_type_p (tree type
, tree orig_type
)
5546 /* We're not interested in variants here. */
5547 if (TYPE_MAIN_VARIANT (type
) == TYPE_MAIN_VARIANT (orig_type
))
5550 /* Like a variant, a packable version keeps the original TYPE_NAME. */
5551 if (TYPE_NAME (type
) != TYPE_NAME (orig_type
))
5554 size
= TYPE_SIZE (type
);
5555 osize
= TYPE_SIZE (orig_type
);
5557 if (!(TREE_CODE (size
) == INTEGER_CST
&& TREE_CODE (osize
) == INTEGER_CST
))
5560 return tree_int_cst_lt (size
, osize
) != 0;
5563 /* Perform final processing on global variables. */
5565 static GTY (()) tree dummy_global
;
5568 gnat_write_global_declarations (void)
5573 /* If we have declared types as used at the global level, insert them in
5574 the global hash table. We use a dummy variable for this purpose. */
5575 if (!VEC_empty (tree
, types_used_by_cur_var_decl
))
5577 struct varpool_node
*node
;
5580 ASM_FORMAT_PRIVATE_NAME (label
, first_global_object_name
, 0);
5582 = build_decl (BUILTINS_LOCATION
, VAR_DECL
, get_identifier (label
),
5584 TREE_STATIC (dummy_global
) = 1;
5585 TREE_ASM_WRITTEN (dummy_global
) = 1;
5586 node
= varpool_node (dummy_global
);
5587 node
->symbol
.force_output
= 1;
5589 while (!VEC_empty (tree
, types_used_by_cur_var_decl
))
5591 tree t
= VEC_pop (tree
, types_used_by_cur_var_decl
);
5592 types_used_by_var_decl_insert (t
, dummy_global
);
5596 /* Output debug information for all global type declarations first. This
5597 ensures that global types whose compilation hasn't been finalized yet,
5598 for example pointers to Taft amendment types, have their compilation
5599 finalized in the right context. */
5600 FOR_EACH_VEC_ELT (tree
, global_decls
, i
, iter
)
5601 if (TREE_CODE (iter
) == TYPE_DECL
)
5602 debug_hooks
->global_decl (iter
);
5604 /* Proceed to optimize and emit assembly. */
5605 finalize_compilation_unit ();
5607 /* After cgraph has had a chance to emit everything that's going to
5608 be emitted, output debug information for the rest of globals. */
5611 timevar_push (TV_SYMOUT
);
5612 FOR_EACH_VEC_ELT (tree
, global_decls
, i
, iter
)
5613 if (TREE_CODE (iter
) != TYPE_DECL
)
5614 debug_hooks
->global_decl (iter
);
5615 timevar_pop (TV_SYMOUT
);
5619 /* ************************************************************************
5620 * * GCC builtins support *
5621 * ************************************************************************ */
5623 /* The general scheme is fairly simple:
5625 For each builtin function/type to be declared, gnat_install_builtins calls
5626 internal facilities which eventually get to gnat_push_decl, which in turn
5627 tracks the so declared builtin function decls in the 'builtin_decls' global
5628 datastructure. When an Intrinsic subprogram declaration is processed, we
5629 search this global datastructure to retrieve the associated BUILT_IN DECL
5632 /* Search the chain of currently available builtin declarations for a node
5633 corresponding to function NAME (an IDENTIFIER_NODE). Return the first node
5634 found, if any, or NULL_TREE otherwise. */
5636 builtin_decl_for (tree name
)
5641 FOR_EACH_VEC_ELT (tree
, builtin_decls
, i
, decl
)
5642 if (DECL_NAME (decl
) == name
)
5648 /* The code below eventually exposes gnat_install_builtins, which declares
5649 the builtin types and functions we might need, either internally or as
5650 user accessible facilities.
5652 ??? This is a first implementation shot, still in rough shape. It is
5653 heavily inspired from the "C" family implementation, with chunks copied
5654 verbatim from there.
5656 Two obvious TODO candidates are
5657 o Use a more efficient name/decl mapping scheme
5658 o Devise a middle-end infrastructure to avoid having to copy
5659 pieces between front-ends. */
5661 /* ----------------------------------------------------------------------- *
5662 * BUILTIN ELEMENTARY TYPES *
5663 * ----------------------------------------------------------------------- */
5665 /* Standard data types to be used in builtin argument declarations. */
5669 CTI_SIGNED_SIZE_TYPE
, /* For format checking only. */
5671 CTI_CONST_STRING_TYPE
,
5676 static tree c_global_trees
[CTI_MAX
];
5678 #define signed_size_type_node c_global_trees[CTI_SIGNED_SIZE_TYPE]
5679 #define string_type_node c_global_trees[CTI_STRING_TYPE]
5680 #define const_string_type_node c_global_trees[CTI_CONST_STRING_TYPE]
5682 /* ??? In addition some attribute handlers, we currently don't support a
5683 (small) number of builtin-types, which in turns inhibits support for a
5684 number of builtin functions. */
5685 #define wint_type_node void_type_node
5686 #define intmax_type_node void_type_node
5687 #define uintmax_type_node void_type_node
5689 /* Build the void_list_node (void_type_node having been created). */
5692 build_void_list_node (void)
5694 tree t
= build_tree_list (NULL_TREE
, void_type_node
);
5698 /* Used to help initialize the builtin-types.def table. When a type of
5699 the correct size doesn't exist, use error_mark_node instead of NULL.
5700 The later results in segfaults even when a decl using the type doesn't
5704 builtin_type_for_size (int size
, bool unsignedp
)
5706 tree type
= gnat_type_for_size (size
, unsignedp
);
5707 return type
? type
: error_mark_node
;
5710 /* Build/push the elementary type decls that builtin functions/types
5714 install_builtin_elementary_types (void)
5716 signed_size_type_node
= gnat_signed_type (size_type_node
);
5717 pid_type_node
= integer_type_node
;
5718 void_list_node
= build_void_list_node ();
5720 string_type_node
= build_pointer_type (char_type_node
);
5721 const_string_type_node
5722 = build_pointer_type (build_qualified_type
5723 (char_type_node
, TYPE_QUAL_CONST
));
5726 /* ----------------------------------------------------------------------- *
5727 * BUILTIN FUNCTION TYPES *
5728 * ----------------------------------------------------------------------- */
5730 /* Now, builtin function types per se. */
5734 #define DEF_PRIMITIVE_TYPE(NAME, VALUE) NAME,
5735 #define DEF_FUNCTION_TYPE_0(NAME, RETURN) NAME,
5736 #define DEF_FUNCTION_TYPE_1(NAME, RETURN, ARG1) NAME,
5737 #define DEF_FUNCTION_TYPE_2(NAME, RETURN, ARG1, ARG2) NAME,
5738 #define DEF_FUNCTION_TYPE_3(NAME, RETURN, ARG1, ARG2, ARG3) NAME,
5739 #define DEF_FUNCTION_TYPE_4(NAME, RETURN, ARG1, ARG2, ARG3, ARG4) NAME,
5740 #define DEF_FUNCTION_TYPE_5(NAME, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5) NAME,
5741 #define DEF_FUNCTION_TYPE_6(NAME, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, ARG6) NAME,
5742 #define DEF_FUNCTION_TYPE_7(NAME, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, ARG6, ARG7) NAME,
5743 #define DEF_FUNCTION_TYPE_VAR_0(NAME, RETURN) NAME,
5744 #define DEF_FUNCTION_TYPE_VAR_1(NAME, RETURN, ARG1) NAME,
5745 #define DEF_FUNCTION_TYPE_VAR_2(NAME, RETURN, ARG1, ARG2) NAME,
5746 #define DEF_FUNCTION_TYPE_VAR_3(NAME, RETURN, ARG1, ARG2, ARG3) NAME,
5747 #define DEF_FUNCTION_TYPE_VAR_4(NAME, RETURN, ARG1, ARG2, ARG3, ARG4) NAME,
5748 #define DEF_FUNCTION_TYPE_VAR_5(NAME, RETURN, ARG1, ARG2, ARG3, ARG4, ARG6) \
5750 #define DEF_POINTER_TYPE(NAME, TYPE) NAME,
5751 #include "builtin-types.def"
5752 #undef DEF_PRIMITIVE_TYPE
5753 #undef DEF_FUNCTION_TYPE_0
5754 #undef DEF_FUNCTION_TYPE_1
5755 #undef DEF_FUNCTION_TYPE_2
5756 #undef DEF_FUNCTION_TYPE_3
5757 #undef DEF_FUNCTION_TYPE_4
5758 #undef DEF_FUNCTION_TYPE_5
5759 #undef DEF_FUNCTION_TYPE_6
5760 #undef DEF_FUNCTION_TYPE_7
5761 #undef DEF_FUNCTION_TYPE_VAR_0
5762 #undef DEF_FUNCTION_TYPE_VAR_1
5763 #undef DEF_FUNCTION_TYPE_VAR_2
5764 #undef DEF_FUNCTION_TYPE_VAR_3
5765 #undef DEF_FUNCTION_TYPE_VAR_4
5766 #undef DEF_FUNCTION_TYPE_VAR_5
5767 #undef DEF_POINTER_TYPE
5771 typedef enum c_builtin_type builtin_type
;
5773 /* A temporary array used in communication with def_fn_type. */
5774 static GTY(()) tree builtin_types
[(int) BT_LAST
+ 1];
5776 /* A helper function for install_builtin_types. Build function type
5777 for DEF with return type RET and N arguments. If VAR is true, then the
5778 function should be variadic after those N arguments.
5780 Takes special care not to ICE if any of the types involved are
5781 error_mark_node, which indicates that said type is not in fact available
5782 (see builtin_type_for_size). In which case the function type as a whole
5783 should be error_mark_node. */
5786 def_fn_type (builtin_type def
, builtin_type ret
, bool var
, int n
, ...)
5789 tree
*args
= XALLOCAVEC (tree
, n
);
5794 for (i
= 0; i
< n
; ++i
)
5796 builtin_type a
= (builtin_type
) va_arg (list
, int);
5797 t
= builtin_types
[a
];
5798 if (t
== error_mark_node
)
5803 t
= builtin_types
[ret
];
5804 if (t
== error_mark_node
)
5807 t
= build_varargs_function_type_array (t
, n
, args
);
5809 t
= build_function_type_array (t
, n
, args
);
5812 builtin_types
[def
] = t
;
5816 /* Build the builtin function types and install them in the builtin_types
5817 array for later use in builtin function decls. */
5820 install_builtin_function_types (void)
5822 tree va_list_ref_type_node
;
5823 tree va_list_arg_type_node
;
5825 if (TREE_CODE (va_list_type_node
) == ARRAY_TYPE
)
5827 va_list_arg_type_node
= va_list_ref_type_node
=
5828 build_pointer_type (TREE_TYPE (va_list_type_node
));
5832 va_list_arg_type_node
= va_list_type_node
;
5833 va_list_ref_type_node
= build_reference_type (va_list_type_node
);
5836 #define DEF_PRIMITIVE_TYPE(ENUM, VALUE) \
5837 builtin_types[ENUM] = VALUE;
5838 #define DEF_FUNCTION_TYPE_0(ENUM, RETURN) \
5839 def_fn_type (ENUM, RETURN, 0, 0);
5840 #define DEF_FUNCTION_TYPE_1(ENUM, RETURN, ARG1) \
5841 def_fn_type (ENUM, RETURN, 0, 1, ARG1);
5842 #define DEF_FUNCTION_TYPE_2(ENUM, RETURN, ARG1, ARG2) \
5843 def_fn_type (ENUM, RETURN, 0, 2, ARG1, ARG2);
5844 #define DEF_FUNCTION_TYPE_3(ENUM, RETURN, ARG1, ARG2, ARG3) \
5845 def_fn_type (ENUM, RETURN, 0, 3, ARG1, ARG2, ARG3);
5846 #define DEF_FUNCTION_TYPE_4(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4) \
5847 def_fn_type (ENUM, RETURN, 0, 4, ARG1, ARG2, ARG3, ARG4);
5848 #define DEF_FUNCTION_TYPE_5(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5) \
5849 def_fn_type (ENUM, RETURN, 0, 5, ARG1, ARG2, ARG3, ARG4, ARG5);
5850 #define DEF_FUNCTION_TYPE_6(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \
5852 def_fn_type (ENUM, RETURN, 0, 6, ARG1, ARG2, ARG3, ARG4, ARG5, ARG6);
5853 #define DEF_FUNCTION_TYPE_7(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \
5855 def_fn_type (ENUM, RETURN, 0, 7, ARG1, ARG2, ARG3, ARG4, ARG5, ARG6, ARG7);
5856 #define DEF_FUNCTION_TYPE_VAR_0(ENUM, RETURN) \
5857 def_fn_type (ENUM, RETURN, 1, 0);
5858 #define DEF_FUNCTION_TYPE_VAR_1(ENUM, RETURN, ARG1) \
5859 def_fn_type (ENUM, RETURN, 1, 1, ARG1);
5860 #define DEF_FUNCTION_TYPE_VAR_2(ENUM, RETURN, ARG1, ARG2) \
5861 def_fn_type (ENUM, RETURN, 1, 2, ARG1, ARG2);
5862 #define DEF_FUNCTION_TYPE_VAR_3(ENUM, RETURN, ARG1, ARG2, ARG3) \
5863 def_fn_type (ENUM, RETURN, 1, 3, ARG1, ARG2, ARG3);
5864 #define DEF_FUNCTION_TYPE_VAR_4(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4) \
5865 def_fn_type (ENUM, RETURN, 1, 4, ARG1, ARG2, ARG3, ARG4);
5866 #define DEF_FUNCTION_TYPE_VAR_5(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5) \
5867 def_fn_type (ENUM, RETURN, 1, 5, ARG1, ARG2, ARG3, ARG4, ARG5);
5868 #define DEF_POINTER_TYPE(ENUM, TYPE) \
5869 builtin_types[(int) ENUM] = build_pointer_type (builtin_types[(int) TYPE]);
5871 #include "builtin-types.def"
5873 #undef DEF_PRIMITIVE_TYPE
5874 #undef DEF_FUNCTION_TYPE_1
5875 #undef DEF_FUNCTION_TYPE_2
5876 #undef DEF_FUNCTION_TYPE_3
5877 #undef DEF_FUNCTION_TYPE_4
5878 #undef DEF_FUNCTION_TYPE_5
5879 #undef DEF_FUNCTION_TYPE_6
5880 #undef DEF_FUNCTION_TYPE_VAR_0
5881 #undef DEF_FUNCTION_TYPE_VAR_1
5882 #undef DEF_FUNCTION_TYPE_VAR_2
5883 #undef DEF_FUNCTION_TYPE_VAR_3
5884 #undef DEF_FUNCTION_TYPE_VAR_4
5885 #undef DEF_FUNCTION_TYPE_VAR_5
5886 #undef DEF_POINTER_TYPE
5887 builtin_types
[(int) BT_LAST
] = NULL_TREE
;
5890 /* ----------------------------------------------------------------------- *
5891 * BUILTIN ATTRIBUTES *
5892 * ----------------------------------------------------------------------- */
5894 enum built_in_attribute
5896 #define DEF_ATTR_NULL_TREE(ENUM) ENUM,
5897 #define DEF_ATTR_INT(ENUM, VALUE) ENUM,
5898 #define DEF_ATTR_STRING(ENUM, VALUE) ENUM,
5899 #define DEF_ATTR_IDENT(ENUM, STRING) ENUM,
5900 #define DEF_ATTR_TREE_LIST(ENUM, PURPOSE, VALUE, CHAIN) ENUM,
5901 #include "builtin-attrs.def"
5902 #undef DEF_ATTR_NULL_TREE
5904 #undef DEF_ATTR_STRING
5905 #undef DEF_ATTR_IDENT
5906 #undef DEF_ATTR_TREE_LIST
5910 static GTY(()) tree built_in_attributes
[(int) ATTR_LAST
];
5913 install_builtin_attributes (void)
5915 /* Fill in the built_in_attributes array. */
5916 #define DEF_ATTR_NULL_TREE(ENUM) \
5917 built_in_attributes[(int) ENUM] = NULL_TREE;
5918 #define DEF_ATTR_INT(ENUM, VALUE) \
5919 built_in_attributes[(int) ENUM] = build_int_cst (NULL_TREE, VALUE);
5920 #define DEF_ATTR_STRING(ENUM, VALUE) \
5921 built_in_attributes[(int) ENUM] = build_string (strlen (VALUE), VALUE);
5922 #define DEF_ATTR_IDENT(ENUM, STRING) \
5923 built_in_attributes[(int) ENUM] = get_identifier (STRING);
5924 #define DEF_ATTR_TREE_LIST(ENUM, PURPOSE, VALUE, CHAIN) \
5925 built_in_attributes[(int) ENUM] \
5926 = tree_cons (built_in_attributes[(int) PURPOSE], \
5927 built_in_attributes[(int) VALUE], \
5928 built_in_attributes[(int) CHAIN]);
5929 #include "builtin-attrs.def"
5930 #undef DEF_ATTR_NULL_TREE
5932 #undef DEF_ATTR_STRING
5933 #undef DEF_ATTR_IDENT
5934 #undef DEF_ATTR_TREE_LIST
5937 /* Handle a "const" attribute; arguments as in
5938 struct attribute_spec.handler. */
5941 handle_const_attribute (tree
*node
, tree
ARG_UNUSED (name
),
5942 tree
ARG_UNUSED (args
), int ARG_UNUSED (flags
),
5945 if (TREE_CODE (*node
) == FUNCTION_DECL
)
5946 TREE_READONLY (*node
) = 1;
5948 *no_add_attrs
= true;
5953 /* Handle a "nothrow" attribute; arguments as in
5954 struct attribute_spec.handler. */
5957 handle_nothrow_attribute (tree
*node
, tree
ARG_UNUSED (name
),
5958 tree
ARG_UNUSED (args
), int ARG_UNUSED (flags
),
5961 if (TREE_CODE (*node
) == FUNCTION_DECL
)
5962 TREE_NOTHROW (*node
) = 1;
5964 *no_add_attrs
= true;
5969 /* Handle a "pure" attribute; arguments as in
5970 struct attribute_spec.handler. */
5973 handle_pure_attribute (tree
*node
, tree name
, tree
ARG_UNUSED (args
),
5974 int ARG_UNUSED (flags
), bool *no_add_attrs
)
5976 if (TREE_CODE (*node
) == FUNCTION_DECL
)
5977 DECL_PURE_P (*node
) = 1;
5978 /* ??? TODO: Support types. */
5981 warning (OPT_Wattributes
, "%qs attribute ignored",
5982 IDENTIFIER_POINTER (name
));
5983 *no_add_attrs
= true;
5989 /* Handle a "no vops" attribute; arguments as in
5990 struct attribute_spec.handler. */
5993 handle_novops_attribute (tree
*node
, tree
ARG_UNUSED (name
),
5994 tree
ARG_UNUSED (args
), int ARG_UNUSED (flags
),
5995 bool *ARG_UNUSED (no_add_attrs
))
5997 gcc_assert (TREE_CODE (*node
) == FUNCTION_DECL
);
5998 DECL_IS_NOVOPS (*node
) = 1;
6002 /* Helper for nonnull attribute handling; fetch the operand number
6003 from the attribute argument list. */
6006 get_nonnull_operand (tree arg_num_expr
, unsigned HOST_WIDE_INT
*valp
)
6008 /* Verify the arg number is a constant. */
6009 if (TREE_CODE (arg_num_expr
) != INTEGER_CST
6010 || TREE_INT_CST_HIGH (arg_num_expr
) != 0)
6013 *valp
= TREE_INT_CST_LOW (arg_num_expr
);
6017 /* Handle the "nonnull" attribute. */
6019 handle_nonnull_attribute (tree
*node
, tree
ARG_UNUSED (name
),
6020 tree args
, int ARG_UNUSED (flags
),
6024 unsigned HOST_WIDE_INT attr_arg_num
;
6026 /* If no arguments are specified, all pointer arguments should be
6027 non-null. Verify a full prototype is given so that the arguments
6028 will have the correct types when we actually check them later. */
6031 if (!prototype_p (type
))
6033 error ("nonnull attribute without arguments on a non-prototype");
6034 *no_add_attrs
= true;
6039 /* Argument list specified. Verify that each argument number references
6040 a pointer argument. */
6041 for (attr_arg_num
= 1; args
; args
= TREE_CHAIN (args
))
6043 unsigned HOST_WIDE_INT arg_num
= 0, ck_num
;
6045 if (!get_nonnull_operand (TREE_VALUE (args
), &arg_num
))
6047 error ("nonnull argument has invalid operand number (argument %lu)",
6048 (unsigned long) attr_arg_num
);
6049 *no_add_attrs
= true;
6053 if (prototype_p (type
))
6055 function_args_iterator iter
;
6058 function_args_iter_init (&iter
, type
);
6059 for (ck_num
= 1; ; ck_num
++, function_args_iter_next (&iter
))
6061 argument
= function_args_iter_cond (&iter
);
6062 if (!argument
|| ck_num
== arg_num
)
6067 || TREE_CODE (argument
) == VOID_TYPE
)
6069 error ("nonnull argument with out-of-range operand number "
6070 "(argument %lu, operand %lu)",
6071 (unsigned long) attr_arg_num
, (unsigned long) arg_num
);
6072 *no_add_attrs
= true;
6076 if (TREE_CODE (argument
) != POINTER_TYPE
)
6078 error ("nonnull argument references non-pointer operand "
6079 "(argument %lu, operand %lu)",
6080 (unsigned long) attr_arg_num
, (unsigned long) arg_num
);
6081 *no_add_attrs
= true;
6090 /* Handle a "sentinel" attribute. */
6093 handle_sentinel_attribute (tree
*node
, tree name
, tree args
,
6094 int ARG_UNUSED (flags
), bool *no_add_attrs
)
6096 if (!prototype_p (*node
))
6098 warning (OPT_Wattributes
,
6099 "%qs attribute requires prototypes with named arguments",
6100 IDENTIFIER_POINTER (name
));
6101 *no_add_attrs
= true;
6105 if (!stdarg_p (*node
))
6107 warning (OPT_Wattributes
,
6108 "%qs attribute only applies to variadic functions",
6109 IDENTIFIER_POINTER (name
));
6110 *no_add_attrs
= true;
6116 tree position
= TREE_VALUE (args
);
6118 if (TREE_CODE (position
) != INTEGER_CST
)
6120 warning (0, "requested position is not an integer constant");
6121 *no_add_attrs
= true;
6125 if (tree_int_cst_lt (position
, integer_zero_node
))
6127 warning (0, "requested position is less than zero");
6128 *no_add_attrs
= true;
6136 /* Handle a "noreturn" attribute; arguments as in
6137 struct attribute_spec.handler. */
6140 handle_noreturn_attribute (tree
*node
, tree name
, tree
ARG_UNUSED (args
),
6141 int ARG_UNUSED (flags
), bool *no_add_attrs
)
6143 tree type
= TREE_TYPE (*node
);
6145 /* See FIXME comment in c_common_attribute_table. */
6146 if (TREE_CODE (*node
) == FUNCTION_DECL
)
6147 TREE_THIS_VOLATILE (*node
) = 1;
6148 else if (TREE_CODE (type
) == POINTER_TYPE
6149 && TREE_CODE (TREE_TYPE (type
)) == FUNCTION_TYPE
)
6151 = build_pointer_type
6152 (build_type_variant (TREE_TYPE (type
),
6153 TYPE_READONLY (TREE_TYPE (type
)), 1));
6156 warning (OPT_Wattributes
, "%qs attribute ignored",
6157 IDENTIFIER_POINTER (name
));
6158 *no_add_attrs
= true;
6164 /* Handle a "leaf" attribute; arguments as in
6165 struct attribute_spec.handler. */
6168 handle_leaf_attribute (tree
*node
, tree name
,
6169 tree
ARG_UNUSED (args
),
6170 int ARG_UNUSED (flags
), bool *no_add_attrs
)
6172 if (TREE_CODE (*node
) != FUNCTION_DECL
)
6174 warning (OPT_Wattributes
, "%qE attribute ignored", name
);
6175 *no_add_attrs
= true;
6177 if (!TREE_PUBLIC (*node
))
6179 warning (OPT_Wattributes
, "%qE attribute has no effect", name
);
6180 *no_add_attrs
= true;
6186 /* Handle a "malloc" attribute; arguments as in
6187 struct attribute_spec.handler. */
6190 handle_malloc_attribute (tree
*node
, tree name
, tree
ARG_UNUSED (args
),
6191 int ARG_UNUSED (flags
), bool *no_add_attrs
)
6193 if (TREE_CODE (*node
) == FUNCTION_DECL
6194 && POINTER_TYPE_P (TREE_TYPE (TREE_TYPE (*node
))))
6195 DECL_IS_MALLOC (*node
) = 1;
6198 warning (OPT_Wattributes
, "%qs attribute ignored",
6199 IDENTIFIER_POINTER (name
));
6200 *no_add_attrs
= true;
6206 /* Fake handler for attributes we don't properly support. */
6209 fake_attribute_handler (tree
* ARG_UNUSED (node
),
6210 tree
ARG_UNUSED (name
),
6211 tree
ARG_UNUSED (args
),
6212 int ARG_UNUSED (flags
),
6213 bool * ARG_UNUSED (no_add_attrs
))
6218 /* Handle a "type_generic" attribute. */
6221 handle_type_generic_attribute (tree
*node
, tree
ARG_UNUSED (name
),
6222 tree
ARG_UNUSED (args
), int ARG_UNUSED (flags
),
6223 bool * ARG_UNUSED (no_add_attrs
))
6225 /* Ensure we have a function type. */
6226 gcc_assert (TREE_CODE (*node
) == FUNCTION_TYPE
);
6228 /* Ensure we have a variadic function. */
6229 gcc_assert (!prototype_p (*node
) || stdarg_p (*node
));
6234 /* Handle a "vector_size" attribute; arguments as in
6235 struct attribute_spec.handler. */
6238 handle_vector_size_attribute (tree
*node
, tree name
, tree args
,
6239 int ARG_UNUSED (flags
),
6242 unsigned HOST_WIDE_INT vecsize
, nunits
;
6243 enum machine_mode orig_mode
;
6244 tree type
= *node
, new_type
, size
;
6246 *no_add_attrs
= true;
6248 size
= TREE_VALUE (args
);
6250 if (!host_integerp (size
, 1))
6252 warning (OPT_Wattributes
, "%qs attribute ignored",
6253 IDENTIFIER_POINTER (name
));
6257 /* Get the vector size (in bytes). */
6258 vecsize
= tree_low_cst (size
, 1);
6260 /* We need to provide for vector pointers, vector arrays, and
6261 functions returning vectors. For example:
6263 __attribute__((vector_size(16))) short *foo;
6265 In this case, the mode is SI, but the type being modified is
6266 HI, so we need to look further. */
6268 while (POINTER_TYPE_P (type
)
6269 || TREE_CODE (type
) == FUNCTION_TYPE
6270 || TREE_CODE (type
) == ARRAY_TYPE
)
6271 type
= TREE_TYPE (type
);
6273 /* Get the mode of the type being modified. */
6274 orig_mode
= TYPE_MODE (type
);
6276 if ((!INTEGRAL_TYPE_P (type
)
6277 && !SCALAR_FLOAT_TYPE_P (type
)
6278 && !FIXED_POINT_TYPE_P (type
))
6279 || (!SCALAR_FLOAT_MODE_P (orig_mode
)
6280 && GET_MODE_CLASS (orig_mode
) != MODE_INT
6281 && !ALL_SCALAR_FIXED_POINT_MODE_P (orig_mode
))
6282 || !host_integerp (TYPE_SIZE_UNIT (type
), 1)
6283 || TREE_CODE (type
) == BOOLEAN_TYPE
)
6285 error ("invalid vector type for attribute %qs",
6286 IDENTIFIER_POINTER (name
));
6290 if (vecsize
% tree_low_cst (TYPE_SIZE_UNIT (type
), 1))
6292 error ("vector size not an integral multiple of component size");
6298 error ("zero vector size");
6302 /* Calculate how many units fit in the vector. */
6303 nunits
= vecsize
/ tree_low_cst (TYPE_SIZE_UNIT (type
), 1);
6304 if (nunits
& (nunits
- 1))
6306 error ("number of components of the vector not a power of two");
6310 new_type
= build_vector_type (type
, nunits
);
6312 /* Build back pointers if needed. */
6313 *node
= reconstruct_complex_type (*node
, new_type
);
6318 /* Handle a "vector_type" attribute; arguments as in
6319 struct attribute_spec.handler. */
6322 handle_vector_type_attribute (tree
*node
, tree name
, tree
ARG_UNUSED (args
),
6323 int ARG_UNUSED (flags
),
6326 /* Vector representative type and size. */
6327 tree rep_type
= *node
;
6328 tree rep_size
= TYPE_SIZE_UNIT (rep_type
);
6331 /* Vector size in bytes and number of units. */
6332 unsigned HOST_WIDE_INT vec_bytes
, vec_units
;
6334 /* Vector element type and mode. */
6336 enum machine_mode elem_mode
;
6338 *no_add_attrs
= true;
6340 /* Get the representative array type, possibly nested within a
6341 padding record e.g. for alignment purposes. */
6343 if (TYPE_IS_PADDING_P (rep_type
))
6344 rep_type
= TREE_TYPE (TYPE_FIELDS (rep_type
));
6346 if (TREE_CODE (rep_type
) != ARRAY_TYPE
)
6348 error ("attribute %qs applies to array types only",
6349 IDENTIFIER_POINTER (name
));
6353 /* Silently punt on variable sizes. We can't make vector types for them,
6354 need to ignore them on front-end generated subtypes of unconstrained
6355 bases, and this attribute is for binding implementors, not end-users, so
6356 we should never get there from legitimate explicit uses. */
6358 if (!host_integerp (rep_size
, 1))
6361 /* Get the element type/mode and check this is something we know
6362 how to make vectors of. */
6364 elem_type
= TREE_TYPE (rep_type
);
6365 elem_mode
= TYPE_MODE (elem_type
);
6367 if ((!INTEGRAL_TYPE_P (elem_type
)
6368 && !SCALAR_FLOAT_TYPE_P (elem_type
)
6369 && !FIXED_POINT_TYPE_P (elem_type
))
6370 || (!SCALAR_FLOAT_MODE_P (elem_mode
)
6371 && GET_MODE_CLASS (elem_mode
) != MODE_INT
6372 && !ALL_SCALAR_FIXED_POINT_MODE_P (elem_mode
))
6373 || !host_integerp (TYPE_SIZE_UNIT (elem_type
), 1))
6375 error ("invalid element type for attribute %qs",
6376 IDENTIFIER_POINTER (name
));
6380 /* Sanity check the vector size and element type consistency. */
6382 vec_bytes
= tree_low_cst (rep_size
, 1);
6384 if (vec_bytes
% tree_low_cst (TYPE_SIZE_UNIT (elem_type
), 1))
6386 error ("vector size not an integral multiple of component size");
6392 error ("zero vector size");
6396 vec_units
= vec_bytes
/ tree_low_cst (TYPE_SIZE_UNIT (elem_type
), 1);
6397 if (vec_units
& (vec_units
- 1))
6399 error ("number of components of the vector not a power of two");
6403 /* Build the vector type and replace. */
6405 *node
= build_vector_type (elem_type
, vec_units
);
6406 rep_name
= TYPE_NAME (rep_type
);
6407 if (TREE_CODE (rep_name
) == TYPE_DECL
)
6408 rep_name
= DECL_NAME (rep_name
);
6409 TYPE_NAME (*node
) = rep_name
;
6410 TYPE_REPRESENTATIVE_ARRAY (*node
) = rep_type
;
6415 /* ----------------------------------------------------------------------- *
6416 * BUILTIN FUNCTIONS *
6417 * ----------------------------------------------------------------------- */
6419 /* Worker for DEF_BUILTIN. Possibly define a builtin function with one or two
6420 names. Does not declare a non-__builtin_ function if flag_no_builtin, or
6421 if nonansi_p and flag_no_nonansi_builtin. */
6424 def_builtin_1 (enum built_in_function fncode
,
6426 enum built_in_class fnclass
,
6427 tree fntype
, tree libtype
,
6428 bool both_p
, bool fallback_p
,
6429 bool nonansi_p ATTRIBUTE_UNUSED
,
6430 tree fnattrs
, bool implicit_p
)
6433 const char *libname
;
6435 /* Preserve an already installed decl. It most likely was setup in advance
6436 (e.g. as part of the internal builtins) for specific reasons. */
6437 if (builtin_decl_explicit (fncode
) != NULL_TREE
)
6440 gcc_assert ((!both_p
&& !fallback_p
)
6441 || !strncmp (name
, "__builtin_",
6442 strlen ("__builtin_")));
6444 libname
= name
+ strlen ("__builtin_");
6445 decl
= add_builtin_function (name
, fntype
, fncode
, fnclass
,
6446 (fallback_p
? libname
: NULL
),
6449 /* ??? This is normally further controlled by command-line options
6450 like -fno-builtin, but we don't have them for Ada. */
6451 add_builtin_function (libname
, libtype
, fncode
, fnclass
,
6454 set_builtin_decl (fncode
, decl
, implicit_p
);
/* Flags referenced by the conditional (COND) expansions in builtins.def
   (e.g. for the C94/C99-only builtins).  Ada is not a C dialect, so both
   stay 0 — NOTE(review): presumed rationale, confirm against builtins.def.  */
static int flag_isoc94 = 0;
static int flag_isoc99 = 0;
6460 /* Install what the common builtins.def offers. */
6463 install_builtin_functions (void)
6465 #define DEF_BUILTIN(ENUM, NAME, CLASS, TYPE, LIBTYPE, BOTH_P, FALLBACK_P, \
6466 NONANSI_P, ATTRS, IMPLICIT, COND) \
6468 def_builtin_1 (ENUM, NAME, CLASS, \
6469 builtin_types[(int) TYPE], \
6470 builtin_types[(int) LIBTYPE], \
6471 BOTH_P, FALLBACK_P, NONANSI_P, \
6472 built_in_attributes[(int) ATTRS], IMPLICIT);
6473 #include "builtins.def"
6477 /* ----------------------------------------------------------------------- *
6478 * BUILTIN FUNCTIONS *
6479 * ----------------------------------------------------------------------- */
6481 /* Install the builtin functions we might need. */
6484 gnat_install_builtins (void)
6486 install_builtin_elementary_types ();
6487 install_builtin_function_types ();
6488 install_builtin_attributes ();
6490 /* Install builtins used by generic middle-end pieces first. Some of these
6491 know about internal specificities and control attributes accordingly, for
6492 instance __builtin_alloca vs no-throw and -fstack-check. We will ignore
6493 the generic definition from builtins.def. */
6494 build_common_builtin_nodes ();
6496 /* Now, install the target specific builtins, such as the AltiVec family on
6497 ppc, and the common set as exposed by builtins.def. */
6498 targetm
.init_builtins ();
6499 install_builtin_functions ();
6502 #include "gt-ada-utils.h"
6503 #include "gtype-ada.h"