utils.c (gnat_pushdecl): Set TYPE_CONTEXT for types attached to a TYPE_DECL.
[gcc.git] / gcc / ada / gcc-interface / utils.c
1 /****************************************************************************
2 * *
3 * GNAT COMPILER COMPONENTS *
4 * *
5 * U T I L S *
6 * *
7 * C Implementation File *
8 * *
9 * Copyright (C) 1992-2012, Free Software Foundation, Inc. *
10 * *
11 * GNAT is free software; you can redistribute it and/or modify it under *
12 * terms of the GNU General Public License as published by the Free Soft- *
13 * ware Foundation; either version 3, or (at your option) any later ver- *
14 * sion. GNAT is distributed in the hope that it will be useful, but WITH- *
15 * OUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY *
16 * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License *
17 * for more details. You should have received a copy of the GNU General *
18 * Public License along with GCC; see the file COPYING3. If not see *
19 * <http://www.gnu.org/licenses/>. *
20 * *
21 * GNAT was originally developed by the GNAT team at New York University. *
22 * Extensive contributions were provided by Ada Core Technologies Inc. *
23 * *
24 ****************************************************************************/
25
26 #include "config.h"
27 #include "system.h"
28 #include "coretypes.h"
29 #include "tm.h"
30 #include "tree.h"
31 #include "flags.h"
32 #include "toplev.h"
33 #include "diagnostic-core.h"
34 #include "output.h"
35 #include "ggc.h"
36 #include "debug.h"
37 #include "convert.h"
38 #include "target.h"
39 #include "common/common-target.h"
40 #include "langhooks.h"
41 #include "cgraph.h"
42 #include "diagnostic.h"
43 #include "tree-dump.h"
44 #include "tree-inline.h"
45 #include "tree-iterator.h"
46
47 #include "ada.h"
48 #include "types.h"
49 #include "atree.h"
50 #include "elists.h"
51 #include "namet.h"
52 #include "nlists.h"
53 #include "stringt.h"
54 #include "uintp.h"
55 #include "fe.h"
56 #include "sinfo.h"
57 #include "einfo.h"
58 #include "ada-tree.h"
59 #include "gigi.h"
60
/* If nonzero, pretend we are allocating at global level.  */
int force_global;

/* The default alignment of "double" floating-point types, i.e. floating
   point types whose size is equal to 64 bits, or 0 if this alignment is
   not specifically capped.  */
int double_float_alignment;

/* The default alignment of "double" or larger scalar types, i.e. scalar
   types whose size is greater or equal to 64 bits, or 0 if this alignment
   is not specifically capped.  */
int double_scalar_alignment;

/* Tree nodes for the various types and decls we create.  */
tree gnat_std_decls[(int) ADT_LAST];

/* Functions to call for each of the possible raise reasons.  */
tree gnat_raise_decls[(int) LAST_REASON_CODE + 1];

/* Likewise, but with extra info for each of the possible raise reasons.  */
tree gnat_raise_decls_ext[(int) LAST_REASON_CODE + 1];

/* Forward declarations for handlers of attributes; these populate the
   gnat_internal_attribute_table below.  */
static tree handle_const_attribute (tree *, tree, tree, int, bool *);
static tree handle_nothrow_attribute (tree *, tree, tree, int, bool *);
static tree handle_pure_attribute (tree *, tree, tree, int, bool *);
static tree handle_novops_attribute (tree *, tree, tree, int, bool *);
static tree handle_nonnull_attribute (tree *, tree, tree, int, bool *);
static tree handle_sentinel_attribute (tree *, tree, tree, int, bool *);
static tree handle_noreturn_attribute (tree *, tree, tree, int, bool *);
static tree handle_leaf_attribute (tree *, tree, tree, int, bool *);
static tree handle_malloc_attribute (tree *, tree, tree, int, bool *);
static tree handle_type_generic_attribute (tree *, tree, tree, int, bool *);
static tree handle_vector_size_attribute (tree *, tree, tree, int, bool *);
static tree handle_vector_type_attribute (tree *, tree, tree, int, bool *);

/* Fake handler for attributes we don't properly support, typically because
   they'd require dragging a lot of the common-c front-end circuitry.  */
static tree fake_attribute_handler (tree *, tree, tree, int, bool *);
100
/* Table of machine-independent internal attributes for Ada.  We support
   this minimal set of attributes to accommodate the needs of builtins.
   Terminated by an all-NULL sentinel entry.  */
const struct attribute_spec gnat_internal_attribute_table[] =
{
  /* { name, min_len, max_len, decl_req, type_req, fn_type_req, handler,
       affects_type_identity } */
  { "const",        0, 0,  true,  false, false, handle_const_attribute,
    false },
  { "nothrow",      0, 0,  true,  false, false, handle_nothrow_attribute,
    false },
  { "pure",         0, 0,  true,  false, false, handle_pure_attribute,
    false },
  { "no vops",      0, 0,  true,  false, false, handle_novops_attribute,
    false },
  { "nonnull",      0, -1, false, true,  true,  handle_nonnull_attribute,
    false },
  { "sentinel",     0, 1,  false, true,  true,  handle_sentinel_attribute,
    false },
  { "noreturn",     0, 0,  true,  false, false, handle_noreturn_attribute,
    false },
  { "leaf",         0, 0,  true,  false, false, handle_leaf_attribute,
    false },
  { "malloc",       0, 0,  true,  false, false, handle_malloc_attribute,
    false },
  { "type generic", 0, 0,  false, true,  true,  handle_type_generic_attribute,
    false },

  { "vector_size",  1, 1,  false, true,  false, handle_vector_size_attribute,
    false },
  { "vector_type",  0, 0,  false, true,  false, handle_vector_type_attribute,
    false },
  { "may_alias",    0, 0,  false, true,  false, NULL, false },

  /* ??? format and format_arg are heavy and not supported, which actually
     prevents support for stdio builtins, which we however declare as part
     of the common builtins.def contents.  */
  { "format",       3, 3,  false, true,  true,  fake_attribute_handler, false },
  { "format_arg",   1, 1,  false, true,  true,  fake_attribute_handler, false },

  { NULL,           0, 0,  false, false, false, NULL, false }
};
142
/* Associates a GNAT tree node to a GCC tree node. It is used in
   `save_gnu_tree', `get_gnu_tree' and `present_gnu_tree'. See documentation
   of `save_gnu_tree' for more info.  */
static GTY((length ("max_gnat_nodes"))) tree *associate_gnat_to_gnu;

/* Accessors for the association table; entries are indexed by GNAT node
   id, rebased at First_Node_Id.  */
#define GET_GNU_TREE(GNAT_ENTITY) \
  associate_gnat_to_gnu[(GNAT_ENTITY) - First_Node_Id]

#define SET_GNU_TREE(GNAT_ENTITY,VAL) \
  associate_gnat_to_gnu[(GNAT_ENTITY) - First_Node_Id] = (VAL)

#define PRESENT_GNU_TREE(GNAT_ENTITY) \
  (associate_gnat_to_gnu[(GNAT_ENTITY) - First_Node_Id] != NULL_TREE)

/* Associates a GNAT entity to a GCC tree node used as a dummy, if any.  */
static GTY((length ("max_gnat_nodes"))) tree *dummy_node_table;

/* Accessors for the dummy-node table; same indexing scheme as above.  */
#define GET_DUMMY_NODE(GNAT_ENTITY) \
  dummy_node_table[(GNAT_ENTITY) - First_Node_Id]

#define SET_DUMMY_NODE(GNAT_ENTITY,VAL) \
  dummy_node_table[(GNAT_ENTITY) - First_Node_Id] = (VAL)

#define PRESENT_DUMMY_NODE(GNAT_ENTITY) \
  (dummy_node_table[(GNAT_ENTITY) - First_Node_Id] != NULL_TREE)

/* This variable keeps a table for types for each precision so that we only
   allocate each of them once.  Signed and unsigned types are kept separate.

   Note that these types are only used when fold-const requests something
   special.  Perhaps we should NOT share these types; we'll see how it
   goes later.  */
static GTY(()) tree signed_and_unsigned_types[2 * MAX_BITS_PER_WORD + 1][2];

/* Likewise for float types, but record these by mode.  */
static GTY(()) tree float_types[NUM_MACHINE_MODES];
179
/* For each binding contour we allocate a binding_level structure to indicate
   the binding depth.  */

struct GTY((chain_next ("%h.chain"))) gnat_binding_level {
  /* The binding level containing this one (the enclosing binding level).  */
  struct gnat_binding_level *chain;
  /* The BLOCK node for this level.  */
  tree block;
  /* If nonzero, the setjmp buffer that needs to be updated for any
     variable-sized definition within this context.  */
  tree jmpbuf_decl;
};

/* The binding level currently in effect.  */
static GTY(()) struct gnat_binding_level *current_binding_level;

/* A chain of gnat_binding_level structures awaiting reuse.  */
static GTY((deletable)) struct gnat_binding_level *free_binding_level;

/* The context to be used for global declarations.  */
static GTY(()) tree global_context;

/* An array of global declarations.  */
static GTY(()) VEC(tree,gc) *global_decls;

/* An array of builtin function declarations.  */
static GTY(()) VEC(tree,gc) *builtin_decls;

/* An array of global renaming pointers.  */
static GTY(()) VEC(tree,gc) *global_renaming_pointers;

/* A chain of unused BLOCK nodes. */
static GTY((deletable)) tree free_block_chain;

/* Forward declarations of the hash table callbacks for padded types.  */
static int pad_type_hash_marked_p (const void *p);
static hashval_t pad_type_hash_hash (const void *p);
static int pad_type_hash_eq (const void *p1, const void *p2);

/* A hash table of padded types.  It is modelled on the generic type
   hash table in tree.c, which must thus be used as a reference.  */
struct GTY(()) pad_type_hash {
  unsigned long hash;
  tree type;
};

static GTY ((if_marked ("pad_type_hash_marked_p"),
	     param_is (struct pad_type_hash)))
  htab_t pad_type_hash_table;

/* Forward declarations of local helper functions.  */
static tree merge_sizes (tree, tree, tree, bool, bool);
static tree compute_related_constant (tree, tree);
static tree split_plus (tree, tree *);
static tree float_type_for_precision (int, enum machine_mode);
static tree convert_to_fat_pointer (tree, tree);
static bool potential_alignment_gap (tree, tree, tree);
static void process_attributes (tree, struct attrib *);
236 \f
237 /* Initialize data structures of the utils.c module. */
238
239 void
240 init_gnat_utils (void)
241 {
242 /* Initialize the association of GNAT nodes to GCC trees. */
243 associate_gnat_to_gnu = ggc_alloc_cleared_vec_tree (max_gnat_nodes);
244
245 /* Initialize the association of GNAT nodes to GCC trees as dummies. */
246 dummy_node_table = ggc_alloc_cleared_vec_tree (max_gnat_nodes);
247
248 /* Initialize the hash table of padded types. */
249 pad_type_hash_table = htab_create_ggc (512, pad_type_hash_hash,
250 pad_type_hash_eq, 0);
251 }
252
253 /* Destroy data structures of the utils.c module. */
254
255 void
256 destroy_gnat_utils (void)
257 {
258 /* Destroy the association of GNAT nodes to GCC trees. */
259 ggc_free (associate_gnat_to_gnu);
260 associate_gnat_to_gnu = NULL;
261
262 /* Destroy the association of GNAT nodes to GCC trees as dummies. */
263 ggc_free (dummy_node_table);
264 dummy_node_table = NULL;
265
266 /* Destroy the hash table of padded types. */
267 htab_delete (pad_type_hash_table);
268 pad_type_hash_table = NULL;
269
270 /* Invalidate the global renaming pointers. */
271 invalidate_global_renaming_pointers ();
272 }
273 \f
274 /* GNAT_ENTITY is a GNAT tree node for an entity. Associate GNU_DECL, a GCC
275 tree node, with GNAT_ENTITY. If GNU_DECL is not a ..._DECL node, abort.
276 If NO_CHECK is true, the latter check is suppressed.
277
278 If GNU_DECL is zero, reset a previous association. */
279
280 void
281 save_gnu_tree (Entity_Id gnat_entity, tree gnu_decl, bool no_check)
282 {
283 /* Check that GNAT_ENTITY is not already defined and that it is being set
284 to something which is a decl. If that is not the case, this usually
285 means GNAT_ENTITY is defined twice, but occasionally is due to some
286 Gigi problem. */
287 gcc_assert (!(gnu_decl
288 && (PRESENT_GNU_TREE (gnat_entity)
289 || (!no_check && !DECL_P (gnu_decl)))));
290
291 SET_GNU_TREE (gnat_entity, gnu_decl);
292 }
293
294 /* GNAT_ENTITY is a GNAT tree node for an entity. Return the GCC tree node
295 that was associated with it. If there is no such tree node, abort.
296
297 In some cases, such as delayed elaboration or expressions that need to
298 be elaborated only once, GNAT_ENTITY is really not an entity. */
299
300 tree
301 get_gnu_tree (Entity_Id gnat_entity)
302 {
303 gcc_assert (PRESENT_GNU_TREE (gnat_entity));
304 return GET_GNU_TREE (gnat_entity);
305 }
306
307 /* Return nonzero if a GCC tree has been associated with GNAT_ENTITY. */
308
309 bool
310 present_gnu_tree (Entity_Id gnat_entity)
311 {
312 return PRESENT_GNU_TREE (gnat_entity);
313 }
314 \f
/* Make a dummy type corresponding to GNAT_TYPE.  The dummy node is
   recorded so that a later call for the same entity returns it.  */

tree
make_dummy_type (Entity_Id gnat_type)
{
  Entity_Id gnat_underlying = Gigi_Equivalent_Type (gnat_type);
  tree gnu_type;

  /* If there is an equivalent type, get its underlying type.  */
  if (Present (gnat_underlying))
    gnat_underlying = Gigi_Equivalent_Type (Underlying_Type (gnat_underlying));

  /* If there was no equivalent type (can only happen when just annotating
     types) or underlying type, go back to the original type.  */
  if (No (gnat_underlying))
    gnat_underlying = gnat_type;

  /* If there is already a dummy type, use that one.  Else make one.  */
  if (PRESENT_DUMMY_NODE (gnat_underlying))
    return GET_DUMMY_NODE (gnat_underlying);

  /* If this is a record, make a RECORD_TYPE or UNION_TYPE; else make
     an ENUMERAL_TYPE.  */
  gnu_type = make_node (Is_Record_Type (gnat_underlying)
			? tree_code_for_record_type (gnat_underlying)
			: ENUMERAL_TYPE);
  /* Name the dummy after the original entity, not the underlying one.  */
  TYPE_NAME (gnu_type) = get_entity_name (gnat_type);
  TYPE_DUMMY_P (gnu_type) = 1;
  TYPE_STUB_DECL (gnu_type)
    = create_type_stub_decl (TYPE_NAME (gnu_type), gnu_type);
  if (Is_By_Reference_Type (gnat_underlying))
    TYPE_BY_REFERENCE_P (gnu_type) = 1;

  SET_DUMMY_NODE (gnat_underlying, gnu_type);

  return gnu_type;
}
352
353 /* Return the dummy type that was made for GNAT_TYPE, if any. */
354
355 tree
356 get_dummy_type (Entity_Id gnat_type)
357 {
358 return GET_DUMMY_NODE (gnat_type);
359 }
360
/* Build dummy fat and thin pointer types whose designated type is specified
   by GNAT_DESIG_TYPE/GNU_DESIG_TYPE and attach them to the latter.  The
   "XUB"/"XUA"/"XUP"/"XUT" suffixes follow the GNAT encoding for the bounds
   template, the array, the fat pointer and the object record of an
   unconstrained array.  */

void
build_dummy_unc_pointer_types (Entity_Id gnat_desig_type, tree gnu_desig_type)
{
  tree gnu_template_type, gnu_ptr_template, gnu_array_type, gnu_ptr_array;
  tree gnu_fat_type, fields, gnu_object_type;

  /* Dummy record for the bounds template and a pointer to it.  */
  gnu_template_type = make_node (RECORD_TYPE);
  TYPE_NAME (gnu_template_type) = create_concat_name (gnat_desig_type, "XUB");
  TYPE_DUMMY_P (gnu_template_type) = 1;
  gnu_ptr_template = build_pointer_type (gnu_template_type);

  /* Dummy array type and a pointer to it.  */
  gnu_array_type = make_node (ENUMERAL_TYPE);
  TYPE_NAME (gnu_array_type) = create_concat_name (gnat_desig_type, "XUA");
  TYPE_DUMMY_P (gnu_array_type) = 1;
  gnu_ptr_array = build_pointer_type (gnu_array_type);

  gnu_fat_type = make_node (RECORD_TYPE);
  /* Build a stub DECL to trigger the special processing for fat pointer types
     in gnat_pushdecl.  */
  TYPE_NAME (gnu_fat_type)
    = create_type_stub_decl (create_concat_name (gnat_desig_type, "XUP"),
			     gnu_fat_type);
  /* The fat pointer carries the array pointer and the bounds pointer.  */
  fields = create_field_decl (get_identifier ("P_ARRAY"), gnu_ptr_array,
			      gnu_fat_type, NULL_TREE, NULL_TREE, 0, 0);
  DECL_CHAIN (fields)
    = create_field_decl (get_identifier ("P_BOUNDS"), gnu_ptr_template,
			 gnu_fat_type, NULL_TREE, NULL_TREE, 0, 0);
  finish_fat_pointer_type (gnu_fat_type, fields);
  SET_TYPE_UNCONSTRAINED_ARRAY (gnu_fat_type, gnu_desig_type);
  /* Suppress debug info until after the type is completed.  */
  TYPE_DECL_SUPPRESS_DEBUG (TYPE_STUB_DECL (gnu_fat_type)) = 1;

  /* Dummy object record type (template + data) for the thin pointer.  */
  gnu_object_type = make_node (RECORD_TYPE);
  TYPE_NAME (gnu_object_type) = create_concat_name (gnat_desig_type, "XUT");
  TYPE_DUMMY_P (gnu_object_type) = 1;

  /* Attach both dummies to the designated type.  */
  TYPE_POINTER_TO (gnu_desig_type) = gnu_fat_type;
  TYPE_OBJECT_RECORD_TYPE (gnu_desig_type) = gnu_object_type;
}
403 \f
404 /* Return true if we are in the global binding level. */
405
406 bool
407 global_bindings_p (void)
408 {
409 return force_global || current_function_decl == NULL_TREE;
410 }
411
412 /* Enter a new binding level. */
413
414 void
415 gnat_pushlevel (void)
416 {
417 struct gnat_binding_level *newlevel = NULL;
418
419 /* Reuse a struct for this binding level, if there is one. */
420 if (free_binding_level)
421 {
422 newlevel = free_binding_level;
423 free_binding_level = free_binding_level->chain;
424 }
425 else
426 newlevel = ggc_alloc_gnat_binding_level ();
427
428 /* Use a free BLOCK, if any; otherwise, allocate one. */
429 if (free_block_chain)
430 {
431 newlevel->block = free_block_chain;
432 free_block_chain = BLOCK_CHAIN (free_block_chain);
433 BLOCK_CHAIN (newlevel->block) = NULL_TREE;
434 }
435 else
436 newlevel->block = make_node (BLOCK);
437
438 /* Point the BLOCK we just made to its parent. */
439 if (current_binding_level)
440 BLOCK_SUPERCONTEXT (newlevel->block) = current_binding_level->block;
441
442 BLOCK_VARS (newlevel->block) = NULL_TREE;
443 BLOCK_SUBBLOCKS (newlevel->block) = NULL_TREE;
444 TREE_USED (newlevel->block) = 1;
445
446 /* Add this level to the front of the chain (stack) of active levels. */
447 newlevel->chain = current_binding_level;
448 newlevel->jmpbuf_decl = NULL_TREE;
449 current_binding_level = newlevel;
450 }
451
452 /* Set SUPERCONTEXT of the BLOCK for the current binding level to FNDECL
453 and point FNDECL to this BLOCK. */
454
455 void
456 set_current_block_context (tree fndecl)
457 {
458 BLOCK_SUPERCONTEXT (current_binding_level->block) = fndecl;
459 DECL_INITIAL (fndecl) = current_binding_level->block;
460 set_block_for_group (current_binding_level->block);
461 }
462
463 /* Set the jmpbuf_decl for the current binding level to DECL. */
464
465 void
466 set_block_jmpbuf_decl (tree decl)
467 {
468 current_binding_level->jmpbuf_decl = decl;
469 }
470
471 /* Get the jmpbuf_decl, if any, for the current binding level. */
472
473 tree
474 get_block_jmpbuf_decl (void)
475 {
476 return current_binding_level->jmpbuf_decl;
477 }
478
/* Exit a binding level.  Set any BLOCK into the current code group.  */

void
gnat_poplevel (void)
{
  struct gnat_binding_level *level = current_binding_level;
  tree block = level->block;

  /* Variables and sub-blocks were chained in reverse order by gnat_pushdecl
     and gnat_poplevel; restore source order before finalizing.  */
  BLOCK_VARS (block) = nreverse (BLOCK_VARS (block));
  BLOCK_SUBBLOCKS (block) = blocks_nreverse (BLOCK_SUBBLOCKS (block));

  /* If this is a function-level BLOCK don't do anything.  Otherwise, if there
     are no variables free the block and merge its subblocks into those of its
     parent block.  Otherwise, add it to the list of its parent.  */
  if (TREE_CODE (BLOCK_SUPERCONTEXT (block)) == FUNCTION_DECL)
    ;
  else if (BLOCK_VARS (block) == NULL_TREE)
    {
      BLOCK_SUBBLOCKS (level->chain->block)
	= block_chainon (BLOCK_SUBBLOCKS (block),
			 BLOCK_SUBBLOCKS (level->chain->block));
      /* The empty BLOCK itself goes back on the free list for reuse.  */
      BLOCK_CHAIN (block) = free_block_chain;
      free_block_chain = block;
    }
  else
    {
      BLOCK_CHAIN (block) = BLOCK_SUBBLOCKS (level->chain->block);
      BLOCK_SUBBLOCKS (level->chain->block) = block;
      TREE_USED (block) = 1;
      set_block_for_group (block);
    }

  /* Free this binding structure.  */
  current_binding_level = level->chain;
  level->chain = free_binding_level;
  free_binding_level = level;
}
516
517 /* Exit a binding level and discard the associated BLOCK. */
518
519 void
520 gnat_zaplevel (void)
521 {
522 struct gnat_binding_level *level = current_binding_level;
523 tree block = level->block;
524
525 BLOCK_CHAIN (block) = free_block_chain;
526 free_block_chain = block;
527
528 /* Free this binding structure. */
529 current_binding_level = level->chain;
530 level->chain = free_binding_level;
531 free_binding_level = level;
532 }
533 \f
/* Record DECL as belonging to the current lexical scope and use GNAT_NODE
   for location information and flag propagation.  Also sets DECL_CONTEXT,
   registers DECL in the appropriate declaration list, and for TYPE_DECLs
   propagates the name and context to the attached type and its variants.  */

void
gnat_pushdecl (tree decl, Node_Id gnat_node)
{
  /* If DECL is public external or at top level, it has global context.  */
  if ((TREE_PUBLIC (decl) && DECL_EXTERNAL (decl)) || global_bindings_p ())
    {
      /* The translation unit decl is created lazily on first use.  */
      if (!global_context)
	global_context = build_translation_unit_decl (NULL_TREE);
      DECL_CONTEXT (decl) = global_context;
    }
  else
    {
      DECL_CONTEXT (decl) = current_function_decl;

      /* Functions imported in another function are not really nested.
	 For really nested functions mark them initially as needing
	 a static chain for uses of that flag before unnesting;
	 lower_nested_functions will then recompute it.  */
      if (TREE_CODE (decl) == FUNCTION_DECL && !TREE_PUBLIC (decl))
	DECL_STATIC_CHAIN (decl) = 1;
    }

  /* Suppress warnings when there is no GNAT node or warnings are off.  */
  TREE_NO_WARNING (decl) = (No (gnat_node) || Warnings_Off (gnat_node));

  /* Set the location of DECL and emit a declaration for it.  */
  if (Present (gnat_node))
    Sloc_to_locus (Sloc (gnat_node), &DECL_SOURCE_LOCATION (decl));

  add_decl_expr (decl, gnat_node);

  /* Put the declaration on the list.  The list of declarations is in reverse
     order.  The list will be reversed later.  Put global declarations in the
     globals list and local ones in the current block.  But skip TYPE_DECLs
     for UNCONSTRAINED_ARRAY_TYPE in both cases, as they will cause trouble
     with the debugger and aren't needed anyway.  */
  if (!(TREE_CODE (decl) == TYPE_DECL
        && TREE_CODE (TREE_TYPE (decl)) == UNCONSTRAINED_ARRAY_TYPE))
    {
      if (DECL_EXTERNAL (decl))
	{
	  /* Only builtin functions are recorded among externals.  */
	  if (TREE_CODE (decl) == FUNCTION_DECL && DECL_BUILT_IN (decl))
	    VEC_safe_push (tree, gc, builtin_decls, decl);
	}
      else if (global_bindings_p ())
	VEC_safe_push (tree, gc, global_decls, decl);
      else
	{
	  DECL_CHAIN (decl) = BLOCK_VARS (current_binding_level->block);
	  BLOCK_VARS (current_binding_level->block) = decl;
	}
    }

  /* For the declaration of a type, set its name if it either is not already
     set or if the previous type name was not derived from a source name.
     We'd rather have the type named with a real name and all the pointer
     types to the same object have the same POINTER_TYPE node.  Code in the
     equivalent function of c-decl.c makes a copy of the type node here, but
     that may cause us trouble with incomplete types.  We make an exception
     for fat pointer types because the compiler automatically builds them
     for unconstrained array types and the debugger uses them to represent
     both these and pointers to these.  */
  if (TREE_CODE (decl) == TYPE_DECL && DECL_NAME (decl))
    {
      tree t = TREE_TYPE (decl);

      if (!(TYPE_NAME (t) && TREE_CODE (TYPE_NAME (t)) == TYPE_DECL))
	{
	  /* Array and pointer types aren't "tagged" types so we force the
	     type to be associated with its typedef in the DWARF back-end,
	     in order to make sure that the latter is always preserved.  */
	  if (!DECL_ARTIFICIAL (decl)
	      && (TREE_CODE (t) == ARRAY_TYPE
		  || TREE_CODE (t) == POINTER_TYPE))
	    {
	      tree tt = build_distinct_type_copy (t);
	      if (TREE_CODE (t) == POINTER_TYPE)
		TYPE_NEXT_PTR_TO (t) = tt;
	      TYPE_NAME (tt) = DECL_NAME (decl);
	      TYPE_CONTEXT (tt) = DECL_CONTEXT (decl);
	      TYPE_STUB_DECL (tt) = TYPE_STUB_DECL (t);
	      DECL_ORIGINAL_TYPE (decl) = tt;
	    }
	}
      else if (TYPE_IS_FAT_POINTER_P (t))
	{
	  /* We need a variant for the placeholder machinery to work.  */
	  tree tt = build_variant_type_copy (t);
	  TYPE_NAME (tt) = decl;
	  TYPE_CONTEXT (tt) = DECL_CONTEXT (decl);
	  TREE_USED (tt) = TREE_USED (t);
	  TREE_TYPE (decl) = tt;
	  if (DECL_ORIGINAL_TYPE (TYPE_NAME (t)))
	    DECL_ORIGINAL_TYPE (decl) = DECL_ORIGINAL_TYPE (TYPE_NAME (t));
	  else
	    DECL_ORIGINAL_TYPE (decl) = t;
	  DECL_ARTIFICIAL (decl) = 0;
	  /* Clear T so the variant-propagation loop below is skipped.  */
	  t = NULL_TREE;
	}
      else if (DECL_ARTIFICIAL (TYPE_NAME (t)) && !DECL_ARTIFICIAL (decl))
	;
      else
	t = NULL_TREE;

      /* Propagate the name to all the anonymous variants.  This is needed
	 for the type qualifiers machinery to work properly.  */
      if (t)
	for (t = TYPE_MAIN_VARIANT (t); t; t = TYPE_NEXT_VARIANT (t))
	  if (!(TYPE_NAME (t) && TREE_CODE (TYPE_NAME (t)) == TYPE_DECL))
	    {
	      TYPE_NAME (t) = decl;
	      TYPE_CONTEXT (t) = DECL_CONTEXT (decl);
	    }
    }
}
651 \f
/* Create a record type that contains a SIZE bytes long field of TYPE with a
   starting bit position so that it is aligned to ALIGN bits, and leaving at
   least ROOM bytes free before the field.  BASE_ALIGN is the alignment the
   record is guaranteed to get.  */

tree
make_aligning_type (tree type, unsigned int align, tree size,
		    unsigned int base_align, int room)
{
  /* We will be crafting a record type with one field at a position set to be
     the next multiple of ALIGN past record'address + room bytes.  We use a
     record placeholder to express record'address.  */
  tree record_type = make_node (RECORD_TYPE);
  tree record = build0 (PLACEHOLDER_EXPR, record_type);

  tree record_addr_st
    = convert (sizetype, build_unary_op (ADDR_EXPR, NULL_TREE, record));

  /* The diagram below summarizes the shape of what we manipulate:

		    <--------- pos ---------->
		{ +------------+-------------+-----------------+
      record  =>{ |############|     ...     | field (type)    |
		{ +------------+-------------+-----------------+
		  |<-- room -->|<- voffset ->|<---- size ----->|
		  o            o
		  |            |
		  record_addr  vblock_addr

     Every length is in sizetype bytes there, except "pos" which has to be
     set as a bit position in the GCC tree for the record.  */
  tree room_st = size_int (room);
  tree vblock_addr_st = size_binop (PLUS_EXPR, record_addr_st, room_st);
  tree voffset_st, pos, field;

  tree name = TYPE_NAME (type);

  /* Name the new record after the base type, with an "ALIGN" suffix.  */
  if (TREE_CODE (name) == TYPE_DECL)
    name = DECL_NAME (name);
  name = concat_name (name, "ALIGN");
  TYPE_NAME (record_type) = name;

  /* Compute VOFFSET and then POS.  The next byte position multiple of some
     alignment after some address is obtained by "and"ing the alignment minus
     1 with the two's complement of the address.  */
  voffset_st = size_binop (BIT_AND_EXPR,
			   fold_build1 (NEGATE_EXPR, sizetype, vblock_addr_st),
			   size_int ((align / BITS_PER_UNIT) - 1));

  /* POS = (ROOM + VOFFSET) * BIT_PER_UNIT, in bitsizetype.  */
  pos = size_binop (MULT_EXPR,
		    convert (bitsizetype,
			     size_binop (PLUS_EXPR, room_st, voffset_st)),
		    bitsize_unit_node);

  /* Craft the GCC record representation.  We exceptionally do everything
     manually here because 1) our generic circuitry is not quite ready to
     handle the complex position/size expressions we are setting up, 2) we
     have a strong simplifying factor at hand: we know the maximum possible
     value of voffset, and 3) we have to set/reset at least the sizes in
     accordance with this maximum value anyway, as we need them to convey
     what should be "alloc"ated for this type.

     Use -1 as the 'addressable' indication for the field to prevent the
     creation of a bitfield.  We don't need one, it would have damaging
     consequences on the alignment computation, and create_field_decl would
     make one without this special argument, for instance because of the
     complex position expression.  */
  field = create_field_decl (get_identifier ("F"), type, record_type, size,
			     pos, 1, -1);
  TYPE_FIELDS (record_type) = field;

  TYPE_ALIGN (record_type) = base_align;
  TYPE_USER_ALIGN (record_type) = 1;

  /* Account for the worst-case VOFFSET in the declared sizes, so enough
     space is always allocated for this type.  */
  TYPE_SIZE (record_type)
    = size_binop (PLUS_EXPR,
		  size_binop (MULT_EXPR, convert (bitsizetype, size),
			      bitsize_unit_node),
		  bitsize_int (align + room * BITS_PER_UNIT));
  TYPE_SIZE_UNIT (record_type)
    = size_binop (PLUS_EXPR, size,
		  size_int (room + align / BITS_PER_UNIT));

  SET_TYPE_MODE (record_type, BLKmode);
  relate_alias_sets (record_type, type, ALIAS_SET_COPY);

  /* Declare it now since it will never be declared otherwise.  This is
     necessary to ensure that its subtrees are properly marked.  */
  create_type_decl (name, record_type, NULL, true, false, Empty);

  return record_type;
}
745
/* TYPE is a RECORD_TYPE, UNION_TYPE or QUAL_UNION_TYPE that is being used
   as the field type of a packed record if IN_RECORD is true, or as the
   component type of a packed array if IN_RECORD is false.  See if we can
   rewrite it either as a type that has a non-BLKmode, which we can pack
   tighter in the packed record case, or as a smaller type.  If so, return
   the new type.  If not, return the original type.  */

tree
make_packable_type (tree type, bool in_record)
{
  unsigned HOST_WIDE_INT size = tree_low_cst (TYPE_SIZE (type), 1);
  unsigned HOST_WIDE_INT new_size;
  tree new_type, old_field, field_list = NULL_TREE;
  unsigned int align;

  /* No point in doing anything if the size is zero.  */
  if (size == 0)
    return type;

  new_type = make_node (TREE_CODE (type));

  /* Copy the name and flags from the old type to that of the new.
     Note that we rely on the pointer equality created here for
     TYPE_NAME to look through conversions in various places.  */
  TYPE_NAME (new_type) = TYPE_NAME (type);
  TYPE_JUSTIFIED_MODULAR_P (new_type) = TYPE_JUSTIFIED_MODULAR_P (type);
  TYPE_CONTAINS_TEMPLATE_P (new_type) = TYPE_CONTAINS_TEMPLATE_P (type);
  if (TREE_CODE (type) == RECORD_TYPE)
    TYPE_PADDING_P (new_type) = TYPE_PADDING_P (type);

  /* If we are in a record and have a small size, set the alignment to
     try for an integral mode.  Otherwise set it to try for a smaller
     type with BLKmode.  */
  if (in_record && size <= MAX_FIXED_MODE_SIZE)
    {
      align = ceil_pow2 (size);
      TYPE_ALIGN (new_type) = align;
      new_size = (size + align - 1) & -align;
    }
  else
    {
      /* NOTE: this ALIGN intentionally shadows the outer declaration,
	 which is only used in the other branch.  */
      unsigned HOST_WIDE_INT align;

      /* Do not try to shrink the size if the RM size is not constant.  */
      if (TYPE_CONTAINS_TEMPLATE_P (type)
	  || !host_integerp (TYPE_ADA_SIZE (type), 1))
	return type;

      /* Round the RM size up to a unit boundary to get the minimal size
	 for a BLKmode record.  Give up if it's already the size.  */
      new_size = TREE_INT_CST_LOW (TYPE_ADA_SIZE (type));
      new_size = (new_size + BITS_PER_UNIT - 1) & -BITS_PER_UNIT;
      if (new_size == size)
	return type;

      align = new_size & -new_size;
      TYPE_ALIGN (new_type) = MIN (TYPE_ALIGN (type), align);
    }

  TYPE_USER_ALIGN (new_type) = 1;

  /* Now copy the fields, keeping the position and size as we don't want
     to change the layout by propagating the packedness downwards.  */
  for (old_field = TYPE_FIELDS (type); old_field;
       old_field = DECL_CHAIN (old_field))
    {
      tree new_field_type = TREE_TYPE (old_field);
      /* This NEW_SIZE shadows the outer one; it is the per-field size.  */
      tree new_field, new_size;

      /* Recursively shrink aggregate field types too.  */
      if (RECORD_OR_UNION_TYPE_P (new_field_type)
	  && !TYPE_FAT_POINTER_P (new_field_type)
	  && host_integerp (TYPE_SIZE (new_field_type), 1))
	new_field_type = make_packable_type (new_field_type, true);

      /* However, for the last field in a not already packed record type
	 that is of an aggregate type, we need to use the RM size in the
	 packable version of the record type, see finish_record_type.  */
      if (!DECL_CHAIN (old_field)
	  && !TYPE_PACKED (type)
	  && RECORD_OR_UNION_TYPE_P (new_field_type)
	  && !TYPE_FAT_POINTER_P (new_field_type)
	  && !TYPE_CONTAINS_TEMPLATE_P (new_field_type)
	  && TYPE_ADA_SIZE (new_field_type))
	new_size = TYPE_ADA_SIZE (new_field_type);
      else
	new_size = DECL_SIZE (old_field);

      new_field
	= create_field_decl (DECL_NAME (old_field), new_field_type, new_type,
			     new_size, bit_position (old_field),
			     TYPE_PACKED (type),
			     !DECL_NONADDRESSABLE_P (old_field));

      DECL_INTERNAL_P (new_field) = DECL_INTERNAL_P (old_field);
      SET_DECL_ORIGINAL_FIELD_TO_FIELD (new_field, old_field);
      if (TREE_CODE (new_type) == QUAL_UNION_TYPE)
	DECL_QUALIFIER (new_field) = DECL_QUALIFIER (old_field);

      /* Fields are chained in reverse here and unreversed below.  */
      DECL_CHAIN (new_field) = field_list;
      field_list = new_field;
    }

  finish_record_type (new_type, nreverse (field_list), 2, false);
  relate_alias_sets (new_type, type, ALIAS_SET_COPY);
  SET_DECL_PARALLEL_TYPE (TYPE_STUB_DECL (new_type),
			  DECL_PARALLEL_TYPE (TYPE_STUB_DECL (type)));

  /* If this is a padding record, we never want to make the size smaller
     than what was specified.  For QUAL_UNION_TYPE, also copy the size.  */
  if (TYPE_IS_PADDING_P (type) || TREE_CODE (type) == QUAL_UNION_TYPE)
    {
      TYPE_SIZE (new_type) = TYPE_SIZE (type);
      TYPE_SIZE_UNIT (new_type) = TYPE_SIZE_UNIT (type);
      new_size = size;
    }
  else
    {
      TYPE_SIZE (new_type) = bitsize_int (new_size);
      TYPE_SIZE_UNIT (new_type)
	= size_int ((new_size + BITS_PER_UNIT - 1) / BITS_PER_UNIT);
    }

  if (!TYPE_CONTAINS_TEMPLATE_P (type))
    SET_TYPE_ADA_SIZE (new_type, TYPE_ADA_SIZE (type));

  compute_record_mode (new_type);

  /* Try harder to get a packable type if necessary, for example
     in case the record itself contains a BLKmode field.  */
  if (in_record && TYPE_MODE (new_type) == BLKmode)
    SET_TYPE_MODE (new_type,
		   mode_for_size_tree (TYPE_SIZE (new_type), MODE_INT, 1));

  /* If neither the mode nor the size has shrunk, return the old type.  */
  if (TYPE_MODE (new_type) == BLKmode && new_size >= size)
    return type;

  return new_type;
}
885
/* Given a type TYPE, return a new type whose size is appropriate for SIZE.
   If TYPE is the best type, return it.  Otherwise, make a new type.  We
   only support new integral and pointer types.  FOR_BIASED is true if
   we are making a biased type.  */

tree
make_type_from_size (tree type, tree size_tree, bool for_biased)
{
  unsigned HOST_WIDE_INT size;
  bool biased_p;
  tree new_type;

  /* If size indicates an error, just return TYPE to avoid propagating
     the error.  Likewise if it's too large to represent.  */
  if (!size_tree || !host_integerp (size_tree, 1))
    return type;

  size = tree_low_cst (size_tree, 1);

  switch (TREE_CODE (type))
    {
    case INTEGER_TYPE:
    case ENUMERAL_TYPE:
    case BOOLEAN_TYPE:
      /* Only an INTEGER_TYPE can carry a biased representation.  */
      biased_p = (TREE_CODE (type) == INTEGER_TYPE
		  && TYPE_BIASED_REPRESENTATION_P (type));

      /* Integer types with precision 0 are forbidden.  */
      if (size == 0)
	size = 1;

      /* Only do something if the type isn't a packed array type and doesn't
	 already have the proper size and the size isn't too large.  */
      if (TYPE_IS_PACKED_ARRAY_TYPE_P (type)
	  || (TYPE_PRECISION (type) == size && biased_p == for_biased)
	  || size > LONG_LONG_TYPE_SIZE)
	break;

      /* The new type is biased if the old one was or if it is requested.  */
      biased_p |= for_biased;
      if (TYPE_UNSIGNED (type) || biased_p)
	new_type = make_unsigned_type (size);
      else
	new_type = make_signed_type (size);
      /* Use the base type of TYPE, or TYPE itself if it has none, as the
	 base type of the new type.  */
      TREE_TYPE (new_type) = TREE_TYPE (type) ? TREE_TYPE (type) : type;
      /* Inherit the bounds of TYPE, converted to the new base type.  */
      SET_TYPE_RM_MIN_VALUE (new_type,
			     convert (TREE_TYPE (new_type),
				      TYPE_MIN_VALUE (type)));
      SET_TYPE_RM_MAX_VALUE (new_type,
			     convert (TREE_TYPE (new_type),
				      TYPE_MAX_VALUE (type)));
      /* Copy the name to show that it's essentially the same type and
	 not a subrange type.  */
      TYPE_NAME (new_type) = TYPE_NAME (type);
      TYPE_BIASED_REPRESENTATION_P (new_type) = biased_p;
      SET_TYPE_RM_SIZE (new_type, bitsize_int (size));
      return new_type;

    case RECORD_TYPE:
      /* Do something if this is a fat pointer, in which case we
	 may need to return the thin pointer.  */
      if (TYPE_FAT_POINTER_P (type) && size < POINTER_SIZE * 2)
	{
	  /* Pick a pointer mode matching SIZE, falling back to the
	     canonical pointer mode if the target rejects it.  */
	  enum machine_mode p_mode = mode_for_size (size, MODE_INT, 0);
	  if (!targetm.valid_pointer_mode (p_mode))
	    p_mode = ptr_mode;
	  return
	    build_pointer_type_for_mode
	      (TYPE_OBJECT_RECORD_TYPE (TYPE_UNCONSTRAINED_ARRAY (type)),
	       p_mode, 0);
	}
      break;

    case POINTER_TYPE:
      /* Only do something if this is a thin pointer, in which case we
	 may need to return the fat pointer.  */
      if (TYPE_IS_THIN_POINTER_P (type) && size >= POINTER_SIZE * 2)
	return
	  build_pointer_type (TYPE_UNCONSTRAINED_ARRAY (TREE_TYPE (type)));
      break;

    default:
      break;
    }

  /* No better type could be built: return TYPE unchanged.  */
  return type;
}
972
973 /* See if the data pointed to by the hash table slot is marked. */
974
975 static int
976 pad_type_hash_marked_p (const void *p)
977 {
978 const_tree const type = ((const struct pad_type_hash *) p)->type;
979
980 return ggc_marked_p (type);
981 }
982
983 /* Return the cached hash value. */
984
985 static hashval_t
986 pad_type_hash_hash (const void *p)
987 {
988 return ((const struct pad_type_hash *) p)->hash;
989 }
990
991 /* Return 1 iff the padded types are equivalent. */
992
993 static int
994 pad_type_hash_eq (const void *p1, const void *p2)
995 {
996 const struct pad_type_hash *const t1 = (const struct pad_type_hash *) p1;
997 const struct pad_type_hash *const t2 = (const struct pad_type_hash *) p2;
998 tree type1, type2;
999
1000 if (t1->hash != t2->hash)
1001 return 0;
1002
1003 type1 = t1->type;
1004 type2 = t2->type;
1005
1006 /* We consider that the padded types are equivalent if they pad the same
1007 type and have the same size, alignment and RM size. Taking the mode
1008 into account is redundant since it is determined by the others. */
1009 return
1010 TREE_TYPE (TYPE_FIELDS (type1)) == TREE_TYPE (TYPE_FIELDS (type2))
1011 && TYPE_SIZE (type1) == TYPE_SIZE (type2)
1012 && TYPE_ALIGN (type1) == TYPE_ALIGN (type2)
1013 && TYPE_ADA_SIZE (type1) == TYPE_ADA_SIZE (type2);
1014 }
1015
/* Ensure that TYPE has SIZE and ALIGN.  Make and return a new padded type
   if needed.  We have already verified that SIZE and TYPE are large enough.
   GNAT_ENTITY is used to name the resulting record and to issue a warning.
   IS_COMPONENT_TYPE is true if this is being done for the component type of
   an array.  IS_USER_TYPE is true if the original type needs to be completed.
   DEFINITION is true if this type is being defined.  SET_RM_SIZE is true if
   the RM size of the resulting type is to be set to SIZE too.  */

tree
maybe_pad_type (tree type, tree size, unsigned int align,
		Entity_Id gnat_entity, bool is_component_type,
		bool is_user_type, bool definition, bool set_rm_size)
{
  tree orig_size = TYPE_SIZE (type);
  tree record, field;

  /* If TYPE is a padded type, see if it agrees with any size and alignment
     we were given.  If so, return the original type.  Otherwise, strip
     off the padding, since we will either be returning the inner type
     or repadding it.  If no size or alignment is specified, use that of
     the original padded type.  */
  if (TYPE_IS_PADDING_P (type))
    {
      if ((!size
	   || operand_equal_p (round_up (size,
					 MAX (align, TYPE_ALIGN (type))),
			       round_up (TYPE_SIZE (type),
					 MAX (align, TYPE_ALIGN (type))),
			       0))
	  && (align == 0 || align == TYPE_ALIGN (type)))
	return type;

      if (!size)
	size = TYPE_SIZE (type);
      if (align == 0)
	align = TYPE_ALIGN (type);

      /* The inner type of a padding record is its single field's type.  */
      type = TREE_TYPE (TYPE_FIELDS (type));
      orig_size = TYPE_SIZE (type);
    }

  /* If the size is either not being changed or is being made smaller (which
     is not done here and is only valid for bitfields anyway), show the size
     isn't changing.  Likewise, clear the alignment if it isn't being
     changed.  Then return if we aren't doing anything.  */
  if (size
      && (operand_equal_p (size, orig_size, 0)
	  || (TREE_CODE (orig_size) == INTEGER_CST
	      && tree_int_cst_lt (size, orig_size))))
    size = NULL_TREE;

  if (align == TYPE_ALIGN (type))
    align = 0;

  if (align == 0 && !size)
    return type;

  /* If requested, complete the original type and give it a name.  */
  if (is_user_type)
    create_type_decl (get_entity_name (gnat_entity), type,
		      NULL, !Comes_From_Source (gnat_entity),
		      !(TYPE_NAME (type)
			&& TREE_CODE (TYPE_NAME (type)) == TYPE_DECL
			&& DECL_IGNORED_P (TYPE_NAME (type))),
		      gnat_entity);

  /* We used to modify the record in place in some cases, but that could
     generate incorrect debugging information.  So make a new record
     type and name.  */
  record = make_node (RECORD_TYPE);
  TYPE_PADDING_P (record) = 1;

  if (Present (gnat_entity))
    TYPE_NAME (record) = create_concat_name (gnat_entity, "PAD");

  TYPE_ALIGN (record) = align;
  TYPE_SIZE (record) = size ? size : orig_size;
  TYPE_SIZE_UNIT (record)
    = convert (sizetype,
	       size_binop (CEIL_DIV_EXPR, TYPE_SIZE (record),
			   bitsize_unit_node));

  /* If we are changing the alignment and the input type is a record with
     BLKmode and a small constant size, try to make a form that has an
     integral mode.  This might allow the padding record to also have an
     integral mode, which will be much more efficient.  There is no point
     in doing so if a size is specified unless it is also a small constant
     size and it is incorrect to do so if we cannot guarantee that the mode
     will be naturally aligned since the field must always be addressable.

     ??? This might not always be a win when done for a stand-alone object:
     since the nominal and the effective type of the object will now have
     different modes, a VIEW_CONVERT_EXPR will be required for converting
     between them and it might be hard to overcome afterwards, including
     at the RTL level when the stand-alone object is accessed as a whole.  */
  if (align != 0
      && RECORD_OR_UNION_TYPE_P (type)
      && TYPE_MODE (type) == BLKmode
      && !TYPE_BY_REFERENCE_P (type)
      && TREE_CODE (orig_size) == INTEGER_CST
      && !TREE_OVERFLOW (orig_size)
      && compare_tree_int (orig_size, MAX_FIXED_MODE_SIZE) <= 0
      && (!size
	  || (TREE_CODE (size) == INTEGER_CST
	      && compare_tree_int (size, MAX_FIXED_MODE_SIZE) <= 0)))
    {
      tree packable_type = make_packable_type (type, true);
      if (TYPE_MODE (packable_type) != BLKmode
	  && align >= TYPE_ALIGN (packable_type))
        type = packable_type;
    }

  /* Now create the field with the original size.  */
  field  = create_field_decl (get_identifier ("F"), type, record, orig_size,
			      bitsize_zero_node, 0, 1);
  DECL_INTERNAL_P (field) = 1;

  /* Do not emit debug info until after the auxiliary record is built.  */
  finish_record_type (record, field, 1, false);

  /* Set the RM size if requested.  */
  if (set_rm_size)
    {
      SET_TYPE_ADA_SIZE (record, size ? size : orig_size);

      /* If the padded type is complete and has constant size, we canonicalize
	 it by means of the hash table.  This is consistent with the language
	 semantics and ensures that gigi and the middle-end have a common view
	 of these padded types.  */
      if (TREE_CONSTANT (TYPE_SIZE (record)))
	{
	  hashval_t hashcode;
	  struct pad_type_hash in, *h;
	  void **loc;

	  /* The hash key comprises the inner type, the size, the alignment
	     and the RM size, mirroring pad_type_hash_eq above.  */
	  hashcode = iterative_hash_object (TYPE_HASH (type), 0);
	  hashcode = iterative_hash_expr (TYPE_SIZE (record), hashcode);
	  hashcode = iterative_hash_hashval_t (TYPE_ALIGN (record), hashcode);
	  hashcode = iterative_hash_expr (TYPE_ADA_SIZE (record), hashcode);

	  in.hash = hashcode;
	  in.type = record;
	  h = (struct pad_type_hash *)
		htab_find_with_hash (pad_type_hash_table, &in, hashcode);
	  if (h)
	    {
	      /* Reuse the equivalent padded type already registered; skip
		 straight past the debug-info generation below.  */
	      record = h->type;
	      goto built;
	    }

	  /* First time we see this padded type: register it.  */
	  h = ggc_alloc_pad_type_hash ();
	  h->hash = hashcode;
	  h->type = record;
	  loc = htab_find_slot_with_hash (pad_type_hash_table, h, hashcode,
					  INSERT);
	  *loc = (void *)h;
	}
    }

  /* Unless debugging information isn't being written for the input type,
     write a record that shows what we are a subtype of and also make a
     variable that indicates our size, if still variable.  */
  if (TREE_CODE (orig_size) != INTEGER_CST
      && TYPE_NAME (record)
      && TYPE_NAME (type)
      && !(TREE_CODE (TYPE_NAME (type)) == TYPE_DECL
	   && DECL_IGNORED_P (TYPE_NAME (type))))
    {
      tree marker = make_node (RECORD_TYPE);
      tree name = TYPE_NAME (record);
      tree orig_name = TYPE_NAME (type);

      if (TREE_CODE (name) == TYPE_DECL)
	name = DECL_NAME (name);

      if (TREE_CODE (orig_name) == TYPE_DECL)
	orig_name = DECL_NAME (orig_name);

      /* Build the "XVS" parallel record pointing back at the inner type
	 and attach it to the padded record for the debugger.  */
      TYPE_NAME (marker) = concat_name (name, "XVS");
      finish_record_type (marker,
			  create_field_decl (orig_name,
					     build_reference_type (type),
					     marker, NULL_TREE, NULL_TREE,
					     0, 0),
			  0, true);

      add_parallel_type (record, marker);

      /* If the size is dynamic, also materialize it in an "XVZ" variable.  */
      if (definition && size && TREE_CODE (size) != INTEGER_CST)
	TYPE_SIZE_UNIT (marker)
	  = create_var_decl (concat_name (name, "XVZ"), NULL_TREE, sizetype,
			     TYPE_SIZE_UNIT (record), false, false, false,
			     false, NULL, gnat_entity);
    }

  rest_of_record_type_compilation (record);

built:
  /* If the size was widened explicitly, maybe give a warning.  Take the
     original size as the maximum size of the input if there was an
     unconstrained record involved and round it up to the specified alignment,
     if one was specified.  But don't do it if we are just annotating types
     and the type is tagged, since tagged types aren't fully laid out in this
     mode.  */
  if (!size
      || TREE_CODE (size) == COND_EXPR
      || TREE_CODE (size) == MAX_EXPR
      || No (gnat_entity)
      || (type_annotate_only && Is_Tagged_Type (Etype (gnat_entity))))
    return record;

  if (CONTAINS_PLACEHOLDER_P (orig_size))
    orig_size = max_size (orig_size, true);

  if (align)
    orig_size = round_up (orig_size, align);

  if (!operand_equal_p (size, orig_size, 0)
      && !(TREE_CODE (size) == INTEGER_CST
	   && TREE_CODE (orig_size) == INTEGER_CST
	   && (TREE_OVERFLOW (size)
	       || TREE_OVERFLOW (orig_size)
	       || tree_int_cst_lt (size, orig_size))))
    {
      Node_Id gnat_error_node = Empty;

      if (Is_Packed_Array_Type (gnat_entity))
	gnat_entity = Original_Array_Type (gnat_entity);

      /* Point the warning at the relevant representation clause, if any.  */
      if ((Ekind (gnat_entity) == E_Component
	   || Ekind (gnat_entity) == E_Discriminant)
	  && Present (Component_Clause (gnat_entity)))
	gnat_error_node = Last_Bit (Component_Clause (gnat_entity));
      else if (Present (Size_Clause (gnat_entity)))
	gnat_error_node = Expression (Size_Clause (gnat_entity));

      /* Generate message only for entities that come from source, since
	 if we have an entity created by expansion, the message will be
	 generated for some other corresponding source entity.  */
      if (Comes_From_Source (gnat_entity))
	{
	  if (Present (gnat_error_node))
	    post_error_ne_tree ("{^ }bits of & unused?",
				gnat_error_node, gnat_entity,
				size_diffop (size, orig_size));
	  else if (is_component_type)
	    post_error_ne_tree ("component of& padded{ by ^ bits}?",
				gnat_entity, gnat_entity,
				size_diffop (size, orig_size));
	}
    }

  return record;
}
1270 \f
/* Relate the alias sets of GNU_NEW_TYPE and GNU_OLD_TYPE according to OP.
   If this is a multi-dimensional array type, do this recursively.

   OP may be
   - ALIAS_SET_COPY:     the new set is made a copy of the old one.
   - ALIAS_SET_SUPERSET: the new set is made a superset of the old one.
   - ALIAS_SET_SUBSET:   the new set is made a subset of the old one.  */

void
relate_alias_sets (tree gnu_new_type, tree gnu_old_type, enum alias_set_op op)
{
  /* Remove any padding from GNU_OLD_TYPE.  It doesn't matter in the case
     of a one-dimensional array, since the padding has the same alias set
     as the field type, but if it's a multi-dimensional array, we need to
     see the inner types.  */
  while (TREE_CODE (gnu_old_type) == RECORD_TYPE
	 && (TYPE_JUSTIFIED_MODULAR_P (gnu_old_type)
	     || TYPE_PADDING_P (gnu_old_type)))
    gnu_old_type = TREE_TYPE (TYPE_FIELDS (gnu_old_type));

  /* Unconstrained array types are deemed incomplete and would thus be given
     alias set 0.  Retrieve the underlying array type.  */
  if (TREE_CODE (gnu_old_type) == UNCONSTRAINED_ARRAY_TYPE)
    gnu_old_type
      = TREE_TYPE (TREE_TYPE (TYPE_FIELDS (TREE_TYPE (gnu_old_type))));
  if (TREE_CODE (gnu_new_type) == UNCONSTRAINED_ARRAY_TYPE)
    gnu_new_type
      = TREE_TYPE (TREE_TYPE (TYPE_FIELDS (TREE_TYPE (gnu_new_type))));

  /* For a multi-dimensional array, recurse on the element array types so
     the inner dimensions are related the same way.  */
  if (TREE_CODE (gnu_new_type) == ARRAY_TYPE
      && TREE_CODE (TREE_TYPE (gnu_new_type)) == ARRAY_TYPE
      && TYPE_MULTI_ARRAY_P (TREE_TYPE (gnu_new_type)))
    relate_alias_sets (TREE_TYPE (gnu_new_type), TREE_TYPE (gnu_old_type), op);

  switch (op)
    {
    case ALIAS_SET_COPY:
      /* The alias set shouldn't be copied between array types with different
	 aliasing settings because this can break the aliasing relationship
	 between the array type and its element type.  The assertion below is
	 compiled unconditionally in checking builds; otherwise only when
	 strict aliasing is enabled.  */
#ifndef ENABLE_CHECKING
      if (flag_strict_aliasing)
#endif
	gcc_assert (!(TREE_CODE (gnu_new_type) == ARRAY_TYPE
		      && TREE_CODE (gnu_old_type) == ARRAY_TYPE
		      && TYPE_NONALIASED_COMPONENT (gnu_new_type)
			 != TYPE_NONALIASED_COMPONENT (gnu_old_type)));

      TYPE_ALIAS_SET (gnu_new_type) = get_alias_set (gnu_old_type);
      break;

    case ALIAS_SET_SUBSET:
    case ALIAS_SET_SUPERSET:
      {
	alias_set_type old_set = get_alias_set (gnu_old_type);
	alias_set_type new_set = get_alias_set (gnu_new_type);

	/* Do nothing if the alias sets conflict.  This ensures that we
	   never call record_alias_subset several times for the same pair
	   or at all for alias set 0.  */
	if (!alias_sets_conflict_p (old_set, new_set))
	  {
	    if (op == ALIAS_SET_SUBSET)
	      record_alias_subset (old_set, new_set);
	    else
	      record_alias_subset (new_set, old_set);
	  }
      }
      break;

    default:
      gcc_unreachable ();
    }

  record_component_aliases (gnu_new_type);
}
1347 \f
1348 /* Record TYPE as a builtin type for Ada. NAME is the name of the type.
1349 ARTIFICIAL_P is true if it's a type that was generated by the compiler. */
1350
1351 void
1352 record_builtin_type (const char *name, tree type, bool artificial_p)
1353 {
1354 tree type_decl = build_decl (input_location,
1355 TYPE_DECL, get_identifier (name), type);
1356 DECL_ARTIFICIAL (type_decl) = artificial_p;
1357 TYPE_ARTIFICIAL (type) = artificial_p;
1358 gnat_pushdecl (type_decl, Empty);
1359
1360 if (debug_hooks->type_decl)
1361 debug_hooks->type_decl (type_decl, false);
1362 }
1363 \f
/* Given a record type RECORD_TYPE and a list of FIELD_DECL nodes FIELD_LIST,
   finish constructing the record type as a fat pointer type.  */

void
finish_fat_pointer_type (tree record_type, tree field_list)
{
  /* Make sure we can put it into a register.  */
  TYPE_ALIGN (record_type) = MIN (BIGGEST_ALIGNMENT, 2 * POINTER_SIZE);

  /* Show what it really is.  */
  TYPE_FAT_POINTER_P (record_type) = 1;

  /* Do not emit debug info for it since the types of its fields may still be
     incomplete at this point.  Lay it out completely here (REP_LEVEL is 0);
     see finish_record_type for the meaning of the arguments.  */
  finish_record_type (record_type, field_list, 0, false);

  /* Force type_contains_placeholder_p to return true on it.  Although the
     PLACEHOLDER_EXPRs are referenced only indirectly, this isn't a pointer
     type but the representation of the unconstrained array.  */
  TYPE_CONTAINS_PLACEHOLDER_INTERNAL (record_type) = 2;
}
1385
/* Given a record type RECORD_TYPE and a list of FIELD_DECL nodes FIELD_LIST,
   finish constructing the record or union type.  If REP_LEVEL is zero, this
   record has no representation clause and so will be entirely laid out here.
   If REP_LEVEL is one, this record has a representation clause and has been
   laid out already; only set the sizes and alignment.  If REP_LEVEL is two,
   this record is derived from a parent record and thus inherits its layout;
   only make a pass on the fields to finalize them.  DEBUG_INFO_P is true if
   we need to write debug information about this type.  */

void
finish_record_type (tree record_type, tree field_list, int rep_level,
		    bool debug_info_p)
{
  enum tree_code code = TREE_CODE (record_type);
  tree name = TYPE_NAME (record_type);
  tree ada_size = bitsize_zero_node;
  tree size = bitsize_zero_node;
  /* Record which of the size/alignment fields were already set on entry.  */
  bool had_size = TYPE_SIZE (record_type) != 0;
  bool had_size_unit = TYPE_SIZE_UNIT (record_type) != 0;
  bool had_align = TYPE_ALIGN (record_type) != 0;
  tree field;

  TYPE_FIELDS (record_type) = field_list;

  /* Always attach the TYPE_STUB_DECL for a record type.  It is required to
     generate debug info and have a parallel type.  */
  if (name && TREE_CODE (name) == TYPE_DECL)
    name = DECL_NAME (name);
  TYPE_STUB_DECL (record_type) = create_type_stub_decl (name, record_type);

  /* Globally initialize the record first.  If this is a rep'ed record,
     that just means some initializations; otherwise, layout the record.  */
  if (rep_level > 0)
    {
      TYPE_ALIGN (record_type) = MAX (BITS_PER_UNIT, TYPE_ALIGN (record_type));

      if (!had_size_unit)
	TYPE_SIZE_UNIT (record_type) = size_zero_node;

      if (!had_size)
	TYPE_SIZE (record_type) = bitsize_zero_node;

      /* For all-repped records with a size specified, lay the QUAL_UNION_TYPE
	 out just like a UNION_TYPE, since the size will be fixed.  */
      else if (code == QUAL_UNION_TYPE)
	code = UNION_TYPE;
    }
  else
    {
      /* Ensure there isn't a size already set.  There can be in an error
	 case where there is a rep clause but all fields have errors and
	 no longer have a position.  */
      TYPE_SIZE (record_type) = 0;

      /* Ensure we use the traditional GCC layout for bitfields when we need
	 to pack the record type or have a representation clause.  The other
	 possible layout (Microsoft C compiler), if available, would prevent
	 efficient packing in almost all cases.  */
#ifdef TARGET_MS_BITFIELD_LAYOUT
      if (TARGET_MS_BITFIELD_LAYOUT && TYPE_PACKED (record_type))
	decl_attributes (&record_type,
			 tree_cons (get_identifier ("gcc_struct"),
				    NULL_TREE, NULL_TREE),
			 ATTR_FLAG_TYPE_IN_PLACE);
#endif

      layout_type (record_type);
    }

  /* At this point, the position and size of each field is known.  It was
     either set before entry by a rep clause, or by laying out the type above.

     We now run a pass over the fields (in reverse order for QUAL_UNION_TYPEs)
     to compute the Ada size; the GCC size and alignment (for rep'ed records
     that are not padding types); and the mode (for rep'ed records).  We also
     clear the DECL_BIT_FIELD indication for the cases we know have not been
     handled yet, and adjust DECL_NONADDRESSABLE_P accordingly.  */

  if (code == QUAL_UNION_TYPE)
    field_list = nreverse (field_list);

  for (field = field_list; field; field = DECL_CHAIN (field))
    {
      tree type = TREE_TYPE (field);
      tree pos = bit_position (field);
      tree this_size = DECL_SIZE (field);
      tree this_ada_size;

      /* Use the RM size of the field's type for the Ada size when it has
	 one, otherwise fall back to the GCC size.  */
      if (RECORD_OR_UNION_TYPE_P (type)
	  && !TYPE_FAT_POINTER_P (type)
	  && !TYPE_CONTAINS_TEMPLATE_P (type)
	  && TYPE_ADA_SIZE (type))
	this_ada_size = TYPE_ADA_SIZE (type);
      else
	this_ada_size = this_size;

      /* Clear DECL_BIT_FIELD for the cases layout_decl does not handle.  */
      if (DECL_BIT_FIELD (field)
	  && operand_equal_p (this_size, TYPE_SIZE (type), 0))
	{
	  unsigned int align = TYPE_ALIGN (type);

	  /* In the general case, type alignment is required.  */
	  if (value_factor_p (pos, align))
	    {
	      /* The enclosing record type must be sufficiently aligned.
		 Otherwise, if no alignment was specified for it and it
		 has been laid out already, bump its alignment to the
		 desired one if this is compatible with its size.  */
	      if (TYPE_ALIGN (record_type) >= align)
		{
		  DECL_ALIGN (field) = MAX (DECL_ALIGN (field), align);
		  DECL_BIT_FIELD (field) = 0;
		}
	      else if (!had_align
		       && rep_level == 0
		       && value_factor_p (TYPE_SIZE (record_type), align))
		{
		  TYPE_ALIGN (record_type) = align;
		  DECL_ALIGN (field) = MAX (DECL_ALIGN (field), align);
		  DECL_BIT_FIELD (field) = 0;
		}
	    }

	  /* In the non-strict alignment case, only byte alignment is
	     required.  */
	  if (!STRICT_ALIGNMENT
	      && DECL_BIT_FIELD (field)
	      && value_factor_p (pos, BITS_PER_UNIT))
	    DECL_BIT_FIELD (field) = 0;
	}

      /* If we still have DECL_BIT_FIELD set at this point, we know that the
	 field is technically not addressable.  Except that it can actually
	 be addressed if it is BLKmode and happens to be properly aligned.  */
      if (DECL_BIT_FIELD (field)
	  && !(DECL_MODE (field) == BLKmode
	       && value_factor_p (pos, BITS_PER_UNIT)))
	DECL_NONADDRESSABLE_P (field) = 1;

      /* A type must be as aligned as its most aligned field that is not
	 a bit-field.  But this is already enforced by layout_type.  */
      if (rep_level > 0 && !DECL_BIT_FIELD (field))
	TYPE_ALIGN (record_type)
	  = MAX (TYPE_ALIGN (record_type), DECL_ALIGN (field));

      switch (code)
	{
	case UNION_TYPE:
	  /* The size of a union is that of its largest member.  */
	  ada_size = size_binop (MAX_EXPR, ada_size, this_ada_size);
	  size = size_binop (MAX_EXPR, size, this_size);
	  break;

	case QUAL_UNION_TYPE:
	  /* Sizes are selected at run time by the field qualifiers; the
	     fields are walked in reverse order, so earlier variants take
	     precedence in the resulting COND_EXPR chain.  */
	  ada_size
	    = fold_build3 (COND_EXPR, bitsizetype, DECL_QUALIFIER (field),
			   this_ada_size, ada_size);
	  size = fold_build3 (COND_EXPR, bitsizetype, DECL_QUALIFIER (field),
			      this_size, size);
	  break;

	case RECORD_TYPE:
	  /* Since we know here that all fields are sorted in order of
	     increasing bit position, the size of the record is one
	     higher than the ending bit of the last field processed
	     unless we have a rep clause, since in that case we might
	     have a field outside a QUAL_UNION_TYPE that has a higher ending
	     position.  So use a MAX in that case.  Also, if this field is a
	     QUAL_UNION_TYPE, we need to take into account the previous size in
	     the case of empty variants.  */
	  ada_size
	    = merge_sizes (ada_size, pos, this_ada_size,
			   TREE_CODE (type) == QUAL_UNION_TYPE, rep_level > 0);
	  size
	    = merge_sizes (size, pos, this_size,
			   TREE_CODE (type) == QUAL_UNION_TYPE, rep_level > 0);
	  break;

	default:
	  gcc_unreachable ();
	}
    }

  /* Put the fields of a QUAL_UNION_TYPE back in declaration order.  */
  if (code == QUAL_UNION_TYPE)
    nreverse (field_list);

  if (rep_level < 2)
    {
      /* If this is a padding record, we never want to make the size smaller
	 than what was specified in it, if any.  */
      if (TYPE_IS_PADDING_P (record_type) && TYPE_SIZE (record_type))
	size = TYPE_SIZE (record_type);

      /* Now set any of the values we've just computed that apply.  */
      if (!TYPE_FAT_POINTER_P (record_type)
	  && !TYPE_CONTAINS_TEMPLATE_P (record_type))
	SET_TYPE_ADA_SIZE (record_type, ada_size);

      if (rep_level > 0)
	{
	  tree size_unit = had_size_unit
			   ? TYPE_SIZE_UNIT (record_type)
			   : convert (sizetype,
				      size_binop (CEIL_DIV_EXPR, size,
						  bitsize_unit_node));
	  unsigned int align = TYPE_ALIGN (record_type);

	  /* Round the sizes up to the alignment of the record.  */
	  TYPE_SIZE (record_type) = variable_size (round_up (size, align));
	  TYPE_SIZE_UNIT (record_type)
	    = variable_size (round_up (size_unit, align / BITS_PER_UNIT));

	  compute_record_mode (record_type);
	}
    }

  if (debug_info_p)
    rest_of_record_type_compilation (record_type);
}
1603
1604 /* Append PARALLEL_TYPE on the chain of parallel types of TYPE. */
1605
1606 void
1607 add_parallel_type (tree type, tree parallel_type)
1608 {
1609 tree decl = TYPE_STUB_DECL (type);
1610
1611 while (DECL_PARALLEL_TYPE (decl))
1612 decl = TYPE_STUB_DECL (DECL_PARALLEL_TYPE (decl));
1613
1614 SET_DECL_PARALLEL_TYPE (decl, parallel_type);
1615 }
1616
1617 /* Return true if TYPE has a parallel type. */
1618
1619 static bool
1620 has_parallel_type (tree type)
1621 {
1622 tree decl = TYPE_STUB_DECL (type);
1623
1624 return DECL_PARALLEL_TYPE (decl) != NULL_TREE;
1625 }
1626
1627 /* Wrap up compilation of RECORD_TYPE, i.e. output all the debug information
1628 associated with it. It need not be invoked directly in most cases since
1629 finish_record_type takes care of doing so, but this can be necessary if
1630 a parallel type is to be attached to the record type. */
1631
1632 void
1633 rest_of_record_type_compilation (tree record_type)
1634 {
1635 bool var_size = false;
1636 tree field;
1637
1638 /* If this is a padded type, the bulk of the debug info has already been
1639 generated for the field's type. */
1640 if (TYPE_IS_PADDING_P (record_type))
1641 return;
1642
1643 /* If the type already has a parallel type (XVS type), then we're done. */
1644 if (has_parallel_type (record_type))
1645 return;
1646
1647 for (field = TYPE_FIELDS (record_type); field; field = DECL_CHAIN (field))
1648 {
1649 /* We need to make an XVE/XVU record if any field has variable size,
1650 whether or not the record does. For example, if we have a union,
1651 it may be that all fields, rounded up to the alignment, have the
1652 same size, in which case we'll use that size. But the debug
1653 output routines (except Dwarf2) won't be able to output the fields,
1654 so we need to make the special record. */
1655 if (TREE_CODE (DECL_SIZE (field)) != INTEGER_CST
1656 /* If a field has a non-constant qualifier, the record will have
1657 variable size too. */
1658 || (TREE_CODE (record_type) == QUAL_UNION_TYPE
1659 && TREE_CODE (DECL_QUALIFIER (field)) != INTEGER_CST))
1660 {
1661 var_size = true;
1662 break;
1663 }
1664 }
1665
1666 /* If this record type is of variable size, make a parallel record type that
1667 will tell the debugger how the former is laid out (see exp_dbug.ads). */
1668 if (var_size)
1669 {
1670 tree new_record_type
1671 = make_node (TREE_CODE (record_type) == QUAL_UNION_TYPE
1672 ? UNION_TYPE : TREE_CODE (record_type));
1673 tree orig_name = TYPE_NAME (record_type), new_name;
1674 tree last_pos = bitsize_zero_node;
1675 tree old_field, prev_old_field = NULL_TREE;
1676
1677 if (TREE_CODE (orig_name) == TYPE_DECL)
1678 orig_name = DECL_NAME (orig_name);
1679
1680 new_name
1681 = concat_name (orig_name, TREE_CODE (record_type) == QUAL_UNION_TYPE
1682 ? "XVU" : "XVE");
1683 TYPE_NAME (new_record_type) = new_name;
1684 TYPE_ALIGN (new_record_type) = BIGGEST_ALIGNMENT;
1685 TYPE_STUB_DECL (new_record_type)
1686 = create_type_stub_decl (new_name, new_record_type);
1687 DECL_IGNORED_P (TYPE_STUB_DECL (new_record_type))
1688 = DECL_IGNORED_P (TYPE_STUB_DECL (record_type));
1689 TYPE_SIZE (new_record_type) = size_int (TYPE_ALIGN (record_type));
1690 TYPE_SIZE_UNIT (new_record_type)
1691 = size_int (TYPE_ALIGN (record_type) / BITS_PER_UNIT);
1692
1693 /* Now scan all the fields, replacing each field with a new
1694 field corresponding to the new encoding. */
1695 for (old_field = TYPE_FIELDS (record_type); old_field;
1696 old_field = DECL_CHAIN (old_field))
1697 {
1698 tree field_type = TREE_TYPE (old_field);
1699 tree field_name = DECL_NAME (old_field);
1700 tree new_field;
1701 tree curpos = bit_position (old_field);
1702 bool var = false;
1703 unsigned int align = 0;
1704 tree pos;
1705
1706 /* See how the position was modified from the last position.
1707
1708 There are two basic cases we support: a value was added
1709 to the last position or the last position was rounded to
1710 a boundary and they something was added. Check for the
1711 first case first. If not, see if there is any evidence
1712 of rounding. If so, round the last position and try
1713 again.
1714
1715 If this is a union, the position can be taken as zero. */
1716
1717 /* Some computations depend on the shape of the position expression,
1718 so strip conversions to make sure it's exposed. */
1719 curpos = remove_conversions (curpos, true);
1720
1721 if (TREE_CODE (new_record_type) == UNION_TYPE)
1722 pos = bitsize_zero_node, align = 0;
1723 else
1724 pos = compute_related_constant (curpos, last_pos);
1725
1726 if (!pos && TREE_CODE (curpos) == MULT_EXPR
1727 && host_integerp (TREE_OPERAND (curpos, 1), 1))
1728 {
1729 tree offset = TREE_OPERAND (curpos, 0);
1730 align = tree_low_cst (TREE_OPERAND (curpos, 1), 1);
1731
1732 /* An offset which is a bitwise AND with a negative power of 2
1733 means an alignment corresponding to this power of 2. Note
1734 that, as sizetype is sign-extended but nonetheless unsigned,
1735 we don't directly use tree_int_cst_sgn. */
1736 offset = remove_conversions (offset, true);
1737 if (TREE_CODE (offset) == BIT_AND_EXPR
1738 && host_integerp (TREE_OPERAND (offset, 1), 0)
1739 && TREE_INT_CST_HIGH (TREE_OPERAND (offset, 1)) < 0)
1740 {
1741 unsigned int pow
1742 = - tree_low_cst (TREE_OPERAND (offset, 1), 0);
1743 if (exact_log2 (pow) > 0)
1744 align *= pow;
1745 }
1746
1747 pos = compute_related_constant (curpos,
1748 round_up (last_pos, align));
1749 }
1750 else if (!pos && TREE_CODE (curpos) == PLUS_EXPR
1751 && TREE_CODE (TREE_OPERAND (curpos, 1)) == INTEGER_CST
1752 && TREE_CODE (TREE_OPERAND (curpos, 0)) == MULT_EXPR
1753 && host_integerp (TREE_OPERAND
1754 (TREE_OPERAND (curpos, 0), 1),
1755 1))
1756 {
1757 align
1758 = tree_low_cst
1759 (TREE_OPERAND (TREE_OPERAND (curpos, 0), 1), 1);
1760 pos = compute_related_constant (curpos,
1761 round_up (last_pos, align));
1762 }
1763 else if (potential_alignment_gap (prev_old_field, old_field,
1764 pos))
1765 {
1766 align = TYPE_ALIGN (field_type);
1767 pos = compute_related_constant (curpos,
1768 round_up (last_pos, align));
1769 }
1770
1771 /* If we can't compute a position, set it to zero.
1772
1773 ??? We really should abort here, but it's too much work
1774 to get this correct for all cases. */
1775
1776 if (!pos)
1777 pos = bitsize_zero_node;
1778
1779 /* See if this type is variable-sized and make a pointer type
1780 and indicate the indirection if so. Beware that the debug
1781 back-end may adjust the position computed above according
1782 to the alignment of the field type, i.e. the pointer type
1783 in this case, if we don't preventively counter that. */
1784 if (TREE_CODE (DECL_SIZE (old_field)) != INTEGER_CST)
1785 {
1786 field_type = build_pointer_type (field_type);
1787 if (align != 0 && TYPE_ALIGN (field_type) > align)
1788 {
1789 field_type = copy_node (field_type);
1790 TYPE_ALIGN (field_type) = align;
1791 }
1792 var = true;
1793 }
1794
1795 /* Make a new field name, if necessary. */
1796 if (var || align != 0)
1797 {
1798 char suffix[16];
1799
1800 if (align != 0)
1801 sprintf (suffix, "XV%c%u", var ? 'L' : 'A',
1802 align / BITS_PER_UNIT);
1803 else
1804 strcpy (suffix, "XVL");
1805
1806 field_name = concat_name (field_name, suffix);
1807 }
1808
1809 new_field
1810 = create_field_decl (field_name, field_type, new_record_type,
1811 DECL_SIZE (old_field), pos, 0, 0);
1812 DECL_CHAIN (new_field) = TYPE_FIELDS (new_record_type);
1813 TYPE_FIELDS (new_record_type) = new_field;
1814
1815 /* If old_field is a QUAL_UNION_TYPE, take its size as being
1816 zero. The only time it's not the last field of the record
1817 is when there are other components at fixed positions after
1818 it (meaning there was a rep clause for every field) and we
1819 want to be able to encode them. */
1820 last_pos = size_binop (PLUS_EXPR, bit_position (old_field),
1821 (TREE_CODE (TREE_TYPE (old_field))
1822 == QUAL_UNION_TYPE)
1823 ? bitsize_zero_node
1824 : DECL_SIZE (old_field));
1825 prev_old_field = old_field;
1826 }
1827
1828 TYPE_FIELDS (new_record_type) = nreverse (TYPE_FIELDS (new_record_type));
1829
1830 add_parallel_type (record_type, new_record_type);
1831 }
1832 }
1833
/* Utility function of above to merge LAST_SIZE, the previous size of a record
   with FIRST_BIT and SIZE that describe a field.  SPECIAL is true if this
   represents a QUAL_UNION_TYPE in which case we must look for COND_EXPRs and
   replace a value of zero with the old size.  If HAS_REP is true, we take the
   MAX of the end position of this field with LAST_SIZE.  In all other cases,
   we use FIRST_BIT plus SIZE.  Return an expression for the size.  */

static tree
merge_sizes (tree last_size, tree first_bit, tree size, bool special,
	     bool has_rep)
{
  tree type = TREE_TYPE (last_size);
  tree new_size;

  if (!special || TREE_CODE (size) != COND_EXPR)
    {
      /* The end of this field is FIRST_BIT + SIZE; with a rep clause the
	 record size is the MAX of that and the previous size.  */
      new_size = size_binop (PLUS_EXPR, first_bit, size);
      if (has_rep)
	new_size = size_binop (MAX_EXPR, last_size, new_size);
    }

  /* For a variant part (QUAL_UNION_TYPE), SIZE is a COND_EXPR: merge each
     arm recursively, replacing a zero size with LAST_SIZE.  */
  else
    new_size = fold_build3 (COND_EXPR, type, TREE_OPERAND (size, 0),
			    integer_zerop (TREE_OPERAND (size, 1))
			    ? last_size : merge_sizes (last_size, first_bit,
						       TREE_OPERAND (size, 1),
						       1, has_rep),
			    integer_zerop (TREE_OPERAND (size, 2))
			    ? last_size : merge_sizes (last_size, first_bit,
						       TREE_OPERAND (size, 2),
						       1, has_rep));

  /* We don't need any NON_LVALUE_EXPRs and they can confuse us (especially
     when fed through substitute_in_expr) into thinking that a constant
     size is not constant.  */
  while (TREE_CODE (new_size) == NON_LVALUE_EXPR)
    new_size = TREE_OPERAND (new_size, 0);

  return new_size;
}
1874
1875 /* Utility function of above to see if OP0 and OP1, both of SIZETYPE, are
1876 related by the addition of a constant. Return that constant if so. */
1877
1878 static tree
1879 compute_related_constant (tree op0, tree op1)
1880 {
1881 tree op0_var, op1_var;
1882 tree op0_con = split_plus (op0, &op0_var);
1883 tree op1_con = split_plus (op1, &op1_var);
1884 tree result = size_binop (MINUS_EXPR, op0_con, op1_con);
1885
1886 if (operand_equal_p (op0_var, op1_var, 0))
1887 return result;
1888 else if (operand_equal_p (op0, size_binop (PLUS_EXPR, op1_var, result), 0))
1889 return result;
1890 else
1891 return 0;
1892 }
1893
/* Utility function of above to split a tree OP which may be a sum, into a
   constant part, which is returned, and a variable part, which is stored
   in *PVAR.  *PVAR may be bitsize_zero_node.  All operations must be of
   bitsizetype.  */

static tree
split_plus (tree in, tree *pvar)
{
  /* Strip conversions in order to ease the tree traversal and maximize the
     potential for constant or plus/minus discovery.  We need to be careful
     to always return and set *pvar to bitsizetype trees, but it's worth
     the effort.  */
  in = remove_conversions (in, false);

  /* Default answer: the whole expression is the variable part and the
     constant part is zero.  The branches below may override this.  */
  *pvar = convert (bitsizetype, in);

  if (TREE_CODE (in) == INTEGER_CST)
    {
      /* A constant has no variable part.  */
      *pvar = bitsize_zero_node;
      return convert (bitsizetype, in);
    }
  else if (TREE_CODE (in) == PLUS_EXPR || TREE_CODE (in) == MINUS_EXPR)
    {
      /* Split both operands recursively and recombine the pieces with
	 the same PLUS or MINUS operation.  */
      tree lhs_var, rhs_var;
      tree lhs_con = split_plus (TREE_OPERAND (in, 0), &lhs_var);
      tree rhs_con = split_plus (TREE_OPERAND (in, 1), &rhs_var);

      /* If the recursion extracted nothing from either operand, keep the
	 default *PVAR set above and report a zero constant part.  */
      if (lhs_var == TREE_OPERAND (in, 0)
	  && rhs_var == TREE_OPERAND (in, 1))
	return bitsize_zero_node;

      *pvar = size_binop (TREE_CODE (in), lhs_var, rhs_var);
      return size_binop (TREE_CODE (in), lhs_con, rhs_con);
    }
  else
    return bitsize_zero_node;
}
1931 \f
/* Return a FUNCTION_TYPE node.  RETURN_TYPE is the type returned by the
   subprogram.  If it is VOID_TYPE, then we are dealing with a procedure,
   otherwise we are dealing with a function.  PARAM_DECL_LIST is a list of
   PARM_DECL nodes that are the subprogram parameters.  CICO_LIST is the
   copy-in/copy-out list to be stored into the TYPE_CICO_LIST field.
   RETURN_UNCONSTRAINED_P is true if the function returns an unconstrained
   object.  RETURN_BY_DIRECT_REF_P is true if the function returns by direct
   reference.  RETURN_BY_INVISI_REF_P is true if the function returns by
   invisible reference.  */

tree
create_subprog_type (tree return_type, tree param_decl_list, tree cico_list,
		     bool return_unconstrained_p, bool return_by_direct_ref_p,
		     bool return_by_invisi_ref_p)
{
  /* A list of the data type nodes of the subprogram formal parameters.
     This list is generated by traversing the input list of PARM_DECL
     nodes.  */
  VEC(tree,gc) *param_type_list = NULL;
  tree t, type;

  for (t = param_decl_list; t; t = DECL_CHAIN (t))
    VEC_safe_push (tree, gc, param_type_list, TREE_TYPE (t));

  type = build_function_type_vec (return_type, param_type_list);

  /* TYPE may have been shared since GCC hashes types.  If it has a different
     CICO_LIST, make a copy.  Likewise for the various flags.  */
  if (!fntype_same_flags_p (type, cico_list, return_unconstrained_p,
			    return_by_direct_ref_p, return_by_invisi_ref_p))
    {
      type = copy_type (type);
      TYPE_CI_CO_LIST (type) = cico_list;
      TYPE_RETURN_UNCONSTRAINED_P (type) = return_unconstrained_p;
      TYPE_RETURN_BY_DIRECT_REF_P (type) = return_by_direct_ref_p;
      /* Return by invisible reference is encoded as TREE_ADDRESSABLE on
	 the function type.  */
      TREE_ADDRESSABLE (type) = return_by_invisi_ref_p;
    }

  return type;
}
1972 \f
/* Return a copy of TYPE but safe to modify in any way.  */

tree
copy_type (tree type)
{
  tree new_type = copy_node (type);

  /* Unshare the language-specific data.  */
  if (TYPE_LANG_SPECIFIC (type))
    {
      TYPE_LANG_SPECIFIC (new_type) = NULL;
      SET_TYPE_LANG_SPECIFIC (new_type, GET_TYPE_LANG_SPECIFIC (type));
    }

  /* And the contents of the language-specific slot if needed.  */
  if ((INTEGRAL_TYPE_P (type) || TREE_CODE (type) == REAL_TYPE)
      && TYPE_RM_VALUES (type))
    {
      TYPE_RM_VALUES (new_type) = NULL_TREE;
      SET_TYPE_RM_SIZE (new_type, TYPE_RM_SIZE (type));
      SET_TYPE_RM_MIN_VALUE (new_type, TYPE_RM_MIN_VALUE (type));
      SET_TYPE_RM_MAX_VALUE (new_type, TYPE_RM_MAX_VALUE (type));
    }

  /* copy_node clears this field instead of copying it, because it is
     aliased with TREE_CHAIN.  */
  TYPE_STUB_DECL (new_type) = TYPE_STUB_DECL (type);

  /* Reset the cached pointer/reference types and detach the copy from
     TYPE's variant chain, making it its own main variant.  */
  TYPE_POINTER_TO (new_type) = 0;
  TYPE_REFERENCE_TO (new_type) = 0;
  TYPE_MAIN_VARIANT (new_type) = new_type;
  TYPE_NEXT_VARIANT (new_type) = 0;

  return new_type;
}
2008 \f
2009 /* Return a subtype of sizetype with range MIN to MAX and whose
2010 TYPE_INDEX_TYPE is INDEX. GNAT_NODE is used for the position
2011 of the associated TYPE_DECL. */
2012
2013 tree
2014 create_index_type (tree min, tree max, tree index, Node_Id gnat_node)
2015 {
2016 /* First build a type for the desired range. */
2017 tree type = build_nonshared_range_type (sizetype, min, max);
2018
2019 /* Then set the index type. */
2020 SET_TYPE_INDEX_TYPE (type, index);
2021 create_type_decl (NULL_TREE, type, NULL, true, false, gnat_node);
2022
2023 return type;
2024 }
2025
2026 /* Return a subtype of TYPE with range MIN to MAX. If TYPE is NULL,
2027 sizetype is used. */
2028
2029 tree
2030 create_range_type (tree type, tree min, tree max)
2031 {
2032 tree range_type;
2033
2034 if (type == NULL_TREE)
2035 type = sizetype;
2036
2037 /* First build a type with the base range. */
2038 range_type = build_nonshared_range_type (type, TYPE_MIN_VALUE (type),
2039 TYPE_MAX_VALUE (type));
2040
2041 /* Then set the actual range. */
2042 SET_TYPE_RM_MIN_VALUE (range_type, convert (type, min));
2043 SET_TYPE_RM_MAX_VALUE (range_type, convert (type, max));
2044
2045 return range_type;
2046 }
2047 \f
2048 /* Return a TYPE_DECL node suitable for the TYPE_STUB_DECL field of a type.
2049 TYPE_NAME gives the name of the type and TYPE is a ..._TYPE node giving
2050 its data type. */
2051
2052 tree
2053 create_type_stub_decl (tree type_name, tree type)
2054 {
2055 /* Using a named TYPE_DECL ensures that a type name marker is emitted in
2056 STABS while setting DECL_ARTIFICIAL ensures that no DW_TAG_typedef is
2057 emitted in DWARF. */
2058 tree type_decl = build_decl (input_location,
2059 TYPE_DECL, type_name, type);
2060 DECL_ARTIFICIAL (type_decl) = 1;
2061 TYPE_ARTIFICIAL (type) = 1;
2062 return type_decl;
2063 }
2064
/* Return a TYPE_DECL node.  TYPE_NAME gives the name of the type and TYPE
   is a ..._TYPE node giving its data type.  ARTIFICIAL_P is true if this
   is a declaration that was generated by the compiler.  DEBUG_INFO_P is
   true if we need to write debug information about this type.  GNAT_NODE
   is used for the position of the decl.  */

tree
create_type_decl (tree type_name, tree type, struct attrib *attr_list,
		  bool artificial_p, bool debug_info_p, Node_Id gnat_node)
{
  enum tree_code code = TREE_CODE (type);
  /* Whether TYPE already carries a naming TYPE_DECL.  */
  bool named = TYPE_NAME (type) && TREE_CODE (TYPE_NAME (type)) == TYPE_DECL;
  tree type_decl;

  /* Only the builtin TYPE_STUB_DECL should be used for dummy types.  */
  gcc_assert (!TYPE_IS_DUMMY_P (type));

  /* If the type hasn't been named yet, we're naming it; preserve an existing
     TYPE_STUB_DECL that has been attached to it for some purpose.  */
  if (!named && TYPE_STUB_DECL (type))
    {
      type_decl = TYPE_STUB_DECL (type);
      DECL_NAME (type_decl) = type_name;
    }
  else
    type_decl = build_decl (input_location,
			    TYPE_DECL, type_name, type);

  DECL_ARTIFICIAL (type_decl) = artificial_p;
  TYPE_ARTIFICIAL (type) = artificial_p;

  /* Add this decl to the current binding level.  */
  gnat_pushdecl (type_decl, gnat_node);

  process_attributes (type_decl, attr_list);

  /* If we're naming the type, equate the TYPE_STUB_DECL to the name.
     This causes the name to be also viewed as a "tag" by the debug
     back-end, with the advantage that no DW_TAG_typedef is emitted
     for artificial "tagged" types in DWARF.  */
  if (!named)
    TYPE_STUB_DECL (type) = type_decl;

  /* Do not generate debug info for UNCONSTRAINED_ARRAY_TYPE that the
     back-end doesn't support, and for others if we don't need to.  */
  if (code == UNCONSTRAINED_ARRAY_TYPE || !debug_info_p)
    DECL_IGNORED_P (type_decl) = 1;

  return type_decl;
}
2115 \f
/* Return a VAR_DECL or CONST_DECL node.

   VAR_NAME gives the name of the variable.  ASM_NAME is its assembler name
   (if provided).  TYPE is its data type (a GCC ..._TYPE node).  VAR_INIT is
   the GCC tree for an optional initial expression; NULL_TREE if none.

   CONST_FLAG is true if this variable is constant, in which case we might
   return a CONST_DECL node unless CONST_DECL_ALLOWED_P is false.

   PUBLIC_FLAG is true if this is for a reference to a public entity or for a
   definition to be made visible outside of the current compilation unit, for
   instance variable definitions in a package specification.

   EXTERN_FLAG is true when processing an external variable declaration (as
   opposed to a definition: no storage is to be allocated for the variable).

   STATIC_FLAG is only relevant when not at top level.  In that case
   it indicates whether to always allocate storage to the variable.

   GNAT_NODE is used for the position of the decl.  */

tree
create_var_decl_1 (tree var_name, tree asm_name, tree type, tree var_init,
		   bool const_flag, bool public_flag, bool extern_flag,
		   bool static_flag, bool const_decl_allowed_p,
		   struct attrib *attr_list, Node_Id gnat_node)
{
  /* Whether the initializer is a constant initializer.  At the global level
     or for an external object or an object to be allocated in static memory,
     we check that it is a valid constant expression for use in initializing
     a static variable; otherwise, we only check that it is constant.  */
  bool init_const
    = (var_init != 0
       && gnat_types_compatible_p (type, TREE_TYPE (var_init))
       && (global_bindings_p () || extern_flag || static_flag
	   ? initializer_constant_valid_p (var_init, TREE_TYPE (var_init)) != 0
	   : TREE_CONSTANT (var_init)));

  /* Whether we will make TREE_CONSTANT the DECL we produce here, in which
     case the initializer may be used in-lieu of the DECL node (as done in
     Identifier_to_gnu).  This is useful to prevent the need of elaboration
     code when an identifier for which such a decl is made is in turn used as
     an initializer.  We used to rely on CONST vs VAR_DECL for this purpose,
     but extra constraints apply to this choice (see below) and are not
     relevant to the distinction we wish to make.  */
  bool constant_p = const_flag && init_const;

  /* The actual DECL node.  CONST_DECL was initially intended for enumerals
     and may be used for scalars in general but not for aggregates.  */
  tree var_decl
    = build_decl (input_location,
		  (constant_p && const_decl_allowed_p
		   && !AGGREGATE_TYPE_P (type)) ? CONST_DECL : VAR_DECL,
		  var_name, type);

  /* If this is external, throw away any initializations (they will be done
     elsewhere) unless this is a constant for which we would like to remain
     able to get the initializer.  If we are defining a global here, leave a
     constant initialization and save any variable elaborations for the
     elaboration routine.  If we are just annotating types, throw away the
     initialization if it isn't a constant.  */
  if ((extern_flag && !constant_p)
      || (type_annotate_only && var_init && !TREE_CONSTANT (var_init)))
    var_init = NULL_TREE;

  /* At the global level, an initializer requiring code to be generated
     produces elaboration statements.  Check that such statements are allowed,
     that is, not violating a No_Elaboration_Code restriction.  */
  if (global_bindings_p () && var_init != 0 && !init_const)
    Check_Elaboration_Code_Allowed (gnat_node);

  /* Set the basic flags; volatility of the type carries over to the decl
     as both TREE_THIS_VOLATILE and TREE_SIDE_EFFECTS.  */
  DECL_INITIAL (var_decl) = var_init;
  TREE_READONLY (var_decl) = const_flag;
  DECL_EXTERNAL (var_decl) = extern_flag;
  TREE_PUBLIC (var_decl) = public_flag || extern_flag;
  TREE_CONSTANT (var_decl) = constant_p;
  TREE_THIS_VOLATILE (var_decl) = TREE_SIDE_EFFECTS (var_decl)
    = TYPE_VOLATILE (type);

  /* Ada doesn't feature Fortran-like COMMON variables so we shouldn't
     try to fiddle with DECL_COMMON.  However, on platforms that don't
     support global BSS sections, uninitialized global variables would
     go in DATA instead, thus increasing the size of the executable.  */
  if (!flag_no_common
      && TREE_CODE (var_decl) == VAR_DECL
      && TREE_PUBLIC (var_decl)
      && !have_global_bss_p ())
    DECL_COMMON (var_decl) = 1;

  /* At the global binding level, we need to allocate static storage for the
     variable if it isn't external.  Otherwise, we allocate automatic storage
     unless requested not to.  */
  TREE_STATIC (var_decl)
    = !extern_flag && (static_flag || global_bindings_p ());

  /* For an external constant whose initializer is not absolute, do not emit
     debug info.  In DWARF this would mean a global relocation in a read-only
     section which runs afoul of the PE-COFF run-time relocation mechanism.  */
  if (extern_flag
      && constant_p
      && var_init
      && initializer_constant_valid_p (var_init, TREE_TYPE (var_init))
	 != null_pointer_node)
    DECL_IGNORED_P (var_decl) = 1;

  /* Add this decl to the current binding level.  */
  gnat_pushdecl (var_decl, gnat_node);

  /* A volatile decl (see above) is also made addressable.  */
  if (TREE_SIDE_EFFECTS (var_decl))
    TREE_ADDRESSABLE (var_decl) = 1;

  /* Only VAR_DECLs get an assembler name, attribute processing and, at the
     global level, output via rest_of_decl_compilation.  */
  if (TREE_CODE (var_decl) == VAR_DECL)
    {
      if (asm_name)
	SET_DECL_ASSEMBLER_NAME (var_decl, asm_name);
      process_attributes (var_decl, attr_list);
      if (global_bindings_p ())
	rest_of_decl_compilation (var_decl, true, 0);
    }

  return var_decl;
}
2238 \f
2239 /* Return true if TYPE, an aggregate type, contains (or is) an array. */
2240
2241 static bool
2242 aggregate_type_contains_array_p (tree type)
2243 {
2244 switch (TREE_CODE (type))
2245 {
2246 case RECORD_TYPE:
2247 case UNION_TYPE:
2248 case QUAL_UNION_TYPE:
2249 {
2250 tree field;
2251 for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
2252 if (AGGREGATE_TYPE_P (TREE_TYPE (field))
2253 && aggregate_type_contains_array_p (TREE_TYPE (field)))
2254 return true;
2255 return false;
2256 }
2257
2258 case ARRAY_TYPE:
2259 return true;
2260
2261 default:
2262 gcc_unreachable ();
2263 }
2264 }
2265
/* Return a FIELD_DECL node.  FIELD_NAME is the field's name, FIELD_TYPE is
   its type and RECORD_TYPE is the type of the enclosing record.  If SIZE is
   nonzero, it is the specified size of the field.  If POS is nonzero, it is
   the bit position.  PACKED is 1 if the enclosing record is packed, -1 if it
   has Component_Alignment of Storage_Unit.  If ADDRESSABLE is nonzero, it
   means we are allowed to take the address of the field; if it is negative,
   we should not make a bitfield, which is used by make_aligning_type.  */

tree
create_field_decl (tree field_name, tree field_type, tree record_type,
		   tree size, tree pos, int packed, int addressable)
{
  tree field_decl = build_decl (input_location,
				FIELD_DECL, field_name, field_type);

  DECL_CONTEXT (field_decl) = record_type;
  TREE_READONLY (field_decl) = TYPE_READONLY (field_type);

  /* If FIELD_TYPE is BLKmode, we must ensure this is aligned to at least a
     byte boundary since GCC cannot handle less-aligned BLKmode bitfields.
     Likewise for an aggregate without specified position that contains an
     array, because in this case slices of variable length of this array
     must be handled by GCC and variable-sized objects need to be aligned
     to at least a byte boundary.  */
  if (packed && (TYPE_MODE (field_type) == BLKmode
		 || (!pos
		     && AGGREGATE_TYPE_P (field_type)
		     && aggregate_type_contains_array_p (field_type))))
    DECL_ALIGN (field_decl) = BITS_PER_UNIT;

  /* If a size is specified, use it.  Otherwise, if the record type is packed
     compute a size to use, which may differ from the object's natural size.
     We always set a size in this case to trigger the checks for bitfield
     creation below, which is typically required when no position has been
     specified.  */
  if (size)
    size = convert (bitsizetype, size);
  else if (packed == 1)
    {
      size = rm_size (field_type);
      if (TYPE_MODE (field_type) == BLKmode)
	size = round_up (size, BITS_PER_UNIT);
    }

  /* If we may, according to ADDRESSABLE, make a bitfield if a size is
     specified for two reasons: first if the size differs from the natural
     size.  Second, if the alignment is insufficient.  There are a number of
     ways the latter can be true.

     We never make a bitfield if the type of the field has a nonconstant size,
     because no such entity requiring bitfield operations should reach here.

     We do *preventively* make a bitfield when there might be the need for it
     but we don't have all the necessary information to decide, as is the case
     of a field with no specified position in a packed record.

     We also don't look at STRICT_ALIGNMENT here, and rely on later processing
     in layout_decl or finish_record_type to clear the bit_field indication if
     it is in fact not needed.  */
  if (addressable >= 0
      && size
      && TREE_CODE (size) == INTEGER_CST
      && TREE_CODE (TYPE_SIZE (field_type)) == INTEGER_CST
      && (!tree_int_cst_equal (size, TYPE_SIZE (field_type))
	  || (pos && !value_factor_p (pos, TYPE_ALIGN (field_type)))
	  || packed
	  || (TYPE_ALIGN (record_type) != 0
	      && TYPE_ALIGN (record_type) < TYPE_ALIGN (field_type))))
    {
      DECL_BIT_FIELD (field_decl) = 1;
      DECL_SIZE (field_decl) = size;
      if (!packed && !pos)
	{
	  /* Cap the field alignment at the record's alignment when the
	     latter is smaller than the field type's.  */
	  if (TYPE_ALIGN (record_type) != 0
	      && TYPE_ALIGN (record_type) < TYPE_ALIGN (field_type))
	    DECL_ALIGN (field_decl) = TYPE_ALIGN (record_type);
	  else
	    DECL_ALIGN (field_decl) = TYPE_ALIGN (field_type);
	}
    }

  DECL_PACKED (field_decl) = pos ? DECL_BIT_FIELD (field_decl) : packed;

  /* Bump the alignment if need be, either for bitfield/packing purposes or
     to satisfy the type requirements if no such consideration applies.  When
     we get the alignment from the type, indicate if this is from an explicit
     user request, which prevents stor-layout from lowering it later on.  */
  {
    unsigned int bit_align
      = (DECL_BIT_FIELD (field_decl) ? 1
	 : packed && TYPE_MODE (field_type) != BLKmode ? BITS_PER_UNIT : 0);

    if (bit_align > DECL_ALIGN (field_decl))
      DECL_ALIGN (field_decl) = bit_align;
    else if (!bit_align && TYPE_ALIGN (field_type) > DECL_ALIGN (field_decl))
      {
	DECL_ALIGN (field_decl) = TYPE_ALIGN (field_type);
	DECL_USER_ALIGN (field_decl) = TYPE_USER_ALIGN (field_type);
      }
  }

  if (pos)
    {
      /* We need to pass in the alignment the DECL is known to have.
	 This is the lowest-order bit set in POS, but no more than
	 the alignment of the record, if one is specified.  Note
	 that an alignment of 0 is taken as infinite.  */
      unsigned int known_align;

      if (host_integerp (pos, 1))
	known_align = tree_low_cst (pos, 1) & - tree_low_cst (pos, 1);
      else
	known_align = BITS_PER_UNIT;

      if (TYPE_ALIGN (record_type)
	  && (known_align == 0 || known_align > TYPE_ALIGN (record_type)))
	known_align = TYPE_ALIGN (record_type);

      layout_decl (field_decl, known_align);
      SET_DECL_OFFSET_ALIGN (field_decl,
			     host_integerp (pos, 1) ? BIGGEST_ALIGNMENT
			     : BITS_PER_UNIT);
      pos_from_bit (&DECL_FIELD_OFFSET (field_decl),
		    &DECL_FIELD_BIT_OFFSET (field_decl),
		    DECL_OFFSET_ALIGN (field_decl), pos);
    }

  /* In addition to what our caller says, claim the field is addressable if we
     know that its type is not suitable.

     The field may also be "technically" nonaddressable, meaning that even if
     we attempt to take the field's address we will actually get the address
     of a copy.  This is the case for true bitfields, but the DECL_BIT_FIELD
     value we have at this point is not accurate enough, so we don't account
     for this here and let finish_record_type decide.  */
  if (!addressable && !type_for_nonaliased_component_p (field_type))
    addressable = 1;

  DECL_NONADDRESSABLE_P (field_decl) = !addressable;

  return field_decl;
}
2408 \f
/* Return a PARM_DECL node.  PARAM_NAME is the name of the parameter and
   PARAM_TYPE is its type.  READONLY is true if the parameter is readonly
   (either an In parameter or an address of a pass-by-ref parameter).  */

tree
create_param_decl (tree param_name, tree param_type, bool readonly)
{
  tree param_decl = build_decl (input_location,
				PARM_DECL, param_name, param_type);

  /* Honor TARGET_PROMOTE_PROTOTYPES like the C compiler, as not doing so
     can lead to various ABI violations.  */
  if (targetm.calls.promote_prototypes (NULL_TREE)
      && INTEGRAL_TYPE_P (param_type)
      && TYPE_PRECISION (param_type) < TYPE_PRECISION (integer_type_node))
    {
      /* We have to be careful about biased types here.  Make a subtype
	 of integer_type_node with the proper biasing.  */
      if (TREE_CODE (param_type) == INTEGER_TYPE
	  && TYPE_BIASED_REPRESENTATION_P (param_type))
	{
	  tree subtype
	    = make_unsigned_type (TYPE_PRECISION (integer_type_node));
	  TREE_TYPE (subtype) = integer_type_node;
	  TYPE_BIASED_REPRESENTATION_P (subtype) = 1;
	  SET_TYPE_RM_MIN_VALUE (subtype, TYPE_MIN_VALUE (param_type));
	  SET_TYPE_RM_MAX_VALUE (subtype, TYPE_MAX_VALUE (param_type));
	  param_type = subtype;
	}
      else
	param_type = integer_type_node;
    }

  /* Record the possibly promoted PARAM_TYPE.  */
  DECL_ARG_TYPE (param_decl) = param_type;
  TREE_READONLY (param_decl) = readonly;
  return param_decl;
}
2446 \f
/* Given a DECL and ATTR_LIST, process the listed attributes.  */

static void
process_attributes (tree decl, struct attrib *attr_list)
{
  for (; attr_list; attr_list = attr_list->next)
    switch (attr_list->type)
      {
      case ATTR_MACHINE_ATTRIBUTE:
	/* Hand the attribute to the middle-end, pointing diagnostics at
	   the declaration itself.  */
	input_location = DECL_SOURCE_LOCATION (decl);
	decl_attributes (&decl, tree_cons (attr_list->name, attr_list->args,
					   NULL_TREE),
			 ATTR_FLAG_TYPE_IN_PLACE);
	break;

      case ATTR_LINK_ALIAS:
	/* Only a definition can be aliased to another symbol.  */
	if (! DECL_EXTERNAL (decl))
	  {
	    TREE_STATIC (decl) = 1;
	    assemble_alias (decl, attr_list->name);
	  }
	break;

      case ATTR_WEAK_EXTERNAL:
	if (SUPPORTS_WEAK)
	  declare_weak (decl);
	else
	  post_error ("?weak declarations not supported on this target",
		      attr_list->error_point);
	break;

      case ATTR_LINK_SECTION:
	if (targetm_common.have_named_sections)
	  {
	    DECL_SECTION_NAME (decl)
	      = build_string (IDENTIFIER_LENGTH (attr_list->name),
			      IDENTIFIER_POINTER (attr_list->name));
	    /* A decl placed in a specific section cannot be common.  */
	    DECL_COMMON (decl) = 0;
	  }
	else
	  post_error ("?section attributes are not supported for this target",
		      attr_list->error_point);
	break;

      case ATTR_LINK_CONSTRUCTOR:
	DECL_STATIC_CONSTRUCTOR (decl) = 1;
	TREE_USED (decl) = 1;
	break;

      case ATTR_LINK_DESTRUCTOR:
	DECL_STATIC_DESTRUCTOR (decl) = 1;
	TREE_USED (decl) = 1;
	break;

      case ATTR_THREAD_LOCAL_STORAGE:
	DECL_TLS_MODEL (decl) = decl_default_tls_model (decl);
	DECL_COMMON (decl) = 0;
	break;
      }
}
2507 \f
2508 /* Record DECL as a global renaming pointer. */
2509
2510 void
2511 record_global_renaming_pointer (tree decl)
2512 {
2513 gcc_assert (!DECL_LOOP_PARM_P (decl) && DECL_RENAMED_OBJECT (decl));
2514 VEC_safe_push (tree, gc, global_renaming_pointers, decl);
2515 }
2516
2517 /* Invalidate the global renaming pointers. */
2518
2519 void
2520 invalidate_global_renaming_pointers (void)
2521 {
2522 unsigned int i;
2523 tree iter;
2524
2525 FOR_EACH_VEC_ELT (tree, global_renaming_pointers, i, iter)
2526 SET_DECL_RENAMED_OBJECT (iter, NULL_TREE);
2527
2528 VEC_free (tree, gc, global_renaming_pointers);
2529 }
2530
2531 /* Return true if VALUE is a known to be a multiple of FACTOR, which must be
2532 a power of 2. */
2533
2534 bool
2535 value_factor_p (tree value, HOST_WIDE_INT factor)
2536 {
2537 if (host_integerp (value, 1))
2538 return tree_low_cst (value, 1) % factor == 0;
2539
2540 if (TREE_CODE (value) == MULT_EXPR)
2541 return (value_factor_p (TREE_OPERAND (value, 0), factor)
2542 || value_factor_p (TREE_OPERAND (value, 1), factor));
2543
2544 return false;
2545 }
2546
/* Given two consecutive field decls PREV_FIELD and CURR_FIELD, return true
   unless we can prove these 2 fields are laid out in such a way that no gap
   exists between the end of PREV_FIELD and the beginning of CURR_FIELD.
   OFFSET is the distance in bits between the end of PREV_FIELD and the
   starting position of CURR_FIELD.  It is ignored if null.  */

static bool
potential_alignment_gap (tree prev_field, tree curr_field, tree offset)
{
  /* If this is the first field of the record, there cannot be any gap.  */
  if (!prev_field)
    return false;

  /* If the previous field is a union type, then return False: The only
     time when such a field is not the last field of the record is when
     there are other components at fixed positions after it (meaning there
     was a rep clause for every field), in which case we don't want the
     alignment constraint to override them.  */
  if (TREE_CODE (TREE_TYPE (prev_field)) == QUAL_UNION_TYPE)
    return false;

  /* If the distance between the end of prev_field and the beginning of
     curr_field is constant, then there is a gap if the value of this
     constant is not null.  */
  if (offset && host_integerp (offset, 1))
    return !integer_zerop (offset);

  /* If the size and position of the previous field are constant,
     then check the sum of this size and position.  There will be a gap
     iff it is not multiple of the current field alignment.  */
  if (host_integerp (DECL_SIZE (prev_field), 1)
      && host_integerp (bit_position (prev_field), 1))
    return ((tree_low_cst (bit_position (prev_field), 1)
	     + tree_low_cst (DECL_SIZE (prev_field), 1))
	    % DECL_ALIGN (curr_field) != 0);

  /* If both the position and size of the previous field are multiples
     of the current field alignment, there cannot be any gap.  */
  if (value_factor_p (bit_position (prev_field), DECL_ALIGN (curr_field))
      && value_factor_p (DECL_SIZE (prev_field), DECL_ALIGN (curr_field)))
    return false;

  /* Fallback, return that there may be a potential gap.  */
  return true;
}
2592
2593 /* Return a LABEL_DECL with LABEL_NAME. GNAT_NODE is used for the position
2594 of the decl. */
2595
2596 tree
2597 create_label_decl (tree label_name, Node_Id gnat_node)
2598 {
2599 tree label_decl
2600 = build_decl (input_location, LABEL_DECL, label_name, void_type_node);
2601
2602 DECL_MODE (label_decl) = VOIDmode;
2603
2604 /* Add this decl to the current binding level. */
2605 gnat_pushdecl (label_decl, gnat_node);
2606
2607 return label_decl;
2608 }
2609 \f
/* Return a FUNCTION_DECL node.  SUBPROG_NAME is the name of the subprogram,
   ASM_NAME is its assembler name, SUBPROG_TYPE is its type (a FUNCTION_TYPE
   node), PARAM_DECL_LIST is the list of the subprogram arguments (a list of
   PARM_DECL nodes chained through the DECL_CHAIN field).

   INLINE_FLAG, PUBLIC_FLAG, EXTERN_FLAG, ARTIFICIAL_FLAG and ATTR_LIST are
   used to set the appropriate fields in the FUNCTION_DECL.  GNAT_NODE is
   used for the position of the decl.  */

tree
create_subprog_decl (tree subprog_name, tree asm_name, tree subprog_type,
		     tree param_decl_list, bool inline_flag, bool public_flag,
		     bool extern_flag, bool artificial_flag,
		     struct attrib *attr_list, Node_Id gnat_node)
{
  tree subprog_decl = build_decl (input_location, FUNCTION_DECL, subprog_name,
				  subprog_type);
  /* The RESULT_DECL holds the returned value, with the return type taken
     from the FUNCTION_TYPE.  */
  tree result_decl = build_decl (input_location, RESULT_DECL, NULL_TREE,
				 TREE_TYPE (subprog_type));
  DECL_ARGUMENTS (subprog_decl) = param_decl_list;

  /* If this is a non-inline function nested inside an inlined external
     function, we cannot honor both requests without cloning the nested
     function in the current unit since it is private to the other unit.
     We could inline the nested function as well but it's probably better
     to err on the side of too little inlining.  */
  if (!inline_flag
      && !public_flag
      && current_function_decl
      && DECL_DECLARED_INLINE_P (current_function_decl)
      && DECL_EXTERNAL (current_function_decl))
    DECL_DECLARED_INLINE_P (current_function_decl) = 0;

  DECL_ARTIFICIAL (subprog_decl) = artificial_flag;
  DECL_EXTERNAL (subprog_decl) = extern_flag;
  DECL_DECLARED_INLINE_P (subprog_decl) = inline_flag;
  /* Do not warn about failure to inline compiler-generated inlines.  */
  DECL_NO_INLINE_WARNING_P (subprog_decl) = inline_flag && artificial_flag;

  TREE_PUBLIC (subprog_decl) = public_flag;
  /* Propagate the qualifiers from the FUNCTION_TYPE onto the decl.  */
  TREE_READONLY (subprog_decl) = TYPE_READONLY (subprog_type);
  TREE_THIS_VOLATILE (subprog_decl) = TYPE_VOLATILE (subprog_type);
  TREE_SIDE_EFFECTS (subprog_decl) = TYPE_VOLATILE (subprog_type);

  DECL_ARTIFICIAL (result_decl) = 1;
  DECL_IGNORED_P (result_decl) = 1;
  /* TREE_ADDRESSABLE on the FUNCTION_TYPE is this front-end's flag for a
     result returned by invisible reference (see fntype_same_flags_p).  */
  DECL_BY_REFERENCE (result_decl) = TREE_ADDRESSABLE (subprog_type);
  DECL_RESULT (subprog_decl) = result_decl;

  if (asm_name)
    {
      SET_DECL_ASSEMBLER_NAME (subprog_decl, asm_name);

      /* The expand_main_function circuitry expects "main_identifier_node" to
	 designate the DECL_NAME of the 'main' entry point, in turn expected
	 to be declared as the "main" function literally by default.  Ada
	 program entry points are typically declared with a different name
	 within the binder generated file, exported as 'main' to satisfy the
	 system expectations.  Force main_identifier_node in this case.  */
      if (asm_name == main_identifier_node)
	DECL_NAME (subprog_decl) = main_identifier_node;
    }

  /* Add this decl to the current binding level.  */
  gnat_pushdecl (subprog_decl, gnat_node);

  /* Now deal with the source-level attributes, if any.  */
  process_attributes (subprog_decl, attr_list);

  /* Output the assembler code and/or RTL for the declaration.  */
  rest_of_decl_compilation (subprog_decl, global_bindings_p (), 0);

  return subprog_decl;
}
2682 \f
2683 /* Set up the framework for generating code for SUBPROG_DECL, a subprogram
2684 body. This routine needs to be invoked before processing the declarations
2685 appearing in the subprogram. */
2686
2687 void
2688 begin_subprog_body (tree subprog_decl)
2689 {
2690 tree param_decl;
2691
2692 announce_function (subprog_decl);
2693
2694 /* This function is being defined. */
2695 TREE_STATIC (subprog_decl) = 1;
2696
2697 current_function_decl = subprog_decl;
2698
2699 /* Enter a new binding level and show that all the parameters belong to
2700 this function. */
2701 gnat_pushlevel ();
2702
2703 for (param_decl = DECL_ARGUMENTS (subprog_decl); param_decl;
2704 param_decl = DECL_CHAIN (param_decl))
2705 DECL_CONTEXT (param_decl) = subprog_decl;
2706
2707 make_decl_rtl (subprog_decl);
2708 }
2709
2710 /* Finish translating the current subprogram and set its BODY. */
2711
2712 void
2713 end_subprog_body (tree body)
2714 {
2715 tree fndecl = current_function_decl;
2716
2717 /* Attach the BLOCK for this level to the function and pop the level. */
2718 BLOCK_SUPERCONTEXT (current_binding_level->block) = fndecl;
2719 DECL_INITIAL (fndecl) = current_binding_level->block;
2720 gnat_poplevel ();
2721
2722 /* Mark the RESULT_DECL as being in this subprogram. */
2723 DECL_CONTEXT (DECL_RESULT (fndecl)) = fndecl;
2724
2725 /* The body should be a BIND_EXPR whose BLOCK is the top-level one. */
2726 if (TREE_CODE (body) == BIND_EXPR)
2727 {
2728 BLOCK_SUPERCONTEXT (BIND_EXPR_BLOCK (body)) = fndecl;
2729 DECL_INITIAL (fndecl) = BIND_EXPR_BLOCK (body);
2730 }
2731
2732 DECL_SAVED_TREE (fndecl) = body;
2733
2734 current_function_decl = decl_function_context (fndecl);
2735 }
2736
2737 /* Wrap up compilation of SUBPROG_DECL, a subprogram body. */
2738
2739 void
2740 rest_of_subprog_body_compilation (tree subprog_decl)
2741 {
2742 /* We cannot track the location of errors past this point. */
2743 error_gnat_node = Empty;
2744
2745 /* If we're only annotating types, don't actually compile this function. */
2746 if (type_annotate_only)
2747 return;
2748
2749 /* Dump functions before gimplification. */
2750 dump_function (TDI_original, subprog_decl);
2751
2752 if (!decl_function_context (subprog_decl))
2753 cgraph_finalize_function (subprog_decl, false);
2754 else
2755 /* Register this function with cgraph just far enough to get it
2756 added to our parent's nested function list. */
2757 (void) cgraph_get_create_node (subprog_decl);
2758 }
2759
/* Register DECL, a built-in function, by pushing it on the current binding
   level with no associated GNAT node, and return it.  */

tree
gnat_builtin_function (tree decl)
{
  gnat_pushdecl (decl, Empty);
  return decl;
}
2766
2767 /* Return an integer type with the number of bits of precision given by
2768 PRECISION. UNSIGNEDP is nonzero if the type is unsigned; otherwise
2769 it is a signed type. */
2770
2771 tree
2772 gnat_type_for_size (unsigned precision, int unsignedp)
2773 {
2774 tree t;
2775 char type_name[20];
2776
2777 if (precision <= 2 * MAX_BITS_PER_WORD
2778 && signed_and_unsigned_types[precision][unsignedp])
2779 return signed_and_unsigned_types[precision][unsignedp];
2780
2781 if (unsignedp)
2782 t = make_unsigned_type (precision);
2783 else
2784 t = make_signed_type (precision);
2785
2786 if (precision <= 2 * MAX_BITS_PER_WORD)
2787 signed_and_unsigned_types[precision][unsignedp] = t;
2788
2789 if (!TYPE_NAME (t))
2790 {
2791 sprintf (type_name, "%sSIGNED_%d", unsignedp ? "UN" : "", precision);
2792 TYPE_NAME (t) = get_identifier (type_name);
2793 }
2794
2795 return t;
2796 }
2797
2798 /* Likewise for floating-point types. */
2799
2800 static tree
2801 float_type_for_precision (int precision, enum machine_mode mode)
2802 {
2803 tree t;
2804 char type_name[20];
2805
2806 if (float_types[(int) mode])
2807 return float_types[(int) mode];
2808
2809 float_types[(int) mode] = t = make_node (REAL_TYPE);
2810 TYPE_PRECISION (t) = precision;
2811 layout_type (t);
2812
2813 gcc_assert (TYPE_MODE (t) == mode);
2814 if (!TYPE_NAME (t))
2815 {
2816 sprintf (type_name, "FLOAT_%d", precision);
2817 TYPE_NAME (t) = get_identifier (type_name);
2818 }
2819
2820 return t;
2821 }
2822
2823 /* Return a data type that has machine mode MODE. UNSIGNEDP selects
2824 an unsigned type; otherwise a signed type is returned. */
2825
2826 tree
2827 gnat_type_for_mode (enum machine_mode mode, int unsignedp)
2828 {
2829 if (mode == BLKmode)
2830 return NULL_TREE;
2831
2832 if (mode == VOIDmode)
2833 return void_type_node;
2834
2835 if (COMPLEX_MODE_P (mode))
2836 return NULL_TREE;
2837
2838 if (SCALAR_FLOAT_MODE_P (mode))
2839 return float_type_for_precision (GET_MODE_PRECISION (mode), mode);
2840
2841 if (SCALAR_INT_MODE_P (mode))
2842 return gnat_type_for_size (GET_MODE_BITSIZE (mode), unsignedp);
2843
2844 if (VECTOR_MODE_P (mode))
2845 {
2846 enum machine_mode inner_mode = GET_MODE_INNER (mode);
2847 tree inner_type = gnat_type_for_mode (inner_mode, unsignedp);
2848 if (inner_type)
2849 return build_vector_type_for_mode (inner_type, mode);
2850 }
2851
2852 return NULL_TREE;
2853 }
2854
2855 /* Return the unsigned version of a TYPE_NODE, a scalar type. */
2856
2857 tree
2858 gnat_unsigned_type (tree type_node)
2859 {
2860 tree type = gnat_type_for_size (TYPE_PRECISION (type_node), 1);
2861
2862 if (TREE_CODE (type_node) == INTEGER_TYPE && TYPE_MODULAR_P (type_node))
2863 {
2864 type = copy_node (type);
2865 TREE_TYPE (type) = type_node;
2866 }
2867 else if (TREE_TYPE (type_node)
2868 && TREE_CODE (TREE_TYPE (type_node)) == INTEGER_TYPE
2869 && TYPE_MODULAR_P (TREE_TYPE (type_node)))
2870 {
2871 type = copy_node (type);
2872 TREE_TYPE (type) = TREE_TYPE (type_node);
2873 }
2874
2875 return type;
2876 }
2877
2878 /* Return the signed version of a TYPE_NODE, a scalar type. */
2879
2880 tree
2881 gnat_signed_type (tree type_node)
2882 {
2883 tree type = gnat_type_for_size (TYPE_PRECISION (type_node), 0);
2884
2885 if (TREE_CODE (type_node) == INTEGER_TYPE && TYPE_MODULAR_P (type_node))
2886 {
2887 type = copy_node (type);
2888 TREE_TYPE (type) = type_node;
2889 }
2890 else if (TREE_TYPE (type_node)
2891 && TREE_CODE (TREE_TYPE (type_node)) == INTEGER_TYPE
2892 && TYPE_MODULAR_P (TREE_TYPE (type_node)))
2893 {
2894 type = copy_node (type);
2895 TREE_TYPE (type) = TREE_TYPE (type_node);
2896 }
2897
2898 return type;
2899 }
2900
2901 /* Return 1 if the types T1 and T2 are compatible, i.e. if they can be
2902 transparently converted to each other. */
2903
2904 int
2905 gnat_types_compatible_p (tree t1, tree t2)
2906 {
2907 enum tree_code code;
2908
2909 /* This is the default criterion. */
2910 if (TYPE_MAIN_VARIANT (t1) == TYPE_MAIN_VARIANT (t2))
2911 return 1;
2912
2913 /* We only check structural equivalence here. */
2914 if ((code = TREE_CODE (t1)) != TREE_CODE (t2))
2915 return 0;
2916
2917 /* Vector types are also compatible if they have the same number of subparts
2918 and the same form of (scalar) element type. */
2919 if (code == VECTOR_TYPE
2920 && TYPE_VECTOR_SUBPARTS (t1) == TYPE_VECTOR_SUBPARTS (t2)
2921 && TREE_CODE (TREE_TYPE (t1)) == TREE_CODE (TREE_TYPE (t2))
2922 && TYPE_PRECISION (TREE_TYPE (t1)) == TYPE_PRECISION (TREE_TYPE (t2)))
2923 return 1;
2924
2925 /* Array types are also compatible if they are constrained and have the same
2926 domain(s) and the same component type. */
2927 if (code == ARRAY_TYPE
2928 && (TYPE_DOMAIN (t1) == TYPE_DOMAIN (t2)
2929 || (TYPE_DOMAIN (t1)
2930 && TYPE_DOMAIN (t2)
2931 && tree_int_cst_equal (TYPE_MIN_VALUE (TYPE_DOMAIN (t1)),
2932 TYPE_MIN_VALUE (TYPE_DOMAIN (t2)))
2933 && tree_int_cst_equal (TYPE_MAX_VALUE (TYPE_DOMAIN (t1)),
2934 TYPE_MAX_VALUE (TYPE_DOMAIN (t2)))))
2935 && (TREE_TYPE (t1) == TREE_TYPE (t2)
2936 || (TREE_CODE (TREE_TYPE (t1)) == ARRAY_TYPE
2937 && gnat_types_compatible_p (TREE_TYPE (t1), TREE_TYPE (t2)))))
2938 return 1;
2939
2940 return 0;
2941 }
2942
2943 /* Return true if EXPR is a useless type conversion. */
2944
2945 bool
2946 gnat_useless_type_conversion (tree expr)
2947 {
2948 if (CONVERT_EXPR_P (expr)
2949 || TREE_CODE (expr) == VIEW_CONVERT_EXPR
2950 || TREE_CODE (expr) == NON_LVALUE_EXPR)
2951 return gnat_types_compatible_p (TREE_TYPE (expr),
2952 TREE_TYPE (TREE_OPERAND (expr, 0)));
2953
2954 return false;
2955 }
2956
2957 /* Return true if T, a FUNCTION_TYPE, has the specified list of flags. */
2958
2959 bool
2960 fntype_same_flags_p (const_tree t, tree cico_list, bool return_unconstrained_p,
2961 bool return_by_direct_ref_p, bool return_by_invisi_ref_p)
2962 {
2963 return TYPE_CI_CO_LIST (t) == cico_list
2964 && TYPE_RETURN_UNCONSTRAINED_P (t) == return_unconstrained_p
2965 && TYPE_RETURN_BY_DIRECT_REF_P (t) == return_by_direct_ref_p
2966 && TREE_ADDRESSABLE (t) == return_by_invisi_ref_p;
2967 }
2968 \f
/* EXP is an expression for the size of an object.  If this size contains
   discriminant references, replace them with the maximum (if MAX_P) or
   minimum (if !MAX_P) possible value of the discriminant.

   The walk is recursive: each operand is processed with a polarity chosen
   so that the overall bound remains an upper (resp. lower) bound.  */

tree
max_size (tree exp, bool max_p)
{
  enum tree_code code = TREE_CODE (exp);
  tree type = TREE_TYPE (exp);

  switch (TREE_CODE_CLASS (code))
    {
    case tcc_declaration:
    case tcc_constant:
      /* Declarations and constants contain no discriminant references.  */
      return exp;

    case tcc_vl_exp:
      if (code == CALL_EXPR)
	{
	  tree t, *argarray;
	  int n, i;

	  /* Try to fold the call away first; if that succeeds, process
	     the resulting expression instead.  */
	  t = maybe_inline_call_in_expr (exp);
	  if (t)
	    return max_size (t, max_p);

	  /* Otherwise rebuild the call with each argument processed.  */
	  n = call_expr_nargs (exp);
	  gcc_assert (n > 0);
	  argarray = XALLOCAVEC (tree, n);
	  for (i = 0; i < n; i++)
	    argarray[i] = max_size (CALL_EXPR_ARG (exp, i), max_p);
	  return build_call_array (type, CALL_EXPR_FN (exp), n, argarray);
	}
      break;

    case tcc_reference:
      /* If this contains a PLACEHOLDER_EXPR, it is the thing we want to
	 modify.  Otherwise, we treat it like a variable.  */
      if (!CONTAINS_PLACEHOLDER_P (exp))
	return exp;

      /* Replace the discriminant reference by the extreme value of the
	 type of the referenced field (operand 1 of the COMPONENT_REF).  */
      type = TREE_TYPE (TREE_OPERAND (exp, 1));
      return
	max_size (max_p ? TYPE_MAX_VALUE (type) : TYPE_MIN_VALUE (type), true);

    case tcc_comparison:
      /* A comparison yields at most 1 and at least 0.  */
      return max_p ? size_one_node : size_zero_node;

    case tcc_unary:
    case tcc_binary:
    case tcc_expression:
      switch (TREE_CODE_LENGTH (code))
	{
	case 1:
	  if (code == SAVE_EXPR)
	    return exp;
	  else if (code == NON_LVALUE_EXPR)
	    return max_size (TREE_OPERAND (exp, 0), max_p);
	  else
	    /* NEGATE_EXPR flips the polarity: the maximum of -X is minus
	       the minimum of X.  */
	    return
	      fold_build1 (code, type,
			   max_size (TREE_OPERAND (exp, 0),
				     code == NEGATE_EXPR ? !max_p : max_p));

	case 2:
	  if (code == COMPOUND_EXPR)
	    return max_size (TREE_OPERAND (exp, 1), max_p);

	  {
	    tree lhs = max_size (TREE_OPERAND (exp, 0), max_p);
	    /* The RHS of a MINUS_EXPR is processed with the opposite
	       polarity, since subtracting a smaller value yields a larger
	       result.  */
	    tree rhs = max_size (TREE_OPERAND (exp, 1),
				 code == MINUS_EXPR ? !max_p : max_p);

	    /* Special-case wanting the maximum value of a MIN_EXPR.
	       In that case, if one side overflows, return the other.
	       sizetype is signed, but we know sizes are non-negative.
	       Likewise, handle a MINUS_EXPR or PLUS_EXPR with the LHS
	       overflowing and the RHS a variable.  */
	    if (max_p
		&& code == MIN_EXPR
		&& TREE_CODE (rhs) == INTEGER_CST
		&& TREE_OVERFLOW (rhs))
	      return lhs;
	    else if (max_p
		     && code == MIN_EXPR
		     && TREE_CODE (lhs) == INTEGER_CST
		     && TREE_OVERFLOW (lhs))
	      return rhs;
	    else if ((code == MINUS_EXPR || code == PLUS_EXPR)
		     && TREE_CODE (lhs) == INTEGER_CST
		     && TREE_OVERFLOW (lhs)
		     && !TREE_CONSTANT (rhs))
	      return lhs;
	    else
	      return fold_build2 (code, type, lhs, rhs);
	  }

	case 3:
	  /* For a conditional, bound both arms and take the extremum,
	     since either branch may be taken.  */
	  if (code == COND_EXPR)
	    return fold_build2 (max_p ? MAX_EXPR : MIN_EXPR, type,
				max_size (TREE_OPERAND (exp, 1), max_p),
				max_size (TREE_OPERAND (exp, 2), max_p));
	}

      /* Other tree classes cannot happen.  */
    default:
      break;
    }

  gcc_unreachable ();
}
3080 \f
/* Build a template of type TEMPLATE_TYPE from the array bounds of ARRAY_TYPE.
   EXPR is an expression that we can use to locate any PLACEHOLDER_EXPRs.
   Return a constructor for the template.  */

tree
build_template (tree template_type, tree array_type, tree expr)
{
  VEC(constructor_elt,gc) *template_elts = NULL;
  tree bound_list = NULL_TREE;
  tree field;

  /* Strip padding and justified-modular wrapper records to get at the
     underlying array type.  */
  while (TREE_CODE (array_type) == RECORD_TYPE
	 && (TYPE_PADDING_P (array_type)
	     || TYPE_JUSTIFIED_MODULAR_P (array_type)))
    array_type = TREE_TYPE (TYPE_FIELDS (array_type));

  /* If the array type records its actual bounds, start from that list.  */
  if (TREE_CODE (array_type) == ARRAY_TYPE
      || (TREE_CODE (array_type) == INTEGER_TYPE
	  && TYPE_HAS_ACTUAL_BOUNDS_P (array_type)))
    bound_list = TYPE_ACTUAL_BOUNDS (array_type);

  /* First make the list for a CONSTRUCTOR for the template.  Go down the
     field list of the template instead of the type chain because this
     array might be an Ada array of arrays and we can't tell where the
     nested arrays stop being the underlying object.  */

  /* Template fields come in (min, max) pairs, hence the double DECL_CHAIN
     step per iteration; each iteration also advances either the bound list
     or the array type, one dimension at a time.  */
  for (field = TYPE_FIELDS (template_type); field;
       (bound_list
	? (bound_list = TREE_CHAIN (bound_list))
	: (array_type = TREE_TYPE (array_type))),
       field = DECL_CHAIN (DECL_CHAIN (field)))
    {
      tree bounds, min, max;

      /* If we have a bound list, get the bounds from there.  Likewise
	 for an ARRAY_TYPE.  Otherwise, if expr is a PARM_DECL with
	 DECL_BY_COMPONENT_PTR_P, use the bounds of the field in the template.
	 This will give us a maximum range.  */
      if (bound_list)
	bounds = TREE_VALUE (bound_list);
      else if (TREE_CODE (array_type) == ARRAY_TYPE)
	bounds = TYPE_INDEX_TYPE (TYPE_DOMAIN (array_type));
      else if (expr && TREE_CODE (expr) == PARM_DECL
	       && DECL_BY_COMPONENT_PTR_P (expr))
	bounds = TREE_TYPE (field);
      else
	gcc_unreachable ();

      min = convert (TREE_TYPE (field), TYPE_MIN_VALUE (bounds));
      max = convert (TREE_TYPE (DECL_CHAIN (field)), TYPE_MAX_VALUE (bounds));

      /* If either MIN or MAX involve a PLACEHOLDER_EXPR, we must
	 substitute it from OBJECT.  */
      min = SUBSTITUTE_PLACEHOLDER_IN_EXPR (min, expr);
      max = SUBSTITUTE_PLACEHOLDER_IN_EXPR (max, expr);

      CONSTRUCTOR_APPEND_ELT (template_elts, field, min);
      CONSTRUCTOR_APPEND_ELT (template_elts, DECL_CHAIN (field), max);
    }

  return gnat_build_constructor (template_type, template_elts);
}
3143 \f
3144 /* Helper routine to make a descriptor field. FIELD_LIST is the list of decls
3145 being built; the new decl is chained on to the front of the list. */
3146
3147 static tree
3148 make_descriptor_field (const char *name, tree type, tree rec_type,
3149 tree initial, tree field_list)
3150 {
3151 tree field
3152 = create_field_decl (get_identifier (name), type, rec_type, NULL_TREE,
3153 NULL_TREE, 0, 0);
3154
3155 DECL_INITIAL (field) = initial;
3156 DECL_CHAIN (field) = field_list;
3157 return field;
3158 }
3159
/* Build a 32-bit VMS descriptor from a Mechanism_Type, which must specify a
   descriptor type, and the GCC type of an object.  Each FIELD_DECL in the
   type contains in its DECL_INITIAL the expression to use when a constructor
   is made for the type.  GNAT_ENTITY is an entity used to print out an error
   message if the mechanism cannot be applied to an object of that type and
   also for the name.  */

tree
build_vms_descriptor32 (tree type, Mechanism_Type mech, Entity_Id gnat_entity)
{
  tree record_type = make_node (RECORD_TYPE);
  tree pointer32_type, pointer64_type;
  tree field_list = NULL_TREE;
  int klass, ndim, i, dtype = 0;
  tree inner_type, tem;
  tree *idx_arr;

  /* If TYPE is an unconstrained array, use the underlying array type.  */
  if (TREE_CODE (type) == UNCONSTRAINED_ARRAY_TYPE)
    type = TREE_TYPE (TREE_TYPE (TYPE_FIELDS (TREE_TYPE (type))));

  /* If this is an array, compute the number of dimensions in the array,
     get the index types, and point to the inner type.  */
  if (TREE_CODE (type) != ARRAY_TYPE)
    ndim = 0;
  else
    for (ndim = 1, inner_type = type;
	 TREE_CODE (TREE_TYPE (inner_type)) == ARRAY_TYPE
	 && TYPE_MULTI_ARRAY_P (TREE_TYPE (inner_type));
	 ndim++, inner_type = TREE_TYPE (inner_type))
      ;

  /* NOTE(review): INNER_TYPE is only set when TYPE is an array; the
     By_Descriptor_A LENGTH computation below reads it, so that mechanism
     presumably only occurs with array types -- confirm with callers.  */

  idx_arr = XALLOCAVEC (tree, ndim);

  /* For a Fortran-convention array (other than NCA), record the index
     types in reverse dimension order.  */
  if (mech != By_Descriptor_NCA && mech != By_Short_Descriptor_NCA
      && TREE_CODE (type) == ARRAY_TYPE && TYPE_CONVENTION_FORTRAN_P (type))
    for (i = ndim - 1, inner_type = type;
	 i >= 0;
	 i--, inner_type = TREE_TYPE (inner_type))
      idx_arr[i] = TYPE_DOMAIN (inner_type);
  else
    for (i = 0, inner_type = type;
	 i < ndim;
	 i++, inner_type = TREE_TYPE (inner_type))
      idx_arr[i] = TYPE_DOMAIN (inner_type);

  /* Now get the DTYPE value.  */
  switch (TREE_CODE (type))
    {
    case INTEGER_TYPE:
    case ENUMERAL_TYPE:
    case BOOLEAN_TYPE:
      /* VAX floating-point types are encoded by their decimal digits.  */
      if (TYPE_VAX_FLOATING_POINT_P (type))
	switch (tree_low_cst (TYPE_DIGITS_VALUE (type), 1))
	  {
	  case 6:
	    dtype = 10;
	    break;
	  case 9:
	    dtype = 11;
	    break;
	  case 15:
	    dtype = 27;
	    break;
	  }
      else
	switch (GET_MODE_BITSIZE (TYPE_MODE (type)))
	  {
	  case 8:
	    dtype = TYPE_UNSIGNED (type) ? 2 : 6;
	    break;
	  case 16:
	    dtype = TYPE_UNSIGNED (type) ? 3 : 7;
	    break;
	  case 32:
	    dtype = TYPE_UNSIGNED (type) ? 4 : 8;
	    break;
	  case 64:
	    dtype = TYPE_UNSIGNED (type) ? 5 : 9;
	    break;
	  case 128:
	    dtype = TYPE_UNSIGNED (type) ? 25 : 26;
	    break;
	  }
      break;

    case REAL_TYPE:
      dtype = GET_MODE_BITSIZE (TYPE_MODE (type)) == 32 ? 52 : 53;
      break;

    case COMPLEX_TYPE:
      if (TREE_CODE (TREE_TYPE (type)) == INTEGER_TYPE
	  && TYPE_VAX_FLOATING_POINT_P (type))
	switch (tree_low_cst (TYPE_DIGITS_VALUE (type), 1))
	  {
	  case 6:
	    dtype = 12;
	    break;
	  case 9:
	    dtype = 13;
	    break;
	  case 15:
	    dtype = 29;
	  }
      else
	dtype = GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (type))) == 32 ? 54: 55;
      break;

    case ARRAY_TYPE:
      dtype = 14;
      break;

    default:
      break;
    }

  /* Get the CLASS value.  */
  switch (mech)
    {
    case By_Descriptor_A:
    case By_Short_Descriptor_A:
      klass = 4;
      break;
    case By_Descriptor_NCA:
    case By_Short_Descriptor_NCA:
      klass = 10;
      break;
    case By_Descriptor_SB:
    case By_Short_Descriptor_SB:
      klass = 15;
      break;
    case By_Descriptor:
    case By_Short_Descriptor:
    case By_Descriptor_S:
    case By_Short_Descriptor_S:
    default:
      klass = 1;
      break;
    }

  /* Make the type for a descriptor for VMS.  The first four fields are the
     same for all types.  */
  field_list
    = make_descriptor_field ("LENGTH", gnat_type_for_size (16, 1), record_type,
			     size_in_bytes ((mech == By_Descriptor_A
					     || mech == By_Short_Descriptor_A)
					    ? inner_type : type),
			     field_list);
  field_list
    = make_descriptor_field ("DTYPE", gnat_type_for_size (8, 1), record_type,
			     size_int (dtype), field_list);
  field_list
    = make_descriptor_field ("CLASS", gnat_type_for_size (8, 1), record_type,
			     size_int (klass), field_list);

  pointer32_type = build_pointer_type_for_mode (type, SImode, false);
  pointer64_type = build_pointer_type_for_mode (type, DImode, false);

  /* Ensure that only 32-bit pointers are passed in 32-bit descriptors.  Note
     that we cannot build a template call to the CE routine as it would get a
     wrong source location; instead we use a second placeholder for it.  */
  tem = build_unary_op (ADDR_EXPR, pointer64_type,
			build0 (PLACEHOLDER_EXPR, type));
  tem = build3 (COND_EXPR, pointer32_type,
		Pmode != SImode
		? build_binary_op (GE_EXPR, boolean_type_node, tem,
				   build_int_cstu (pointer64_type, 0x80000000))
		: boolean_false_node,
		build0 (PLACEHOLDER_EXPR, void_type_node),
		convert (pointer32_type, tem));

  field_list
    = make_descriptor_field ("POINTER", pointer32_type, record_type, tem,
			     field_list);

  /* The remaining fields depend on the descriptor class.  */
  switch (mech)
    {
    case By_Descriptor:
    case By_Short_Descriptor:
    case By_Descriptor_S:
    case By_Short_Descriptor_S:
      break;

    case By_Descriptor_SB:
    case By_Short_Descriptor_SB:
      field_list
	= make_descriptor_field ("SB_L1", gnat_type_for_size (32, 1),
				 record_type,
				 (TREE_CODE (type) == ARRAY_TYPE
				  ? TYPE_MIN_VALUE (TYPE_DOMAIN (type))
				  : size_zero_node),
				 field_list);
      field_list
	= make_descriptor_field ("SB_U1", gnat_type_for_size (32, 1),
				 record_type,
				 (TREE_CODE (type) == ARRAY_TYPE
				  ? TYPE_MAX_VALUE (TYPE_DOMAIN (type))
				  : size_zero_node),
				 field_list);
      break;

    case By_Descriptor_A:
    case By_Short_Descriptor_A:
    case By_Descriptor_NCA:
    case By_Short_Descriptor_NCA:
      field_list
	= make_descriptor_field ("SCALE", gnat_type_for_size (8, 1),
				 record_type, size_zero_node, field_list);

      field_list
	= make_descriptor_field ("DIGITS", gnat_type_for_size (8, 1),
				 record_type, size_zero_node, field_list);

      field_list
	= make_descriptor_field ("AFLAGS", gnat_type_for_size (8, 1),
				 record_type,
				 size_int ((mech == By_Descriptor_NCA
					    || mech == By_Short_Descriptor_NCA)
					   ? 0
					   /* Set FL_COLUMN, FL_COEFF, and
					      FL_BOUNDS.  */
					   : (TREE_CODE (type) == ARRAY_TYPE
					      && TYPE_CONVENTION_FORTRAN_P
						 (type)
					      ? 224 : 192)),
				 field_list);

      field_list
	= make_descriptor_field ("DIMCT", gnat_type_for_size (8, 1),
				 record_type, size_int (ndim), field_list);

      field_list
	= make_descriptor_field ("ARSIZE", gnat_type_for_size (32, 1),
				 record_type, size_in_bytes (type),
				 field_list);

      /* Now build a pointer to the 0,0,0... element.  */
      tem = build0 (PLACEHOLDER_EXPR, type);
      for (i = 0, inner_type = type; i < ndim;
	   i++, inner_type = TREE_TYPE (inner_type))
	tem = build4 (ARRAY_REF, TREE_TYPE (inner_type), tem,
		      convert (TYPE_DOMAIN (inner_type), size_zero_node),
		      NULL_TREE, NULL_TREE);

      field_list
	= make_descriptor_field ("A0", pointer32_type, record_type,
				 build1 (ADDR_EXPR, pointer32_type, tem),
				 field_list);

      /* Next come the addressing coefficients.  */
      tem = size_one_node;
      for (i = 0; i < ndim; i++)
	{
	  char fname[3];
	  /* Length of dimension I: (max - min) + 1, scaled by the running
	     product TEM.  */
	  tree idx_length
	    = size_binop (MULT_EXPR, tem,
			  size_binop (PLUS_EXPR,
				      size_binop (MINUS_EXPR,
						  TYPE_MAX_VALUE (idx_arr[i]),
						  TYPE_MIN_VALUE (idx_arr[i])),
				      size_int (1)));

	  /* NCA descriptors use S<i> (stride) fields, the others M<i>.  */
	  fname[0] = ((mech == By_Descriptor_NCA ||
		       mech == By_Short_Descriptor_NCA) ? 'S' : 'M');
	  fname[1] = '0' + i, fname[2] = 0;
	  field_list
	    = make_descriptor_field (fname, gnat_type_for_size (32, 1),
				     record_type, idx_length, field_list);

	  if (mech == By_Descriptor_NCA || mech == By_Short_Descriptor_NCA)
	    tem = idx_length;
	}

      /* Finally here are the bounds.  */
      for (i = 0; i < ndim; i++)
	{
	  char fname[3];

	  fname[0] = 'L', fname[1] = '0' + i, fname[2] = 0;
	  field_list
	    = make_descriptor_field (fname, gnat_type_for_size (32, 1),
				     record_type, TYPE_MIN_VALUE (idx_arr[i]),
				     field_list);

	  fname[0] = 'U';
	  field_list
	    = make_descriptor_field (fname, gnat_type_for_size (32, 1),
				     record_type, TYPE_MAX_VALUE (idx_arr[i]),
				     field_list);
	}
      break;

    default:
      post_error ("unsupported descriptor type for &", gnat_entity);
    }

  TYPE_NAME (record_type) = create_concat_name (gnat_entity, "DESC");
  /* The fields were chained in reverse order; put them back in declaration
     order before laying out the record.  */
  finish_record_type (record_type, nreverse (field_list), 0, false);
  return record_type;
}
3460
3461 /* Build a 64-bit VMS descriptor from a Mechanism_Type, which must specify a
3462 descriptor type, and the GCC type of an object. Each FIELD_DECL in the
3463 type contains in its DECL_INITIAL the expression to use when a constructor
3464 is made for the type. GNAT_ENTITY is an entity used to print out an error
3465 message if the mechanism cannot be applied to an object of that type and
3466 also for the name. */
3467
3468 tree
3469 build_vms_descriptor (tree type, Mechanism_Type mech, Entity_Id gnat_entity)
3470 {
3471 tree record_type = make_node (RECORD_TYPE);
3472 tree pointer64_type;
3473 tree field_list = NULL_TREE;
3474 int klass, ndim, i, dtype = 0;
3475 tree inner_type, tem;
3476 tree *idx_arr;
3477
3478 /* If TYPE is an unconstrained array, use the underlying array type. */
3479 if (TREE_CODE (type) == UNCONSTRAINED_ARRAY_TYPE)
3480 type = TREE_TYPE (TREE_TYPE (TYPE_FIELDS (TREE_TYPE (type))));
3481
3482 /* If this is an array, compute the number of dimensions in the array,
3483 get the index types, and point to the inner type. */
3484 if (TREE_CODE (type) != ARRAY_TYPE)
3485 ndim = 0;
3486 else
3487 for (ndim = 1, inner_type = type;
3488 TREE_CODE (TREE_TYPE (inner_type)) == ARRAY_TYPE
3489 && TYPE_MULTI_ARRAY_P (TREE_TYPE (inner_type));
3490 ndim++, inner_type = TREE_TYPE (inner_type))
3491 ;
3492
3493 idx_arr = XALLOCAVEC (tree, ndim);
3494
3495 if (mech != By_Descriptor_NCA
3496 && TREE_CODE (type) == ARRAY_TYPE && TYPE_CONVENTION_FORTRAN_P (type))
3497 for (i = ndim - 1, inner_type = type;
3498 i >= 0;
3499 i--, inner_type = TREE_TYPE (inner_type))
3500 idx_arr[i] = TYPE_DOMAIN (inner_type);
3501 else
3502 for (i = 0, inner_type = type;
3503 i < ndim;
3504 i++, inner_type = TREE_TYPE (inner_type))
3505 idx_arr[i] = TYPE_DOMAIN (inner_type);
3506
3507 /* Now get the DTYPE value. */
3508 switch (TREE_CODE (type))
3509 {
3510 case INTEGER_TYPE:
3511 case ENUMERAL_TYPE:
3512 case BOOLEAN_TYPE:
3513 if (TYPE_VAX_FLOATING_POINT_P (type))
3514 switch (tree_low_cst (TYPE_DIGITS_VALUE (type), 1))
3515 {
3516 case 6:
3517 dtype = 10;
3518 break;
3519 case 9:
3520 dtype = 11;
3521 break;
3522 case 15:
3523 dtype = 27;
3524 break;
3525 }
3526 else
3527 switch (GET_MODE_BITSIZE (TYPE_MODE (type)))
3528 {
3529 case 8:
3530 dtype = TYPE_UNSIGNED (type) ? 2 : 6;
3531 break;
3532 case 16:
3533 dtype = TYPE_UNSIGNED (type) ? 3 : 7;
3534 break;
3535 case 32:
3536 dtype = TYPE_UNSIGNED (type) ? 4 : 8;
3537 break;
3538 case 64:
3539 dtype = TYPE_UNSIGNED (type) ? 5 : 9;
3540 break;
3541 case 128:
3542 dtype = TYPE_UNSIGNED (type) ? 25 : 26;
3543 break;
3544 }
3545 break;
3546
3547 case REAL_TYPE:
3548 dtype = GET_MODE_BITSIZE (TYPE_MODE (type)) == 32 ? 52 : 53;
3549 break;
3550
3551 case COMPLEX_TYPE:
3552 if (TREE_CODE (TREE_TYPE (type)) == INTEGER_TYPE
3553 && TYPE_VAX_FLOATING_POINT_P (type))
3554 switch (tree_low_cst (TYPE_DIGITS_VALUE (type), 1))
3555 {
3556 case 6:
3557 dtype = 12;
3558 break;
3559 case 9:
3560 dtype = 13;
3561 break;
3562 case 15:
3563 dtype = 29;
3564 }
3565 else
3566 dtype = GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (type))) == 32 ? 54: 55;
3567 break;
3568
3569 case ARRAY_TYPE:
3570 dtype = 14;
3571 break;
3572
3573 default:
3574 break;
3575 }
3576
3577 /* Get the CLASS value. */
3578 switch (mech)
3579 {
3580 case By_Descriptor_A:
3581 klass = 4;
3582 break;
3583 case By_Descriptor_NCA:
3584 klass = 10;
3585 break;
3586 case By_Descriptor_SB:
3587 klass = 15;
3588 break;
3589 case By_Descriptor:
3590 case By_Descriptor_S:
3591 default:
3592 klass = 1;
3593 break;
3594 }
3595
3596 /* Make the type for a 64-bit descriptor for VMS. The first six fields
3597 are the same for all types. */
3598 field_list
3599 = make_descriptor_field ("MBO", gnat_type_for_size (16, 1),
3600 record_type, size_int (1), field_list);
3601 field_list
3602 = make_descriptor_field ("DTYPE", gnat_type_for_size (8, 1),
3603 record_type, size_int (dtype), field_list);
3604 field_list
3605 = make_descriptor_field ("CLASS", gnat_type_for_size (8, 1),
3606 record_type, size_int (klass), field_list);
3607 field_list
3608 = make_descriptor_field ("MBMO", gnat_type_for_size (32, 1),
3609 record_type, size_int (-1), field_list);
3610 field_list
3611 = make_descriptor_field ("LENGTH", gnat_type_for_size (64, 1),
3612 record_type,
3613 size_in_bytes (mech == By_Descriptor_A
3614 ? inner_type : type),
3615 field_list);
3616
3617 pointer64_type = build_pointer_type_for_mode (type, DImode, false);
3618
3619 field_list
3620 = make_descriptor_field ("POINTER", pointer64_type, record_type,
3621 build_unary_op (ADDR_EXPR, pointer64_type,
3622 build0 (PLACEHOLDER_EXPR, type)),
3623 field_list);
3624
3625 switch (mech)
3626 {
3627 case By_Descriptor:
3628 case By_Descriptor_S:
3629 break;
3630
3631 case By_Descriptor_SB:
3632 field_list
3633 = make_descriptor_field ("SB_L1", gnat_type_for_size (64, 1),
3634 record_type,
3635 (TREE_CODE (type) == ARRAY_TYPE
3636 ? TYPE_MIN_VALUE (TYPE_DOMAIN (type))
3637 : size_zero_node),
3638 field_list);
3639 field_list
3640 = make_descriptor_field ("SB_U1", gnat_type_for_size (64, 1),
3641 record_type,
3642 (TREE_CODE (type) == ARRAY_TYPE
3643 ? TYPE_MAX_VALUE (TYPE_DOMAIN (type))
3644 : size_zero_node),
3645 field_list);
3646 break;
3647
3648 case By_Descriptor_A:
3649 case By_Descriptor_NCA:
3650 field_list
3651 = make_descriptor_field ("SCALE", gnat_type_for_size (8, 1),
3652 record_type, size_zero_node, field_list);
3653
3654 field_list
3655 = make_descriptor_field ("DIGITS", gnat_type_for_size (8, 1),
3656 record_type, size_zero_node, field_list);
3657
3658 dtype = (mech == By_Descriptor_NCA
3659 ? 0
3660 /* Set FL_COLUMN, FL_COEFF, and
3661 FL_BOUNDS. */
3662 : (TREE_CODE (type) == ARRAY_TYPE
3663 && TYPE_CONVENTION_FORTRAN_P (type)
3664 ? 224 : 192));
3665 field_list
3666 = make_descriptor_field ("AFLAGS", gnat_type_for_size (8, 1),
3667 record_type, size_int (dtype),
3668 field_list);
3669
3670 field_list
3671 = make_descriptor_field ("DIMCT", gnat_type_for_size (8, 1),
3672 record_type, size_int (ndim), field_list);
3673
3674 field_list
3675 = make_descriptor_field ("MBZ", gnat_type_for_size (32, 1),
3676 record_type, size_int (0), field_list);
3677 field_list
3678 = make_descriptor_field ("ARSIZE", gnat_type_for_size (64, 1),
3679 record_type, size_in_bytes (type),
3680 field_list);
3681
3682 /* Now build a pointer to the 0,0,0... element. */
3683 tem = build0 (PLACEHOLDER_EXPR, type);
3684 for (i = 0, inner_type = type; i < ndim;
3685 i++, inner_type = TREE_TYPE (inner_type))
3686 tem = build4 (ARRAY_REF, TREE_TYPE (inner_type), tem,
3687 convert (TYPE_DOMAIN (inner_type), size_zero_node),
3688 NULL_TREE, NULL_TREE);
3689
3690 field_list
3691 = make_descriptor_field ("A0", pointer64_type, record_type,
3692 build1 (ADDR_EXPR, pointer64_type, tem),
3693 field_list);
3694
3695 /* Next come the addressing coefficients. */
3696 tem = size_one_node;
3697 for (i = 0; i < ndim; i++)
3698 {
3699 char fname[3];
3700 tree idx_length
3701 = size_binop (MULT_EXPR, tem,
3702 size_binop (PLUS_EXPR,
3703 size_binop (MINUS_EXPR,
3704 TYPE_MAX_VALUE (idx_arr[i]),
3705 TYPE_MIN_VALUE (idx_arr[i])),
3706 size_int (1)));
3707
3708 fname[0] = (mech == By_Descriptor_NCA ? 'S' : 'M');
3709 fname[1] = '0' + i, fname[2] = 0;
3710 field_list
3711 = make_descriptor_field (fname, gnat_type_for_size (64, 1),
3712 record_type, idx_length, field_list);
3713
3714 if (mech == By_Descriptor_NCA)
3715 tem = idx_length;
3716 }
3717
3718 /* Finally here are the bounds. */
3719 for (i = 0; i < ndim; i++)
3720 {
3721 char fname[3];
3722
3723 fname[0] = 'L', fname[1] = '0' + i, fname[2] = 0;
3724 field_list
3725 = make_descriptor_field (fname, gnat_type_for_size (64, 1),
3726 record_type,
3727 TYPE_MIN_VALUE (idx_arr[i]), field_list);
3728
3729 fname[0] = 'U';
3730 field_list
3731 = make_descriptor_field (fname, gnat_type_for_size (64, 1),
3732 record_type,
3733 TYPE_MAX_VALUE (idx_arr[i]), field_list);
3734 }
3735 break;
3736
3737 default:
3738 post_error ("unsupported descriptor type for &", gnat_entity);
3739 }
3740
3741 TYPE_NAME (record_type) = create_concat_name (gnat_entity, "DESC64");
3742 finish_record_type (record_type, nreverse (field_list), 0, false);
3743 return record_type;
3744 }
3745
3746 /* Fill in a VMS descriptor of GNU_TYPE for GNU_EXPR and return the result.
3747 GNAT_ACTUAL is the actual parameter for which the descriptor is built. */
3748
3749 tree
3750 fill_vms_descriptor (tree gnu_type, tree gnu_expr, Node_Id gnat_actual)
3751 {
3752 VEC(constructor_elt,gc) *v = NULL;
3753 tree field;
3754
3755 gnu_expr = maybe_unconstrained_array (gnu_expr);
3756 gnu_expr = gnat_protect_expr (gnu_expr);
3757 gnat_mark_addressable (gnu_expr);
3758
3759 /* We may need to substitute both GNU_EXPR and a CALL_EXPR to the raise CE
3760 routine in case we have a 32-bit descriptor. */
3761 gnu_expr = build2 (COMPOUND_EXPR, void_type_node,
3762 build_call_raise (CE_Range_Check_Failed, gnat_actual,
3763 N_Raise_Constraint_Error),
3764 gnu_expr);
3765
3766 for (field = TYPE_FIELDS (gnu_type); field; field = DECL_CHAIN (field))
3767 {
3768 tree value
3769 = convert (TREE_TYPE (field),
3770 SUBSTITUTE_PLACEHOLDER_IN_EXPR (DECL_INITIAL (field),
3771 gnu_expr));
3772 CONSTRUCTOR_APPEND_ELT (v, field, value);
3773 }
3774
3775 return gnat_build_constructor (gnu_type, v);
3776 }
3777
/* Convert GNU_EXPR, a pointer to a 64bit VMS descriptor, to GNU_TYPE, a
   regular pointer or fat pointer type.  GNAT_SUBPROG is the subprogram to
   which the VMS descriptor is passed; it is only used here to post an error
   message when the descriptor class is unsupported.  */

static tree
convert_vms_descriptor64 (tree gnu_type, tree gnu_expr, Entity_Id gnat_subprog)
{
  /* GNU_EXPR is a pointer to the descriptor record; dereference it so we
     can read the individual fields below.  */
  tree desc_type = TREE_TYPE (TREE_TYPE (gnu_expr));
  tree desc = build1 (INDIRECT_REF, desc_type, gnu_expr);
  /* The CLASS field is the 3rd field in the descriptor.  */
  tree klass = DECL_CHAIN (DECL_CHAIN (TYPE_FIELDS (desc_type)));
  /* The POINTER field is the 6th field in the descriptor.  */
  tree pointer = DECL_CHAIN (DECL_CHAIN (DECL_CHAIN (klass)));

  /* Retrieve the value of the POINTER field.  */
  tree gnu_expr64
    = build3 (COMPONENT_REF, TREE_TYPE (pointer), desc, pointer, NULL_TREE);

  /* For a regular pointer type, the address in the POINTER field is all
     we need.  */
  if (POINTER_TYPE_P (gnu_type))
    return convert (gnu_type, gnu_expr64);

  /* For a fat pointer, we must build the {data pointer, bounds pointer}
     pair, which requires synthesizing or locating a bounds template.  */
  else if (TYPE_IS_FAT_POINTER_P (gnu_type))
    {
      tree p_array_type = TREE_TYPE (TYPE_FIELDS (gnu_type));
      tree p_bounds_type = TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (gnu_type)));
      tree template_type = TREE_TYPE (p_bounds_type);
      tree min_field = TYPE_FIELDS (template_type);
      tree max_field = DECL_CHAIN (TYPE_FIELDS (template_type));
      tree template_tree, template_addr, aflags, dimct, t, u;
      /* See the head comment of build_vms_descriptor.  */
      int iklass = TREE_INT_CST_LOW (DECL_INITIAL (klass));
      tree lfield, ufield;
      VEC(constructor_elt,gc) *v;

      /* Convert POINTER to the pointer-to-array type.  */
      gnu_expr64 = convert (p_array_type, gnu_expr64);

      switch (iklass)
	{
	case 1:  /* Class S  */
	case 15: /* Class SB */
	  /* Build {1, LENGTH} template; LENGTH64 is the 5th field.  */
	  v = VEC_alloc (constructor_elt, gc, 2);
	  t = DECL_CHAIN (DECL_CHAIN (klass));
	  t = build3 (COMPONENT_REF, TREE_TYPE (t), desc, t, NULL_TREE);
	  CONSTRUCTOR_APPEND_ELT (v, min_field,
				  convert (TREE_TYPE (min_field),
					   integer_one_node));
	  CONSTRUCTOR_APPEND_ELT (v, max_field,
				  convert (TREE_TYPE (max_field), t));
	  template_tree = gnat_build_constructor (template_type, v);
	  template_addr = build_unary_op (ADDR_EXPR, NULL_TREE, template_tree);

	  /* For class S, we are done.  */
	  if (iklass == 1)
	    break;

	  /* Test that we really have a SB descriptor, like DEC Ada.  */
	  t = build3 (COMPONENT_REF, TREE_TYPE (klass), desc, klass, NULL);
	  u = convert (TREE_TYPE (klass), DECL_INITIAL (klass));
	  u = build_binary_op (EQ_EXPR, boolean_type_node, t, u);
	  /* If so, there is already a template in the descriptor and
	     it is located right after the POINTER field.  The fields are
	     64bits so they must be repacked.  */
	  t = DECL_CHAIN (pointer);
	  lfield = build3 (COMPONENT_REF, TREE_TYPE (t), desc, t, NULL_TREE);
	  lfield = convert (TREE_TYPE (TYPE_FIELDS (template_type)), lfield);

	  t = DECL_CHAIN (t);
	  ufield = build3 (COMPONENT_REF, TREE_TYPE (t), desc, t, NULL_TREE);
	  ufield = convert
	    (TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (template_type))), ufield);

	  /* Build the template in the form of a constructor.  */
	  v = VEC_alloc (constructor_elt, gc, 2);
	  CONSTRUCTOR_APPEND_ELT (v, TYPE_FIELDS (template_type), lfield);
	  CONSTRUCTOR_APPEND_ELT (v, DECL_CHAIN (TYPE_FIELDS (template_type)),
				  ufield);
	  template_tree = gnat_build_constructor (template_type, v);

	  /* Otherwise use the {1, LENGTH} template we build above.  */
	  template_addr = build3 (COND_EXPR, p_bounds_type, u,
				  build_unary_op (ADDR_EXPR, p_bounds_type,
						  template_tree),
				  template_addr);
	  break;

	case 4:  /* Class A */
	  /* The AFLAGS field is the 3rd field after the pointer in the
	     descriptor.  */
	  t = DECL_CHAIN (DECL_CHAIN (DECL_CHAIN (pointer)));
	  aflags = build3 (COMPONENT_REF, TREE_TYPE (t), desc, t, NULL_TREE);
	  /* The DIMCT field is the next field in the descriptor after
	     aflags.  */
	  t = DECL_CHAIN (t);
	  dimct = build3 (COMPONENT_REF, TREE_TYPE (t), desc, t, NULL_TREE);
	  /* Raise CONSTRAINT_ERROR if either more than 1 dimension
	     or FL_COEFF or FL_BOUNDS not set.  192 is the mask of the
	     FL_COEFF and FL_BOUNDS flags in AFLAGS.  */
	  u = build_int_cst (TREE_TYPE (aflags), 192);
	  u = build_binary_op (TRUTH_OR_EXPR, boolean_type_node,
			       build_binary_op (NE_EXPR, boolean_type_node,
						dimct,
						convert (TREE_TYPE (dimct),
							 size_one_node)),
			       build_binary_op (NE_EXPR, boolean_type_node,
						build2 (BIT_AND_EXPR,
							TREE_TYPE (aflags),
							aflags, u),
						u));
	  /* There is already a template in the descriptor and it is located
	     in block 3.  The fields are 64bits so they must be repacked.  */
	  t = DECL_CHAIN (DECL_CHAIN (DECL_CHAIN (DECL_CHAIN (DECL_CHAIN
              (t)))));
	  lfield = build3 (COMPONENT_REF, TREE_TYPE (t), desc, t, NULL_TREE);
	  lfield = convert (TREE_TYPE (TYPE_FIELDS (template_type)), lfield);

	  t = DECL_CHAIN (t);
	  ufield = build3 (COMPONENT_REF, TREE_TYPE (t), desc, t, NULL_TREE);
	  ufield = convert
	    (TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (template_type))), ufield);

	  /* Build the template in the form of a constructor.  */
	  v = VEC_alloc (constructor_elt, gc, 2);
	  CONSTRUCTOR_APPEND_ELT (v, TYPE_FIELDS (template_type), lfield);
	  CONSTRUCTOR_APPEND_ELT (v, DECL_CHAIN (TYPE_FIELDS (template_type)),
				  ufield);
	  template_tree = gnat_build_constructor (template_type, v);
	  /* Wrap the template in the dimension/flags check built above so
	     that an invalid descriptor raises Constraint_Error at run time.  */
	  template_tree = build3 (COND_EXPR, template_type, u,
				  build_call_raise (CE_Length_Check_Failed, Empty,
						    N_Raise_Constraint_Error),
				  template_tree);
	  template_addr
	    = build_unary_op (ADDR_EXPR, p_bounds_type, template_tree);
	  break;

	case 10: /* Class NCA */
	default:
	  post_error ("unsupported descriptor type for &", gnat_subprog);
	  template_addr = integer_zero_node;
	  break;
	}

      /* Build the fat pointer in the form of a constructor.  */
      v = VEC_alloc (constructor_elt, gc, 2);
      CONSTRUCTOR_APPEND_ELT (v, TYPE_FIELDS (gnu_type), gnu_expr64);
      CONSTRUCTOR_APPEND_ELT (v, DECL_CHAIN (TYPE_FIELDS (gnu_type)),
			      template_addr);
      return gnat_build_constructor (gnu_type, v);
    }

  else
    gcc_unreachable ();
}
3931
/* Convert GNU_EXPR, a pointer to a 32bit VMS descriptor, to GNU_TYPE, a
   regular pointer or fat pointer type.  GNAT_SUBPROG is the subprogram to
   which the VMS descriptor is passed; it is only used here to post an error
   message when the descriptor class is unsupported.  */

static tree
convert_vms_descriptor32 (tree gnu_type, tree gnu_expr, Entity_Id gnat_subprog)
{
  /* GNU_EXPR is a pointer to the descriptor record; dereference it so we
     can read the individual fields below.  */
  tree desc_type = TREE_TYPE (TREE_TYPE (gnu_expr));
  tree desc = build1 (INDIRECT_REF, desc_type, gnu_expr);
  /* The CLASS field is the 3rd field in the descriptor.  */
  tree klass = DECL_CHAIN (DECL_CHAIN (TYPE_FIELDS (desc_type)));
  /* The POINTER field is the 4th field in the descriptor.  */
  tree pointer = DECL_CHAIN (klass);

  /* Retrieve the value of the POINTER field.  */
  tree gnu_expr32
    = build3 (COMPONENT_REF, TREE_TYPE (pointer), desc, pointer, NULL_TREE);

  /* For a regular pointer type, the address in the POINTER field is all
     we need.  */
  if (POINTER_TYPE_P (gnu_type))
    return convert (gnu_type, gnu_expr32);

  /* For a fat pointer, we must build the {data pointer, bounds pointer}
     pair, which requires synthesizing or locating a bounds template.  */
  else if (TYPE_IS_FAT_POINTER_P (gnu_type))
    {
      tree p_array_type = TREE_TYPE (TYPE_FIELDS (gnu_type));
      tree p_bounds_type = TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (gnu_type)));
      tree template_type = TREE_TYPE (p_bounds_type);
      tree min_field = TYPE_FIELDS (template_type);
      tree max_field = DECL_CHAIN (TYPE_FIELDS (template_type));
      tree template_tree, template_addr, aflags, dimct, t, u;
      /* See the head comment of build_vms_descriptor.  */
      int iklass = TREE_INT_CST_LOW (DECL_INITIAL (klass));
      VEC(constructor_elt,gc) *v;

      /* Convert POINTER to the pointer-to-array type.  */
      gnu_expr32 = convert (p_array_type, gnu_expr32);

      switch (iklass)
	{
	case 1:  /* Class S  */
	case 15: /* Class SB */
	  /* Build {1, LENGTH} template; LENGTH is the 1st field.  */
	  v = VEC_alloc (constructor_elt, gc, 2);
	  t = TYPE_FIELDS (desc_type);
	  t = build3 (COMPONENT_REF, TREE_TYPE (t), desc, t, NULL_TREE);
	  CONSTRUCTOR_APPEND_ELT (v, min_field,
				  convert (TREE_TYPE (min_field),
					   integer_one_node));
	  CONSTRUCTOR_APPEND_ELT (v, max_field,
				  convert (TREE_TYPE (max_field), t));
	  template_tree = gnat_build_constructor (template_type, v);
	  template_addr = build_unary_op (ADDR_EXPR, NULL_TREE, template_tree);

	  /* For class S, we are done.  */
	  if (iklass == 1)
	    break;

	  /* Test that we really have a SB descriptor, like DEC Ada.  */
	  t = build3 (COMPONENT_REF, TREE_TYPE (klass), desc, klass, NULL);
	  u = convert (TREE_TYPE (klass), DECL_INITIAL (klass));
	  u = build_binary_op (EQ_EXPR, boolean_type_node, t, u);
	  /* If so, there is already a template in the descriptor and
	     it is located right after the POINTER field.  */
	  t = DECL_CHAIN (pointer);
	  template_tree
	    = build3 (COMPONENT_REF, TREE_TYPE (t), desc, t, NULL_TREE);
	  /* Otherwise use the {1, LENGTH} template we build above.  */
	  template_addr = build3 (COND_EXPR, p_bounds_type, u,
				  build_unary_op (ADDR_EXPR, p_bounds_type,
						  template_tree),
				  template_addr);
	  break;

	case 4:  /* Class A */
	  /* The AFLAGS field is the 7th field in the descriptor.  */
	  t = DECL_CHAIN (DECL_CHAIN (DECL_CHAIN (pointer)));
	  aflags = build3 (COMPONENT_REF, TREE_TYPE (t), desc, t, NULL_TREE);
	  /* The DIMCT field is the 8th field in the descriptor.  */
	  t = DECL_CHAIN (t);
	  dimct = build3 (COMPONENT_REF, TREE_TYPE (t), desc, t, NULL_TREE);
	  /* Raise CONSTRAINT_ERROR if either more than 1 dimension
	     or FL_COEFF or FL_BOUNDS not set.  192 is the mask of the
	     FL_COEFF and FL_BOUNDS flags in AFLAGS.  */
	  u = build_int_cst (TREE_TYPE (aflags), 192);
	  u = build_binary_op (TRUTH_OR_EXPR, boolean_type_node,
			       build_binary_op (NE_EXPR, boolean_type_node,
						dimct,
						convert (TREE_TYPE (dimct),
							 size_one_node)),
			       build_binary_op (NE_EXPR, boolean_type_node,
						build2 (BIT_AND_EXPR,
							TREE_TYPE (aflags),
							aflags, u),
						u));
	  /* There is already a template in the descriptor and it is
	     located at the start of block 3 (12th field).  */
	  t = DECL_CHAIN (DECL_CHAIN (DECL_CHAIN (DECL_CHAIN (t))));
	  template_tree
	    = build3 (COMPONENT_REF, TREE_TYPE (t), desc, t, NULL_TREE);
	  /* Wrap the template in the dimension/flags check built above so
	     that an invalid descriptor raises Constraint_Error at run time.  */
	  template_tree = build3 (COND_EXPR, TREE_TYPE (t), u,
				  build_call_raise (CE_Length_Check_Failed, Empty,
						    N_Raise_Constraint_Error),
				  template_tree);
	  template_addr
	    = build_unary_op (ADDR_EXPR, p_bounds_type, template_tree);
	  break;

	case 10: /* Class NCA */
	default:
	  post_error ("unsupported descriptor type for &", gnat_subprog);
	  template_addr = integer_zero_node;
	  break;
	}

      /* Build the fat pointer in the form of a constructor.  */
      v = VEC_alloc (constructor_elt, gc, 2);
      CONSTRUCTOR_APPEND_ELT (v, TYPE_FIELDS (gnu_type), gnu_expr32);
      CONSTRUCTOR_APPEND_ELT (v, DECL_CHAIN (TYPE_FIELDS (gnu_type)),
			      template_addr);

      return gnat_build_constructor (gnu_type, v);
    }

  else
    gcc_unreachable ();
}
4056
4057 /* Convert GNU_EXPR, a pointer to a VMS descriptor, to GNU_TYPE, a regular
4058 pointer or fat pointer type. GNU_EXPR_ALT_TYPE is the alternate (32-bit)
4059 pointer type of GNU_EXPR. BY_REF is true if the result is to be used by
4060 reference. GNAT_SUBPROG is the subprogram to which the VMS descriptor is
4061 passed. */
4062
4063 tree
4064 convert_vms_descriptor (tree gnu_type, tree gnu_expr, tree gnu_expr_alt_type,
4065 bool by_ref, Entity_Id gnat_subprog)
4066 {
4067 tree desc_type = TREE_TYPE (TREE_TYPE (gnu_expr));
4068 tree desc = build1 (INDIRECT_REF, desc_type, gnu_expr);
4069 tree mbo = TYPE_FIELDS (desc_type);
4070 const char *mbostr = IDENTIFIER_POINTER (DECL_NAME (mbo));
4071 tree mbmo = DECL_CHAIN (DECL_CHAIN (DECL_CHAIN (mbo)));
4072 tree real_type, is64bit, gnu_expr32, gnu_expr64;
4073
4074 if (by_ref)
4075 real_type = TREE_TYPE (gnu_type);
4076 else
4077 real_type = gnu_type;
4078
4079 /* If the field name is not MBO, it must be 32-bit and no alternate.
4080 Otherwise primary must be 64-bit and alternate 32-bit. */
4081 if (strcmp (mbostr, "MBO") != 0)
4082 {
4083 tree ret = convert_vms_descriptor32 (real_type, gnu_expr, gnat_subprog);
4084 if (by_ref)
4085 ret = build_unary_op (ADDR_EXPR, gnu_type, ret);
4086 return ret;
4087 }
4088
4089 /* Build the test for 64-bit descriptor. */
4090 mbo = build3 (COMPONENT_REF, TREE_TYPE (mbo), desc, mbo, NULL_TREE);
4091 mbmo = build3 (COMPONENT_REF, TREE_TYPE (mbmo), desc, mbmo, NULL_TREE);
4092 is64bit
4093 = build_binary_op (TRUTH_ANDIF_EXPR, boolean_type_node,
4094 build_binary_op (EQ_EXPR, boolean_type_node,
4095 convert (integer_type_node, mbo),
4096 integer_one_node),
4097 build_binary_op (EQ_EXPR, boolean_type_node,
4098 convert (integer_type_node, mbmo),
4099 integer_minus_one_node));
4100
4101 /* Build the 2 possible end results. */
4102 gnu_expr64 = convert_vms_descriptor64 (real_type, gnu_expr, gnat_subprog);
4103 if (by_ref)
4104 gnu_expr64 = build_unary_op (ADDR_EXPR, gnu_type, gnu_expr64);
4105 gnu_expr = fold_convert (gnu_expr_alt_type, gnu_expr);
4106 gnu_expr32 = convert_vms_descriptor32 (real_type, gnu_expr, gnat_subprog);
4107 if (by_ref)
4108 gnu_expr32 = build_unary_op (ADDR_EXPR, gnu_type, gnu_expr32);
4109
4110 return build3 (COND_EXPR, gnu_type, is64bit, gnu_expr64, gnu_expr32);
4111 }
4112 \f
4113 /* Build a type to be used to represent an aliased object whose nominal type
4114 is an unconstrained array. This consists of a RECORD_TYPE containing a
4115 field of TEMPLATE_TYPE and a field of OBJECT_TYPE, which is an ARRAY_TYPE.
4116 If ARRAY_TYPE is that of an unconstrained array, this is used to represent
4117 an arbitrary unconstrained object. Use NAME as the name of the record.
4118 DEBUG_INFO_P is true if we need to write debug information for the type. */
4119
4120 tree
4121 build_unc_object_type (tree template_type, tree object_type, tree name,
4122 bool debug_info_p)
4123 {
4124 tree type = make_node (RECORD_TYPE);
4125 tree template_field
4126 = create_field_decl (get_identifier ("BOUNDS"), template_type, type,
4127 NULL_TREE, NULL_TREE, 0, 1);
4128 tree array_field
4129 = create_field_decl (get_identifier ("ARRAY"), object_type, type,
4130 NULL_TREE, NULL_TREE, 0, 1);
4131
4132 TYPE_NAME (type) = name;
4133 TYPE_CONTAINS_TEMPLATE_P (type) = 1;
4134 DECL_CHAIN (template_field) = array_field;
4135 finish_record_type (type, template_field, 0, true);
4136
4137 /* Declare it now since it will never be declared otherwise. This is
4138 necessary to ensure that its subtrees are properly marked. */
4139 create_type_decl (name, type, NULL, true, debug_info_p, Empty);
4140
4141 return type;
4142 }
4143
4144 /* Same, taking a thin or fat pointer type instead of a template type. */
4145
4146 tree
4147 build_unc_object_type_from_ptr (tree thin_fat_ptr_type, tree object_type,
4148 tree name, bool debug_info_p)
4149 {
4150 tree template_type;
4151
4152 gcc_assert (TYPE_IS_FAT_OR_THIN_POINTER_P (thin_fat_ptr_type));
4153
4154 template_type
4155 = (TYPE_IS_FAT_POINTER_P (thin_fat_ptr_type)
4156 ? TREE_TYPE (TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (thin_fat_ptr_type))))
4157 : TREE_TYPE (TYPE_FIELDS (TREE_TYPE (thin_fat_ptr_type))));
4158
4159 return
4160 build_unc_object_type (template_type, object_type, name, debug_info_p);
4161 }
4162 \f
/* Update anything previously pointing to OLD_TYPE to point to NEW_TYPE.
   In the normal case this is just two adjustments, but we have more to
   do if NEW_TYPE is an UNCONSTRAINED_ARRAY_TYPE.  */

void
update_pointer_to (tree old_type, tree new_type)
{
  tree ptr = TYPE_POINTER_TO (old_type);
  tree ref = TYPE_REFERENCE_TO (old_type);
  tree t;

  /* If this is the main variant, process all the other variants first.  */
  if (TYPE_MAIN_VARIANT (old_type) == old_type)
    for (t = TYPE_NEXT_VARIANT (old_type); t; t = TYPE_NEXT_VARIANT (t))
      update_pointer_to (t, new_type);

  /* If no pointers and no references, we are done.  */
  if (!ptr && !ref)
    return;

  /* Merge the old type qualifiers in the new type.

     Each old variant has qualifiers for specific reasons, and the new
     designated type as well.  Each set of qualifiers represents useful
     information grabbed at some point, and merging the two simply unifies
     these inputs into the final type description.

     Consider for instance a volatile type frozen after an access to constant
     type designating it; after the designated type's freeze, we get here with
     a volatile NEW_TYPE and a dummy OLD_TYPE with a readonly variant, created
     when the access type was processed.  We will make a volatile and readonly
     designated type, because that's what it really is.

     We might also get here for a non-dummy OLD_TYPE variant with different
     qualifiers than those of NEW_TYPE, for instance in some cases of pointers
     to private record type elaboration (see the comments around the call to
     this routine in gnat_to_gnu_entity <E_Access_Type>).  We have to merge
     the qualifiers in those cases too, to avoid accidentally discarding the
     initial set, and will often end up with OLD_TYPE == NEW_TYPE then.  */
  new_type
    = build_qualified_type (new_type,
			    TYPE_QUALS (old_type) | TYPE_QUALS (new_type));

  /* If old type and new type are identical, there is nothing to do.  */
  if (old_type == new_type)
    return;

  /* Otherwise, first handle the simple case.  */
  if (TREE_CODE (new_type) != UNCONSTRAINED_ARRAY_TYPE)
    {
      tree new_ptr, new_ref;

      /* If pointer or reference already points to new type, nothing to do.
	 This can happen as update_pointer_to can be invoked multiple times
	 on the same couple of types because of the type variants.  */
      if ((ptr && TREE_TYPE (ptr) == new_type)
	  || (ref && TREE_TYPE (ref) == new_type))
	return;

      /* Chain PTR and its variants at the end.  */
      new_ptr = TYPE_POINTER_TO (new_type);
      if (new_ptr)
	{
	  while (TYPE_NEXT_PTR_TO (new_ptr))
	    new_ptr = TYPE_NEXT_PTR_TO (new_ptr);
	  TYPE_NEXT_PTR_TO (new_ptr) = ptr;
	}
      else
	TYPE_POINTER_TO (new_type) = ptr;

      /* Now adjust them.  Also update the designated type recorded in the
	 cached null-bounds expression, if the pointer type has one.  */
      for (; ptr; ptr = TYPE_NEXT_PTR_TO (ptr))
	for (t = TYPE_MAIN_VARIANT (ptr); t; t = TYPE_NEXT_VARIANT (t))
	  {
	    TREE_TYPE (t) = new_type;
	    if (TYPE_NULL_BOUNDS (t))
	      TREE_TYPE (TREE_OPERAND (TYPE_NULL_BOUNDS (t), 0)) = new_type;
	  }

      /* Chain REF and its variants at the end.  */
      new_ref = TYPE_REFERENCE_TO (new_type);
      if (new_ref)
	{
	  while (TYPE_NEXT_REF_TO (new_ref))
	    new_ref = TYPE_NEXT_REF_TO (new_ref);
	  TYPE_NEXT_REF_TO (new_ref) = ref;
	}
      else
	TYPE_REFERENCE_TO (new_type) = ref;

      /* Now adjust them.  */
      for (; ref; ref = TYPE_NEXT_REF_TO (ref))
	for (t = TYPE_MAIN_VARIANT (ref); t; t = TYPE_NEXT_VARIANT (t))
	  TREE_TYPE (t) = new_type;

      /* Finally disconnect OLD_TYPE from the chains we just moved.  */
      TYPE_POINTER_TO (old_type) = NULL_TREE;
      TYPE_REFERENCE_TO (old_type) = NULL_TREE;
    }

  /* Now deal with the unconstrained array case.  In this case the pointer
     is actually a record where both fields are pointers to dummy nodes.
     Turn them into pointers to the correct types using update_pointer_to.
     Likewise for the pointer to the object record (thin pointer).  */
  else
    {
      tree new_ptr = TYPE_POINTER_TO (new_type);

      gcc_assert (TYPE_IS_FAT_POINTER_P (ptr));

      /* If PTR already points to NEW_TYPE, nothing to do.  This can happen
	 since update_pointer_to can be invoked multiple times on the same
	 couple of types because of the type variants.  */
      if (TYPE_UNCONSTRAINED_ARRAY (ptr) == new_type)
	return;

      /* First field: the pointer to the array data.  */
      update_pointer_to
	(TREE_TYPE (TREE_TYPE (TYPE_FIELDS (ptr))),
	 TREE_TYPE (TREE_TYPE (TYPE_FIELDS (new_ptr))));

      /* Second field: the pointer to the bounds template.  */
      update_pointer_to
	(TREE_TYPE (TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (ptr)))),
	 TREE_TYPE (TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (new_ptr)))));

      update_pointer_to (TYPE_OBJECT_RECORD_TYPE (old_type),
			 TYPE_OBJECT_RECORD_TYPE (new_type));

      TYPE_POINTER_TO (old_type) = NULL_TREE;
    }
}
4292 \f
/* Convert EXPR, a pointer to a constrained array, into a pointer to an
   unconstrained one.  This involves making or finding a template.  TYPE is
   the fat pointer type to convert to; the result is a CONSTRUCTOR of TYPE
   with a pointer-to-array field and a pointer-to-bounds field.  */

static tree
convert_to_fat_pointer (tree type, tree expr)
{
  tree template_type = TREE_TYPE (TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (type))));
  tree p_array_type = TREE_TYPE (TYPE_FIELDS (type));
  tree etype = TREE_TYPE (expr);
  tree template_tree;
  VEC(constructor_elt,gc) *v = VEC_alloc (constructor_elt, gc, 2);

  /* If EXPR is null, make a fat pointer that contains a null pointer to the
     array (compare_fat_pointers ensures that this is the full discriminant)
     and a valid pointer to the bounds.  This latter property is necessary
     since the compiler can hoist the load of the bounds done through it.  */
  if (integer_zerop (expr))
    {
      tree ptr_template_type = TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (type)));
      tree null_bounds, t;

      /* Reuse the null-bounds object cached on the pointer-to-template
	 type, or build and cache one the first time around.  */
      if (TYPE_NULL_BOUNDS (ptr_template_type))
	null_bounds = TYPE_NULL_BOUNDS (ptr_template_type);
      else
	{
	  /* The template type can still be dummy at this point so we build an
	     empty constructor.  The middle-end will fill it in with zeros.  */
	  t = build_constructor (template_type, NULL);
	  TREE_CONSTANT (t) = TREE_STATIC (t) = 1;
	  null_bounds = build_unary_op (ADDR_EXPR, NULL_TREE, t);
	  SET_TYPE_NULL_BOUNDS (ptr_template_type, null_bounds);
	}

      CONSTRUCTOR_APPEND_ELT (v, TYPE_FIELDS (type),
			      fold_convert (p_array_type, null_pointer_node));
      CONSTRUCTOR_APPEND_ELT (v, DECL_CHAIN (TYPE_FIELDS (type)), null_bounds);
      t = build_constructor (type, v);
      /* Do not set TREE_CONSTANT so as to force T to static memory.  */
      TREE_CONSTANT (t) = 0;
      TREE_STATIC (t) = 1;

      return t;
    }

  /* If EXPR is a thin pointer, make template and data from the record.  */
  if (TYPE_IS_THIN_POINTER_P (etype))
    {
      tree field = TYPE_FIELDS (TREE_TYPE (etype));

      expr = gnat_protect_expr (expr);
      if (TREE_CODE (expr) == ADDR_EXPR)
	expr = TREE_OPERAND (expr, 0);
      else
	{
	  /* If we have a TYPE_UNCONSTRAINED_ARRAY attached to the RECORD_TYPE,
	     the thin pointer value has been shifted so we first need to shift
	     it back to get the template address.  */
	  if (TYPE_UNCONSTRAINED_ARRAY (TREE_TYPE (etype)))
	    expr
	      = build_binary_op (POINTER_PLUS_EXPR, etype, expr,
				 fold_build1 (NEGATE_EXPR, sizetype,
					      byte_position
					      (DECL_CHAIN (field))));
	  expr = build1 (INDIRECT_REF, TREE_TYPE (etype), expr);
	}

      /* The first field of the record is the template, the second the
	 array data; take the template by value and the data by address.  */
      template_tree = build_component_ref (expr, NULL_TREE, field, false);
      expr = build_unary_op (ADDR_EXPR, NULL_TREE,
			     build_component_ref (expr, NULL_TREE,
						  DECL_CHAIN (field), false));
    }

  /* Otherwise, build the constructor for the template.  */
  else
    template_tree = build_template (template_type, TREE_TYPE (etype), expr);

  /* The final result is a constructor for the fat pointer.

     If EXPR is an argument of a foreign convention subprogram, the type it
     points to is directly the component type.  In this case, the expression
     type may not match the corresponding FIELD_DECL type at this point, so we
     call "convert" here to fix that up if necessary.  This type consistency is
     required, for instance because it ensures that possible later folding of
     COMPONENT_REFs against this constructor always yields something of the
     same type as the initial reference.

     Note that the call to "build_template" above is still fine because it
     will only refer to the provided TEMPLATE_TYPE in this case.  */
  CONSTRUCTOR_APPEND_ELT (v, TYPE_FIELDS (type),
			  convert (p_array_type, expr));
  CONSTRUCTOR_APPEND_ELT (v, DECL_CHAIN (TYPE_FIELDS (type)),
			  build_unary_op (ADDR_EXPR, NULL_TREE,
					  template_tree));
  return gnat_build_constructor (type, v);
}
4388 \f
4389 /* Create an expression whose value is that of EXPR,
4390 converted to type TYPE. The TREE_TYPE of the value
4391 is always TYPE. This function implements all reasonable
4392 conversions; callers should filter out those that are
4393 not permitted by the language being compiled. */
4394
4395 tree
4396 convert (tree type, tree expr)
4397 {
4398 tree etype = TREE_TYPE (expr);
4399 enum tree_code ecode = TREE_CODE (etype);
4400 enum tree_code code = TREE_CODE (type);
4401
4402 /* If the expression is already of the right type, we are done. */
4403 if (etype == type)
4404 return expr;
4405
4406 /* If both input and output have padding and are of variable size, do this
4407 as an unchecked conversion. Likewise if one is a mere variant of the
4408 other, so we avoid a pointless unpad/repad sequence. */
4409 else if (code == RECORD_TYPE && ecode == RECORD_TYPE
4410 && TYPE_PADDING_P (type) && TYPE_PADDING_P (etype)
4411 && (!TREE_CONSTANT (TYPE_SIZE (type))
4412 || !TREE_CONSTANT (TYPE_SIZE (etype))
4413 || TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (etype)
4414 || TYPE_NAME (TREE_TYPE (TYPE_FIELDS (type)))
4415 == TYPE_NAME (TREE_TYPE (TYPE_FIELDS (etype)))))
4416 ;
4417
4418 /* If the output type has padding, convert to the inner type and make a
4419 constructor to build the record, unless a variable size is involved. */
4420 else if (code == RECORD_TYPE && TYPE_PADDING_P (type))
4421 {
4422 VEC(constructor_elt,gc) *v;
4423
4424 /* If we previously converted from another type and our type is
4425 of variable size, remove the conversion to avoid the need for
4426 variable-sized temporaries. Likewise for a conversion between
4427 original and packable version. */
4428 if (TREE_CODE (expr) == VIEW_CONVERT_EXPR
4429 && (!TREE_CONSTANT (TYPE_SIZE (type))
4430 || (ecode == RECORD_TYPE
4431 && TYPE_NAME (etype)
4432 == TYPE_NAME (TREE_TYPE (TREE_OPERAND (expr, 0))))))
4433 expr = TREE_OPERAND (expr, 0);
4434
4435 /* If we are just removing the padding from expr, convert the original
4436 object if we have variable size in order to avoid the need for some
4437 variable-sized temporaries. Likewise if the padding is a variant
4438 of the other, so we avoid a pointless unpad/repad sequence. */
4439 if (TREE_CODE (expr) == COMPONENT_REF
4440 && TYPE_IS_PADDING_P (TREE_TYPE (TREE_OPERAND (expr, 0)))
4441 && (!TREE_CONSTANT (TYPE_SIZE (type))
4442 || TYPE_MAIN_VARIANT (type)
4443 == TYPE_MAIN_VARIANT (TREE_TYPE (TREE_OPERAND (expr, 0)))
4444 || (ecode == RECORD_TYPE
4445 && TYPE_NAME (etype)
4446 == TYPE_NAME (TREE_TYPE (TYPE_FIELDS (type))))))
4447 return convert (type, TREE_OPERAND (expr, 0));
4448
4449 /* If the inner type is of self-referential size and the expression type
4450 is a record, do this as an unchecked conversion. But first pad the
4451 expression if possible to have the same size on both sides. */
4452 if (ecode == RECORD_TYPE
4453 && CONTAINS_PLACEHOLDER_P (DECL_SIZE (TYPE_FIELDS (type))))
4454 {
4455 if (TREE_CODE (TYPE_SIZE (etype)) == INTEGER_CST)
4456 expr = convert (maybe_pad_type (etype, TYPE_SIZE (type), 0, Empty,
4457 false, false, false, true),
4458 expr);
4459 return unchecked_convert (type, expr, false);
4460 }
4461
4462 /* If we are converting between array types with variable size, do the
4463 final conversion as an unchecked conversion, again to avoid the need
4464 for some variable-sized temporaries. If valid, this conversion is
4465 very likely purely technical and without real effects. */
4466 if (ecode == ARRAY_TYPE
4467 && TREE_CODE (TREE_TYPE (TYPE_FIELDS (type))) == ARRAY_TYPE
4468 && !TREE_CONSTANT (TYPE_SIZE (etype))
4469 && !TREE_CONSTANT (TYPE_SIZE (type)))
4470 return unchecked_convert (type,
4471 convert (TREE_TYPE (TYPE_FIELDS (type)),
4472 expr),
4473 false);
4474
4475 v = VEC_alloc (constructor_elt, gc, 1);
4476 CONSTRUCTOR_APPEND_ELT (v, TYPE_FIELDS (type),
4477 convert (TREE_TYPE (TYPE_FIELDS (type)), expr));
4478 return gnat_build_constructor (type, v);
4479 }
4480
4481 /* If the input type has padding, remove it and convert to the output type.
4482 The conditions ordering is arranged to ensure that the output type is not
4483 a padding type here, as it is not clear whether the conversion would
4484 always be correct if this was to happen. */
4485 else if (ecode == RECORD_TYPE && TYPE_PADDING_P (etype))
4486 {
4487 tree unpadded;
4488
4489 /* If we have just converted to this padded type, just get the
4490 inner expression. */
4491 if (TREE_CODE (expr) == CONSTRUCTOR
4492 && !VEC_empty (constructor_elt, CONSTRUCTOR_ELTS (expr))
4493 && VEC_index (constructor_elt, CONSTRUCTOR_ELTS (expr), 0)->index
4494 == TYPE_FIELDS (etype))
4495 unpadded
4496 = VEC_index (constructor_elt, CONSTRUCTOR_ELTS (expr), 0)->value;
4497
4498 /* Otherwise, build an explicit component reference. */
4499 else
4500 unpadded
4501 = build_component_ref (expr, NULL_TREE, TYPE_FIELDS (etype), false);
4502
4503 return convert (type, unpadded);
4504 }
4505
4506 /* If the input is a biased type, adjust first. */
4507 if (ecode == INTEGER_TYPE && TYPE_BIASED_REPRESENTATION_P (etype))
4508 return convert (type, fold_build2 (PLUS_EXPR, TREE_TYPE (etype),
4509 fold_convert (TREE_TYPE (etype),
4510 expr),
4511 TYPE_MIN_VALUE (etype)));
4512
4513 /* If the input is a justified modular type, we need to extract the actual
4514 object before converting it to any other type with the exceptions of an
4515 unconstrained array or of a mere type variant. It is useful to avoid the
4516 extraction and conversion in the type variant case because it could end
4517 up replacing a VAR_DECL expr by a constructor and we might be about the
4518 take the address of the result. */
4519 if (ecode == RECORD_TYPE && TYPE_JUSTIFIED_MODULAR_P (etype)
4520 && code != UNCONSTRAINED_ARRAY_TYPE
4521 && TYPE_MAIN_VARIANT (type) != TYPE_MAIN_VARIANT (etype))
4522 return convert (type, build_component_ref (expr, NULL_TREE,
4523 TYPE_FIELDS (etype), false));
4524
4525 /* If converting to a type that contains a template, convert to the data
4526 type and then build the template. */
4527 if (code == RECORD_TYPE && TYPE_CONTAINS_TEMPLATE_P (type))
4528 {
4529 tree obj_type = TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (type)));
4530 VEC(constructor_elt,gc) *v = VEC_alloc (constructor_elt, gc, 2);
4531
4532 /* If the source already has a template, get a reference to the
4533 associated array only, as we are going to rebuild a template
4534 for the target type anyway. */
4535 expr = maybe_unconstrained_array (expr);
4536
4537 CONSTRUCTOR_APPEND_ELT (v, TYPE_FIELDS (type),
4538 build_template (TREE_TYPE (TYPE_FIELDS (type)),
4539 obj_type, NULL_TREE));
4540 CONSTRUCTOR_APPEND_ELT (v, DECL_CHAIN (TYPE_FIELDS (type)),
4541 convert (obj_type, expr));
4542 return gnat_build_constructor (type, v);
4543 }
4544
4545 /* There are some cases of expressions that we process specially. */
4546 switch (TREE_CODE (expr))
4547 {
4548 case ERROR_MARK:
4549 return expr;
4550
4551 case NULL_EXPR:
4552 /* Just set its type here. For TRANSFORM_EXPR, we will do the actual
4553 conversion in gnat_expand_expr. NULL_EXPR does not represent
4554 and actual value, so no conversion is needed. */
4555 expr = copy_node (expr);
4556 TREE_TYPE (expr) = type;
4557 return expr;
4558
4559 case STRING_CST:
4560 /* If we are converting a STRING_CST to another constrained array type,
4561 just make a new one in the proper type. */
4562 if (code == ecode && AGGREGATE_TYPE_P (etype)
4563 && !(TREE_CODE (TYPE_SIZE (etype)) == INTEGER_CST
4564 && TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST))
4565 {
4566 expr = copy_node (expr);
4567 TREE_TYPE (expr) = type;
4568 return expr;
4569 }
4570 break;
4571
4572 case VECTOR_CST:
4573 /* If we are converting a VECTOR_CST to a mere variant type, just make
4574 a new one in the proper type. */
4575 if (code == ecode && gnat_types_compatible_p (type, etype))
4576 {
4577 expr = copy_node (expr);
4578 TREE_TYPE (expr) = type;
4579 return expr;
4580 }
4581
4582 case CONSTRUCTOR:
4583 /* If we are converting a CONSTRUCTOR to a mere variant type, just make
4584 a new one in the proper type. */
4585 if (code == ecode && gnat_types_compatible_p (type, etype))
4586 {
4587 expr = copy_node (expr);
4588 TREE_TYPE (expr) = type;
4589 CONSTRUCTOR_ELTS (expr)
4590 = VEC_copy (constructor_elt, gc, CONSTRUCTOR_ELTS (expr));
4591 return expr;
4592 }
4593
4594 /* Likewise for a conversion between original and packable version, or
4595 conversion between types of the same size and with the same list of
4596 fields, but we have to work harder to preserve type consistency. */
4597 if (code == ecode
4598 && code == RECORD_TYPE
4599 && (TYPE_NAME (type) == TYPE_NAME (etype)
4600 || tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (etype))))
4601
4602 {
4603 VEC(constructor_elt,gc) *e = CONSTRUCTOR_ELTS (expr);
4604 unsigned HOST_WIDE_INT len = VEC_length (constructor_elt, e);
4605 VEC(constructor_elt,gc) *v = VEC_alloc (constructor_elt, gc, len);
4606 tree efield = TYPE_FIELDS (etype), field = TYPE_FIELDS (type);
4607 unsigned HOST_WIDE_INT idx;
4608 tree index, value;
4609
4610 /* Whether we need to clear TREE_CONSTANT et al. on the output
4611 constructor when we convert in place. */
4612 bool clear_constant = false;
4613
4614 FOR_EACH_CONSTRUCTOR_ELT(e, idx, index, value)
4615 {
4616 constructor_elt *elt;
4617 /* We expect only simple constructors. */
4618 if (!SAME_FIELD_P (index, efield))
4619 break;
4620 /* The field must be the same. */
4621 if (!SAME_FIELD_P (efield, field))
4622 break;
4623 elt = VEC_quick_push (constructor_elt, v, NULL);
4624 elt->index = field;
4625 elt->value = convert (TREE_TYPE (field), value);
4626
4627 /* If packing has made this field a bitfield and the input
4628 value couldn't be emitted statically any more, we need to
4629 clear TREE_CONSTANT on our output. */
4630 if (!clear_constant
4631 && TREE_CONSTANT (expr)
4632 && !CONSTRUCTOR_BITFIELD_P (efield)
4633 && CONSTRUCTOR_BITFIELD_P (field)
4634 && !initializer_constant_valid_for_bitfield_p (value))
4635 clear_constant = true;
4636
4637 efield = DECL_CHAIN (efield);
4638 field = DECL_CHAIN (field);
4639 }
4640
4641 /* If we have been able to match and convert all the input fields
4642 to their output type, convert in place now. We'll fallback to a
4643 view conversion downstream otherwise. */
4644 if (idx == len)
4645 {
4646 expr = copy_node (expr);
4647 TREE_TYPE (expr) = type;
4648 CONSTRUCTOR_ELTS (expr) = v;
4649 if (clear_constant)
4650 TREE_CONSTANT (expr) = TREE_STATIC (expr) = 0;
4651 return expr;
4652 }
4653 }
4654
4655 /* Likewise for a conversion between array type and vector type with a
4656 compatible representative array. */
4657 else if (code == VECTOR_TYPE
4658 && ecode == ARRAY_TYPE
4659 && gnat_types_compatible_p (TYPE_REPRESENTATIVE_ARRAY (type),
4660 etype))
4661 {
4662 VEC(constructor_elt,gc) *e = CONSTRUCTOR_ELTS (expr);
4663 unsigned HOST_WIDE_INT len = VEC_length (constructor_elt, e);
4664 VEC(constructor_elt,gc) *v;
4665 unsigned HOST_WIDE_INT ix;
4666 tree value;
4667
4668 /* Build a VECTOR_CST from a *constant* array constructor. */
4669 if (TREE_CONSTANT (expr))
4670 {
4671 bool constant_p = true;
4672
4673 /* Iterate through elements and check if all constructor
4674 elements are *_CSTs. */
4675 FOR_EACH_CONSTRUCTOR_VALUE (e, ix, value)
4676 if (!CONSTANT_CLASS_P (value))
4677 {
4678 constant_p = false;
4679 break;
4680 }
4681
4682 if (constant_p)
4683 return build_vector_from_ctor (type,
4684 CONSTRUCTOR_ELTS (expr));
4685 }
4686
4687 /* Otherwise, build a regular vector constructor. */
4688 v = VEC_alloc (constructor_elt, gc, len);
4689 FOR_EACH_CONSTRUCTOR_VALUE (e, ix, value)
4690 {
4691 constructor_elt *elt = VEC_quick_push (constructor_elt, v, NULL);
4692 elt->index = NULL_TREE;
4693 elt->value = value;
4694 }
4695 expr = copy_node (expr);
4696 TREE_TYPE (expr) = type;
4697 CONSTRUCTOR_ELTS (expr) = v;
4698 return expr;
4699 }
4700 break;
4701
4702 case UNCONSTRAINED_ARRAY_REF:
4703 /* First retrieve the underlying array. */
4704 expr = maybe_unconstrained_array (expr);
4705 etype = TREE_TYPE (expr);
4706 ecode = TREE_CODE (etype);
4707 break;
4708
4709 case VIEW_CONVERT_EXPR:
4710 {
4711 /* GCC 4.x is very sensitive to type consistency overall, and view
4712 conversions thus are very frequent. Even though just "convert"ing
4713 the inner operand to the output type is fine in most cases, it
4714 might expose unexpected input/output type mismatches in special
4715 circumstances so we avoid such recursive calls when we can. */
4716 tree op0 = TREE_OPERAND (expr, 0);
4717
4718 /* If we are converting back to the original type, we can just
4719 lift the input conversion. This is a common occurrence with
4720 switches back-and-forth amongst type variants. */
4721 if (type == TREE_TYPE (op0))
4722 return op0;
4723
4724 /* Otherwise, if we're converting between two aggregate or vector
4725 types, we might be allowed to substitute the VIEW_CONVERT_EXPR
4726 target type in place or to just convert the inner expression. */
4727 if ((AGGREGATE_TYPE_P (type) && AGGREGATE_TYPE_P (etype))
4728 || (VECTOR_TYPE_P (type) && VECTOR_TYPE_P (etype)))
4729 {
4730 /* If we are converting between mere variants, we can just
4731 substitute the VIEW_CONVERT_EXPR in place. */
4732 if (gnat_types_compatible_p (type, etype))
4733 return build1 (VIEW_CONVERT_EXPR, type, op0);
4734
4735 /* Otherwise, we may just bypass the input view conversion unless
4736 one of the types is a fat pointer, which is handled by
4737 specialized code below which relies on exact type matching. */
4738 else if (!TYPE_IS_FAT_POINTER_P (type)
4739 && !TYPE_IS_FAT_POINTER_P (etype))
4740 return convert (type, op0);
4741 }
4742
4743 break;
4744 }
4745
4746 default:
4747 break;
4748 }
4749
4750 /* Check for converting to a pointer to an unconstrained array. */
4751 if (TYPE_IS_FAT_POINTER_P (type) && !TYPE_IS_FAT_POINTER_P (etype))
4752 return convert_to_fat_pointer (type, expr);
4753
4754 /* If we are converting between two aggregate or vector types that are mere
4755 variants, just make a VIEW_CONVERT_EXPR. Likewise when we are converting
4756 to a vector type from its representative array type. */
4757 else if ((code == ecode
4758 && (AGGREGATE_TYPE_P (type) || VECTOR_TYPE_P (type))
4759 && gnat_types_compatible_p (type, etype))
4760 || (code == VECTOR_TYPE
4761 && ecode == ARRAY_TYPE
4762 && gnat_types_compatible_p (TYPE_REPRESENTATIVE_ARRAY (type),
4763 etype)))
4764 return build1 (VIEW_CONVERT_EXPR, type, expr);
4765
4766 /* If we are converting between tagged types, try to upcast properly. */
4767 else if (ecode == RECORD_TYPE && code == RECORD_TYPE
4768 && TYPE_ALIGN_OK (etype) && TYPE_ALIGN_OK (type))
4769 {
4770 tree child_etype = etype;
4771 do {
4772 tree field = TYPE_FIELDS (child_etype);
4773 if (DECL_NAME (field) == parent_name_id && TREE_TYPE (field) == type)
4774 return build_component_ref (expr, NULL_TREE, field, false);
4775 child_etype = TREE_TYPE (field);
4776 } while (TREE_CODE (child_etype) == RECORD_TYPE);
4777 }
4778
4779 /* If we are converting from a smaller form of record type back to it, just
4780 make a VIEW_CONVERT_EXPR. But first pad the expression to have the same
4781 size on both sides. */
4782 else if (ecode == RECORD_TYPE && code == RECORD_TYPE
4783 && smaller_form_type_p (etype, type))
4784 {
4785 expr = convert (maybe_pad_type (etype, TYPE_SIZE (type), 0, Empty,
4786 false, false, false, true),
4787 expr);
4788 return build1 (VIEW_CONVERT_EXPR, type, expr);
4789 }
4790
4791 /* In all other cases of related types, make a NOP_EXPR. */
4792 else if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (etype))
4793 return fold_convert (type, expr);
4794
4795 switch (code)
4796 {
4797 case VOID_TYPE:
4798 return fold_build1 (CONVERT_EXPR, type, expr);
4799
4800 case INTEGER_TYPE:
4801 if (TYPE_HAS_ACTUAL_BOUNDS_P (type)
4802 && (ecode == ARRAY_TYPE || ecode == UNCONSTRAINED_ARRAY_TYPE
4803 || (ecode == RECORD_TYPE && TYPE_CONTAINS_TEMPLATE_P (etype))))
4804 return unchecked_convert (type, expr, false);
4805 else if (TYPE_BIASED_REPRESENTATION_P (type))
4806 return fold_convert (type,
4807 fold_build2 (MINUS_EXPR, TREE_TYPE (type),
4808 convert (TREE_TYPE (type), expr),
4809 TYPE_MIN_VALUE (type)));
4810
4811 /* ... fall through ... */
4812
4813 case ENUMERAL_TYPE:
4814 case BOOLEAN_TYPE:
4815 /* If we are converting an additive expression to an integer type
4816 with lower precision, be wary of the optimization that can be
4817 applied by convert_to_integer. There are 2 problematic cases:
4818 - if the first operand was originally of a biased type,
4819 because we could be recursively called to convert it
4820 to an intermediate type and thus rematerialize the
4821 additive operator endlessly,
4822 - if the expression contains a placeholder, because an
4823 intermediate conversion that changes the sign could
4824 be inserted and thus introduce an artificial overflow
4825 at compile time when the placeholder is substituted. */
4826 if (code == INTEGER_TYPE
4827 && ecode == INTEGER_TYPE
4828 && TYPE_PRECISION (type) < TYPE_PRECISION (etype)
4829 && (TREE_CODE (expr) == PLUS_EXPR || TREE_CODE (expr) == MINUS_EXPR))
4830 {
4831 tree op0 = get_unwidened (TREE_OPERAND (expr, 0), type);
4832
4833 if ((TREE_CODE (TREE_TYPE (op0)) == INTEGER_TYPE
4834 && TYPE_BIASED_REPRESENTATION_P (TREE_TYPE (op0)))
4835 || CONTAINS_PLACEHOLDER_P (expr))
4836 return build1 (NOP_EXPR, type, expr);
4837 }
4838
4839 return fold (convert_to_integer (type, expr));
4840
4841 case POINTER_TYPE:
4842 case REFERENCE_TYPE:
4843 /* If converting between two thin pointers, adjust if needed to account
4844 for differing offsets from the base pointer, depending on whether
4845 there is a TYPE_UNCONSTRAINED_ARRAY attached to the record type. */
4846 if (TYPE_IS_THIN_POINTER_P (etype) && TYPE_IS_THIN_POINTER_P (type))
4847 {
4848 tree etype_pos
4849 = TYPE_UNCONSTRAINED_ARRAY (TREE_TYPE (etype)) != NULL_TREE
4850 ? byte_position (DECL_CHAIN (TYPE_FIELDS (TREE_TYPE (etype))))
4851 : size_zero_node;
4852 tree type_pos
4853 = TYPE_UNCONSTRAINED_ARRAY (TREE_TYPE (type)) != NULL_TREE
4854 ? byte_position (DECL_CHAIN (TYPE_FIELDS (TREE_TYPE (type))))
4855 : size_zero_node;
4856 tree byte_diff = size_diffop (type_pos, etype_pos);
4857
4858 expr = build1 (NOP_EXPR, type, expr);
4859 if (integer_zerop (byte_diff))
4860 return expr;
4861
4862 return build_binary_op (POINTER_PLUS_EXPR, type, expr,
4863 fold_convert (sizetype, byte_diff));
4864 }
4865
4866 /* If converting fat pointer to normal or thin pointer, get the pointer
4867 to the array and then convert it. */
4868 if (TYPE_IS_FAT_POINTER_P (etype))
4869 expr
4870 = build_component_ref (expr, NULL_TREE, TYPE_FIELDS (etype), false);
4871
4872 return fold (convert_to_pointer (type, expr));
4873
4874 case REAL_TYPE:
4875 return fold (convert_to_real (type, expr));
4876
4877 case RECORD_TYPE:
4878 if (TYPE_JUSTIFIED_MODULAR_P (type) && !AGGREGATE_TYPE_P (etype))
4879 {
4880 VEC(constructor_elt,gc) *v = VEC_alloc (constructor_elt, gc, 1);
4881
4882 CONSTRUCTOR_APPEND_ELT (v, TYPE_FIELDS (type),
4883 convert (TREE_TYPE (TYPE_FIELDS (type)),
4884 expr));
4885 return gnat_build_constructor (type, v);
4886 }
4887
4888 /* ... fall through ... */
4889
4890 case ARRAY_TYPE:
4891 /* In these cases, assume the front-end has validated the conversion.
4892 If the conversion is valid, it will be a bit-wise conversion, so
4893 it can be viewed as an unchecked conversion. */
4894 return unchecked_convert (type, expr, false);
4895
4896 case UNION_TYPE:
4897 /* This is a either a conversion between a tagged type and some
4898 subtype, which we have to mark as a UNION_TYPE because of
4899 overlapping fields or a conversion of an Unchecked_Union. */
4900 return unchecked_convert (type, expr, false);
4901
4902 case UNCONSTRAINED_ARRAY_TYPE:
4903 /* If the input is a VECTOR_TYPE, convert to the representative
4904 array type first. */
4905 if (ecode == VECTOR_TYPE)
4906 {
4907 expr = convert (TYPE_REPRESENTATIVE_ARRAY (etype), expr);
4908 etype = TREE_TYPE (expr);
4909 ecode = TREE_CODE (etype);
4910 }
4911
4912 /* If EXPR is a constrained array, take its address, convert it to a
4913 fat pointer, and then dereference it. Likewise if EXPR is a
4914 record containing both a template and a constrained array.
4915 Note that a record representing a justified modular type
4916 always represents a packed constrained array. */
4917 if (ecode == ARRAY_TYPE
4918 || (ecode == INTEGER_TYPE && TYPE_HAS_ACTUAL_BOUNDS_P (etype))
4919 || (ecode == RECORD_TYPE && TYPE_CONTAINS_TEMPLATE_P (etype))
4920 || (ecode == RECORD_TYPE && TYPE_JUSTIFIED_MODULAR_P (etype)))
4921 return
4922 build_unary_op
4923 (INDIRECT_REF, NULL_TREE,
4924 convert_to_fat_pointer (TREE_TYPE (type),
4925 build_unary_op (ADDR_EXPR,
4926 NULL_TREE, expr)));
4927
4928 /* Do something very similar for converting one unconstrained
4929 array to another. */
4930 else if (ecode == UNCONSTRAINED_ARRAY_TYPE)
4931 return
4932 build_unary_op (INDIRECT_REF, NULL_TREE,
4933 convert (TREE_TYPE (type),
4934 build_unary_op (ADDR_EXPR,
4935 NULL_TREE, expr)));
4936 else
4937 gcc_unreachable ();
4938
4939 case COMPLEX_TYPE:
4940 return fold (convert_to_complex (type, expr));
4941
4942 default:
4943 gcc_unreachable ();
4944 }
4945 }
4946
4947 /* Create an expression whose value is that of EXPR converted to the common
4948 index type, which is sizetype. EXPR is supposed to be in the base type
4949 of the GNAT index type. Calling it is equivalent to doing
4950
4951 convert (sizetype, expr)
4952
4953 but we try to distribute the type conversion with the knowledge that EXPR
4954 cannot overflow in its type. This is a best-effort approach and we fall
4955 back to the above expression as soon as difficulties are encountered.
4956
4957 This is necessary to overcome issues that arise when the GNAT base index
4958 type and the GCC common index type (sizetype) don't have the same size,
4959 which is quite frequent on 64-bit architectures. In this case, and if
4960 the GNAT base index type is signed but the iteration type of the loop has
4961 been forced to unsigned, the loop scalar evolution engine cannot compute
4962 a simple evolution for the general induction variables associated with the
4963 array indices, because it will preserve the wrap-around semantics in the
4964 unsigned type of their "inner" part. As a result, many loop optimizations
4965 are blocked.
4966
4967 The solution is to use a special (basic) induction variable that is at
4968 least as large as sizetype, and to express the aforementioned general
4969 induction variables in terms of this induction variable, eliminating
4970 the problematic intermediate truncation to the GNAT base index type.
4971 This is possible as long as the original expression doesn't overflow
4972 and if the middle-end hasn't introduced artificial overflows in the
4973 course of the various simplification it can make to the expression. */
4974
4975 tree
4976 convert_to_index_type (tree expr)
4977 {
4978 enum tree_code code = TREE_CODE (expr);
4979 tree type = TREE_TYPE (expr);
4980
4981 /* If the type is unsigned, overflow is allowed so we cannot be sure that
4982 EXPR doesn't overflow. Keep it simple if optimization is disabled. */
4983 if (TYPE_UNSIGNED (type) || !optimize)
4984 return convert (sizetype, expr);
4985
4986 switch (code)
4987 {
4988 case VAR_DECL:
4989 /* The main effect of the function: replace a loop parameter with its
4990 associated special induction variable. */
4991 if (DECL_LOOP_PARM_P (expr) && DECL_INDUCTION_VAR (expr))
4992 expr = DECL_INDUCTION_VAR (expr);
4993 break;
4994
4995 CASE_CONVERT:
4996 {
4997 tree otype = TREE_TYPE (TREE_OPERAND (expr, 0));
4998 /* Bail out as soon as we suspect some sort of type frobbing. */
4999 if (TYPE_PRECISION (type) != TYPE_PRECISION (otype)
5000 || TYPE_UNSIGNED (type) != TYPE_UNSIGNED (otype))
5001 break;
5002 }
5003
5004 /* ... fall through ... */
5005
5006 case NON_LVALUE_EXPR:
5007 return fold_build1 (code, sizetype,
5008 convert_to_index_type (TREE_OPERAND (expr, 0)));
5009
5010 case PLUS_EXPR:
5011 case MINUS_EXPR:
5012 case MULT_EXPR:
5013 return fold_build2 (code, sizetype,
5014 convert_to_index_type (TREE_OPERAND (expr, 0)),
5015 convert_to_index_type (TREE_OPERAND (expr, 1)));
5016
5017 case COMPOUND_EXPR:
5018 return fold_build2 (code, sizetype, TREE_OPERAND (expr, 0),
5019 convert_to_index_type (TREE_OPERAND (expr, 1)));
5020
5021 case COND_EXPR:
5022 return fold_build3 (code, sizetype, TREE_OPERAND (expr, 0),
5023 convert_to_index_type (TREE_OPERAND (expr, 1)),
5024 convert_to_index_type (TREE_OPERAND (expr, 2)));
5025
5026 default:
5027 break;
5028 }
5029
5030 return convert (sizetype, expr);
5031 }
5032 \f
5033 /* Remove all conversions that are done in EXP. This includes converting
5034 from a padded type or to a justified modular type. If TRUE_ADDRESS
5035 is true, always return the address of the containing object even if
5036 the address is not bit-aligned. */
5037
5038 tree
5039 remove_conversions (tree exp, bool true_address)
5040 {
5041 switch (TREE_CODE (exp))
5042 {
5043 case CONSTRUCTOR:
5044 if (true_address
5045 && TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE
5046 && TYPE_JUSTIFIED_MODULAR_P (TREE_TYPE (exp)))
5047 return
5048 remove_conversions (VEC_index (constructor_elt,
5049 CONSTRUCTOR_ELTS (exp), 0)->value,
5050 true);
5051 break;
5052
5053 case COMPONENT_REF:
5054 if (TYPE_IS_PADDING_P (TREE_TYPE (TREE_OPERAND (exp, 0))))
5055 return remove_conversions (TREE_OPERAND (exp, 0), true_address);
5056 break;
5057
5058 CASE_CONVERT:
5059 case VIEW_CONVERT_EXPR:
5060 case NON_LVALUE_EXPR:
5061 return remove_conversions (TREE_OPERAND (exp, 0), true_address);
5062
5063 default:
5064 break;
5065 }
5066
5067 return exp;
5068 }
5069 \f
5070 /* If EXP's type is an UNCONSTRAINED_ARRAY_TYPE, return an expression that
5071 refers to the underlying array. If it has TYPE_CONTAINS_TEMPLATE_P,
5072 likewise return an expression pointing to the underlying array. */
5073
5074 tree
5075 maybe_unconstrained_array (tree exp)
5076 {
5077 enum tree_code code = TREE_CODE (exp);
5078 tree type = TREE_TYPE (exp);
5079
5080 switch (TREE_CODE (type))
5081 {
5082 case UNCONSTRAINED_ARRAY_TYPE:
5083 if (code == UNCONSTRAINED_ARRAY_REF)
5084 {
5085 const bool read_only = TREE_READONLY (exp);
5086 const bool no_trap = TREE_THIS_NOTRAP (exp);
5087
5088 exp = TREE_OPERAND (exp, 0);
5089 type = TREE_TYPE (exp);
5090
5091 if (TREE_CODE (exp) == COND_EXPR)
5092 {
5093 tree op1
5094 = build_unary_op (INDIRECT_REF, NULL_TREE,
5095 build_component_ref (TREE_OPERAND (exp, 1),
5096 NULL_TREE,
5097 TYPE_FIELDS (type),
5098 false));
5099 tree op2
5100 = build_unary_op (INDIRECT_REF, NULL_TREE,
5101 build_component_ref (TREE_OPERAND (exp, 2),
5102 NULL_TREE,
5103 TYPE_FIELDS (type),
5104 false));
5105
5106 exp = build3 (COND_EXPR,
5107 TREE_TYPE (TREE_TYPE (TYPE_FIELDS (type))),
5108 TREE_OPERAND (exp, 0), op1, op2);
5109 }
5110 else
5111 {
5112 exp = build_unary_op (INDIRECT_REF, NULL_TREE,
5113 build_component_ref (exp, NULL_TREE,
5114 TYPE_FIELDS (type),
5115 false));
5116 TREE_READONLY (exp) = read_only;
5117 TREE_THIS_NOTRAP (exp) = no_trap;
5118 }
5119 }
5120
5121 else if (code == NULL_EXPR)
5122 exp = build1 (NULL_EXPR,
5123 TREE_TYPE (TREE_TYPE (TYPE_FIELDS (TREE_TYPE (type)))),
5124 TREE_OPERAND (exp, 0));
5125 break;
5126
5127 case RECORD_TYPE:
5128 /* If this is a padded type and it contains a template, convert to the
5129 unpadded type first. */
5130 if (TYPE_PADDING_P (type)
5131 && TREE_CODE (TREE_TYPE (TYPE_FIELDS (type))) == RECORD_TYPE
5132 && TYPE_CONTAINS_TEMPLATE_P (TREE_TYPE (TYPE_FIELDS (type))))
5133 {
5134 exp = convert (TREE_TYPE (TYPE_FIELDS (type)), exp);
5135 type = TREE_TYPE (exp);
5136 }
5137
5138 if (TYPE_CONTAINS_TEMPLATE_P (type))
5139 {
5140 exp = build_component_ref (exp, NULL_TREE,
5141 DECL_CHAIN (TYPE_FIELDS (type)),
5142 false);
5143 type = TREE_TYPE (exp);
5144
5145 /* If the array type is padded, convert to the unpadded type. */
5146 if (TYPE_IS_PADDING_P (type))
5147 exp = convert (TREE_TYPE (TYPE_FIELDS (type)), exp);
5148 }
5149 break;
5150
5151 default:
5152 break;
5153 }
5154
5155 return exp;
5156 }
5157 \f
5158 /* Return true if EXPR is an expression that can be folded as an operand
5159 of a VIEW_CONVERT_EXPR. See ada-tree.h for a complete rationale. */
5160
5161 static bool
5162 can_fold_for_view_convert_p (tree expr)
5163 {
5164 tree t1, t2;
5165
5166 /* The folder will fold NOP_EXPRs between integral types with the same
5167 precision (in the middle-end's sense). We cannot allow it if the
5168 types don't have the same precision in the Ada sense as well. */
5169 if (TREE_CODE (expr) != NOP_EXPR)
5170 return true;
5171
5172 t1 = TREE_TYPE (expr);
5173 t2 = TREE_TYPE (TREE_OPERAND (expr, 0));
5174
5175 /* Defer to the folder for non-integral conversions. */
5176 if (!(INTEGRAL_TYPE_P (t1) && INTEGRAL_TYPE_P (t2)))
5177 return true;
5178
5179 /* Only fold conversions that preserve both precisions. */
5180 if (TYPE_PRECISION (t1) == TYPE_PRECISION (t2)
5181 && operand_equal_p (rm_size (t1), rm_size (t2), 0))
5182 return true;
5183
5184 return false;
5185 }
5186
5187 /* Return an expression that does an unchecked conversion of EXPR to TYPE.
5188 If NOTRUNC_P is true, truncation operations should be suppressed.
5189
5190 Special care is required with (source or target) integral types whose
5191 precision is not equal to their size, to make sure we fetch or assign
5192 the value bits whose location might depend on the endianness, e.g.
5193
5194 Rmsize : constant := 8;
5195 subtype Int is Integer range 0 .. 2 ** Rmsize - 1;
5196
5197 type Bit_Array is array (1 .. Rmsize) of Boolean;
5198 pragma Pack (Bit_Array);
5199
5200 function To_Bit_Array is new Unchecked_Conversion (Int, Bit_Array);
5201
5202 Value : Int := 2#1000_0001#;
5203 Vbits : Bit_Array := To_Bit_Array (Value);
5204
5205 we expect the 8 bits at Vbits'Address to always contain Value, while
5206 their original location depends on the endianness, at Value'Address
5207 on a little-endian architecture but not on a big-endian one. */
5208
5209 tree
5210 unchecked_convert (tree type, tree expr, bool notrunc_p)
5211 {
5212 tree etype = TREE_TYPE (expr);
5213 enum tree_code ecode = TREE_CODE (etype);
5214 enum tree_code code = TREE_CODE (type);
5215 int c;
5216
5217 /* If the expression is already of the right type, we are done. */
5218 if (etype == type)
5219 return expr;
5220
5221 /* If both types types are integral just do a normal conversion.
5222 Likewise for a conversion to an unconstrained array. */
5223 if ((((INTEGRAL_TYPE_P (type)
5224 && !(code == INTEGER_TYPE && TYPE_VAX_FLOATING_POINT_P (type)))
5225 || (POINTER_TYPE_P (type) && !TYPE_IS_THIN_POINTER_P (type))
5226 || (code == RECORD_TYPE && TYPE_JUSTIFIED_MODULAR_P (type)))
5227 && ((INTEGRAL_TYPE_P (etype)
5228 && !(ecode == INTEGER_TYPE && TYPE_VAX_FLOATING_POINT_P (etype)))
5229 || (POINTER_TYPE_P (etype) && !TYPE_IS_THIN_POINTER_P (etype))
5230 || (ecode == RECORD_TYPE && TYPE_JUSTIFIED_MODULAR_P (etype))))
5231 || code == UNCONSTRAINED_ARRAY_TYPE)
5232 {
5233 if (ecode == INTEGER_TYPE && TYPE_BIASED_REPRESENTATION_P (etype))
5234 {
5235 tree ntype = copy_type (etype);
5236 TYPE_BIASED_REPRESENTATION_P (ntype) = 0;
5237 TYPE_MAIN_VARIANT (ntype) = ntype;
5238 expr = build1 (NOP_EXPR, ntype, expr);
5239 }
5240
5241 if (code == INTEGER_TYPE && TYPE_BIASED_REPRESENTATION_P (type))
5242 {
5243 tree rtype = copy_type (type);
5244 TYPE_BIASED_REPRESENTATION_P (rtype) = 0;
5245 TYPE_MAIN_VARIANT (rtype) = rtype;
5246 expr = convert (rtype, expr);
5247 expr = build1 (NOP_EXPR, type, expr);
5248 }
5249 else
5250 expr = convert (type, expr);
5251 }
5252
5253 /* If we are converting to an integral type whose precision is not equal
5254 to its size, first unchecked convert to a record type that contains an
5255 field of the given precision. Then extract the field. */
5256 else if (INTEGRAL_TYPE_P (type)
5257 && TYPE_RM_SIZE (type)
5258 && 0 != compare_tree_int (TYPE_RM_SIZE (type),
5259 GET_MODE_BITSIZE (TYPE_MODE (type))))
5260 {
5261 tree rec_type = make_node (RECORD_TYPE);
5262 unsigned HOST_WIDE_INT prec = TREE_INT_CST_LOW (TYPE_RM_SIZE (type));
5263 tree field_type, field;
5264
5265 if (TYPE_UNSIGNED (type))
5266 field_type = make_unsigned_type (prec);
5267 else
5268 field_type = make_signed_type (prec);
5269 SET_TYPE_RM_SIZE (field_type, TYPE_RM_SIZE (type));
5270
5271 field = create_field_decl (get_identifier ("OBJ"), field_type, rec_type,
5272 NULL_TREE, NULL_TREE, 1, 0);
5273
5274 TYPE_FIELDS (rec_type) = field;
5275 layout_type (rec_type);
5276
5277 expr = unchecked_convert (rec_type, expr, notrunc_p);
5278 expr = build_component_ref (expr, NULL_TREE, field, false);
5279 expr = fold_build1 (NOP_EXPR, type, expr);
5280 }
5281
5282 /* Similarly if we are converting from an integral type whose precision is
5283 not equal to its size, first copy into a field of the given precision
5284 and unchecked convert the record type. */
5285 else if (INTEGRAL_TYPE_P (etype)
5286 && TYPE_RM_SIZE (etype)
5287 && 0 != compare_tree_int (TYPE_RM_SIZE (etype),
5288 GET_MODE_BITSIZE (TYPE_MODE (etype))))
5289 {
5290 tree rec_type = make_node (RECORD_TYPE);
5291 unsigned HOST_WIDE_INT prec = TREE_INT_CST_LOW (TYPE_RM_SIZE (etype));
5292 VEC(constructor_elt,gc) *v = VEC_alloc (constructor_elt, gc, 1);
5293 tree field_type, field;
5294
5295 if (TYPE_UNSIGNED (etype))
5296 field_type = make_unsigned_type (prec);
5297 else
5298 field_type = make_signed_type (prec);
5299 SET_TYPE_RM_SIZE (field_type, TYPE_RM_SIZE (etype));
5300
5301 field = create_field_decl (get_identifier ("OBJ"), field_type, rec_type,
5302 NULL_TREE, NULL_TREE, 1, 0);
5303
5304 TYPE_FIELDS (rec_type) = field;
5305 layout_type (rec_type);
5306
5307 expr = fold_build1 (NOP_EXPR, field_type, expr);
5308 CONSTRUCTOR_APPEND_ELT (v, field, expr);
5309 expr = gnat_build_constructor (rec_type, v);
5310 expr = unchecked_convert (type, expr, notrunc_p);
5311 }
5312
5313 /* If we are converting from a scalar type to a type with a different size,
5314 we need to pad to have the same size on both sides.
5315
5316 ??? We cannot do it unconditionally because unchecked conversions are
5317 used liberally by the front-end to implement polymorphism, e.g. in:
5318
5319 S191s : constant ada__tags__addr_ptr := ada__tags__addr_ptr!(S190s);
5320 return p___size__4 (p__object!(S191s.all));
5321
5322 so we skip all expressions that are references. */
5323 else if (!REFERENCE_CLASS_P (expr)
5324 && !AGGREGATE_TYPE_P (etype)
5325 && TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST
5326 && (c = tree_int_cst_compare (TYPE_SIZE (etype), TYPE_SIZE (type))))
5327 {
5328 if (c < 0)
5329 {
5330 expr = convert (maybe_pad_type (etype, TYPE_SIZE (type), 0, Empty,
5331 false, false, false, true),
5332 expr);
5333 expr = unchecked_convert (type, expr, notrunc_p);
5334 }
5335 else
5336 {
5337 tree rec_type = maybe_pad_type (type, TYPE_SIZE (etype), 0, Empty,
5338 false, false, false, true);
5339 expr = unchecked_convert (rec_type, expr, notrunc_p);
5340 expr = build_component_ref (expr, NULL_TREE, TYPE_FIELDS (rec_type),
5341 false);
5342 }
5343 }
5344
5345 /* We have a special case when we are converting between two unconstrained
5346 array types. In that case, take the address, convert the fat pointer
5347 types, and dereference. */
5348 else if (ecode == code && code == UNCONSTRAINED_ARRAY_TYPE)
5349 expr = build_unary_op (INDIRECT_REF, NULL_TREE,
5350 build1 (VIEW_CONVERT_EXPR, TREE_TYPE (type),
5351 build_unary_op (ADDR_EXPR, NULL_TREE,
5352 expr)));
5353
 /* Another special case is when we are converting to a vector type from its
    representative array type; this is a regular conversion.  */
5356 else if (code == VECTOR_TYPE
5357 && ecode == ARRAY_TYPE
5358 && gnat_types_compatible_p (TYPE_REPRESENTATIVE_ARRAY (type),
5359 etype))
5360 expr = convert (type, expr);
5361
5362 else
5363 {
5364 expr = maybe_unconstrained_array (expr);
5365 etype = TREE_TYPE (expr);
5366 ecode = TREE_CODE (etype);
5367 if (can_fold_for_view_convert_p (expr))
5368 expr = fold_build1 (VIEW_CONVERT_EXPR, type, expr);
5369 else
5370 expr = build1 (VIEW_CONVERT_EXPR, type, expr);
5371 }
5372
5373 /* If the result is an integral type whose precision is not equal to its
5374 size, sign- or zero-extend the result. We need not do this if the input
5375 is an integral type of the same precision and signedness or if the output
5376 is a biased type or if both the input and output are unsigned. */
5377 if (!notrunc_p
5378 && INTEGRAL_TYPE_P (type) && TYPE_RM_SIZE (type)
5379 && !(code == INTEGER_TYPE && TYPE_BIASED_REPRESENTATION_P (type))
5380 && 0 != compare_tree_int (TYPE_RM_SIZE (type),
5381 GET_MODE_BITSIZE (TYPE_MODE (type)))
5382 && !(INTEGRAL_TYPE_P (etype)
5383 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (etype)
5384 && operand_equal_p (TYPE_RM_SIZE (type),
5385 (TYPE_RM_SIZE (etype) != 0
5386 ? TYPE_RM_SIZE (etype) : TYPE_SIZE (etype)),
5387 0))
5388 && !(TYPE_UNSIGNED (type) && TYPE_UNSIGNED (etype)))
5389 {
5390 tree base_type
5391 = gnat_type_for_mode (TYPE_MODE (type), TYPE_UNSIGNED (type));
5392 tree shift_expr
5393 = convert (base_type,
5394 size_binop (MINUS_EXPR,
5395 bitsize_int
5396 (GET_MODE_BITSIZE (TYPE_MODE (type))),
5397 TYPE_RM_SIZE (type)));
5398 expr
5399 = convert (type,
5400 build_binary_op (RSHIFT_EXPR, base_type,
5401 build_binary_op (LSHIFT_EXPR, base_type,
5402 convert (base_type, expr),
5403 shift_expr),
5404 shift_expr));
5405 }
5406
5407 /* An unchecked conversion should never raise Constraint_Error. The code
5408 below assumes that GCC's conversion routines overflow the same way that
5409 the underlying hardware does. This is probably true. In the rare case
5410 when it is false, we can rely on the fact that such conversions are
5411 erroneous anyway. */
5412 if (TREE_CODE (expr) == INTEGER_CST)
5413 TREE_OVERFLOW (expr) = 0;
5414
5415 /* If the sizes of the types differ and this is an VIEW_CONVERT_EXPR,
5416 show no longer constant. */
5417 if (TREE_CODE (expr) == VIEW_CONVERT_EXPR
5418 && !operand_equal_p (TYPE_SIZE_UNIT (type), TYPE_SIZE_UNIT (etype),
5419 OEP_ONLY_CONST))
5420 TREE_CONSTANT (expr) = 0;
5421
5422 return expr;
5423 }
5424 \f
5425 /* Return the appropriate GCC tree code for the specified GNAT_TYPE,
5426 the latter being a record type as predicated by Is_Record_Type. */
5427
5428 enum tree_code
5429 tree_code_for_record_type (Entity_Id gnat_type)
5430 {
5431 Node_Id component_list, component;
5432
5433 /* Return UNION_TYPE if it's an Unchecked_Union whose non-discriminant
5434 fields are all in the variant part. Otherwise, return RECORD_TYPE. */
5435 if (!Is_Unchecked_Union (gnat_type))
5436 return RECORD_TYPE;
5437
5438 gnat_type = Implementation_Base_Type (gnat_type);
5439 component_list
5440 = Component_List (Type_Definition (Declaration_Node (gnat_type)));
5441
5442 for (component = First_Non_Pragma (Component_Items (component_list));
5443 Present (component);
5444 component = Next_Non_Pragma (component))
5445 if (Ekind (Defining_Entity (component)) == E_Component)
5446 return RECORD_TYPE;
5447
5448 return UNION_TYPE;
5449 }
5450
5451 /* Return true if GNAT_TYPE is a "double" floating-point type, i.e. whose
5452 size is equal to 64 bits, or an array of such a type. Set ALIGN_CLAUSE
5453 according to the presence of an alignment clause on the type or, if it
5454 is an array, on the component type. */
5455
5456 bool
5457 is_double_float_or_array (Entity_Id gnat_type, bool *align_clause)
5458 {
5459 gnat_type = Underlying_Type (gnat_type);
5460
5461 *align_clause = Present (Alignment_Clause (gnat_type));
5462
5463 if (Is_Array_Type (gnat_type))
5464 {
5465 gnat_type = Underlying_Type (Component_Type (gnat_type));
5466 if (Present (Alignment_Clause (gnat_type)))
5467 *align_clause = true;
5468 }
5469
5470 if (!Is_Floating_Point_Type (gnat_type))
5471 return false;
5472
5473 if (UI_To_Int (Esize (gnat_type)) != 64)
5474 return false;
5475
5476 return true;
5477 }
5478
5479 /* Return true if GNAT_TYPE is a "double" or larger scalar type, i.e. whose
5480 size is greater or equal to 64 bits, or an array of such a type. Set
5481 ALIGN_CLAUSE according to the presence of an alignment clause on the
5482 type or, if it is an array, on the component type. */
5483
5484 bool
5485 is_double_scalar_or_array (Entity_Id gnat_type, bool *align_clause)
5486 {
5487 gnat_type = Underlying_Type (gnat_type);
5488
5489 *align_clause = Present (Alignment_Clause (gnat_type));
5490
5491 if (Is_Array_Type (gnat_type))
5492 {
5493 gnat_type = Underlying_Type (Component_Type (gnat_type));
5494 if (Present (Alignment_Clause (gnat_type)))
5495 *align_clause = true;
5496 }
5497
5498 if (!Is_Scalar_Type (gnat_type))
5499 return false;
5500
5501 if (UI_To_Int (Esize (gnat_type)) < 64)
5502 return false;
5503
5504 return true;
5505 }
5506
5507 /* Return true if GNU_TYPE is suitable as the type of a non-aliased
5508 component of an aggregate type. */
5509
5510 bool
5511 type_for_nonaliased_component_p (tree gnu_type)
5512 {
5513 /* If the type is passed by reference, we may have pointers to the
5514 component so it cannot be made non-aliased. */
5515 if (must_pass_by_ref (gnu_type) || default_pass_by_ref (gnu_type))
5516 return false;
5517
5518 /* We used to say that any component of aggregate type is aliased
5519 because the front-end may take 'Reference of it. The front-end
5520 has been enhanced in the meantime so as to use a renaming instead
5521 in most cases, but the back-end can probably take the address of
5522 such a component too so we go for the conservative stance.
5523
5524 For instance, we might need the address of any array type, even
5525 if normally passed by copy, to construct a fat pointer if the
5526 component is used as an actual for an unconstrained formal.
5527
5528 Likewise for record types: even if a specific record subtype is
5529 passed by copy, the parent type might be passed by ref (e.g. if
5530 it's of variable size) and we might take the address of a child
5531 component to pass to a parent formal. We have no way to check
5532 for such conditions here. */
5533 if (AGGREGATE_TYPE_P (gnu_type))
5534 return false;
5535
5536 return true;
5537 }
5538
5539 /* Return true if TYPE is a smaller form of ORIG_TYPE. */
5540
5541 bool
5542 smaller_form_type_p (tree type, tree orig_type)
5543 {
5544 tree size, osize;
5545
5546 /* We're not interested in variants here. */
5547 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig_type))
5548 return false;
5549
5550 /* Like a variant, a packable version keeps the original TYPE_NAME. */
5551 if (TYPE_NAME (type) != TYPE_NAME (orig_type))
5552 return false;
5553
5554 size = TYPE_SIZE (type);
5555 osize = TYPE_SIZE (orig_type);
5556
5557 if (!(TREE_CODE (size) == INTEGER_CST && TREE_CODE (osize) == INTEGER_CST))
5558 return false;
5559
5560 return tree_int_cst_lt (size, osize) != 0;
5561 }
5562
/* Perform final processing on global variables.  */

/* Dummy variable used below to anchor the types declared as used at the
   global level in the types_used_by_var_decl hash table.  */
static GTY (()) tree dummy_global;

void
gnat_write_global_declarations (void)
{
  unsigned int i;
  tree iter;

  /* If we have declared types as used at the global level, insert them in
     the global hash table.  We use a dummy variable for this purpose.  */
  if (!VEC_empty (tree, types_used_by_cur_var_decl))
    {
      struct varpool_node *node;
      char *label;

      /* Manufacture a compiler-local name for the dummy variable.  */
      ASM_FORMAT_PRIVATE_NAME (label, first_global_object_name, 0);
      dummy_global
	= build_decl (BUILTINS_LOCATION, VAR_DECL, get_identifier (label),
		      void_type_node);
      TREE_STATIC (dummy_global) = 1;
      TREE_ASM_WRITTEN (dummy_global) = 1;
      /* Make sure the cgraph machinery keeps the dummy symbol alive.  */
      node = varpool_node (dummy_global);
      node->symbol.force_output = 1;

      /* Drain the vector, registering each used type against the dummy.  */
      while (!VEC_empty (tree, types_used_by_cur_var_decl))
	{
	  tree t = VEC_pop (tree, types_used_by_cur_var_decl);
	  types_used_by_var_decl_insert (t, dummy_global);
	}
    }

  /* Output debug information for all global type declarations first.  This
     ensures that global types whose compilation hasn't been finalized yet,
     for example pointers to Taft amendment types, have their compilation
     finalized in the right context.  */
  FOR_EACH_VEC_ELT (tree, global_decls, i, iter)
    if (TREE_CODE (iter) == TYPE_DECL)
      debug_hooks->global_decl (iter);

  /* Proceed to optimize and emit assembly. */
  finalize_compilation_unit ();

  /* After cgraph has had a chance to emit everything that's going to
     be emitted, output debug information for the rest of globals.  */
  if (!seen_error ())
    {
      timevar_push (TV_SYMOUT);
      FOR_EACH_VEC_ELT (tree, global_decls, i, iter)
	if (TREE_CODE (iter) != TYPE_DECL)
	  debug_hooks->global_decl (iter);
      timevar_pop (TV_SYMOUT);
    }
}
5618
5619 /* ************************************************************************
5620 * * GCC builtins support *
5621 * ************************************************************************ */
5622
5623 /* The general scheme is fairly simple:
5624
5625 For each builtin function/type to be declared, gnat_install_builtins calls
5626 internal facilities which eventually get to gnat_push_decl, which in turn
5627 tracks the so declared builtin function decls in the 'builtin_decls' global
5628 datastructure. When an Intrinsic subprogram declaration is processed, we
5629 search this global datastructure to retrieve the associated BUILT_IN DECL
5630 node. */
5631
5632 /* Search the chain of currently available builtin declarations for a node
5633 corresponding to function NAME (an IDENTIFIER_NODE). Return the first node
5634 found, if any, or NULL_TREE otherwise. */
5635 tree
5636 builtin_decl_for (tree name)
5637 {
5638 unsigned i;
5639 tree decl;
5640
5641 FOR_EACH_VEC_ELT (tree, builtin_decls, i, decl)
5642 if (DECL_NAME (decl) == name)
5643 return decl;
5644
5645 return NULL_TREE;
5646 }
5647
5648 /* The code below eventually exposes gnat_install_builtins, which declares
5649 the builtin types and functions we might need, either internally or as
5650 user accessible facilities.
5651
5652 ??? This is a first implementation shot, still in rough shape. It is
5653 heavily inspired from the "C" family implementation, with chunks copied
5654 verbatim from there.
5655
5656 Two obvious TODO candidates are
5657 o Use a more efficient name/decl mapping scheme
5658 o Devise a middle-end infrastructure to avoid having to copy
5659 pieces between front-ends. */
5660
5661 /* ----------------------------------------------------------------------- *
5662 * BUILTIN ELEMENTARY TYPES *
5663 * ----------------------------------------------------------------------- */
5664
/* Standard data types to be used in builtin argument declarations.  */

/* Indices into c_global_trees below, mirroring the C front-end's scheme.  */
enum c_tree_index
{
    CTI_SIGNED_SIZE_TYPE, /* For format checking only.  */
    CTI_STRING_TYPE,
    CTI_CONST_STRING_TYPE,

    CTI_MAX
};

static tree c_global_trees[CTI_MAX];

/* Convenience accessors for the entries of c_global_trees.  */
#define signed_size_type_node c_global_trees[CTI_SIGNED_SIZE_TYPE]
#define string_type_node c_global_trees[CTI_STRING_TYPE]
#define const_string_type_node c_global_trees[CTI_CONST_STRING_TYPE]

/* ??? In addition to some attribute handlers, we currently don't support a
   (small) number of builtin-types, which in turn inhibits support for a
   number of builtin functions.  */
#define wint_type_node    void_type_node
#define intmax_type_node  void_type_node
#define uintmax_type_node void_type_node
5688
5689 /* Build the void_list_node (void_type_node having been created). */
5690
5691 static tree
5692 build_void_list_node (void)
5693 {
5694 tree t = build_tree_list (NULL_TREE, void_type_node);
5695 return t;
5696 }
5697
5698 /* Used to help initialize the builtin-types.def table. When a type of
5699 the correct size doesn't exist, use error_mark_node instead of NULL.
5700 The later results in segfaults even when a decl using the type doesn't
5701 get invoked. */
5702
5703 static tree
5704 builtin_type_for_size (int size, bool unsignedp)
5705 {
5706 tree type = gnat_type_for_size (size, unsignedp);
5707 return type ? type : error_mark_node;
5708 }
5709
/* Build/push the elementary type decls that builtin functions/types
   will need.  */

static void
install_builtin_elementary_types (void)
{
  /* The signed counterpart of size_t, used by format-checking builtins.  */
  signed_size_type_node = gnat_signed_type (size_type_node);
  pid_type_node = integer_type_node;
  void_list_node = build_void_list_node ();

  /* "char *" and "const char *" for string-taking builtins.  */
  string_type_node = build_pointer_type (char_type_node);
  const_string_type_node
    = build_pointer_type (build_qualified_type
			  (char_type_node, TYPE_QUAL_CONST));
}
5725
5726 /* ----------------------------------------------------------------------- *
5727 * BUILTIN FUNCTION TYPES *
5728 * ----------------------------------------------------------------------- */
5729
/* Now, builtin function types per se.  */

/* Enumerate every function-type signature needed by the builtins, by
   expanding builtin-types.def once with each DEF_* macro mapped onto a
   bare enumerator name.  */
enum c_builtin_type
{
#define DEF_PRIMITIVE_TYPE(NAME, VALUE) NAME,
#define DEF_FUNCTION_TYPE_0(NAME, RETURN) NAME,
#define DEF_FUNCTION_TYPE_1(NAME, RETURN, ARG1) NAME,
#define DEF_FUNCTION_TYPE_2(NAME, RETURN, ARG1, ARG2) NAME,
#define DEF_FUNCTION_TYPE_3(NAME, RETURN, ARG1, ARG2, ARG3) NAME,
#define DEF_FUNCTION_TYPE_4(NAME, RETURN, ARG1, ARG2, ARG3, ARG4) NAME,
#define DEF_FUNCTION_TYPE_5(NAME, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5) NAME,
#define DEF_FUNCTION_TYPE_6(NAME, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, ARG6) NAME,
#define DEF_FUNCTION_TYPE_7(NAME, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, ARG6, ARG7) NAME,
#define DEF_FUNCTION_TYPE_VAR_0(NAME, RETURN) NAME,
#define DEF_FUNCTION_TYPE_VAR_1(NAME, RETURN, ARG1) NAME,
#define DEF_FUNCTION_TYPE_VAR_2(NAME, RETURN, ARG1, ARG2) NAME,
#define DEF_FUNCTION_TYPE_VAR_3(NAME, RETURN, ARG1, ARG2, ARG3) NAME,
#define DEF_FUNCTION_TYPE_VAR_4(NAME, RETURN, ARG1, ARG2, ARG3, ARG4) NAME,
#define DEF_FUNCTION_TYPE_VAR_5(NAME, RETURN, ARG1, ARG2, ARG3, ARG4, ARG6) \
  NAME,
#define DEF_POINTER_TYPE(NAME, TYPE) NAME,
#include "builtin-types.def"
#undef DEF_PRIMITIVE_TYPE
#undef DEF_FUNCTION_TYPE_0
#undef DEF_FUNCTION_TYPE_1
#undef DEF_FUNCTION_TYPE_2
#undef DEF_FUNCTION_TYPE_3
#undef DEF_FUNCTION_TYPE_4
#undef DEF_FUNCTION_TYPE_5
#undef DEF_FUNCTION_TYPE_6
#undef DEF_FUNCTION_TYPE_7
#undef DEF_FUNCTION_TYPE_VAR_0
#undef DEF_FUNCTION_TYPE_VAR_1
#undef DEF_FUNCTION_TYPE_VAR_2
#undef DEF_FUNCTION_TYPE_VAR_3
#undef DEF_FUNCTION_TYPE_VAR_4
#undef DEF_FUNCTION_TYPE_VAR_5
#undef DEF_POINTER_TYPE
  BT_LAST
};

typedef enum c_builtin_type builtin_type;

/* A temporary array used in communication with def_fn_type.  */
static GTY(()) tree builtin_types[(int) BT_LAST + 1];
5775
5776 /* A helper function for install_builtin_types. Build function type
5777 for DEF with return type RET and N arguments. If VAR is true, then the
5778 function should be variadic after those N arguments.
5779
5780 Takes special care not to ICE if any of the types involved are
5781 error_mark_node, which indicates that said type is not in fact available
5782 (see builtin_type_for_size). In which case the function type as a whole
5783 should be error_mark_node. */
5784
5785 static void
5786 def_fn_type (builtin_type def, builtin_type ret, bool var, int n, ...)
5787 {
5788 tree t;
5789 tree *args = XALLOCAVEC (tree, n);
5790 va_list list;
5791 int i;
5792
5793 va_start (list, n);
5794 for (i = 0; i < n; ++i)
5795 {
5796 builtin_type a = (builtin_type) va_arg (list, int);
5797 t = builtin_types[a];
5798 if (t == error_mark_node)
5799 goto egress;
5800 args[i] = t;
5801 }
5802
5803 t = builtin_types[ret];
5804 if (t == error_mark_node)
5805 goto egress;
5806 if (var)
5807 t = build_varargs_function_type_array (t, n, args);
5808 else
5809 t = build_function_type_array (t, n, args);
5810
5811 egress:
5812 builtin_types[def] = t;
5813 va_end (list);
5814 }
5815
5816 /* Build the builtin function types and install them in the builtin_types
5817 array for later use in builtin function decls. */
5818
5819 static void
5820 install_builtin_function_types (void)
5821 {
5822 tree va_list_ref_type_node;
5823 tree va_list_arg_type_node;
5824
5825 if (TREE_CODE (va_list_type_node) == ARRAY_TYPE)
5826 {
5827 va_list_arg_type_node = va_list_ref_type_node =
5828 build_pointer_type (TREE_TYPE (va_list_type_node));
5829 }
5830 else
5831 {
5832 va_list_arg_type_node = va_list_type_node;
5833 va_list_ref_type_node = build_reference_type (va_list_type_node);
5834 }
5835
5836 #define DEF_PRIMITIVE_TYPE(ENUM, VALUE) \
5837 builtin_types[ENUM] = VALUE;
5838 #define DEF_FUNCTION_TYPE_0(ENUM, RETURN) \
5839 def_fn_type (ENUM, RETURN, 0, 0);
5840 #define DEF_FUNCTION_TYPE_1(ENUM, RETURN, ARG1) \
5841 def_fn_type (ENUM, RETURN, 0, 1, ARG1);
5842 #define DEF_FUNCTION_TYPE_2(ENUM, RETURN, ARG1, ARG2) \
5843 def_fn_type (ENUM, RETURN, 0, 2, ARG1, ARG2);
5844 #define DEF_FUNCTION_TYPE_3(ENUM, RETURN, ARG1, ARG2, ARG3) \
5845 def_fn_type (ENUM, RETURN, 0, 3, ARG1, ARG2, ARG3);
5846 #define DEF_FUNCTION_TYPE_4(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4) \
5847 def_fn_type (ENUM, RETURN, 0, 4, ARG1, ARG2, ARG3, ARG4);
5848 #define DEF_FUNCTION_TYPE_5(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5) \
5849 def_fn_type (ENUM, RETURN, 0, 5, ARG1, ARG2, ARG3, ARG4, ARG5);
5850 #define DEF_FUNCTION_TYPE_6(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \
5851 ARG6) \
5852 def_fn_type (ENUM, RETURN, 0, 6, ARG1, ARG2, ARG3, ARG4, ARG5, ARG6);
5853 #define DEF_FUNCTION_TYPE_7(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \
5854 ARG6, ARG7) \
5855 def_fn_type (ENUM, RETURN, 0, 7, ARG1, ARG2, ARG3, ARG4, ARG5, ARG6, ARG7);
5856 #define DEF_FUNCTION_TYPE_VAR_0(ENUM, RETURN) \
5857 def_fn_type (ENUM, RETURN, 1, 0);
5858 #define DEF_FUNCTION_TYPE_VAR_1(ENUM, RETURN, ARG1) \
5859 def_fn_type (ENUM, RETURN, 1, 1, ARG1);
5860 #define DEF_FUNCTION_TYPE_VAR_2(ENUM, RETURN, ARG1, ARG2) \
5861 def_fn_type (ENUM, RETURN, 1, 2, ARG1, ARG2);
5862 #define DEF_FUNCTION_TYPE_VAR_3(ENUM, RETURN, ARG1, ARG2, ARG3) \
5863 def_fn_type (ENUM, RETURN, 1, 3, ARG1, ARG2, ARG3);
5864 #define DEF_FUNCTION_TYPE_VAR_4(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4) \
5865 def_fn_type (ENUM, RETURN, 1, 4, ARG1, ARG2, ARG3, ARG4);
5866 #define DEF_FUNCTION_TYPE_VAR_5(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5) \
5867 def_fn_type (ENUM, RETURN, 1, 5, ARG1, ARG2, ARG3, ARG4, ARG5);
5868 #define DEF_POINTER_TYPE(ENUM, TYPE) \
5869 builtin_types[(int) ENUM] = build_pointer_type (builtin_types[(int) TYPE]);
5870
5871 #include "builtin-types.def"
5872
5873 #undef DEF_PRIMITIVE_TYPE
5874 #undef DEF_FUNCTION_TYPE_1
5875 #undef DEF_FUNCTION_TYPE_2
5876 #undef DEF_FUNCTION_TYPE_3
5877 #undef DEF_FUNCTION_TYPE_4
5878 #undef DEF_FUNCTION_TYPE_5
5879 #undef DEF_FUNCTION_TYPE_6
5880 #undef DEF_FUNCTION_TYPE_VAR_0
5881 #undef DEF_FUNCTION_TYPE_VAR_1
5882 #undef DEF_FUNCTION_TYPE_VAR_2
5883 #undef DEF_FUNCTION_TYPE_VAR_3
5884 #undef DEF_FUNCTION_TYPE_VAR_4
5885 #undef DEF_FUNCTION_TYPE_VAR_5
5886 #undef DEF_POINTER_TYPE
5887 builtin_types[(int) BT_LAST] = NULL_TREE;
5888 }
5889
5890 /* ----------------------------------------------------------------------- *
5891 * BUILTIN ATTRIBUTES *
5892 * ----------------------------------------------------------------------- */
5893
/* Enumerate the attribute trees built below, by expanding
   builtin-attrs.def with each DEF_ATTR_* macro mapped onto a bare
   enumerator name.  */
enum built_in_attribute
{
#define DEF_ATTR_NULL_TREE(ENUM) ENUM,
#define DEF_ATTR_INT(ENUM, VALUE) ENUM,
#define DEF_ATTR_STRING(ENUM, VALUE) ENUM,
#define DEF_ATTR_IDENT(ENUM, STRING) ENUM,
#define DEF_ATTR_TREE_LIST(ENUM, PURPOSE, VALUE, CHAIN) ENUM,
#include "builtin-attrs.def"
#undef DEF_ATTR_NULL_TREE
#undef DEF_ATTR_INT
#undef DEF_ATTR_STRING
#undef DEF_ATTR_IDENT
#undef DEF_ATTR_TREE_LIST
  ATTR_LAST
};

/* The attribute trees themselves, indexed by built_in_attribute.  */
static GTY(()) tree built_in_attributes[(int) ATTR_LAST];
5911
/* Build the attribute trees used by the builtin function decls, by
   expanding builtin-attrs.def with each DEF_ATTR_* macro mapped onto
   the corresponding tree construction.  */
static void
install_builtin_attributes (void)
{
  /* Fill in the built_in_attributes array.  */
#define DEF_ATTR_NULL_TREE(ENUM)				\
  built_in_attributes[(int) ENUM] = NULL_TREE;
#define DEF_ATTR_INT(ENUM, VALUE)				\
  built_in_attributes[(int) ENUM] = build_int_cst (NULL_TREE, VALUE);
#define DEF_ATTR_STRING(ENUM, VALUE)				\
  built_in_attributes[(int) ENUM] = build_string (strlen (VALUE), VALUE);
#define DEF_ATTR_IDENT(ENUM, STRING)				\
  built_in_attributes[(int) ENUM] = get_identifier (STRING);
#define DEF_ATTR_TREE_LIST(ENUM, PURPOSE, VALUE, CHAIN)	\
  built_in_attributes[(int) ENUM]			\
    = tree_cons (built_in_attributes[(int) PURPOSE],	\
		 built_in_attributes[(int) VALUE],	\
		 built_in_attributes[(int) CHAIN]);
#include "builtin-attrs.def"
#undef DEF_ATTR_NULL_TREE
#undef DEF_ATTR_INT
#undef DEF_ATTR_STRING
#undef DEF_ATTR_IDENT
#undef DEF_ATTR_TREE_LIST
}
5936
5937 /* Handle a "const" attribute; arguments as in
5938 struct attribute_spec.handler. */
5939
5940 static tree
5941 handle_const_attribute (tree *node, tree ARG_UNUSED (name),
5942 tree ARG_UNUSED (args), int ARG_UNUSED (flags),
5943 bool *no_add_attrs)
5944 {
5945 if (TREE_CODE (*node) == FUNCTION_DECL)
5946 TREE_READONLY (*node) = 1;
5947 else
5948 *no_add_attrs = true;
5949
5950 return NULL_TREE;
5951 }
5952
5953 /* Handle a "nothrow" attribute; arguments as in
5954 struct attribute_spec.handler. */
5955
5956 static tree
5957 handle_nothrow_attribute (tree *node, tree ARG_UNUSED (name),
5958 tree ARG_UNUSED (args), int ARG_UNUSED (flags),
5959 bool *no_add_attrs)
5960 {
5961 if (TREE_CODE (*node) == FUNCTION_DECL)
5962 TREE_NOTHROW (*node) = 1;
5963 else
5964 *no_add_attrs = true;
5965
5966 return NULL_TREE;
5967 }
5968
5969 /* Handle a "pure" attribute; arguments as in
5970 struct attribute_spec.handler. */
5971
5972 static tree
5973 handle_pure_attribute (tree *node, tree name, tree ARG_UNUSED (args),
5974 int ARG_UNUSED (flags), bool *no_add_attrs)
5975 {
5976 if (TREE_CODE (*node) == FUNCTION_DECL)
5977 DECL_PURE_P (*node) = 1;
5978 /* ??? TODO: Support types. */
5979 else
5980 {
5981 warning (OPT_Wattributes, "%qs attribute ignored",
5982 IDENTIFIER_POINTER (name));
5983 *no_add_attrs = true;
5984 }
5985
5986 return NULL_TREE;
5987 }
5988
5989 /* Handle a "no vops" attribute; arguments as in
5990 struct attribute_spec.handler. */
5991
5992 static tree
5993 handle_novops_attribute (tree *node, tree ARG_UNUSED (name),
5994 tree ARG_UNUSED (args), int ARG_UNUSED (flags),
5995 bool *ARG_UNUSED (no_add_attrs))
5996 {
5997 gcc_assert (TREE_CODE (*node) == FUNCTION_DECL);
5998 DECL_IS_NOVOPS (*node) = 1;
5999 return NULL_TREE;
6000 }
6001
/* Helper for nonnull attribute handling; fetch the operand number
   from the attribute argument list.  Store it in *VALP and return
   true on success, or return false if ARG_NUM_EXPR is not a constant
   that fits in a HOST_WIDE_INT.  */

static bool
get_nonnull_operand (tree arg_num_expr, unsigned HOST_WIDE_INT *valp)
{
  /* Verify the arg number is a constant.  */
  if (TREE_CODE (arg_num_expr) != INTEGER_CST
      || TREE_INT_CST_HIGH (arg_num_expr) != 0)
    return false;

  *valp = TREE_INT_CST_LOW (arg_num_expr);
  return true;
}
6016
6017 /* Handle the "nonnull" attribute. */
6018 static tree
6019 handle_nonnull_attribute (tree *node, tree ARG_UNUSED (name),
6020 tree args, int ARG_UNUSED (flags),
6021 bool *no_add_attrs)
6022 {
6023 tree type = *node;
6024 unsigned HOST_WIDE_INT attr_arg_num;
6025
6026 /* If no arguments are specified, all pointer arguments should be
6027 non-null. Verify a full prototype is given so that the arguments
6028 will have the correct types when we actually check them later. */
6029 if (!args)
6030 {
6031 if (!prototype_p (type))
6032 {
6033 error ("nonnull attribute without arguments on a non-prototype");
6034 *no_add_attrs = true;
6035 }
6036 return NULL_TREE;
6037 }
6038
6039 /* Argument list specified. Verify that each argument number references
6040 a pointer argument. */
6041 for (attr_arg_num = 1; args; args = TREE_CHAIN (args))
6042 {
6043 unsigned HOST_WIDE_INT arg_num = 0, ck_num;
6044
6045 if (!get_nonnull_operand (TREE_VALUE (args), &arg_num))
6046 {
6047 error ("nonnull argument has invalid operand number (argument %lu)",
6048 (unsigned long) attr_arg_num);
6049 *no_add_attrs = true;
6050 return NULL_TREE;
6051 }
6052
6053 if (prototype_p (type))
6054 {
6055 function_args_iterator iter;
6056 tree argument;
6057
6058 function_args_iter_init (&iter, type);
6059 for (ck_num = 1; ; ck_num++, function_args_iter_next (&iter))
6060 {
6061 argument = function_args_iter_cond (&iter);
6062 if (!argument || ck_num == arg_num)
6063 break;
6064 }
6065
6066 if (!argument
6067 || TREE_CODE (argument) == VOID_TYPE)
6068 {
6069 error ("nonnull argument with out-of-range operand number "
6070 "(argument %lu, operand %lu)",
6071 (unsigned long) attr_arg_num, (unsigned long) arg_num);
6072 *no_add_attrs = true;
6073 return NULL_TREE;
6074 }
6075
6076 if (TREE_CODE (argument) != POINTER_TYPE)
6077 {
6078 error ("nonnull argument references non-pointer operand "
6079 "(argument %lu, operand %lu)",
6080 (unsigned long) attr_arg_num, (unsigned long) arg_num);
6081 *no_add_attrs = true;
6082 return NULL_TREE;
6083 }
6084 }
6085 }
6086
6087 return NULL_TREE;
6088 }
6089
6090 /* Handle a "sentinel" attribute. */
6091
6092 static tree
6093 handle_sentinel_attribute (tree *node, tree name, tree args,
6094 int ARG_UNUSED (flags), bool *no_add_attrs)
6095 {
6096 if (!prototype_p (*node))
6097 {
6098 warning (OPT_Wattributes,
6099 "%qs attribute requires prototypes with named arguments",
6100 IDENTIFIER_POINTER (name));
6101 *no_add_attrs = true;
6102 }
6103 else
6104 {
6105 if (!stdarg_p (*node))
6106 {
6107 warning (OPT_Wattributes,
6108 "%qs attribute only applies to variadic functions",
6109 IDENTIFIER_POINTER (name));
6110 *no_add_attrs = true;
6111 }
6112 }
6113
6114 if (args)
6115 {
6116 tree position = TREE_VALUE (args);
6117
6118 if (TREE_CODE (position) != INTEGER_CST)
6119 {
6120 warning (0, "requested position is not an integer constant");
6121 *no_add_attrs = true;
6122 }
6123 else
6124 {
6125 if (tree_int_cst_lt (position, integer_zero_node))
6126 {
6127 warning (0, "requested position is less than zero");
6128 *no_add_attrs = true;
6129 }
6130 }
6131 }
6132
6133 return NULL_TREE;
6134 }
6135
6136 /* Handle a "noreturn" attribute; arguments as in
6137 struct attribute_spec.handler. */
6138
6139 static tree
6140 handle_noreturn_attribute (tree *node, tree name, tree ARG_UNUSED (args),
6141 int ARG_UNUSED (flags), bool *no_add_attrs)
6142 {
6143 tree type = TREE_TYPE (*node);
6144
6145 /* See FIXME comment in c_common_attribute_table. */
6146 if (TREE_CODE (*node) == FUNCTION_DECL)
6147 TREE_THIS_VOLATILE (*node) = 1;
6148 else if (TREE_CODE (type) == POINTER_TYPE
6149 && TREE_CODE (TREE_TYPE (type)) == FUNCTION_TYPE)
6150 TREE_TYPE (*node)
6151 = build_pointer_type
6152 (build_type_variant (TREE_TYPE (type),
6153 TYPE_READONLY (TREE_TYPE (type)), 1));
6154 else
6155 {
6156 warning (OPT_Wattributes, "%qs attribute ignored",
6157 IDENTIFIER_POINTER (name));
6158 *no_add_attrs = true;
6159 }
6160
6161 return NULL_TREE;
6162 }
6163
6164 /* Handle a "leaf" attribute; arguments as in
6165 struct attribute_spec.handler. */
6166
6167 static tree
6168 handle_leaf_attribute (tree *node, tree name,
6169 tree ARG_UNUSED (args),
6170 int ARG_UNUSED (flags), bool *no_add_attrs)
6171 {
6172 if (TREE_CODE (*node) != FUNCTION_DECL)
6173 {
6174 warning (OPT_Wattributes, "%qE attribute ignored", name);
6175 *no_add_attrs = true;
6176 }
6177 if (!TREE_PUBLIC (*node))
6178 {
6179 warning (OPT_Wattributes, "%qE attribute has no effect", name);
6180 *no_add_attrs = true;
6181 }
6182
6183 return NULL_TREE;
6184 }
6185
6186 /* Handle a "malloc" attribute; arguments as in
6187 struct attribute_spec.handler. */
6188
6189 static tree
6190 handle_malloc_attribute (tree *node, tree name, tree ARG_UNUSED (args),
6191 int ARG_UNUSED (flags), bool *no_add_attrs)
6192 {
6193 if (TREE_CODE (*node) == FUNCTION_DECL
6194 && POINTER_TYPE_P (TREE_TYPE (TREE_TYPE (*node))))
6195 DECL_IS_MALLOC (*node) = 1;
6196 else
6197 {
6198 warning (OPT_Wattributes, "%qs attribute ignored",
6199 IDENTIFIER_POINTER (name));
6200 *no_add_attrs = true;
6201 }
6202
6203 return NULL_TREE;
6204 }
6205
/* Fake handler for attributes we don't properly support.  Accepts the
   attribute without acting on it so the declaration is not rejected.  */

tree
fake_attribute_handler (tree * ARG_UNUSED (node),
			tree ARG_UNUSED (name),
			tree ARG_UNUSED (args),
			int ARG_UNUSED (flags),
			bool * ARG_UNUSED (no_add_attrs))
{
  return NULL_TREE;
}
6217
6218 /* Handle a "type_generic" attribute. */
6219
6220 static tree
6221 handle_type_generic_attribute (tree *node, tree ARG_UNUSED (name),
6222 tree ARG_UNUSED (args), int ARG_UNUSED (flags),
6223 bool * ARG_UNUSED (no_add_attrs))
6224 {
6225 /* Ensure we have a function type. */
6226 gcc_assert (TREE_CODE (*node) == FUNCTION_TYPE);
6227
6228 /* Ensure we have a variadic function. */
6229 gcc_assert (!prototype_p (*node) || stdarg_p (*node));
6230
6231 return NULL_TREE;
6232 }
6233
6234 /* Handle a "vector_size" attribute; arguments as in
6235 struct attribute_spec.handler. */
6236
6237 static tree
6238 handle_vector_size_attribute (tree *node, tree name, tree args,
6239 int ARG_UNUSED (flags),
6240 bool *no_add_attrs)
6241 {
6242 unsigned HOST_WIDE_INT vecsize, nunits;
6243 enum machine_mode orig_mode;
6244 tree type = *node, new_type, size;
6245
6246 *no_add_attrs = true;
6247
6248 size = TREE_VALUE (args);
6249
6250 if (!host_integerp (size, 1))
6251 {
6252 warning (OPT_Wattributes, "%qs attribute ignored",
6253 IDENTIFIER_POINTER (name));
6254 return NULL_TREE;
6255 }
6256
6257 /* Get the vector size (in bytes). */
6258 vecsize = tree_low_cst (size, 1);
6259
6260 /* We need to provide for vector pointers, vector arrays, and
6261 functions returning vectors. For example:
6262
6263 __attribute__((vector_size(16))) short *foo;
6264
6265 In this case, the mode is SI, but the type being modified is
6266 HI, so we need to look further. */
6267
6268 while (POINTER_TYPE_P (type)
6269 || TREE_CODE (type) == FUNCTION_TYPE
6270 || TREE_CODE (type) == ARRAY_TYPE)
6271 type = TREE_TYPE (type);
6272
6273 /* Get the mode of the type being modified. */
6274 orig_mode = TYPE_MODE (type);
6275
6276 if ((!INTEGRAL_TYPE_P (type)
6277 && !SCALAR_FLOAT_TYPE_P (type)
6278 && !FIXED_POINT_TYPE_P (type))
6279 || (!SCALAR_FLOAT_MODE_P (orig_mode)
6280 && GET_MODE_CLASS (orig_mode) != MODE_INT
6281 && !ALL_SCALAR_FIXED_POINT_MODE_P (orig_mode))
6282 || !host_integerp (TYPE_SIZE_UNIT (type), 1)
6283 || TREE_CODE (type) == BOOLEAN_TYPE)
6284 {
6285 error ("invalid vector type for attribute %qs",
6286 IDENTIFIER_POINTER (name));
6287 return NULL_TREE;
6288 }
6289
6290 if (vecsize % tree_low_cst (TYPE_SIZE_UNIT (type), 1))
6291 {
6292 error ("vector size not an integral multiple of component size");
6293 return NULL;
6294 }
6295
6296 if (vecsize == 0)
6297 {
6298 error ("zero vector size");
6299 return NULL;
6300 }
6301
6302 /* Calculate how many units fit in the vector. */
6303 nunits = vecsize / tree_low_cst (TYPE_SIZE_UNIT (type), 1);
6304 if (nunits & (nunits - 1))
6305 {
6306 error ("number of components of the vector not a power of two");
6307 return NULL_TREE;
6308 }
6309
6310 new_type = build_vector_type (type, nunits);
6311
6312 /* Build back pointers if needed. */
6313 *node = reconstruct_complex_type (*node, new_type);
6314
6315 return NULL_TREE;
6316 }
6317
6318 /* Handle a "vector_type" attribute; arguments as in
6319 struct attribute_spec.handler. */
6320
6321 static tree
6322 handle_vector_type_attribute (tree *node, tree name, tree ARG_UNUSED (args),
6323 int ARG_UNUSED (flags),
6324 bool *no_add_attrs)
6325 {
6326 /* Vector representative type and size. */
6327 tree rep_type = *node;
6328 tree rep_size = TYPE_SIZE_UNIT (rep_type);
6329 tree rep_name;
6330
6331 /* Vector size in bytes and number of units. */
6332 unsigned HOST_WIDE_INT vec_bytes, vec_units;
6333
6334 /* Vector element type and mode. */
6335 tree elem_type;
6336 enum machine_mode elem_mode;
6337
6338 *no_add_attrs = true;
6339
6340 /* Get the representative array type, possibly nested within a
6341 padding record e.g. for alignment purposes. */
6342
6343 if (TYPE_IS_PADDING_P (rep_type))
6344 rep_type = TREE_TYPE (TYPE_FIELDS (rep_type));
6345
6346 if (TREE_CODE (rep_type) != ARRAY_TYPE)
6347 {
6348 error ("attribute %qs applies to array types only",
6349 IDENTIFIER_POINTER (name));
6350 return NULL_TREE;
6351 }
6352
6353 /* Silently punt on variable sizes. We can't make vector types for them,
6354 need to ignore them on front-end generated subtypes of unconstrained
6355 bases, and this attribute is for binding implementors, not end-users, so
6356 we should never get there from legitimate explicit uses. */
6357
6358 if (!host_integerp (rep_size, 1))
6359 return NULL_TREE;
6360
6361 /* Get the element type/mode and check this is something we know
6362 how to make vectors of. */
6363
6364 elem_type = TREE_TYPE (rep_type);
6365 elem_mode = TYPE_MODE (elem_type);
6366
6367 if ((!INTEGRAL_TYPE_P (elem_type)
6368 && !SCALAR_FLOAT_TYPE_P (elem_type)
6369 && !FIXED_POINT_TYPE_P (elem_type))
6370 || (!SCALAR_FLOAT_MODE_P (elem_mode)
6371 && GET_MODE_CLASS (elem_mode) != MODE_INT
6372 && !ALL_SCALAR_FIXED_POINT_MODE_P (elem_mode))
6373 || !host_integerp (TYPE_SIZE_UNIT (elem_type), 1))
6374 {
6375 error ("invalid element type for attribute %qs",
6376 IDENTIFIER_POINTER (name));
6377 return NULL_TREE;
6378 }
6379
6380 /* Sanity check the vector size and element type consistency. */
6381
6382 vec_bytes = tree_low_cst (rep_size, 1);
6383
6384 if (vec_bytes % tree_low_cst (TYPE_SIZE_UNIT (elem_type), 1))
6385 {
6386 error ("vector size not an integral multiple of component size");
6387 return NULL;
6388 }
6389
6390 if (vec_bytes == 0)
6391 {
6392 error ("zero vector size");
6393 return NULL;
6394 }
6395
6396 vec_units = vec_bytes / tree_low_cst (TYPE_SIZE_UNIT (elem_type), 1);
6397 if (vec_units & (vec_units - 1))
6398 {
6399 error ("number of components of the vector not a power of two");
6400 return NULL_TREE;
6401 }
6402
6403 /* Build the vector type and replace. */
6404
6405 *node = build_vector_type (elem_type, vec_units);
6406 rep_name = TYPE_NAME (rep_type);
6407 if (TREE_CODE (rep_name) == TYPE_DECL)
6408 rep_name = DECL_NAME (rep_name);
6409 TYPE_NAME (*node) = rep_name;
6410 TYPE_REPRESENTATIVE_ARRAY (*node) = rep_type;
6411
6412 return NULL_TREE;
6413 }
6414
6415 /* ----------------------------------------------------------------------- *
6416 * BUILTIN FUNCTIONS *
6417 * ----------------------------------------------------------------------- */
6418
6419 /* Worker for DEF_BUILTIN. Possibly define a builtin function with one or two
6420 names. Does not declare a non-__builtin_ function if flag_no_builtin, or
6421 if nonansi_p and flag_no_nonansi_builtin. */
6422
6423 static void
6424 def_builtin_1 (enum built_in_function fncode,
6425 const char *name,
6426 enum built_in_class fnclass,
6427 tree fntype, tree libtype,
6428 bool both_p, bool fallback_p,
6429 bool nonansi_p ATTRIBUTE_UNUSED,
6430 tree fnattrs, bool implicit_p)
6431 {
6432 tree decl;
6433 const char *libname;
6434
6435 /* Preserve an already installed decl. It most likely was setup in advance
6436 (e.g. as part of the internal builtins) for specific reasons. */
6437 if (builtin_decl_explicit (fncode) != NULL_TREE)
6438 return;
6439
6440 gcc_assert ((!both_p && !fallback_p)
6441 || !strncmp (name, "__builtin_",
6442 strlen ("__builtin_")));
6443
6444 libname = name + strlen ("__builtin_");
6445 decl = add_builtin_function (name, fntype, fncode, fnclass,
6446 (fallback_p ? libname : NULL),
6447 fnattrs);
6448 if (both_p)
6449 /* ??? This is normally further controlled by command-line options
6450 like -fno-builtin, but we don't have them for Ada. */
6451 add_builtin_function (libname, libtype, fncode, fnclass,
6452 NULL, fnattrs);
6453
6454 set_builtin_decl (fncode, decl, implicit_p);
6455 }
6456
/* C dialect flags, presumably tested by the COND arguments of entries in
   builtins.def expanded below -- TODO(review): confirm.  Neither C94 nor
   C99 applies to the Ada front-end, so both stay zero.  */
static int flag_isoc94 = 0;
static int flag_isoc99 = 0;
6459
/* Install what the common builtins.def offers.  */

static void
install_builtin_functions (void)
{
  /* DEF_BUILTIN is expanded once per entry of builtins.def by the #include
     below.  Each expansion calls def_builtin_1 with the function/attribute
     type tables built beforehand; entries with a null NAME or a false COND
     are skipped.  */
#define DEF_BUILTIN(ENUM, NAME, CLASS, TYPE, LIBTYPE, BOTH_P, FALLBACK_P, \
		    NONANSI_P, ATTRS, IMPLICIT, COND)			\
  if (NAME && COND)							\
    def_builtin_1 (ENUM, NAME, CLASS,					\
		   builtin_types[(int) TYPE],				\
		   builtin_types[(int) LIBTYPE],			\
		   BOTH_P, FALLBACK_P, NONANSI_P,			\
		   built_in_attributes[(int) ATTRS], IMPLICIT);
#include "builtins.def"
#undef DEF_BUILTIN
}
6476
6477 /* ----------------------------------------------------------------------- *
6478 * BUILTIN FUNCTIONS *
6479 * ----------------------------------------------------------------------- */
6480
/* Install the builtin functions we might need.  Main entry point for
   builtin setup in the Ada front-end: sets up the elementary types,
   function types and attribute tables, then registers the builtin
   declarations themselves.  The registration order below is significant.  */

void
gnat_install_builtins (void)
{
  install_builtin_elementary_types ();
  install_builtin_function_types ();
  install_builtin_attributes ();

  /* Install builtins used by generic middle-end pieces first.  Some of these
     know about internal specificities and control attributes accordingly, for
     instance __builtin_alloca vs no-throw and -fstack-check.  We will ignore
     the generic definition from builtins.def.  */
  build_common_builtin_nodes ();

  /* Now, install the target specific builtins, such as the AltiVec family on
     ppc, and the common set as exposed by builtins.def.  */
  targetm.init_builtins ();
  install_builtin_functions ();
}
6501
6502 #include "gt-ada-utils.h"
6503 #include "gtype-ada.h"