utils.c (finish_record_type): Return early for padded types and tidy up.
[gcc.git] / gcc / ada / gcc-interface / utils.c
1 /****************************************************************************
2 * *
3 * GNAT COMPILER COMPONENTS *
4 * *
5 * U T I L S *
6 * *
7 * C Implementation File *
8 * *
9 * Copyright (C) 1992-2012, Free Software Foundation, Inc. *
10 * *
11 * GNAT is free software; you can redistribute it and/or modify it under *
12 * terms of the GNU General Public License as published by the Free Soft- *
13 * ware Foundation; either version 3, or (at your option) any later ver- *
14 * sion. GNAT is distributed in the hope that it will be useful, but WITH- *
15 * OUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY *
16 * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License *
17 * for more details. You should have received a copy of the GNU General *
18 * Public License along with GCC; see the file COPYING3. If not see *
19 * <http://www.gnu.org/licenses/>. *
20 * *
21 * GNAT was originally developed by the GNAT team at New York University. *
22 * Extensive contributions were provided by Ada Core Technologies Inc. *
23 * *
24 ****************************************************************************/
25
26 #include "config.h"
27 #include "system.h"
28 #include "coretypes.h"
29 #include "tm.h"
30 #include "tree.h"
31 #include "flags.h"
32 #include "toplev.h"
33 #include "diagnostic-core.h"
34 #include "output.h"
35 #include "ggc.h"
36 #include "debug.h"
37 #include "convert.h"
38 #include "target.h"
39 #include "common/common-target.h"
40 #include "langhooks.h"
41 #include "cgraph.h"
42 #include "diagnostic.h"
43 #include "tree-dump.h"
44 #include "tree-inline.h"
45 #include "tree-iterator.h"
46
47 #include "ada.h"
48 #include "types.h"
49 #include "atree.h"
50 #include "elists.h"
51 #include "namet.h"
52 #include "nlists.h"
53 #include "stringt.h"
54 #include "uintp.h"
55 #include "fe.h"
56 #include "sinfo.h"
57 #include "einfo.h"
58 #include "ada-tree.h"
59 #include "gigi.h"
60
61 #ifndef MAX_BITS_PER_WORD
62 #define MAX_BITS_PER_WORD BITS_PER_WORD
63 #endif
64
65 /* If nonzero, pretend we are allocating at global level. */
66 int force_global;
67
68 /* The default alignment of "double" floating-point types, i.e. floating
69 point types whose size is equal to 64 bits, or 0 if this alignment is
70 not specifically capped. */
71 int double_float_alignment;
72
73 /* The default alignment of "double" or larger scalar types, i.e. scalar
74 types whose size is greater or equal to 64 bits, or 0 if this alignment
75 is not specifically capped. */
76 int double_scalar_alignment;
77
78 /* Tree nodes for the various types and decls we create. */
79 tree gnat_std_decls[(int) ADT_LAST];
80
81 /* Functions to call for each of the possible raise reasons. */
82 tree gnat_raise_decls[(int) LAST_REASON_CODE + 1];
83
84 /* Likewise, but with extra info for each of the possible raise reasons. */
85 tree gnat_raise_decls_ext[(int) LAST_REASON_CODE + 1];
86
/* Forward declarations for handlers of attributes.  */
static tree handle_const_attribute (tree *, tree, tree, int, bool *);
static tree handle_nothrow_attribute (tree *, tree, tree, int, bool *);
static tree handle_pure_attribute (tree *, tree, tree, int, bool *);
static tree handle_novops_attribute (tree *, tree, tree, int, bool *);
static tree handle_nonnull_attribute (tree *, tree, tree, int, bool *);
static tree handle_sentinel_attribute (tree *, tree, tree, int, bool *);
static tree handle_noreturn_attribute (tree *, tree, tree, int, bool *);
static tree handle_leaf_attribute (tree *, tree, tree, int, bool *);
static tree handle_malloc_attribute (tree *, tree, tree, int, bool *);
static tree handle_type_generic_attribute (tree *, tree, tree, int, bool *);
static tree handle_vector_size_attribute (tree *, tree, tree, int, bool *);
static tree handle_vector_type_attribute (tree *, tree, tree, int, bool *);

/* Fake handler for attributes we don't properly support, typically because
   they'd require dragging a lot of the common-c front-end circuitry.  */
static tree fake_attribute_handler (tree *, tree, tree, int, bool *);

/* Table of machine-independent internal attributes for Ada.  We support
   this minimal set of attributes to accommodate the needs of builtins.
   The table must be terminated by a NULL-named entry.  */
const struct attribute_spec gnat_internal_attribute_table[] =
{
  /* { name, min_len, max_len, decl_req, type_req, fn_type_req, handler,
       affects_type_identity } */
  { "const",        0, 0,  true,  false, false, handle_const_attribute,
    false },
  { "nothrow",      0, 0,  true,  false, false, handle_nothrow_attribute,
    false },
  { "pure",         0, 0,  true,  false, false, handle_pure_attribute,
    false },
  { "no vops",      0, 0,  true,  false, false, handle_novops_attribute,
    false },
  { "nonnull",      0, -1, false, true,  true,  handle_nonnull_attribute,
    false },
  { "sentinel",     0, 1,  false, true,  true,  handle_sentinel_attribute,
    false },
  { "noreturn",     0, 0,  true,  false, false, handle_noreturn_attribute,
    false },
  { "leaf",         0, 0,  true,  false, false, handle_leaf_attribute,
    false },
  { "malloc",       0, 0,  true,  false, false, handle_malloc_attribute,
    false },
  { "type generic", 0, 0,  false, true,  true,  handle_type_generic_attribute,
    false },

  { "vector_size",  1, 1,  false, true,  false, handle_vector_size_attribute,
    false },
  { "vector_type",  0, 0,  false, true,  false, handle_vector_type_attribute,
    false },
  { "may_alias",    0, 0,  false, true,  false, NULL, false },

  /* ??? format and format_arg are heavy and not supported, which actually
     prevents support for stdio builtins, which we however declare as part
     of the common builtins.def contents.  */
  { "format",     3, 3,  false, true,  true,  fake_attribute_handler, false },
  { "format_arg", 1, 1,  false, true,  true,  fake_attribute_handler, false },

  { NULL,         0, 0, false, false, false, NULL, false }
};
146
/* Associates a GNAT tree node to a GCC tree node.  It is used in
   `save_gnu_tree', `get_gnu_tree' and `present_gnu_tree'.  See documentation
   of `save_gnu_tree' for more info.  */
static GTY((length ("max_gnat_nodes"))) tree *associate_gnat_to_gnu;

/* Accessors for the association table; entities are indexed relative
   to First_Node_Id.  */
#define GET_GNU_TREE(GNAT_ENTITY)	\
  associate_gnat_to_gnu[(GNAT_ENTITY) - First_Node_Id]

#define SET_GNU_TREE(GNAT_ENTITY,VAL)	\
  associate_gnat_to_gnu[(GNAT_ENTITY) - First_Node_Id] = (VAL)

#define PRESENT_GNU_TREE(GNAT_ENTITY)	\
  (associate_gnat_to_gnu[(GNAT_ENTITY) - First_Node_Id] != NULL_TREE)

/* Associates a GNAT entity to a GCC tree node used as a dummy, if any.  */
static GTY((length ("max_gnat_nodes"))) tree *dummy_node_table;

#define GET_DUMMY_NODE(GNAT_ENTITY)	\
  dummy_node_table[(GNAT_ENTITY) - First_Node_Id]

#define SET_DUMMY_NODE(GNAT_ENTITY,VAL)	\
  dummy_node_table[(GNAT_ENTITY) - First_Node_Id] = (VAL)

#define PRESENT_DUMMY_NODE(GNAT_ENTITY)	\
  (dummy_node_table[(GNAT_ENTITY) - First_Node_Id] != NULL_TREE)

/* This variable keeps a table for types for each precision so that we only
   allocate each of them once.  Signed and unsigned types are kept separate.

   Note that these types are only used when fold-const requests something
   special.  Perhaps we should NOT share these types; we'll see how it
   goes later.  */
static GTY(()) tree signed_and_unsigned_types[2 * MAX_BITS_PER_WORD + 1][2];

/* Likewise for float types, but record these by mode.  */
static GTY(()) tree float_types[NUM_MACHINE_MODES];

/* For each binding contour we allocate a binding_level structure to indicate
   the binding depth.  */

struct GTY((chain_next ("%h.chain"))) gnat_binding_level {
  /* The binding level containing this one (the enclosing binding level).  */
  struct gnat_binding_level *chain;
  /* The BLOCK node for this level.  */
  tree block;
  /* If nonzero, the setjmp buffer that needs to be updated for any
     variable-sized definition within this context.  */
  tree jmpbuf_decl;
};

/* The binding level currently in effect.  */
static GTY(()) struct gnat_binding_level *current_binding_level;

/* A chain of gnat_binding_level structures awaiting reuse.  */
static GTY((deletable)) struct gnat_binding_level *free_binding_level;

/* The context to be used for global declarations.  */
static GTY(()) tree global_context;

/* An array of global declarations.  */
static GTY(()) VEC(tree,gc) *global_decls;

/* An array of builtin function declarations.  */
static GTY(()) VEC(tree,gc) *builtin_decls;

/* An array of global renaming pointers.  */
static GTY(()) VEC(tree,gc) *global_renaming_pointers;

/* A chain of unused BLOCK nodes. */
static GTY((deletable)) tree free_block_chain;

/* Local helpers; see their definitions below for details.  */
static tree merge_sizes (tree, tree, tree, bool, bool);
static tree compute_related_constant (tree, tree);
static tree split_plus (tree, tree *);
static tree float_type_for_precision (int, enum machine_mode);
static tree convert_to_fat_pointer (tree, tree);
static bool potential_alignment_gap (tree, tree, tree);
static void process_attributes (tree, struct attrib *);
225 \f
/* Initialize the association of GNAT nodes to GCC trees.  */

void
init_gnat_to_gnu (void)
{
  /* One slot per GNAT node, zeroed so that no association exists yet.  */
  associate_gnat_to_gnu = ggc_alloc_cleared_vec_tree (max_gnat_nodes);
}
233
234 /* GNAT_ENTITY is a GNAT tree node for an entity. Associate GNU_DECL, a GCC
235 tree node, with GNAT_ENTITY. If GNU_DECL is not a ..._DECL node, abort.
236 If NO_CHECK is true, the latter check is suppressed.
237
238 If GNU_DECL is zero, reset a previous association. */
239
240 void
241 save_gnu_tree (Entity_Id gnat_entity, tree gnu_decl, bool no_check)
242 {
243 /* Check that GNAT_ENTITY is not already defined and that it is being set
244 to something which is a decl. If that is not the case, this usually
245 means GNAT_ENTITY is defined twice, but occasionally is due to some
246 Gigi problem. */
247 gcc_assert (!(gnu_decl
248 && (PRESENT_GNU_TREE (gnat_entity)
249 || (!no_check && !DECL_P (gnu_decl)))));
250
251 SET_GNU_TREE (gnat_entity, gnu_decl);
252 }
253
/* GNAT_ENTITY is a GNAT tree node for an entity.  Return the GCC tree node
   that was associated with it.  If there is no such tree node, abort.

   In some cases, such as delayed elaboration or expressions that need to
   be elaborated only once, GNAT_ENTITY is really not an entity.  */

tree
get_gnu_tree (Entity_Id gnat_entity)
{
  /* The association must have been made by save_gnu_tree beforehand.  */
  gcc_assert (PRESENT_GNU_TREE (gnat_entity));
  return GET_GNU_TREE (gnat_entity);
}
266
/* Return nonzero if a GCC tree has been associated with GNAT_ENTITY.  */

bool
present_gnu_tree (Entity_Id gnat_entity)
{
  return PRESENT_GNU_TREE (gnat_entity);
}
274 \f
/* Initialize the association of GNAT nodes to GCC trees as dummies.  */

void
init_dummy_type (void)
{
  /* One slot per GNAT node, zeroed so that no dummy exists yet.  */
  dummy_node_table = ggc_alloc_cleared_vec_tree (max_gnat_nodes);
}
282
283 /* Make a dummy type corresponding to GNAT_TYPE. */
284
285 tree
286 make_dummy_type (Entity_Id gnat_type)
287 {
288 Entity_Id gnat_underlying = Gigi_Equivalent_Type (gnat_type);
289 tree gnu_type;
290
291 /* If there is an equivalent type, get its underlying type. */
292 if (Present (gnat_underlying))
293 gnat_underlying = Gigi_Equivalent_Type (Underlying_Type (gnat_underlying));
294
295 /* If there was no equivalent type (can only happen when just annotating
296 types) or underlying type, go back to the original type. */
297 if (No (gnat_underlying))
298 gnat_underlying = gnat_type;
299
300 /* If it there already a dummy type, use that one. Else make one. */
301 if (PRESENT_DUMMY_NODE (gnat_underlying))
302 return GET_DUMMY_NODE (gnat_underlying);
303
304 /* If this is a record, make a RECORD_TYPE or UNION_TYPE; else make
305 an ENUMERAL_TYPE. */
306 gnu_type = make_node (Is_Record_Type (gnat_underlying)
307 ? tree_code_for_record_type (gnat_underlying)
308 : ENUMERAL_TYPE);
309 TYPE_NAME (gnu_type) = get_entity_name (gnat_type);
310 TYPE_DUMMY_P (gnu_type) = 1;
311 TYPE_STUB_DECL (gnu_type)
312 = create_type_stub_decl (TYPE_NAME (gnu_type), gnu_type);
313 if (Is_By_Reference_Type (gnat_underlying))
314 TYPE_BY_REFERENCE_P (gnu_type) = 1;
315
316 SET_DUMMY_NODE (gnat_underlying, gnu_type);
317
318 return gnu_type;
319 }
320
/* Return the dummy type that was made for GNAT_TYPE, if any.  */

tree
get_dummy_type (Entity_Id gnat_type)
{
  /* NULL_TREE if make_dummy_type was never invoked for GNAT_TYPE.  */
  return GET_DUMMY_NODE (gnat_type);
}
328
329 /* Build dummy fat and thin pointer types whose designated type is specified
330 by GNAT_DESIG_TYPE/GNU_DESIG_TYPE and attach them to the latter. */
331
332 void
333 build_dummy_unc_pointer_types (Entity_Id gnat_desig_type, tree gnu_desig_type)
334 {
335 tree gnu_template_type, gnu_ptr_template, gnu_array_type, gnu_ptr_array;
336 tree gnu_fat_type, fields, gnu_object_type;
337
338 gnu_template_type = make_node (RECORD_TYPE);
339 TYPE_NAME (gnu_template_type) = create_concat_name (gnat_desig_type, "XUB");
340 TYPE_DUMMY_P (gnu_template_type) = 1;
341 gnu_ptr_template = build_pointer_type (gnu_template_type);
342
343 gnu_array_type = make_node (ENUMERAL_TYPE);
344 TYPE_NAME (gnu_array_type) = create_concat_name (gnat_desig_type, "XUA");
345 TYPE_DUMMY_P (gnu_array_type) = 1;
346 gnu_ptr_array = build_pointer_type (gnu_array_type);
347
348 gnu_fat_type = make_node (RECORD_TYPE);
349 /* Build a stub DECL to trigger the special processing for fat pointer types
350 in gnat_pushdecl. */
351 TYPE_NAME (gnu_fat_type)
352 = create_type_stub_decl (create_concat_name (gnat_desig_type, "XUP"),
353 gnu_fat_type);
354 fields = create_field_decl (get_identifier ("P_ARRAY"), gnu_ptr_array,
355 gnu_fat_type, NULL_TREE, NULL_TREE, 0, 0);
356 DECL_CHAIN (fields)
357 = create_field_decl (get_identifier ("P_BOUNDS"), gnu_ptr_template,
358 gnu_fat_type, NULL_TREE, NULL_TREE, 0, 0);
359 finish_fat_pointer_type (gnu_fat_type, fields);
360 SET_TYPE_UNCONSTRAINED_ARRAY (gnu_fat_type, gnu_desig_type);
361 /* Suppress debug info until after the type is completed. */
362 TYPE_DECL_SUPPRESS_DEBUG (TYPE_STUB_DECL (gnu_fat_type)) = 1;
363
364 gnu_object_type = make_node (RECORD_TYPE);
365 TYPE_NAME (gnu_object_type) = create_concat_name (gnat_desig_type, "XUT");
366 TYPE_DUMMY_P (gnu_object_type) = 1;
367
368 TYPE_POINTER_TO (gnu_desig_type) = gnu_fat_type;
369 TYPE_OBJECT_RECORD_TYPE (gnu_desig_type) = gnu_object_type;
370 }
371 \f
372 /* Return true if we are in the global binding level. */
373
374 bool
375 global_bindings_p (void)
376 {
377 return force_global || current_function_decl == NULL_TREE;
378 }
379
380 /* Enter a new binding level. */
381
382 void
383 gnat_pushlevel (void)
384 {
385 struct gnat_binding_level *newlevel = NULL;
386
387 /* Reuse a struct for this binding level, if there is one. */
388 if (free_binding_level)
389 {
390 newlevel = free_binding_level;
391 free_binding_level = free_binding_level->chain;
392 }
393 else
394 newlevel = ggc_alloc_gnat_binding_level ();
395
396 /* Use a free BLOCK, if any; otherwise, allocate one. */
397 if (free_block_chain)
398 {
399 newlevel->block = free_block_chain;
400 free_block_chain = BLOCK_CHAIN (free_block_chain);
401 BLOCK_CHAIN (newlevel->block) = NULL_TREE;
402 }
403 else
404 newlevel->block = make_node (BLOCK);
405
406 /* Point the BLOCK we just made to its parent. */
407 if (current_binding_level)
408 BLOCK_SUPERCONTEXT (newlevel->block) = current_binding_level->block;
409
410 BLOCK_VARS (newlevel->block) = NULL_TREE;
411 BLOCK_SUBBLOCKS (newlevel->block) = NULL_TREE;
412 TREE_USED (newlevel->block) = 1;
413
414 /* Add this level to the front of the chain (stack) of active levels. */
415 newlevel->chain = current_binding_level;
416 newlevel->jmpbuf_decl = NULL_TREE;
417 current_binding_level = newlevel;
418 }
419
/* Set SUPERCONTEXT of the BLOCK for the current binding level to FNDECL
   and point FNDECL to this BLOCK.  */

void
set_current_block_context (tree fndecl)
{
  BLOCK_SUPERCONTEXT (current_binding_level->block) = fndecl;
  /* The outermost BLOCK of a function is recorded as its DECL_INITIAL.  */
  DECL_INITIAL (fndecl) = current_binding_level->block;
  set_block_for_group (current_binding_level->block);
}
430
/* Set the jmpbuf_decl for the current binding level to DECL.  */

void
set_block_jmpbuf_decl (tree decl)
{
  current_binding_level->jmpbuf_decl = decl;
}
438
/* Get the jmpbuf_decl, if any, for the current binding level.  */

tree
get_block_jmpbuf_decl (void)
{
  /* NULL_TREE if set_block_jmpbuf_decl was not called for this level.  */
  return current_binding_level->jmpbuf_decl;
}
446
/* Exit a binding level.  Set any BLOCK into the current code group.  */

void
gnat_poplevel (void)
{
  struct gnat_binding_level *level = current_binding_level;
  tree block = level->block;

  /* The variable and subblock lists were built in reverse order; put them
     back in declaration order.  */
  BLOCK_VARS (block) = nreverse (BLOCK_VARS (block));
  BLOCK_SUBBLOCKS (block) = blocks_nreverse (BLOCK_SUBBLOCKS (block));

  /* If this is a function-level BLOCK don't do anything.  Otherwise, if there
     are no variables free the block and merge its subblocks into those of its
     parent block.  Otherwise, add it to the list of its parent.  */
  if (TREE_CODE (BLOCK_SUPERCONTEXT (block)) == FUNCTION_DECL)
    ;
  else if (BLOCK_VARS (block) == NULL_TREE)
    {
      BLOCK_SUBBLOCKS (level->chain->block)
	= block_chainon (BLOCK_SUBBLOCKS (block),
			 BLOCK_SUBBLOCKS (level->chain->block));
      /* Recycle the now-empty BLOCK node.  */
      BLOCK_CHAIN (block) = free_block_chain;
      free_block_chain = block;
    }
  else
    {
      BLOCK_CHAIN (block) = BLOCK_SUBBLOCKS (level->chain->block);
      BLOCK_SUBBLOCKS (level->chain->block) = block;
      TREE_USED (block) = 1;
      set_block_for_group (block);
    }

  /* Free this binding structure.  */
  current_binding_level = level->chain;
  level->chain = free_binding_level;
  free_binding_level = level;
}
484
485 /* Exit a binding level and discard the associated BLOCK. */
486
487 void
488 gnat_zaplevel (void)
489 {
490 struct gnat_binding_level *level = current_binding_level;
491 tree block = level->block;
492
493 BLOCK_CHAIN (block) = free_block_chain;
494 free_block_chain = block;
495
496 /* Free this binding structure. */
497 current_binding_level = level->chain;
498 level->chain = free_binding_level;
499 free_binding_level = level;
500 }
501 \f
/* Record DECL as belonging to the current lexical scope and use GNAT_NODE
   for location information and flag propagation.  */

void
gnat_pushdecl (tree decl, Node_Id gnat_node)
{
  /* If DECL is public external or at top level, it has global context.  */
  if ((TREE_PUBLIC (decl) && DECL_EXTERNAL (decl)) || global_bindings_p ())
    {
      /* The translation unit decl is created lazily on first use.  */
      if (!global_context)
	global_context = build_translation_unit_decl (NULL_TREE);
      DECL_CONTEXT (decl) = global_context;
   }
  else
    {
      DECL_CONTEXT (decl) = current_function_decl;

      /* Functions imported in another function are not really nested.
	 For really nested functions mark them initially as needing
	 a static chain for uses of that flag before unnesting;
	 lower_nested_functions will then recompute it.  */
      if (TREE_CODE (decl) == FUNCTION_DECL && !TREE_PUBLIC (decl))
	DECL_STATIC_CHAIN (decl) = 1;
    }

  TREE_NO_WARNING (decl) = (No (gnat_node) || Warnings_Off (gnat_node));

  /* Set the location of DECL and emit a declaration for it.  */
  if (Present (gnat_node))
    Sloc_to_locus (Sloc (gnat_node), &DECL_SOURCE_LOCATION (decl));

  add_decl_expr (decl, gnat_node);

  /* Put the declaration on the list.  The list of declarations is in reverse
     order.  The list will be reversed later.  Put global declarations in the
     globals list and local ones in the current block.  But skip TYPE_DECLs
     for UNCONSTRAINED_ARRAY_TYPE in both cases, as they will cause trouble
     with the debugger and aren't needed anyway.  */
  if (!(TREE_CODE (decl) == TYPE_DECL
        && TREE_CODE (TREE_TYPE (decl)) == UNCONSTRAINED_ARRAY_TYPE))
    {
      if (global_bindings_p ())
	{
	  VEC_safe_push (tree, gc, global_decls, decl);

	  /* Builtins are additionally recorded for later lookup.  */
	  if (TREE_CODE (decl) == FUNCTION_DECL && DECL_BUILT_IN (decl))
	    VEC_safe_push (tree, gc, builtin_decls, decl);
	}
      else if (!DECL_EXTERNAL (decl))
	{
	  DECL_CHAIN (decl) = BLOCK_VARS (current_binding_level->block);
	  BLOCK_VARS (current_binding_level->block) = decl;
	}
    }

  /* For the declaration of a type, set its name if it either is not already
     set or if the previous type name was not derived from a source name.
     We'd rather have the type named with a real name and all the pointer
     types to the same object have the same POINTER_TYPE node.  Code in the
     equivalent function of c-decl.c makes a copy of the type node here, but
     that may cause us trouble with incomplete types.  We make an exception
     for fat pointer types because the compiler automatically builds them
     for unconstrained array types and the debugger uses them to represent
     both these and pointers to these.  */
  if (TREE_CODE (decl) == TYPE_DECL && DECL_NAME (decl))
    {
      tree t = TREE_TYPE (decl);

      if (!(TYPE_NAME (t) && TREE_CODE (TYPE_NAME (t)) == TYPE_DECL))
	{
	  /* Array and pointer types aren't "tagged" types so we force the
	     type to be associated with its typedef in the DWARF back-end,
	     in order to make sure that the latter is always preserved.  */
	  if (!DECL_ARTIFICIAL (decl)
	      && (TREE_CODE (t) == ARRAY_TYPE
		  || TREE_CODE (t) == POINTER_TYPE))
	    {
	      tree tt = build_distinct_type_copy (t);
	      if (TREE_CODE (t) == POINTER_TYPE)
		TYPE_NEXT_PTR_TO (t) = tt;
	      TYPE_NAME (tt) = DECL_NAME (decl);
	      TYPE_STUB_DECL (tt) = TYPE_STUB_DECL (t);
	      DECL_ORIGINAL_TYPE (decl) = tt;
	    }
	}
      else if (TYPE_IS_FAT_POINTER_P (t))
	{
	  /* We need a variant for the placeholder machinery to work.  */
	  tree tt = build_variant_type_copy (t);
	  TYPE_NAME (tt) = decl;
	  TREE_USED (tt) = TREE_USED (t);
	  TREE_TYPE (decl) = tt;
	  if (DECL_ORIGINAL_TYPE (TYPE_NAME (t)))
	    DECL_ORIGINAL_TYPE (decl) = DECL_ORIGINAL_TYPE (TYPE_NAME (t));
	  else
	    DECL_ORIGINAL_TYPE (decl) = t;
	  DECL_ARTIFICIAL (decl) = 0;
	  t = NULL_TREE;
	}
      else if (DECL_ARTIFICIAL (TYPE_NAME (t)) && !DECL_ARTIFICIAL (decl))
	;
      else
	t = NULL_TREE;

      /* Propagate the name to all the anonymous variants.  This is needed
	 for the type qualifiers machinery to work properly.  */
      if (t)
	for (t = TYPE_MAIN_VARIANT (t); t; t = TYPE_NEXT_VARIANT (t))
	  if (!(TYPE_NAME (t) && TREE_CODE (TYPE_NAME (t)) == TYPE_DECL))
	    TYPE_NAME (t) = decl;
    }
}
614 \f
615 /* Record TYPE as a builtin type for Ada. NAME is the name of the type.
616 ARTIFICIAL_P is true if it's a type that was generated by the compiler. */
617
618 void
619 record_builtin_type (const char *name, tree type, bool artificial_p)
620 {
621 tree type_decl = build_decl (input_location,
622 TYPE_DECL, get_identifier (name), type);
623 DECL_ARTIFICIAL (type_decl) = artificial_p;
624 TYPE_ARTIFICIAL (type) = artificial_p;
625 gnat_pushdecl (type_decl, Empty);
626
627 if (debug_hooks->type_decl)
628 debug_hooks->type_decl (type_decl, false);
629 }
630 \f
/* Given a record type RECORD_TYPE and a list of FIELD_DECL nodes FIELD_LIST,
   finish constructing the record type as a fat pointer type.  */

void
finish_fat_pointer_type (tree record_type, tree field_list)
{
  /* Make sure we can put it into a register.  */
  TYPE_ALIGN (record_type) = MIN (BIGGEST_ALIGNMENT, 2 * POINTER_SIZE);

  /* Show what it really is.  */
  TYPE_FAT_POINTER_P (record_type) = 1;

  /* Do not emit debug info for it since the types of its fields may still be
     incomplete at this point.  */
  finish_record_type (record_type, field_list, 0, false);

  /* Force type_contains_placeholder_p to return true on it.  Although the
     PLACEHOLDER_EXPRs are referenced only indirectly, this isn't a pointer
     type but the representation of the unconstrained array.  */
  TYPE_CONTAINS_PLACEHOLDER_INTERNAL (record_type) = 2;
}
652
/* Given a record type RECORD_TYPE and a list of FIELD_DECL nodes FIELD_LIST,
   finish constructing the record or union type.  If REP_LEVEL is zero, this
   record has no representation clause and so will be entirely laid out here.
   If REP_LEVEL is one, this record has a representation clause and has been
   laid out already; only set the sizes and alignment.  If REP_LEVEL is two,
   this record is derived from a parent record and thus inherits its layout;
   only make a pass on the fields to finalize them.  DEBUG_INFO_P is true if
   we need to write debug information about this type.  */

void
finish_record_type (tree record_type, tree field_list, int rep_level,
		    bool debug_info_p)
{
  enum tree_code code = TREE_CODE (record_type);
  tree name = TYPE_NAME (record_type);
  tree ada_size = bitsize_zero_node;
  tree size = bitsize_zero_node;
  /* Remember what was already set on entry, typically by a rep clause.  */
  bool had_size = TYPE_SIZE (record_type) != 0;
  bool had_size_unit = TYPE_SIZE_UNIT (record_type) != 0;
  bool had_align = TYPE_ALIGN (record_type) != 0;
  tree field;

  TYPE_FIELDS (record_type) = field_list;

  /* Always attach the TYPE_STUB_DECL for a record type.  It is required to
     generate debug info and have a parallel type.  */
  if (name && TREE_CODE (name) == TYPE_DECL)
    name = DECL_NAME (name);
  TYPE_STUB_DECL (record_type) = create_type_stub_decl (name, record_type);

  /* Globally initialize the record first.  If this is a rep'ed record,
     that just means some initializations; otherwise, layout the record.  */
  if (rep_level > 0)
    {
      TYPE_ALIGN (record_type) = MAX (BITS_PER_UNIT, TYPE_ALIGN (record_type));

      if (!had_size_unit)
	TYPE_SIZE_UNIT (record_type) = size_zero_node;

      if (!had_size)
	TYPE_SIZE (record_type) = bitsize_zero_node;

      /* For all-repped records with a size specified, lay the QUAL_UNION_TYPE
	 out just like a UNION_TYPE, since the size will be fixed.  */
      else if (code == QUAL_UNION_TYPE)
	code = UNION_TYPE;
    }
  else
    {
      /* Ensure there isn't a size already set.  There can be in an error
	 case where there is a rep clause but all fields have errors and
	 no longer have a position.  */
      TYPE_SIZE (record_type) = 0;
      layout_type (record_type);
    }

  /* At this point, the position and size of each field is known.  It was
     either set before entry by a rep clause, or by laying out the type above.

     We now run a pass over the fields (in reverse order for QUAL_UNION_TYPEs)
     to compute the Ada size; the GCC size and alignment (for rep'ed records
     that are not padding types); and the mode (for rep'ed records).  We also
     clear the DECL_BIT_FIELD indication for the cases we know have not been
     handled yet, and adjust DECL_NONADDRESSABLE_P accordingly.  */

  if (code == QUAL_UNION_TYPE)
    field_list = nreverse (field_list);

  for (field = field_list; field; field = DECL_CHAIN (field))
    {
      tree type = TREE_TYPE (field);
      tree pos = bit_position (field);
      tree this_size = DECL_SIZE (field);
      tree this_ada_size;

      /* A record or union field with an Ada size contributes that size
	 instead of its GCC size to the enclosing Ada size.  */
      if (RECORD_OR_UNION_TYPE_P (type)
	  && !TYPE_FAT_POINTER_P (type)
	  && !TYPE_CONTAINS_TEMPLATE_P (type)
	  && TYPE_ADA_SIZE (type))
	this_ada_size = TYPE_ADA_SIZE (type);
      else
	this_ada_size = this_size;

      /* Clear DECL_BIT_FIELD for the cases layout_decl does not handle.  */
      if (DECL_BIT_FIELD (field)
	  && operand_equal_p (this_size, TYPE_SIZE (type), 0))
	{
	  unsigned int align = TYPE_ALIGN (type);

	  /* In the general case, type alignment is required.  */
	  if (value_factor_p (pos, align))
	    {
	      /* The enclosing record type must be sufficiently aligned.
		 Otherwise, if no alignment was specified for it and it
		 has been laid out already, bump its alignment to the
		 desired one if this is compatible with its size.  */
	      if (TYPE_ALIGN (record_type) >= align)
		{
		  DECL_ALIGN (field) = MAX (DECL_ALIGN (field), align);
		  DECL_BIT_FIELD (field) = 0;
		}
	      else if (!had_align
		       && rep_level == 0
		       && value_factor_p (TYPE_SIZE (record_type), align))
		{
		  TYPE_ALIGN (record_type) = align;
		  DECL_ALIGN (field) = MAX (DECL_ALIGN (field), align);
		  DECL_BIT_FIELD (field) = 0;
		}
	    }

	  /* In the non-strict alignment case, only byte alignment is
	     required.  */
	  if (!STRICT_ALIGNMENT
	      && DECL_BIT_FIELD (field)
	      && value_factor_p (pos, BITS_PER_UNIT))
	    DECL_BIT_FIELD (field) = 0;
	}

      /* If we still have DECL_BIT_FIELD set at this point, we know that the
	 field is technically not addressable.  Except that it can actually
	 be addressed if it is BLKmode and happens to be properly aligned.  */
      if (DECL_BIT_FIELD (field)
	  && !(DECL_MODE (field) == BLKmode
	       && value_factor_p (pos, BITS_PER_UNIT)))
	DECL_NONADDRESSABLE_P (field) = 1;

      /* A type must be as aligned as its most aligned field that is not
	 a bit-field.  But this is already enforced by layout_type.  */
      if (rep_level > 0 && !DECL_BIT_FIELD (field))
	TYPE_ALIGN (record_type)
	  = MAX (TYPE_ALIGN (record_type), DECL_ALIGN (field));

      switch (code)
	{
	case UNION_TYPE:
	  /* A union is as large as its largest member.  */
	  ada_size = size_binop (MAX_EXPR, ada_size, this_ada_size);
	  size = size_binop (MAX_EXPR, size, this_size);
	  break;

	case QUAL_UNION_TYPE:
	  /* The size of a qualified union is that of the active variant,
	     selected at run time by the field qualifiers.  */
	  ada_size
	    = fold_build3 (COND_EXPR, bitsizetype, DECL_QUALIFIER (field),
			   this_ada_size, ada_size);
	  size = fold_build3 (COND_EXPR, bitsizetype, DECL_QUALIFIER (field),
			      this_size, size);
	  break;

	case RECORD_TYPE:
	  /* Since we know here that all fields are sorted in order of
	     increasing bit position, the size of the record is one
	     higher than the ending bit of the last field processed
	     unless we have a rep clause, since in that case we might
	     have a field outside a QUAL_UNION_TYPE that has a higher ending
	     position.  So use a MAX in that case.  Also, if this field is a
	     QUAL_UNION_TYPE, we need to take into account the previous size in
	     the case of empty variants.  */
	  ada_size
	    = merge_sizes (ada_size, pos, this_ada_size,
			   TREE_CODE (type) == QUAL_UNION_TYPE, rep_level > 0);
	  size
	    = merge_sizes (size, pos, this_size,
			   TREE_CODE (type) == QUAL_UNION_TYPE, rep_level > 0);
	  break;

	default:
	  gcc_unreachable ();
	}
    }

  /* Undo the reversal done above for QUAL_UNION_TYPEs.  */
  if (code == QUAL_UNION_TYPE)
    nreverse (field_list);

  if (rep_level < 2)
    {
      /* If this is a padding record, we never want to make the size smaller
	 than what was specified in it, if any.  */
      if (TYPE_IS_PADDING_P (record_type) && TYPE_SIZE (record_type))
	size = TYPE_SIZE (record_type);

      /* Now set any of the values we've just computed that apply.  */
      if (!TYPE_FAT_POINTER_P (record_type)
	  && !TYPE_CONTAINS_TEMPLATE_P (record_type))
	SET_TYPE_ADA_SIZE (record_type, ada_size);

      if (rep_level > 0)
	{
	  tree size_unit = had_size_unit
			   ? TYPE_SIZE_UNIT (record_type)
			   : convert (sizetype,
				      size_binop (CEIL_DIV_EXPR, size,
						  bitsize_unit_node));
	  unsigned int align = TYPE_ALIGN (record_type);

	  /* Round the sizes up to the alignment of the record.  */
	  TYPE_SIZE (record_type) = variable_size (round_up (size, align));
	  TYPE_SIZE_UNIT (record_type)
	    = variable_size (round_up (size_unit, align / BITS_PER_UNIT));

	  compute_record_mode (record_type);
	}
    }

  if (debug_info_p)
    rest_of_record_type_compilation (record_type);
}
857
/* Wrap up compilation of RECORD_TYPE, i.e. output all the debug information
   associated with it.  It need not be invoked directly in most cases since
   finish_record_type takes care of doing so, but this can be necessary if
   a parallel type is to be attached to the record type.  */

void
rest_of_record_type_compilation (tree record_type)
{
  bool var_size = false;
  tree field;

  /* If this is a padded type, the bulk of the debug info has already been
     generated for the field's type.  */
  if (TYPE_IS_PADDING_P (record_type))
    return;

  for (field = TYPE_FIELDS (record_type); field; field = DECL_CHAIN (field))
    {
      /* We need to make an XVE/XVU record if any field has variable size,
	 whether or not the record does.  For example, if we have a union,
	 it may be that all fields, rounded up to the alignment, have the
	 same size, in which case we'll use that size.  But the debug
	 output routines (except Dwarf2) won't be able to output the fields,
	 so we need to make the special record.  */
      if (TREE_CODE (DECL_SIZE (field)) != INTEGER_CST
	  /* If a field has a non-constant qualifier, the record will have
	     variable size too.  */
	  || (TREE_CODE (record_type) == QUAL_UNION_TYPE
	      && TREE_CODE (DECL_QUALIFIER (field)) != INTEGER_CST))
	{
	  var_size = true;
	  break;
	}
    }

  /* If this record type is of variable size, make a parallel record type that
     will tell the debugger how the former is laid out (see exp_dbug.ads).  */
  if (var_size)
    {
      /* A QUAL_UNION_TYPE is encoded as a plain UNION_TYPE in the parallel
	 type; other codes are kept as-is.  */
      tree new_record_type
	= make_node (TREE_CODE (record_type) == QUAL_UNION_TYPE
		     ? UNION_TYPE : TREE_CODE (record_type));
      tree orig_name = TYPE_NAME (record_type), new_name;
      tree last_pos = bitsize_zero_node;
      tree old_field, prev_old_field = NULL_TREE;

      if (TREE_CODE (orig_name) == TYPE_DECL)
	orig_name = DECL_NAME (orig_name);

      /* Name the parallel type after the original with the XVU (union) or
	 XVE suffix expected by the debugger (see exp_dbug.ads).  */
      new_name
	= concat_name (orig_name, TREE_CODE (record_type) == QUAL_UNION_TYPE
		       ? "XVU" : "XVE");
      TYPE_NAME (new_record_type) = new_name;
      TYPE_ALIGN (new_record_type) = BIGGEST_ALIGNMENT;
      TYPE_STUB_DECL (new_record_type)
	= create_type_stub_decl (new_name, new_record_type);
      DECL_IGNORED_P (TYPE_STUB_DECL (new_record_type))
	= DECL_IGNORED_P (TYPE_STUB_DECL (record_type));
      TYPE_SIZE (new_record_type) = size_int (TYPE_ALIGN (record_type));
      TYPE_SIZE_UNIT (new_record_type)
	= size_int (TYPE_ALIGN (record_type) / BITS_PER_UNIT);

      /* Now scan all the fields, replacing each field with a new
	 field corresponding to the new encoding.  */
      for (old_field = TYPE_FIELDS (record_type); old_field;
	   old_field = DECL_CHAIN (old_field))
	{
	  tree field_type = TREE_TYPE (old_field);
	  tree field_name = DECL_NAME (old_field);
	  tree new_field;
	  tree curpos = bit_position (old_field);
	  bool var = false;
	  unsigned int align = 0;
	  tree pos;

	  /* See how the position was modified from the last position.

	     There are two basic cases we support: a value was added
	     to the last position or the last position was rounded to
	     a boundary and then something was added.  Check for the
	     first case first.  If not, see if there is any evidence
	     of rounding.  If so, round the last position and try
	     again.

	     If this is a union, the position can be taken as zero.  */

	  /* Some computations depend on the shape of the position expression,
	     so strip conversions to make sure it's exposed.  */
	  curpos = remove_conversions (curpos, true);

	  if (TREE_CODE (new_record_type) == UNION_TYPE)
	    pos = bitsize_zero_node, align = 0;
	  else
	    pos = compute_related_constant (curpos, last_pos);

	  if (!pos && TREE_CODE (curpos) == MULT_EXPR
	      && host_integerp (TREE_OPERAND (curpos, 1), 1))
	    {
	      tree offset = TREE_OPERAND (curpos, 0);
	      align = tree_low_cst (TREE_OPERAND (curpos, 1), 1);

	      /* An offset which is a bitwise AND with a negative power of 2
		 means an alignment corresponding to this power of 2.  Note
		 that, as sizetype is sign-extended but nonetheless unsigned,
		 we don't directly use tree_int_cst_sgn.  */
	      offset = remove_conversions (offset, true);
	      if (TREE_CODE (offset) == BIT_AND_EXPR
		  && host_integerp (TREE_OPERAND (offset, 1), 0)
		  && TREE_INT_CST_HIGH (TREE_OPERAND (offset, 1)) < 0)
		{
		  unsigned int pow
		    = - tree_low_cst (TREE_OPERAND (offset, 1), 0);
		  if (exact_log2 (pow) > 0)
		    align *= pow;
		}

	      pos = compute_related_constant (curpos,
					      round_up (last_pos, align));
	    }
	  else if (!pos && TREE_CODE (curpos) == PLUS_EXPR
		   && TREE_CODE (TREE_OPERAND (curpos, 1)) == INTEGER_CST
		   && TREE_CODE (TREE_OPERAND (curpos, 0)) == MULT_EXPR
		   && host_integerp (TREE_OPERAND
				     (TREE_OPERAND (curpos, 0), 1),
				     1))
	    {
	      /* curpos is M * align + CST: take the multiplier as the
		 alignment evidence and retry against the rounded-up
		 previous position.  */
	      align
		= tree_low_cst
		(TREE_OPERAND (TREE_OPERAND (curpos, 0), 1), 1);
	      pos = compute_related_constant (curpos,
					      round_up (last_pos, align));
	    }
	  else if (potential_alignment_gap (prev_old_field, old_field,
					    pos))
	    {
	      align = TYPE_ALIGN (field_type);
	      pos = compute_related_constant (curpos,
					      round_up (last_pos, align));
	    }

	  /* If we can't compute a position, set it to zero.

	     ??? We really should abort here, but it's too much work
	     to get this correct for all cases.  */

	  if (!pos)
	    pos = bitsize_zero_node;

	  /* See if this type is variable-sized and make a pointer type
	     and indicate the indirection if so.  Beware that the debug
	     back-end may adjust the position computed above according
	     to the alignment of the field type, i.e. the pointer type
	     in this case, if we don't preventively counter that.  */
	  if (TREE_CODE (DECL_SIZE (old_field)) != INTEGER_CST)
	    {
	      field_type = build_pointer_type (field_type);
	      if (align != 0 && TYPE_ALIGN (field_type) > align)
		{
		  field_type = copy_node (field_type);
		  TYPE_ALIGN (field_type) = align;
		}
	      var = true;
	    }

	  /* Make a new field name, if necessary: XVLnn for indirect fields,
	     XVAnn for aligned ones (see exp_dbug.ads for the encoding).  */
	  if (var || align != 0)
	    {
	      char suffix[16];

	      if (align != 0)
		sprintf (suffix, "XV%c%u", var ? 'L' : 'A',
			 align / BITS_PER_UNIT);
	      else
		strcpy (suffix, "XVL");

	      field_name = concat_name (field_name, suffix);
	    }

	  /* Chain the new field in reverse order; the list is reversed
	     once after the loop.  */
	  new_field
	    = create_field_decl (field_name, field_type, new_record_type,
				 DECL_SIZE (old_field), pos, 0, 0);
	  DECL_CHAIN (new_field) = TYPE_FIELDS (new_record_type);
	  TYPE_FIELDS (new_record_type) = new_field;

	  /* If old_field is a QUAL_UNION_TYPE, take its size as being
	     zero.  The only time it's not the last field of the record
	     is when there are other components at fixed positions after
	     it (meaning there was a rep clause for every field) and we
	     want to be able to encode them.  */
	  last_pos = size_binop (PLUS_EXPR, bit_position (old_field),
				 (TREE_CODE (TREE_TYPE (old_field))
				  == QUAL_UNION_TYPE)
				 ? bitsize_zero_node
				 : DECL_SIZE (old_field));
	  prev_old_field = old_field;
	}

      TYPE_FIELDS (new_record_type) = nreverse (TYPE_FIELDS (new_record_type));

      add_parallel_type (TYPE_STUB_DECL (record_type), new_record_type);
    }
}
1060
1061 /* Append PARALLEL_TYPE on the chain of parallel types for decl. */
1062
1063 void
1064 add_parallel_type (tree decl, tree parallel_type)
1065 {
1066 tree d = decl;
1067
1068 while (DECL_PARALLEL_TYPE (d))
1069 d = TYPE_STUB_DECL (DECL_PARALLEL_TYPE (d));
1070
1071 SET_DECL_PARALLEL_TYPE (d, parallel_type);
1072 }
1073
/* Utility function of above to merge LAST_SIZE, the previous size of a record
   with FIRST_BIT and SIZE that describe a field.  SPECIAL is true if this
   represents a QUAL_UNION_TYPE in which case we must look for COND_EXPRs and
   replace a value of zero with the old size.  If HAS_REP is true, we take the
   MAX of the end position of this field with LAST_SIZE.  In all other cases,
   we use FIRST_BIT plus SIZE.  Return an expression for the size.  */

static tree
merge_sizes (tree last_size, tree first_bit, tree size, bool special,
	     bool has_rep)
{
  tree type = TREE_TYPE (last_size);
  tree new_size;

  if (!special || TREE_CODE (size) != COND_EXPR)
    {
      /* The field ends at FIRST_BIT + SIZE.  With a rep clause, fields may
	 be laid out before the current end, so keep the larger value.  */
      new_size = size_binop (PLUS_EXPR, first_bit, size);
      if (has_rep)
	new_size = size_binop (MAX_EXPR, last_size, new_size);
    }

  /* QUAL_UNION_TYPE variant whose size is a COND_EXPR: merge each arm
     recursively; a zero arm denotes an empty variant, for which the
     previous size is kept.  */
  else
    new_size = fold_build3 (COND_EXPR, type, TREE_OPERAND (size, 0),
			    integer_zerop (TREE_OPERAND (size, 1))
			    ? last_size : merge_sizes (last_size, first_bit,
						       TREE_OPERAND (size, 1),
						       1, has_rep),
			    integer_zerop (TREE_OPERAND (size, 2))
			    ? last_size : merge_sizes (last_size, first_bit,
						       TREE_OPERAND (size, 2),
						       1, has_rep));

  /* We don't need any NON_LVALUE_EXPRs and they can confuse us (especially
     when fed through substitute_in_expr) into thinking that a constant
     size is not constant.  */
  while (TREE_CODE (new_size) == NON_LVALUE_EXPR)
    new_size = TREE_OPERAND (new_size, 0);

  return new_size;
}
1114
1115 /* Utility function of above to see if OP0 and OP1, both of SIZETYPE, are
1116 related by the addition of a constant. Return that constant if so. */
1117
1118 static tree
1119 compute_related_constant (tree op0, tree op1)
1120 {
1121 tree op0_var, op1_var;
1122 tree op0_con = split_plus (op0, &op0_var);
1123 tree op1_con = split_plus (op1, &op1_var);
1124 tree result = size_binop (MINUS_EXPR, op0_con, op1_con);
1125
1126 if (operand_equal_p (op0_var, op1_var, 0))
1127 return result;
1128 else if (operand_equal_p (op0, size_binop (PLUS_EXPR, op1_var, result), 0))
1129 return result;
1130 else
1131 return 0;
1132 }
1133
1134 /* Utility function of above to split a tree OP which may be a sum, into a
1135 constant part, which is returned, and a variable part, which is stored
1136 in *PVAR. *PVAR may be bitsize_zero_node. All operations must be of
1137 bitsizetype. */
1138
1139 static tree
1140 split_plus (tree in, tree *pvar)
1141 {
1142 /* Strip conversions in order to ease the tree traversal and maximize the
1143 potential for constant or plus/minus discovery. We need to be careful
1144 to always return and set *pvar to bitsizetype trees, but it's worth
1145 the effort. */
1146 in = remove_conversions (in, false);
1147
1148 *pvar = convert (bitsizetype, in);
1149
1150 if (TREE_CODE (in) == INTEGER_CST)
1151 {
1152 *pvar = bitsize_zero_node;
1153 return convert (bitsizetype, in);
1154 }
1155 else if (TREE_CODE (in) == PLUS_EXPR || TREE_CODE (in) == MINUS_EXPR)
1156 {
1157 tree lhs_var, rhs_var;
1158 tree lhs_con = split_plus (TREE_OPERAND (in, 0), &lhs_var);
1159 tree rhs_con = split_plus (TREE_OPERAND (in, 1), &rhs_var);
1160
1161 if (lhs_var == TREE_OPERAND (in, 0)
1162 && rhs_var == TREE_OPERAND (in, 1))
1163 return bitsize_zero_node;
1164
1165 *pvar = size_binop (TREE_CODE (in), lhs_var, rhs_var);
1166 return size_binop (TREE_CODE (in), lhs_con, rhs_con);
1167 }
1168 else
1169 return bitsize_zero_node;
1170 }
1171 \f
/* Return a FUNCTION_TYPE node.  RETURN_TYPE is the type returned by the
   subprogram.  If it is VOID_TYPE, then we are dealing with a procedure,
   otherwise we are dealing with a function.  PARAM_DECL_LIST is a list of
   PARM_DECL nodes that are the subprogram parameters.  CICO_LIST is the
   copy-in/copy-out list to be stored into the TYPE_CICO_LIST field.
   RETURN_UNCONSTRAINED_P is true if the function returns an unconstrained
   object.  RETURN_BY_DIRECT_REF_P is true if the function returns by direct
   reference.  RETURN_BY_INVISI_REF_P is true if the function returns by
   invisible reference.  */

tree
create_subprog_type (tree return_type, tree param_decl_list, tree cico_list,
		     bool return_unconstrained_p, bool return_by_direct_ref_p,
		     bool return_by_invisi_ref_p)
{
  /* A list of the data type nodes of the subprogram formal parameters.
     This list is generated by traversing the input list of PARM_DECL
     nodes.  */
  VEC(tree,gc) *param_type_list = NULL;
  tree t, type;

  for (t = param_decl_list; t; t = DECL_CHAIN (t))
    VEC_safe_push (tree, gc, param_type_list, TREE_TYPE (t));

  type = build_function_type_vec (return_type, param_type_list);

  /* TYPE may have been shared since GCC hashes types.  If it has a different
     CICO_LIST, make a copy.  Likewise for the various flags.  */
  if (!fntype_same_flags_p (type, cico_list, return_unconstrained_p,
			    return_by_direct_ref_p, return_by_invisi_ref_p))
    {
      type = copy_type (type);
      TYPE_CI_CO_LIST (type) = cico_list;
      TYPE_RETURN_UNCONSTRAINED_P (type) = return_unconstrained_p;
      TYPE_RETURN_BY_DIRECT_REF_P (type) = return_by_direct_ref_p;
      /* The invisible-reference property is stored in TREE_ADDRESSABLE
	 of the function type.  */
      TREE_ADDRESSABLE (type) = return_by_invisi_ref_p;
    }

  return type;
}
1212 \f
/* Return a copy of TYPE but safe to modify in any way.  */

tree
copy_type (tree type)
{
  tree new_type = copy_node (type);

  /* Unshare the language-specific data.  */
  if (TYPE_LANG_SPECIFIC (type))
    {
      TYPE_LANG_SPECIFIC (new_type) = NULL;
      SET_TYPE_LANG_SPECIFIC (new_type, GET_TYPE_LANG_SPECIFIC (type));
    }

  /* And the contents of the language-specific slot if needed.  */
  if ((INTEGRAL_TYPE_P (type) || TREE_CODE (type) == REAL_TYPE)
      && TYPE_RM_VALUES (type))
    {
      TYPE_RM_VALUES (new_type) = NULL_TREE;
      SET_TYPE_RM_SIZE (new_type, TYPE_RM_SIZE (type));
      SET_TYPE_RM_MIN_VALUE (new_type, TYPE_RM_MIN_VALUE (type));
      SET_TYPE_RM_MAX_VALUE (new_type, TYPE_RM_MAX_VALUE (type));
    }

  /* copy_node clears this field instead of copying it, because it is
     aliased with TREE_CHAIN.  */
  TYPE_STUB_DECL (new_type) = TYPE_STUB_DECL (type);

  /* Detach the copy from the pointer caches and variant chain of the
     original so it can be modified independently.  */
  TYPE_POINTER_TO (new_type) = 0;
  TYPE_REFERENCE_TO (new_type) = 0;
  TYPE_MAIN_VARIANT (new_type) = new_type;
  TYPE_NEXT_VARIANT (new_type) = 0;

  return new_type;
}
1248 \f
1249 /* Return a subtype of sizetype with range MIN to MAX and whose
1250 TYPE_INDEX_TYPE is INDEX. GNAT_NODE is used for the position
1251 of the associated TYPE_DECL. */
1252
1253 tree
1254 create_index_type (tree min, tree max, tree index, Node_Id gnat_node)
1255 {
1256 /* First build a type for the desired range. */
1257 tree type = build_nonshared_range_type (sizetype, min, max);
1258
1259 /* Then set the index type. */
1260 SET_TYPE_INDEX_TYPE (type, index);
1261 create_type_decl (NULL_TREE, type, NULL, true, false, gnat_node);
1262
1263 return type;
1264 }
1265
1266 /* Return a subtype of TYPE with range MIN to MAX. If TYPE is NULL,
1267 sizetype is used. */
1268
1269 tree
1270 create_range_type (tree type, tree min, tree max)
1271 {
1272 tree range_type;
1273
1274 if (type == NULL_TREE)
1275 type = sizetype;
1276
1277 /* First build a type with the base range. */
1278 range_type = build_nonshared_range_type (type, TYPE_MIN_VALUE (type),
1279 TYPE_MAX_VALUE (type));
1280
1281 /* Then set the actual range. */
1282 SET_TYPE_RM_MIN_VALUE (range_type, convert (type, min));
1283 SET_TYPE_RM_MAX_VALUE (range_type, convert (type, max));
1284
1285 return range_type;
1286 }
1287 \f
1288 /* Return a TYPE_DECL node suitable for the TYPE_STUB_DECL field of a type.
1289 TYPE_NAME gives the name of the type and TYPE is a ..._TYPE node giving
1290 its data type. */
1291
1292 tree
1293 create_type_stub_decl (tree type_name, tree type)
1294 {
1295 /* Using a named TYPE_DECL ensures that a type name marker is emitted in
1296 STABS while setting DECL_ARTIFICIAL ensures that no DW_TAG_typedef is
1297 emitted in DWARF. */
1298 tree type_decl = build_decl (input_location,
1299 TYPE_DECL, type_name, type);
1300 DECL_ARTIFICIAL (type_decl) = 1;
1301 TYPE_ARTIFICIAL (type) = 1;
1302 return type_decl;
1303 }
1304
/* Return a TYPE_DECL node.  TYPE_NAME gives the name of the type and TYPE
   is a ..._TYPE node giving its data type.  ARTIFICIAL_P is true if this
   is a declaration that was generated by the compiler.  DEBUG_INFO_P is
   true if we need to write debug information about this type.  GNAT_NODE
   is used for the position of the decl.  */

tree
create_type_decl (tree type_name, tree type, struct attrib *attr_list,
		  bool artificial_p, bool debug_info_p, Node_Id gnat_node)
{
  enum tree_code code = TREE_CODE (type);
  /* Whether TYPE is already named by a TYPE_DECL.  */
  bool named = TYPE_NAME (type) && TREE_CODE (TYPE_NAME (type)) == TYPE_DECL;
  tree type_decl;

  /* Only the builtin TYPE_STUB_DECL should be used for dummy types.  */
  gcc_assert (!TYPE_IS_DUMMY_P (type));

  /* If the type hasn't been named yet, we're naming it; preserve an existing
     TYPE_STUB_DECL that has been attached to it for some purpose.  */
  if (!named && TYPE_STUB_DECL (type))
    {
      type_decl = TYPE_STUB_DECL (type);
      DECL_NAME (type_decl) = type_name;
    }
  else
    type_decl = build_decl (input_location,
			    TYPE_DECL, type_name, type);

  DECL_ARTIFICIAL (type_decl) = artificial_p;
  TYPE_ARTIFICIAL (type) = artificial_p;

  /* Add this decl to the current binding level.  */
  gnat_pushdecl (type_decl, gnat_node);

  process_attributes (type_decl, attr_list);

  /* If we're naming the type, equate the TYPE_STUB_DECL to the name.
     This causes the name to be also viewed as a "tag" by the debug
     back-end, with the advantage that no DW_TAG_typedef is emitted
     for artificial "tagged" types in DWARF.  */
  if (!named)
    TYPE_STUB_DECL (type) = type_decl;

  /* Do not generate debug info for UNCONSTRAINED_ARRAY_TYPE that the
     back-end doesn't support, and for others if we don't need to.  */
  if (code == UNCONSTRAINED_ARRAY_TYPE || !debug_info_p)
    DECL_IGNORED_P (type_decl) = 1;

  return type_decl;
}
1355 \f
/* Return a VAR_DECL or CONST_DECL node.

   VAR_NAME gives the name of the variable.  ASM_NAME is its assembler name
   (if provided).  TYPE is its data type (a GCC ..._TYPE node).  VAR_INIT is
   the GCC tree for an optional initial expression; NULL_TREE if none.

   CONST_FLAG is true if this variable is constant, in which case we might
   return a CONST_DECL node unless CONST_DECL_ALLOWED_P is false.

   PUBLIC_FLAG is true if this is for a reference to a public entity or for a
   definition to be made visible outside of the current compilation unit, for
   instance variable definitions in a package specification.

   EXTERN_FLAG is true when processing an external variable declaration (as
   opposed to a definition: no storage is to be allocated for the variable).

   STATIC_FLAG is only relevant when not at top level.  In that case
   it indicates whether to always allocate storage to the variable.

   GNAT_NODE is used for the position of the decl.  */

tree
create_var_decl_1 (tree var_name, tree asm_name, tree type, tree var_init,
		   bool const_flag, bool public_flag, bool extern_flag,
		   bool static_flag, bool const_decl_allowed_p,
		   struct attrib *attr_list, Node_Id gnat_node)
{
  /* Whether the initializer is a constant initializer.  At the global level
     or for an external object or an object to be allocated in static memory,
     we check that it is a valid constant expression for use in initializing
     a static variable; otherwise, we only check that it is constant.  */
  bool init_const
    = (var_init != 0
       && gnat_types_compatible_p (type, TREE_TYPE (var_init))
       && (global_bindings_p () || extern_flag || static_flag
	   ? initializer_constant_valid_p (var_init, TREE_TYPE (var_init)) != 0
	   : TREE_CONSTANT (var_init)));

  /* Whether we will make TREE_CONSTANT the DECL we produce here, in which
     case the initializer may be used in-lieu of the DECL node (as done in
     Identifier_to_gnu).  This is useful to prevent the need of elaboration
     code when an identifier for which such a decl is made is in turn used as
     an initializer.  We used to rely on CONST vs VAR_DECL for this purpose,
     but extra constraints apply to this choice (see below) and are not
     relevant to the distinction we wish to make.  */
  bool constant_p = const_flag && init_const;

  /* The actual DECL node.  CONST_DECL was initially intended for enumerals
     and may be used for scalars in general but not for aggregates.  */
  tree var_decl
    = build_decl (input_location,
		  (constant_p && const_decl_allowed_p
		   && !AGGREGATE_TYPE_P (type)) ? CONST_DECL : VAR_DECL,
		  var_name, type);

  /* If this is external, throw away any initializations (they will be done
     elsewhere) unless this is a constant for which we would like to remain
     able to get the initializer.  If we are defining a global here, leave a
     constant initialization and save any variable elaborations for the
     elaboration routine.  If we are just annotating types, throw away the
     initialization if it isn't a constant.  */
  if ((extern_flag && !constant_p)
      || (type_annotate_only && var_init && !TREE_CONSTANT (var_init)))
    var_init = NULL_TREE;

  /* At the global level, an initializer requiring code to be generated
     produces elaboration statements.  Check that such statements are allowed,
     that is, not violating a No_Elaboration_Code restriction.  */
  if (global_bindings_p () && var_init != 0 && !init_const)
    Check_Elaboration_Code_Allowed (gnat_node);

  /* Set the basic decl flags; volatility of the type is mirrored on both
     TREE_THIS_VOLATILE and TREE_SIDE_EFFECTS of the decl.  */
  DECL_INITIAL (var_decl) = var_init;
  TREE_READONLY (var_decl) = const_flag;
  DECL_EXTERNAL (var_decl) = extern_flag;
  TREE_PUBLIC (var_decl) = public_flag || extern_flag;
  TREE_CONSTANT (var_decl) = constant_p;
  TREE_THIS_VOLATILE (var_decl) = TREE_SIDE_EFFECTS (var_decl)
    = TYPE_VOLATILE (type);

  /* Ada doesn't feature Fortran-like COMMON variables so we shouldn't
     try to fiddle with DECL_COMMON.  However, on platforms that don't
     support global BSS sections, uninitialized global variables would
     go in DATA instead, thus increasing the size of the executable.  */
  if (!flag_no_common
      && TREE_CODE (var_decl) == VAR_DECL
      && TREE_PUBLIC (var_decl)
      && !have_global_bss_p ())
    DECL_COMMON (var_decl) = 1;

  /* At the global binding level, we need to allocate static storage for the
     variable if it isn't external.  Otherwise, we allocate automatic storage
     unless requested not to.  */
  TREE_STATIC (var_decl)
    = !extern_flag && (static_flag || global_bindings_p ());

  /* For an external constant whose initializer is not absolute, do not emit
     debug info.  In DWARF this would mean a global relocation in a read-only
     section which runs afoul of the PE-COFF run-time relocation mechanism.  */
  if (extern_flag
      && constant_p
      && var_init
      && initializer_constant_valid_p (var_init, TREE_TYPE (var_init))
	   != null_pointer_node)
    DECL_IGNORED_P (var_decl) = 1;

  /* Add this decl to the current binding level.  */
  gnat_pushdecl (var_decl, gnat_node);

  /* A decl with side effects (volatile type, per the assignment above) is
     also marked addressable.  */
  if (TREE_SIDE_EFFECTS (var_decl))
    TREE_ADDRESSABLE (var_decl) = 1;

  if (TREE_CODE (var_decl) == VAR_DECL)
    {
      if (asm_name)
	SET_DECL_ASSEMBLER_NAME (var_decl, asm_name);
      process_attributes (var_decl, attr_list);
      if (global_bindings_p ())
	rest_of_decl_compilation (var_decl, true, 0);
    }
  else
    /* CONST_DECLs are expanded immediately.  */
    expand_decl (var_decl);

  return var_decl;
}
1480 \f
1481 /* Return true if TYPE, an aggregate type, contains (or is) an array. */
1482
1483 static bool
1484 aggregate_type_contains_array_p (tree type)
1485 {
1486 switch (TREE_CODE (type))
1487 {
1488 case RECORD_TYPE:
1489 case UNION_TYPE:
1490 case QUAL_UNION_TYPE:
1491 {
1492 tree field;
1493 for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
1494 if (AGGREGATE_TYPE_P (TREE_TYPE (field))
1495 && aggregate_type_contains_array_p (TREE_TYPE (field)))
1496 return true;
1497 return false;
1498 }
1499
1500 case ARRAY_TYPE:
1501 return true;
1502
1503 default:
1504 gcc_unreachable ();
1505 }
1506 }
1507
/* Return a FIELD_DECL node.  FIELD_NAME is the field's name, FIELD_TYPE is
   its type and RECORD_TYPE is the type of the enclosing record.  If SIZE is
   nonzero, it is the specified size of the field.  If POS is nonzero, it is
   the bit position.  PACKED is 1 if the enclosing record is packed, -1 if it
   has Component_Alignment of Storage_Unit.  If ADDRESSABLE is nonzero, it
   means we are allowed to take the address of the field; if it is negative,
   we should not make a bitfield, which is used by make_aligning_type.  */

tree
create_field_decl (tree field_name, tree field_type, tree record_type,
		   tree size, tree pos, int packed, int addressable)
{
  tree field_decl = build_decl (input_location,
				FIELD_DECL, field_name, field_type);

  DECL_CONTEXT (field_decl) = record_type;
  TREE_READONLY (field_decl) = TYPE_READONLY (field_type);

  /* If FIELD_TYPE is BLKmode, we must ensure this is aligned to at least a
     byte boundary since GCC cannot handle less-aligned BLKmode bitfields.
     Likewise for an aggregate without specified position that contains an
     array, because in this case slices of variable length of this array
     must be handled by GCC and variable-sized objects need to be aligned
     to at least a byte boundary.  */
  if (packed && (TYPE_MODE (field_type) == BLKmode
		 || (!pos
		     && AGGREGATE_TYPE_P (field_type)
		     && aggregate_type_contains_array_p (field_type))))
    DECL_ALIGN (field_decl) = BITS_PER_UNIT;

  /* If a size is specified, use it.  Otherwise, if the record type is packed
     compute a size to use, which may differ from the object's natural size.
     We always set a size in this case to trigger the checks for bitfield
     creation below, which is typically required when no position has been
     specified.  */
  if (size)
    size = convert (bitsizetype, size);
  else if (packed == 1)
    {
      size = rm_size (field_type);
      if (TYPE_MODE (field_type) == BLKmode)
	size = round_up (size, BITS_PER_UNIT);
    }

  /* If we may, according to ADDRESSABLE, make a bitfield if a size is
     specified for two reasons: first if the size differs from the natural
     size.  Second, if the alignment is insufficient.  There are a number of
     ways the latter can be true.

     We never make a bitfield if the type of the field has a nonconstant size,
     because no such entity requiring bitfield operations should reach here.

     We do *preventively* make a bitfield when there might be the need for it
     but we don't have all the necessary information to decide, as is the case
     of a field with no specified position in a packed record.

     We also don't look at STRICT_ALIGNMENT here, and rely on later processing
     in layout_decl or finish_record_type to clear the bit_field indication if
     it is in fact not needed.  */
  if (addressable >= 0
      && size
      && TREE_CODE (size) == INTEGER_CST
      && TREE_CODE (TYPE_SIZE (field_type)) == INTEGER_CST
      && (!tree_int_cst_equal (size, TYPE_SIZE (field_type))
	  || (pos && !value_factor_p (pos, TYPE_ALIGN (field_type)))
	  || packed
	  || (TYPE_ALIGN (record_type) != 0
	      && TYPE_ALIGN (record_type) < TYPE_ALIGN (field_type))))
    {
      DECL_BIT_FIELD (field_decl) = 1;
      DECL_SIZE (field_decl) = size;
      if (!packed && !pos)
	{
	  /* Without packing or an explicit position, cap the alignment at
	     that of the record when the record is less aligned than the
	     field type.  */
	  if (TYPE_ALIGN (record_type) != 0
	      && TYPE_ALIGN (record_type) < TYPE_ALIGN (field_type))
	    DECL_ALIGN (field_decl) = TYPE_ALIGN (record_type);
	  else
	    DECL_ALIGN (field_decl) = TYPE_ALIGN (field_type);
	}
    }

  /* With an explicit position, the field is packed only if it was actually
     made a bitfield above; otherwise inherit the record's packedness.  */
  DECL_PACKED (field_decl) = pos ? DECL_BIT_FIELD (field_decl) : packed;

  /* Bump the alignment if need be, either for bitfield/packing purposes or
     to satisfy the type requirements if no such consideration applies.  When
     we get the alignment from the type, indicate if this is from an explicit
     user request, which prevents stor-layout from lowering it later on.  */
  {
    unsigned int bit_align
      = (DECL_BIT_FIELD (field_decl) ? 1
	 : packed && TYPE_MODE (field_type) != BLKmode ? BITS_PER_UNIT : 0);

    if (bit_align > DECL_ALIGN (field_decl))
      DECL_ALIGN (field_decl) = bit_align;
    else if (!bit_align && TYPE_ALIGN (field_type) > DECL_ALIGN (field_decl))
      {
	DECL_ALIGN (field_decl) = TYPE_ALIGN (field_type);
	DECL_USER_ALIGN (field_decl) = TYPE_USER_ALIGN (field_type);
      }
  }

  if (pos)
    {
      /* We need to pass in the alignment the DECL is known to have.
	 This is the lowest-order bit set in POS, but no more than
	 the alignment of the record, if one is specified.  Note
	 that an alignment of 0 is taken as infinite.  */
      unsigned int known_align;

      if (host_integerp (pos, 1))
	known_align = tree_low_cst (pos, 1) & - tree_low_cst (pos, 1);
      else
	known_align = BITS_PER_UNIT;

      if (TYPE_ALIGN (record_type)
	  && (known_align == 0 || known_align > TYPE_ALIGN (record_type)))
	known_align = TYPE_ALIGN (record_type);

      /* Lay the field out at the requested bit position.  */
      layout_decl (field_decl, known_align);
      SET_DECL_OFFSET_ALIGN (field_decl,
			     host_integerp (pos, 1) ? BIGGEST_ALIGNMENT
			     : BITS_PER_UNIT);
      pos_from_bit (&DECL_FIELD_OFFSET (field_decl),
		    &DECL_FIELD_BIT_OFFSET (field_decl),
		    DECL_OFFSET_ALIGN (field_decl), pos);
    }

  /* In addition to what our caller says, claim the field is addressable if we
     know that its type is not suitable.

     The field may also be "technically" nonaddressable, meaning that even if
     we attempt to take the field's address we will actually get the address
     of a copy.  This is the case for true bitfields, but the DECL_BIT_FIELD
     value we have at this point is not accurate enough, so we don't account
     for this here and let finish_record_type decide.  */
  if (!addressable && !type_for_nonaliased_component_p (field_type))
    addressable = 1;

  DECL_NONADDRESSABLE_P (field_decl) = !addressable;

  return field_decl;
}
1650 \f
1651 /* Return a PARM_DECL node. PARAM_NAME is the name of the parameter and
1652 PARAM_TYPE is its type. READONLY is true if the parameter is readonly
1653 (either an In parameter or an address of a pass-by-ref parameter). */
1654
1655 tree
1656 create_param_decl (tree param_name, tree param_type, bool readonly)
1657 {
1658 tree param_decl = build_decl (input_location,
1659 PARM_DECL, param_name, param_type);
1660
1661 /* Honor TARGET_PROMOTE_PROTOTYPES like the C compiler, as not doing so
1662 can lead to various ABI violations. */
1663 if (targetm.calls.promote_prototypes (NULL_TREE)
1664 && INTEGRAL_TYPE_P (param_type)
1665 && TYPE_PRECISION (param_type) < TYPE_PRECISION (integer_type_node))
1666 {
1667 /* We have to be careful about biased types here. Make a subtype
1668 of integer_type_node with the proper biasing. */
1669 if (TREE_CODE (param_type) == INTEGER_TYPE
1670 && TYPE_BIASED_REPRESENTATION_P (param_type))
1671 {
1672 tree subtype
1673 = make_unsigned_type (TYPE_PRECISION (integer_type_node));
1674 TREE_TYPE (subtype) = integer_type_node;
1675 TYPE_BIASED_REPRESENTATION_P (subtype) = 1;
1676 SET_TYPE_RM_MIN_VALUE (subtype, TYPE_MIN_VALUE (param_type));
1677 SET_TYPE_RM_MAX_VALUE (subtype, TYPE_MAX_VALUE (param_type));
1678 param_type = subtype;
1679 }
1680 else
1681 param_type = integer_type_node;
1682 }
1683
1684 DECL_ARG_TYPE (param_decl) = param_type;
1685 TREE_READONLY (param_decl) = readonly;
1686 return param_decl;
1687 }
1688 \f
/* Given a DECL and ATTR_LIST, process the listed attributes.  */

static void
process_attributes (tree decl, struct attrib *attr_list)
{
  for (; attr_list; attr_list = attr_list->next)
    switch (attr_list->type)
      {
      case ATTR_MACHINE_ATTRIBUTE:
	/* Hand a machine attribute over to the middle-end, applied in
	   place at the decl's source location.  */
	input_location = DECL_SOURCE_LOCATION (decl);
	decl_attributes (&decl, tree_cons (attr_list->name, attr_list->args,
					   NULL_TREE),
			 ATTR_FLAG_TYPE_IN_PLACE);
	break;

      case ATTR_LINK_ALIAS:
	/* An alias only makes sense for a definition, not for an
	   external declaration.  */
	if (! DECL_EXTERNAL (decl))
	  {
	    TREE_STATIC (decl) = 1;
	    assemble_alias (decl, attr_list->name);
	  }
	break;

      case ATTR_WEAK_EXTERNAL:
	if (SUPPORTS_WEAK)
	  declare_weak (decl);
	else
	  post_error ("?weak declarations not supported on this target",
		      attr_list->error_point);
	break;

      case ATTR_LINK_SECTION:
	/* Place the decl in the named section; this is incompatible
	   with DECL_COMMON, which is cleared.  */
	if (targetm_common.have_named_sections)
	  {
	    DECL_SECTION_NAME (decl)
	      = build_string (IDENTIFIER_LENGTH (attr_list->name),
			      IDENTIFIER_POINTER (attr_list->name));
	    DECL_COMMON (decl) = 0;
	  }
	else
	  post_error ("?section attributes are not supported for this target",
		      attr_list->error_point);
	break;

      case ATTR_LINK_CONSTRUCTOR:
	DECL_STATIC_CONSTRUCTOR (decl) = 1;
	TREE_USED (decl) = 1;
	break;

      case ATTR_LINK_DESTRUCTOR:
	DECL_STATIC_DESTRUCTOR (decl) = 1;
	TREE_USED (decl) = 1;
	break;

      case ATTR_THREAD_LOCAL_STORAGE:
	/* TLS variables cannot be common.  */
	DECL_TLS_MODEL (decl) = decl_default_tls_model (decl);
	DECL_COMMON (decl) = 0;
	break;
      }
}
1749 \f
1750 /* Record DECL as a global renaming pointer. */
1751
1752 void
1753 record_global_renaming_pointer (tree decl)
1754 {
1755 gcc_assert (!DECL_LOOP_PARM_P (decl) && DECL_RENAMED_OBJECT (decl));
1756 VEC_safe_push (tree, gc, global_renaming_pointers, decl);
1757 }
1758
1759 /* Invalidate the global renaming pointers. */
1760
1761 void
1762 invalidate_global_renaming_pointers (void)
1763 {
1764 unsigned int i;
1765 tree iter;
1766
1767 FOR_EACH_VEC_ELT (tree, global_renaming_pointers, i, iter)
1768 SET_DECL_RENAMED_OBJECT (iter, NULL_TREE);
1769
1770 VEC_free (tree, gc, global_renaming_pointers);
1771 }
1772
1773 /* Return true if VALUE is a known to be a multiple of FACTOR, which must be
1774 a power of 2. */
1775
1776 bool
1777 value_factor_p (tree value, HOST_WIDE_INT factor)
1778 {
1779 if (host_integerp (value, 1))
1780 return tree_low_cst (value, 1) % factor == 0;
1781
1782 if (TREE_CODE (value) == MULT_EXPR)
1783 return (value_factor_p (TREE_OPERAND (value, 0), factor)
1784 || value_factor_p (TREE_OPERAND (value, 1), factor));
1785
1786 return false;
1787 }
1788
1789 /* Given two consecutive field decls PREV_FIELD and CURR_FIELD, return true
1790 unless we can prove these 2 fields are laid out in such a way that no gap
1791 exist between the end of PREV_FIELD and the beginning of CURR_FIELD. OFFSET
1792 is the distance in bits between the end of PREV_FIELD and the starting
1793 position of CURR_FIELD. It is ignored if null. */
1794
1795 static bool
1796 potential_alignment_gap (tree prev_field, tree curr_field, tree offset)
1797 {
1798 /* If this is the first field of the record, there cannot be any gap */
1799 if (!prev_field)
1800 return false;
1801
1802 /* If the previous field is a union type, then return False: The only
1803 time when such a field is not the last field of the record is when
1804 there are other components at fixed positions after it (meaning there
1805 was a rep clause for every field), in which case we don't want the
1806 alignment constraint to override them. */
1807 if (TREE_CODE (TREE_TYPE (prev_field)) == QUAL_UNION_TYPE)
1808 return false;
1809
1810 /* If the distance between the end of prev_field and the beginning of
1811 curr_field is constant, then there is a gap if the value of this
1812 constant is not null. */
1813 if (offset && host_integerp (offset, 1))
1814 return !integer_zerop (offset);
1815
1816 /* If the size and position of the previous field are constant,
1817 then check the sum of this size and position. There will be a gap
1818 iff it is not multiple of the current field alignment. */
1819 if (host_integerp (DECL_SIZE (prev_field), 1)
1820 && host_integerp (bit_position (prev_field), 1))
1821 return ((tree_low_cst (bit_position (prev_field), 1)
1822 + tree_low_cst (DECL_SIZE (prev_field), 1))
1823 % DECL_ALIGN (curr_field) != 0);
1824
1825 /* If both the position and size of the previous field are multiples
1826 of the current field alignment, there cannot be any gap. */
1827 if (value_factor_p (bit_position (prev_field), DECL_ALIGN (curr_field))
1828 && value_factor_p (DECL_SIZE (prev_field), DECL_ALIGN (curr_field)))
1829 return false;
1830
1831 /* Fallback, return that there may be a potential gap */
1832 return true;
1833 }
1834
1835 /* Return a LABEL_DECL with LABEL_NAME. GNAT_NODE is used for the position
1836 of the decl. */
1837
1838 tree
1839 create_label_decl (tree label_name, Node_Id gnat_node)
1840 {
1841 tree label_decl
1842 = build_decl (input_location, LABEL_DECL, label_name, void_type_node);
1843
1844 DECL_MODE (label_decl) = VOIDmode;
1845
1846 /* Add this decl to the current binding level. */
1847 gnat_pushdecl (label_decl, gnat_node);
1848
1849 return label_decl;
1850 }
1851 \f
/* Return a FUNCTION_DECL node.  SUBPROG_NAME is the name of the subprogram,
   ASM_NAME is its assembler name, SUBPROG_TYPE is its type (a FUNCTION_TYPE
   node), PARAM_DECL_LIST is the list of the subprogram arguments (a list of
   PARM_DECL nodes chained through the DECL_CHAIN field).

   INLINE_FLAG, PUBLIC_FLAG, EXTERN_FLAG, ARTIFICIAL_FLAG and ATTR_LIST are
   used to set the appropriate fields in the FUNCTION_DECL.  GNAT_NODE is
   used for the position of the decl.  */

tree
create_subprog_decl (tree subprog_name, tree asm_name, tree subprog_type,
		     tree param_decl_list, bool inline_flag, bool public_flag,
		     bool extern_flag, bool artificial_flag,
		     struct attrib *attr_list, Node_Id gnat_node)
{
  tree subprog_decl = build_decl (input_location, FUNCTION_DECL, subprog_name,
				  subprog_type);
  /* The RESULT_DECL represents the return value of the subprogram.  */
  tree result_decl = build_decl (input_location, RESULT_DECL, NULL_TREE,
				 TREE_TYPE (subprog_type));
  DECL_ARGUMENTS (subprog_decl) = param_decl_list;

  /* If this is a non-inline function nested inside an inlined external
     function, we cannot honor both requests without cloning the nested
     function in the current unit since it is private to the other unit.
     We could inline the nested function as well but it's probably better
     to err on the side of too little inlining.  */
  if (!inline_flag
      && !public_flag
      && current_function_decl
      && DECL_DECLARED_INLINE_P (current_function_decl)
      && DECL_EXTERNAL (current_function_decl))
    DECL_DECLARED_INLINE_P (current_function_decl) = 0;

  DECL_ARTIFICIAL (subprog_decl) = artificial_flag;
  DECL_EXTERNAL (subprog_decl) = extern_flag;
  DECL_DECLARED_INLINE_P (subprog_decl) = inline_flag;
  /* Do not warn about the impossibility of inlining compiler-generated
     inline subprograms.  */
  DECL_NO_INLINE_WARNING_P (subprog_decl) = inline_flag && artificial_flag;

  TREE_PUBLIC (subprog_decl) = public_flag;
  /* Propagate the const/volatile qualifiers of the FUNCTION_TYPE onto
     the decl so the middle-end sees pure/volatile semantics.  */
  TREE_READONLY (subprog_decl) = TYPE_READONLY (subprog_type);
  TREE_THIS_VOLATILE (subprog_decl) = TYPE_VOLATILE (subprog_type);
  TREE_SIDE_EFFECTS (subprog_decl) = TYPE_VOLATILE (subprog_type);

  DECL_ARTIFICIAL (result_decl) = 1;
  DECL_IGNORED_P (result_decl) = 1;
  /* TREE_ADDRESSABLE on a FUNCTION_TYPE means return by invisible
     reference in the GNAT type system.  */
  DECL_BY_REFERENCE (result_decl) = TREE_ADDRESSABLE (subprog_type);
  DECL_RESULT (subprog_decl) = result_decl;

  if (asm_name)
    {
      SET_DECL_ASSEMBLER_NAME (subprog_decl, asm_name);

      /* The expand_main_function circuitry expects "main_identifier_node" to
	 designate the DECL_NAME of the 'main' entry point, in turn expected
	 to be declared as the "main" function literally by default.  Ada
	 program entry points are typically declared with a different name
	 within the binder generated file, exported as 'main' to satisfy the
	 system expectations.  Force main_identifier_node in this case.  */
      if (asm_name == main_identifier_node)
	DECL_NAME (subprog_decl) = main_identifier_node;
    }

  /* Add this decl to the current binding level.  */
  gnat_pushdecl (subprog_decl, gnat_node);

  process_attributes (subprog_decl, attr_list);

  /* Output the assembler code and/or RTL for the declaration.  */
  rest_of_decl_compilation (subprog_decl, global_bindings_p (), 0);

  return subprog_decl;
}
1924 \f
1925 /* Set up the framework for generating code for SUBPROG_DECL, a subprogram
1926 body. This routine needs to be invoked before processing the declarations
1927 appearing in the subprogram. */
1928
1929 void
1930 begin_subprog_body (tree subprog_decl)
1931 {
1932 tree param_decl;
1933
1934 announce_function (subprog_decl);
1935
1936 /* This function is being defined. */
1937 TREE_STATIC (subprog_decl) = 1;
1938
1939 current_function_decl = subprog_decl;
1940
1941 /* Enter a new binding level and show that all the parameters belong to
1942 this function. */
1943 gnat_pushlevel ();
1944
1945 for (param_decl = DECL_ARGUMENTS (subprog_decl); param_decl;
1946 param_decl = DECL_CHAIN (param_decl))
1947 DECL_CONTEXT (param_decl) = subprog_decl;
1948
1949 make_decl_rtl (subprog_decl);
1950 }
1951
/* Finish translating the current subprogram and set its BODY.  */

void
end_subprog_body (tree body)
{
  tree fndecl = current_function_decl;

  /* Attach the BLOCK for this level to the function and pop the level.  */
  BLOCK_SUPERCONTEXT (current_binding_level->block) = fndecl;
  DECL_INITIAL (fndecl) = current_binding_level->block;
  gnat_poplevel ();

  /* Mark the RESULT_DECL as being in this subprogram.  */
  DECL_CONTEXT (DECL_RESULT (fndecl)) = fndecl;

  /* The body should be a BIND_EXPR whose BLOCK is the top-level one.
     If so, make that BLOCK the function's DECL_INITIAL, overriding the
     one set from the binding level above.  */
  if (TREE_CODE (body) == BIND_EXPR)
    {
      BLOCK_SUPERCONTEXT (BIND_EXPR_BLOCK (body)) = fndecl;
      DECL_INITIAL (fndecl) = BIND_EXPR_BLOCK (body);
    }

  DECL_SAVED_TREE (fndecl) = body;

  /* Restore the enclosing function, or NULL_TREE at top level.  */
  current_function_decl = decl_function_context (fndecl);
}
1978
1979 /* Wrap up compilation of SUBPROG_DECL, a subprogram body. */
1980
1981 void
1982 rest_of_subprog_body_compilation (tree subprog_decl)
1983 {
1984 /* We cannot track the location of errors past this point. */
1985 error_gnat_node = Empty;
1986
1987 /* If we're only annotating types, don't actually compile this function. */
1988 if (type_annotate_only)
1989 return;
1990
1991 /* Dump functions before gimplification. */
1992 dump_function (TDI_original, subprog_decl);
1993
1994 /* ??? This special handling of nested functions is probably obsolete. */
1995 if (!decl_function_context (subprog_decl))
1996 cgraph_finalize_function (subprog_decl, false);
1997 else
1998 /* Register this function with cgraph just far enough to get it
1999 added to our parent's nested function list. */
2000 (void) cgraph_get_create_node (subprog_decl);
2001 }
2002
/* Register DECL, a builtin function, in the current binding level and
   return it.  This is the langhook used to make builtins visible.  */

tree
gnat_builtin_function (tree decl)
{
  gnat_pushdecl (decl, Empty);
  return decl;
}
2009
2010 /* Return an integer type with the number of bits of precision given by
2011 PRECISION. UNSIGNEDP is nonzero if the type is unsigned; otherwise
2012 it is a signed type. */
2013
2014 tree
2015 gnat_type_for_size (unsigned precision, int unsignedp)
2016 {
2017 tree t;
2018 char type_name[20];
2019
2020 if (precision <= 2 * MAX_BITS_PER_WORD
2021 && signed_and_unsigned_types[precision][unsignedp])
2022 return signed_and_unsigned_types[precision][unsignedp];
2023
2024 if (unsignedp)
2025 t = make_unsigned_type (precision);
2026 else
2027 t = make_signed_type (precision);
2028
2029 if (precision <= 2 * MAX_BITS_PER_WORD)
2030 signed_and_unsigned_types[precision][unsignedp] = t;
2031
2032 if (!TYPE_NAME (t))
2033 {
2034 sprintf (type_name, "%sSIGNED_%d", unsignedp ? "UN" : "", precision);
2035 TYPE_NAME (t) = get_identifier (type_name);
2036 }
2037
2038 return t;
2039 }
2040
2041 /* Likewise for floating-point types. */
2042
2043 static tree
2044 float_type_for_precision (int precision, enum machine_mode mode)
2045 {
2046 tree t;
2047 char type_name[20];
2048
2049 if (float_types[(int) mode])
2050 return float_types[(int) mode];
2051
2052 float_types[(int) mode] = t = make_node (REAL_TYPE);
2053 TYPE_PRECISION (t) = precision;
2054 layout_type (t);
2055
2056 gcc_assert (TYPE_MODE (t) == mode);
2057 if (!TYPE_NAME (t))
2058 {
2059 sprintf (type_name, "FLOAT_%d", precision);
2060 TYPE_NAME (t) = get_identifier (type_name);
2061 }
2062
2063 return t;
2064 }
2065
2066 /* Return a data type that has machine mode MODE. UNSIGNEDP selects
2067 an unsigned type; otherwise a signed type is returned. */
2068
2069 tree
2070 gnat_type_for_mode (enum machine_mode mode, int unsignedp)
2071 {
2072 if (mode == BLKmode)
2073 return NULL_TREE;
2074
2075 if (mode == VOIDmode)
2076 return void_type_node;
2077
2078 if (COMPLEX_MODE_P (mode))
2079 return NULL_TREE;
2080
2081 if (SCALAR_FLOAT_MODE_P (mode))
2082 return float_type_for_precision (GET_MODE_PRECISION (mode), mode);
2083
2084 if (SCALAR_INT_MODE_P (mode))
2085 return gnat_type_for_size (GET_MODE_BITSIZE (mode), unsignedp);
2086
2087 if (VECTOR_MODE_P (mode))
2088 {
2089 enum machine_mode inner_mode = GET_MODE_INNER (mode);
2090 tree inner_type = gnat_type_for_mode (inner_mode, unsignedp);
2091 if (inner_type)
2092 return build_vector_type_for_mode (inner_type, mode);
2093 }
2094
2095 return NULL_TREE;
2096 }
2097
2098 /* Return the unsigned version of a TYPE_NODE, a scalar type. */
2099
2100 tree
2101 gnat_unsigned_type (tree type_node)
2102 {
2103 tree type = gnat_type_for_size (TYPE_PRECISION (type_node), 1);
2104
2105 if (TREE_CODE (type_node) == INTEGER_TYPE && TYPE_MODULAR_P (type_node))
2106 {
2107 type = copy_node (type);
2108 TREE_TYPE (type) = type_node;
2109 }
2110 else if (TREE_TYPE (type_node)
2111 && TREE_CODE (TREE_TYPE (type_node)) == INTEGER_TYPE
2112 && TYPE_MODULAR_P (TREE_TYPE (type_node)))
2113 {
2114 type = copy_node (type);
2115 TREE_TYPE (type) = TREE_TYPE (type_node);
2116 }
2117
2118 return type;
2119 }
2120
2121 /* Return the signed version of a TYPE_NODE, a scalar type. */
2122
2123 tree
2124 gnat_signed_type (tree type_node)
2125 {
2126 tree type = gnat_type_for_size (TYPE_PRECISION (type_node), 0);
2127
2128 if (TREE_CODE (type_node) == INTEGER_TYPE && TYPE_MODULAR_P (type_node))
2129 {
2130 type = copy_node (type);
2131 TREE_TYPE (type) = type_node;
2132 }
2133 else if (TREE_TYPE (type_node)
2134 && TREE_CODE (TREE_TYPE (type_node)) == INTEGER_TYPE
2135 && TYPE_MODULAR_P (TREE_TYPE (type_node)))
2136 {
2137 type = copy_node (type);
2138 TREE_TYPE (type) = TREE_TYPE (type_node);
2139 }
2140
2141 return type;
2142 }
2143
2144 /* Return 1 if the types T1 and T2 are compatible, i.e. if they can be
2145 transparently converted to each other. */
2146
2147 int
2148 gnat_types_compatible_p (tree t1, tree t2)
2149 {
2150 enum tree_code code;
2151
2152 /* This is the default criterion. */
2153 if (TYPE_MAIN_VARIANT (t1) == TYPE_MAIN_VARIANT (t2))
2154 return 1;
2155
2156 /* We only check structural equivalence here. */
2157 if ((code = TREE_CODE (t1)) != TREE_CODE (t2))
2158 return 0;
2159
2160 /* Vector types are also compatible if they have the same number of subparts
2161 and the same form of (scalar) element type. */
2162 if (code == VECTOR_TYPE
2163 && TYPE_VECTOR_SUBPARTS (t1) == TYPE_VECTOR_SUBPARTS (t2)
2164 && TREE_CODE (TREE_TYPE (t1)) == TREE_CODE (TREE_TYPE (t2))
2165 && TYPE_PRECISION (TREE_TYPE (t1)) == TYPE_PRECISION (TREE_TYPE (t2)))
2166 return 1;
2167
2168 /* Array types are also compatible if they are constrained and have the same
2169 domain(s) and the same component type. */
2170 if (code == ARRAY_TYPE
2171 && (TYPE_DOMAIN (t1) == TYPE_DOMAIN (t2)
2172 || (TYPE_DOMAIN (t1)
2173 && TYPE_DOMAIN (t2)
2174 && tree_int_cst_equal (TYPE_MIN_VALUE (TYPE_DOMAIN (t1)),
2175 TYPE_MIN_VALUE (TYPE_DOMAIN (t2)))
2176 && tree_int_cst_equal (TYPE_MAX_VALUE (TYPE_DOMAIN (t1)),
2177 TYPE_MAX_VALUE (TYPE_DOMAIN (t2)))))
2178 && (TREE_TYPE (t1) == TREE_TYPE (t2)
2179 || (TREE_CODE (TREE_TYPE (t1)) == ARRAY_TYPE
2180 && gnat_types_compatible_p (TREE_TYPE (t1), TREE_TYPE (t2)))))
2181 return 1;
2182
2183 /* Padding record types are also compatible if they pad the same
2184 type and have the same constant size. */
2185 if (code == RECORD_TYPE
2186 && TYPE_PADDING_P (t1) && TYPE_PADDING_P (t2)
2187 && TREE_TYPE (TYPE_FIELDS (t1)) == TREE_TYPE (TYPE_FIELDS (t2))
2188 && tree_int_cst_equal (TYPE_SIZE (t1), TYPE_SIZE (t2)))
2189 return 1;
2190
2191 return 0;
2192 }
2193
2194 /* Return true if EXPR is a useless type conversion. */
2195
2196 bool
2197 gnat_useless_type_conversion (tree expr)
2198 {
2199 if (CONVERT_EXPR_P (expr)
2200 || TREE_CODE (expr) == VIEW_CONVERT_EXPR
2201 || TREE_CODE (expr) == NON_LVALUE_EXPR)
2202 return gnat_types_compatible_p (TREE_TYPE (expr),
2203 TREE_TYPE (TREE_OPERAND (expr, 0)));
2204
2205 return false;
2206 }
2207
2208 /* Return true if T, a FUNCTION_TYPE, has the specified list of flags. */
2209
2210 bool
2211 fntype_same_flags_p (const_tree t, tree cico_list, bool return_unconstrained_p,
2212 bool return_by_direct_ref_p, bool return_by_invisi_ref_p)
2213 {
2214 return TYPE_CI_CO_LIST (t) == cico_list
2215 && TYPE_RETURN_UNCONSTRAINED_P (t) == return_unconstrained_p
2216 && TYPE_RETURN_BY_DIRECT_REF_P (t) == return_by_direct_ref_p
2217 && TREE_ADDRESSABLE (t) == return_by_invisi_ref_p;
2218 }
2219 \f
/* EXP is an expression for the size of an object.  If this size contains
   discriminant references, replace them with the maximum (if MAX_P) or
   minimum (if !MAX_P) possible value of the discriminant.  */

tree
max_size (tree exp, bool max_p)
{
  enum tree_code code = TREE_CODE (exp);
  tree type = TREE_TYPE (exp);

  switch (TREE_CODE_CLASS (code))
    {
    case tcc_declaration:
    case tcc_constant:
      /* Constants and decls cannot contain discriminant references.  */
      return exp;

    case tcc_vl_exp:
      if (code == CALL_EXPR)
	{
	  tree t, *argarray;
	  int n, i;

	  /* Try to fold the call away entirely first.  */
	  t = maybe_inline_call_in_expr (exp);
	  if (t)
	    return max_size (t, max_p);

	  /* Otherwise rebuild the call with each argument processed.  */
	  n = call_expr_nargs (exp);
	  gcc_assert (n > 0);
	  argarray = XALLOCAVEC (tree, n);
	  for (i = 0; i < n; i++)
	    argarray[i] = max_size (CALL_EXPR_ARG (exp, i), max_p);
	  return build_call_array (type, CALL_EXPR_FN (exp), n, argarray);
	}
      break;

    case tcc_reference:
      /* If this contains a PLACEHOLDER_EXPR, it is the thing we want to
	 modify.  Otherwise, we treat it like a variable.  */
      if (!CONTAINS_PLACEHOLDER_P (exp))
	return exp;

      /* A discriminant reference: substitute the extremum of its type.
	 Note the recursion always uses MAX_P = true here.  */
      type = TREE_TYPE (TREE_OPERAND (exp, 1));
      return
	max_size (max_p ? TYPE_MAX_VALUE (type) : TYPE_MIN_VALUE (type), true);

    case tcc_comparison:
      /* Comparisons yield 0 or 1, so those are the two extrema.  */
      return max_p ? size_one_node : size_zero_node;

    case tcc_unary:
    case tcc_binary:
    case tcc_expression:
      switch (TREE_CODE_LENGTH (code))
	{
	case 1:
	  if (code == SAVE_EXPR)
	    return exp;
	  else if (code == NON_LVALUE_EXPR)
	    return max_size (TREE_OPERAND (exp, 0), max_p);
	  else
	    /* Negation flips which extremum of the operand we want.  */
	    return
	      fold_build1 (code, type,
			   max_size (TREE_OPERAND (exp, 0),
				     code == NEGATE_EXPR ? !max_p : max_p));

	case 2:
	  if (code == COMPOUND_EXPR)
	    return max_size (TREE_OPERAND (exp, 1), max_p);

	  {
	    /* For subtraction, the extremum of the result pairs the
	       extremum of the LHS with the opposite extremum of the RHS.  */
	    tree lhs = max_size (TREE_OPERAND (exp, 0), max_p);
	    tree rhs = max_size (TREE_OPERAND (exp, 1),
				 code == MINUS_EXPR ? !max_p : max_p);

	    /* Special-case wanting the maximum value of a MIN_EXPR.
	       In that case, if one side overflows, return the other.
	       sizetype is signed, but we know sizes are non-negative.
	       Likewise, handle a MINUS_EXPR or PLUS_EXPR with the LHS
	       overflowing and the RHS a variable.  */
	    if (max_p
		&& code == MIN_EXPR
		&& TREE_CODE (rhs) == INTEGER_CST
		&& TREE_OVERFLOW (rhs))
	      return lhs;
	    else if (max_p
		     && code == MIN_EXPR
		     && TREE_CODE (lhs) == INTEGER_CST
		     && TREE_OVERFLOW (lhs))
	      return rhs;
	    else if ((code == MINUS_EXPR || code == PLUS_EXPR)
		     && TREE_CODE (lhs) == INTEGER_CST
		     && TREE_OVERFLOW (lhs)
		     && !TREE_CONSTANT (rhs))
	      return lhs;
	    else
	      return fold_build2 (code, type, lhs, rhs);
	  }

	case 3:
	  /* For a conditional, take the extremum over both arms; the
	     condition itself is dropped.  Anything else falls through
	     to the unreachable below.  */
	  if (code == COND_EXPR)
	    return fold_build2 (max_p ? MAX_EXPR : MIN_EXPR, type,
				max_size (TREE_OPERAND (exp, 1), max_p),
				max_size (TREE_OPERAND (exp, 2), max_p));
	}

      /* Other tree classes cannot happen.  */
    default:
      break;
    }

  gcc_unreachable ();
}
2331 \f
/* Build a template of type TEMPLATE_TYPE from the array bounds of ARRAY_TYPE.
   EXPR is an expression that we can use to locate any PLACEHOLDER_EXPRs.
   Return a constructor for the template.  */

tree
build_template (tree template_type, tree array_type, tree expr)
{
  VEC(constructor_elt,gc) *template_elts = NULL;
  tree bound_list = NULL_TREE;
  tree field;

  /* Strip any wrapper records (padding or justified-modular) to get at
     the actual array type.  */
  while (TREE_CODE (array_type) == RECORD_TYPE
	 && (TYPE_PADDING_P (array_type)
	     || TYPE_JUSTIFIED_MODULAR_P (array_type)))
    array_type = TREE_TYPE (TYPE_FIELDS (array_type));

  /* Packed arrays may be implemented as INTEGER_TYPEs carrying the
     original bounds in TYPE_ACTUAL_BOUNDS.  */
  if (TREE_CODE (array_type) == ARRAY_TYPE
      || (TREE_CODE (array_type) == INTEGER_TYPE
	  && TYPE_HAS_ACTUAL_BOUNDS_P (array_type)))
    bound_list = TYPE_ACTUAL_BOUNDS (array_type);

  /* First make the list for a CONSTRUCTOR for the template.  Go down the
     field list of the template instead of the type chain because this
     array might be an Ada array of arrays and we can't tell where the
     nested arrays stop being the underlying object.  */

  /* The template has one (min, max) field pair per dimension, hence the
     double DECL_CHAIN step; the bound list or the array-type chain is
     advanced in lockstep, one dimension per iteration.  */
  for (field = TYPE_FIELDS (template_type); field;
       (bound_list
	? (bound_list = TREE_CHAIN (bound_list))
	: (array_type = TREE_TYPE (array_type))),
       field = DECL_CHAIN (DECL_CHAIN (field)))
    {
      tree bounds, min, max;

      /* If we have a bound list, get the bounds from there.  Likewise
	 for an ARRAY_TYPE.  Otherwise, if expr is a PARM_DECL with
	 DECL_BY_COMPONENT_PTR_P, use the bounds of the field in the template.
	 This will give us a maximum range.  */
      if (bound_list)
	bounds = TREE_VALUE (bound_list);
      else if (TREE_CODE (array_type) == ARRAY_TYPE)
	bounds = TYPE_INDEX_TYPE (TYPE_DOMAIN (array_type));
      else if (expr && TREE_CODE (expr) == PARM_DECL
	       && DECL_BY_COMPONENT_PTR_P (expr))
	bounds = TREE_TYPE (field);
      else
	gcc_unreachable ();

      min = convert (TREE_TYPE (field), TYPE_MIN_VALUE (bounds));
      max = convert (TREE_TYPE (DECL_CHAIN (field)), TYPE_MAX_VALUE (bounds));

      /* If either MIN or MAX involve a PLACEHOLDER_EXPR, we must
	 substitute it from OBJECT.  */
      min = SUBSTITUTE_PLACEHOLDER_IN_EXPR (min, expr);
      max = SUBSTITUTE_PLACEHOLDER_IN_EXPR (max, expr);

      CONSTRUCTOR_APPEND_ELT (template_elts, field, min);
      CONSTRUCTOR_APPEND_ELT (template_elts, DECL_CHAIN (field), max);
    }

  return gnat_build_constructor (template_type, template_elts);
}
2394 \f
2395 /* Helper routine to make a descriptor field. FIELD_LIST is the list of decls
2396 being built; the new decl is chained on to the front of the list. */
2397
2398 static tree
2399 make_descriptor_field (const char *name, tree type, tree rec_type,
2400 tree initial, tree field_list)
2401 {
2402 tree field
2403 = create_field_decl (get_identifier (name), type, rec_type, NULL_TREE,
2404 NULL_TREE, 0, 0);
2405
2406 DECL_INITIAL (field) = initial;
2407 DECL_CHAIN (field) = field_list;
2408 return field;
2409 }
2410
/* Build a 32-bit VMS descriptor from a Mechanism_Type, which must specify a
   descriptor type, and the GCC type of an object.  Each FIELD_DECL in the
   type contains in its DECL_INITIAL the expression to use when a constructor
   is made for the type.  GNAT_ENTITY is an entity used to print out an error
   message if the mechanism cannot be applied to an object of that type and
   also for the name.  */

tree
build_vms_descriptor32 (tree type, Mechanism_Type mech, Entity_Id gnat_entity)
{
  tree record_type = make_node (RECORD_TYPE);
  tree pointer32_type, pointer64_type;
  tree field_list = NULL_TREE;
  int klass, ndim, i, dtype = 0;
  tree inner_type, tem;
  tree *idx_arr;

  /* If TYPE is an unconstrained array, use the underlying array type.  */
  if (TREE_CODE (type) == UNCONSTRAINED_ARRAY_TYPE)
    type = TREE_TYPE (TREE_TYPE (TYPE_FIELDS (TREE_TYPE (type))));

  /* If this is an array, compute the number of dimensions in the array,
     get the index types, and point to the inner type.  */
  if (TREE_CODE (type) != ARRAY_TYPE)
    ndim = 0;
  else
    for (ndim = 1, inner_type = type;
	 TREE_CODE (TREE_TYPE (inner_type)) == ARRAY_TYPE
	 && TYPE_MULTI_ARRAY_P (TREE_TYPE (inner_type));
	 ndim++, inner_type = TREE_TYPE (inner_type))
      ;

  idx_arr = XALLOCAVEC (tree, ndim);

  /* Fortran convention stores dimensions in column-major order, so the
     index types are collected in reverse, except for NCA descriptors.  */
  if (mech != By_Descriptor_NCA && mech != By_Short_Descriptor_NCA
      && TREE_CODE (type) == ARRAY_TYPE && TYPE_CONVENTION_FORTRAN_P (type))
    for (i = ndim - 1, inner_type = type;
	 i >= 0;
	 i--, inner_type = TREE_TYPE (inner_type))
      idx_arr[i] = TYPE_DOMAIN (inner_type);
  else
    for (i = 0, inner_type = type;
	 i < ndim;
	 i++, inner_type = TREE_TYPE (inner_type))
      idx_arr[i] = TYPE_DOMAIN (inner_type);

  /* Now get the DTYPE value: the VMS data-type code for the object.  */
  switch (TREE_CODE (type))
    {
    case INTEGER_TYPE:
    case ENUMERAL_TYPE:
    case BOOLEAN_TYPE:
      if (TYPE_VAX_FLOATING_POINT_P (type))
	/* VAX float codes selected by decimal digits of precision.  */
	switch (tree_low_cst (TYPE_DIGITS_VALUE (type), 1))
	  {
	  case 6:
	    dtype = 10;
	    break;
	  case 9:
	    dtype = 11;
	    break;
	  case 15:
	    dtype = 27;
	    break;
	  }
      else
	/* Integer codes selected by bit size and signedness.  */
	switch (GET_MODE_BITSIZE (TYPE_MODE (type)))
	  {
	  case 8:
	    dtype = TYPE_UNSIGNED (type) ? 2 : 6;
	    break;
	  case 16:
	    dtype = TYPE_UNSIGNED (type) ? 3 : 7;
	    break;
	  case 32:
	    dtype = TYPE_UNSIGNED (type) ? 4 : 8;
	    break;
	  case 64:
	    dtype = TYPE_UNSIGNED (type) ? 5 : 9;
	    break;
	  case 128:
	    dtype = TYPE_UNSIGNED (type) ? 25 : 26;
	    break;
	  }
      break;

    case REAL_TYPE:
      /* IEEE single (52) or double (53).  */
      dtype = GET_MODE_BITSIZE (TYPE_MODE (type)) == 32 ? 52 : 53;
      break;

    case COMPLEX_TYPE:
      if (TREE_CODE (TREE_TYPE (type)) == INTEGER_TYPE
	  && TYPE_VAX_FLOATING_POINT_P (type))
	switch (tree_low_cst (TYPE_DIGITS_VALUE (type), 1))
	  {
	  case 6:
	    dtype = 12;
	    break;
	  case 9:
	    dtype = 13;
	    break;
	  case 15:
	    dtype = 29;
	  }
      else
	dtype = GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (type))) == 32 ? 54: 55;
      break;

    case ARRAY_TYPE:
      dtype = 14;
      break;

    default:
      break;
    }

  /* Get the CLASS value: the VMS descriptor class code.  */
  switch (mech)
    {
    case By_Descriptor_A:
    case By_Short_Descriptor_A:
      klass = 4;
      break;
    case By_Descriptor_NCA:
    case By_Short_Descriptor_NCA:
      klass = 10;
      break;
    case By_Descriptor_SB:
    case By_Short_Descriptor_SB:
      klass = 15;
      break;
    case By_Descriptor:
    case By_Short_Descriptor:
    case By_Descriptor_S:
    case By_Short_Descriptor_S:
    default:
      klass = 1;
      break;
    }

  /* Make the type for a descriptor for VMS.  The first four fields are the
     same for all types.  */
  field_list
    = make_descriptor_field ("LENGTH", gnat_type_for_size (16, 1), record_type,
			     size_in_bytes ((mech == By_Descriptor_A
					     || mech == By_Short_Descriptor_A)
					    ? inner_type : type),
			     field_list);
  field_list
    = make_descriptor_field ("DTYPE", gnat_type_for_size (8, 1), record_type,
			     size_int (dtype), field_list);
  field_list
    = make_descriptor_field ("CLASS", gnat_type_for_size (8, 1), record_type,
			     size_int (klass), field_list);

  pointer32_type = build_pointer_type_for_mode (type, SImode, false);
  pointer64_type = build_pointer_type_for_mode (type, DImode, false);

  /* Ensure that only 32-bit pointers are passed in 32-bit descriptors.  Note
     that we cannot build a template call to the CE routine as it would get a
     wrong source location; instead we use a second placeholder for it.  */
  tem = build_unary_op (ADDR_EXPR, pointer64_type,
			build0 (PLACEHOLDER_EXPR, type));
  tem = build3 (COND_EXPR, pointer32_type,
		/* On a 64-bit target, check the address fits in 32 bits;
		   on a 32-bit target the check is trivially false.  */
		Pmode != SImode
		? build_binary_op (GE_EXPR, boolean_type_node, tem,
				   build_int_cstu (pointer64_type, 0x80000000))
		: boolean_false_node,
		build0 (PLACEHOLDER_EXPR, void_type_node),
		convert (pointer32_type, tem));

  field_list
    = make_descriptor_field ("POINTER", pointer32_type, record_type, tem,
			     field_list);

  /* The remaining fields depend on the descriptor class.  */
  switch (mech)
    {
    case By_Descriptor:
    case By_Short_Descriptor:
    case By_Descriptor_S:
    case By_Short_Descriptor_S:
      break;

    case By_Descriptor_SB:
    case By_Short_Descriptor_SB:
      /* String with bounds: lower and upper bound of the first (and
	 only) dimension.  */
      field_list
	= make_descriptor_field ("SB_L1", gnat_type_for_size (32, 1),
				 record_type,
				 (TREE_CODE (type) == ARRAY_TYPE
				  ? TYPE_MIN_VALUE (TYPE_DOMAIN (type))
				  : size_zero_node),
				 field_list);
      field_list
	= make_descriptor_field ("SB_U1", gnat_type_for_size (32, 1),
				 record_type,
				 (TREE_CODE (type) == ARRAY_TYPE
				  ? TYPE_MAX_VALUE (TYPE_DOMAIN (type))
				  : size_zero_node),
				 field_list);
      break;

    case By_Descriptor_A:
    case By_Short_Descriptor_A:
    case By_Descriptor_NCA:
    case By_Short_Descriptor_NCA:
      field_list
	= make_descriptor_field ("SCALE", gnat_type_for_size (8, 1),
				 record_type, size_zero_node, field_list);

      field_list
	= make_descriptor_field ("DIGITS", gnat_type_for_size (8, 1),
				 record_type, size_zero_node, field_list);

      field_list
	= make_descriptor_field ("AFLAGS", gnat_type_for_size (8, 1),
				 record_type,
				 size_int ((mech == By_Descriptor_NCA
					    || mech == By_Short_Descriptor_NCA)
					   ? 0
					   /* Set FL_COLUMN, FL_COEFF, and
					      FL_BOUNDS.  */
					   : (TREE_CODE (type) == ARRAY_TYPE
					      && TYPE_CONVENTION_FORTRAN_P
						 (type)
					      ? 224 : 192)),
				 field_list);

      field_list
	= make_descriptor_field ("DIMCT", gnat_type_for_size (8, 1),
				 record_type, size_int (ndim), field_list);

      field_list
	= make_descriptor_field ("ARSIZE", gnat_type_for_size (32, 1),
				 record_type, size_in_bytes (type),
				 field_list);

      /* Now build a pointer to the 0,0,0... element.  */
      tem = build0 (PLACEHOLDER_EXPR, type);
      for (i = 0, inner_type = type; i < ndim;
	   i++, inner_type = TREE_TYPE (inner_type))
	tem = build4 (ARRAY_REF, TREE_TYPE (inner_type), tem,
		      convert (TYPE_DOMAIN (inner_type), size_zero_node),
		      NULL_TREE, NULL_TREE);

      field_list
	= make_descriptor_field ("A0", pointer32_type, record_type,
				 build1 (ADDR_EXPR, pointer32_type, tem),
				 field_list);

      /* Next come the addressing coefficients.  */
      tem = size_one_node;
      for (i = 0; i < ndim; i++)
	{
	  char fname[3];
	  /* Length of dimension I in elements, scaled by the product of
	     the lengths of the previous dimensions (TEM).  */
	  tree idx_length
	    = size_binop (MULT_EXPR, tem,
			  size_binop (PLUS_EXPR,
				      size_binop (MINUS_EXPR,
						  TYPE_MAX_VALUE (idx_arr[i]),
						  TYPE_MIN_VALUE (idx_arr[i])),
				      size_int (1)));

	  /* S<i> (strides) for NCA descriptors, M<i> (multipliers)
	     otherwise.  */
	  fname[0] = ((mech == By_Descriptor_NCA ||
		       mech == By_Short_Descriptor_NCA) ? 'S' : 'M');
	  fname[1] = '0' + i, fname[2] = 0;
	  field_list
	    = make_descriptor_field (fname, gnat_type_for_size (32, 1),
				     record_type, idx_length, field_list);

	  if (mech == By_Descriptor_NCA || mech == By_Short_Descriptor_NCA)
	    tem = idx_length;
	}

      /* Finally here are the bounds: L<i> (lower) and U<i> (upper) for
	 each dimension.  */
      for (i = 0; i < ndim; i++)
	{
	  char fname[3];

	  fname[0] = 'L', fname[1] = '0' + i, fname[2] = 0;
	  field_list
	    = make_descriptor_field (fname, gnat_type_for_size (32, 1),
				     record_type, TYPE_MIN_VALUE (idx_arr[i]),
				     field_list);

	  fname[0] = 'U';
	  field_list
	    = make_descriptor_field (fname, gnat_type_for_size (32, 1),
				     record_type, TYPE_MAX_VALUE (idx_arr[i]),
				     field_list);
	}
      break;

    default:
      post_error ("unsupported descriptor type for &", gnat_entity);
    }

  /* Fields were chained in reverse; put them back in declaration order.  */
  TYPE_NAME (record_type) = create_concat_name (gnat_entity, "DESC");
  finish_record_type (record_type, nreverse (field_list), 0, false);
  return record_type;
}
2711
2712 /* Build a 64-bit VMS descriptor from a Mechanism_Type, which must specify a
2713 descriptor type, and the GCC type of an object. Each FIELD_DECL in the
2714 type contains in its DECL_INITIAL the expression to use when a constructor
2715 is made for the type. GNAT_ENTITY is an entity used to print out an error
2716 message if the mechanism cannot be applied to an object of that type and
2717 also for the name. */
2718
tree
build_vms_descriptor (tree type, Mechanism_Type mech, Entity_Id gnat_entity)
{
  tree record_type = make_node (RECORD_TYPE);
  tree pointer64_type;
  tree field_list = NULL_TREE;
  int klass, ndim, i, dtype = 0;
  tree inner_type, tem;
  tree *idx_arr;

  /* If TYPE is an unconstrained array, use the underlying array type.  */
  if (TREE_CODE (type) == UNCONSTRAINED_ARRAY_TYPE)
    type = TREE_TYPE (TREE_TYPE (TYPE_FIELDS (TREE_TYPE (type))));

  /* If this is an array, compute the number of dimensions in the array,
     get the index types, and point to the inner type.  */
  if (TREE_CODE (type) != ARRAY_TYPE)
    ndim = 0;
  else
    for (ndim = 1, inner_type = type;
	 TREE_CODE (TREE_TYPE (inner_type)) == ARRAY_TYPE
	 && TYPE_MULTI_ARRAY_P (TREE_TYPE (inner_type));
	 ndim++, inner_type = TREE_TYPE (inner_type))
      ;

  idx_arr = XALLOCAVEC (tree, ndim);

  /* For Fortran arrays (except with NCA descriptors), the dimensions are
     stored in reverse order, so collect the index types accordingly.  */
  if (mech != By_Descriptor_NCA
      && TREE_CODE (type) == ARRAY_TYPE && TYPE_CONVENTION_FORTRAN_P (type))
    for (i = ndim - 1, inner_type = type;
	 i >= 0;
	 i--, inner_type = TREE_TYPE (inner_type))
      idx_arr[i] = TYPE_DOMAIN (inner_type);
  else
    for (i = 0, inner_type = type;
	 i < ndim;
	 i++, inner_type = TREE_TYPE (inner_type))
      idx_arr[i] = TYPE_DOMAIN (inner_type);

  /* Now get the DTYPE value.  The numeric values below are presumably the
     VMS DSC$K_DTYPE_* codes from the OpenVMS calling standard -- NOTE
     (review): confirm against that standard before changing any of them.  */
  switch (TREE_CODE (type))
    {
    case INTEGER_TYPE:
    case ENUMERAL_TYPE:
    case BOOLEAN_TYPE:
      if (TYPE_VAX_FLOATING_POINT_P (type))
	switch (tree_low_cst (TYPE_DIGITS_VALUE (type), 1))
	  {
	  case 6:
	    dtype = 10;
	    break;
	  case 9:
	    dtype = 11;
	    break;
	  case 15:
	    dtype = 27;
	    break;
	  }
      else
	switch (GET_MODE_BITSIZE (TYPE_MODE (type)))
	  {
	  case 8:
	    dtype = TYPE_UNSIGNED (type) ? 2 : 6;
	    break;
	  case 16:
	    dtype = TYPE_UNSIGNED (type) ? 3 : 7;
	    break;
	  case 32:
	    dtype = TYPE_UNSIGNED (type) ? 4 : 8;
	    break;
	  case 64:
	    dtype = TYPE_UNSIGNED (type) ? 5 : 9;
	    break;
	  case 128:
	    dtype = TYPE_UNSIGNED (type) ? 25 : 26;
	    break;
	  }
      break;

    case REAL_TYPE:
      dtype = GET_MODE_BITSIZE (TYPE_MODE (type)) == 32 ? 52 : 53;
      break;

    case COMPLEX_TYPE:
      if (TREE_CODE (TREE_TYPE (type)) == INTEGER_TYPE
	  && TYPE_VAX_FLOATING_POINT_P (type))
	switch (tree_low_cst (TYPE_DIGITS_VALUE (type), 1))
	  {
	  case 6:
	    dtype = 12;
	    break;
	  case 9:
	    dtype = 13;
	    break;
	  case 15:
	    dtype = 29;
	    /* No break needed: last case of this switch.  */
	  }
      else
	dtype = GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (type))) == 32 ? 54: 55;
      break;

    case ARRAY_TYPE:
      dtype = 14;
      break;

    default:
      break;
    }

  /* Get the CLASS value.  */
  switch (mech)
    {
    case By_Descriptor_A:
      klass = 4;
      break;
    case By_Descriptor_NCA:
      klass = 10;
      break;
    case By_Descriptor_SB:
      klass = 15;
      break;
    case By_Descriptor:
    case By_Descriptor_S:
    default:
      klass = 1;
      break;
    }

  /* Make the type for a 64-bit descriptor for VMS.  The first six fields
     are the same for all types.  Note that MBO ("must be one") and MBMO
     ("must be minus one") are what convert_vms_descriptor tests at run
     time (MBO == 1 && MBMO == -1) to recognize a 64-bit descriptor.  */
  field_list
    = make_descriptor_field ("MBO", gnat_type_for_size (16, 1),
			     record_type, size_int (1), field_list);
  field_list
    = make_descriptor_field ("DTYPE", gnat_type_for_size (8, 1),
			     record_type, size_int (dtype), field_list);
  field_list
    = make_descriptor_field ("CLASS", gnat_type_for_size (8, 1),
			     record_type, size_int (klass), field_list);
  field_list
    = make_descriptor_field ("MBMO", gnat_type_for_size (32, 1),
			     record_type, ssize_int (-1), field_list);
  field_list
    = make_descriptor_field ("LENGTH", gnat_type_for_size (64, 1),
			     record_type,
			     size_in_bytes (mech == By_Descriptor_A
					    ? inner_type : type),
			     field_list);

  pointer64_type = build_pointer_type_for_mode (type, DImode, false);

  field_list
    = make_descriptor_field ("POINTER", pointer64_type, record_type,
			     build_unary_op (ADDR_EXPR, pointer64_type,
					     build0 (PLACEHOLDER_EXPR, type)),
			     field_list);

  switch (mech)
    {
    case By_Descriptor:
    case By_Descriptor_S:
      break;

    case By_Descriptor_SB:
      field_list
	= make_descriptor_field ("SB_L1", gnat_type_for_size (64, 1),
				 record_type,
				 (TREE_CODE (type) == ARRAY_TYPE
				  ? TYPE_MIN_VALUE (TYPE_DOMAIN (type))
				  : size_zero_node),
				 field_list);
      field_list
	= make_descriptor_field ("SB_U1", gnat_type_for_size (64, 1),
				 record_type,
				 (TREE_CODE (type) == ARRAY_TYPE
				  ? TYPE_MAX_VALUE (TYPE_DOMAIN (type))
				  : size_zero_node),
				 field_list);
      break;

    case By_Descriptor_A:
    case By_Descriptor_NCA:
      field_list
	= make_descriptor_field ("SCALE", gnat_type_for_size (8, 1),
				 record_type, size_zero_node, field_list);

      field_list
	= make_descriptor_field ("DIGITS", gnat_type_for_size (8, 1),
				 record_type, size_zero_node, field_list);

      /* Note that the DTYPE local is reused here to hold the AFLAGS value;
	 192 and 224 are bit masks (FL_COEFF | FL_BOUNDS, plus FL_COLUMN
	 for Fortran-convention arrays).  */
      dtype = (mech == By_Descriptor_NCA
	       ? 0
	       /* Set FL_COLUMN, FL_COEFF, and
		  FL_BOUNDS.  */
	       : (TREE_CODE (type) == ARRAY_TYPE
		  && TYPE_CONVENTION_FORTRAN_P (type)
		  ? 224 : 192));
      field_list
	= make_descriptor_field ("AFLAGS", gnat_type_for_size (8, 1),
				 record_type, size_int (dtype),
				 field_list);

      field_list
	= make_descriptor_field ("DIMCT", gnat_type_for_size (8, 1),
				 record_type, size_int (ndim), field_list);

      field_list
	= make_descriptor_field ("MBZ", gnat_type_for_size (32, 1),
				 record_type, size_int (0), field_list);
      field_list
	= make_descriptor_field ("ARSIZE", gnat_type_for_size (64, 1),
				 record_type, size_in_bytes (type),
				 field_list);

      /* Now build a pointer to the 0,0,0... element.  */
      tem = build0 (PLACEHOLDER_EXPR, type);
      for (i = 0, inner_type = type; i < ndim;
	   i++, inner_type = TREE_TYPE (inner_type))
	tem = build4 (ARRAY_REF, TREE_TYPE (inner_type), tem,
		      convert (TYPE_DOMAIN (inner_type), size_zero_node),
		      NULL_TREE, NULL_TREE);

      field_list
	= make_descriptor_field ("A0", pointer64_type, record_type,
				 build1 (ADDR_EXPR, pointer64_type, tem),
				 field_list);

      /* Next come the addressing coefficients.  For NCA descriptors the
	 coefficients are cumulative strides (S0, S1, ...); for A
	 descriptors each field is the extent product (M0, M1, ...).  */
      tem = size_one_node;
      for (i = 0; i < ndim; i++)
	{
	  char fname[3];
	  tree idx_length
	    = size_binop (MULT_EXPR, tem,
			  size_binop (PLUS_EXPR,
				      size_binop (MINUS_EXPR,
						  TYPE_MAX_VALUE (idx_arr[i]),
						  TYPE_MIN_VALUE (idx_arr[i])),
				      size_int (1)));

	  fname[0] = (mech == By_Descriptor_NCA ? 'S' : 'M');
	  fname[1] = '0' + i, fname[2] = 0;
	  field_list
	    = make_descriptor_field (fname, gnat_type_for_size (64, 1),
				     record_type, idx_length, field_list);

	  if (mech == By_Descriptor_NCA)
	    tem = idx_length;
	}

      /* Finally here are the bounds, a (Li, Ui) pair per dimension.  */
      for (i = 0; i < ndim; i++)
	{
	  char fname[3];

	  fname[0] = 'L', fname[1] = '0' + i, fname[2] = 0;
	  field_list
	    = make_descriptor_field (fname, gnat_type_for_size (64, 1),
				     record_type,
				     TYPE_MIN_VALUE (idx_arr[i]), field_list);

	  fname[0] = 'U';
	  field_list
	    = make_descriptor_field (fname, gnat_type_for_size (64, 1),
				     record_type,
				     TYPE_MAX_VALUE (idx_arr[i]), field_list);
	}
      break;

    default:
      post_error ("unsupported descriptor type for &", gnat_entity);
    }

  /* The fields were accumulated in reverse order, hence the nreverse.  */
  TYPE_NAME (record_type) = create_concat_name (gnat_entity, "DESC64");
  finish_record_type (record_type, nreverse (field_list), 0, false);
  return record_type;
}
2996
2997 /* Fill in a VMS descriptor of GNU_TYPE for GNU_EXPR and return the result.
2998 GNAT_ACTUAL is the actual parameter for which the descriptor is built. */
2999
3000 tree
3001 fill_vms_descriptor (tree gnu_type, tree gnu_expr, Node_Id gnat_actual)
3002 {
3003 VEC(constructor_elt,gc) *v = NULL;
3004 tree field;
3005
3006 gnu_expr = maybe_unconstrained_array (gnu_expr);
3007 gnu_expr = gnat_protect_expr (gnu_expr);
3008 gnat_mark_addressable (gnu_expr);
3009
3010 /* We may need to substitute both GNU_EXPR and a CALL_EXPR to the raise CE
3011 routine in case we have a 32-bit descriptor. */
3012 gnu_expr = build2 (COMPOUND_EXPR, void_type_node,
3013 build_call_raise (CE_Range_Check_Failed, gnat_actual,
3014 N_Raise_Constraint_Error),
3015 gnu_expr);
3016
3017 for (field = TYPE_FIELDS (gnu_type); field; field = DECL_CHAIN (field))
3018 {
3019 tree value
3020 = convert (TREE_TYPE (field),
3021 SUBSTITUTE_PLACEHOLDER_IN_EXPR (DECL_INITIAL (field),
3022 gnu_expr));
3023 CONSTRUCTOR_APPEND_ELT (v, field, value);
3024 }
3025
3026 return gnat_build_constructor (gnu_type, v);
3027 }
3028
3029 /* Convert GNU_EXPR, a pointer to a 64bit VMS descriptor, to GNU_TYPE, a
3030 regular pointer or fat pointer type. GNAT_SUBPROG is the subprogram to
3031 which the VMS descriptor is passed. */
3032
static tree
convert_vms_descriptor64 (tree gnu_type, tree gnu_expr, Entity_Id gnat_subprog)
{
  tree desc_type = TREE_TYPE (TREE_TYPE (gnu_expr));
  tree desc = build1 (INDIRECT_REF, desc_type, gnu_expr);
  /* The CLASS field is the 3rd field in the descriptor.  */
  tree klass = DECL_CHAIN (DECL_CHAIN (TYPE_FIELDS (desc_type)));
  /* The POINTER field is the 6th field in the descriptor.  */
  tree pointer = DECL_CHAIN (DECL_CHAIN (DECL_CHAIN (klass)));

  /* Retrieve the value of the POINTER field.  */
  tree gnu_expr64
    = build3 (COMPONENT_REF, TREE_TYPE (pointer), desc, pointer, NULL_TREE);

  /* For a plain pointer target type, the data pointer is all we need.  */
  if (POINTER_TYPE_P (gnu_type))
    return convert (gnu_type, gnu_expr64);

  /* For a fat pointer, we must additionally materialize the bounds
     template from the descriptor contents.  */
  else if (TYPE_IS_FAT_POINTER_P (gnu_type))
    {
      tree p_array_type = TREE_TYPE (TYPE_FIELDS (gnu_type));
      tree p_bounds_type = TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (gnu_type)));
      tree template_type = TREE_TYPE (p_bounds_type);
      tree min_field = TYPE_FIELDS (template_type);
      tree max_field = DECL_CHAIN (TYPE_FIELDS (template_type));
      tree template_tree, template_addr, aflags, dimct, t, u;
      /* See the head comment of build_vms_descriptor.  */
      int iklass = TREE_INT_CST_LOW (DECL_INITIAL (klass));
      tree lfield, ufield;
      VEC(constructor_elt,gc) *v;

      /* Convert POINTER to the pointer-to-array type.  */
      gnu_expr64 = convert (p_array_type, gnu_expr64);

      switch (iklass)
	{
	case 1: /* Class S */
	case 15: /* Class SB */
	  /* Build {1, LENGTH} template; LENGTH64 is the 5th field.  */
	  v = VEC_alloc (constructor_elt, gc, 2);
	  t = DECL_CHAIN (DECL_CHAIN (klass));
	  t = build3 (COMPONENT_REF, TREE_TYPE (t), desc, t, NULL_TREE);
	  CONSTRUCTOR_APPEND_ELT (v, min_field,
				  convert (TREE_TYPE (min_field),
					   integer_one_node));
	  CONSTRUCTOR_APPEND_ELT (v, max_field,
				  convert (TREE_TYPE (max_field), t));
	  template_tree = gnat_build_constructor (template_type, v);
	  template_addr = build_unary_op (ADDR_EXPR, NULL_TREE, template_tree);

	  /* For class S, we are done.  */
	  if (iklass == 1)
	    break;

	  /* Test that we really have a SB descriptor, like DEC Ada.  */
	  t = build3 (COMPONENT_REF, TREE_TYPE (klass), desc, klass, NULL);
	  u = convert (TREE_TYPE (klass), DECL_INITIAL (klass));
	  u = build_binary_op (EQ_EXPR, boolean_type_node, t, u);
	  /* If so, there is already a template in the descriptor and
	     it is located right after the POINTER field.  The fields are
	     64bits so they must be repacked.  */
	  t = DECL_CHAIN (pointer);
	  lfield = build3 (COMPONENT_REF, TREE_TYPE (t), desc, t, NULL_TREE);
	  lfield = convert (TREE_TYPE (TYPE_FIELDS (template_type)), lfield);

	  t = DECL_CHAIN (t);
	  ufield = build3 (COMPONENT_REF, TREE_TYPE (t), desc, t, NULL_TREE);
	  ufield = convert
	    (TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (template_type))), ufield);

	  /* Build the template in the form of a constructor.  */
	  v = VEC_alloc (constructor_elt, gc, 2);
	  CONSTRUCTOR_APPEND_ELT (v, TYPE_FIELDS (template_type), lfield);
	  CONSTRUCTOR_APPEND_ELT (v, DECL_CHAIN (TYPE_FIELDS (template_type)),
				  ufield);
	  template_tree = gnat_build_constructor (template_type, v);

	  /* Otherwise use the {1, LENGTH} template we build above.  */
	  template_addr = build3 (COND_EXPR, p_bounds_type, u,
				  build_unary_op (ADDR_EXPR, p_bounds_type,
						  template_tree),
				  template_addr);
	  break;

	case 4: /* Class A */
	  /* The AFLAGS field is the 3rd field after the pointer in the
	     descriptor.  */
	  t = DECL_CHAIN (DECL_CHAIN (DECL_CHAIN (pointer)));
	  aflags = build3 (COMPONENT_REF, TREE_TYPE (t), desc, t, NULL_TREE);
	  /* The DIMCT field is the next field in the descriptor after
	     aflags.  */
	  t = DECL_CHAIN (t);
	  dimct = build3 (COMPONENT_REF, TREE_TYPE (t), desc, t, NULL_TREE);
	  /* Raise CONSTRAINT_ERROR if either more than 1 dimension
	     or FL_COEFF or FL_BOUNDS not set.  192 is the mask built as
	     FL_COEFF | FL_BOUNDS in build_vms_descriptor's AFLAGS field.  */
	  u = build_int_cst (TREE_TYPE (aflags), 192);
	  u = build_binary_op (TRUTH_OR_EXPR, boolean_type_node,
			       build_binary_op (NE_EXPR, boolean_type_node,
						dimct,
						convert (TREE_TYPE (dimct),
							 size_one_node)),
			       build_binary_op (NE_EXPR, boolean_type_node,
						build2 (BIT_AND_EXPR,
							TREE_TYPE (aflags),
							aflags, u),
						u));
	  /* There is already a template in the descriptor and it is located
	     in block 3.  The fields are 64bits so they must be repacked.  */
	  t = DECL_CHAIN (DECL_CHAIN (DECL_CHAIN (DECL_CHAIN (DECL_CHAIN
              (t)))));
	  lfield = build3 (COMPONENT_REF, TREE_TYPE (t), desc, t, NULL_TREE);
	  lfield = convert (TREE_TYPE (TYPE_FIELDS (template_type)), lfield);

	  t = DECL_CHAIN (t);
	  ufield = build3 (COMPONENT_REF, TREE_TYPE (t), desc, t, NULL_TREE);
	  ufield = convert
	    (TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (template_type))), ufield);

	  /* Build the template in the form of a constructor.  */
	  v = VEC_alloc (constructor_elt, gc, 2);
	  CONSTRUCTOR_APPEND_ELT (v, TYPE_FIELDS (template_type), lfield);
	  CONSTRUCTOR_APPEND_ELT (v, DECL_CHAIN (TYPE_FIELDS (template_type)),
				  ufield);
	  template_tree = gnat_build_constructor (template_type, v);
	  /* Guard the template with the consistency check built above.  */
	  template_tree = build3 (COND_EXPR, template_type, u,
				  build_call_raise (CE_Length_Check_Failed, Empty,
						    N_Raise_Constraint_Error),
				  template_tree);
	  template_addr
	    = build_unary_op (ADDR_EXPR, p_bounds_type, template_tree);
	  break;

	case 10: /* Class NCA */
	default:
	  post_error ("unsupported descriptor type for &", gnat_subprog);
	  template_addr = integer_zero_node;
	  break;
	}

      /* Build the fat pointer in the form of a constructor.  */
      v = VEC_alloc (constructor_elt, gc, 2);
      CONSTRUCTOR_APPEND_ELT (v, TYPE_FIELDS (gnu_type), gnu_expr64);
      CONSTRUCTOR_APPEND_ELT (v, DECL_CHAIN (TYPE_FIELDS (gnu_type)),
			      template_addr);
      return gnat_build_constructor (gnu_type, v);
    }

  else
    gcc_unreachable ();
}
3182
3183 /* Convert GNU_EXPR, a pointer to a 32bit VMS descriptor, to GNU_TYPE, a
3184 regular pointer or fat pointer type. GNAT_SUBPROG is the subprogram to
3185 which the VMS descriptor is passed. */
3186
static tree
convert_vms_descriptor32 (tree gnu_type, tree gnu_expr, Entity_Id gnat_subprog)
{
  tree desc_type = TREE_TYPE (TREE_TYPE (gnu_expr));
  tree desc = build1 (INDIRECT_REF, desc_type, gnu_expr);
  /* The CLASS field is the 3rd field in the descriptor.  */
  tree klass = DECL_CHAIN (DECL_CHAIN (TYPE_FIELDS (desc_type)));
  /* The POINTER field is the 4th field in the descriptor.  */
  tree pointer = DECL_CHAIN (klass);

  /* Retrieve the value of the POINTER field.  */
  tree gnu_expr32
    = build3 (COMPONENT_REF, TREE_TYPE (pointer), desc, pointer, NULL_TREE);

  /* For a plain pointer target type, the data pointer is all we need.  */
  if (POINTER_TYPE_P (gnu_type))
    return convert (gnu_type, gnu_expr32);

  /* For a fat pointer, we must additionally materialize the bounds
     template from the descriptor contents.  */
  else if (TYPE_IS_FAT_POINTER_P (gnu_type))
    {
      tree p_array_type = TREE_TYPE (TYPE_FIELDS (gnu_type));
      tree p_bounds_type = TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (gnu_type)));
      tree template_type = TREE_TYPE (p_bounds_type);
      tree min_field = TYPE_FIELDS (template_type);
      tree max_field = DECL_CHAIN (TYPE_FIELDS (template_type));
      tree template_tree, template_addr, aflags, dimct, t, u;
      /* See the head comment of build_vms_descriptor.  */
      int iklass = TREE_INT_CST_LOW (DECL_INITIAL (klass));
      VEC(constructor_elt,gc) *v;

      /* Convert POINTER to the pointer-to-array type.  */
      gnu_expr32 = convert (p_array_type, gnu_expr32);

      switch (iklass)
	{
	case 1: /* Class S */
	case 15: /* Class SB */
	  /* Build {1, LENGTH} template; LENGTH is the 1st field.  */
	  v = VEC_alloc (constructor_elt, gc, 2);
	  t = TYPE_FIELDS (desc_type);
	  t = build3 (COMPONENT_REF, TREE_TYPE (t), desc, t, NULL_TREE);
	  CONSTRUCTOR_APPEND_ELT (v, min_field,
				  convert (TREE_TYPE (min_field),
					   integer_one_node));
	  CONSTRUCTOR_APPEND_ELT (v, max_field,
				  convert (TREE_TYPE (max_field), t));
	  template_tree = gnat_build_constructor (template_type, v);
	  template_addr = build_unary_op (ADDR_EXPR, NULL_TREE, template_tree);

	  /* For class S, we are done.  */
	  if (iklass == 1)
	    break;

	  /* Test that we really have a SB descriptor, like DEC Ada.  */
	  t = build3 (COMPONENT_REF, TREE_TYPE (klass), desc, klass, NULL);
	  u = convert (TREE_TYPE (klass), DECL_INITIAL (klass));
	  u = build_binary_op (EQ_EXPR, boolean_type_node, t, u);
	  /* If so, there is already a template in the descriptor and
	     it is located right after the POINTER field.  */
	  t = DECL_CHAIN (pointer);
	  template_tree
	    = build3 (COMPONENT_REF, TREE_TYPE (t), desc, t, NULL_TREE);
	  /* Otherwise use the {1, LENGTH} template we build above.  */
	  template_addr = build3 (COND_EXPR, p_bounds_type, u,
				  build_unary_op (ADDR_EXPR, p_bounds_type,
						  template_tree),
				  template_addr);
	  break;

	case 4: /* Class A */
	  /* The AFLAGS field is the 7th field in the descriptor.  */
	  t = DECL_CHAIN (DECL_CHAIN (DECL_CHAIN (pointer)));
	  aflags = build3 (COMPONENT_REF, TREE_TYPE (t), desc, t, NULL_TREE);
	  /* The DIMCT field is the 8th field in the descriptor.  */
	  t = DECL_CHAIN (t);
	  dimct = build3 (COMPONENT_REF, TREE_TYPE (t), desc, t, NULL_TREE);
	  /* Raise CONSTRAINT_ERROR if either more than 1 dimension
	     or FL_COEFF or FL_BOUNDS not set.  192 is the mask built as
	     FL_COEFF | FL_BOUNDS in the descriptor's AFLAGS field.  */
	  u = build_int_cst (TREE_TYPE (aflags), 192);
	  u = build_binary_op (TRUTH_OR_EXPR, boolean_type_node,
			       build_binary_op (NE_EXPR, boolean_type_node,
						dimct,
						convert (TREE_TYPE (dimct),
							 size_one_node)),
			       build_binary_op (NE_EXPR, boolean_type_node,
						build2 (BIT_AND_EXPR,
							TREE_TYPE (aflags),
							aflags, u),
						u));
	  /* There is already a template in the descriptor and it is
	     located at the start of block 3 (12th field).  */
	  t = DECL_CHAIN (DECL_CHAIN (DECL_CHAIN (DECL_CHAIN (t))));
	  template_tree
	    = build3 (COMPONENT_REF, TREE_TYPE (t), desc, t, NULL_TREE);
	  /* Guard the template with the consistency check built above.  */
	  template_tree = build3 (COND_EXPR, TREE_TYPE (t), u,
				  build_call_raise (CE_Length_Check_Failed, Empty,
						    N_Raise_Constraint_Error),
				  template_tree);
	  template_addr
	    = build_unary_op (ADDR_EXPR, p_bounds_type, template_tree);
	  break;

	case 10: /* Class NCA */
	default:
	  post_error ("unsupported descriptor type for &", gnat_subprog);
	  template_addr = integer_zero_node;
	  break;
	}

      /* Build the fat pointer in the form of a constructor.  */
      v = VEC_alloc (constructor_elt, gc, 2);
      CONSTRUCTOR_APPEND_ELT (v, TYPE_FIELDS (gnu_type), gnu_expr32);
      CONSTRUCTOR_APPEND_ELT (v, DECL_CHAIN (TYPE_FIELDS (gnu_type)),
			      template_addr);

      return gnat_build_constructor (gnu_type, v);
    }

  else
    gcc_unreachable ();
}
3307
3308 /* Convert GNU_EXPR, a pointer to a VMS descriptor, to GNU_TYPE, a regular
3309 pointer or fat pointer type. GNU_EXPR_ALT_TYPE is the alternate (32-bit)
3310 pointer type of GNU_EXPR. BY_REF is true if the result is to be used by
3311 reference. GNAT_SUBPROG is the subprogram to which the VMS descriptor is
3312 passed. */
3313
3314 tree
3315 convert_vms_descriptor (tree gnu_type, tree gnu_expr, tree gnu_expr_alt_type,
3316 bool by_ref, Entity_Id gnat_subprog)
3317 {
3318 tree desc_type = TREE_TYPE (TREE_TYPE (gnu_expr));
3319 tree desc = build1 (INDIRECT_REF, desc_type, gnu_expr);
3320 tree mbo = TYPE_FIELDS (desc_type);
3321 const char *mbostr = IDENTIFIER_POINTER (DECL_NAME (mbo));
3322 tree mbmo = DECL_CHAIN (DECL_CHAIN (DECL_CHAIN (mbo)));
3323 tree real_type, is64bit, gnu_expr32, gnu_expr64;
3324
3325 if (by_ref)
3326 real_type = TREE_TYPE (gnu_type);
3327 else
3328 real_type = gnu_type;
3329
3330 /* If the field name is not MBO, it must be 32-bit and no alternate.
3331 Otherwise primary must be 64-bit and alternate 32-bit. */
3332 if (strcmp (mbostr, "MBO") != 0)
3333 {
3334 tree ret = convert_vms_descriptor32 (real_type, gnu_expr, gnat_subprog);
3335 if (by_ref)
3336 ret = build_unary_op (ADDR_EXPR, gnu_type, ret);
3337 return ret;
3338 }
3339
3340 /* Build the test for 64-bit descriptor. */
3341 mbo = build3 (COMPONENT_REF, TREE_TYPE (mbo), desc, mbo, NULL_TREE);
3342 mbmo = build3 (COMPONENT_REF, TREE_TYPE (mbmo), desc, mbmo, NULL_TREE);
3343 is64bit
3344 = build_binary_op (TRUTH_ANDIF_EXPR, boolean_type_node,
3345 build_binary_op (EQ_EXPR, boolean_type_node,
3346 convert (integer_type_node, mbo),
3347 integer_one_node),
3348 build_binary_op (EQ_EXPR, boolean_type_node,
3349 convert (integer_type_node, mbmo),
3350 integer_minus_one_node));
3351
3352 /* Build the 2 possible end results. */
3353 gnu_expr64 = convert_vms_descriptor64 (real_type, gnu_expr, gnat_subprog);
3354 if (by_ref)
3355 gnu_expr64 = build_unary_op (ADDR_EXPR, gnu_type, gnu_expr64);
3356 gnu_expr = fold_convert (gnu_expr_alt_type, gnu_expr);
3357 gnu_expr32 = convert_vms_descriptor32 (real_type, gnu_expr, gnat_subprog);
3358 if (by_ref)
3359 gnu_expr32 = build_unary_op (ADDR_EXPR, gnu_type, gnu_expr32);
3360
3361 return build3 (COND_EXPR, gnu_type, is64bit, gnu_expr64, gnu_expr32);
3362 }
3363 \f
3364 /* Build a type to be used to represent an aliased object whose nominal type
3365 is an unconstrained array. This consists of a RECORD_TYPE containing a
3366 field of TEMPLATE_TYPE and a field of OBJECT_TYPE, which is an ARRAY_TYPE.
3367 If ARRAY_TYPE is that of an unconstrained array, this is used to represent
3368 an arbitrary unconstrained object. Use NAME as the name of the record.
3369 DEBUG_INFO_P is true if we need to write debug information for the type. */
3370
3371 tree
3372 build_unc_object_type (tree template_type, tree object_type, tree name,
3373 bool debug_info_p)
3374 {
3375 tree type = make_node (RECORD_TYPE);
3376 tree template_field
3377 = create_field_decl (get_identifier ("BOUNDS"), template_type, type,
3378 NULL_TREE, NULL_TREE, 0, 1);
3379 tree array_field
3380 = create_field_decl (get_identifier ("ARRAY"), object_type, type,
3381 NULL_TREE, NULL_TREE, 0, 1);
3382
3383 TYPE_NAME (type) = name;
3384 TYPE_CONTAINS_TEMPLATE_P (type) = 1;
3385 DECL_CHAIN (template_field) = array_field;
3386 finish_record_type (type, template_field, 0, true);
3387
3388 /* Declare it now since it will never be declared otherwise. This is
3389 necessary to ensure that its subtrees are properly marked. */
3390 create_type_decl (name, type, NULL, true, debug_info_p, Empty);
3391
3392 return type;
3393 }
3394
3395 /* Same, taking a thin or fat pointer type instead of a template type. */
3396
3397 tree
3398 build_unc_object_type_from_ptr (tree thin_fat_ptr_type, tree object_type,
3399 tree name, bool debug_info_p)
3400 {
3401 tree template_type;
3402
3403 gcc_assert (TYPE_IS_FAT_OR_THIN_POINTER_P (thin_fat_ptr_type));
3404
3405 template_type
3406 = (TYPE_IS_FAT_POINTER_P (thin_fat_ptr_type)
3407 ? TREE_TYPE (TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (thin_fat_ptr_type))))
3408 : TREE_TYPE (TYPE_FIELDS (TREE_TYPE (thin_fat_ptr_type))));
3409
3410 return
3411 build_unc_object_type (template_type, object_type, name, debug_info_p);
3412 }
3413 \f
3414 /* Update anything previously pointing to OLD_TYPE to point to NEW_TYPE.
3415 In the normal case this is just two adjustments, but we have more to
3416 do if NEW_TYPE is an UNCONSTRAINED_ARRAY_TYPE. */
3417
void
update_pointer_to (tree old_type, tree new_type)
{
  tree ptr = TYPE_POINTER_TO (old_type);
  tree ref = TYPE_REFERENCE_TO (old_type);
  tree t;

  /* If this is the main variant, process all the other variants first.  */
  if (TYPE_MAIN_VARIANT (old_type) == old_type)
    for (t = TYPE_NEXT_VARIANT (old_type); t; t = TYPE_NEXT_VARIANT (t))
      update_pointer_to (t, new_type);

  /* If no pointers and no references, we are done.  */
  if (!ptr && !ref)
    return;

  /* Merge the old type qualifiers in the new type.

     Each old variant has qualifiers for specific reasons, and the new
     designated type as well.  Each set of qualifiers represents useful
     information grabbed at some point, and merging the two simply unifies
     these inputs into the final type description.

     Consider for instance a volatile type frozen after an access to constant
     type designating it; after the designated type's freeze, we get here with
     a volatile NEW_TYPE and a dummy OLD_TYPE with a readonly variant, created
     when the access type was processed.  We will make a volatile and readonly
     designated type, because that's what it really is.

     We might also get here for a non-dummy OLD_TYPE variant with different
     qualifiers than those of NEW_TYPE, for instance in some cases of pointers
     to private record type elaboration (see the comments around the call to
     this routine in gnat_to_gnu_entity <E_Access_Type>).  We have to merge
     the qualifiers in those cases too, to avoid accidentally discarding the
     initial set, and will often end up with OLD_TYPE == NEW_TYPE then.  */
  new_type
    = build_qualified_type (new_type,
			    TYPE_QUALS (old_type) | TYPE_QUALS (new_type));

  /* If old type and new type are identical, there is nothing to do.  */
  if (old_type == new_type)
    return;

  /* Otherwise, first handle the simple case.  */
  if (TREE_CODE (new_type) != UNCONSTRAINED_ARRAY_TYPE)
    {
      tree new_ptr, new_ref;

      /* If pointer or reference already points to new type, nothing to do.
	 This can happen as update_pointer_to can be invoked multiple times
	 on the same couple of types because of the type variants.  */
      if ((ptr && TREE_TYPE (ptr) == new_type)
	  || (ref && TREE_TYPE (ref) == new_type))
	return;

      /* Chain PTR and its variants at the end.  */
      new_ptr = TYPE_POINTER_TO (new_type);
      if (new_ptr)
	{
	  while (TYPE_NEXT_PTR_TO (new_ptr))
	    new_ptr = TYPE_NEXT_PTR_TO (new_ptr);
	  TYPE_NEXT_PTR_TO (new_ptr) = ptr;
	}
      else
	TYPE_POINTER_TO (new_type) = ptr;

      /* Now adjust them.  Every pointer, and every variant of each pointer,
	 must designate NEW_TYPE from here on.  */
      for (; ptr; ptr = TYPE_NEXT_PTR_TO (ptr))
	for (t = TYPE_MAIN_VARIANT (ptr); t; t = TYPE_NEXT_VARIANT (t))
	  {
	    TREE_TYPE (t) = new_type;
	    /* TYPE_NULL_BOUNDS is an ADDR_EXPR (see convert_to_fat_pointer);
	       keep the type of the object whose address it takes in sync.  */
	    if (TYPE_NULL_BOUNDS (t))
	      TREE_TYPE (TREE_OPERAND (TYPE_NULL_BOUNDS (t), 0)) = new_type;
	  }

      /* Chain REF and its variants at the end.  */
      new_ref = TYPE_REFERENCE_TO (new_type);
      if (new_ref)
	{
	  while (TYPE_NEXT_REF_TO (new_ref))
	    new_ref = TYPE_NEXT_REF_TO (new_ref);
	  TYPE_NEXT_REF_TO (new_ref) = ref;
	}
      else
	TYPE_REFERENCE_TO (new_type) = ref;

      /* Now adjust them.  */
      for (; ref; ref = TYPE_NEXT_REF_TO (ref))
	for (t = TYPE_MAIN_VARIANT (ref); t; t = TYPE_NEXT_VARIANT (t))
	  TREE_TYPE (t) = new_type;

      /* OLD_TYPE no longer owns any pointers or references.  */
      TYPE_POINTER_TO (old_type) = NULL_TREE;
      TYPE_REFERENCE_TO (old_type) = NULL_TREE;
    }

  /* Now deal with the unconstrained array case.  In this case the pointer
     is actually a record where both fields are pointers to dummy nodes.
     Turn them into pointers to the correct types using update_pointer_to.
     Likewise for the pointer to the object record (thin pointer).  */
  else
    {
      tree new_ptr = TYPE_POINTER_TO (new_type);

      gcc_assert (TYPE_IS_FAT_POINTER_P (ptr));

      /* If PTR already points to NEW_TYPE, nothing to do.  This can happen
	 since update_pointer_to can be invoked multiple times on the same
	 couple of types because of the type variants.  */
      if (TYPE_UNCONSTRAINED_ARRAY (ptr) == new_type)
	return;

      /* First field of the fat pointer: pointer to the array data.  */
      update_pointer_to
	(TREE_TYPE (TREE_TYPE (TYPE_FIELDS (ptr))),
	 TREE_TYPE (TREE_TYPE (TYPE_FIELDS (new_ptr))));

      /* Second field: pointer to the bounds template.  */
      update_pointer_to
	(TREE_TYPE (TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (ptr)))),
	 TREE_TYPE (TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (new_ptr)))));

      update_pointer_to (TYPE_OBJECT_RECORD_TYPE (old_type),
			 TYPE_OBJECT_RECORD_TYPE (new_type));

      TYPE_POINTER_TO (old_type) = NULL_TREE;
    }
}
3543 \f
3544 /* Convert EXPR, a pointer to a constrained array, into a pointer to an
3545 unconstrained one. This involves making or finding a template. */
3546
static tree
convert_to_fat_pointer (tree type, tree expr)
{
  /* TYPE is a fat pointer: its first field points to the array, its second
     field points to the bounds template.  */
  tree template_type = TREE_TYPE (TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (type))));
  tree p_array_type = TREE_TYPE (TYPE_FIELDS (type));
  tree etype = TREE_TYPE (expr);
  tree template_tree;
  VEC(constructor_elt,gc) *v = VEC_alloc (constructor_elt, gc, 2);

  /* If EXPR is null, make a fat pointer that contains a null pointer to the
     array (compare_fat_pointers ensures that this is the full discriminant)
     and a valid pointer to the bounds.  This latter property is necessary
     since the compiler can hoist the load of the bounds done through it.  */
  if (integer_zerop (expr))
    {
      tree ptr_template_type = TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (type)));
      tree null_bounds, t;

      /* Reuse the null-bounds object cached on the pointer-to-template type
	 if one was already built for it.  */
      if (TYPE_NULL_BOUNDS (ptr_template_type))
	null_bounds = TYPE_NULL_BOUNDS (ptr_template_type);
      else
	{
	  /* The template type can still be dummy at this point so we build an
	     empty constructor.  The middle-end will fill it in with zeros.  */
	  t = build_constructor (template_type, NULL);
	  TREE_CONSTANT (t) = TREE_STATIC (t) = 1;
	  null_bounds = build_unary_op (ADDR_EXPR, NULL_TREE, t);
	  SET_TYPE_NULL_BOUNDS (ptr_template_type, null_bounds);
	}

      CONSTRUCTOR_APPEND_ELT (v, TYPE_FIELDS (type),
			      fold_convert (p_array_type, null_pointer_node));
      CONSTRUCTOR_APPEND_ELT (v, DECL_CHAIN (TYPE_FIELDS (type)), null_bounds);
      t = build_constructor (type, v);
      /* Do not set TREE_CONSTANT so as to force T to static memory.  */
      TREE_CONSTANT (t) = 0;
      TREE_STATIC (t) = 1;

      return t;
    }

  /* If EXPR is a thin pointer, make template and data from the record.  */
  if (TYPE_IS_THIN_POINTER_P (etype))
    {
      tree field = TYPE_FIELDS (TREE_TYPE (etype));

      expr = gnat_protect_expr (expr);

      /* If EXPR is the address of a record object, refer to it directly.  */
      if (TREE_CODE (expr) == ADDR_EXPR)
	expr = TREE_OPERAND (expr, 0);
      else
	{
	  /* If we have a TYPE_UNCONSTRAINED_ARRAY attached to the RECORD_TYPE,
	     the thin pointer value has been shifted so we first need to shift
	     it back to get the template address.  */
	  if (TYPE_UNCONSTRAINED_ARRAY (TREE_TYPE (etype)))
	    expr
	      = build_binary_op (POINTER_PLUS_EXPR, etype, expr,
				 fold_build1 (NEGATE_EXPR, sizetype,
					      byte_position
					      (DECL_CHAIN (field))));
	  expr = build1 (INDIRECT_REF, TREE_TYPE (etype), expr);
	}

      /* The record's first field is the template, the next one the data;
	 take the address of the latter for the fat pointer's array part.  */
      template_tree = build_component_ref (expr, NULL_TREE, field, false);
      expr = build_unary_op (ADDR_EXPR, NULL_TREE,
			     build_component_ref (expr, NULL_TREE,
						  DECL_CHAIN (field), false));
    }

  /* Otherwise, build the constructor for the template.  */
  else
    template_tree = build_template (template_type, TREE_TYPE (etype), expr);

  /* The final result is a constructor for the fat pointer.

     If EXPR is an argument of a foreign convention subprogram, the type it
     points to is directly the component type.  In this case, the expression
     type may not match the corresponding FIELD_DECL type at this point, so we
     call "convert" here to fix that up if necessary.  This type consistency is
     required, for instance because it ensures that possible later folding of
     COMPONENT_REFs against this constructor always yields something of the
     same type as the initial reference.

     Note that the call to "build_template" above is still fine because it
     will only refer to the provided TEMPLATE_TYPE in this case.  */
  CONSTRUCTOR_APPEND_ELT (v, TYPE_FIELDS (type),
			  convert (p_array_type, expr));
  CONSTRUCTOR_APPEND_ELT (v, DECL_CHAIN (TYPE_FIELDS (type)),
			  build_unary_op (ADDR_EXPR, NULL_TREE,
					  template_tree));
  return gnat_build_constructor (type, v);
}
3639 \f
3640 /* Create an expression whose value is that of EXPR,
3641 converted to type TYPE. The TREE_TYPE of the value
3642 is always TYPE. This function implements all reasonable
3643 conversions; callers should filter out those that are
3644 not permitted by the language being compiled. */
3645
tree
convert (tree type, tree expr)
{
  tree etype = TREE_TYPE (expr);
  enum tree_code ecode = TREE_CODE (etype);
  enum tree_code code = TREE_CODE (type);

  /* If the expression is already of the right type, we are done.  */
  if (etype == type)
    return expr;

  /* If both input and output have padding and are of variable size, do this
     as an unchecked conversion.  Likewise if one is a mere variant of the
     other, so we avoid a pointless unpad/repad sequence.  */
  else if (code == RECORD_TYPE && ecode == RECORD_TYPE
	   && TYPE_PADDING_P (type) && TYPE_PADDING_P (etype)
	   && (!TREE_CONSTANT (TYPE_SIZE (type))
	       || !TREE_CONSTANT (TYPE_SIZE (etype))
	       || gnat_types_compatible_p (type, etype)
	       || TYPE_NAME (TREE_TYPE (TYPE_FIELDS (type)))
		  == TYPE_NAME (TREE_TYPE (TYPE_FIELDS (etype)))))
    ;

  /* If the output type has padding, convert to the inner type and make a
     constructor to build the record, unless a variable size is involved.  */
  else if (code == RECORD_TYPE && TYPE_PADDING_P (type))
    {
      VEC(constructor_elt,gc) *v;

      /* If we previously converted from another type and our type is
	 of variable size, remove the conversion to avoid the need for
	 variable-sized temporaries.  Likewise for a conversion between
	 original and packable version.  */
      if (TREE_CODE (expr) == VIEW_CONVERT_EXPR
	  && (!TREE_CONSTANT (TYPE_SIZE (type))
	      || (ecode == RECORD_TYPE
		  && TYPE_NAME (etype)
		     == TYPE_NAME (TREE_TYPE (TREE_OPERAND (expr, 0))))))
	expr = TREE_OPERAND (expr, 0);

      /* If we are just removing the padding from expr, convert the original
	 object if we have variable size in order to avoid the need for some
	 variable-sized temporaries.  Likewise if the padding is a variant
	 of the other, so we avoid a pointless unpad/repad sequence.  */
      if (TREE_CODE (expr) == COMPONENT_REF
	  && TYPE_IS_PADDING_P (TREE_TYPE (TREE_OPERAND (expr, 0)))
	  && (!TREE_CONSTANT (TYPE_SIZE (type))
	      || gnat_types_compatible_p (type,
					  TREE_TYPE (TREE_OPERAND (expr, 0)))
	      || (ecode == RECORD_TYPE
		  && TYPE_NAME (etype)
		     == TYPE_NAME (TREE_TYPE (TYPE_FIELDS (type))))))
	return convert (type, TREE_OPERAND (expr, 0));

      /* If the inner type is of self-referential size and the expression type
	 is a record, do this as an unchecked conversion.  But first pad the
	 expression if possible to have the same size on both sides.  */
      if (ecode == RECORD_TYPE
	  && CONTAINS_PLACEHOLDER_P (DECL_SIZE (TYPE_FIELDS (type))))
	{
	  if (TREE_CODE (TYPE_SIZE (etype)) == INTEGER_CST)
	    expr = convert (maybe_pad_type (etype, TYPE_SIZE (type), 0, Empty,
					    false, false, false, true),
			    expr);
	  return unchecked_convert (type, expr, false);
	}

      /* If we are converting between array types with variable size, do the
	 final conversion as an unchecked conversion, again to avoid the need
	 for some variable-sized temporaries.  If valid, this conversion is
	 very likely purely technical and without real effects.  */
      if (ecode == ARRAY_TYPE
	  && TREE_CODE (TREE_TYPE (TYPE_FIELDS (type))) == ARRAY_TYPE
	  && !TREE_CONSTANT (TYPE_SIZE (etype))
	  && !TREE_CONSTANT (TYPE_SIZE (type)))
	return unchecked_convert (type,
				  convert (TREE_TYPE (TYPE_FIELDS (type)),
					   expr),
				  false);

      /* Otherwise wrap EXPR in a one-field constructor of the padded type.  */
      v = VEC_alloc (constructor_elt, gc, 1);
      CONSTRUCTOR_APPEND_ELT (v, TYPE_FIELDS (type),
			      convert (TREE_TYPE (TYPE_FIELDS (type)), expr));
      return gnat_build_constructor (type, v);
    }

  /* If the input type has padding, remove it and convert to the output type.
     The conditions ordering is arranged to ensure that the output type is not
     a padding type here, as it is not clear whether the conversion would
     always be correct if this was to happen.  */
  else if (ecode == RECORD_TYPE && TYPE_PADDING_P (etype))
    {
      tree unpadded;

      /* If we have just converted to this padded type, just get the
	 inner expression.  */
      if (TREE_CODE (expr) == CONSTRUCTOR
	  && !VEC_empty (constructor_elt, CONSTRUCTOR_ELTS (expr))
	  && VEC_index (constructor_elt, CONSTRUCTOR_ELTS (expr), 0)->index
	     == TYPE_FIELDS (etype))
	unpadded
	  = VEC_index (constructor_elt, CONSTRUCTOR_ELTS (expr), 0)->value;

      /* Otherwise, build an explicit component reference.  */
      else
	unpadded
	  = build_component_ref (expr, NULL_TREE, TYPE_FIELDS (etype), false);

      return convert (type, unpadded);
    }

  /* If the input is a biased type, adjust first.  */
  if (ecode == INTEGER_TYPE && TYPE_BIASED_REPRESENTATION_P (etype))
    return convert (type, fold_build2 (PLUS_EXPR, TREE_TYPE (etype),
				       fold_convert (TREE_TYPE (etype),
						     expr),
				       TYPE_MIN_VALUE (etype)));

  /* If the input is a justified modular type, we need to extract the actual
     object before converting it to any other type with the exceptions of an
     unconstrained array or of a mere type variant.  It is useful to avoid the
     extraction and conversion in the type variant case because it could end
     up replacing a VAR_DECL expr by a constructor and we might be about to
     take the address of the result.  */
  if (ecode == RECORD_TYPE && TYPE_JUSTIFIED_MODULAR_P (etype)
      && code != UNCONSTRAINED_ARRAY_TYPE
      && TYPE_MAIN_VARIANT (type) != TYPE_MAIN_VARIANT (etype))
    return convert (type, build_component_ref (expr, NULL_TREE,
					       TYPE_FIELDS (etype), false));

  /* If converting to a type that contains a template, convert to the data
     type and then build the template.  */
  if (code == RECORD_TYPE && TYPE_CONTAINS_TEMPLATE_P (type))
    {
      tree obj_type = TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (type)));
      VEC(constructor_elt,gc) *v = VEC_alloc (constructor_elt, gc, 2);

      /* If the source already has a template, get a reference to the
	 associated array only, as we are going to rebuild a template
	 for the target type anyway.  */
      expr = maybe_unconstrained_array (expr);

      CONSTRUCTOR_APPEND_ELT (v, TYPE_FIELDS (type),
			      build_template (TREE_TYPE (TYPE_FIELDS (type)),
					      obj_type, NULL_TREE));
      CONSTRUCTOR_APPEND_ELT (v, DECL_CHAIN (TYPE_FIELDS (type)),
			      convert (obj_type, expr));
      return gnat_build_constructor (type, v);
    }

  /* There are some cases of expressions that we process specially.  */
  switch (TREE_CODE (expr))
    {
    case ERROR_MARK:
      return expr;

    case NULL_EXPR:
      /* Just set its type here.  For TRANSFORM_EXPR, we will do the actual
	 conversion in gnat_expand_expr.  NULL_EXPR does not represent
	 an actual value, so no conversion is needed.  */
      expr = copy_node (expr);
      TREE_TYPE (expr) = type;
      return expr;

    case STRING_CST:
      /* If we are converting a STRING_CST to another constrained array type,
	 just make a new one in the proper type.  */
      if (code == ecode && AGGREGATE_TYPE_P (etype)
	  && !(TREE_CODE (TYPE_SIZE (etype)) == INTEGER_CST
	       && TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST))
	{
	  expr = copy_node (expr);
	  TREE_TYPE (expr) = type;
	  return expr;
	}
      break;

    case VECTOR_CST:
      /* If we are converting a VECTOR_CST to a mere variant type, just make
	 a new one in the proper type.  */
      if (code == ecode && gnat_types_compatible_p (type, etype))
	{
	  expr = copy_node (expr);
	  TREE_TYPE (expr) = type;
	  return expr;
	}

      /* NOTE(review): no break here, so a VECTOR_CST falls through to the
	 CONSTRUCTOR handling below.  This looks harmless because its guards
	 (RECORD_TYPE or ARRAY_TYPE source) cannot match a vector type, but
	 confirm the fall-through is intended.  */

    case CONSTRUCTOR:
      /* If we are converting a CONSTRUCTOR to a mere variant type, just make
	 a new one in the proper type.  */
      if (code == ecode && gnat_types_compatible_p (type, etype))
	{
	  expr = copy_node (expr);
	  TREE_TYPE (expr) = type;
	  CONSTRUCTOR_ELTS (expr)
	    = VEC_copy (constructor_elt, gc, CONSTRUCTOR_ELTS (expr));
	  return expr;
	}

      /* Likewise for a conversion between original and packable version, or
	 conversion between types of the same size and with the same list of
	 fields, but we have to work harder to preserve type consistency.  */
      if (code == ecode
	  && code == RECORD_TYPE
	  && (TYPE_NAME (type) == TYPE_NAME (etype)
	      || tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (etype))))
	{
	  VEC(constructor_elt,gc) *e = CONSTRUCTOR_ELTS (expr);
	  unsigned HOST_WIDE_INT len = VEC_length (constructor_elt, e);
	  VEC(constructor_elt,gc) *v = VEC_alloc (constructor_elt, gc, len);
	  tree efield = TYPE_FIELDS (etype), field = TYPE_FIELDS (type);
	  unsigned HOST_WIDE_INT idx;
	  tree index, value;

	  /* Whether we need to clear TREE_CONSTANT et al. on the output
	     constructor when we convert in place.  */
	  bool clear_constant = false;

	  /* Walk the input elements and the two field chains in lockstep,
	     converting each value to its output field type.  */
	  FOR_EACH_CONSTRUCTOR_ELT(e, idx, index, value)
	    {
	      constructor_elt *elt;
	      /* We expect only simple constructors.  */
	      if (!SAME_FIELD_P (index, efield))
		break;
	      /* The field must be the same.  */
	      if (!SAME_FIELD_P (efield, field))
		break;
	      elt = VEC_quick_push (constructor_elt, v, NULL);
	      elt->index = field;
	      elt->value = convert (TREE_TYPE (field), value);

	      /* If packing has made this field a bitfield and the input
		 value couldn't be emitted statically any more, we need to
		 clear TREE_CONSTANT on our output.  */
	      if (!clear_constant
		  && TREE_CONSTANT (expr)
		  && !CONSTRUCTOR_BITFIELD_P (efield)
		  && CONSTRUCTOR_BITFIELD_P (field)
		  && !initializer_constant_valid_for_bitfield_p (value))
		clear_constant = true;

	      efield = DECL_CHAIN (efield);
	      field = DECL_CHAIN (field);
	    }

	  /* If we have been able to match and convert all the input fields
	     to their output type, convert in place now.  We'll fallback to a
	     view conversion downstream otherwise.  */
	  if (idx == len)
	    {
	      expr = copy_node (expr);
	      TREE_TYPE (expr) = type;
	      CONSTRUCTOR_ELTS (expr) = v;
	      if (clear_constant)
		TREE_CONSTANT (expr) = TREE_STATIC (expr) = 0;
	      return expr;
	    }
	}

      /* Likewise for a conversion between array type and vector type with a
	 compatible representative array.  */
      else if (code == VECTOR_TYPE
	       && ecode == ARRAY_TYPE
	       && gnat_types_compatible_p (TYPE_REPRESENTATIVE_ARRAY (type),
					   etype))
	{
	  VEC(constructor_elt,gc) *e = CONSTRUCTOR_ELTS (expr);
	  unsigned HOST_WIDE_INT len = VEC_length (constructor_elt, e);
	  VEC(constructor_elt,gc) *v;
	  unsigned HOST_WIDE_INT ix;
	  tree value;

	  /* Build a VECTOR_CST from a *constant* array constructor.  */
	  if (TREE_CONSTANT (expr))
	    {
	      bool constant_p = true;

	      /* Iterate through elements and check if all constructor
		 elements are *_CSTs.  */
	      FOR_EACH_CONSTRUCTOR_VALUE (e, ix, value)
		if (!CONSTANT_CLASS_P (value))
		  {
		    constant_p = false;
		    break;
		  }

	      if (constant_p)
		return build_vector_from_ctor (type,
					       CONSTRUCTOR_ELTS (expr));
	    }

	  /* Otherwise, build a regular vector constructor.  */
	  v = VEC_alloc (constructor_elt, gc, len);
	  FOR_EACH_CONSTRUCTOR_VALUE (e, ix, value)
	    {
	      constructor_elt *elt = VEC_quick_push (constructor_elt, v, NULL);
	      elt->index = NULL_TREE;
	      elt->value = value;
	    }
	  expr = copy_node (expr);
	  TREE_TYPE (expr) = type;
	  CONSTRUCTOR_ELTS (expr) = v;
	  return expr;
	}
      break;

    case UNCONSTRAINED_ARRAY_REF:
      /* First retrieve the underlying array.  */
      expr = maybe_unconstrained_array (expr);
      etype = TREE_TYPE (expr);
      ecode = TREE_CODE (etype);
      break;

    case VIEW_CONVERT_EXPR:
      {
	/* GCC 4.x is very sensitive to type consistency overall, and view
	   conversions thus are very frequent.  Even though just "convert"ing
	   the inner operand to the output type is fine in most cases, it
	   might expose unexpected input/output type mismatches in special
	   circumstances so we avoid such recursive calls when we can.  */
	tree op0 = TREE_OPERAND (expr, 0);

	/* If we are converting back to the original type, we can just
	   lift the input conversion.  This is a common occurrence with
	   switches back-and-forth amongst type variants.  */
	if (type == TREE_TYPE (op0))
	  return op0;

	/* Otherwise, if we're converting between two aggregate or vector
	   types, we might be allowed to substitute the VIEW_CONVERT_EXPR
	   target type in place or to just convert the inner expression.  */
	if ((AGGREGATE_TYPE_P (type) && AGGREGATE_TYPE_P (etype))
	    || (VECTOR_TYPE_P (type) && VECTOR_TYPE_P (etype)))
	  {
	    /* If we are converting between mere variants, we can just
	       substitute the VIEW_CONVERT_EXPR in place.  */
	    if (gnat_types_compatible_p (type, etype))
	      return build1 (VIEW_CONVERT_EXPR, type, op0);

	    /* Otherwise, we may just bypass the input view conversion unless
	       one of the types is a fat pointer, which is handled by
	       specialized code below which relies on exact type matching.  */
	    else if (!TYPE_IS_FAT_POINTER_P (type)
		     && !TYPE_IS_FAT_POINTER_P (etype))
	      return convert (type, op0);
	  }

	break;
      }

    default:
      break;
    }

  /* Check for converting to a pointer to an unconstrained array.  */
  if (TYPE_IS_FAT_POINTER_P (type) && !TYPE_IS_FAT_POINTER_P (etype))
    return convert_to_fat_pointer (type, expr);

  /* If we are converting between two aggregate or vector types that are mere
     variants, just make a VIEW_CONVERT_EXPR.  Likewise when we are converting
     to a vector type from its representative array type.  */
  else if ((code == ecode
	    && (AGGREGATE_TYPE_P (type) || VECTOR_TYPE_P (type))
	    && gnat_types_compatible_p (type, etype))
	   || (code == VECTOR_TYPE
	       && ecode == ARRAY_TYPE
	       && gnat_types_compatible_p (TYPE_REPRESENTATIVE_ARRAY (type),
					   etype)))
    return build1 (VIEW_CONVERT_EXPR, type, expr);

  /* If we are converting between tagged types, try to upcast properly.
     Walk down the chain of parent fields looking for one of the target
     type.  */
  else if (ecode == RECORD_TYPE && code == RECORD_TYPE
	   && TYPE_ALIGN_OK (etype) && TYPE_ALIGN_OK (type))
    {
      tree child_etype = etype;
      do {
	tree field = TYPE_FIELDS (child_etype);
	if (DECL_NAME (field) == parent_name_id && TREE_TYPE (field) == type)
	  return build_component_ref (expr, NULL_TREE, field, false);
	child_etype = TREE_TYPE (field);
      } while (TREE_CODE (child_etype) == RECORD_TYPE);
    }

  /* If we are converting from a smaller form of record type back to it, just
     make a VIEW_CONVERT_EXPR.  But first pad the expression to have the same
     size on both sides.  */
  else if (ecode == RECORD_TYPE && code == RECORD_TYPE
	   && smaller_form_type_p (etype, type))
    {
      expr = convert (maybe_pad_type (etype, TYPE_SIZE (type), 0, Empty,
				      false, false, false, true),
		      expr);
      return build1 (VIEW_CONVERT_EXPR, type, expr);
    }

  /* In all other cases of related types, make a NOP_EXPR.  */
  else if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (etype))
    return fold_convert (type, expr);

  switch (code)
    {
    case VOID_TYPE:
      return fold_build1 (CONVERT_EXPR, type, expr);

    case INTEGER_TYPE:
      if (TYPE_HAS_ACTUAL_BOUNDS_P (type)
	  && (ecode == ARRAY_TYPE || ecode == UNCONSTRAINED_ARRAY_TYPE
	      || (ecode == RECORD_TYPE && TYPE_CONTAINS_TEMPLATE_P (etype))))
	return unchecked_convert (type, expr, false);
      else if (TYPE_BIASED_REPRESENTATION_P (type))
	return fold_convert (type,
			     fold_build2 (MINUS_EXPR, TREE_TYPE (type),
					  convert (TREE_TYPE (type), expr),
					  TYPE_MIN_VALUE (type)));

      /* ... fall through ... */

    case ENUMERAL_TYPE:
    case BOOLEAN_TYPE:
      /* If we are converting an additive expression to an integer type
	 with lower precision, be wary of the optimization that can be
	 applied by convert_to_integer.  There are 2 problematic cases:
	   - if the first operand was originally of a biased type,
	     because we could be recursively called to convert it
	     to an intermediate type and thus rematerialize the
	     additive operator endlessly,
	   - if the expression contains a placeholder, because an
	     intermediate conversion that changes the sign could
	     be inserted and thus introduce an artificial overflow
	     at compile time when the placeholder is substituted.  */
      if (code == INTEGER_TYPE
	  && ecode == INTEGER_TYPE
	  && TYPE_PRECISION (type) < TYPE_PRECISION (etype)
	  && (TREE_CODE (expr) == PLUS_EXPR || TREE_CODE (expr) == MINUS_EXPR))
	{
	  tree op0 = get_unwidened (TREE_OPERAND (expr, 0), type);

	  if ((TREE_CODE (TREE_TYPE (op0)) == INTEGER_TYPE
	       && TYPE_BIASED_REPRESENTATION_P (TREE_TYPE (op0)))
	      || CONTAINS_PLACEHOLDER_P (expr))
	    return build1 (NOP_EXPR, type, expr);
	}

      return fold (convert_to_integer (type, expr));

    case POINTER_TYPE:
    case REFERENCE_TYPE:
      /* If converting between two thin pointers, adjust if needed to account
	 for differing offsets from the base pointer, depending on whether
	 there is a TYPE_UNCONSTRAINED_ARRAY attached to the record type.  */
      if (TYPE_IS_THIN_POINTER_P (etype) && TYPE_IS_THIN_POINTER_P (type))
	{
	  tree etype_pos
	    = TYPE_UNCONSTRAINED_ARRAY (TREE_TYPE (etype)) != NULL_TREE
	      ? byte_position (DECL_CHAIN (TYPE_FIELDS (TREE_TYPE (etype))))
	      : size_zero_node;
	  tree type_pos
	    = TYPE_UNCONSTRAINED_ARRAY (TREE_TYPE (type)) != NULL_TREE
	      ? byte_position (DECL_CHAIN (TYPE_FIELDS (TREE_TYPE (type))))
	      : size_zero_node;
	  tree byte_diff = size_diffop (type_pos, etype_pos);

	  expr = build1 (NOP_EXPR, type, expr);
	  if (integer_zerop (byte_diff))
	    return expr;

	  return build_binary_op (POINTER_PLUS_EXPR, type, expr,
				  fold_convert (sizetype, byte_diff));
	}

      /* If converting fat pointer to normal or thin pointer, get the pointer
	 to the array and then convert it.  */
      if (TYPE_IS_FAT_POINTER_P (etype))
	expr
	  = build_component_ref (expr, NULL_TREE, TYPE_FIELDS (etype), false);

      return fold (convert_to_pointer (type, expr));

    case REAL_TYPE:
      return fold (convert_to_real (type, expr));

    case RECORD_TYPE:
      if (TYPE_JUSTIFIED_MODULAR_P (type) && !AGGREGATE_TYPE_P (etype))
	{
	  VEC(constructor_elt,gc) *v = VEC_alloc (constructor_elt, gc, 1);

	  CONSTRUCTOR_APPEND_ELT (v, TYPE_FIELDS (type),
				  convert (TREE_TYPE (TYPE_FIELDS (type)),
					   expr));
	  return gnat_build_constructor (type, v);
	}

      /* ... fall through ... */

    case ARRAY_TYPE:
      /* In these cases, assume the front-end has validated the conversion.
	 If the conversion is valid, it will be a bit-wise conversion, so
	 it can be viewed as an unchecked conversion.  */
      return unchecked_convert (type, expr, false);

    case UNION_TYPE:
      /* This is either a conversion between a tagged type and some
	 subtype, which we have to mark as a UNION_TYPE because of
	 overlapping fields or a conversion of an Unchecked_Union.  */
      return unchecked_convert (type, expr, false);

    case UNCONSTRAINED_ARRAY_TYPE:
      /* If the input is a VECTOR_TYPE, convert to the representative
	 array type first.  */
      if (ecode == VECTOR_TYPE)
	{
	  expr = convert (TYPE_REPRESENTATIVE_ARRAY (etype), expr);
	  etype = TREE_TYPE (expr);
	  ecode = TREE_CODE (etype);
	}

      /* If EXPR is a constrained array, take its address, convert it to a
	 fat pointer, and then dereference it.  Likewise if EXPR is a
	 record containing both a template and a constrained array.
	 Note that a record representing a justified modular type
	 always represents a packed constrained array.  */
      if (ecode == ARRAY_TYPE
	  || (ecode == INTEGER_TYPE && TYPE_HAS_ACTUAL_BOUNDS_P (etype))
	  || (ecode == RECORD_TYPE && TYPE_CONTAINS_TEMPLATE_P (etype))
	  || (ecode == RECORD_TYPE && TYPE_JUSTIFIED_MODULAR_P (etype)))
	return
	  build_unary_op
	    (INDIRECT_REF, NULL_TREE,
	     convert_to_fat_pointer (TREE_TYPE (type),
				     build_unary_op (ADDR_EXPR,
						     NULL_TREE, expr)));

      /* Do something very similar for converting one unconstrained
	 array to another.  */
      else if (ecode == UNCONSTRAINED_ARRAY_TYPE)
	return
	  build_unary_op (INDIRECT_REF, NULL_TREE,
			  convert (TREE_TYPE (type),
				   build_unary_op (ADDR_EXPR,
						   NULL_TREE, expr)));
      else
	gcc_unreachable ();

    case COMPLEX_TYPE:
      return fold (convert_to_complex (type, expr));

    default:
      gcc_unreachable ();
    }
}
4197
4198 /* Create an expression whose value is that of EXPR converted to the common
4199 index type, which is sizetype. EXPR is supposed to be in the base type
4200 of the GNAT index type. Calling it is equivalent to doing
4201
4202 convert (sizetype, expr)
4203
4204 but we try to distribute the type conversion with the knowledge that EXPR
4205 cannot overflow in its type. This is a best-effort approach and we fall
4206 back to the above expression as soon as difficulties are encountered.
4207
4208 This is necessary to overcome issues that arise when the GNAT base index
4209 type and the GCC common index type (sizetype) don't have the same size,
4210 which is quite frequent on 64-bit architectures. In this case, and if
4211 the GNAT base index type is signed but the iteration type of the loop has
4212 been forced to unsigned, the loop scalar evolution engine cannot compute
4213 a simple evolution for the general induction variables associated with the
4214 array indices, because it will preserve the wrap-around semantics in the
4215 unsigned type of their "inner" part. As a result, many loop optimizations
4216 are blocked.
4217
4218 The solution is to use a special (basic) induction variable that is at
4219 least as large as sizetype, and to express the aforementioned general
4220 induction variables in terms of this induction variable, eliminating
4221 the problematic intermediate truncation to the GNAT base index type.
4222 This is possible as long as the original expression doesn't overflow
4223 and if the middle-end hasn't introduced artificial overflows in the
4224 course of the various simplification it can make to the expression. */
4225
4226 tree
4227 convert_to_index_type (tree expr)
4228 {
4229 enum tree_code code = TREE_CODE (expr);
4230 tree type = TREE_TYPE (expr);
4231
4232 /* If the type is unsigned, overflow is allowed so we cannot be sure that
4233 EXPR doesn't overflow. Keep it simple if optimization is disabled. */
4234 if (TYPE_UNSIGNED (type) || !optimize)
4235 return convert (sizetype, expr);
4236
4237 switch (code)
4238 {
4239 case VAR_DECL:
4240 /* The main effect of the function: replace a loop parameter with its
4241 associated special induction variable. */
4242 if (DECL_LOOP_PARM_P (expr) && DECL_INDUCTION_VAR (expr))
4243 expr = DECL_INDUCTION_VAR (expr);
4244 break;
4245
4246 CASE_CONVERT:
4247 {
4248 tree otype = TREE_TYPE (TREE_OPERAND (expr, 0));
4249 /* Bail out as soon as we suspect some sort of type frobbing. */
4250 if (TYPE_PRECISION (type) != TYPE_PRECISION (otype)
4251 || TYPE_UNSIGNED (type) != TYPE_UNSIGNED (otype))
4252 break;
4253 }
4254
4255 /* ... fall through ... */
4256
4257 case NON_LVALUE_EXPR:
4258 return fold_build1 (code, sizetype,
4259 convert_to_index_type (TREE_OPERAND (expr, 0)));
4260
4261 case PLUS_EXPR:
4262 case MINUS_EXPR:
4263 case MULT_EXPR:
4264 return fold_build2 (code, sizetype,
4265 convert_to_index_type (TREE_OPERAND (expr, 0)),
4266 convert_to_index_type (TREE_OPERAND (expr, 1)));
4267
4268 case COMPOUND_EXPR:
4269 return fold_build2 (code, sizetype, TREE_OPERAND (expr, 0),
4270 convert_to_index_type (TREE_OPERAND (expr, 1)));
4271
4272 case COND_EXPR:
4273 return fold_build3 (code, sizetype, TREE_OPERAND (expr, 0),
4274 convert_to_index_type (TREE_OPERAND (expr, 1)),
4275 convert_to_index_type (TREE_OPERAND (expr, 2)));
4276
4277 default:
4278 break;
4279 }
4280
4281 return convert (sizetype, expr);
4282 }
4283 \f
4284 /* Remove all conversions that are done in EXP. This includes converting
4285 from a padded type or to a justified modular type. If TRUE_ADDRESS
4286 is true, always return the address of the containing object even if
4287 the address is not bit-aligned. */
4288
4289 tree
4290 remove_conversions (tree exp, bool true_address)
4291 {
4292 switch (TREE_CODE (exp))
4293 {
4294 case CONSTRUCTOR:
4295 if (true_address
4296 && TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE
4297 && TYPE_JUSTIFIED_MODULAR_P (TREE_TYPE (exp)))
4298 return
4299 remove_conversions (VEC_index (constructor_elt,
4300 CONSTRUCTOR_ELTS (exp), 0)->value,
4301 true);
4302 break;
4303
4304 case COMPONENT_REF:
4305 if (TYPE_IS_PADDING_P (TREE_TYPE (TREE_OPERAND (exp, 0))))
4306 return remove_conversions (TREE_OPERAND (exp, 0), true_address);
4307 break;
4308
4309 CASE_CONVERT:
4310 case VIEW_CONVERT_EXPR:
4311 case NON_LVALUE_EXPR:
4312 return remove_conversions (TREE_OPERAND (exp, 0), true_address);
4313
4314 default:
4315 break;
4316 }
4317
4318 return exp;
4319 }
4320 \f
4321 /* If EXP's type is an UNCONSTRAINED_ARRAY_TYPE, return an expression that
4322 refers to the underlying array. If it has TYPE_CONTAINS_TEMPLATE_P,
4323 likewise return an expression pointing to the underlying array. */
4324
tree
maybe_unconstrained_array (tree exp)
{
  enum tree_code code = TREE_CODE (exp);
  tree type = TREE_TYPE (exp);

  switch (TREE_CODE (type))
    {
    case UNCONSTRAINED_ARRAY_TYPE:
      if (code == UNCONSTRAINED_ARRAY_REF)
	{
	  /* Save these flags to restore them on the array reference we
	     rebuild below.  */
	  const bool read_only = TREE_READONLY (exp);
	  const bool no_trap = TREE_THIS_NOTRAP (exp);

	  /* Strip the reference: EXP becomes the fat pointer operand.  */
	  exp = TREE_OPERAND (exp, 0);
	  type = TREE_TYPE (exp);

	  if (TREE_CODE (exp) == COND_EXPR)
	    {
	      /* Push the dereference into both arms of the conditional so
		 each arm yields the underlying array directly.  */
	      tree op1
		= build_unary_op (INDIRECT_REF, NULL_TREE,
				  build_component_ref (TREE_OPERAND (exp, 1),
						       NULL_TREE,
						       TYPE_FIELDS (type),
						       false));
	      tree op2
		= build_unary_op (INDIRECT_REF, NULL_TREE,
				  build_component_ref (TREE_OPERAND (exp, 2),
						       NULL_TREE,
						       TYPE_FIELDS (type),
						       false));

	      exp = build3 (COND_EXPR,
			    TREE_TYPE (TREE_TYPE (TYPE_FIELDS (type))),
			    TREE_OPERAND (exp, 0), op1, op2);
	    }
	  else
	    {
	      /* Dereference the array-pointer field of the fat pointer and
		 carry over the saved flags.  */
	      exp = build_unary_op (INDIRECT_REF, NULL_TREE,
				    build_component_ref (exp, NULL_TREE,
							 TYPE_FIELDS (type),
							 false));
	      TREE_READONLY (exp) = read_only;
	      TREE_THIS_NOTRAP (exp) = no_trap;
	    }
	}

      /* For a NULL_EXPR, just retype it to the underlying array type.  */
      else if (code == NULL_EXPR)
	exp = build1 (NULL_EXPR,
		      TREE_TYPE (TREE_TYPE (TYPE_FIELDS (TREE_TYPE (type)))),
		      TREE_OPERAND (exp, 0));
      break;

    case RECORD_TYPE:
      /* If this is a padded type and it contains a template, convert to the
	 unpadded type first.  */
      if (TYPE_PADDING_P (type)
	  && TREE_CODE (TREE_TYPE (TYPE_FIELDS (type))) == RECORD_TYPE
	  && TYPE_CONTAINS_TEMPLATE_P (TREE_TYPE (TYPE_FIELDS (type))))
	{
	  exp = convert (TREE_TYPE (TYPE_FIELDS (type)), exp);
	  type = TREE_TYPE (exp);
	}

      if (TYPE_CONTAINS_TEMPLATE_P (type))
	{
	  /* The array is the second field, after the template.  */
	  exp = build_component_ref (exp, NULL_TREE,
				     DECL_CHAIN (TYPE_FIELDS (type)),
				     false);
	  type = TREE_TYPE (exp);

	  /* If the array type is padded, convert to the unpadded type.  */
	  if (TYPE_IS_PADDING_P (type))
	    exp = convert (TREE_TYPE (TYPE_FIELDS (type)), exp);
	}
      break;

    default:
      break;
    }

  return exp;
}
4408
4409 /* If EXP's type is a VECTOR_TYPE, return EXP converted to the associated
4410 TYPE_REPRESENTATIVE_ARRAY. */
4411
4412 tree
4413 maybe_vector_array (tree exp)
4414 {
4415 tree etype = TREE_TYPE (exp);
4416
4417 if (VECTOR_TYPE_P (etype))
4418 exp = convert (TYPE_REPRESENTATIVE_ARRAY (etype), exp);
4419
4420 return exp;
4421 }
4422 \f
4423 /* Return true if EXPR is an expression that can be folded as an operand
4424 of a VIEW_CONVERT_EXPR. See ada-tree.h for a complete rationale. */
4425
4426 static bool
4427 can_fold_for_view_convert_p (tree expr)
4428 {
4429 tree t1, t2;
4430
4431 /* The folder will fold NOP_EXPRs between integral types with the same
4432 precision (in the middle-end's sense). We cannot allow it if the
4433 types don't have the same precision in the Ada sense as well. */
4434 if (TREE_CODE (expr) != NOP_EXPR)
4435 return true;
4436
4437 t1 = TREE_TYPE (expr);
4438 t2 = TREE_TYPE (TREE_OPERAND (expr, 0));
4439
4440 /* Defer to the folder for non-integral conversions. */
4441 if (!(INTEGRAL_TYPE_P (t1) && INTEGRAL_TYPE_P (t2)))
4442 return true;
4443
4444 /* Only fold conversions that preserve both precisions. */
4445 if (TYPE_PRECISION (t1) == TYPE_PRECISION (t2)
4446 && operand_equal_p (rm_size (t1), rm_size (t2), 0))
4447 return true;
4448
4449 return false;
4450 }
4451
/* Return an expression that does an unchecked conversion of EXPR to TYPE.
   If NOTRUNC_P is true, truncation operations should be suppressed.

   Special care is required with (source or target) integral types whose
   precision is not equal to their size, to make sure we fetch or assign
   the value bits whose location might depend on the endianness, e.g.

     Rmsize : constant := 8;
     subtype Int is Integer range 0 .. 2 ** Rmsize - 1;

     type Bit_Array is array (1 .. Rmsize) of Boolean;
     pragma Pack (Bit_Array);

     function To_Bit_Array is new Unchecked_Conversion (Int, Bit_Array);

     Value : Int := 2#1000_0001#;
     Vbits : Bit_Array := To_Bit_Array (Value);

   we expect the 8 bits at Vbits'Address to always contain Value, while
   their original location depends on the endianness, at Value'Address
   on a little-endian architecture but not on a big-endian one.  */

tree
unchecked_convert (tree type, tree expr, bool notrunc_p)
{
  tree etype = TREE_TYPE (expr);
  enum tree_code ecode = TREE_CODE (etype);
  enum tree_code code = TREE_CODE (type);
  int c;

  /* If the expression is already of the right type, we are done.  */
  if (etype == type)
    return expr;

  /* If both types are integral just do a normal conversion.
     Likewise for a conversion to an unconstrained array.  */
  if ((((INTEGRAL_TYPE_P (type)
	 && !(code == INTEGER_TYPE && TYPE_VAX_FLOATING_POINT_P (type)))
	|| (POINTER_TYPE_P (type) && !TYPE_IS_THIN_POINTER_P (type))
	|| (code == RECORD_TYPE && TYPE_JUSTIFIED_MODULAR_P (type)))
       && ((INTEGRAL_TYPE_P (etype)
	    && !(ecode == INTEGER_TYPE && TYPE_VAX_FLOATING_POINT_P (etype)))
	   || (POINTER_TYPE_P (etype) && !TYPE_IS_THIN_POINTER_P (etype))
	   || (ecode == RECORD_TYPE && TYPE_JUSTIFIED_MODULAR_P (etype))))
      || code == UNCONSTRAINED_ARRAY_TYPE)
    {
      /* For a biased source, first strip the bias by converting to a copy
	 of the type without the biased-representation flag.  */
      if (ecode == INTEGER_TYPE && TYPE_BIASED_REPRESENTATION_P (etype))
	{
	  tree ntype = copy_type (etype);
	  TYPE_BIASED_REPRESENTATION_P (ntype) = 0;
	  TYPE_MAIN_VARIANT (ntype) = ntype;
	  expr = build1 (NOP_EXPR, ntype, expr);
	}

      /* Likewise, for a biased target, convert to the unbiased copy first
	 and only then view the result as the biased type.  */
      if (code == INTEGER_TYPE && TYPE_BIASED_REPRESENTATION_P (type))
	{
	  tree rtype = copy_type (type);
	  TYPE_BIASED_REPRESENTATION_P (rtype) = 0;
	  TYPE_MAIN_VARIANT (rtype) = rtype;
	  expr = convert (rtype, expr);
	  expr = build1 (NOP_EXPR, type, expr);
	}
      else
	expr = convert (type, expr);
    }

  /* If we are converting to an integral type whose precision is not equal
     to its size, first unchecked convert to a record type that contains a
     field of the given precision.  Then extract the field.  */
  else if (INTEGRAL_TYPE_P (type)
	   && TYPE_RM_SIZE (type)
	   && 0 != compare_tree_int (TYPE_RM_SIZE (type),
				     GET_MODE_BITSIZE (TYPE_MODE (type))))
    {
      tree rec_type = make_node (RECORD_TYPE);
      unsigned HOST_WIDE_INT prec = TREE_INT_CST_LOW (TYPE_RM_SIZE (type));
      tree field_type, field;

      /* The field has the same signedness and RM size as the target.  */
      if (TYPE_UNSIGNED (type))
	field_type = make_unsigned_type (prec);
      else
	field_type = make_signed_type (prec);
      SET_TYPE_RM_SIZE (field_type, TYPE_RM_SIZE (type));

      field = create_field_decl (get_identifier ("OBJ"), field_type, rec_type,
				 NULL_TREE, NULL_TREE, 1, 0);

      TYPE_FIELDS (rec_type) = field;
      layout_type (rec_type);

      expr = unchecked_convert (rec_type, expr, notrunc_p);
      expr = build_component_ref (expr, NULL_TREE, field, false);
      expr = fold_build1 (NOP_EXPR, type, expr);
    }

  /* Similarly if we are converting from an integral type whose precision is
     not equal to its size, first copy into a field of the given precision
     and unchecked convert the record type.  */
  else if (INTEGRAL_TYPE_P (etype)
	   && TYPE_RM_SIZE (etype)
	   && 0 != compare_tree_int (TYPE_RM_SIZE (etype),
				     GET_MODE_BITSIZE (TYPE_MODE (etype))))
    {
      tree rec_type = make_node (RECORD_TYPE);
      unsigned HOST_WIDE_INT prec = TREE_INT_CST_LOW (TYPE_RM_SIZE (etype));
      VEC(constructor_elt,gc) *v = VEC_alloc (constructor_elt, gc, 1);
      tree field_type, field;

      /* The field has the same signedness and RM size as the source.  */
      if (TYPE_UNSIGNED (etype))
	field_type = make_unsigned_type (prec);
      else
	field_type = make_signed_type (prec);
      SET_TYPE_RM_SIZE (field_type, TYPE_RM_SIZE (etype));

      field = create_field_decl (get_identifier ("OBJ"), field_type, rec_type,
				 NULL_TREE, NULL_TREE, 1, 0);

      TYPE_FIELDS (rec_type) = field;
      layout_type (rec_type);

      /* Build a one-field constructor holding EXPR, then convert it.  */
      expr = fold_build1 (NOP_EXPR, field_type, expr);
      CONSTRUCTOR_APPEND_ELT (v, field, expr);
      expr = gnat_build_constructor (rec_type, v);
      expr = unchecked_convert (type, expr, notrunc_p);
    }

  /* If we are converting from a scalar type to a type with a different size,
     we need to pad to have the same size on both sides.

     ??? We cannot do it unconditionally because unchecked conversions are
     used liberally by the front-end to implement polymorphism, e.g. in:

       S191s : constant ada__tags__addr_ptr := ada__tags__addr_ptr!(S190s);
       return p___size__4 (p__object!(S191s.all));

     so we skip all expressions that are references.  */
  else if (!REFERENCE_CLASS_P (expr)
	   && !AGGREGATE_TYPE_P (etype)
	   && TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST
	   && (c = tree_int_cst_compare (TYPE_SIZE (etype), TYPE_SIZE (type))))
    {
      if (c < 0)
	{
	  /* Source is smaller: pad it up to the target size first.  */
	  expr = convert (maybe_pad_type (etype, TYPE_SIZE (type), 0, Empty,
					  false, false, false, true),
			  expr);
	  expr = unchecked_convert (type, expr, notrunc_p);
	}
      else
	{
	  /* Source is larger: convert to a padded version of the target
	     and extract the inner field.  */
	  tree rec_type = maybe_pad_type (type, TYPE_SIZE (etype), 0, Empty,
					  false, false, false, true);
	  expr = unchecked_convert (rec_type, expr, notrunc_p);
	  expr = build_component_ref (expr, NULL_TREE, TYPE_FIELDS (rec_type),
				      false);
	}
    }

  /* We have a special case when we are converting between two unconstrained
     array types.  In that case, take the address, convert the fat pointer
     types, and dereference.  */
  else if (ecode == code && code == UNCONSTRAINED_ARRAY_TYPE)
    expr = build_unary_op (INDIRECT_REF, NULL_TREE,
			   build1 (VIEW_CONVERT_EXPR, TREE_TYPE (type),
				   build_unary_op (ADDR_EXPR, NULL_TREE,
						   expr)));

  /* Another special case is when we are converting to a vector type from its
     representative array type; this is a regular conversion.  */
  else if (code == VECTOR_TYPE
	   && ecode == ARRAY_TYPE
	   && gnat_types_compatible_p (TYPE_REPRESENTATIVE_ARRAY (type),
				       etype))
    expr = convert (type, expr);

  else
    {
      /* General case: use a VIEW_CONVERT_EXPR, folded only when safe.  */
      expr = maybe_unconstrained_array (expr);
      etype = TREE_TYPE (expr);
      ecode = TREE_CODE (etype);
      if (can_fold_for_view_convert_p (expr))
	expr = fold_build1 (VIEW_CONVERT_EXPR, type, expr);
      else
	expr = build1 (VIEW_CONVERT_EXPR, type, expr);
    }

  /* If the result is an integral type whose precision is not equal to its
     size, sign- or zero-extend the result.  We need not do this if the input
     is an integral type of the same precision and signedness or if the output
     is a biased type or if both the input and output are unsigned.  */
  if (!notrunc_p
      && INTEGRAL_TYPE_P (type) && TYPE_RM_SIZE (type)
      && !(code == INTEGER_TYPE && TYPE_BIASED_REPRESENTATION_P (type))
      && 0 != compare_tree_int (TYPE_RM_SIZE (type),
				GET_MODE_BITSIZE (TYPE_MODE (type)))
      && !(INTEGRAL_TYPE_P (etype)
	   && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (etype)
	   && operand_equal_p (TYPE_RM_SIZE (type),
			       (TYPE_RM_SIZE (etype) != 0
				? TYPE_RM_SIZE (etype) : TYPE_SIZE (etype)),
			       0))
      && !(TYPE_UNSIGNED (type) && TYPE_UNSIGNED (etype)))
    {
      /* Shift left then right by (mode size - RM size) bits so that the
	 result is properly sign- or zero-extended in the full mode.  */
      tree base_type
	= gnat_type_for_mode (TYPE_MODE (type), TYPE_UNSIGNED (type));
      tree shift_expr
	= convert (base_type,
		   size_binop (MINUS_EXPR,
			       bitsize_int
			       (GET_MODE_BITSIZE (TYPE_MODE (type))),
			       TYPE_RM_SIZE (type)));
      expr
	= convert (type,
		   build_binary_op (RSHIFT_EXPR, base_type,
				    build_binary_op (LSHIFT_EXPR, base_type,
						     convert (base_type, expr),
						     shift_expr),
				    shift_expr));
    }

  /* An unchecked conversion should never raise Constraint_Error.  The code
     below assumes that GCC's conversion routines overflow the same way that
     the underlying hardware does.  This is probably true.  In the rare case
     when it is false, we can rely on the fact that such conversions are
     erroneous anyway.  */
  if (TREE_CODE (expr) == INTEGER_CST)
    TREE_OVERFLOW (expr) = 0;

  /* If the sizes of the types differ and this is a VIEW_CONVERT_EXPR,
     show no longer constant.  */
  if (TREE_CODE (expr) == VIEW_CONVERT_EXPR
      && !operand_equal_p (TYPE_SIZE_UNIT (type), TYPE_SIZE_UNIT (etype),
			   OEP_ONLY_CONST))
    TREE_CONSTANT (expr) = 0;

  return expr;
}
4689 \f
4690 /* Return the appropriate GCC tree code for the specified GNAT_TYPE,
4691 the latter being a record type as predicated by Is_Record_Type. */
4692
4693 enum tree_code
4694 tree_code_for_record_type (Entity_Id gnat_type)
4695 {
4696 Node_Id component_list, component;
4697
4698 /* Return UNION_TYPE if it's an Unchecked_Union whose non-discriminant
4699 fields are all in the variant part. Otherwise, return RECORD_TYPE. */
4700 if (!Is_Unchecked_Union (gnat_type))
4701 return RECORD_TYPE;
4702
4703 gnat_type = Implementation_Base_Type (gnat_type);
4704 component_list
4705 = Component_List (Type_Definition (Declaration_Node (gnat_type)));
4706
4707 for (component = First_Non_Pragma (Component_Items (component_list));
4708 Present (component);
4709 component = Next_Non_Pragma (component))
4710 if (Ekind (Defining_Entity (component)) == E_Component)
4711 return RECORD_TYPE;
4712
4713 return UNION_TYPE;
4714 }
4715
4716 /* Return true if GNAT_TYPE is a "double" floating-point type, i.e. whose
4717 size is equal to 64 bits, or an array of such a type. Set ALIGN_CLAUSE
4718 according to the presence of an alignment clause on the type or, if it
4719 is an array, on the component type. */
4720
4721 bool
4722 is_double_float_or_array (Entity_Id gnat_type, bool *align_clause)
4723 {
4724 gnat_type = Underlying_Type (gnat_type);
4725
4726 *align_clause = Present (Alignment_Clause (gnat_type));
4727
4728 if (Is_Array_Type (gnat_type))
4729 {
4730 gnat_type = Underlying_Type (Component_Type (gnat_type));
4731 if (Present (Alignment_Clause (gnat_type)))
4732 *align_clause = true;
4733 }
4734
4735 if (!Is_Floating_Point_Type (gnat_type))
4736 return false;
4737
4738 if (UI_To_Int (Esize (gnat_type)) != 64)
4739 return false;
4740
4741 return true;
4742 }
4743
4744 /* Return true if GNAT_TYPE is a "double" or larger scalar type, i.e. whose
4745 size is greater or equal to 64 bits, or an array of such a type. Set
4746 ALIGN_CLAUSE according to the presence of an alignment clause on the
4747 type or, if it is an array, on the component type. */
4748
4749 bool
4750 is_double_scalar_or_array (Entity_Id gnat_type, bool *align_clause)
4751 {
4752 gnat_type = Underlying_Type (gnat_type);
4753
4754 *align_clause = Present (Alignment_Clause (gnat_type));
4755
4756 if (Is_Array_Type (gnat_type))
4757 {
4758 gnat_type = Underlying_Type (Component_Type (gnat_type));
4759 if (Present (Alignment_Clause (gnat_type)))
4760 *align_clause = true;
4761 }
4762
4763 if (!Is_Scalar_Type (gnat_type))
4764 return false;
4765
4766 if (UI_To_Int (Esize (gnat_type)) < 64)
4767 return false;
4768
4769 return true;
4770 }
4771
4772 /* Return true if GNU_TYPE is suitable as the type of a non-aliased
4773 component of an aggregate type. */
4774
4775 bool
4776 type_for_nonaliased_component_p (tree gnu_type)
4777 {
4778 /* If the type is passed by reference, we may have pointers to the
4779 component so it cannot be made non-aliased. */
4780 if (must_pass_by_ref (gnu_type) || default_pass_by_ref (gnu_type))
4781 return false;
4782
4783 /* We used to say that any component of aggregate type is aliased
4784 because the front-end may take 'Reference of it. The front-end
4785 has been enhanced in the meantime so as to use a renaming instead
4786 in most cases, but the back-end can probably take the address of
4787 such a component too so we go for the conservative stance.
4788
4789 For instance, we might need the address of any array type, even
4790 if normally passed by copy, to construct a fat pointer if the
4791 component is used as an actual for an unconstrained formal.
4792
4793 Likewise for record types: even if a specific record subtype is
4794 passed by copy, the parent type might be passed by ref (e.g. if
4795 it's of variable size) and we might take the address of a child
4796 component to pass to a parent formal. We have no way to check
4797 for such conditions here. */
4798 if (AGGREGATE_TYPE_P (gnu_type))
4799 return false;
4800
4801 return true;
4802 }
4803
4804 /* Return true if TYPE is a smaller form of ORIG_TYPE. */
4805
4806 bool
4807 smaller_form_type_p (tree type, tree orig_type)
4808 {
4809 tree size, osize;
4810
4811 /* We're not interested in variants here. */
4812 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig_type))
4813 return false;
4814
4815 /* Like a variant, a packable version keeps the original TYPE_NAME. */
4816 if (TYPE_NAME (type) != TYPE_NAME (orig_type))
4817 return false;
4818
4819 size = TYPE_SIZE (type);
4820 osize = TYPE_SIZE (orig_type);
4821
4822 if (!(TREE_CODE (size) == INTEGER_CST && TREE_CODE (osize) == INTEGER_CST))
4823 return false;
4824
4825 return tree_int_cst_lt (size, osize) != 0;
4826 }
4827
/* Perform final processing on global variables.  */

/* Dummy variable used to register global types marked as used, so that
   they are preserved for the debugger (see below).  */
static GTY (()) tree dummy_global;

/* Emit debug info and assembly for everything declared at global level.
   Called once at the end of compilation.  */
void
gnat_write_global_declarations (void)
{
  unsigned int i;
  tree iter;

  /* If we have declared types as used at the global level, insert them in
     the global hash table.  We use a dummy variable for this purpose.  */
  if (!VEC_empty (tree, types_used_by_cur_var_decl))
    {
      struct varpool_node *node;
      dummy_global
	= build_decl (BUILTINS_LOCATION, VAR_DECL, NULL_TREE, void_type_node);
      TREE_STATIC (dummy_global) = 1;
      /* Mark it as already written so no code is actually emitted for it,
	 while force_output keeps it (and the attached types) alive.  */
      TREE_ASM_WRITTEN (dummy_global) = 1;
      node = varpool_node (dummy_global);
      node->force_output = 1;
      varpool_mark_needed_node (node);

      while (!VEC_empty (tree, types_used_by_cur_var_decl))
	{
	  tree t = VEC_pop (tree, types_used_by_cur_var_decl);
	  types_used_by_var_decl_insert (t, dummy_global);
	}
    }

  /* Output debug information for all global type declarations first.  This
     ensures that global types whose compilation hasn't been finalized yet,
     for example pointers to Taft amendment types, have their compilation
     finalized in the right context.  */
  FOR_EACH_VEC_ELT (tree, global_decls, i, iter)
    if (TREE_CODE (iter) == TYPE_DECL)
      debug_hooks->global_decl (iter);

  /* Proceed to optimize and emit assembly.
     FIXME: shouldn't be the front end's responsibility to call this.  */
  cgraph_finalize_compilation_unit ();

  /* After cgraph has had a chance to emit everything that's going to
     be emitted, output debug information for the rest of globals.  */
  if (!seen_error ())
    {
      timevar_push (TV_SYMOUT);
      FOR_EACH_VEC_ELT (tree, global_decls, i, iter)
	if (TREE_CODE (iter) != TYPE_DECL)
	  debug_hooks->global_decl (iter);
      timevar_pop (TV_SYMOUT);
    }
}
4881
4882 /* ************************************************************************
4883 * * GCC builtins support *
4884 * ************************************************************************ */
4885
4886 /* The general scheme is fairly simple:
4887
4888 For each builtin function/type to be declared, gnat_install_builtins calls
4889 internal facilities which eventually get to gnat_push_decl, which in turn
4890 tracks the so declared builtin function decls in the 'builtin_decls' global
4891 datastructure. When an Intrinsic subprogram declaration is processed, we
4892 search this global datastructure to retrieve the associated BUILT_IN DECL
4893 node. */
4894
4895 /* Search the chain of currently available builtin declarations for a node
4896 corresponding to function NAME (an IDENTIFIER_NODE). Return the first node
4897 found, if any, or NULL_TREE otherwise. */
4898 tree
4899 builtin_decl_for (tree name)
4900 {
4901 unsigned i;
4902 tree decl;
4903
4904 FOR_EACH_VEC_ELT (tree, builtin_decls, i, decl)
4905 if (DECL_NAME (decl) == name)
4906 return decl;
4907
4908 return NULL_TREE;
4909 }
4910
4911 /* The code below eventually exposes gnat_install_builtins, which declares
4912 the builtin types and functions we might need, either internally or as
4913 user accessible facilities.
4914
4915 ??? This is a first implementation shot, still in rough shape. It is
4916 heavily inspired from the "C" family implementation, with chunks copied
4917 verbatim from there.
4918
4919 Two obvious TODO candidates are
4920 o Use a more efficient name/decl mapping scheme
4921 o Devise a middle-end infrastructure to avoid having to copy
4922 pieces between front-ends. */
4923
4924 /* ----------------------------------------------------------------------- *
4925 * BUILTIN ELEMENTARY TYPES *
4926 * ----------------------------------------------------------------------- */
4927
/* Standard data types to be used in builtin argument declarations.  */

/* Indices into c_global_trees below; mirrors the C front-end's scheme so
   builtin-types.def can be shared verbatim.  */
enum c_tree_index
{
    CTI_SIGNED_SIZE_TYPE, /* For format checking only.  */
    CTI_STRING_TYPE,
    CTI_CONST_STRING_TYPE,

    CTI_MAX
};

static tree c_global_trees[CTI_MAX];

/* Accessors for the types above, named as builtin-types.def expects.  */
#define signed_size_type_node	c_global_trees[CTI_SIGNED_SIZE_TYPE]
#define string_type_node	c_global_trees[CTI_STRING_TYPE]
#define const_string_type_node	c_global_trees[CTI_CONST_STRING_TYPE]

/* ??? In addition to some attribute handlers, we currently don't support a
   (small) number of builtin-types, which in turns inhibits support for a
   number of builtin functions.  */
#define wint_type_node    void_type_node
#define intmax_type_node  void_type_node
#define uintmax_type_node void_type_node
4951
4952 /* Build the void_list_node (void_type_node having been created). */
4953
4954 static tree
4955 build_void_list_node (void)
4956 {
4957 tree t = build_tree_list (NULL_TREE, void_type_node);
4958 return t;
4959 }
4960
4961 /* Used to help initialize the builtin-types.def table. When a type of
4962 the correct size doesn't exist, use error_mark_node instead of NULL.
4963 The later results in segfaults even when a decl using the type doesn't
4964 get invoked. */
4965
4966 static tree
4967 builtin_type_for_size (int size, bool unsignedp)
4968 {
4969 tree type = gnat_type_for_size (size, unsignedp);
4970 return type ? type : error_mark_node;
4971 }
4972
4973 /* Build/push the elementary type decls that builtin functions/types
4974 will need. */
4975
4976 static void
4977 install_builtin_elementary_types (void)
4978 {
4979 signed_size_type_node = gnat_signed_type (size_type_node);
4980 pid_type_node = integer_type_node;
4981 void_list_node = build_void_list_node ();
4982
4983 string_type_node = build_pointer_type (char_type_node);
4984 const_string_type_node
4985 = build_pointer_type (build_qualified_type
4986 (char_type_node, TYPE_QUAL_CONST));
4987 }
4988
4989 /* ----------------------------------------------------------------------- *
4990 * BUILTIN FUNCTION TYPES *
4991 * ----------------------------------------------------------------------- */
4992
4993 /* Now, builtin function types per se. */
4994
/* Enumerate every builtin function type described in builtin-types.def by
   expanding each DEF_* entry to its NAME.  The same file is re-included in
   install_builtin_function_types below to build the actual types.  */
enum c_builtin_type
{
#define DEF_PRIMITIVE_TYPE(NAME, VALUE) NAME,
#define DEF_FUNCTION_TYPE_0(NAME, RETURN) NAME,
#define DEF_FUNCTION_TYPE_1(NAME, RETURN, ARG1) NAME,
#define DEF_FUNCTION_TYPE_2(NAME, RETURN, ARG1, ARG2) NAME,
#define DEF_FUNCTION_TYPE_3(NAME, RETURN, ARG1, ARG2, ARG3) NAME,
#define DEF_FUNCTION_TYPE_4(NAME, RETURN, ARG1, ARG2, ARG3, ARG4) NAME,
#define DEF_FUNCTION_TYPE_5(NAME, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5) NAME,
#define DEF_FUNCTION_TYPE_6(NAME, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, ARG6) NAME,
#define DEF_FUNCTION_TYPE_7(NAME, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, ARG6, ARG7) NAME,
#define DEF_FUNCTION_TYPE_VAR_0(NAME, RETURN) NAME,
#define DEF_FUNCTION_TYPE_VAR_1(NAME, RETURN, ARG1) NAME,
#define DEF_FUNCTION_TYPE_VAR_2(NAME, RETURN, ARG1, ARG2) NAME,
#define DEF_FUNCTION_TYPE_VAR_3(NAME, RETURN, ARG1, ARG2, ARG3) NAME,
#define DEF_FUNCTION_TYPE_VAR_4(NAME, RETURN, ARG1, ARG2, ARG3, ARG4) NAME,
/* NOTE(review): the last parameter below is spelled ARG6 but is the fifth
   argument; harmless here since the arguments are unused in the expansion,
   but worth fixing for clarity.  */
#define DEF_FUNCTION_TYPE_VAR_5(NAME, RETURN, ARG1, ARG2, ARG3, ARG4, ARG6) \
  NAME,
#define DEF_POINTER_TYPE(NAME, TYPE) NAME,
#include "builtin-types.def"
#undef DEF_PRIMITIVE_TYPE
#undef DEF_FUNCTION_TYPE_0
#undef DEF_FUNCTION_TYPE_1
#undef DEF_FUNCTION_TYPE_2
#undef DEF_FUNCTION_TYPE_3
#undef DEF_FUNCTION_TYPE_4
#undef DEF_FUNCTION_TYPE_5
#undef DEF_FUNCTION_TYPE_6
#undef DEF_FUNCTION_TYPE_7
#undef DEF_FUNCTION_TYPE_VAR_0
#undef DEF_FUNCTION_TYPE_VAR_1
#undef DEF_FUNCTION_TYPE_VAR_2
#undef DEF_FUNCTION_TYPE_VAR_3
#undef DEF_FUNCTION_TYPE_VAR_4
#undef DEF_FUNCTION_TYPE_VAR_5
#undef DEF_POINTER_TYPE
  BT_LAST
};

typedef enum c_builtin_type builtin_type;

/* A temporary array used in communication with def_fn_type.  */
static GTY(()) tree builtin_types[(int) BT_LAST + 1];
5038
5039 /* A helper function for install_builtin_types. Build function type
5040 for DEF with return type RET and N arguments. If VAR is true, then the
5041 function should be variadic after those N arguments.
5042
5043 Takes special care not to ICE if any of the types involved are
5044 error_mark_node, which indicates that said type is not in fact available
5045 (see builtin_type_for_size). In which case the function type as a whole
5046 should be error_mark_node. */
5047
5048 static void
5049 def_fn_type (builtin_type def, builtin_type ret, bool var, int n, ...)
5050 {
5051 tree t;
5052 tree *args = XALLOCAVEC (tree, n);
5053 va_list list;
5054 int i;
5055
5056 va_start (list, n);
5057 for (i = 0; i < n; ++i)
5058 {
5059 builtin_type a = (builtin_type) va_arg (list, int);
5060 t = builtin_types[a];
5061 if (t == error_mark_node)
5062 goto egress;
5063 args[i] = t;
5064 }
5065
5066 t = builtin_types[ret];
5067 if (t == error_mark_node)
5068 goto egress;
5069 if (var)
5070 t = build_varargs_function_type_array (t, n, args);
5071 else
5072 t = build_function_type_array (t, n, args);
5073
5074 egress:
5075 builtin_types[def] = t;
5076 va_end (list);
5077 }
5078
5079 /* Build the builtin function types and install them in the builtin_types
5080 array for later use in builtin function decls. */
5081
5082 static void
5083 install_builtin_function_types (void)
5084 {
5085 tree va_list_ref_type_node;
5086 tree va_list_arg_type_node;
5087
5088 if (TREE_CODE (va_list_type_node) == ARRAY_TYPE)
5089 {
5090 va_list_arg_type_node = va_list_ref_type_node =
5091 build_pointer_type (TREE_TYPE (va_list_type_node));
5092 }
5093 else
5094 {
5095 va_list_arg_type_node = va_list_type_node;
5096 va_list_ref_type_node = build_reference_type (va_list_type_node);
5097 }
5098
5099 #define DEF_PRIMITIVE_TYPE(ENUM, VALUE) \
5100 builtin_types[ENUM] = VALUE;
5101 #define DEF_FUNCTION_TYPE_0(ENUM, RETURN) \
5102 def_fn_type (ENUM, RETURN, 0, 0);
5103 #define DEF_FUNCTION_TYPE_1(ENUM, RETURN, ARG1) \
5104 def_fn_type (ENUM, RETURN, 0, 1, ARG1);
5105 #define DEF_FUNCTION_TYPE_2(ENUM, RETURN, ARG1, ARG2) \
5106 def_fn_type (ENUM, RETURN, 0, 2, ARG1, ARG2);
5107 #define DEF_FUNCTION_TYPE_3(ENUM, RETURN, ARG1, ARG2, ARG3) \
5108 def_fn_type (ENUM, RETURN, 0, 3, ARG1, ARG2, ARG3);
5109 #define DEF_FUNCTION_TYPE_4(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4) \
5110 def_fn_type (ENUM, RETURN, 0, 4, ARG1, ARG2, ARG3, ARG4);
5111 #define DEF_FUNCTION_TYPE_5(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5) \
5112 def_fn_type (ENUM, RETURN, 0, 5, ARG1, ARG2, ARG3, ARG4, ARG5);
5113 #define DEF_FUNCTION_TYPE_6(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \
5114 ARG6) \
5115 def_fn_type (ENUM, RETURN, 0, 6, ARG1, ARG2, ARG3, ARG4, ARG5, ARG6);
5116 #define DEF_FUNCTION_TYPE_7(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \
5117 ARG6, ARG7) \
5118 def_fn_type (ENUM, RETURN, 0, 7, ARG1, ARG2, ARG3, ARG4, ARG5, ARG6, ARG7);
5119 #define DEF_FUNCTION_TYPE_VAR_0(ENUM, RETURN) \
5120 def_fn_type (ENUM, RETURN, 1, 0);
5121 #define DEF_FUNCTION_TYPE_VAR_1(ENUM, RETURN, ARG1) \
5122 def_fn_type (ENUM, RETURN, 1, 1, ARG1);
5123 #define DEF_FUNCTION_TYPE_VAR_2(ENUM, RETURN, ARG1, ARG2) \
5124 def_fn_type (ENUM, RETURN, 1, 2, ARG1, ARG2);
5125 #define DEF_FUNCTION_TYPE_VAR_3(ENUM, RETURN, ARG1, ARG2, ARG3) \
5126 def_fn_type (ENUM, RETURN, 1, 3, ARG1, ARG2, ARG3);
5127 #define DEF_FUNCTION_TYPE_VAR_4(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4) \
5128 def_fn_type (ENUM, RETURN, 1, 4, ARG1, ARG2, ARG3, ARG4);
5129 #define DEF_FUNCTION_TYPE_VAR_5(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5) \
5130 def_fn_type (ENUM, RETURN, 1, 5, ARG1, ARG2, ARG3, ARG4, ARG5);
5131 #define DEF_POINTER_TYPE(ENUM, TYPE) \
5132 builtin_types[(int) ENUM] = build_pointer_type (builtin_types[(int) TYPE]);
5133
5134 #include "builtin-types.def"
5135
5136 #undef DEF_PRIMITIVE_TYPE
5137 #undef DEF_FUNCTION_TYPE_1
5138 #undef DEF_FUNCTION_TYPE_2
5139 #undef DEF_FUNCTION_TYPE_3
5140 #undef DEF_FUNCTION_TYPE_4
5141 #undef DEF_FUNCTION_TYPE_5
5142 #undef DEF_FUNCTION_TYPE_6
5143 #undef DEF_FUNCTION_TYPE_VAR_0
5144 #undef DEF_FUNCTION_TYPE_VAR_1
5145 #undef DEF_FUNCTION_TYPE_VAR_2
5146 #undef DEF_FUNCTION_TYPE_VAR_3
5147 #undef DEF_FUNCTION_TYPE_VAR_4
5148 #undef DEF_FUNCTION_TYPE_VAR_5
5149 #undef DEF_POINTER_TYPE
5150 builtin_types[(int) BT_LAST] = NULL_TREE;
5151 }
5152
5153 /* ----------------------------------------------------------------------- *
5154 * BUILTIN ATTRIBUTES *
5155 * ----------------------------------------------------------------------- */
5156
5157 enum built_in_attribute
5158 {
5159 #define DEF_ATTR_NULL_TREE(ENUM) ENUM,
5160 #define DEF_ATTR_INT(ENUM, VALUE) ENUM,
5161 #define DEF_ATTR_IDENT(ENUM, STRING) ENUM,
5162 #define DEF_ATTR_TREE_LIST(ENUM, PURPOSE, VALUE, CHAIN) ENUM,
5163 #include "builtin-attrs.def"
5164 #undef DEF_ATTR_NULL_TREE
5165 #undef DEF_ATTR_INT
5166 #undef DEF_ATTR_IDENT
5167 #undef DEF_ATTR_TREE_LIST
5168 ATTR_LAST
5169 };
5170
5171 static GTY(()) tree built_in_attributes[(int) ATTR_LAST];
5172
/* Build the attribute trees described in builtin-attrs.def and store them
   in the built_in_attributes array.  */
static void
install_builtin_attributes (void)
{
  /* Fill in the built_in_attributes array.  */
#define DEF_ATTR_NULL_TREE(ENUM)				\
  built_in_attributes[(int) ENUM] = NULL_TREE;
#define DEF_ATTR_INT(ENUM, VALUE)				\
  built_in_attributes[(int) ENUM] = build_int_cst (NULL_TREE, VALUE);
#define DEF_ATTR_IDENT(ENUM, STRING)				\
  built_in_attributes[(int) ENUM] = get_identifier (STRING);
#define DEF_ATTR_TREE_LIST(ENUM, PURPOSE, VALUE, CHAIN)	\
  built_in_attributes[(int) ENUM]			\
    = tree_cons (built_in_attributes[(int) PURPOSE],	\
		 built_in_attributes[(int) VALUE],	\
		 built_in_attributes[(int) CHAIN]);
#include "builtin-attrs.def"
#undef DEF_ATTR_NULL_TREE
#undef DEF_ATTR_INT
#undef DEF_ATTR_IDENT
#undef DEF_ATTR_TREE_LIST
}
5194
5195 /* Handle a "const" attribute; arguments as in
5196 struct attribute_spec.handler. */
5197
5198 static tree
5199 handle_const_attribute (tree *node, tree ARG_UNUSED (name),
5200 tree ARG_UNUSED (args), int ARG_UNUSED (flags),
5201 bool *no_add_attrs)
5202 {
5203 if (TREE_CODE (*node) == FUNCTION_DECL)
5204 TREE_READONLY (*node) = 1;
5205 else
5206 *no_add_attrs = true;
5207
5208 return NULL_TREE;
5209 }
5210
5211 /* Handle a "nothrow" attribute; arguments as in
5212 struct attribute_spec.handler. */
5213
5214 static tree
5215 handle_nothrow_attribute (tree *node, tree ARG_UNUSED (name),
5216 tree ARG_UNUSED (args), int ARG_UNUSED (flags),
5217 bool *no_add_attrs)
5218 {
5219 if (TREE_CODE (*node) == FUNCTION_DECL)
5220 TREE_NOTHROW (*node) = 1;
5221 else
5222 *no_add_attrs = true;
5223
5224 return NULL_TREE;
5225 }
5226
5227 /* Handle a "pure" attribute; arguments as in
5228 struct attribute_spec.handler. */
5229
5230 static tree
5231 handle_pure_attribute (tree *node, tree name, tree ARG_UNUSED (args),
5232 int ARG_UNUSED (flags), bool *no_add_attrs)
5233 {
5234 if (TREE_CODE (*node) == FUNCTION_DECL)
5235 DECL_PURE_P (*node) = 1;
5236 /* ??? TODO: Support types. */
5237 else
5238 {
5239 warning (OPT_Wattributes, "%qs attribute ignored",
5240 IDENTIFIER_POINTER (name));
5241 *no_add_attrs = true;
5242 }
5243
5244 return NULL_TREE;
5245 }
5246
5247 /* Handle a "no vops" attribute; arguments as in
5248 struct attribute_spec.handler. */
5249
5250 static tree
5251 handle_novops_attribute (tree *node, tree ARG_UNUSED (name),
5252 tree ARG_UNUSED (args), int ARG_UNUSED (flags),
5253 bool *ARG_UNUSED (no_add_attrs))
5254 {
5255 gcc_assert (TREE_CODE (*node) == FUNCTION_DECL);
5256 DECL_IS_NOVOPS (*node) = 1;
5257 return NULL_TREE;
5258 }
5259
5260 /* Helper for nonnull attribute handling; fetch the operand number
5261 from the attribute argument list. */
5262
5263 static bool
5264 get_nonnull_operand (tree arg_num_expr, unsigned HOST_WIDE_INT *valp)
5265 {
5266 /* Verify the arg number is a constant. */
5267 if (TREE_CODE (arg_num_expr) != INTEGER_CST
5268 || TREE_INT_CST_HIGH (arg_num_expr) != 0)
5269 return false;
5270
5271 *valp = TREE_INT_CST_LOW (arg_num_expr);
5272 return true;
5273 }
5274
/* Handle the "nonnull" attribute.  NODE points to the function type;
   ARGS is the attribute's argument list of operand numbers (or NULL for
   "all pointer arguments").  Sets *NO_ADD_ATTRS and emits an error on
   invalid usage.  Always returns NULL_TREE.  */
static tree
handle_nonnull_attribute (tree *node, tree ARG_UNUSED (name),
			  tree args, int ARG_UNUSED (flags),
			  bool *no_add_attrs)
{
  tree type = *node;
  unsigned HOST_WIDE_INT attr_arg_num;

  /* If no arguments are specified, all pointer arguments should be
     non-null.  Verify a full prototype is given so that the arguments
     will have the correct types when we actually check them later.  */
  if (!args)
    {
      if (!prototype_p (type))
	{
	  error ("nonnull attribute without arguments on a non-prototype");
	  *no_add_attrs = true;
	}
      return NULL_TREE;
    }

  /* Argument list specified.  Verify that each argument number references
     a pointer argument.  */
  for (attr_arg_num = 1; args; args = TREE_CHAIN (args))
    {
      unsigned HOST_WIDE_INT arg_num = 0, ck_num;

      /* Each attribute operand must be a valid constant argument number.  */
      if (!get_nonnull_operand (TREE_VALUE (args), &arg_num))
	{
	  error ("nonnull argument has invalid operand number (argument %lu)",
		 (unsigned long) attr_arg_num);
	  *no_add_attrs = true;
	  return NULL_TREE;
	}

      if (prototype_p (type))
	{
	  function_args_iterator iter;
	  tree argument;

	  /* Walk the parameter list up to position ARG_NUM.  */
	  function_args_iter_init (&iter, type);
	  for (ck_num = 1; ; ck_num++, function_args_iter_next (&iter))
	    {
	      argument = function_args_iter_cond (&iter);
	      if (!argument || ck_num == arg_num)
		break;
	    }

	  /* ARG_NUM must designate an existing parameter (a trailing
	     VOID_TYPE marks the end of a prototype).  */
	  if (!argument
	      || TREE_CODE (argument) == VOID_TYPE)
	    {
	      error ("nonnull argument with out-of-range operand number "
		     "(argument %lu, operand %lu)",
		     (unsigned long) attr_arg_num, (unsigned long) arg_num);
	      *no_add_attrs = true;
	      return NULL_TREE;
	    }

	  /* And that parameter must be a pointer.  */
	  if (TREE_CODE (argument) != POINTER_TYPE)
	    {
	      error ("nonnull argument references non-pointer operand "
		     "(argument %lu, operand %lu)",
		     (unsigned long) attr_arg_num, (unsigned long) arg_num);
	      *no_add_attrs = true;
	      return NULL_TREE;
	    }
	}
    }

  return NULL_TREE;
}
5347
5348 /* Handle a "sentinel" attribute. */
5349
5350 static tree
5351 handle_sentinel_attribute (tree *node, tree name, tree args,
5352 int ARG_UNUSED (flags), bool *no_add_attrs)
5353 {
5354 if (!prototype_p (*node))
5355 {
5356 warning (OPT_Wattributes,
5357 "%qs attribute requires prototypes with named arguments",
5358 IDENTIFIER_POINTER (name));
5359 *no_add_attrs = true;
5360 }
5361 else
5362 {
5363 if (!stdarg_p (*node))
5364 {
5365 warning (OPT_Wattributes,
5366 "%qs attribute only applies to variadic functions",
5367 IDENTIFIER_POINTER (name));
5368 *no_add_attrs = true;
5369 }
5370 }
5371
5372 if (args)
5373 {
5374 tree position = TREE_VALUE (args);
5375
5376 if (TREE_CODE (position) != INTEGER_CST)
5377 {
5378 warning (0, "requested position is not an integer constant");
5379 *no_add_attrs = true;
5380 }
5381 else
5382 {
5383 if (tree_int_cst_lt (position, integer_zero_node))
5384 {
5385 warning (0, "requested position is less than zero");
5386 *no_add_attrs = true;
5387 }
5388 }
5389 }
5390
5391 return NULL_TREE;
5392 }
5393
5394 /* Handle a "noreturn" attribute; arguments as in
5395 struct attribute_spec.handler. */
5396
5397 static tree
5398 handle_noreturn_attribute (tree *node, tree name, tree ARG_UNUSED (args),
5399 int ARG_UNUSED (flags), bool *no_add_attrs)
5400 {
5401 tree type = TREE_TYPE (*node);
5402
5403 /* See FIXME comment in c_common_attribute_table. */
5404 if (TREE_CODE (*node) == FUNCTION_DECL)
5405 TREE_THIS_VOLATILE (*node) = 1;
5406 else if (TREE_CODE (type) == POINTER_TYPE
5407 && TREE_CODE (TREE_TYPE (type)) == FUNCTION_TYPE)
5408 TREE_TYPE (*node)
5409 = build_pointer_type
5410 (build_type_variant (TREE_TYPE (type),
5411 TYPE_READONLY (TREE_TYPE (type)), 1));
5412 else
5413 {
5414 warning (OPT_Wattributes, "%qs attribute ignored",
5415 IDENTIFIER_POINTER (name));
5416 *no_add_attrs = true;
5417 }
5418
5419 return NULL_TREE;
5420 }
5421
5422 /* Handle a "leaf" attribute; arguments as in
5423 struct attribute_spec.handler. */
5424
5425 static tree
5426 handle_leaf_attribute (tree *node, tree name,
5427 tree ARG_UNUSED (args),
5428 int ARG_UNUSED (flags), bool *no_add_attrs)
5429 {
5430 if (TREE_CODE (*node) != FUNCTION_DECL)
5431 {
5432 warning (OPT_Wattributes, "%qE attribute ignored", name);
5433 *no_add_attrs = true;
5434 }
5435 if (!TREE_PUBLIC (*node))
5436 {
5437 warning (OPT_Wattributes, "%qE attribute has no effect", name);
5438 *no_add_attrs = true;
5439 }
5440
5441 return NULL_TREE;
5442 }
5443
5444 /* Handle a "malloc" attribute; arguments as in
5445 struct attribute_spec.handler. */
5446
5447 static tree
5448 handle_malloc_attribute (tree *node, tree name, tree ARG_UNUSED (args),
5449 int ARG_UNUSED (flags), bool *no_add_attrs)
5450 {
5451 if (TREE_CODE (*node) == FUNCTION_DECL
5452 && POINTER_TYPE_P (TREE_TYPE (TREE_TYPE (*node))))
5453 DECL_IS_MALLOC (*node) = 1;
5454 else
5455 {
5456 warning (OPT_Wattributes, "%qs attribute ignored",
5457 IDENTIFIER_POINTER (name));
5458 *no_add_attrs = true;
5459 }
5460
5461 return NULL_TREE;
5462 }
5463
/* Fake handler for attributes we don't properly support.  */

/* Accept the attribute unconditionally without acting on it: all
   arguments are ignored and the attribute is simply kept on the node
   (no_add_attrs is left untouched).  */

tree
fake_attribute_handler (tree * ARG_UNUSED (node),
			tree ARG_UNUSED (name),
			tree ARG_UNUSED (args),
			int ARG_UNUSED (flags),
			bool * ARG_UNUSED (no_add_attrs))
{
  return NULL_TREE;
}
5475
5476 /* Handle a "type_generic" attribute. */
5477
5478 static tree
5479 handle_type_generic_attribute (tree *node, tree ARG_UNUSED (name),
5480 tree ARG_UNUSED (args), int ARG_UNUSED (flags),
5481 bool * ARG_UNUSED (no_add_attrs))
5482 {
5483 /* Ensure we have a function type. */
5484 gcc_assert (TREE_CODE (*node) == FUNCTION_TYPE);
5485
5486 /* Ensure we have a variadic function. */
5487 gcc_assert (!prototype_p (*node) || stdarg_p (*node));
5488
5489 return NULL_TREE;
5490 }
5491
5492 /* Handle a "vector_size" attribute; arguments as in
5493 struct attribute_spec.handler. */
5494
5495 static tree
5496 handle_vector_size_attribute (tree *node, tree name, tree args,
5497 int ARG_UNUSED (flags),
5498 bool *no_add_attrs)
5499 {
5500 unsigned HOST_WIDE_INT vecsize, nunits;
5501 enum machine_mode orig_mode;
5502 tree type = *node, new_type, size;
5503
5504 *no_add_attrs = true;
5505
5506 size = TREE_VALUE (args);
5507
5508 if (!host_integerp (size, 1))
5509 {
5510 warning (OPT_Wattributes, "%qs attribute ignored",
5511 IDENTIFIER_POINTER (name));
5512 return NULL_TREE;
5513 }
5514
5515 /* Get the vector size (in bytes). */
5516 vecsize = tree_low_cst (size, 1);
5517
5518 /* We need to provide for vector pointers, vector arrays, and
5519 functions returning vectors. For example:
5520
5521 __attribute__((vector_size(16))) short *foo;
5522
5523 In this case, the mode is SI, but the type being modified is
5524 HI, so we need to look further. */
5525
5526 while (POINTER_TYPE_P (type)
5527 || TREE_CODE (type) == FUNCTION_TYPE
5528 || TREE_CODE (type) == ARRAY_TYPE)
5529 type = TREE_TYPE (type);
5530
5531 /* Get the mode of the type being modified. */
5532 orig_mode = TYPE_MODE (type);
5533
5534 if ((!INTEGRAL_TYPE_P (type)
5535 && !SCALAR_FLOAT_TYPE_P (type)
5536 && !FIXED_POINT_TYPE_P (type))
5537 || (!SCALAR_FLOAT_MODE_P (orig_mode)
5538 && GET_MODE_CLASS (orig_mode) != MODE_INT
5539 && !ALL_SCALAR_FIXED_POINT_MODE_P (orig_mode))
5540 || !host_integerp (TYPE_SIZE_UNIT (type), 1)
5541 || TREE_CODE (type) == BOOLEAN_TYPE)
5542 {
5543 error ("invalid vector type for attribute %qs",
5544 IDENTIFIER_POINTER (name));
5545 return NULL_TREE;
5546 }
5547
5548 if (vecsize % tree_low_cst (TYPE_SIZE_UNIT (type), 1))
5549 {
5550 error ("vector size not an integral multiple of component size");
5551 return NULL;
5552 }
5553
5554 if (vecsize == 0)
5555 {
5556 error ("zero vector size");
5557 return NULL;
5558 }
5559
5560 /* Calculate how many units fit in the vector. */
5561 nunits = vecsize / tree_low_cst (TYPE_SIZE_UNIT (type), 1);
5562 if (nunits & (nunits - 1))
5563 {
5564 error ("number of components of the vector not a power of two");
5565 return NULL_TREE;
5566 }
5567
5568 new_type = build_vector_type (type, nunits);
5569
5570 /* Build back pointers if needed. */
5571 *node = reconstruct_complex_type (*node, new_type);
5572
5573 return NULL_TREE;
5574 }
5575
5576 /* Handle a "vector_type" attribute; arguments as in
5577 struct attribute_spec.handler. */
5578
5579 static tree
5580 handle_vector_type_attribute (tree *node, tree name, tree ARG_UNUSED (args),
5581 int ARG_UNUSED (flags),
5582 bool *no_add_attrs)
5583 {
5584 /* Vector representative type and size. */
5585 tree rep_type = *node;
5586 tree rep_size = TYPE_SIZE_UNIT (rep_type);
5587 tree rep_name;
5588
5589 /* Vector size in bytes and number of units. */
5590 unsigned HOST_WIDE_INT vec_bytes, vec_units;
5591
5592 /* Vector element type and mode. */
5593 tree elem_type;
5594 enum machine_mode elem_mode;
5595
5596 *no_add_attrs = true;
5597
5598 /* Get the representative array type, possibly nested within a
5599 padding record e.g. for alignment purposes. */
5600
5601 if (TYPE_IS_PADDING_P (rep_type))
5602 rep_type = TREE_TYPE (TYPE_FIELDS (rep_type));
5603
5604 if (TREE_CODE (rep_type) != ARRAY_TYPE)
5605 {
5606 error ("attribute %qs applies to array types only",
5607 IDENTIFIER_POINTER (name));
5608 return NULL_TREE;
5609 }
5610
5611 /* Silently punt on variable sizes. We can't make vector types for them,
5612 need to ignore them on front-end generated subtypes of unconstrained
5613 bases, and this attribute is for binding implementors, not end-users, so
5614 we should never get there from legitimate explicit uses. */
5615
5616 if (!host_integerp (rep_size, 1))
5617 return NULL_TREE;
5618
5619 /* Get the element type/mode and check this is something we know
5620 how to make vectors of. */
5621
5622 elem_type = TREE_TYPE (rep_type);
5623 elem_mode = TYPE_MODE (elem_type);
5624
5625 if ((!INTEGRAL_TYPE_P (elem_type)
5626 && !SCALAR_FLOAT_TYPE_P (elem_type)
5627 && !FIXED_POINT_TYPE_P (elem_type))
5628 || (!SCALAR_FLOAT_MODE_P (elem_mode)
5629 && GET_MODE_CLASS (elem_mode) != MODE_INT
5630 && !ALL_SCALAR_FIXED_POINT_MODE_P (elem_mode))
5631 || !host_integerp (TYPE_SIZE_UNIT (elem_type), 1))
5632 {
5633 error ("invalid element type for attribute %qs",
5634 IDENTIFIER_POINTER (name));
5635 return NULL_TREE;
5636 }
5637
5638 /* Sanity check the vector size and element type consistency. */
5639
5640 vec_bytes = tree_low_cst (rep_size, 1);
5641
5642 if (vec_bytes % tree_low_cst (TYPE_SIZE_UNIT (elem_type), 1))
5643 {
5644 error ("vector size not an integral multiple of component size");
5645 return NULL;
5646 }
5647
5648 if (vec_bytes == 0)
5649 {
5650 error ("zero vector size");
5651 return NULL;
5652 }
5653
5654 vec_units = vec_bytes / tree_low_cst (TYPE_SIZE_UNIT (elem_type), 1);
5655 if (vec_units & (vec_units - 1))
5656 {
5657 error ("number of components of the vector not a power of two");
5658 return NULL_TREE;
5659 }
5660
5661 /* Build the vector type and replace. */
5662
5663 *node = build_vector_type (elem_type, vec_units);
5664 rep_name = TYPE_NAME (rep_type);
5665 if (TREE_CODE (rep_name) == TYPE_DECL)
5666 rep_name = DECL_NAME (rep_name);
5667 TYPE_NAME (*node) = rep_name;
5668 TYPE_REPRESENTATIVE_ARRAY (*node) = rep_type;
5669
5670 return NULL_TREE;
5671 }
5672
5673 /* ----------------------------------------------------------------------- *
5674 * BUILTIN FUNCTIONS *
5675 * ----------------------------------------------------------------------- */
5676
/* Worker for DEF_BUILTIN.  Possibly define a builtin function with one or two
   names.  Does not declare a non-__builtin_ function if flag_no_builtin, or
   if nonansi_p and flag_no_nonansi_builtin.

   FNCODE is the builtin's enumeration value, NAME its "__builtin_"-prefixed
   name, FNTYPE/LIBTYPE the types of the builtin and library variants,
   FNATTRS the attribute list to attach.  If BOTH_P, the bare library name
   is declared as well; if FALLBACK_P, the builtin expands to a call to the
   library function when not handled inline.  IMPLICIT_P is passed through
   to set_builtin_decl.  */

static void
def_builtin_1 (enum built_in_function fncode,
	       const char *name,
	       enum built_in_class fnclass,
	       tree fntype, tree libtype,
	       bool both_p, bool fallback_p,
	       bool nonansi_p ATTRIBUTE_UNUSED,
	       tree fnattrs, bool implicit_p)
{
  tree decl;
  const char *libname;

  /* Preserve an already installed decl.  It most likely was setup in advance
     (e.g. as part of the internal builtins) for specific reasons.  */
  if (builtin_decl_explicit (fncode) != NULL_TREE)
    return;

  /* Deriving LIBNAME by stripping the prefix below is only valid when the
     name actually starts with "__builtin_".  */
  gcc_assert ((!both_p && !fallback_p)
	      || !strncmp (name, "__builtin_",
			   strlen ("__builtin_")));

  libname = name + strlen ("__builtin_");
  decl = add_builtin_function (name, fntype, fncode, fnclass,
			       (fallback_p ? libname : NULL),
			       fnattrs);
  if (both_p)
    /* ??? This is normally further controlled by command-line options
       like -fno-builtin, but we don't have them for Ada.  */
    add_builtin_function (libname, libtype, fncode, fnclass,
			  NULL, fnattrs);

  /* Record the __builtin_ decl (not the library variant) as the canonical
     one for FNCODE.  */
  set_builtin_decl (fncode, decl, implicit_p);
}
5714
/* C dialect flags, kept at 0 since Ada has no such dialects.
   NOTE(review): they appear to exist only so that the availability
   conditions expanded from builtins.def below compile -- confirm
   against builtins.def before removing.  */
static int flag_isoc94 = 0;
static int flag_isoc99 = 0;
5717
/* Install what the common builtins.def offers.  */

/* Each DEF_BUILTIN entry in builtins.def expands to one guarded call to
   def_builtin_1, looking up the function and library types in the
   builtin_types array and the attribute list in built_in_attributes,
   both previously populated by the install_builtin_* routines.  */

static void
install_builtin_functions (void)
{
#define DEF_BUILTIN(ENUM, NAME, CLASS, TYPE, LIBTYPE, BOTH_P, FALLBACK_P, \
		    NONANSI_P, ATTRS, IMPLICIT, COND)			\
  if (NAME && COND)							\
    def_builtin_1 (ENUM, NAME, CLASS,                                   \
                   builtin_types[(int) TYPE],                           \
                   builtin_types[(int) LIBTYPE],                        \
                   BOTH_P, FALLBACK_P, NONANSI_P,                       \
                   built_in_attributes[(int) ATTRS], IMPLICIT);
#include "builtins.def"
#undef DEF_BUILTIN
}
5734
5735 /* ----------------------------------------------------------------------- *
5736 * BUILTIN FUNCTIONS *
5737 * ----------------------------------------------------------------------- */
5738
/* Install the builtin functions we might need.  */

/* Entry point called by the front-end initialization code.  The ordering
   of the calls below is significant: types and attributes must exist
   before any builtin function referring to them is declared, and the
   middle-end/target builtins are installed before the generic set so
   that their specialized definitions take precedence (see comments on
   each step).  */

void
gnat_install_builtins (void)
{
  install_builtin_elementary_types ();
  install_builtin_function_types ();
  install_builtin_attributes ();

  /* Install builtins used by generic middle-end pieces first.  Some of these
     know about internal specificities and control attributes accordingly, for
     instance __builtin_alloca vs no-throw and -fstack-check.  We will ignore
     the generic definition from builtins.def.  */
  build_common_builtin_nodes ();

  /* Now, install the target specific builtins, such as the AltiVec family on
     ppc, and the common set as exposed by builtins.def.  */
  targetm.init_builtins ();
  install_builtin_functions ();
}
5759
5760 #include "gt-ada-utils.h"
5761 #include "gtype-ada.h"